+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.91yrPoDpBb --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [8061/8061 modules configured] [2814/4464 modules rendered]
[2 ymakes processing] [8061/8061 modules configured] [4314/4464 modules rendered]
[2 ymakes processing] [8061/8061 modules configured] [4464/4464 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [8067/8067 modules configured] [4464/4464 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 3.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut
| 3.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a
[... successful [AR]/[CC]/[LD] and PREPARE progress entries elided; only the FAILED steps are kept below ...]
|11.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|22.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|26.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|36.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
|37.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp
$(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |47.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |47.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |48.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |49.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |49.0%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |48.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |48.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |48.5%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |48.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan/libclang_rt.asan-x86_64.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |47.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |46.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a 
|46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |45.9%| {BAZEL_DOWNLOAD} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |46.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |45.4%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |44.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a 
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |42.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |42.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |43.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/batch/libkqp-common-batch.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a 
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |43.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IInputFormat.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/JSON.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Executors/PollingQueue.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ConcatProcessor.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowOutputFormat.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseDatabaseAndTableName.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatSettingName.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDescribeTableQuery.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCheckQuery.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionaryAttributeDeclaration.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionary.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |43.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |43.1%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeTuple.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeArray.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/BlockStreamProfileInfo.cpp |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/ICompressionCodec.cpp |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IntervalKind.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnLowCardinality.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionFactory.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionFactory.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnAggregateFunction.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnCompressed.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/IAggregateFunction.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnNullable.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnString.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnConst.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnArray.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Exception.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/IColumn.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ClickHouseRevision.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFunction.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/FilterDescription.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/demangle.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUTImpl.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |42.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnTuple.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Config/AbstractConfigurationComparison.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/AlignedBuffer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/MaskOperations.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Allocator.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/StringRef.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMetrics.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMemoryTracker.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnDecimal.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProfileEvents.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/DNSResolver.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/OpenSSLHelpers.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadStatus.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PODArray.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/RemoteHostFilter.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hasLinuxCapability.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecNone.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getNumberOfPhysicalCPUCores.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |42.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/setThreadName.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/randomSeed.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/thread_local_rng.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferBase.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BlockInfo.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedWriteBuffer.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBuffer.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/LZ4_decompress_faster.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Block.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/ColumnWithTypeAndName.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockOutputStream.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ExecutionSpeedLimits.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ColumnGathererStream.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeAggregateFunction.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime64.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDecimalBase.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeMap.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeString.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinalityHelpers.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationArray.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationMap.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSettingsProfileElement.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDatabaseOrNone.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunctionWithKeyValueArguments.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIdentifier.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |41.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowGrantsQuery.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCase.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/Lexer.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionary.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp 
|40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Chunk.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionCombinatorFactory.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnsCommon.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowInputFormat.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IProcessor.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowOutputFormat.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowOutputFormat.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/RowInputFormatWithDiagnosticInfo.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/OutputStreamToOutputFormat.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUnionQueryElement.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISink.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowGrantsQuery.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablesInSelectQuery.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNullable.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/SynchronousReader.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |39.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |39.4%| PREPARE $(JDK_DEFAULT-472926544) |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |39.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |38.9%| PREPARE $(WITH_JDK-sbr:7832760150) |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |38.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |38.8%| PREPARE $(CLANG-1922233694) |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |38.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |38.0%| PREPARE $(JDK17-472926544) |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |37.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |37.5%| PREPARE $(WITH_JDK17-sbr:7832760150) |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |37.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |37.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/maintenance/libydb-services-maintenance.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |37.2%| PREPARE $(GDB) |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |37.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |37.2%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |37.3%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |37.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |37.6%| RESOURCE $(sbr:4966407557) |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |37.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |37.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |37.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |37.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |37.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/mon/libydb-core-mon.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |37.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |37.9%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |37.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |38.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |38.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |38.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/certifi/libpy3library-python-certifi.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |38.3%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |38.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |38.4%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |38.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |38.5%| PREPARE $(BLACK_LINTER-sbr:8415400280) |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |38.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |38.7%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |38.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |38.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |38.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |38.8%| PREPARE $(CLANG16-1380963495) |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |38.9%| PREPARE $(CLANG-2518231432) |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |38.9%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |39.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |39.2%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |39.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |39.0%| PREPARE $(CLANG18-3363451693) |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |39.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |39.5%| 
[CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |38.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |38.8%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |39.5%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |39.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |39.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |39.8%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |40.0%| [LD] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |40.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a >> conftest.py::black [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_mysql.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] >> test_workload.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp >> test_quoting.py::flake8 [GOOD] |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |40.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/helpers.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] |40.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |40.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |40.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |40.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |40.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp >> 
test.py::flake8 [GOOD] |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |40.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |40.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a >> test_encryption.py::flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp >> test_example.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp >> test.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp >> test_sql_streaming.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/journal_client.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp >> test_actorsystem.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp >> test_restarts.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp >> test.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] >> column_table_helper.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] 
>> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp >> tablet_scheme_tests.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test_dml.py::flake8 [GOOD] |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/columnar_statistics.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp >> tier_delete.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp >> ttl_delete_s3.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp >> ttl_unavailable_s3.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/blob_reader.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp >> test.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |41.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |41.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |41.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |40.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |40.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a >> test_query_cache.py::flake8 [GOOD] |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |40.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/transaction_client/helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |40.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp >> zip_bomb.py::flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |41.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |41.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp >> test_clickbench.py::flake8 [GOOD] |41.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp >> test_tpch.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |41.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp >> test_workload.py::flake8 [GOOD] |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp >> test_mixed.py::flake8 [GOOD] |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp >> test_big_state.py::flake8 [GOOD] |41.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> common.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_mysql.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |42.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp >> test_disk.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 
[GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/mixedpy/flake8 >> test_mixed.py::flake8 [GOOD] |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp >> test_auditlog.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/logging/zstd_compression.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |42.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |42.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |42.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp >> test.py::flake8 [GOOD] |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp >> 
test_schemeshard_limits.py::flake8 [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |43.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |43.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |42.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |42.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |43.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |43.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |43.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/ypath/helpers.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |43.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |43.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |43.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_9c56ea1b7d34c7d8f6329bfcfd.o |43.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_f364ff47dd846bb94c3e83f2a8.o |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |43.7%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_ec3163328cb5ab8f222e66dd41.o |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_d1ba757d227a70ff4910717854.o |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |43.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |43.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_a5c82b9ecb3bf738ea9e628123.o |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/45be6e48ea8f2ac38577085d0d_raw.auxcpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |43.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_4f9d76a39d2f7ba2b9f198f28c.o |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |43.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_ac7eeedcbf7038a60a7673762a.o |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_938861be99a6cedecb22904193.o |43.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_1b4bf9f1f46a6111d16337dee0.o |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_9caa7583d1e4955730dbd6f3fd.o |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |44.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_1a397c908c9859dc40a771ddf1.o |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |44.0%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_199ab4be3deaff025e1ab92143.o |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_5831cbd77ecc92a241b6cf1ea2.o |44.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |44.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |44.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |44.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |44.4%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_93197284f82f9ae9fc0256ee95.o |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_8e9f839326d1a9224e4b2e15e2.o |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_3efa41af97c0510be1d2e99f05.o |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/01e1cebcd98e239de10ed70b94_raw.auxcpp |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |44.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/cms/ut_helpers.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_a3fc9153ce93c876df4c755b36.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_4ea639aebd19c36ee3cdb4479d.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_5525925030ba2866c1b1040841.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_96d3f689fe7d6b16f9da31e376.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |44.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/static/libcomplete-name-static.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8649dacdf340abe7c53df69638.o |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/static/libcomplete-name-static.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |44.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a 
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2de2accab39327e9b10680901f.o |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... 
grpc.pb.h} |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_340b457b8174f6293d5748588e.o |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/tools/fqrun/fqrun |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_3e8bf44ed681ff82ae143aaec3.o |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_023a23fcfdf79043d814bb8aab.o |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/df691ac52d0b755cb039db39b5_raw.auxcpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |45.3%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |45.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |45.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |45.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |45.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_34efc91ed920a8b27d971c44a6.o |45.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |45.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_0a29eb8c456ab5b998f2d12ba1.o |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |45.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |45.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |45.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |45.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_f42b1add98328abd34a53e4aef.o |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |45.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... 
grpc.pb.h} |45.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_53073eb93c76466fca8f474c5f.o |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |45.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |45.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/552c373b422666221556a5a9bd_raw.auxcpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |45.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_a0bee0ed11edab150a8172af5c.o |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |45.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |45.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |45.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |45.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |45.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |45.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |45.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |45.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |45.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |45.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... 
grpc.pb.h} |45.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |45.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |45.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/c52bef66453eb652f14989b79d_raw.auxcpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |45.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |45.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |45.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |45.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |45.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.{pb.h ... 
grpc.pb.h} |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |45.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/0dff0b13f2d02975a4a973a1e8_raw.auxcpp |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_79fff48e52404c1611400b8a2c.o |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/b9d4e191a9fd03221b46c5af49_raw.auxcpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |45.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |45.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... 
grpc.pb.h} |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |45.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_55f2556d6eafcd77ebc4c517d4.o |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/cms/ut/ydb-services-cms-ut |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |46.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_b91160bcee04ad1f57e80af064.o |46.0%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |46.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_320dc1aa155c1ac9123afec54c.o |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |46.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |45.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |46.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_0c4ce75555cfd5c0dd63e9dfbd.o |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_c84c8d511807425dc18073129b.o |46.2%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |46.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/4129dc9878c2058404494fb088_raw.auxcpp |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_173de88696c8239b22567e7ece.o |46.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/7179c606fb7373cb8f04d9971a_raw.auxcpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_9314464e3560b2511ac931acd9.o |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8db6616d40f8020d0632222fe3.o |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_40093eea68489f3f0f11ed00e4.o |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_4cf502b19212965f14d6660a20.o |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... 
grpc.pb.h} |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.{pb.h ... grpc.pb.h} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_3e7b0e88092417daa72b89bfde.o |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_f580ed931409135de17b6aff8b.o |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_7897d1b03fc78e49620c18f81a.o |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_909bbfbd36bf4d7cf0544f0406.o |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_083605b223ce507d0fef919d0d.o |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_4246ee6b3505ab22753eb44ce7.o |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_69ec8108bd4bdc059abab5b374.o |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_a40d299361b06d7622f78b2238.o |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1c0f807c059fe226699115f242.o |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |46.5%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |46.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |46.7%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |46.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |46.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1515671fe2dfb16894dfbe901e.o |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/trace_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |46.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure/ut_data_erasure.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_b0a88dfa3c67850033b8c21ce7.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_19dadf8afeb30502d735b660ce.o |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... 
grpc.pb.h} |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... 
grpc.pb.h} |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |46.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |47.0%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_0b6bc206b470900b0b94249ade.o |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_e3640190fc6b98b359c2a9e990.o |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |47.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |47.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_41295709119857c2e0f1a41f31.o |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... 
grpc.pb.h} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... grpc.pb.h} |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |47.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... 
grpc.pb.h} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_dad83df4ab6e900f5632eadf2b.o |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_c43ce24509a50b033fa4050a33.o |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_8d06c552fa5830dcf8274026f0.o |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1d56c9a5fd80acfc9c28483f06.o |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_1a75593bdb000d1e31dd6e96d5.o |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/32049c3ef1f885f0e34984b3bf_raw.auxcpp |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... 
grpc.pb.h} |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_363bd76b3bc04955e01c402ffc.o |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7e7e709046fe8acad91d924675.o |47.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/e294827eb799173498fe26d398_raw.auxcpp |47.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_6f577a0a3d7a659599df51626e.o |47.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.{pb.h ... 
grpc.pb.h} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |47.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... 
grpc.pb.h} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/delayed_executor.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |48.2%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_88a0e187d0d0f8235a5e3f2fff.o |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_323a17e94d8d570989807d19d3.o |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/7fdc9492198d5f306aa05e0de1_raw.auxcpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_2b60b599fc27771d93e79090fc.o |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_2e1dd9c9bc385e6efd22b78136.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_a99732b1d02edd62e674483ffe.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/rpc/viable_peer_registry.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/rpc/retrying_channel.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |48.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |48.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |48.8%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_817637e20254f271e9be62d14e.o |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_ef822f612b696eb514a5565056.o |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_3fff1f10d19427b0695d82e1c0.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_69bb4174ba5b22bacbabacd799.o |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_bac05c8b5a79735451f58d9322.o |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_b90d4f51856a4b346b62877f37.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_4c839b0fc6ee60e0bb4adc7079.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_ddabc037fdef8d5f7c6c2f9b47.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_912038ceef7de48e0e15c25307.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.{pb.h ... 
grpc.pb.h} |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_b4d1a41a4041b6372d2a384279.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_a6e393b6d53f4c73feac80b55c.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_0665be2c60952715f39eb25568.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... 
grpc.pb.h} |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_0221134ccf2a949614ce2a2056.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_1c18035bb4b3759d5e029db746.o |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... 
grpc.pb.h} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_862077c20b8d599559a861c2df.o |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_4b2e093abff756c97b675c0a31.o |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... 
grpc.pb.h} |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/docs/generator/generator |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_71d73932c95681fccfc7215041.o |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/security/ut/ydb-core-security-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_133cf22e149f008572dab30674.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_e9b644ce7912a7deb04bbf40a8.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_903d4758faea71f1363e296b3f.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/e01aded916ad04e888f13223cf_raw.auxcpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |49.4%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... 
grpc.pb.h} |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_452efd8b0828678a61ff4e0569.o |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_79d897640a3a634a87f173e2f4.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_a54664d42025a3be375f961b82.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |49.4%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_51000f45ee1f1ab0908a7e71c9.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_a388edbfdbfbac232101c7f510.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_d009f62008041e2f09cdbf7def.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... 
|49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/objcopy_ac8dbe7f54a2cb7efb6636f75f.o
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp
|49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h}
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp
|49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o
|49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h}
|49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h}
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp
|49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o
|49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_bf6c9c02784d65e20a01685ce8.o
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp
|49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h}
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_895e78a038dc7069fda56c2e82.o
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp
|49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h}
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp
|49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h}
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_0e928e66807fd553d7fcaa58a3.o
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_52476c20dac0af4f59edc2917e.o
|49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/e1ff312a3308444783623a7c6e_raw.auxcpp
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o
|49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h}
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_5dc9c76fd90ae0562084321e87.o
|49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h}
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_496e4638abf3c5ef12eafab52c.o
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_5051832ffa0b6b13cebe014eb1.o
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_f363a941fa24746cadffc60594.o
|49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h}
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_696078ddd4c2d0788472b3ebfe.o
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o
|49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp
|49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h}
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp
|49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h}
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp
|49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h}
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp
|49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h}
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a
|49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp
|49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots
|49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp
|49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut
|49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp
|49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant
|49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a
|49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h}
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp
|49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o
|49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h}
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp
|49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut
|49.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
|49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp
|49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data
|49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp
|49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a
|49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp
|49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
|49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut
|49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp
|49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp
|49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a
|49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp
|49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp
|49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp
|49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d1dee10c0c00d50989b086bd3f.o
|50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp
|50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_0553360a969b2c9633badb428d.o
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp
|50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o
|50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp
|50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_1d0482d354dc270d18e7123281.o
|50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_8f2fbd9f79880fbfa3c1838d80.o
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp
|50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp
|50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc}
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp
|50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp
|50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc}
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_backup.pb.{h, cc}
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc}
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc}
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc}
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc}
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc}
|50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_d2e759e2d0ff1243166a3bc7d9.o
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp
|50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp
|50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc}
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp
|50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h}
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp
|50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_51562f83ff52d1ceaac0c36a08.o
|50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc}
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp
|50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o
|50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_53273ad3976098fa8cbd55f5a9.o
|50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp
|50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h}
|50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp
|50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp
|50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp
|50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp
|50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h}
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp
|50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp
|50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_8bf6865076719c3317e71551a4.o
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp
|50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h}
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp
|50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp
|50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h}
|50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp
|49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h}
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp
|49.2%| COMPACTING CACHE 657.4MiB
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp
|49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp
|49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp
|49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp
|49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp
|49.2%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8
|49.2%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8
|49.2%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8
|49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp
|49.2%| [TS] {RESULT} ydb/core/viewer/tests/flake8
|49.2%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8
|49.2%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8
|49.2%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8
|49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8
|49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8
|49.2%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8
|49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8
|49.2%| [TS] {RESULT} ydb/tests/functional/cms/flake8
|49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8
|49.2%| [TS] {RESULT} ydb/tests/library/ut/flake8
|49.2%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8
|49.2%| [TS] {RESULT} ydb/tests/sql/flake8
|49.3%| [TS] {RESULT} ydb/tests/olap/oom/flake8
|49.3%| [TS] {RESULT} ydb/tests/functional/wardens/flake8
|49.3%| [TS] {RESULT} ydb/tests/example/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8
|49.3%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8
|49.3%| [TS] {RESULT} ydb/tests/functional/canonical/flake8
|49.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8
|49.3%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black
|49.3%| [TS] {RESULT} ydb/tests/functional/hive/flake8
|49.3%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8
|49.3%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8
|49.3%| [TS] {RESULT} ydb/tests/olap/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8
|49.3%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8
|49.3%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8
|49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp
|49.4%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black
|49.4%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8
|49.4%| [TS] {RESULT} ydb/tests/fq/s3/flake8
|49.4%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8
|49.4%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8
|49.4%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8
|49.4%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8
|49.4%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8
|49.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8
|49.4%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8
|49.4%| [TS] {RESULT} ydb/tests/sql/large/flake8
|49.4%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8
|49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp
|49.4%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8
|49.4%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8
|49.4%| [TS] {RESULT} ydb/tests/fq/solomon/flake8
|49.4%| [TS] {RESULT} ydb/tests/datashard/dml/flake8
|49.4%| [TS] {RESULT} ydb/tests/functional/audit/flake8
|49.4%| [TS] {RESULT} ydb/tests/functional/rename/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8
|49.5%| [TS] {RESULT} ydb/tests/sql/lib/flake8
|49.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/plans/flake8
|49.5%| [TS] {RESULT} ydb/tests/olap/common/flake8
|49.5%| [TS] {RESULT} ydb/tests/functional/ttl/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8
|49.5%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/yds/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/http_api/flake8
|49.5%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8
|49.5%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8
|49.5%| [TS] {RESULT} ydb/tests/functional/limits/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8
|49.5%| [TS] {RESULT} ydb/tests/functional/restarts/flake8
|49.5%| [TS] {RESULT} ydb/tests/stress/mixedpy/flake8
|49.5%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8
|49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8
|49.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8
|49.6%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8
|49.6%| [TS] {RESULT} ydb/tests/functional/serializable/flake8
|49.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8
|49.6%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8
|49.6%| [TS] {RESULT} ydb/tests/olap/scenario/flake8
|49.6%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8
|49.6%| [TS] {RESULT} ydb/tests/olap/lib/flake8
|49.6%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8
|49.6%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8
|49.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8
|49.6%| [TS] {RESULT} ydb/tests/functional/config/flake8
|49.6%| [TS] {RESULT} ydb/tests/fq/restarts/flake8
|49.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8
|49.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8
|49.6%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8
|49.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8
|49.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8
|49.6%| [TS] {RESULT} ydb/tests/fq/common/flake8
|49.7%| [TS] {RESULT} ydb/tests/functional/encryption/flake8
|49.7%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8
|49.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8
|49.7%| [TS] {RESULT} ydb/tests/functional/serverless/flake8
|50.0%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8
|50.1%| [TS] {RESULT} ydb/tests/olap/load/flake8
|50.1%| [TS] {RESULT} ydb/tests/functional/tenants/flake8
|50.1%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8
|50.1%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8
|50.1%| [TS] {RESULT} ydb/tests/functional/api/flake8
|50.1%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8
|55.3%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a
|55.3%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a
|61.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless
|61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless
|61.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless
|61.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|61.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|61.9%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|62.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a
|62.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a
|62.1%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a
|62.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a
|62.1%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a
|62.1%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a
|62.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb
|62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb
|62.1%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb
|62.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud
|62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud
|62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud
|62.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|62.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|62.2%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|62.8%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql
|62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql
|62.8%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql
|63.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|63.2%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|64.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom
|64.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom
|64.2%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom
|65.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits
|65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits
|65.2%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits
|65.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|65.4%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|65.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
|65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
|65.4%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
|65.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
|65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
|65.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
|65.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a
|65.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a
|65.5%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a
|65.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
|65.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
|65.5%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
|65.7%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a
|65.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a
|65.7%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a
|65.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|65.9%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|66.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|66.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|66.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api
|66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api
|66.0%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api
|66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl
|66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl
|66.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl
|66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
|66.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
|66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
|66.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|66.2%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution
|66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution
|66.3%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution
|66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|66.4%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|67.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|67.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
|67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
|67.4%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|67.4%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
|67.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test
|67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test
|67.7%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test
|68.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common
|68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common
|68.4%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common
|69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|69.1%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example
|69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example
|69.1%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example
|69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests
|69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests
|69.2%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests
|69.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import
|69.2%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import
|69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import
|69.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|69.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|69.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|70.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache
|70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache
|70.2%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache
|70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql
|70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql
|70.3%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql
|70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
|70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|70.3%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
|70.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
|70.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
|70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
|70.7%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
|70.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests
|71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests
|71.0%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests
|71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests
|71.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests
|71.5%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests
|71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
|71.9%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
|71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
|72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|72.0%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api
|72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api
|72.0%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api
|72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|72.1%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|72.1%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
|72.2%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
|72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
|72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|72.2%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|72.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql
|72.3%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql
|72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql
|72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|72.4%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|72.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|72.5%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/docs/generator/generator
|72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator
|72.5%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator
|72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
|72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
|72.6%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
|72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical
|72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical
|72.7%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical
|72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming
|72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming
|72.8%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming
|72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap
|72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap
|72.9%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap
|72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|72.9%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|72.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|73.1%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|73.1%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|73.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig
|73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig
|73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig
|73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics
|73.1%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics
|73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics
|73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli
|73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli
|73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli
|73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas
|73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas
|73.2%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas
|73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|73.2%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo
|73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo
|73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|73.3%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common
|73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common
|73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
|73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|73.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|73.3%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
|73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp
|73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp
|73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp
|73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp
|73.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
|73.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
|73.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
|73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
|73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
|73.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
|73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut
|73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut
|73.4%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut
|73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut
|73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut
|73.4%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest
>> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::List [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime
>> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Void [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Struct
>> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD]
>> CellsFromTupleTest::CellsFromTupleSuccess [GOOD]
>> CellsFromTupleTest::CellsFromTupleSuccessPg
>> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl
>> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleInt32
>> ConvertMiniKQLValueToYdbValueTest::Void [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Struct
>> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::List [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD]
>> GroupWriteTest::WithRead
>> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD]
>> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD]
>> CellsFromTupleTest::CellsFromTupleFails
>> ConvertTableDescription::StorageSettings
>> ConvertTableDescription::StorageSettings [GOOD]
>> ConvertTableDescription::ColumnFamilies [GOOD]
>> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD]
>> CellsFromTupleTest::CellsFromTupleFails [GOOD]
>> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD]
>> CompressionTests::Zstd [GOOD]
>> CompressionTests::Unsupported [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD]
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD]
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD]
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD]
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD]
>> GroupWriteTest::Simple
>> GroupWriteTest::TwoTables
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD]
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD]
release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> GroupWriteTest::ByTableName |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |73.5%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |73.5%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> LongTxService::BasicTransactions >> TCmsTenatsTest::TestNoneTenantPolicy >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TCmsTest::ManageRequestsWrong >> TCmsTest::RequestReplaceDevices >> TCmsTest::TestOutdatedState >> TCmsTest::RequestRestartServicesOk >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::RequestReplaceBrokenDevices |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTest::StateStorageNodesFromOneRing >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TCmsTest::WalleRebootDownNode >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTest::TestKeepAvailableMode >> TCmsTenatsTest::TestTenantLimit >> TCmsTest::TestForceRestartMode >> TCmsTest::CollectInfo >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTest::ActionIssuePartialPermissions >> TCmsTest::RequestRestartServicesMultipleNodes >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot >> TCmsTest::StateRequestNode >> TCmsTest::StateRequest >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> TSchemeShardTest::CreateTable >> TSchemeShardTest::CacheEffectiveACL >> TSchemeShardTest::InitRootAgain >> TSchemeShardTest::CacheEffectiveACL [GOOD] 
>> TSchemeShardTest::ConsistentCopyTable >> TSchemeShardTest::Boot >> TCmsTest::WalleTasks >> TSchemeShardTest::RmDirTwice >> TCmsTest::ManagePermissions >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TCmsTest::TestKeepAvailableModeScheduled >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::MkRmDir >> TCmsTest::RequestRestartServicesRejectSecond >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::CreateTime >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageTwoBrokenRings >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceDevicePDisk >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TCmsTest::StateRequest [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::Notifications >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> TMaintenanceApiTest::CreateTime [GOOD] >> 
TMaintenanceApiTest::LastRefreshTime >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownNode >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::StateStorageTwoRings |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> TSchemeShardTest::DependentOps >> TSchemeShardTest::CreateAlterTableWithCodec |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> LongTxService::LockSubscribe [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TRtmrTest::CreateWithoutTimeCastBuckets >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableFollowers >> 
TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2025-04-17T17:45:42.650398Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:45:42.655642Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpq5J8aa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:45:42.657760Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpq5J8aa/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpq5J8aa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8503717548529463013 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:45:42.859914Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvBeginTx from [1:434:2322] 2025-04-17T17:45:42.860524Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.895377Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:435:2099] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.896246Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:100:2048] 2025-04-17T17:45:42.897205Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:147:2088] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.898208Z node 2 
:LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:435:2099] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.899428Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvCommitTx from [2:147:2088] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.899911Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 without side-effects 2025-04-17T17:45:42.901075Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:435:2099] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.902010Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.903798Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:435:2099] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.904390Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=1 2025-04-17T17:45:42.905564Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-17T17:45:42.906924Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-17T17:45:42.909521Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:100:2048] 2025-04-17T17:45:42.911921Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:83:2074] ServerId# [1:354:2272] TabletId# 72057594037932033 PipeClientId# [2:83:2074] 2025-04-17T17:45:42.932801Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:435:2099] LongTxId# ydb://long-tx/000000001j7sss1xwft7yc0v0q?node_id=3 2025-04-17T17:45:42.933802Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:473:2101] 2025-04-17T17:45:50.232003Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:50.232232Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:50.384778Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:45:52.358086Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:45:52.363660Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpAZzS3W/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:45:52.369259Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpAZzS3W/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpAZzS3W/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11441632528719708851 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:45:52.918783Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:509:2382] for database /dc-1 2025-04-17T17:45:52.919458Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-17T17:45:52.937609Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-17T17:45:52.938574Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Sending navigate request for /dc-1 2025-04-17T17:45:52.963325Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Received navigate response status Ok 2025-04-17T17:45:52.963563Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Sending acquire step to coordinator 72057594046316545 2025-04-17T17:45:53.007748Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Received read step 1000 2025-04-17T17:45:53.008232Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2025-04-17T17:45:53.025066Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:509:2382] 2025-04-17T17:45:53.025315Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-17T17:45:53.038572Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-17T17:45:53.039298Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Sending navigate request for /dc-1 2025-04-17T17:45:53.040472Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Received navigate response status Ok 2025-04-17T17:45:53.040514Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Sending acquire step to coordinator 72057594046316545 2025-04-17T17:45:53.041249Z node 3 :LONG_TX_SERVICE 
DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Received read step 1500 2025-04-17T17:45:53.041892Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2025-04-17T17:45:53.042120Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2025-04-17T17:45:53.051304Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:509:2382] 2025-04-17T17:45:53.051512Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-17T17:45:53.062846Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-17T17:45:53.063727Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Sending navigate request for /dc-1 2025-04-17T17:45:53.073472Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Received navigate response status Ok 2025-04-17T17:45:53.073854Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Sending acquire step to coordinator 72057594046316545 2025-04-17T17:45:53.079302Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Received read step 1500 2025-04-17T17:45:53.079743Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2025-04-17T17:45:53.080189Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001ea72yqkd74j6e5zck2?node_id=3&snapshot=1500%3Amax 2025-04-17T17:45:57.777769Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:45:57.778162Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpyClda5/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:45:57.778317Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpyClda5/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00043e/r3tmp/tmpyClda5/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4669772910268470430 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:45:57.918156Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2025-04-17T17:45:57.918675Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:433:2321] for LockId# 987 LockNode# 5 2025-04-17T17:45:57.975708Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:434:2099] for LockId# 987 LockNode# 5 2025-04-17T17:45:57.976215Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:98:2048] 2025-04-17T17:45:57.977089Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 987 LockNode# 5 2025-04-17T17:45:57.981625Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-17T17:45:57.989687Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:433:2321] for LockId# 123 LockNode# 5 2025-04-17T17:45:57.989840Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:434:2099] for LockId# 123 LockNode# 5 2025-04-17T17:45:57.989989Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 123 LockNode# 5 2025-04-17T17:45:57.990172Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2025-04-17T17:45:57.990303Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2025-04-17T17:45:57.990418Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-17T17:45:57.990536Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from 
[6:434:2099] for LockId# 234 LockNode# 5 2025-04-17T17:45:57.992088Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-17T17:45:57.993632Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-17T17:45:57.994409Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:98:2048] 2025-04-17T17:45:57.995361Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:83:2074] ServerId# [5:353:2271] TabletId# 72057594037932033 PipeClientId# [6:83:2074] 2025-04-17T17:45:58.305130Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:465:2048] 2025-04-17T17:45:58.305334Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-17T17:45:58.305373Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-17T17:45:58.305401Z node 5 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 6 2025-04-17T17:45:58.305674Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:465:2048] 2025-04-17T17:45:58.305804Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:466:2100] ServerId# [5:470:2341] TabletId# 72057594037932033 PipeClientId# [6:466:2100] 2025-04-17T17:45:58.305928Z node 6 :TX_PROXY WARN: actor# [6:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-17T17:45:58.576364Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:493:2048] 2025-04-17T17:45:58.576868Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-17T17:45:58.576925Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-17T17:45:58.577597Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:493:2048] 2025-04-17T17:45:58.577934Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:494:2101] ServerId# [5:499:2361] TabletId# 72057594037932033 PipeClientId# [6:494:2101] 2025-04-17T17:45:58.900354Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:515:2048] 2025-04-17T17:45:58.901578Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-17T17:45:58.901853Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-17T17:45:58.910832Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:515:2048] 2025-04-17T17:45:58.911733Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:516:2103] ServerId# [5:520:2375] TabletId# 72057594037932033 PipeClientId# [6:516:2103] |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TSchemeShardTest::DropTableTwice [GOOD] >> 
TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:46:00.143066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:46:00.143149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:00.143181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:46:00.143215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:46:00.143253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:46:00.143294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:46:00.143351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:00.143532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:46:00.143853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:46:00.212954Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:46:00.212996Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:00.228930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:46:00.229425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:46:00.229586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:46:00.236550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:46:00.236691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:46:00.237292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:00.237511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:46:00.239978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:00.240874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:00.240916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:00.241044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:46:00.241128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:00.241161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:46:00.241242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.248391Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:46:00.376226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:46:00.376481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.376658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:46:00.376861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:46:00.376915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.379150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:00.379280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:46:00.379460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.379506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:46:00.379542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:46:00.379577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:46:00.381399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.381462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:46:00.381499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:46:00.383068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.383109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.383153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:00.383208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:46:00.386799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:46:00.388757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:46:00.388958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:46:00.390036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:00.390165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:46:00.390243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:00.390495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:46:00.390547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:00.390706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:46:00.390779Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:46:00.393055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:00.393100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:00.393284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:00.393328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:46:00.393654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.393698Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:46:00.393790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:00.393820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:00.393863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:00.393894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:00.393945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:46:00.394001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:00.394037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:46:00.394066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:46:00.394125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:46:00.394160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:46:00.394208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:46:00.395912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:00.396026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:00.396063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
NFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:46:00.424352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:46:00.424371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:46:00.424395Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-04-17T17:46:00.424413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:46:00.424459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-17T17:46:00.435468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.435540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId# 100:0 at tablet72057594046678944 2025-04-17T17:46:00.435584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-17T17:46:00.437796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:46:00.438166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:46:00.439647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.439707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:46:00.439759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-17T17:46:00.439902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:46:00.441611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T17:46:00.441711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T17:46:00.442005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:00.442113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-17T17:46:00.442163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-04-17T17:46:00.442271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T17:46:00.442395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:46:00.442442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T17:46:00.443991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:00.444018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:00.444137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:46:00.444199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:00.444222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T17:46:00.444245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T17:46:00.444448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:46:00.444486Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T17:46:00.444593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:46:00.444631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:46:00.444668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:46:00.444699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:46:00.444735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T17:46:00.444785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:46:00.444820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T17:46:00.444851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T17:46:00.444914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:46:00.444969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-17T17:46:00.445007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T17:46:00.445037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:46:00.445635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:46:00.445714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:46:00.445748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:46:00.445786Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:46:00.445823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:46:00.446419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:46:00.446491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:46:00.446520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:46:00.446546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:46:00.446573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:46:00.446639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-17T17:46:00.449192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:46:00.449257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-17T17:46:00.449462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T17:46:00.449498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-17T17:46:00.449835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T17:46:00.449908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T17:46:00.449935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 100 2025-04-17T17:46:00.450397Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:46:00.450580Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 189us 
result status StatusSuccess
2025-04-17T17:46:00.450888Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut
>> TSchemeShardTest::ModifyACL [GOOD]
>> TSchemeShardTest::NameFormat
>> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD]
>> TCmsTest::TestLoadLog
>> TCmsTest::RequestRestartServicesPartial [GOOD]
>> TCmsTest::ManagePermissionWrongRequest [GOOD]
>> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD]
>> TCmsTest::RequestRestartServicesNoUser
>> TCmsTest::RequestReplaceManyDevicesOnOneNode
|73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut
|73.6%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest
|73.6%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::LastRefreshTime [GOOD]
>> TCmsTest::ManageRequests
>> TSchemeShardTest::AlterTableFollowers [GOOD]
>> TSchemeShardTest::AlterTableSizeToSplit
>> TSchemeShardTest::Restart [GOOD]
>> TSchemeShardTest::SchemeErrors
>> TCmsTest::TestProcessingQueue [GOOD]
>> TCmsTest::RequestRestartServicesWrongHost [GOOD]
>> TCmsTest::RestartNodeInDownState
>> TSchemeShardTest::IgnoreUserColumnIds [GOOD]
>> TSchemeShardTest::DropTableAndConcurrentSplit
>> TCmsTenatsTest::RequestShutdownHost [GOOD]
>> TCmsTest::TestForceRestartModeScheduled [GOOD]
>> TCmsTest::TestForceRestartModeScheduledDisconnects
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD]
|73.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log}
>> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy
>> TCmsTest::RacyStartCollecting [GOOD]
>> TCmsTest::PriorityRange
>> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD]
>> TSchemeShardTest::CopyTableWithAlterConfig
>> TCmsTest::WalleRequestDuringRollingRestart [GOOD]
|73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut
|73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut
|73.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log}
|73.6%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut
>> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD]
>> TCmsTest::AllVDisksEvictionInRack
>> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD]
>> TSchemeShardTest::DropPQ
>> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD]
>> TCmsTest::RequestReplacePDiskConsecutiveWithDone
|73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut
|73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut
|73.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut
>> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD]
>> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD]
Test command err: 2025-04-17T17:45:46.022531Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-04-17T17:45:46.397178Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-04-17T17:45:46.462653Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-04-17T17:45:46.606543Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-04-17T17:45:59.831391Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-04-17T17:45:59.831452Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-04-17T17:45:59.831473Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-04-17T17:45:59.831500Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-04-17T17:45:59.831521Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-04-17T17:45:59.831553Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-04-17T17:45:59.831572Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-04-17T17:45:59.831590Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
>> TSchemeShardTest::AlterTable [GOOD]
>> TSchemeShardTest::AlterTableDropColumnReCreateSplit
>> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD]
>> TCmsTest::SamePriorityRequest2
|73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd
>> TSchemeShardTest::ConsistentCopyTableAwait [GOOD]
>> TSchemeShardTest::ConsistentCopyTableRejects
>> TCmsTest::VDisksEviction [GOOD]
>> TSchemeShardTest::SchemeErrors [GOOD]
>> TSchemeShardTest::SerializedCellVec [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate
|73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD]
|73.7%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd
>> TSchemeShardTest::AlterTableSizeToSplit [GOOD]
>> TSchemeShardTest::AlterTableSplitSchema
>> TCmsTest::TestLoadLog [GOOD]
>> TCmsTest::TestLogOperationsRollback
>> TCmsTest::EmergencyDuringRollingRestart [GOOD]
>> TSchemeShardTest::NameFormat [GOOD]
>> TSchemeShardTest::ParallelCreateTable
>> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD]
>> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled
>> TCmsTest::SysTabletsNode [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD]
Test command err: 2025-04-17T17:46:00.252128Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute
2025-04-17T17:46:00.252203Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete
2025-04-17T17:46:00.252332Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z
2025-04-17T17:46:00.254024Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: 
"vdisk-1-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } } 2025-04-17T17:46:00.255143Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: 
"vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } 2025-04-17T17:46:00.255371Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.002512s 2025-04-17T17:46:00.255410Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 
2025-04-17T17:46:00.255555Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-17T17:46:00.255613Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-17T17:46:00.255658Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-17T17:46:00.255770Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:46:00.255931Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:46:00.255973Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-17T17:46:00.256174Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-17T17:46:00.256213Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-17T17:46:00.256240Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-17T17:46:00.256265Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-17T17:46:00.256290Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-1 ... 
mp: 120540560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540560 } } 2025-04-17T17:46:00.602977Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540560 } 2025-04-17T17:46:00.603308Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-17T17:46:00.603391Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-17T17:46:00.603480Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-17T17:46:00.603652Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:46:00.603869Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:46:00.603921Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-17T17:46:00.604184Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-04-17T17:46:00.604240Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T17:46:00.604362Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-17T17:46:00.604416Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-17T17:46:00.604449Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-17T17:46:00.604478Z node 18 :CMS DEBUG: [Sentinel] 
[StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637]
2025-04-17T17:46:00.604508Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637]
2025-04-17T17:46:00.604547Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637]
2025-04-17T17:46:00.604578Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637]
2025-04-17T17:46:00.604608Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637]
2025-04-17T17:46:00.604808Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120442072 ChangeTime: 120442072 Path: "/18/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605416Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120442072 ChangeTime: 120442072 Path: "/23/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605586Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120442072 ChangeTime: 120442072 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605682Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120442072 ChangeTime: 120442072 Path: "/19/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605728Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120442072 ChangeTime: 120442072 Path: "/20/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605780Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120442072 ChangeTime: 120442072 Path: "/21/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605834Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120442072 ChangeTime: 120442072 Path: "/22/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605873Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120442072 ChangeTime: 120442072 Path: "/24/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540
2025-04-17T17:46:00.605909Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s
2025-04-17T17:46:00.620713Z node 18 :CMS DEBUG: TTxStorePermissions complete
2025-04-17T17:46:00.620979Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 }
2025-04-17T17:46:00.621641Z node 18 :CMS INFO: User user removes request user-r-3
2025-04-17T17:46:00.621699Z node 18 :CMS DEBUG: Resulting status: OK
2025-04-17T17:46:00.621763Z node 18 :CMS DEBUG: TTxRemoveRequest Execute
2025-04-17T17:46:00.621804Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18
2025-04-17T17:46:00.621949Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove
2025-04-17T17:46:00.634307Z node 18 :CMS DEBUG: TTxRemoveRequest Complete
2025-04-17T17:46:00.634494Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } }
>> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD]
>> TSchemeShardTest::DropTable
>> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD]
>> TCmsTest::ActionIssue
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns
>> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey
>> TMiniKQLEngineFlatTest::TestEmptyProgram
>> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey
>> TMiniKQLEngineFlatTest::TestPureProgram
>> TCmsTest::StateRequestUnknownMultipleNodes [GOOD]
>> TCmsTest::StateStorageAvailabilityMode
>> TMiniKQLEngineFlatHostTest::ShardId [GOOD]
>> TMiniKQLEngineFlatHostTest::Basic [GOOD]
>> TMiniKQLEngineFlatTest::TestAbort [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD]
Test command err: 2025-04-17T17:45:58.396113Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } }
2025-04-17T17:45:58.396618Z node 10 :CMS DEBUG: TTxUpdateConfig Execute
2025-04-17T17:45:58.429850Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute
2025-04-17T17:45:58.429996Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z
2025-04-17T17:45:58.431782Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { },
response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" 
State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027512 } } 2025-04-17T17:45:58.432445Z node 10 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: 
"3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027512 } 2025-04-17T17:45:58.432690Z node 10 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-04-17T17:45:58.432734Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T17:45:58.432821Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-17T17:45:58.432872Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-17T17:45:58.432903Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-17T17:45:58.432930Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-17T17:45:58.432953Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-17T17:45:58.432990Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-17T17:45:58.433011Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: 
nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-17T17:45:58.433036Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:83883506 ... :CMS DEBUG: Running CleanupWalleTasks 2025-04-17T17:45:58.812571Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-17T17:45:58.812619Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-17T17:45:58.812642Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-17T17:45:58.812672Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-17T17:45:58.812705Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-17T17:45:58.812728Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-17T17:45:58.812754Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-17T17:45:58.812798Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-17T17:45:58.812960Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.813620Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.813725Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.813779Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.813831Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.813895Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.813968Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: 
"/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.814018Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-17T17:45:58.814063Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-17T17:45:58.814314Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2025-04-17T17:45:58.814411Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-17T17:45:58.814631Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-17T17:45:58.814808Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-04-17T17:45:58.814862Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-04-17T17:45:58.837974Z node 10 :CMS DEBUG: TTxLogAndSend Complete 2025-04-17T17:45:58.883729Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-17T17:45:58.883807Z node 10 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-17T17:45:58.883858Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:04:00Z 2025-04-17T17:45:58.887098Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:45:58.887638Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-04-17T17:45:58.888149Z node 10 :CMS DEBUG: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2025-04-17T17:45:58.889082Z node 10 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:45:58.893703Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:45:58.916418Z node 10 :CMS DEBUG: TTxStorePermissions complete 2025-04-17T17:45:58.917697Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2025-04-17T17:45:58.918881Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-17T17:45:58.942901Z node 10 :CMS DEBUG: TTxUpdateConfig Complete 2025-04-17T17:45:58.944022Z node 10 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } 
SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-04-17T17:45:59.022649Z node 10 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-17T17:45:59.022702Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T17:45:59.022780Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-04-17T17:45:59.023046Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-17T17:45:59.023121Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-17T17:45:59.023170Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-17T17:45:59.023196Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-17T17:45:59.023225Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-17T17:45:59.023249Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-17T17:45:59.023293Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-17T17:45:59.023334Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-17T17:45:59.023552Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024083Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024246Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024383Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024438Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024533Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 
0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024595Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024655Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-17T17:45:59.024709Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-17T17:45:59.024913Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-17T17:45:59.025075Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-17T17:45:59.025248Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-17T17:45:59.025488Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-04-17T17:45:59.025532Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> TMiniKQLEngineFlatTest::TestEraseRow >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> 
TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail12
>> TCmsTest::RequestRestartServicesNoUser [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive
>> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems
>> TMiniKQLProtoTestYdb::TestExportBoolYdb
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload
>> TMiniKQLEngineFlatTest::TestCASBoth2Success
>> TMiniKQLEngineFlatTest::TestPureProgram [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExists
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD]
>> TMiniKQLEngineFlatTest::TestBug998 [GOOD]
>> TMiniKQLEngineFlatTest::TestAcquireLocks
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD]
>> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalYdb
>> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards
>> TMiniKQLEngineFlatTest::TestEraseRowNoShards
>> TMiniKQLEngineFlatTest::TestUpdateRowManyShards
>> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb
>> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers
>> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes
>> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb
>> TCmsTest::PriorityRange [GOOD]
>> TMiniKQLEngineFlatTest::TestDiagnostics
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards
>> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD]
>> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNoShards
>> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb
>> TMiniKQLProgramBuilderTest::TestSelectRow
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD]
>> TCmsTest::RestartNodeInDownState [GOOD]
>> TCmsTest::SamePriorityRequest
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNullNull
>> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD]
>> TMiniKQLEngineFlatTest::NoOrderedMapPushdown
>> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions
>> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortPushdownPk
>> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb
|73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats
|73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats
|73.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats
>> KikimrIcGateway::TestLoadExternalTable
>> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD]
>> TCmsTest::ManageRequests [GOOD]
>> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag
>> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestLengthPushdown
>> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable
>> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb
>> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVoidYdb
>> KikimrProvider::TestFillAuthPropertiesNone [GOOD]
>> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD]
>> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD]
>> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestInternalResult
>> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive
>> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive
>> TSchemeShardTest::AlterTableSplitSchema [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems
>> TMiniKQLProtoTestYdb::TestExportDictYdb
>> TSchemeShardTest::AlterTableSettings
>> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortPushdown
>> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStringYdb
>> TCmsTest::AllVDisksEvictionInRack [GOOD]
>> TMiniKQLEngineFlatTest::TestInternalResult [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNoShards
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive
>> TMiniKQLEngineFlatTest::TestIndependentSelects
>> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD]
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb
>> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD]
>> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD]
>> TMiniKQLProgramBuilderTest::TestAcquireLocks
>> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD]
>> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey
>> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions
>> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD]
>> TMiniKQLEngineFlatTest::TestCrossTableRs
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes
>> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD]
>> TMiniKQLProgramBuilderTest::TestDiagnostics
>> TSchemeShardTest::ParallelCreateTable [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD]
>> TSchemeShardTest::ParallelCreateSameTable
>> TMiniKQLProtoTestYdb::TestExportUuidYdb
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD]
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD]
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSomePushDown
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1
>> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD]
>> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD]
>> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange
>> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD]
>> TMiniKQLEngineFlatTest::TestTakePushdown
>> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig
>> TSchemeShardTest::CopyTableWithAlterConfig [GOOD]
>> TSchemeShardTest::CopyTableOmitFollowers
>> TCmsTest::WalleTasks [GOOD]
>> TCmsTest::WalleTasksWithNodeLimit
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD]
>> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo
>> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportTupleYdb
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD]
>> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD]
>> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD]
>> TCmsTest::StateStorageRollingRestart [GOOD]
>> TCmsTest::StateStorageLockedNodes
>> TCmsTest::SamePriorityRequest2 [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD]
>> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD]
>> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange
>> ReadAttributesUtils::AttributesGatheringEmpry [GOOD]
>> ReadAttributesUtils::AttributesGatheringFilter [GOOD]
>> ReadAttributesUtils::AttributesGatheringRecursive [GOOD]
------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD]
Test command err:
2025-04-17T17:46:00.136215Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-04-17T17:46:00.136272Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-04-17T17:46:00.136293Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-04-17T17:46:00.136328Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-04-17T17:46:00.136350Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-04-17T17:46:00.136368Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-04-17T17:46:00.136386Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-04-17T17:46:00.136403Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
2025-04-17T17:46:00.141866Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-04-17T17:46:00.141916Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-04-17T17:46:00.141937Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-04-17T17:46:00.141957Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-04-17T17:46:00.141976Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-04-17T17:46:00.141994Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-04-17T17:46:00.142012Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-04-17T17:46:00.142031Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
2025-04-17T17:46:00.183259Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-04-17T17:46:00.183317Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-04-17T17:46:00.183338Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-04-17T17:46:00.183356Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-04-17T17:46:00.183375Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-04-17T17:46:00.183393Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-04-17T17:46:00.183411Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-04-17T17:46:00.183428Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD]
>> TCmsTest::Mirror3dcPermissions [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb
>> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStructYdb
>> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD]
>> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD]
Test command err:
2025-04-17T17:46:02.315228Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute
2025-04-17T17:46:02.315352Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete
2025-04-17T17:46:02.315486Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z
2025-04-17T17:46:02.317491Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name:
"vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120029512 } Timestamp: 120029512 
NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120029512 } } 2025-04-17T17:46:02.318359Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120029512 } 2025-04-17T17:46:02.318599Z node 25 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-04-17T17:46:02.318654Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T17:46:02.318846Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-17T17:46:02.318919Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-17T17:46:02.318980Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed) 2025-04-17T17:46:02.319141Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:46:02.319358Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:46:02.319419Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY 2025-04-17T17:46:02.319680Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-17T17:46:02.319734Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2025-04-17T17:46:02.319770Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2025-04-17T17:46:02.319861Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-04-17T17:46:02.319896Z 
node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-17T17:46:02.319929Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-17T17:46:02.319959Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-17T17:46:02.319989Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-17T17:46:02.332417Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... [28:8388350642965737326:1634689637] 2025-04-17T17:46:02.548933Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-17T17:46:02.548955Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-17T17:46:02.548996Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-17T17:46:02.549019Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-17T17:46:02.549226Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.549839Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.549937Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.550071Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.550139Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.550221Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.550315Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.550366Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-04-17T17:46:02.550406Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-17T17:46:02.550577Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-17T17:46:02.550631Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-17T17:46:02.550669Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-04-17T17:46:02.550851Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-17T17:46:02.551009Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-17T17:46:02.551108Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-04-17T17:46:02.551148Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2025-04-17T17:46:02.551180Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2025-04-17T17:46:02.564794Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-17T17:46:02.564863Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-17T17:46:02.580482Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-17T17:46:02.580566Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-17T17:46:02.580627Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-17T17:46:02.581471Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:46:02.581557Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2025-04-17T17:46:02.581611Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-04-17T17:46:02.581660Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2025-04-17T17:46:02.581688Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2025-04-17T17:46:02.581707Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2025-04-17T17:46:02.581729Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-17T17:46:02.581861Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-04-17T17:46:02.581925Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-04-17T17:46:02.582004Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:46:02.582169Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.129512Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-17T17:46:02.582715Z node 25 :CMS NOTICE: 
[AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:46:02.599051Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-04-17T17:46:02.599397Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780129512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-04-17T17:46:02.599495Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.129512Z 2025-04-17T17:46:02.616406Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-04-17T17:46:02.616832Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-17T17:46:02.616904Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-17T17:46:02.616969Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-17T17:46:02.617829Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:46:02.617937Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2025-04-17T17:46:02.617999Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-04-17T17:46:02.618049Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-17T17:46:02.618355Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-04-17T17:46:02.618424Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-04-17T17:46:02.618526Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-17T17:46:02.618714Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.231024Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-04-17T17:46:02.618840Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-17T17:46:02.633273Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-04-17T17:46:02.634278Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } 
Deadline: 780231024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-04-17T17:46:02.634894Z node 25 :CMS INFO: User user is done with permissions user-p-1 2025-04-17T17:46:02.634942Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-17T17:46:02.635013Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-17T17:46:02.635106Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2025-04-17T17:46:02.635217Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2025-04-17T17:46:02.635277Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-04-17T17:46:02.648136Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-17T17:46:02.648374Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-04-17T17:46:02.648964Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-04-17T17:46:02.649010Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-17T17:46:02.649074Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-17T17:46:02.649165Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-04-17T17:46:02.649261Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-04-17T17:46:02.649313Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-04-17T17:46:02.663769Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-17T17:46:02.663959Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableById >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> TMiniKQLProtoTestYdb::TestExportVariantYdb |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test 
command err:
SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse
PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0]
Type { Kind: Struct }
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD]
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD]
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD]
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD]
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD]
>> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD]
>> BasicUsage::WriteSessionNoAvailableDatabase
>> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD]
>> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD]
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD]
>> TCmsTenatsTest::TestClusterLimitForceRestartMode
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo
>> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD]
>> TMiniKQLEngineFlatTest::TestMapsPushdown
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb
>> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD]
>> TMiniKQLProgramBuilderTest::TestInvalidParameterName
>> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD]
>> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown
>> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD]
>> TMiniKQLProgramBuilderTest::TestInvalidParameterType
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD]
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb
>> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown
>>
TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> KikimrIcGateway::TestCreateExternalTable >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> GroupWriteTest::WithRead [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TCmsTest::SamePriorityRequest [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById |73.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 8523054439726253862 2025-04-17T17:45:35.019571Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-17T17:45:35.036128Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-17T17:45:35.036211Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-17T17:45:35.037875Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-17T17:45:35.060081Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 
Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:45:35.062233Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-17T17:46:04.426628Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-17T17:46:04.426729Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:04.426785Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-17T17:46:04.426825Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:05.674361Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-04-17T17:46:05.674480Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageAvailabilityMode [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TCmsTest::StateStorageLockedNodes [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] >> TCmsTest::ActionIssue [GOOD] |73.8%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |73.9%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCmsTest::TestLogOperationsRollback [GOOD] |73.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQFail |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> TSchemeShardTest::CopyIndexedTable |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> GroupWriteTest::TwoTables [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 6811790286091807290 2025-04-17T17:45:35.172244Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-17T17:45:35.172317Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-17T17:45:35.194857Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-17T17:45:35.194919Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-17T17:45:35.195003Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-17T17:45:35.195027Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-17T17:45:35.198377Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 
Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-17T17:45:35.198450Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-17T17:45:35.211932Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:45:35.211992Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:45:35.214308Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-17T17:45:35.214345Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-17T17:46:08.649992Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-17T17:46:08.650737Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:08.650976Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:08.651009Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-17T17:46:08.651044Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:08.651082Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:08.651108Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-17T17:46:08.651141Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 
CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:08.651176Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-17T17:46:09.359972Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-17T17:46:09.360061Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-17T17:46:09.360101Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-17T17:46:09.360141Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-17T17:46:09.360194Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2025-04-17T17:46:09.360236Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TSchemeShardTest::RejectSystemViewPath [GOOD] >> TSchemeShardTest::SplitKey >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy 
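Both `GroupWriteTest` dumps above walk the same tablet lifecycle: `TEvDiscover` to find the latest state, `TEvBlock` to fence the previous generation, then a series of `TEvCollectGarbage` requests whose `(TabletId, PerGenerationCounter)` pair is echoed back in the matching `TEvCollectGarbageResult`. Below is a hypothetical checker that pairs requests with results in such a dump and reports anything left unanswered; the field names come from the log text, while the parsing itself is an assumption rather than part of the test framework:

```python
import re

# "going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 ...}"
REQUEST_RE = re.compile(
    r"going to send TEvCollectGarbage \{TabletId# (\d+) .*?PerGenerationCounter# (\d+)"
)
# "recieved TEvCollectGarbageResult {TabletId# 3 ... PerGenerationCounter# 12 Channel# 0 Status# OK}"
RESULT_RE = re.compile(
    r"TEvCollectGarbageResult \{TabletId# (\d+) .*?PerGenerationCounter# (\d+) .*?Status# (\w+)"
)

def unanswered_collects(log_text: str) -> list[tuple[str, str]]:
    """Return (TabletId, PerGenerationCounter) pairs with no OK result in the dump."""
    sent = {m.groups() for m in REQUEST_RE.finditer(log_text)}
    acked = {m.groups()[:2] for m in RESULT_RE.finditer(log_text) if m.group(3) == "OK"}
    return sorted(sent - acked)
```

Run over the request/result pairs visible in the `TwoTables` output above, this should return an empty list, since each counter that was sent also has a `Status# OK` result.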
>> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> TDatabaseResolverTests::PostgreSQL >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::Ydb_Dedicated >> data_correctness.py::TestDataCorrectness::test >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CheckItemProgress >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> 
TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TExportToS3Tests::ShouldSucceedOnSingleShardTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-04-17T17:46:21.830271Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> tier_delete.py::TestTierDelete::test_delete_s3_ttl >> TExportToS3Tests::DropSourceTableBeforeTransferring |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-04-17T17:46:21.829970Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> RemoteTopicReader::ReadTopic |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |74.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change |74.0%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> ttl_unavailable_s3.py::TestUnavailableS3::test |74.0%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.1%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> unstable_connection.py::TestUnstableConnection::test >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::Greenplum_MasterNode >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey >> TDatabaseResolverTests::DataStreams_Serverless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-04-17T17:46:33.783979Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> TSchemeShardTest::CreateDropKesus >> TDatabaseResolverTests::MySQL >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> 
TExportToS3Tests::DropCopiesBeforeTransferring1 >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-04-17T17:46:34.603787Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::Ydb_Serverless >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TDatabaseResolverTests::ClickHouseNative >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-04-17T17:46:37.099647Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-04-17T17:46:37.061822Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-04-17T17:46:36.967503Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
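Each failed resolution above names the exact lookup the database resolver attempted: endpoint 'mdb.api.cloud.yandex.net:443' (or 'ydbc.ydb.cloud.yandex.net:8789' for the DataStreams and Ydb cases) plus a per-flavor URL such as '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts'. A minimal sketch of that lookup, assuming a plain GET authorized with a bearer IAM token; the Authorization header and token handling are assumptions, only the endpoint and URL shapes come from the error records:

import json
import urllib.request

ENDPOINT = "https://mdb.api.cloud.yandex.net"  # endpoint named in the errors

def resolve_hosts(db_type, cluster_id, iam_token):
    # URL shape copied from the log; Greenplum queries '/master-hosts' instead.
    url = f"{ENDPOINT}/managed-{db_type}/v1/clusters/{cluster_id}/hosts"
    req = urllib.request.Request(url, headers={"Authorization": f"Bearer {iam_token}"})
    with urllib.request.urlopen(req, timeout=10) as resp:  # raises HTTPError on 403
        return json.load(resp)

A 403 from this call is what surfaces above as the PermissionDenied cases, with the hint to grant the service account a role such as `managed-clickhouse.viewer`.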
>> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |74.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |74.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TExportToS3Tests::Changefeeds >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix |74.2%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoin::LeftJoinPushdownPredicate_Nulls >> KqpJoinOrder::CanonizedJoinOrderTPCH14 >> KqpJoin::JoinDupColumnRightPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:54.850389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:54.853427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:54.853652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:54.854092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:54.856775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:54.856983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:54.857218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:54.859994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:54.867805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:56.495498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:56.495706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:56.662978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:56.674868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:56.675571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:56.816365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:56.818119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:56.821130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.821492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:56.867541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.883770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:56.883993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-17T17:45:56.884637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:56.885208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:56.885401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:56.895396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.997768Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:45:57.337504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:57.337695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.337880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:57.338092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:57.338140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.345813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.345921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:57.346082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.346127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:57.346160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:57.346209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:57.355010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.355082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:57.355123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:57.364188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.364255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.364480Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.364618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.371658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:57.375874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:57.376056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:57.376949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.377076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:57.377135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.377364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:57.377408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.377555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:57.377621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:57.385223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:57.385457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.386109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.386324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:57.387248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.387444Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:57.387543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.387579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-17T17:45:57.387616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.387646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.387680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:57.387714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.387755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:57.387798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:57.387859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:57.387902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:57.387948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:57.389608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.390164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.390305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Step: 5000005 2025-04-17T17:46:39.186779Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-17T17:46:39.186812Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.186835Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-17T17:46:39.186859Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-17T17:46:39.187480Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-17T17:46:39.187545Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-17T17:46:39.187581Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-17T17:46:39.187609Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 1, txState.State: Propose, 
txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.187633Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-17T17:46:39.187654Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T17:46:39.187838Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-17T17:46:39.187897Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-17T17:46:39.187931Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-17T17:46:39.187955Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.187975Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-17T17:46:39.188113Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T17:46:39.188302Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-17T17:46:39.194778Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.194878Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.194940Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.194993Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.195104Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:39.195126Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:46:39.195304Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:39.195329Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-17T17:46:39.195515Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:46:39.195551Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:46:39.195689Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:46:39.195728Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 
2025-04-17T17:46:39.195772Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:46:39.195810Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:46:39.195854Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T17:46:39.195900Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:46:39.195953Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:46:39.195991Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:46:39.196158Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-04-17T17:46:39.196208Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-04-17T17:46:39.196251Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T17:46:39.196672Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:46:39.196732Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:46:39.196759Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:46:39.196801Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T17:46:39.196842Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-17T17:46:39.196938Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T17:46:39.202648Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T17:46:39.223025Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:46:39.223080Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:46:39.223507Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T17:46:39.223602Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:46:39.223643Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1480:3278] TestWaitNotification: OK eventTxId 104 2025-04-17T17:46:39.224129Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, 
at schemeshard: 72057594046678944
2025-04-17T17:46:39.224299Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 215us result status StatusSuccess
2025-04-17T17:46:39.224803Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|74.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ...
results_accumulator.log} |74.2%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> KikimrIcGateway::TestALterResourcePool [GOOD] >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateDropSolomon >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::CorruptedDyNumber >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 5227, MsgBus: 11851 2025-04-17T17:46:07.029357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339832954971720:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:07.049860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bbf/r3tmp/tmpHyq7CU/pdisk_1.dat 2025-04-17T17:46:07.541133Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:07.544899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:07.544979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:07.547515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5227, node 1 2025-04-17T17:46:07.727074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:07.727095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:07.727105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:07.727198Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11851 TClient is connected to server localhost:11851 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:46:08.633527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:08.795485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:46:08.964708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14492, MsgBus: 61251 2025-04-17T17:46:20.945075Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494339889610792048:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:20.945501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bbf/r3tmp/tmpwiBChI/pdisk_1.dat 2025-04-17T17:46:21.124972Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:21.159088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:21.159158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14492, node 2 2025-04-17T17:46:21.161884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:21.278658Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:21.278679Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:21.278684Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:21.278785Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61251 TClient is connected to server localhost:61251 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:46:21.888556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:46:21.909323Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:46:21.925571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8958, MsgBus: 23527 2025-04-17T17:46:32.302036Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494339937289800792:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:32.325555Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bbf/r3tmp/tmphwzLYr/pdisk_1.dat 2025-04-17T17:46:33.145402Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:33.145472Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:33.158055Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:33.174499Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8958, node 3 2025-04-17T17:46:33.971181Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:33.971199Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:33.971414Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:33.971707Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23527 2025-04-17T17:46:36.893057Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494339937289800792:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:36.893118Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server 
localhost:23527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:46:37.131322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:37.162099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:46:37.188111Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::RebootDuringCompletion >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> TExportToS3Tests::Changefeeds [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::TablePermissions [GOOD] >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableForBackup >> GroupWriteTest::Simple [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |74.2%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 12050203421235501974
2025-04-17T17:45:34.900632Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551}
2025-04-17T17:45:34.912548Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1}
2025-04-17T17:45:34.912591Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1}
2025-04-17T17:45:34.913982Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 received TEvBlockResult {Status# OK}
2025-04-17T17:45:34.923350Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:45:34.924771Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 received TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK}
2025-04-17T17:47:03.715220Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die
2025-04-17T17:47:03.715529Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:47:03.715774Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die
2025-04-17T17:47:03.715807Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:47:04.105724Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 received TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK}
2025-04-17T17:47:04.106027Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 received TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK}
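The GroupWriteTest output above traces the full lifecycle of a load tablet: TEvDiscover answers NODATA on a fresh group, TEvBlock fences generation 1, the actor then works as generation 2, opening it with a hard TEvCollectGarbage at CollectStep# 0 and, once the PoisonPill arrives, retiring it with a final hard collect at CollectStep# 4294967295. A toy model of that sequence with the storage interface left abstract; every name here is illustrative, only the event order and the step values come from the log:

class LoadTablet:
    MAX_STEP = 0xFFFFFFFF  # CollectStep# 4294967295 in the final collects

    def __init__(self, tablet_id, storage):
        self.tablet_id = tablet_id
        self.storage = storage
        self.gen = 0
        self.counter = 0

    def start(self):
        found = self.storage.discover(self.tablet_id)   # TEvDiscover
        prev_gen = found.generation if found else 1     # NODATA -> generation 1
        self.storage.block(self.tablet_id, prev_gen)    # TEvBlock {Generation# 1}
        self.gen = prev_gen + 1                         # act as Generation# 2
        self._collect(step=0)                           # open the new generation

    def stop(self):
        # The PoisonPill path: retire everything written by this generation.
        self._collect(step=self.MAX_STEP)

    def _collect(self, step):
        self.counter += 1                               # PerGenerationCounter# 1, 2, ...
        self.storage.collect_garbage(self.tablet_id, generation=self.gen,
                                     counter=self.counter, step=step, hard=True)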
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:46:22.635793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:46:22.635880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:22.635917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:46:22.636027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:46:22.636070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:46:22.636095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:46:22.636145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:22.636226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:46:22.636513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:46:22.744040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:46:22.744095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:22.766500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:46:22.766626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:46:22.766775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:46:22.777768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:46:22.778081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:46:22.778689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:22.778982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:46:22.783803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:22.785220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:22.785283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:22.785427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:46:22.785476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:22.785516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:46:22.785711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:46:22.791867Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:46:22.949914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute,
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:46:22.950129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:22.950372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:46:22.950591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:46:22.950652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:22.953342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:22.953449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:46:22.953637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:22.953683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:46:22.953712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:46:22.953738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:46:22.961356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:22.961621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:46:22.961844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:46:22.977821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:22.977869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:22.978099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:22.978985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.014291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:46:23.032424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:46:23.033859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:46:23.053503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:23.053672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:46:23.053727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:23.054012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:46:23.054065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:23.055035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:46:23.055990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:46:23.076374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:23.076849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:23.078135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:23.078972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:46:23.080212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.080837Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:46:23.081669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:23.081923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.082343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:23.082575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.082810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:46:23.082843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.082871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:46:23.082898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:46:23.083433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:46:23.083930Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:46:23.084392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:46:23.095990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:23.096705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:23.096937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 6678944, LocalPathId: 9], 18446744073709551615 2025-04-17T17:46:52.260424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:46:52.290460Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:52.290771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:52.290906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-04-17T17:46:52.291485Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:52.291517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-04-17T17:46:52.291547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-17T17:46:52.293332Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:46:52.293615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:46:52.293657Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T17:46:52.293703Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-17T17:46:52.293749Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:46:52.309106Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:46:52.309729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:46:52.310028Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T17:46:52.310322Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-04-17T17:46:52.310356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-17T17:46:52.310883Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-04-17T17:46:52.311610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:129:2154] 2025-04-17T17:46:52.348805Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:46:52.352845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:46:52.353190Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T17:46:52.353243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T17:46:52.353279Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:46:52.353313Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-04-17T17:46:52.353337Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-04-17T17:46:52.422728Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:46:52.423090Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:46:52.423136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1239:3033] TestWaitNotification: OK eventTxId 105 2025-04-17T17:46:52.424278Z node 3 :EXPORT DEBUG: TExport::TTxForget, dropping export tables, info: { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:46:52.433568Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:46:52.433872Z node 3 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 105 2025-04-17T17:46:52.434660Z node 3 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:46:52.434944Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:46:52.435750Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 105, at schemeshard: 72057594046678944 2025-04-17T17:46:52.436029Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:46:52.436544Z node 
3 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 105 2025-04-17T17:46:52.436869Z node 3 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-04-17T17:46:52.437709Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:46:52.491818Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-105" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:46:52.492642Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/export-105, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-04-17T17:46:52.509203Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:46:52.536254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 9 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:46:52.537172Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-105 2025-04-17T17:46:52.562306Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-04-17T17:46:52.563154Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 9 PathDropTxId: 281474976710761 2025-04-17T17:46:52.563688Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:46:52.564017Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-04-17T17:46:52.564526Z node 3 :EXPORT TRACE: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761" TxId: 281474976710762 
SchemeshardId: 72057594046678944 PathId: 9 PathDropTxId: 281474976710761 2025-04-17T17:46:52.565130Z node 3 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-04-17T17:46:52.587398Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:46:52.587976Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:46:52.588083Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T17:46:52.588151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T17:46:52.588192Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:46:52.588222Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-04-17T17:46:52.588260Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-04-17T17:46:52.601044Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 105 2025-04-17T17:46:52.602148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T17:46:52.615048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T17:46:52.624053Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T17:46:52.625017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:46:52.625565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1502:3282] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:46:23.792481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:46:23.792983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:23.793824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:46:23.794046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:46:23.798079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:46:23.798489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
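In the Changefeeds trace above, export cleanup proposes RmDir for "export-105" (txId 281474976710762), gets StatusPathDoesNotExist because the directory was already dropped by txId 281474976710761, and then waits for that earlier transaction instead of failing. A hedged C++ sketch of that idempotent-cleanup decision follows; the types and function names are invented for illustration and are not the actual TExport::TTxProgress implementation.

```cpp
// Illustrative only: a simplified model of the cleanup decision visible in the
// log, not YDB's real export code.
#include <cstdint>
#include <iostream>
#include <optional>

enum class EStatus { Accepted, PathDoesNotExist, Other };

struct TDropResult {
    EStatus status;
    std::optional<uint64_t> pathDropTxId; // set when the path was dropped earlier
};

// Decide how to continue the export's Dropping state after proposing a drop.
// Returns the txId whose completion should be awaited, if any.
std::optional<uint64_t> OnDropProposed(uint64_t proposedTxId, const TDropResult& r) {
    switch (r.status) {
    case EStatus::Accepted:
        return proposedTxId;   // our own RmDir will perform the cleanup
    case EStatus::PathDoesNotExist:
        return r.pathDropTxId; // already dropped: wait for that tx instead
                               // (281474976710761 in the log above)
    default:
        return std::nullopt;   // genuine failure: surface an issue
    }
}

int main() {
    TDropResult alreadyDropped{EStatus::PathDoesNotExist, 281474976710761ULL};
    if (auto wait = OnDropProposed(281474976710762ULL, alreadyDropped))
        std::cout << "wait for txId " << *wait << "\n";
}
```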
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T17:46:23.792481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:46:23.792983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:46:23.793824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:46:23.794046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:46:23.798079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:46:23.798489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:46:23.798719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:46:23.799297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:46:23.801973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:46:24.640933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:46:24.641659Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:46:24.854091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:46:24.875313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:46:24.876189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:46:25.005547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:46:25.007521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:46:25.025250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:46:25.026167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:46:25.074436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:46:25.118311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:46:25.118575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:46:25.119571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:46:25.120033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:46:25.120691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:46:25.122087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:46:25.346097Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:46:27.135835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:46:27.137205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:27.146717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:46:27.149069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:46:27.149528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:27.163220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:46:27.163384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:46:27.163581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:27.163655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:46:27.163688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:46:27.163722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:46:27.170922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:27.170978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:46:27.171014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:46:27.172778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:27.172826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:27.172865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:46:27.172925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:46:27.221741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:46:27.255152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:46:27.255387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:46:27.257652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:46:27.257782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:46:27.257833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:46:27.258078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:46:27.258127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:46:27.258301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:46:27.258365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:46:27.286408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:46:27.286842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:46:27.287583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:46:27.287980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:46:27.303022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:27.303493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:46:27.304374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:46:27.304581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:46:27.304820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:46:27.305263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:46:27.305494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:46:27.305725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:46:27.306168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:46:27.314587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:46:27.315257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:46:27.315495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:46:27.315912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:46:27.396918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:46:27.411124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:46:27.411393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1
2025-04-17T17:47:01.853972Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3
2025-04-17T17:47:01.854666Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944
2025-04-17T17:47:01.854699Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944
2025-04-17T17:47:01.854762Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1
2025-04-17T17:47:01.854890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:47:01.855310Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.855365Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.855386Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-17T17:47:01.855417Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11
2025-04-17T17:47:01.855441Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-04-17T17:47:01.856221Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.856273Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.856291Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-17T17:47:01.856311Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7
2025-04-17T17:47:01.856332Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-17T17:47:01.856378Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true
2025-04-17T17:47:01.868574Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944
2025-04-17T17:47:01.868604Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true
2025-04-17T17:47:01.868632Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944
2025-04-17T17:47:01.876567Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816
2025-04-17T17:47:01.876658Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545
2025-04-17T17:47:01.892008Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007
FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007
2025-04-17T17:47:01.893404Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:47:01.893514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:47:01.893564Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944
2025-04-17T17:47:01.893694Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944
2025-04-17T17:47:01.893754Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1
2025-04-17T17:47:01.893794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-04-17T17:47:01.893844Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1
2025-04-17T17:47:01.893891Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-04-17T17:47:01.893938Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:47:01.893999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-17T17:47:01.894034Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false
2025-04-17T17:47:01.894077Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-04-17T17:47:01.894108Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0
2025-04-17T17:47:01.894141Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0
2025-04-17T17:47:01.894208Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-17T17:47:01.894242Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1
2025-04-17T17:47:01.894275Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12
2025-04-17T17:47:01.894309Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615
2025-04-17T17:47:01.895019Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.909727Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:47:01.909775Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:47:01.910618Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-17T17:47:01.910962Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:47:01.910989Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1
2025-04-17T17:47:01.911015Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3
FAKE_COORDINATOR: Erasing txId 281474976710761
2025-04-17T17:47:01.913892Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.916450Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.916503Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-17T17:47:01.916747Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12
2025-04-17T17:47:01.917007Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-04-17T17:47:01.918847Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.919115Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.919146Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-17T17:47:01.919177Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-04-17T17:47:01.919202Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-17T17:47:01.919258Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1
2025-04-17T17:47:01.919293Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149]
2025-04-17T17:47:01.921533Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.921836Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-04-17T17:47:01.921903Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761
2025-04-17T17:47:01.921959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761
2025-04-17T17:47:01.936779Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-04-17T17:47:01.937098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:479:2440]
TestWaitNotification: OK eventTxId 103
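A pattern worth noting across these traces: every path carries a monotonically increasing version on the scheme board, and a published Version of 18446744073709551615 (max uint64) acts as the tombstone for a deleted path; the "Publication in-flight, count: N" lines count down as acks arrive, and subscribers are notified once the count reaches zero. A small hedged C++ sketch of that ack bookkeeping, with invented names; it is not the real schemeshard code.

```cpp
// Illustrative ack bookkeeping mirroring the "Publication in-flight" lines
// above; not YDB's actual implementation.
#include <cstdint>
#include <iostream>
#include <limits>
#include <map>

constexpr uint64_t TombstoneVersion = std::numeric_limits<uint64_t>::max(); // 18446744073709551615

struct TPublication {
    std::map<uint64_t, uint64_t> pending; // localPathId -> expected version

    // Apply one TEvUpdateAck; returns how many acks remain in flight.
    size_t Ack(uint64_t pathId, uint64_t version) {
        auto it = pending.find(pathId);
        if (it != pending.end() && version >= it->second)
            pending.erase(it); // this path is published (or tombstoned)
        return pending.size();
    }
};

int main() {
    TPublication pub;
    pub.pending = {{1, 12}, {3, TombstoneVersion}}; // as in txId 281474976710761
    std::cout << pub.Ack(1, 12) << "\n";               // 1 still in flight
    std::cout << pub.Ack(3, TombstoneVersion) << "\n"; // 0 -> notify subscribers
}
```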
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EnableChecksumsPersistance [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T17:46:26.639624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:46:26.640136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:46:26.640404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:46:26.640872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:46:26.641509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:46:26.641963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:46:26.662539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:46:26.663520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:46:26.682579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:46:27.598965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:46:27.599428Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:46:27.862807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:46:27.865478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:46:27.866418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:46:27.962467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:46:27.962639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:46:27.963228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:46:27.963443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:46:27.971977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:46:27.974342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:46:27.974523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:46:27.975284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:46:27.975618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:46:27.975954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:46:27.976966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:46:28.001165Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:46:29.031284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:46:29.032227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:29.032449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:46:29.032645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:46:29.032692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:29.034926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:46:29.035053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:46:29.035221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:29.035279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:46:29.035312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:46:29.035344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:46:29.041131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:29.041434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:46:29.041669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:46:29.051020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:29.051066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:29.051105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:46:29.051813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:46:29.072280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:46:29.083129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:46:29.084394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:46:29.095571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:46:29.096807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:46:29.097067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:46:29.098792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:46:29.099043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:46:29.099622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:46:29.099701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:46:29.101739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:46:29.101776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:46:29.101991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:46:29.102038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:46:29.102379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:46:29.102418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:46:29.102495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:46:29.102529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:46:29.102565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:46:29.102590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:46:29.102620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:46:29.102651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:46:29.102679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:46:29.102707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:46:29.102765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:46:29.102795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:46:29.102819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:46:29.104363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:46:29.104442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:46:29.104475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:607:2551], at schemeshard: 72057594046678944, txId: 281474976720758, path id: 1
2025-04-17T17:47:01.186111Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:607:2551], at schemeshard: 72057594046678944, txId: 281474976720758, path id: 3
2025-04-17T17:47:01.199355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72057594046678944
2025-04-17T17:47:01.199400Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720758:0, at schemeshard: 72057594046678944
2025-04-17T17:47:01.199709Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720758 ready parts: 1/1
2025-04-17T17:47:01.201198Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720758 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:47:01.202097Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720758
2025-04-17T17:47:01.202158Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720758
2025-04-17T17:47:01.234293Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720758
2025-04-17T17:47:01.234364Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10
2025-04-17T17:47:01.234399Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-04-17T17:47:01.237172Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720758
2025-04-17T17:47:01.237776Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720758
2025-04-17T17:47:01.237799Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720758
2025-04-17T17:47:01.237822Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7
2025-04-17T17:47:01.237845Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-17T17:47:01.238143Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 0/1, is published: true
2025-04-17T17:47:01.261293Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720758, at schemeshard: 72057594046678944
2025-04-17T17:47:01.261338Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 0/1, is published: true
2025-04-17T17:47:01.261592Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720758, at schemeshard: 72057594046678944
2025-04-17T17:47:01.289336Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720758 msg type: 269090816
2025-04-17T17:47:01.289454Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720758, partId: 4294967295, tablet: 72057594046316545
2025-04-17T17:47:01.289624Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758
FAKE_COORDINATOR: Add transaction: 281474976720758 at step: 5000007
FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720758 at step: 5000007
2025-04-17T17:47:01.290015Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758
2025-04-17T17:47:01.290381Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:47:01.290700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720758 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:47:01.290954Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720758:0, step: 5000007, at schemeshard: 72057594046678944
2025-04-17T17:47:01.292042Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720758:0, at schemeshard: 72057594046678944
2025-04-17T17:47:01.292371Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720758:0 progress is 1/1
2025-04-17T17:47:01.292682Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1
2025-04-17T17:47:01.293070Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720758:0 progress is 1/1
2025-04-17T17:47:01.293111Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1
2025-04-17T17:47:01.293186Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:47:01.293268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-17T17:47:01.293311Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: false
2025-04-17T17:47:01.307642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1
2025-04-17T17:47:01.307959Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720758:0
2025-04-17T17:47:01.308504Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720758:0
2025-04-17T17:47:01.309064Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-17T17:47:01.309103Z node 4
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720758, publications: 2, subscribers: 1 2025-04-17T17:47:01.309388Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:47:01.310036Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720758, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-17T17:47:01.323213Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:01.323294Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:01.323482Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720758, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:47:01.323631Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:01.323675Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:607:2551], at schemeshard: 72057594046678944, txId: 281474976720758, path id: 1 2025-04-17T17:47:01.323712Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:607:2551], at schemeshard: 72057594046678944, txId: 281474976720758, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720758 2025-04-17T17:47:01.324417Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:47:01.324541Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:47:01.324582Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720758 2025-04-17T17:47:01.324631Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:47:01.324689Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:47:01.325264Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:47:01.325332Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-04-17T17:47:01.325361Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720758 2025-04-17T17:47:01.325385Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T17:47:01.325429Z node 4 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:47:01.325493Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720758, subscribers: 1 2025-04-17T17:47:01.330911Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:557:2512] 2025-04-17T17:47:01.348094Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758 2025-04-17T17:47:01.349126Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758 2025-04-17T17:47:01.349212Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-04-17T17:47:01.349274Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720758 2025-04-17T17:47:01.355346Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:47:01.355401Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:648:2587] TestWaitNotification: OK eventTxId 102 >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |74.2%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |74.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::RebootDuringAbortion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:55.642781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:55.643606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.643829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:55.644247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:55.644282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:55.644314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:55.644374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.645098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:55.666717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:56.429497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:56.430122Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:56.568418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:56.570114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:56.570641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:56.655405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:56.656336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:56.660248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.660799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:56.684384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.695850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:56.696265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.696771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:56.696823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:56.696860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:56.696981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.735160Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:45:57.236094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:57.236301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.236501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:57.236693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:57.236749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.246842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.246968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:57.247121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.247166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:57.247199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:57.247227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:57.249586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.249632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:57.249668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:57.253345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.253403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.253442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.253491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.256621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:57.259134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:57.259330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:57.260233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.260375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:57.260430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.260655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:57.260704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.260846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:57.260911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:57.262802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:57.262841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.263140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.263200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:57.263528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.263621Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:57.263721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.263747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.263781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.263804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.263835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:57.263871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.263899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:57.263937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:57.263991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:57.264023Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:57.264065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:57.265732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.265824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.265853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ], 18446744073709551615 2025-04-17T17:47:05.559903Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:47:05.559981Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:47:05.560018Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:47:05.560077Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:47:05.560136Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:47:05.560447Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:47:05.560511Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:47:05.560537Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:47:05.560563Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:47:05.560589Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:05.560651Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T17:47:05.563256Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:47:05.563330Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:47:05.563367Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:47:05.563399Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 
72057594037968897 at ss 72057594046678944 2025-04-17T17:47:05.564144Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:47:05.564319Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-04-17T17:47:05.565253Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:47:05.565534Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-17T17:47:05.565971Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T17:47:05.566329Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:47:05.566816Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:05.567031Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:47:05.567240Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-17T17:47:05.567379Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T17:47:05.567530Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T17:47:05.567684Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-17T17:47:05.568737Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:47:05.568898Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2025-04-17T17:47:05.569522Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:47:05.569596Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:47:05.569688Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:47:05.571543Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:47:05.571630Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:3 tabletId 72075186233409548 2025-04-17T17:47:05.574745Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:47:05.574791Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T17:47:05.574879Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T17:47:05.574908Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T17:47:05.575071Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:47:05.575123Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:47:05.576422Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:47:05.576718Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:47:05.576800Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:47:05.577362Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:47:05.577488Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:47:05.577547Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:540:2494] TestWaitNotification: OK eventTxId 103 2025-04-17T17:47:05.578207Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:47:05.578507Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 319us result status StatusPathDoesNotExist 2025-04-17T17:47:05.578810Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-04-17T17:47:05.579357Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-17T17:47:05.579446Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-17T17:47:05.579493Z node 15 :HIVE INFO: [72057594037968897] 
TEvSubscribeToTabletDeletion, 72075186233409548 2025-04-17T17:47:05.579551Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-04-17T17:47:05.580098Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:47:05.580330Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 245us result status StatusSuccess 2025-04-17T17:47:05.580794Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> KqpJoinOrder::DatetimeConstantFold+ColumnStore |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> 
KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::Checksums >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull >> RemoteTopicReader::ReadTopic [GOOD] |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::CreateSystemColumn >> TExportToS3Tests::AuditCancelledExport [GOOD] >> TExportToS3Tests::AutoDropping >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::SchemaMapping >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-04-17T17:46:27.076772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339920013367681:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:27.077236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016c1/r3tmp/tmp9aD0Tf/pdisk_1.dat 2025-04-17T17:46:29.800018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:32.642874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:32.643004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:32.762932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14594 TServer::EnableGrpc on GrpcPort 7254, node 1 2025-04-17T17:46:37.061544Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339920013367681:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:37.061848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:37.061865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:46:37.114307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:37.126647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:37.126671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:37.126680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T17:46:37.126793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:46:40.024646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:40.310305Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:46:41.109568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:46:48.666793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:46:48.666820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:49.706254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340014502649146:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:49.707054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:49.726381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340014502649161:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:49.726458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340014502649162:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:49.753885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-17T17:46:49.907024Z node 1 :TX_PROXY ERROR: Actor# [1:7494340014502649168:2478] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:46:49.953423Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T17:46:49.955158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340014502649166:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T17:46:49.955221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340014502649165:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T17:46:50.041730Z node 1 :TX_PROXY ERROR: Actor# [1:7494340018797616510:2508] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:46:50.059416Z node 1 :TX_PROXY ERROR: Actor# [1:7494340018797616528:2516] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:00.261016Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2drjdx8kpswd9f2byghpmq", SessionId: ydb://session/3?node_id=1&id=YTg3ZGM3OWUtOWFiZGEyM2QtOWM3MjI4OGYtMjRlZmJmNTU=, Slow query, duration: 10.599496s, status: STATUS_CODE_UNSPECIFIED, user: metadata@system, results: 0b, text: "DECLARE $ids AS List>;\nSELECT * FROM `//Root/.metadata/initialization/migrations`\nWHERE (componentId, modificationId) IN $ids\n", parameters: 56b 2025-04-17T17:47:01.492224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T17:47:03.485477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:47:04.992280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T17:47:05.578160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:47:06.218040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.429959Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061366:2878] Handshake: worker# [1:7494339975847943165:2310] 2025-04-17T17:47:07.434104Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061366:2878] Create read session: session# [1:7494340091812061369:2309] 2025-04-17T17:47:07.438302Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061366:2878] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:47:07.464223Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061366:2878] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2025-04-17T17:47:07.316000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:47:07.468408Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061366:2878] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:47:07.619291Z node 1 :REPLICATION_SERVICE DEBUG: 
[RemoteTopicReader][/Root/topic][0][1:7494340091812061366:2878] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:47:07.603000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:47:07.726264Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061483:2917] Handshake: worker# [1:7494339975847943165:2310] 2025-04-17T17:47:07.759621Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061483:2917] Create read session: session# [1:7494340091812061484:2309] 2025-04-17T17:47:07.762910Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061483:2917] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:47:07.819482Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340091812061483:2917] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:47:07.603000Z MessageGroupId: producer ProducerId: producer }] } } >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> Viewer::PDiskMerging >> Viewer::LevenshteinDistance [GOOD] >> Viewer::JsonStorageListingV2 >> Viewer::JsonAutocompleteStartOfDatabaseName >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> Viewer::Cluster10000Tablets |74.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Viewer::SelectStringWithNoBase64Encoding >> Viewer::JsonAutocompleteEmpty >> Viewer::PDiskMerging [GOOD] >> Viewer::SelectStringWithBase64Encoding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:54.809294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:54.810401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:54.810620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:54.811027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:54.811486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:54.811507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:54.811948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:54.812210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:54.815864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:56.032066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:56.032562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:56.248092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:56.250724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:56.251637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:56.329100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:56.330446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:56.333597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.334167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:56.354802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.362842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:56.363091Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.363786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:56.364598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:56.365007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:56.366794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.410820Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:45:57.210237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:57.210982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.211758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:57.213594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:57.213725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.220308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.220955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:57.221262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.221313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:57.221348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:57.221380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:57.223240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.223304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:57.223344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:57.227133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.227270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T17:45:57.227371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.227577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.233074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:57.237335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:57.238049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:57.241897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.242534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:57.242653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.243539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:57.243741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.244547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:57.245043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:57.252404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:57.252610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.253347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.253589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:57.254658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.254852Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:57.255305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.255339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-04-17T17:45:57.255377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.255490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.255680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:57.255717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.255748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:57.255775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:57.255835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:57.255870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:57.255946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:57.257630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.258172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.258331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 5-04-17T17:47:10.029418Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T17:47:10.029696Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:10.031230Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:47:10.031299Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:47:10.031323Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:47:10.031347Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T17:47:10.031369Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:47:10.031424Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T17:47:10.049087Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-17T17:47:10.049940Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, 
partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-17T17:47:10.051723Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:10.052307Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 64424511594 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:10.052779Z node 15 :FLAT_TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-04-17T17:47:10.053085Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 3] name: Obj type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 104 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:10.053347Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:47:10.053891Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:47:10.058618Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 130 2025-04-17T17:47:10.058825Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:47:10.058904Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:47:10.067170Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:47:10.067778Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:47:10.071415Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:10.071446Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:10.071556Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:47:10.071900Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:10.071926Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:449:2409], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-17T17:47:10.071959Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:449:2409], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T17:47:10.072006Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:47:10.072049Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 
ProgressState 2025-04-17T17:47:10.072545Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:47:10.072833Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:47:10.073378Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:47:10.073654Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:47:10.073934Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T17:47:10.074439Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:47:10.074704Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:47:10.074970Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:47:10.075523Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:47:10.075840Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-17T17:47:10.076123Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:47:10.076384Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-17T17:47:10.077251Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:47:10.077312Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:47:10.077559Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:47:10.078077Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T17:47:10.081385Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:47:10.082029Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:47:10.082091Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:47:10.082116Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:47:10.082380Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:47:10.082406Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:10.082688Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T17:47:10.086519Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:47:10.097107Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T17:47:10.102454Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:47:10.103670Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:47:10.105242Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:47:10.105719Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:47:10.106242Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409547 2025-04-17T17:47:10.107666Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:47:10.107752Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:47:10.113683Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:47:10.113916Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:47:10.114542Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T17:47:10.115440Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:47:10.115681Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:47:10.117406Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T17:47:10.117947Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:47:10.130598Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [15:575:2517] TestWaitNotification: OK eventTxId 104 >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:54.950937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:54.953583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:54.954041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:54.962480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:54.963146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:54.963390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:54.963879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:54.964541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:54.971307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:55.336664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:55.336906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:55.470598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:55.473225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:55.474140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:55.528152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:55.529675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:55.531962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:55.532126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:55.534667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:55.535597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:55.535635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:55.535798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:55.536267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:55.536518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-17T17:45:55.538168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.586518Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:45:56.123238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:56.124696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.126520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:56.128737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:56.129186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.149773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.163378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:56.165086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.165348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:56.165831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:56.166060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:56.188923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.189448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:56.190114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:56.216132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.216175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.216836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.217154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.264516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:56.291798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:56.293225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:56.317092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.323175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:56.323629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.325567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:56.325843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.335606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:56.336246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:56.369523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:56.369757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:56.377270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.377718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:56.380058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.380696Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:56.381570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:56.382002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.394804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:56.395020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.395219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:56.395459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T17:45:56.395671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:56.395876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:56.396336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:56.402222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:56.402618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:56.428416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:56.429089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:56.429515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... CompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 31 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:10.196823Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:47:10.198508Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" took 986us result status StatusSuccess 2025-04-17T17:47:10.200531Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" PathDescription { Self { Name: "DirA" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:10.226795Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:47:10.229127Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" took 2.87ms result status StatusSuccess 2025-04-17T17:47:10.243637Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 32 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:10.267444Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:47:10.268848Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" took 6.19ms result status StatusSuccess 2025-04-17T17:47:10.275874Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:10.301831Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:47:10.304063Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" took 2.01ms result status StatusSuccess 2025-04-17T17:47:10.308821Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateSystemColumn [GOOD] >> Viewer::TabletMergingPacked >> TExportToS3Tests::AutoDropping [GOOD] >> TExportToS3Tests::Checksums [GOOD] >> TExportToS3Tests::ChecksumsWithCompression >> KqpJoin::JoinDupColumnRightPure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:55.282021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:55.282129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.282169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:55.282225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:55.282266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:55.282291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:55.282352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.282432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:55.282736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:56.013373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:56.013419Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:56.025587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:56.025900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:56.026028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:56.062096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:56.063634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:56.068728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.069110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:56.090231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.106051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:56.110577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.111962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:56.112402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:56.112646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:56.115676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.190139Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 
2025-04-17T17:45:56.833609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:56.840002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.840901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:56.842655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:56.843247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.875109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.890503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:56.892278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.892745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:56.892971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:56.893196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:56.912794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.912837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:56.913225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:56.927292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.927338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.927377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.927421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.948886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:56.965982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:56.968051Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:56.995595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.996089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:56.996130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.997259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:56.997707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.007177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:57.007918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:57.044477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:57.044686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.045625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.045687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:57.045992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.046035Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:57.046127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.046155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.054532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.054759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.055141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:57.055360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.055808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:57.056024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:57.056257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:57.056483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:57.056875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:57.092308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.093327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.093565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... eadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:47:13.290734Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:47:13.291035Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:47:13.291627Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:47:13.292280Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:344:2323] message: TxId: 102 2025-04-17T17:47:13.292644Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:47:13.292989Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:47:13.293298Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:47:13.296208Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:47:13.305539Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:47:13.305880Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:345:2324] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-17T17:47:13.317987Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:13.319794Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:47:13.323980Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:47:13.324363Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-17T17:47:13.324665Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
2025-04-17T17:47:13.325015Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:47:13.325349Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:47:13.327806Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:47:13.330225Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:13.330917Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:47:13.343116Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-17T17:47:13.344078Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-04-17T17:47:13.366344Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:13.366692Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:13.367669Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:47:13.368324Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:13.368869Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-17T17:47:13.369163Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-17T17:47:13.383343Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:47:13.384093Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-04-17T17:47:13.385834Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-17T17:47:13.391570Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:47:13.392635Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 
2025-04-17T17:47:13.393313Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:47:13.393632Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-17T17:47:13.393950Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:47:13.395759Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:47:13.395840Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:47:13.395868Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:47:13.395896Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2025-04-17T17:47:13.395926Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:47:13.395995Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:47:13.402769Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-04-17T17:47:13.403490Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2025-04-17T17:47:13.404070Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-17T17:47:13.405405Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-17T17:47:13.406550Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-04-17T17:47:13.419562Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-17T17:47:13.419904Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-17T17:47:13.420851Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-17T17:47:13.421764Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-17T17:47:13.425000Z node 15 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-17T17:47:13.425790Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-04-17T17:47:13.437843Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:47:13.444350Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:47:13.448983Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:47:13.455120Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:47:13.455788Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2025-04-17T17:47:13.456138Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2025-04-17T17:47:13.505089Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-17T17:47:13.505535Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-04-17T17:47:13.505907Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2025-04-17T17:47:13.505937Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 >> TExportToS3Tests::SchemaMapping [GOOD] >> TExportToS3Tests::SchemaMappingEncryption >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:46:32.451246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:46:32.452008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:32.452697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:46:32.452940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
using default configuration 2025-04-17T17:46:32.453203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:46:32.458346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:46:32.458646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:32.459356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:46:32.466513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:46:32.766601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:46:32.766644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:32.866342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:46:32.868390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:46:32.869375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:46:32.949124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:46:32.950095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:46:32.953902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:32.958668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:46:32.983182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:32.991375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:32.991638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:32.992438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:46:32.992892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:32.993352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:46:32.994784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.039419Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:46:33.707839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:46:33.709883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.714578Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:46:33.717219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:46:33.717642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.733214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:33.734743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:46:33.736271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.736506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:46:33.736937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:46:33.737358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:46:33.747366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.747626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:46:33.747857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:46:33.758818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.758865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.759059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:33.759969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.782747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:46:33.805350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:46:33.806251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:46:33.824128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:33.824898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:46:33.825665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:33.828131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:46:33.828825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:33.830380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:46:33.830995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:46:33.848593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:33.849171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:33.850162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:33.857486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:46:33.860236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.860958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:46:33.861892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:33.862156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.866698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:33.866963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.867238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:46:33.867731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.868237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:46:33.868482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:46:33.868980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:46:33.869440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:46:33.870127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:46:33.889277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-17T17:46:33.889983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:33.894950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 1 2025-04-17T17:47:08.565516Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 6 2025-04-17T17:47:08.569228Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710769:0, at schemeshard: 72057594046678944 2025-04-17T17:47:08.569281Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710769:0, at schemeshard: 72057594046678944 2025-04-17T17:47:08.570333Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:47:08.570578Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710769 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:47:08.571399Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.571578Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.571604Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:47:08.571632Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2025-04-17T17:47:08.571911Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:47:08.573408Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.573470Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.573489Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:47:08.573762Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-04-17T17:47:08.573788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T17:47:08.573841Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 0/1, is published: true 2025-04-17T17:47:08.583919Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710769, at schemeshard: 72057594046678944 2025-04-17T17:47:08.583969Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 0/1, is published: true 2025-04-17T17:47:08.584108Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710769, at schemeshard: 72057594046678944 2025-04-17T17:47:08.584387Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710769:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710769 msg type: 269090816 2025-04-17T17:47:08.584689Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710769, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710769 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710769 at step: 5000014 2025-04-17T17:47:08.585670Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000014, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:08.585774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710769 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000014 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:08.586059Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710769:0, step: 5000014, at schemeshard: 72057594046678944 2025-04-17T17:47:08.586372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710769:0, at schemeshard: 72057594046678944 2025-04-17T17:47:08.586621Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710769:0 progress is 1/1 2025-04-17T17:47:08.586739Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:47:08.586945Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710769:0 progress is 1/1 2025-04-17T17:47:08.587042Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:47:08.587095Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:47:08.587245Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-17T17:47:08.587470Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 1/1, is published: false 2025-04-17T17:47:08.587636Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-04-17T17:47:08.587872Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710769:0 2025-04-17T17:47:08.588018Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710769:0 2025-04-17T17:47:08.588286Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 
2025-04-17T17:47:08.588560Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710769, publications: 2, subscribers: 1 2025-04-17T17:47:08.588794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710769, [OwnerId: 72057594046678944, LocalPathId: 1], 19 2025-04-17T17:47:08.588925Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710769, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-04-17T17:47:08.590401Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.590497Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.594810Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:08.594853Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710769, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:08.595155Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710769, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-04-17T17:47:08.595403Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:08.595438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 1 2025-04-17T17:47:08.595778Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710769 2025-04-17T17:47:08.597653Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 19 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.597976Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 19 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.598753Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:47:08.599067Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 19 2025-04-17T17:47:08.599386Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:47:08.600742Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.600815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.600835Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710769 2025-04-17T17:47:08.600999Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-04-17T17:47:08.601140Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-17T17:47:08.601204Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710769, subscribers: 1 2025-04-17T17:47:08.601458Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T17:47:08.609131Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.609824Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-04-17T17:47:08.609907Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710769 2025-04-17T17:47:08.609965Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710769 2025-04-17T17:47:08.614310Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:47:08.614564Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [4:936:2864] TestWaitNotification: OK eventTxId 104 >> ReadOnlyVDisk::TestWrites |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRightPure [GOOD] Test command err: Trying to start YDB, gRPC: 16509, MsgBus: 26818 2025-04-17T17:46:44.252163Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339992503454619:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:44.252910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e6e/r3tmp/tmpg9eE1u/pdisk_1.dat 2025-04-17T17:46:52.883849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:52.884430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:52.978999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:53.768652Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.136032s 2025-04-17T17:46:53.769042Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.136436s TServer::EnableGrpc on GrpcPort 16509, node 1 2025-04-17T17:46:54.155526Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339992503454619:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:54.156398Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:46:56.746924Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:46:56.747194Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:46:57.265304Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:57.426630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:57.426665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:57.454713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:57.454726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:57.454733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:57.455390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26818 TClient is connected to server localhost:26818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:04.992081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.013544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:05.022160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.368379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.545711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:47:05.621355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:07.359536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340091287704240:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.359621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.856333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.896148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.926622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.968095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:08.041328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:08.100420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:08.195175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:08.472363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340095582672213:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:08.472423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:08.472752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340095582672218:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:08.478107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:08.491642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340095582672220:2484], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:08.583105Z node 1 :TX_PROXY ERROR: Actor# [1:7494340095582672281:3591] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:10.506436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:10.506455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:11.677161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:47:11.723832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:47:11.774674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:109:2141] 2025-04-17T17:46:22.923760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:46:22.923863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:22.923907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:46:22.923938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:46:22.924050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:46:22.924082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:46:22.924145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:22.924219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:46:22.924509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:46:23.609702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:46:23.618712Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:23.763019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:46:23.770349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:46:23.771315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:46:23.841535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:46:23.844548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:46:23.849544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:23.850260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:46:23.861931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:23.863355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:23.863405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:23.863557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:46:23.863598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:23.863632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:46:23.863832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.916603Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:46:24.953875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:46:24.955369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:24.956718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:46:24.960715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:46:24.961204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:24.986711Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:24.987646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:46:24.987830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:24.987875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:46:24.987904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:46:24.987933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:46:25.000631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:25.001087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:46:25.001538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:46:25.020846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:25.020898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:25.020933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:25.020992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:46:25.052746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:46:25.092638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:46:25.092840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:46:25.093581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:25.093690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:46:25.093728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:25.093952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:46:25.093999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:25.094150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:46:25.098420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:46:25.101099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:25.101747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:25.103507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:25.103961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:46:25.105988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:25.114538Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:46:25.115678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:25.115928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:25.116647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:25.116915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:25.117139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:46:25.117375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:25.117809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:46:25.122427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:46:25.122702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:46:25.122951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:46:25.123404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:46:25.136150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:25.136452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:25.136487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:47:15.035319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:47:15.037000Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:15.037852Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:15.037872Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T17:47:15.037892Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T17:47:15.037912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:47:15.037960Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-04-17T17:47:15.038305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T17:47:15.059649Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:15.060573Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:15.060648Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T17:47:15.060702Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T17:47:15.060742Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:15.060763Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-04-17T17:47:15.060786Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-04-17T17:47:15.071823Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:15.072211Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:47:15.072531Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:470:2431] TestWaitNotification: OK eventTxId 102 2025-04-17T17:47:15.100611Z node 4 :EXPORT DEBUG: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:47:15.125059Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:15.125409Z node 4 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 102 2025-04-17T17:47:15.125988Z node 
4 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:47:15.126591Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:15.127180Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2025-04-17T17:47:15.127230Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:15.127895Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2025-04-17T17:47:15.128744Z node 4 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-04-17T17:47:15.130197Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:15.150403Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:15.151315Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-04-17T17:47:15.152640Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:47:15.161123Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:47:15.163152Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-102 2025-04-17T17:47:15.164170Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-04-17T17:47:15.164791Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761" TxId: 
281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-04-17T17:47:15.165145Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:15.165473Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-04-17T17:47:15.169920Z node 4 :EXPORT TRACE: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-04-17T17:47:15.170669Z node 4 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-04-17T17:47:15.181972Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:15.182960Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:47:15.183606Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T17:47:15.184223Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T17:47:15.185120Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:15.185448Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-04-17T17:47:15.185778Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-04-17T17:47:15.192774Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2025-04-17T17:47:15.193745Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:47:15.194298Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:47:15.197368Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:47:15.198168Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:47:15.198444Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:692:2647] TestWaitNotification: OK eventTxId 102 2025-04-17T17:47:15.205082Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:47:15.205764Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 1.2ms result status StatusSuccess 2025-04-17T17:47:15.207916Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 3705, MsgBus: 21475 2025-04-17T17:46:43.892182Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339987652068672:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:44.126768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fe2/r3tmp/tmpPiBLOk/pdisk_1.dat 2025-04-17T17:46:53.013319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:53.038867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:53.080421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:53.749510Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.105015s 2025-04-17T17:46:53.749567Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.105089s TServer::EnableGrpc on GrpcPort 3705, node 1 2025-04-17T17:47:00.339679Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339987652068672:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:00.339758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340009126905473:2292];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:00.339836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:00.339845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:01.324933Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:01.346351Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:01.346371Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:01.346378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:01.346474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21475 TClient is connected to server localhost:21475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:04.729701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:04.764662Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:04.780633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.027537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.301096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.417464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:06.968908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340086436318208:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.969007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.376247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.465663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.559003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.642601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.721263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.783681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.843867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.924614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340090731286172:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.924735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.925254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340090731286177:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.929917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:07.958339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340090731286179:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:08.044899Z node 1 :TX_PROXY ERROR: Actor# [1:7494340095026253534:3570] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:11.246463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:11.246709Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:11.982554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:47:12.061342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:47:12.114639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [FAIL] >> Viewer::TabletMerging >> TExportToS3Tests::ChecksumsWithCompression [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T17:46:31.173229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:46:31.173768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:31.174018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:46:31.174514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:46:31.177063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:46:31.177301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:46:31.198863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:31.200872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:46:31.214252Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2025-04-17T17:46:32.500738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:46:32.501470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:32.642861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:46:32.643073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:46:32.643221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:46:32.715400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:46:32.719423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:46:32.728418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:32.739445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:46:32.763147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:32.772226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:32.772641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:32.773941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:46:32.774252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:32.774509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:46:32.776644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:46:32.834220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-04-17T17:46:33.467159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:46:33.467396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.467561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:46:33.467750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:46:33.467822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.488713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:33.490361Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:46:33.491436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.491845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:46:33.492022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:46:33.492063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:46:33.507196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.507702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:46:33.508147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:46:33.521074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.521126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.521569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:33.522297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.544973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:46:33.557514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:46:33.559978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:46:33.573987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:33.575142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:46:33.575391Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:33.578117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:46:33.578628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:33.580665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:46:33.581456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:46:33.599151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:33.599916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:33.601259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:33.601546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:46:33.604212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:33.604537Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:46:33.605862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:33.606404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.606679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:33.607444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.608048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:46:33.608340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:33.608687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:46:33.609272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:46:33.609550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:46:33.610124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:46:33.610669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:46:33.622622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:33.623425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:33.623907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 2025-04-17T17:47:20.950039Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-17T17:47:20.958813Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:47:20.960102Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:47:20.962070Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:20.966803Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:20.966841Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:47:20.967133Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-17T17:47:20.967466Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:47:20.968686Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:20.968752Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:20.968775Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:47:20.968798Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-17T17:47:20.968822Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:47:20.968888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-17T17:47:21.001016Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:21.004104Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:47:21.004149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-17T17:47:21.004193Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:47:21.005144Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-17T17:47:21.005497Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-04-17T17:47:21.007551Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:21.007678Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:21.008047Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-04-17T17:47:21.008706Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-17T17:47:21.019912Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-17T17:47:21.020223Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:47:21.020812Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-17T17:47:21.021151Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:47:21.021492Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:47:21.021846Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:47:21.021879Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-17T17:47:21.021929Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-17T17:47:21.021959Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-17T17:47:21.021993Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-17T17:47:21.022052Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:47:21.022086Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-17T17:47:21.026219Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-04-17T17:47:21.026535Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-17T17:47:21.027304Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.027400Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.040924Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:21.041221Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:21.041619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:47:21.041724Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:21.041754Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-17T17:47:21.042362Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-17T17:47:21.045785Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.046171Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.046523Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:47:21.046854Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-17T17:47:21.047151Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:47:21.048593Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.048917Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.049515Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:47:21.049541Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:47:21.049846Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:47:21.051883Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-17T17:47:21.052551Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-04-17T17:47:21.061839Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.062525Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:21.062597Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:47:21.062647Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:47:21.062682Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:21.062716Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:47:21.062754Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:47:21.073118Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:21.073519Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:47:21.074455Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:775:2710] TestWaitNotification: OK eventTxId 103 >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 24334, MsgBus: 11791 2025-04-17T17:46:41.067216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339981097768268:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:41.067260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e6d/r3tmp/tmpJHLJSa/pdisk_1.dat 2025-04-17T17:46:44.754327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:46.072230Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339981097768268:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:46.072277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:46:46.168473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:46.675727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:47.150402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:48.003345Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:48.029698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:48.030031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:48.138355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24334, node 1 2025-04-17T17:46:48.839167Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.224576s 2025-04-17T17:46:48.839475Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.224905s 2025-04-17T17:46:50.664980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:50.664999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:50.665009Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:50.665103Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11791 2025-04-17T17:47:01.071101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:01.071124Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:02.109380Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340036932343751:2323];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:02.109442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:11791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:04.365445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:47:04.534875Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:04.559837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:47:04.908569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:47:05.123797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.209385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:06.508377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340088471952537:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.508512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.988219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.069664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.132998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.199202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.280695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.380855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.441189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.551154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340092766920507:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.551208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.551279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340092766920512:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.561577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:07.591708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340092766920514:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:07.662794Z node 1 :TX_PROXY ERROR: Actor# [1:7494340092766920570:3601] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:09.991348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.116667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.299229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.740769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.886394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-04-17T17:47:24.589911Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T17:47:24.596420Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-04-17T17:47:24.596745Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T17:47:24.596977Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T17:47:24.597132Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-17T17:47:24.597309Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-17T17:47:24.597333Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T17:47:24.597465Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T17:47:24.597665Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-17T17:47:24.597742Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [3:45:2057], tablet id = 3, status = OK 2025-04-17T17:47:24.597789Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:45:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T17:47:24.597846Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-17T17:47:24.597892Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-17T17:47:24.597994Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-04-17T17:47:24.598035Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T17:47:24.598075Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-17T17:47:24.598090Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T17:47:24.598148Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-17T17:47:24.598171Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T17:47:24.598833Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-17T17:47:24.598896Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-17T17:47:24.598920Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T17:47:24.598997Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-17T17:47:24.609340Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-17T17:47:24.609384Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T17:47:24.609447Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-17T17:47:24.609461Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T17:47:24.619884Z node 2 :STATISTICS 
DEBUG: Skip TEvKeepAliveTimeout 2025-04-17T17:47:24.619956Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-04-17T17:47:24.619986Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-17T17:47:24.620063Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-17T17:47:24.620088Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-17T17:47:24.620216Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-17T17:47:24.620246Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T17:47:24.620285Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-17T17:47:24.620313Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [FAIL] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets |74.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.4%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryption [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:47:04.333695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:47:04.334665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:47:04.334938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:47:04.335193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:47:04.335765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:47:04.339207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:47:04.339287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:47:04.339402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:47:04.339753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:47:04.540893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:47:04.540965Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:04.595661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:47:04.595883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:47:04.596411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-17T17:47:04.627704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:47:04.628596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:47:04.630644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:04.631390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:47:04.643378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:04.652317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:04.652599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:04.653331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:47:04.653805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:04.654058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:47:04.656185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.692114Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:47:04.905614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:04.905859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.906077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:47:04.906384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:47:04.906453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.910113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:04.910305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:47:04.910521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.910605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:47:04.910645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-17T17:47:04.910681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:47:04.913232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.913295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:47:04.913338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:47:04.917642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.917706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.917750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:04.917796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.921574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:47:04.927107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:47:04.927323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:47:04.928389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:04.928584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:04.928645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:04.928961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:47:04.929024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:04.929196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:47:04.929281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:47:04.935129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:04.935214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:04.935434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:04.935484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:47:04.935830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.935881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:47:04.935969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:47:04.936001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.936038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:47:04.936072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.936124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:47:04.936170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.936204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:47:04.936232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:47:04.936318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:04.936359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:47:04.936388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:47:04.939007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:47:04.939158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:47:04.939196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
46678944, LocalPathId: 4], 18446744073709551615 2025-04-17T17:47:24.235206Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:24.238475Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:24.238515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:24.238984Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:47:24.239291Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:24.239321Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-17T17:47:24.239352Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-17T17:47:24.241110Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:24.241314Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:24.241437Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:47:24.241702Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-17T17:47:24.241989Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:47:24.243239Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:24.243432Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:24.243455Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:47:24.243479Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:47:24.243506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:47:24.243674Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-17T17:47:24.244077Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T17:47:24.249454Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:24.251472Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:47:24.251540Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:47:24.251587Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:47:24.251624Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:24.251646Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:47:24.251671Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:47:24.257320Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:24.257415Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:47:24.257574Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:506:2464] TestWaitNotification: OK eventTxId 103 2025-04-17T17:47:24.262851Z node 4 :EXPORT DEBUG: TExport::TTxForget, dropping export tables, info: { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:47:24.270117Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:24.270278Z node 4 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 103 2025-04-17T17:47:24.270574Z node 4 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:47:24.270742Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:24.271196Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 103, at schemeshard: 72057594046678944 2025-04-17T17:47:24.271342Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:24.271466Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710764, id# 103 2025-04-17T17:47:24.271821Z node 4 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710764 2025-04-17T17:47:24.272284Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:24.290308Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-103" } Internal: true } TxId: 
281474976710764 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:24.291081Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/export-103, pathId: 0, opId: 281474976710764:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.291607Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710764:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:47:24.304933Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710764, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:47:24.305417Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710764, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, operation: DROP DIRECTORY, path: /MyRoot/export-103 2025-04-17T17:47:24.305571Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-04-17T17:47:24.305639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-04-17T17:47:24.305701Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:24.305737Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-04-17T17:47:24.305813Z node 4 :EXPORT TRACE: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-04-17T17:47:24.305942Z node 4 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2025-04-17T17:47:24.307732Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:47:24.307855Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:47:24.307937Z node 4 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:47:24.307995Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:47:24.308679Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:47:24.308832Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:47:24.309012Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:47:24.323368Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-04-17T17:47:24.323940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:47:24.324043Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:47:24.325645Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:47:24.325921Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:47:24.330510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1000:2927] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:55.264859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:55.264987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.265022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:55.265053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:55.265087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:55.265111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:55.265159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.265222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:55.265485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:55.713132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:55.713373Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-17T17:45:55.788893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:55.791078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:55.791776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:55.841521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:55.843460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:55.848188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:55.848756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:55.856609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:55.863463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:55.863683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:55.864575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:55.865010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:55.865264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:55.866784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.932392Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:45:56.389721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:56.390865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.392324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:56.393949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:56.394387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.409614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.415534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:56.417448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.417659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:56.418163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:56.418343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:56.438885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.439438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:56.439971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:56.448681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.448720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.449111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.449351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.473855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:56.482959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:56.484187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:56.491040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:56.491919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:56.492324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.495676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:56.495892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:56.496372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:56.497376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:56.509866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:56.511243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:56.512202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:56.512799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:56.514629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:56.515017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:56.516039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:56.516484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.517130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:56.517337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.517542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:56.518213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:56.518585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:56.518785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:56.519047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:56.519696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:56.519915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:56.529209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:56.529304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:56.529334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
594037968897 2025-04-17T17:47:24.679465Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 2 -> 3 2025-04-17T17:47:24.680891Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T17:47:24.684643Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T17:47:24.688189Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.689231Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.689410Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 107:0 ProgressState at tabletId# 72057594046678944 2025-04-17T17:47:24.689723Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 107:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409549 seqNo: 2:1 2025-04-17T17:47:24.691420Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 107:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409549 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 133 RawX2: 64424511596 } TxBody: "\n\223\004\n\005Table\020\005\032\r\n\003key\030\004 \001(\000@\000\032\020\n\005value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\r/MyRoot/Table\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\005:\004\010\002\020\001" TxId: 107 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } SubDomainPathId: 1 2025-04-17T17:47:24.701199Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 107:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 269549568 2025-04-17T17:47:24.701888Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 0, tablet: 72075186233409549 TestModificationResult got TxId: 107, wait until txId: 107 TestModificationResults wait txId: 108 2025-04-17T17:47:24.852330Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Table" NewOwner: "user1" } ApplyIf { PathTypes: 
EPathTypeSubDomain PathTypes: EPathTypeExtSubDomain } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:24.852617Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /MyRoot/Table, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.852785Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable, at schemeshard: 72057594046678944 2025-04-17T17:47:24.855381Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:24.855604Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable, operation: MODIFY ACL, path: /MyRoot/Table, set owner:user1 TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2025-04-17T17:47:24.858741Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Table" NewOwner: "user1" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:24.858958Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /MyRoot/Table, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.859088Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 5] name: Table type: EPathTypeTable state: EPathStateCreate stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:24.859143Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-17T17:47:24.859345Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-17T17:47:24.859413Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 109:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.859535Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-17T17:47:24.859592Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-17T17:47:24.859658Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-17T17:47:24.859713Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-17T17:47:24.859802Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T17:47:24.859885Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:47:24.859929Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: false 2025-04-17T17:47:24.859992Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-17T17:47:24.860097Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-17T17:47:24.860154Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-04-17T17:47:24.860204Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 109, publications: 2, subscribers: 0 2025-04-17T17:47:24.860256Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 109, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-04-17T17:47:24.860302Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 109, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-04-17T17:47:24.862709Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSuccess TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:24.862901Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Table, set owner:user1 2025-04-17T17:47:24.863156Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:24.863213Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-17T17:47:24.863373Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:24.863541Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:24.863599Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 109, path id: 5 2025-04-17T17:47:24.863658Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 109, path id: 1 2025-04-17T17:47:24.864320Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 109 2025-04-17T17:47:24.864445Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 109 2025-04-17T17:47:24.864505Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 109 2025-04-17T17:47:24.864563Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-04-17T17:47:24.864624Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T17:47:24.865125Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 
PathOwnerId: 72057594046678944, cookie: 109 2025-04-17T17:47:24.865201Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 109 2025-04-17T17:47:24.865233Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 109 2025-04-17T17:47:24.865258Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-04-17T17:47:24.865283Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-17T17:47:24.865346Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 109, subscribers: 0 2025-04-17T17:47:24.867390Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-04-17T17:47:24.867680Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 TestModificationResult got TxId: 109, wait until txId: 109 >> TColumnShardTestReadWrite::WriteExoticTypes >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 25595, MsgBus: 29518 2025-04-17T17:46:41.448927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339980811982830:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:41.462561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e6a/r3tmp/tmpggPAMx/pdisk_1.dat 2025-04-17T17:46:46.419708Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339980811982830:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:46.419759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:46:47.382694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:47.508675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:48.767399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:48.767961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:48.851414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 25595, node 1 2025-04-17T17:46:51.608408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:55.644350Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:55.644367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:55.644373Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:55.644459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29518 TClient is connected to server localhost:29518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:04.162143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:04.294457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:04.627405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:04.643187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:04.643208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:04.962129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.066053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:06.482619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340088186167055:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.482745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.814425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:06.872546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:06.954877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.044505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.098668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.189718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.286473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.395155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340092481135029:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.395233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.395550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340092481135034:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:07.399087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:07.424448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340092481135036:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:07.480663Z node 1 :TX_PROXY ERROR: Actor# [1:7494340092481135093:3585] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |74.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:47:01.236086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:47:01.236167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:47:01.236199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:47:01.236227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:47:01.236270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:47:01.236294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:47:01.236340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:47:01.236405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:47:01.236650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:47:02.022423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:47:02.023510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:02.273145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-17T17:47:02.279627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:47:02.280521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:47:02.431408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:47:02.433563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:47:02.442602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:02.443338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:47:02.503937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:02.548980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:02.549273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:02.558743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:47:02.559059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:02.559353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:47:02.561140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:47:02.833974Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:47:03.865895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:03.866150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:03.866414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:47:03.866802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:47:03.866870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:03.873180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:03.874282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:47:03.875055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:03.875524Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:47:03.875791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:47:03.875912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:47:03.892530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:03.892816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:47:03.893079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:47:03.917161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:03.917486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:03.918004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:03.926899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.004267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:47:04.015480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:47:04.015677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:47:04.016741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:04.016877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:04.016945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:04.017220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:47:04.017295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:04.017492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:47:04.017572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:47:04.051362Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:04.051428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:04.051730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:04.051808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:47:04.052260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:04.052322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:47:04.052660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:47:04.052757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.052852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:47:04.052941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.053039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:47:04.053183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:04.053284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:47:04.053399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:47:04.053620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:04.053745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:47:04.053853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:47:04.088253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:47:04.088927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:47:04.089277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-04-17T17:47:24.624366Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 2025-04-17T17:47:24.625209Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.625273Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.625339Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-04-17T17:47:24.625471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:47:24.629666Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.629786Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.629813Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T17:47:24.629853Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T17:47:24.629888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:47:24.631444Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.631532Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.631562Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T17:47:24.631589Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-17T17:47:24.631616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:47:24.631686Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-17T17:47:24.638743Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:47:24.638802Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-17T17:47:24.638842Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T17:47:24.639803Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-17T17:47:24.639923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:47:24.640175Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-04-17T17:47:24.641050Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:24.641161Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:24.641207Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944 2025-04-17T17:47:24.641339Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T17:47:24.641402Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T17:47:24.641441Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T17:47:24.641488Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T17:47:24.641519Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T17:47:24.641570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:24.641647Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:47:24.641685Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-04-17T17:47:24.641730Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T17:47:24.641766Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-17T17:47:24.641796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-17T17:47:24.641861Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:47:24.641892Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-04-17T17:47:24.641925Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:47:24.641975Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-17T17:47:24.642788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.651226Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:24.651296Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:24.651449Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:47:24.651551Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:24.651594Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-04-17T17:47:24.651645Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-17T17:47:24.652393Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.652486Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.652519Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T17:47:24.652570Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:47:24.652614Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:47:24.653290Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.653355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.653390Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T17:47:24.653415Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T17:47:24.653441Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:47:24.653508Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-04-17T17:47:24.653555Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T17:47:24.655931Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.662930Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T17:47:24.663065Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T17:47:24.663137Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T17:47:24.665208Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:47:24.665287Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:470:2431] TestWaitNotification: OK eventTxId 102 >> Viewer::JsonAutocompleteSimilarDatabaseName >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 20183, MsgBus: 27618 2025-04-17T17:46:04.308075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339823298968895:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:04.308490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b3c/r3tmp/tmp68QMpO/pdisk_1.dat 2025-04-17T17:46:04.947067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:04.950430Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:04.950512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:04.955455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20183, node 1 2025-04-17T17:46:05.139766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:05.139793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:05.139876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:05.140050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27618 TClient is connected to server localhost:27618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:46:07.150047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:07.176437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:07.361183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:07.878074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:08.100793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:46:09.302529Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339823298968895:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:09.302580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:46:14.067752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339866248643566:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:14.068152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:15.731549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:46:15.873633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.018425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.106136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.220739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.336919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.724262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.998295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339874838578857:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:16.998436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:16.998714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339874838578863:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:17.011772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:46:17.046428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494339874838578865:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:46:17.140543Z node 1 :TX_PROXY ERROR: Actor# [1:7494339879133546220:3605] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:46:19.934924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:46:19.935144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:20.791280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:46:20.800392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8705, MsgBus: 21156 2025-04-17T17:46:21.992512Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494339894360681013:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:21.994373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b3c/r3tmp/tmpyc9v0U/pdisk_1.dat 2025-04-17T17:46:22.119851Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8705, node 2 2025-04-17T17:46:22.137713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:22.137800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:22.139152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:22.225929Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:22.225952Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:22.225962Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:22.226079Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21156 TClient is connected to server localhost:21156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:46:23.306943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:23.507847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... 1474976715695:0, at schemeshard: 72057594046644480 2025-04-17T17:46:43.523637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:46:43.768971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:46:52.231214Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01js2dr5wycvx4b6h1wdv071na", SessionId: ydb://session/3?node_id=2&id=ODZlMmU1ZTEtNmYyNTgxOGEtZTE5OTg2NjgtYTg3ZThkNTk=, Slow query, duration: 15.399278s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "CREATE OBJECT mySaSecretId (TYPE SECRET) WITH value = `mySaSecretValue`;", parameters: 0b 2025-04-17T17:46:53.577844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715724:0, at schemeshard: 72057594046644480 2025-04-17T17:46:53.635196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715725:0, at schemeshard: 72057594046644480 2025-04-17T17:46:53.707651Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715725, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29400, MsgBus: 23870 2025-04-17T17:47:03.216980Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494340076148364435:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b3c/r3tmp/tmpL9lqv0/pdisk_1.dat 2025-04-17T17:47:03.346283Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:47:03.834055Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:03.896287Z node 3 :HIVE 
WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:03.896973Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:03.911538Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29400, node 3 2025-04-17T17:47:04.630809Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:04.630831Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:04.630841Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:04.630969Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23870 TClient is connected to server localhost:23870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:47:05.587657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:47:05.608674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.717221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:05.902779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:06.009571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:47:08.153313Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494340076148364435:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:08.153385Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:09.840177Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340101918169859:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:09.840261Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:09.975992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.068000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.160363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.359322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.633262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:10.932798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:11.010959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:11.230905Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340110508105145:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:11.231079Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:11.238256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340110508105150:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:11.251026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:11.370582Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494340110508105153:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:11.688202Z node 3 :TX_PROXY ERROR: Actor# [3:7494340110508105229:3574] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:13.902744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-17T17:47:14.963523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:47:15.631849Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:1, at schemeshard: 72057594046644480 2025-04-17T17:47:16.937419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T17:47:18.382525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-17T17:47:18.746729Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:18.746753Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:19.128229Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-17T17:47:19.926307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:47:20.038540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:47:26.890763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710735:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [FAIL] Test command err: Trying to start YDB, gRPC: 14675, MsgBus: 10650 2025-04-17T17:47:08.380943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340095110849793:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:08.381270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fcf/r3tmp/tmp40VLrG/pdisk_1.dat 2025-04-17T17:47:09.574319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:10.132893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:10.133212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:10.385607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14675, node 1 2025-04-17T17:47:10.997901Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:11.010134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:11.038350Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:47:11.038417Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:47:11.698842Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:11.698866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:11.698872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:11.698979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:47:13.390271Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340095110849793:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:13.390337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:10650 TClient is connected to server localhost:10650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:16.389231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:16.445776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:24.427628Z node 1 :KQP_PROXY ERROR: TraceId: "01js2dscy40vz1089rn09hnszk", Request deadline has expired for 2.893426s seconds (NYdb::Dev::NStatusHelpers::TYdbErrorException) Status: CLIENT_DEADLINE_EXCEEDED Issues:
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14675 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-17T17:47:27.352837Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:27.355762Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:27.356053Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T17:47:27.356095Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:27.356129Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T17:47:27.356168Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:27.356207Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.356278Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T17:47:27.356860Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-17T17:47:27.356973Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:27.396280Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.399140Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.399281Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.400024Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] 
Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.400156Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:27.400524Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:27.400847Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-17T17:47:27.403123Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-04-17T17:47:27.403190Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-17T17:47:27.403240Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:27.403298Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:27.404017Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-17T17:47:27.451856Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:27.455133Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:27.455393Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T17:47:27.455434Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:27.455475Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T17:47:27.455511Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:27.455554Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.455636Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T17:47:27.456298Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:403:2358], now have 1 active actors on pipe 2025-04-17T17:47:27.456359Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:27.456522Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:27.458623Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:27.458762Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.459517Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:27.459619Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:27.459957Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:27.460145Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:411:2364] 2025-04-17T17:47:27.462116Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-17T17:47:27.462171Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:411:2364] 2025-04-17T17:47:27.462440Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:27.462491Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:27.463244Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:414:2366], now have 1 active actors on pipe 2025-04-17T17:47:27.464604Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:422:2369], now have 1 active actors on pipe 2025-04-17T17:47:27.464891Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:424:2370], now have 1 active actors on pipe 2025-04-17T17:47:27.465126Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:422:2369] destroyed 2025-04-17T17:47:27.465491Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:424:2370] destroyed 2025-04-17T17:47:30.280578Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:30.284511Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:30.284850Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T17:47:30.284904Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:30.284949Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T17:47:30.285006Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:30.285057Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:30.285121Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T17:47:30.285775Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-04-17T17:47:30.285891Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:30.286122Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:30.288732Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:30.288867Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2025-04-17T17:47:30.289793Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:30.289918Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:30.290381Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:30.290717Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:267:2257] 2025-04-17T17:47:30.292975Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing c ... xId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:30.852833Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T17:47:30.852869Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:30.852905Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:30.852951Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T17:47:30.853491Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:465:2405], now have 1 active actors on pipe 2025-04-17T17:47:30.853568Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:30.853711Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 5(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:30.858154Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:30.858298Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:30.858925Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:30.859021Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:30.859295Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 
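The initializer traces above always walk a fixed step sequence before reporting "Initializing completed." (here TInitConfigStep and TInitInternalFieldsStep; later dumps in this log also show TInitDiskStatusStep, TInitMetaStep, TInitInfoRangeStep, TInitDataRangeStep, TInitDataStep and TInitEndWriteTimestampStep). A minimal sketch of such an ordered step pipeline, with stand-in step bodies rather than the real implementation:

// Illustrative only: run named initialization steps in order, logging the
// same "Start initializing step X" / "Initializing completed." lines the
// trace shows. A failing step aborts the sequence.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TInitStep {
    std::string Name;
    std::function<bool()> Run;  // false aborts initialization
};

bool RunInitializer(const std::vector<TInitStep>& steps) {
    for (const auto& step : steps) {
        std::cout << "Start initializing step " << step.Name << "\n";
        if (!step.Run())
            return false;
    }
    std::cout << "Initializing completed.\n";
    return true;
}

int main() {
    std::vector<TInitStep> steps = {
        {"TInitConfigStep",            [] { return true; }},
        {"TInitInternalFieldsStep",    [] { return true; }},
        {"TInitEndWriteTimestampStep", [] { return true; }},
    };
    RunInitializer(steps);
}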
2025-04-17T17:47:30.860612Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:473:2411] 2025-04-17T17:47:30.878165Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T17:47:30.892824Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:473:2411] 2025-04-17T17:47:30.893455Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:30.894117Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:30.901842Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:476:2413], now have 1 active actors on pipe 2025-04-17T17:47:31.039548Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:31.080329Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:31.081880Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T17:47:31.082140Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:31.085961Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T17:47:31.086543Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:31.086863Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:31.088511Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T17:47:31.093986Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:525:2450], now have 1 active actors on pipe 2025-04-17T17:47:31.106359Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:31.115030Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:31.151874Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:31.153166Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:31.183081Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:31.184461Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:31.190753Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:31.195046Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:533:2456] 2025-04-17T17:47:31.203987Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T17:47:31.204978Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:533:2456] 2025-04-17T17:47:31.205639Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:31.206298Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:31.212332Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:536:2458], now have 1 active actors on pipe 2025-04-17T17:47:31.228297Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:545:2462], now have 1 active actors on pipe 2025-04-17T17:47:31.228621Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:544:2461], now have 1 active actors on pipe 2025-04-17T17:47:31.229861Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:546:2462], now have 1 active actors on pipe 2025-04-17T17:47:31.245203Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:551:2466], now have 1 active actors on pipe 2025-04-17T17:47:31.401311Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:31.427971Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:31.435917Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T17:47:31.436556Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:31.437421Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:31.508833Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:31.509148Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T17:47:31.509988Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:31.528319Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:31.542129Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:608:2511] 2025-04-17T17:47:31.560672Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T17:47:31.602865Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-17T17:47:31.604513Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T17:47:31.605900Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T17:47:31.606153Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-17T17:47:31.611912Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:47:31.612580Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:47:31.612910Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T17:47:31.613511Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:608:2511] 2025-04-17T17:47:31.613812Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:31.614139Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0
2025-04-17T17:47:31.615036Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:545:2462] destroyed
2025-04-17T17:47:31.615105Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:544:2461] destroyed
RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } }
Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC }
>> ReadOnlyVDisk::TestWrites [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [FAIL]
Test command err: Trying to start YDB, gRPC: 4322, MsgBus: 12839
2025-04-17T17:47:08.217851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340097906662878:2199];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fd3/r3tmp/tmpMc2NY8/pdisk_1.dat
2025-04-17T17:47:08.586782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:47:09.652767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:47:09.652854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:47:09.655126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:47:09.655410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:47:09.659336Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 4322, node 1
2025-04-17T17:47:10.894738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:47:10.894753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:47:10.894761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:47:10.894838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12839
2025-04-17T17:47:13.215098Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340097906662878:2199];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:47:13.215496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
TClient is connected to server localhost:12839
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:47:15.613735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:47:23.898030Z node 1 :KQP_PROXY ERROR: TraceId: "01js2dsbx3b7xjtymsn8d6aske", Request deadline has expired for 3.243573s seconds (NYdb::Dev::NStatusHelpers::TYdbErrorException) Status: CLIENT_DEADLINE_EXCEEDED Issues:
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4322
>> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD]
>> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD]
>> TSchemeShardInfoTypesTest::LostId [GOOD]
>> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD]
>> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD]
>> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false
>> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD]
>> TSchemeShardTest::CopyLockedTableForBackup
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small
>> TColumnShardTestReadWrite::ReadSomePrograms
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
>> Backup::ProposeBackup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD]
Test command err: RandomSeed# 10630277893429975101
=== Trying to put and get a blob ===
SEND TEvPut with key [1:1:0:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #0 to read-only ===
Setting VDisk read-only to 1 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
=== Write 10 blobs, expect some VDisks refuse parts but writes go through ===
SEND TEvPut with key [1:1:1:0:0:32768:0]
2025-04-17T17:47:26.795493Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699]
TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:2:0:0:131072:0]
2025-04-17T17:47:26.800032Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699]
TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:3:0:0:32768:0]
2025-04-17T17:47:26.804561Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699]
TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:4:0:0:131072:0]
2025-04-17T17:47:26.807219Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699]
TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:5:0:0:32768:0]
TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:6:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:7:0:0:32768:0]
2025-04-17T17:47:26.814142Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender#
[1:5308:699] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-04-17T17:47:26.816437Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-04-17T17:47:26.818941Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-04-17T17:47:26.821190Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-04-17T17:47:28.414306Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] 2025-04-17T17:47:28.414676Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:28.414799Z 2 00h03m30.110512s :BS_SKELETON 
ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] 2025-04-17T17:47:28.420341Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [0bdac48f2cb67229] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-04-17T17:47:28.432701Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] 2025-04-17T17:47:28.433114Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] 2025-04-17T17:47:28.461495Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 
2025-04-17T17:47:28.487952Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] 2025-04-17T17:47:28.494103Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] 2025-04-17T17:47:28.500231Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-04-17T17:47:28.517842Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:28.557412Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] 2025-04-17T17:47:28.565824Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only m ... 
ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-04-17T17:47:33.671649Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:33.671803Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-04-17T17:47:33.697073Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] 2025-04-17T17:47:33.718012Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only 
Sender# [1:5322:713] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-04-17T17:47:33.741633Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-04-17T17:47:33.745186Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:33.745272Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-04-17T17:47:33.768576Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:33.769215Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-04-17T17:47:33.811392Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:33.811770Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-04-17T17:47:33.829973Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] 2025-04-17T17:47:33.835502Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2025-04-17T17:47:33.856587Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:33.857188Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-04-17T17:47:33.873886Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-04-17T17:47:33.874519Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:23:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:24:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:25:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:26:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:27:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:28:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:29:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:30:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
>> Viewer::SelectStringWithNoBase64Encoding [FAIL]
>> Viewer::ServerlessNodesPage
>> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC }
2025-04-17T17:47:27.886890Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo
2025-04-17T17:47:27.889703Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request.
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:27.889960Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T17:47:27.890003Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:27.890034Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T17:47:27.890069Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:27.890114Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.890161Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T17:47:27.890710Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:259:2251], now have 1 active actors on pipe 2025-04-17T17:47:27.890824Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:27.902886Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.905280Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.905402Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.906060Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.906198Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:27.906564Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:27.906865Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:267:2257] 2025-04-17T17:47:27.908940Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
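A note on the scan bounds printed by "Transactions request": tx_00000000000000000000 and tx_18446744073709551615 are simply 0 and 2^64 - 1 rendered as zero-padded 20-digit decimals, i.e. the full uint64 txId key space. A standalone snippet reproducing the formatting (the helper name is ours, not YDB's):

// Prints the same "From tx_..., To tx_..." bounds seen in the log:
// UINT64_MAX is 18446744073709551615, exactly 20 decimal digits, so the
// lower bound 0 is padded to the same width.
#include <cinttypes>
#include <cstdint>
#include <cstdio>

void PrintTxRange(uint64_t from, uint64_t to) {
    std::printf("From tx_%020" PRIu64 ", To tx_%020" PRIu64 "\n", from, to);
}

int main() {
    PrintTxRange(0, UINT64_MAX);  // the full-range scan used at tablet start
}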
2025-04-17T17:47:27.908994Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2257] 2025-04-17T17:47:27.909038Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:27.909075Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:27.909728Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:270:2259], now have 1 active actors on pipe 2025-04-17T17:47:27.949750Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:27.953571Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:27.953810Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-17T17:47:27.953849Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:27.953880Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-17T17:47:27.953912Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:27.953959Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.954021Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-17T17:47:27.954576Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-04-17T17:47:27.954667Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:27.954815Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.956673Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.956778Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.957452Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:27.957553Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:27.957813Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:27.957990Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:413:2366] 2025-04-17T17:47:27.959833Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-17T17:47:27.959886Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:413:2366] 2025-04-17T17:47:27.959933Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:27.959974Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:27.960607Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-04-17T17:47:27.973998Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:27.977225Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:27.977487Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-17T17:47:27.977541Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:27.977574Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T17:47:27.977605Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:27.977647Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.977693Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T17:47:27.978300Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:465:2405], now have 1 active actors on pipe 2025-04-17T17:47:27.978385Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:27.978528Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:27.980263Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:27.980360Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:27.980990Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:27.981123Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:27.981417Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:27.981597Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:473:2411] 2025-04-17T17:47:27.983333Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T17:47:27.983387Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:473:2411] 2025-04-17T17:47:27.983431Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:27.983472Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... EBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:34.339948Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T17:47:34.340129Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:34.340279Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.340476Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T17:47:34.342091Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:462:2402], now have 1 active actors on pipe 2025-04-17T17:47:34.346383Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:34.346553Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:34.352287Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:34.352412Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.352952Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:34.353050Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:34.353388Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:34.353562Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:470:2408] 2025-04-17T17:47:34.355420Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T17:47:34.355478Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:470:2408] 2025-04-17T17:47:34.355527Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:34.355572Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:34.356208Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:473:2410], now have 1 active actors on pipe 2025-04-17T17:47:34.379641Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:34.384987Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:34.385481Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T17:47:34.385533Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:34.385695Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T17:47:34.385878Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:34.385972Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.386230Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T17:47:34.387486Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:522:2447], now have 1 active actors on pipe 2025-04-17T17:47:34.387695Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:34.388043Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:34.392589Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { 
PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:34.393019Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.393591Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:34.393698Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:34.393963Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:34.394111Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:530:2453] 2025-04-17T17:47:34.395976Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T17:47:34.396039Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:530:2453] 2025-04-17T17:47:34.396092Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:34.396137Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:34.396770Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:533:2455], now have 1 active actors on pipe 2025-04-17T17:47:34.397826Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:539:2458], now have 1 active actors on pipe 2025-04-17T17:47:34.397908Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:540:2459], now have 1 active actors on pipe 2025-04-17T17:47:34.398230Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:541:2459], now have 1 active actors on pipe 2025-04-17T17:47:34.411028Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:549:2466], now have 1 active actors on pipe 2025-04-17T17:47:34.473636Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:34.476711Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:34.477015Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T17:47:34.477067Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:34.477654Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:34.478940Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.479115Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T17:47:34.479368Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:34.479927Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:34.480480Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:606:2511] 2025-04-17T17:47:34.484419Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T17:47:34.486332Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-17T17:47:34.486567Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T17:47:34.486820Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T17:47:34.486996Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-17T17:47:34.487030Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:47:34.487064Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:47:34.487100Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T17:47:34.487144Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:606:2511] 2025-04-17T17:47:34.487192Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:34.487238Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:47:34.487936Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:540:2459] destroyed 2025-04-17T17:47:34.487991Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:539:2458] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-04-17T17:47:26.410258Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:26.414305Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:26.414572Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T17:47:26.414622Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:26.414650Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T17:47:26.414686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:26.414728Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:26.414772Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T17:47:26.415340Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:262:2254], now have 1 active actors on pipe 2025-04-17T17:47:26.415382Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:26.503604Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:26.531090Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:26.531718Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:26.548213Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 
6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:26.548941Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:26.554888Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:26.555411Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [1:270:2260] 2025-04-17T17:47:26.567423Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-04-17T17:47:26.568243Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:270:2260] 2025-04-17T17:47:26.568894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:26.569206Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:26.581760Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:273:2262], now have 1 active actors on pipe 2025-04-17T17:47:26.666573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:26.673594Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:26.678390Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-17T17:47:26.678466Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:26.678512Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-17T17:47:26.678570Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:26.678624Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:26.678684Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-17T17:47:26.679482Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:406:2361], now have 1 active actors on pipe 2025-04-17T17:47:26.679605Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:26.679789Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:26.682588Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 
0 } 2025-04-17T17:47:26.682738Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:26.683581Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T17:47:26.683690Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:26.684051Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:26.684804Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [1:414:2367] 2025-04-17T17:47:26.696803Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-17T17:47:26.696929Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:414:2367] 2025-04-17T17:47:26.696998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:26.697055Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:26.702958Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:417:2369], now have 1 active actors on pipe 2025-04-17T17:47:26.739074Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:26.749719Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:26.750030Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-17T17:47:26.750084Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:26.750120Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T17:47:26.750151Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:26.750215Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:26.750269Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T17:47:26.750889Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:466:2406], now have 1 active actors on pipe 2025-04-17T17:47:26.750999Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:26.751184Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:26.753502Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:26.753606Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:26.754401Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:26.754505Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:26.754876Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:26.755112Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [1:474:2412] 2025-04-17T17:47:26.756970Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
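Each entry in the blobs above follows the fixed shape "<ISO-8601 timestamp> node <id> :<COMPONENT> <SEVERITY>: <message>", so the concatenated test output can be split back into individual records when the line wrapping makes it hard to read. Below is a minimal sketch of such a splitter; it is not part of the YDB tree, and the regex is an assumption derived only from the record headers visible in this log:

#include <iostream>
#include <regex>
#include <string>

int main() {
    // Read the whole captured log into one string, as it appears above.
    std::string blob, line;
    while (std::getline(std::cin, line)) {
        blob += line;
        blob += ' ';
    }
    // A record header looks like:
    //   2025-04-17T17:47:26.757025Z node 1 :PERSQUEUE DEBUG:
    std::regex head(
        R"((\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) node (\d+) :([A-Z_]+) ([A-Z_]+):)");
    for (auto it = std::sregex_iterator(blob.begin(), blob.end(), head);
         it != std::sregex_iterator(); ++it) {
        // One header per output line: timestamp, severity, component, node.
        std::cout << (*it)[1] << ' ' << (*it)[4] << ' ' << (*it)[3]
                  << " node=" << (*it)[2] << '\n';
    }
    return 0;
}

Piping this section through the program yields one header per record, which makes the DEBUG/INFO/NOTICE density per component easy to eyeball.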
2025-04-17T17:47:26.757025Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [1:474:2412] 2025-04-17T17:47:26.757073Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:26.757117Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:26.757904Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:477:2414], ... SQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:34.682118Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:34.683994Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:34.684101Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.684649Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T17:47:34.684759Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:34.685071Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:34.685239Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:472:2410] 2025-04-17T17:47:34.687112Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T17:47:34.687364Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:472:2410] 2025-04-17T17:47:34.687423Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:34.687471Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:47:34.688164Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:475:2412], now have 1 active actors on pipe 2025-04-17T17:47:34.705849Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:34.709395Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:34.709678Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T17:47:34.709719Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:34.709755Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T17:47:34.709789Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:34.709850Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.709898Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T17:47:34.710484Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:524:2449], now have 1 active actors on pipe 2025-04-17T17:47:34.710578Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:47:34.710743Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:34.712651Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:34.712771Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.713352Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T17:47:34.713464Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:34.713751Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:34.713937Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:532:2455] 2025-04-17T17:47:34.716028Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
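The "Config update ... / Apply new config ... / Config applied ..." triples above all print the tablet configuration in protobuf text format. Re-indented, the version-12 payload from this run reads:

CacheSize: 10485760
PartitionConfig {
  MaxCountInPartition: 20000000
  MaxSizeInPartition: 104857600
  LifetimeSeconds: 0
  LowWatermark: 6291456
  SourceIdLifetimeSeconds: 3600
  MaxWriteInflightSize: 90000000
}
PartitionIds: 2
TopicName: "rt3.dc1--topic2"
Version: 12
Partitions { PartitionId: 2 }
AllPartitions { PartitionId: 2 }

Given the generated class for this message, such a block round-trips through the standard protobuf text-format API. The fragment below is a sketch, not a standalone program: the type name is an assumption (the log never names the proto), so substitute the actual generated class from the YDB tree.

#include <google/protobuf/text_format.h>
// Assumed type: the PersQueue tablet config message from ydb/core/protos;
// replace with the real generated class if it differs.
NKikimrPQ::TPQTabletConfig cfg;
// text_block (std::string) is hypothetical: it holds the re-indented
// payload shown above.
bool ok = google::protobuf::TextFormat::ParseFromString(text_block, &cfg);
// On success, the fields listed above are available through the
// generated accessors of cfg.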
2025-04-17T17:47:34.716086Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:532:2455] 2025-04-17T17:47:34.716136Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:34.716187Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:34.716815Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:535:2457], now have 1 active actors on pipe 2025-04-17T17:47:34.718253Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:544:2460], now have 1 active actors on pipe 2025-04-17T17:47:34.718741Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:546:2461], now have 1 active actors on pipe 2025-04-17T17:47:34.718843Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:547:2461], now have 1 active actors on pipe 2025-04-17T17:47:34.718962Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:548:2461], now have 1 active actors on pipe 2025-04-17T17:47:34.719533Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:561:2472], now have 1 active actors on pipe 2025-04-17T17:47:34.740400Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:47:34.742473Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:47:34.742741Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T17:47:34.742787Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:47:34.742927Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:47:34.743403Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:47:34.743447Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T17:47:34.743549Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:47:34.743802Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:47:34.743982Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:618:2517] 2025-04-17T17:47:34.745936Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T17:47:34.746956Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-17T17:47:34.747165Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T17:47:34.747433Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T17:47:34.747604Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-17T17:47:34.747642Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:47:34.747678Z node 3 :PERSQUEUE INFO: 
[rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:47:34.747714Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T17:47:34.747760Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:618:2517] 2025-04-17T17:47:34.747819Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:47:34.747877Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T17:47:34.748718Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:544:2460] destroyed 2025-04-17T17:47:34.748766Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server disconnected, pipe [3:546:2461] destroyed 2025-04-17T17:47:34.748829Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:547:2461] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TColumnShardTestReadWrite::WriteReadStandalone |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |74.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-04-17T17:47:35.123269Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:35.349096Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:35.447763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:35.448102Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:35.456595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:35.456831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:35.457084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:35.457233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:35.457389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:35.457504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:35.457610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:35.457748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:35.457934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:35.458059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.458201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:35.458318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:35.487046Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:35.487215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:35.487297Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:35.487459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:35.487603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:35.487675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:35.487718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:35.487826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:35.487905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:35.487944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:35.487974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:35.488170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:35.488228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:35.488270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:35.488301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:35.488399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:35.488465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:35.488508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:35.488547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:35.488626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:35.488662Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:35.488707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:35.488764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:35.488811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:35.488841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:35.489234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-17T17:47:35.489316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-17T17:47:35.489437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-17T17:47:35.489526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-17T17:47:35.489707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:35.489776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:35.489816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:35.490011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:35.490057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.490087Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.490266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:35.490317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:35.490349Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:35.490537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:35.490580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:35.490612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:35.490725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:35.490767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:35.490829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ath_id=1; 2025-04-17T17:47:36.351013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-17T17:47:36.403637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-17T17:47:36.403810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:214;event=finished_tx;tx_id=10; 2025-04-17T17:47:36.554865Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-17T17:47:36.556282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2025-04-17T17:47:36.828055Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=3200;count=1; 2025-04-17T17:47:36.865543Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-17T17:47:36.872874Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T17:47:36.920228Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T17:47:36.921961Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:47:37.008327Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1744912056244 at tablet 9437184, mediator 0 2025-04-17T17:47:37.022221Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-04-17T17:47:37.024328Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-04-17T17:47:37.044159Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:214;event=finished_tx;tx_id=100; 2025-04-17T17:47:37.081262Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-04-17T17:47:37.096971Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-04-17T17:47:37.097164Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=3384; 2025-04-17T17:47:37.158679Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::cc7d814-1bb411f0-aaf9beeb-2b1d1048; 2025-04-17T17:47:37.158759Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-17T17:47:37.158867Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:753;event=indexation;bytes=3384;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048; 2025-04-17T17:47:37.159123Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:619;event=start_changes;type=CS::INDEXATION;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048; 2025-04-17T17:47:37.159532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=3035;external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;type=CS::INDEXATION;priority=0;; 2025-04-17T17:47:37.159753Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=3035;external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;type=CS::INDEXATION;priority=0;; 2025-04-17T17:47:37.159791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;mem=3035;cpu=0; 2025-04-17T17:47:37.159946Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;task_id=1;mem=3035;cpu=0; 2025-04-17T17:47:37.160049Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048; 2025-04-17T17:47:37.180942Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-17T17:47:37.181120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-17T17:47:37.190592Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T17:47:37.190965Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-04-17T17:47:37.191865Z node 1 :TX_COLUMNSHARD 
DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T17:47:37.192009Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T17:47:37.192531Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T17:47:37.192585Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:47:37.192704Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:768;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3384;indexing_debug={task_ids=cc7d814-1bb411f0-aaf9beeb-2b1d1048,;}; 2025-04-17T17:47:37.192795Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T17:47:37.193069Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:47:37.193140Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:47:37.193190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:47:37.193311Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:47:37.193756Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {1744912056244:100} readable: {1744912056244:max} at tablet 9437184 2025-04-17T17:47:37.218939Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-17T17:47:37.219025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;fline=with_appended.cpp:65;portions=1,;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048; 2025-04-17T17:47:37.219232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::cc7d814-1bb411f0-aaf9beeb-2b1d1048; 2025-04-17T17:47:37.219316Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:47:37.219381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:47:37.219445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T17:47:37.219513Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:47:37.219568Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:47:37.219615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:47:37.219731Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998500s; 2025-04-17T17:47:37.219808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:47:37.219913Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:3384:0] 2025-04-17T17:47:37.219973Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T17:47:37.220093Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=cc7d814-1bb411f0-aaf9beeb-2b1d1048;mem=3035;cpu=0; 2025-04-17T17:47:37.220276Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:47:37.220442Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-04-17T17:47:37.220565Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=100;scan_id=0;gen=0;table=;snapshot={1744912056244:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> EvWrite::AbortInTransaction [GOOD] |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TBackupCollectionTests::CreateAbsolutePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-04-17T17:47:35.463060Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:35.579282Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:35.606839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:35.607116Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:35.615021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:35.615212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:35.615425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:35.615550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:35.615664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:35.615765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:35.615851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:35.615954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:35.616079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:35.616182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.616289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:35.616388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:35.654938Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:35.655164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:35.655217Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:35.655377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:35.655501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:35.655563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:35.655600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:35.655700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:35.655758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:35.655800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:35.655828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:35.656209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:35.656378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:35.656448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:35.656496Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:35.656699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:35.656940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:35.657038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:35.657192Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:35.657440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:35.657553Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:35.657623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:35.657670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:35.657754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:35.657830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:35.659306Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=357; 2025-04-17T17:47:35.659720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=189; 2025-04-17T17:47:35.660023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=180; 2025-04-17T17:47:35.660526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=265; 2025-04-17T17:47:35.661251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:35.661600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:35.661643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:35.662363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:35.662402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.662430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.663149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:35.663184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:35.663211Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:35.664056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:35.664275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:35.664300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:35.665039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:35.665200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:35.665465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Fini ... ;new_count_tierings=0; 2025-04-17T17:47:43.944706Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-17T17:47:43.945044Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-17T17:47:43.951862Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-17T17:47:43.952856Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:214;event=finished_tx;tx_id=10; 2025-04-17T17:47:44.002611Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-17T17:47:44.006268Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=229592;columns=2; 2025-04-17T17:47:44.033468Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[2:108:2140];fline=actor.cpp:22;event=flush_writing;size=229592;count=1; 2025-04-17T17:47:44.040723Z node 2 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-17T17:47:44.046102Z node 2 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T17:47:44.074370Z node 2 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T17:47:44.075410Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:47:44.076597Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-04-17T17:47:44.076678Z node 2 :TX_COLUMNSHARD_TX 
WARN: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-04-17T17:47:44.076885Z node 2 :TX_COLUMNSHARD DEBUG: PlanStep 1744912064061 at tablet 9437184, mediator 0 2025-04-17T17:47:44.076935Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-04-17T17:47:44.076983Z node 2 :TX_COLUMNSHARD ERROR: TxPlanStep[5] Ignore old txIds [112] for step 1744912064061 last planned step 1744912064061 at tablet 9437184 2025-04-17T17:47:44.077030Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-04-17T17:47:44.077286Z node 2 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744912064061:max} readable: {1744912064061:max} at tablet 9437184 2025-04-17T17:47:44.077360Z node 2 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T17:47:44.082111Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912064061:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-04-17T17:47:44.082240Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912064061:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-04-17T17:47:44.082913Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912064061:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-04-17T17:47:44.083031Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912064061:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T17:47:44.083628Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912064061:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[2:169:2187];trace_detailed=; 2025-04-17T17:47:44.084238Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-04-17T17:47:44.084414Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-04-17T17:47:44.084674Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:47:44.084790Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:47:44.084888Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:47:44.084935Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:169:2187] finished for tablet 9437184 2025-04-17T17:47:44.085283Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[2:168:2186];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744912064083543,"name":"_full_task","f":1744912064083543,"d_finished":0,"c":0,"l":1744912064084993,"d":1450},"events":[{"name":"bootstrap","f":1744912064083728,"d_finished":827,"c":1,"l":1744912064084555,"d":827},{"a":1744912064084651,"name":"ack","f":1744912064084651,"d_finished":0,"c":0,"l":1744912064084993,"d":342},{"a":1744912064084631,"name":"processing","f":1744912064084631,"d_finished":0,"c":0,"l":1744912064084993,"d":362},{"name":"ProduceResults","f":1744912064084542,"d_finished":228,"c":2,"l":1744912064084920,"d":228},{"a":1744912064084923,"name":"Finish","f":1744912064084923,"d_finished":0,"c":0,"l":1744912064084993,"d":70}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:47:44.085351Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:168:2186];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:47:44.085682Z node 2 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[2:168:2186];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744912064083543,"name":"_full_task","f":1744912064083543,"d_finished":0,"c":0,"l":1744912064085389,"d":1846},"events":[{"name":"bootstrap","f":1744912064083728,"d_finished":827,"c":1,"l":1744912064084555,"d":827},{"a":1744912064084651,"name":"ack","f":1744912064084651,"d_finished":0,"c":0,"l":1744912064085389,"d":738},{"a":1744912064084631,"name":"processing","f":1744912064084631,"d_finished":0,"c":0,"l":1744912064085389,"d":758},{"name":"ProduceResults","f":1744912064084542,"d_finished":228,"c":2,"l":1744912064084920,"d":228},{"a":1744912064084923,"name":"Finish","f":1744912064084923,"d_finished":0,"c":0,"l":1744912064085389,"d":466}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T17:47:44.085747Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:47:44.082997Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T17:47:44.085792Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:47:44.085877Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=100; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:45:56.725849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:56.725926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:56.725959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:56.726006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:56.726048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:56.726081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:56.726138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:56.726226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:56.726529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:57.212653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:57.212786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:57.237445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:57.237545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:57.237678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:57.243269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:57.243590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:57.244244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.244531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:57.248363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.249657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:57.249707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.249827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:57.249874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.249910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:57.250138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.256241Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 
is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:45:57.377224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:57.377424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.377583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:57.377756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:57.377804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.383654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.383775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:57.383924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.383977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:57.384013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:57.384043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:57.390993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.391058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:57.391090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:57.399649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.399711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.399751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.399800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.403134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:57.404983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 
msg type: 269090816 2025-04-17T17:45:57.405146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:57.406030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.406619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:57.406665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.406994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:57.407041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.407194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:57.407279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:57.409380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:57.409420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.409567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.409618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:57.409914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.409960Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:57.410042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.410081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.410121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.410147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.410201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:57.410255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.410288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:57.410315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:57.410372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:57.410403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:57.410442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:57.417760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.417880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.417917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :41.893583Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 59500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2539 } } 2025-04-17T17:47:41.893739Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 59500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2539 } } 2025-04-17T17:47:41.893797Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-17T17:47:41.893945Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-17T17:47:41.893993Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-17T17:47:41.907506Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:47:41.908674Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:47:41.909300Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:47:41.910980Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:47:41.912145Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:47:41.939596Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:47:41.941261Z node 12 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T17:47:41.951187Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:41.952892Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 51539609709 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:41.953293Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:47:41.956388Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:47:41.958028Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:47:42.006573Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:42.006907Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:47:42.008651Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:42.009595Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:47:42.017532Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:47:42.017618Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:47:42.018500Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:47:42.018650Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:47:42.018716Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:47:42.018783Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T17:47:42.018842Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:47:42.018956Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:47:42.019915Z node 12 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 12841 } } 2025-04-17T17:47:42.019972Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:47:42.020132Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 12841 } } 2025-04-17T17:47:42.020269Z node 12 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 12841 } } 2025-04-17T17:47:42.021051Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 51539609847 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:47:42.021116Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:47:42.021314Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 51539609847 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:47:42.021392Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:47:42.021529Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 51539609847 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:47:42.021621Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:42.021678Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:47:42.021765Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:47:42.021837Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:47:42.053293Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:47:42.055851Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
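The FLAT_TX_SCHEMESHARD entries above trace a single ALTER TABLE operation (txId 102) through schemeshard's internal state machine as numeric transitions: 3 -> 128 once every datashard reports PREPARED, 128 -> 129 once the coordinator plans the step, and 129 -> 240 once all TEvSchemaChanged acks arrive. A minimal sketch of that progression follows; the state names and their mapping to the numbers are assumptions read off the log, not the actual schemeshard sources.

#include <cstdio>

// Hypothetical labels for the numeric states in the log ("Change state
// for txid 102:0 128 -> 129"); the real state machine lives in
// ydb/core/tx/schemeshard and differs in detail.
enum class ETxState : int {
    CreateParts = 2,          // create shards (ops with none go 2 -> 3 immediately)
    ConfigureParts = 3,       // wait until every shard reports PREPARED
    Propose = 128,            // hand the tx to the coordinator for a plan step
    ProposedWaitParts = 129,  // wait for TEvSchemaChanged from each shard
    Done = 240,               // "TDone opId# 102:0 ProgressState"
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:       return ETxState::ConfigureParts;
        case ETxState::ConfigureParts:    return ETxState::Propose;
        case ETxState::Propose:           return ETxState::ProposedWaitParts;
        case ETxState::ProposedWaitParts: return ETxState::Done;
        case ETxState::Done:              return ETxState::Done;
    }
    return s;  // unreachable; silences -Wreturn-type
}

int main() {
    ETxState s = ETxState::ConfigureParts;  // where txId 102 stands after PREPARED
    while (s != ETxState::Done) {
        ETxState n = Next(s);
        std::printf("Change state for txid 102:0 %d -> %d\n",
                    static_cast<int>(s), static_cast<int>(n));
        s = n;
    }
    return 0;
}

The invariant the log demonstrates is that every transition is driven by an external event (a coordinator plan step, a datashard result), never by the schemeshard spinning on its own.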
2025-04-17T17:47:42.061271Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:47:42.063717Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:47:42.063776Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:47:42.065290Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:47:42.065353Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:47:42.065433Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:47:42.065487Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:47:42.066529Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:47:42.067019Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:334:2313] message: TxId: 102 2025-04-17T17:47:42.067598Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:47:42.068003Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:47:42.068623Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:47:42.069999Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:47:42.097251Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:47:42.097612Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:391:2363] TestWaitNotification: OK eventTxId 102 >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] >> HttpRequest::AnalyzeServerless |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 62449, MsgBus: 8627 2025-04-17T17:47:12.532642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340115662183402:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:12.533604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fca/r3tmp/tmpdU4LiW/pdisk_1.dat 2025-04-17T17:47:15.902842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:16.469154Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:16.660465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:16.670619Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:16.716984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62449, node 1 2025-04-17T17:47:17.534458Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340115662183402:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:17.534734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:18.123912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:18.123928Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:18.123933Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:18.124560Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8627 TClient is connected to server localhost:8627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:23.519525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:23.595853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:23.620188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.392794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:25.414990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:25.605215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
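Each "waiting..." marker in the setup above is the test blocking until the preceding ESchemeOpCreateTable transaction reports completion; the log shows this is done by subscribing to tx-completion notifications ("NotifyTxCompletion, unknown transaction, txId ..."). A generic poll-with-backoff equivalent, purely for illustration of the same wait discipline:

#include <algorithm>
#include <chrono>
#include <functional>
#include <thread>

// Illustrative only: block until isComplete() reports true, backing off
// exponentially between probes, with a hard deadline. The real test
// framework subscribes to TEvNotifyTxCompletionResult instead of polling.
bool WaitFor(const std::function<bool()>& isComplete,
             std::chrono::milliseconds deadline = std::chrono::seconds(30)) {
    auto delay = std::chrono::milliseconds(10);
    auto start = std::chrono::steady_clock::now();
    while (!isComplete()) {
        if (std::chrono::steady_clock::now() - start > deadline) {
            return false;  // timed out; let the caller fail the test
        }
        std::this_thread::sleep_for(delay);
        delay = std::min(delay * 2, std::chrono::milliseconds(1000));
    }
    return true;
}

int main() {
    int probes = 0;
    // Toy condition that becomes true on the third probe.
    return WaitFor([&] { return ++probes >= 3; }) ? 0 : 1;
}

The deadline matters here: under the address-sanitizer build each scheme op can take seconds, which is why the timestamps between consecutive "waiting..." entries jump so widely.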
2025-04-17T17:47:30.053035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340192971596546:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:30.054146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:31.360814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:31.441333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:31.441350Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:31.550646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:31.907704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:31.990564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:32.042042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:32.132493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:32.178206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:32.313770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340201561531843:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:32.313847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:32.314746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340201561531848:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:32.319438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:32.329360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:47:32.329672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340201561531850:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:32.463498Z node 1 :TX_PROXY ERROR: Actor# [1:7494340201561531907:3603] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 6262, MsgBus: 8792 2025-04-17T17:46:05.317140Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339827372480465:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:05.317431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb3/r3tmp/tmppSFtqV/pdisk_1.dat 2025-04-17T17:46:05.855295Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:05.866007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:05.866087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:05.880405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6262, node 1 2025-04-17T17:46:05.987303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:46:05.987321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:46:05.987330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:46:05.987433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8792 TClient is connected to server localhost:8792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:46:07.104628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
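The sequence just above is a benign create race, not a failure: the workload service creates the default resource pool, schedules a retry on "Transaction 281474976710669 completed, doublechecking", and the retried CREATE then reports "path exist, request accepts it" at severity 1. A sketch of the client-side convergence pattern, with invented names (EStatus and CreateResourcePool are illustrations, not YDB APIs):

#include <cstdio>
#include <string>

// Hypothetical status codes for illustration; real YDB calls return a
// structured status with an issue list, not this enum.
enum class EStatus { Success, AlreadyExists, TransportError };

// Stub standing in for the CREATE RESOURCE POOL request. It fakes the
// race seen in the log: the first attempt finds the pool already there.
EStatus CreateResourcePool(const std::string& /*path*/) {
    static int calls = 0;
    return ++calls == 1 ? EStatus::AlreadyExists : EStatus::Success;
}

// Treat "already exists" as success so concurrent creators converge --
// the same contract as "path exist, request accepts it" in the log.
bool EnsureResourcePool(const std::string& path) {
    for (int attempt = 0; attempt < 3; ++attempt) {
        switch (CreateResourcePool(path)) {
            case EStatus::Success:
            case EStatus::AlreadyExists:
                return true;   // created by us or by a racer: fine either way
            case EStatus::TransportError:
                continue;      // transient; retry ("doublechecking")
        }
    }
    return false;              // retries exhausted
}

int main() {
    bool ok = EnsureResourcePool("/Root/.metadata/workload_manager/pools/default");
    std::printf("ensure default pool: %s\n", ok ? "ok" : "failed");
    return ok ? 0 : 1;
}

In the log the same outcome arrives as a TX_PROXY "Check failed: path ... error: path exist" issue that the request explicitly accepts.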
2025-04-17T17:46:07.124064Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:46:07.137919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:07.447311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:07.644685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:07.735616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:10.308923Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339827372480465:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:10.308980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:46:14.789854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339866027187822:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:14.803040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:15.983268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.082163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.169879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.255216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.330287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.594555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.796134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:46:16.948241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339874617123116:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:16.948296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:16.950941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339874617123121:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:16.961361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:46:16.994834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494339874617123123:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:46:17.084557Z node 1 :TX_PROXY ERROR: Actor# [1:7494339878912090480:3584] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:46:19.872039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-17T17:46:20.838412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:46:20.838553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:21.155351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:46:21.699597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:1, at schemeshard: 72057594046644480 2025-04-17T17:46:22.228973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:46:22.805774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T17:46:25.178053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-17T17:46:27.058269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:46:27.198609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:46:32.002975Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2dqn8r0yv2rhe6xrbshvjh", SessionId: ydb://session/3?node_id=1&id=M2NkNGE2M2ItN2NmMTlhZDQtNDM4OTkxMGYtMTNiYjI3Ng==, Slow query, duration: 12.199631s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "CREATE OBJECT myPasswordSecretId (TYPE SECRET) WITH value = `pswd`;", parameters: 0b 2025-04-17T17:46:32.494460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710722:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2298, MsgBus: 1245 2025-04-17T17:46:36.821519Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494339960390635071:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:36.821557Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001bb3/r3tmp/tmpAuMtWq/pdisk_1.dat 2025-04-17T17:46:37.047349Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:37.047429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:37.050070Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:37.070736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Co ... 7:05.667551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:47:06.374593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.085447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.679450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:47:07.742744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:47:15.465863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710735:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5177, MsgBus: 1094 2025-04-17T17:47:18.417359Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494340140955921055:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:18.417481Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb3/r3tmp/tmpxR2fQ4/pdisk_1.dat 2025-04-17T17:47:19.266120Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:19.281884Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:19.282102Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:19.283412Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5177, node 3 2025-04-17T17:47:19.868444Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:19.868753Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:19.868762Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:19.868878Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1094 TClient is connected to server localhost:1094 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-04-17T17:47:23.426274Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494340140955921055:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:23.426345Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:47:23.519151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:47:23.563835Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:23.585063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:23.950528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.209169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.344257Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:27.822301Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340179610628505:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:27.822384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:27.953442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:28.019106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:28.113192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:28.150836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:28.221675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:28.395903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:28.756014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:29.181568Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340188200563809:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:29.181650Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:29.184406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340188200563814:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:29.204701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:29.349755Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494340188200563816:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:29.438202Z node 3 :TX_PROXY ERROR: Actor# [3:7494340188200563883:3598] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:31.533346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-17T17:47:32.344188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:47:32.973581Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:1, at schemeshard: 72057594046644480 2025-04-17T17:47:34.001530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:47:34.174426Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:34.174459Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:34.782319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T17:47:35.384347Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-17T17:47:36.036041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:47:36.143391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:47:43.426646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710729:0, at schemeshard: 72057594046644480 >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ManyDirs |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [FAIL] |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |74.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TBackupCollectionTests::HiddenByFeatureFlag |74.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> HttpRequest::Probe >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:53.921567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:53.922765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:53.923334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:53.923567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:53.923971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:53.923992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:53.924429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:53.925493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:53.929015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:54.905638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:54.906029Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:55.013502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:55.015491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:45:55.015993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:55.066747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:55.068412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:55.071860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:55.072177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:55.087445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-04-17T17:45:55.092565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:55.093325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:55.094447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:55.094489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:55.094519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:55.095859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.121429Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:45:55.621629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:55.622831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.624189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:55.626624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:55.627056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.651674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:55.653522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:55.662928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.663171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:55.663413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:55.663793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:55.681173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.681618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:55.681849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:55.696794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T17:45:55.696846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.696893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:55.696944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:55.717025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:55.730583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:55.731557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:55.743056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:55.744001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:55.744410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:55.754967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:55.755222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:55.756682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:55.757453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:55.767761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:55.767801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:55.767942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:55.767988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:55.774739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:55.774955Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-04-17T17:45:55.776249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:55.776472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:55.776704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:55.776919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:55.777355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:55.777575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:55.777772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:55.777972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:55.778454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:55.778840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:55.779234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:55.796479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:55.797131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:55.797371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:58.197193Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:47:58.200314Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 3.06ms result status StatusSuccess 2025-04-17T17:47:58.205204Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage 
{ Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:58.225358Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:47:58.229960Z node 16 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 3.69ms result status StatusSuccess 2025-04-17T17:47:58.234947Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-04-17T17:47:31.943267Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:32.153823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:32.181020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:32.181358Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:32.190202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:32.190428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:32.190691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:32.190827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:32.190975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:32.191094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:32.191222Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:32.191355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:32.191479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:32.191611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:32.191734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:32.191851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:32.279369Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:32.279565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:32.279636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:32.279874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:32.280041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:32.280125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:32.280178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:32.280294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:32.280382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:32.280429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:32.280462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:32.280662Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:32.280742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:32.280793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:32.281030Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:32.281149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:32.281214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:32.281262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:32.281323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:32.281427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:32.281474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:32.281508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:32.281581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:32.281632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:32.281664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:32.282127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=69; 2025-04-17T17:47:32.282250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=63; 2025-04-17T17:47:32.282345Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-17T17:47:32.282427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T17:47:32.282627Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:32.282719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:32.282772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:32.282963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:32.283023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:32.283067Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:32.283253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:32.283312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:32.283351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:32.283549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:32.283597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:32.283637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:32.283758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:32.283797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:32.283844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-17T17:47:56.108753Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::DisallowedPath >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> TOlapNaming::AlterColumnStoreOk >> Viewer::SelectStringWithBase64Encoding [FAIL] >> Viewer::QueryExecuteScript >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::ParallelCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [FAIL] Test command err: Trying to start YDB, gRPC: 18506, MsgBus: 14070 2025-04-17T17:47:35.053474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340208313662167:2103];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:35.057030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e6b/r3tmp/tmps0Ru2C/pdisk_1.dat 2025-04-17T17:47:35.904975Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:35.914312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:35.914488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:35.952579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18506, node 1 2025-04-17T17:47:37.455948Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:37.455967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:37.455972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:37.456778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:47:40.074487Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340208313662167:2103];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:40.074583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:14070 TClient is connected to server localhost:14070 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:43.675760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:43.707056Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:50.891354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:50.891374Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:51.762014Z node 1 :KQP_PROXY ERROR: TraceId: "01js2dt77bb34b9rdg972y7yj2", Request deadline has expired for 3.038814s seconds (NYdb::Dev::NStatusHelpers::TYdbErrorException) Status: CLIENT_DEADLINE_EXCEEDED Issues:
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18506 >> TOlap::CreateStore |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonStorageListingV1 >> TOlap::CustomDefaultPresets |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TOlap::CreateStoreWithDirs |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TOlapNaming::AlterColumnStoreFailed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 10165, MsgBus: 19563 2025-04-17T17:47:09.588216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340102991808209:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:09.588256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fd5/r3tmp/tmpdGyYh4/pdisk_1.dat 2025-04-17T17:47:12.422400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:13.663537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:14.594297Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340102991808209:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:14.594578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:15.373779Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:15.375314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:15.375401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:15.375810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:15.392045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:47:15.592275Z node 1 :BS_CONTROLLER ERROR: 
{BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.110948s 2025-04-17T17:47:15.592633Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.111866s TServer::EnableGrpc on GrpcPort 10165, node 1 2025-04-17T17:47:16.226642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:16.226658Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:16.226664Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:16.226755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19563 TClient is connected to server localhost:19563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:24.045787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.077391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.440530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.709210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.831195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:27.522497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340180301221126:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:27.522604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:29.195732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:29.860116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:29.962531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:30.129980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:30.281854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:30.374083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:30.374107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:30.581880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:30.851249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:31.643569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340197481091051:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:31.643801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:31.644566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340197481091064:2505], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:31.655112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:31.689956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340197481091066:2506], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:31.771485Z node 1 :TX_PROXY ERROR: Actor# [1:7494340197481091117:3625] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:34.053324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:47:34.214077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3202, MsgBus: 11737 2025-04-17T17:47:35.576209Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340211373695541:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:35.576252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fd5/r3tmp/tmphYq26l/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3202, node 2 2025-04-17T17:47:35.906856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:35.906955Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:35.907914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:47:36.070155Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:36.260300Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:36.260316Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:36.260321Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:36.260624Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11737 2025-04-17T17:47:40.582967Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494340211373695541:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:40.583570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:11737 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:42.220363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:43.171945Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:43.222525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:43.791426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:47.692292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:48.377423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:50.818204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:47:50.818482Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:55.466482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494340297273043220:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:55.467314Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:55.777766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:47:56.796839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:47:56.986734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:47:57.190878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:47:57.418274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:47:58.040798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:47:58.380972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:47:59.605786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494340314452913166:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:59.605846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:59.609397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494340314452913171:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:59.621384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:47:59.831197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494340314452913176:2511], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:47:59.964875Z node 2 :TX_PROXY ERROR: Actor# [2:7494340314452913242:3654] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:48:02.675002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:48:02.861082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::Drop >> TOlapNaming::AlterColumnTableOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:46:22.810539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:46:22.810633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:22.810733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:46:22.810769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:46:22.810810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:46:22.810835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:46:22.810890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:46:22.810954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:46:22.811264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:46:22.908570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:46:22.908640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:22.942897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:46:22.943027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:46:22.943172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:46:23.016756Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:46:23.018332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:46:23.030784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:23.031639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:46:23.046574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:23.047674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:23.047736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:23.047828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:46:23.047866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:46:23.047896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:46:23.048096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.084427Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:46:23.550392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:46:23.552758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.554023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:46:23.557145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:46:23.557629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.588885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:23.590596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:46:23.591900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.592421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:46:23.592659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
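The entries that follow trace schemeshard's per-operation state machine: "Change state for txid 1:0 2 -> 3", "3 -> 128", and "128 -> 240" correspond to the TCreateParts, TConfigureParts, TPropose, and TDone handlers logged below. A hedged sketch of that progression, using only the state values visible in this log (the real enum in ydb/core/tx/schemeshard is much larger, and each handler decides its own successor rather than consulting a table like this):

```cpp
#include <cstdint>
#include <stdexcept>

// State values taken from the "Change state for txid 1:0 ..." lines below;
// assumption: the names match the ProgressState handlers seen in the log.
enum class ETxState : uint32_t {
    CreateParts    = 2,    // TCreateParts: "no shards to create, do next state"
    ConfigureParts = 3,    // NSubDomainState::TConfigureParts
    Propose        = 128,  // NSubDomainState::TPropose, waits for TEvOperationPlan
    Done           = 240,  // TDone: part finished, paths published to scheme board
};

// Illustrative transition function over the observed states.
ETxState NextState(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;  // 2 -> 3
        case ETxState::ConfigureParts: return ETxState::Propose;         // 3 -> 128
        case ETxState::Propose:        return ETxState::Done;            // 128 -> 240, on TEvOperationPlan
        case ETxState::Done:           throw std::logic_error("operation already complete");
    }
    throw std::logic_error("unknown state");
}
```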
2025-04-17T17:46:23.592894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:46:23.622094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.622146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:46:23.627022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:46:23.649511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.649775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.650057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:23.654468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.689619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:46:23.698035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:46:23.700049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:46:23.720677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:46:23.721493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:46:23.721958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:23.728135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:46:23.728612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:46:23.734461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:46:23.734996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:46:23.750996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:46:23.752098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-17T17:46:23.753068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:46:23.753549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:46:23.755233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:46:23.755473Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:46:23.756701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:23.756974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.757209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:46:23.757905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.758169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:46:23.762600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:46:23.762853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:46:23.763089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:46:23.763340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:46:23.763393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:46:23.764075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:46:23.790413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:23.794103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:46:23.794527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
83Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:48:03.286534Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T17:48:03.286582Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:48:03.287557Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.287639Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.287664Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:48:03.287693Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-17T17:48:03.287725Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:48:03.287793Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-17T17:48:03.303767Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:48:03.303950Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-17T17:48:03.304006Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-17T17:48:03.304068Z node 5 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-17T17:48:03.309738Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-04-17T17:48:03.309931Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:48:03.310381Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-04-17T17:48:03.314836Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:03.315001Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 21474838635 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:03.315073Z node 5 :FLAT_TX_SCHEMESHARD INFO: 
TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-04-17T17:48:03.315202Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.315281Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-04-17T17:48:03.315328Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-17T17:48:03.315389Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-04-17T17:48:03.315430Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-17T17:48:03.315502Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:48:03.315585Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:48:03.315626Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-04-17T17:48:03.315689Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-17T17:48:03.315734Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-04-17T17:48:03.315779Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-04-17T17:48:03.315856Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:48:03.315907Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-04-17T17:48:03.315949Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:48:03.315990Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-17T17:48:03.316691Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.323447Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:03.323509Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:03.323676Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:48:03.323840Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:03.323880Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-04-17T17:48:03.323923Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-04-17T17:48:03.324767Z 
node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.324864Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.324904Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:48:03.324960Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:48:03.325010Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:48:03.325952Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.326039Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.326068Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-17T17:48:03.326099Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:48:03.326131Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:48:03.326235Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-04-17T17:48:03.326298Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:125:2151] 2025-04-17T17:48:03.326685Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:48:03.326744Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:48:03.326830Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:48:03.335479Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.343198Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-17T17:48:03.343369Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-04-17T17:48:03.343464Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-04-17T17:48:03.343538Z node 5 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-04-17T17:48:03.343583Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-04-17T17:48:03.343654Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-04-17T17:48:03.344070Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:48:03.345783Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-04-17T17:48:03.346051Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:48:03.346105Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:48:03.346629Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:48:03.346738Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:48:03.346787Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [5:550:2509] TestWaitNotification: OK eventTxId 103 >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TOlap::CreateDropStandaloneTable >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> TOlap::StoreStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-04-17T17:47:39.058531Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:41.528690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:41.683578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:41.683880Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:41.788051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:41.790197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:41.792434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:41.792803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:41.793752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:41.795311Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:41.795396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:41.796906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:41.796994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:41.797669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:41.798356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:41.799264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:41.855098Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:41.856314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:41.857257Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:41.862879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:41.864309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:41.864913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:41.870248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:41.871530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:41.871881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:41.872194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:41.872229Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:41.879353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:41.879950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:41.880248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:41.880540Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:41.881401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:41.881752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:41.882032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:41.885604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:41.886817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:41.886863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:41.887188Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:41.887232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:41.887814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:41.887841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:41.891814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=271; 2025-04-17T17:47:41.893170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=808; 2025-04-17T17:47:41.894302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=832; 2025-04-17T17:47:41.895691Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=838; 2025-04-17T17:47:41.897963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:41.898268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:41.898562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:41.900602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:41.900646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:41.900674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:41.903461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:41.903756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:41.903785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:41.905582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:41.906206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:41.906480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:41.907625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:41.907912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:41.908225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Fini ... 
d: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.220575Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:48:06.220708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T17:48:06.220793Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T17:48:06.220920Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-17T17:48:06.221095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.221223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.221319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.221513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:48:06.221655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.221764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.221796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:423:2438] finished for tablet 9437184 2025-04-17T17:48:06.226099Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:422:2437];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":1744912086207689,"name":"_full_task","f":1744912086207689,"d_finished":0,"c":0,"l":1744912086221844,"d":14155},"events":[{"name":"bootstrap","f":1744912086207864,"d_finished":2452,"c":1,"l":1744912086210316,"d":2452},{"a":1744912086221490,"name":"ack","f":1744912086220297,"d_finished":1043,"c":1,"l":1744912086221340,"d":1397},{"a":1744912086221478,"name":"processing","f":1744912086211362,"d_finished":5179,"c":10,"l":1744912086221342,"d":5545},{"name":"ProduceResults","f":1744912086209180,"d_finished":2756,"c":13,"l":1744912086221784,"d":2756},{"a":1744912086221787,"name":"Finish","f":1744912086221787,"d_finished":0,"c":0,"l":1744912086221844,"d":57},{"name":"task_result","f":1744912086211377,"d_finished":4014,"c":9,"l":1744912086220144,"d":4014}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.226237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:48:06.226681Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:422:2437];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1744912086207689,"name":"_full_task","f":1744912086207689,"d_finished":0,"c":0,"l":1744912086226285,"d":18596},"events":[{"name":"bootstrap","f":1744912086207864,"d_finished":2452,"c":1,"l":1744912086210316,"d":2452},{"a":1744912086221490,"name":"ack","f":1744912086220297,"d_finished":1043,"c":1,"l":1744912086221340,"d":5838},{"a":1744912086221478,"name":"processing","f":1744912086211362,"d_finished":5179,"c":10,"l":1744912086221342,"d":9986},{"name":"ProduceResults","f":1744912086209180,"d_finished":2756,"c":13,"l":1744912086221784,"d":2756},{"a":1744912086221787,"name":"Finish","f":1744912086221787,"d_finished":0,"c":0,"l":1744912086226285,"d":4498},{"name":"task_result","f":1744912086211377,"d_finished":4014,"c":9,"l":1744912086220144,"d":4014}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:06.226759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:48:06.207187Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T17:48:06.226802Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:48:06.227110Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TOlap::CreateTableWithNullableKeysNotAllowed >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> TOlap::CustomDefaultPresets [GOOD] >> TOlap::Decimal >> TOlapNaming::CreateColumnStoreOk >> TBackupCollectionTests::Drop [GOOD] >> 
TBackupCollectionTests::DropTwice |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |74.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: 
"7@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } 
ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 
AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@sta ... athId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:07.841189Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:07.841516Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 359us result status StatusSuccess 2025-04-17T17:48:07.842052Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 
RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:07.843198Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:07.843544Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 359us result status StatusSuccess 2025-04-17T17:48:07.844107Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:07.845435Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:07.845818Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 393us result status StatusSuccess 2025-04-17T17:48:07.846305Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:07.847519Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:07.847866Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 352us result status StatusSuccess 2025-04-17T17:48:07.848389Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ColumnStatistics::CountMinSketchStatistics >> TOlapNaming::CreateColumnTableOk >> TOlapNaming::AlterColumnStoreFailed [GOOD] |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |74.6%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |74.6%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TOlap::Decimal [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteScheme ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 
2025-04-17T17:48:03.353469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:03.353548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:03.353610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:03.353641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:03.353676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:03.353700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:03.353750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:03.353818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:03.354113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:03.587534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:03.587585Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:03.641403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:03.641907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:03.642064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:03.649716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:03.649885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:03.650486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:03.650714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:03.654431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:03.655635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:03.655683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:03.655823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:03.655867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:03.655896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:03.656006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-04-17T17:48:03.668467Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:48:03.812068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:03.815414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.815656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:03.815864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:03.815921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.823401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:03.823544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:03.823743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.823797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:03.823845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:03.823887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:03.826051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.826107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:03.826143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:03.828259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.828326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.828369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:03.828408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:03.831942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:03.839524Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:03.839722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:03.840518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:03.840632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:03.840668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:03.840885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:03.840938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:03.841068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:03.841137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:03.842978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:03.843027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:03.843190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:03.843225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:03.843523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:03.843575Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:03.843663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:03.843690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:03.843721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:03.843745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:03.843774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:03.843819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:03.843849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:03.843873Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:03.843928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:03.843959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:03.843986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:03.845515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:03.845614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:03.845647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... RD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:48:09.741838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T17:48:09.742513Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:09.742632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:09.742686Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TPropose operationId# 102:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2025-04-17T17:48:09.742883Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:48:09.743038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:48:09.743099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:48:09.749680Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:214;event=finished_tx;tx_id=102; FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:48:09.752406Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:09.752451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:48:09.752671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 
72057594046678944, LocalPathId: 3] 2025-04-17T17:48:09.752820Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:09.752862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:48:09.752900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-17T17:48:09.753189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.753231Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:48:09.753290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-17T17:48:09.753933Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:48:09.754035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:48:09.754067Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:48:09.754110Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T17:48:09.754145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:48:09.754730Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:48:09.754802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:48:09.754829Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:48:09.754853Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-17T17:48:09.754876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:48:09.754987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:48:09.759957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-17T17:48:09.760038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 
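The records above trace the scheme-board publication handshake for txId 102: the schemeshard describes each affected path, sends it to the populator, and only reports "is published: true" once a TEvUpdateAck with a sufficient version has arrived for every pending path, decrementing the per-path publishing refcount along the way. A minimal sketch of that ack-bookkeeping pattern follows; it is illustrative only, and the type and method names (TPathId, TTxPublicationTracker) are invented for this sketch, not YDB's actual classes.

```cpp
#include <cstdint>
#include <iostream>
#include <map>
#include <tuple>

// A published path is identified by (ownerId, localPathId); each
// publication carries a schema version, as in the TEvUpdateAck records.
struct TPathId {
    uint64_t OwnerId;
    uint64_t LocalPathId;
    bool operator<(const TPathId& rhs) const {
        return std::tie(OwnerId, LocalPathId) < std::tie(rhs.OwnerId, rhs.LocalPathId);
    }
};

// Tracks which path versions a transaction still waits on before it can
// report "is published: true" (compare AckPublish / DecrementPathDbRefCount).
class TTxPublicationTracker {
public:
    void ExpectAck(const TPathId& path, uint64_t version) {
        Pending[path] = version;
    }
    // Returns true once no acks remain outstanding.
    bool HandleUpdateAck(const TPathId& path, uint64_t version) {
        auto it = Pending.find(path);
        if (it != Pending.end() && version >= it->second) {
            Pending.erase(it);  // this ack covers the expected version
        }
        return Pending.empty();
    }
private:
    std::map<TPathId, uint64_t> Pending;
};

int main() {
    // Mirrors txId 102 above: two paths published, versions 5 and 4.
    TTxPublicationTracker tx102;
    tx102.ExpectAck({72057594046678944ULL, 2}, 5);
    tx102.ExpectAck({72057594046678944ULL, 3}, 4);
    std::cout << std::boolalpha
              << tx102.HandleUpdateAck({72057594046678944ULL, 2}, 5) << "\n"   // false
              << tx102.HandleUpdateAck({72057594046678944ULL, 3}, 4) << "\n";  // true
}
```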
2025-04-17T17:48:09.760108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2025-04-17T17:48:09.760551Z node 2 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2025-04-17T17:48:09.762670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.762825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2025-04-17T17:48:09.764015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:48:09.764289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:48:09.767306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.786297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-04-17T17:48:09.786400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:48:09.786747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:48:09.789129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.789285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.789324Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:48:09.789437Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:48:09.789469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:48:09.789506Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:48:09.789534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:48:09.789569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:48:09.789686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:341:2320] message: TxId: 102 2025-04-17T17:48:09.789737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:48:09.789774Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:48:09.789806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:48:09.789926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:48:09.791780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:48:09.791834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [2:404:2376] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-17T17:48:09.800917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:09.801360Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.804840Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-04-17T17:48:09.815194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:09.815342Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:48:09.815635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:48:09.815675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:48:09.816072Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:48:09.816164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:48:09.816198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:440:2412] TestWaitNotification: OK eventTxId 103 |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |74.7%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> LocalPartitionReader::FeedSlowly >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:08.073770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# 
no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:08.073852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:08.073889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:08.073918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:08.073953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:08.073980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:08.074064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:08.074134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:08.074567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:08.215616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:08.215668Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:08.236744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:08.236857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:08.237008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:08.242459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:08.242744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:08.243272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.243563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:08.247733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.248939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:08.249000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.249088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:08.249129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:08.249159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:08.249319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.257124Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] 
recipient: [1:15:2062] 2025-04-17T17:48:08.440067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:08.440278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.440457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:08.440687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:08.440738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.443111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.443232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:08.443427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.443480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:08.443544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:08.443577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:08.447023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.447097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:08.447136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:08.448882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.448934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.448979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.449024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.454696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:08.456822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
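At this point the operation has been proposed: the schemeshard walked it through states 2 -> 3 -> 128 and sent the coordinator a propose message carrying a [MinStep, MaxStep] window. The records that follow show the second phase, where FAKE_COORDINATOR assigns a concrete plan step (5000001) and the operation jumps 128 -> 240. A rough sketch of that two-phase propose/plan pattern is below; the names (EOpState, TFakeCoordinator::Plan) are invented for illustration, and the real coordinator is a separate tablet rather than a local struct.

```cpp
#include <algorithm>
#include <cstdint>
#include <iostream>

// Simplified operation states, matching the numeric states the log prints
// ("Change state for txid 1:0 128 -> 240").
enum class EOpState : uint32_t {
    Propose = 128,  // proposed, waiting for a plan step from the coordinator
    Done    = 240,  // plan step received, operation part complete
};

struct TOperation {
    uint64_t TxId = 0;
    EOpState State = EOpState::Propose;
    uint64_t PlanStep = 0;
};

// Hands out monotonically non-decreasing plan steps, like the
// "FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0" records.
struct TFakeCoordinator {
    uint64_t FrontStep = 0;
    uint64_t Plan(TOperation& op, uint64_t minStep) {
        FrontStep = std::max(FrontStep, minStep);
        op.PlanStep = FrontStep;
        op.State = EOpState::Done;  // HandleReply TEvOperationPlan: 128 -> 240
        return FrontStep;
    }
};

int main() {
    TFakeCoordinator coordinator;
    TOperation op{1};
    std::cout << "txId " << op.TxId << " planned at step "
              << coordinator.Plan(op, 5000001) << "\n";
}
```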
2025-04-17T17:48:08.457026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:08.457935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.458054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:08.458096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.458340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:08.458401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.458583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:08.458652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:08.460492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:08.460530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:08.460713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.460753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:08.461028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.461078Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:08.461158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:08.461189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.461221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:08.461259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.461295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:08.461328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.461361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:08.461385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:08.461437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:08.461470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:08.461498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:08.463241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:08.463345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:08.463378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :48:10.467384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 49 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:10.474749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T17:48:10.474883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-04-17T17:48:10.475286Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:10.475394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:10.475437Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-17T17:48:10.475592Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-17T17:48:10.475707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:10.475759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:48:10.476757Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:214;event=finished_tx;tx_id=101; FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-17T17:48:10.478926Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:10.478979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:10.479150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:48:10.479298Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:10.479337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T17:48:10.479380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T17:48:10.479699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:10.479743Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:48:10.479811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-17T17:48:10.480544Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:48:10.480629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:48:10.480665Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:48:10.480701Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:48:10.480733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:10.481619Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:48:10.481704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:48:10.481727Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:48:10.481754Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:48:10.481779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:48:10.481834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T17:48:10.483880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 
msg type: 275382275 2025-04-17T17:48:10.484665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:48:10.484883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:48:10.507850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-04-17T17:48:10.507906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T17:48:10.508034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:48:10.509795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:10.509918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:10.509956Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:48:10.510059Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:48:10.510088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:48:10.510119Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:48:10.510147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:48:10.510193Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T17:48:10.510251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:341:2320] message: TxId: 101 2025-04-17T17:48:10.510291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:48:10.510322Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:48:10.510349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:48:10.510474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:48:10.511829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:48:10.511869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:342:2321] TestWaitNotification: OK eventTxId 101 2025-04-17T17:48:10.512198Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:10.512389Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 203us result status StatusSuccess 2025-04-17T17:48:10.514017Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 
PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TOlapNaming::CreateColumnStoreFailed >> LocalPartitionReader::FeedSlowly [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> LocalPartitionReader::Booting [GOOD] >> Viewer::QueryExecuteScript [FAIL] >> Viewer::Plan2SvgOK >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> TOlapNaming::CreateColumnStoreFailed [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> LocalPartitionReader::Simple >> LocalPartitionReader::Simple [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-04-17T17:47:34.713247Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:34.832689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:34.856549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:34.856899Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:34.865606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:34.865824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:34.866101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:34.866313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:34.866464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:34.866576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:34.866676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:34.866821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:34.866962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:34.867079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.867182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:34.867282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:34.895976Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:34.896197Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:34.896262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:34.896457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.896623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:34.896700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:34.896752Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:34.896847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:34.896908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:34.896949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:34.896978Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:34.897182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.897259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:34.897315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:34.897370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:34.897478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:34.897542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:34.897587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:34.897629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 
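The records above show the columnshard's startup normalizer chain: during TTxUpdateSchema each registered normalizer (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, and so on) runs in sequence, reports how many chunks it found, and hands control to the next one (normalizer_init / normalizer_finished / normalizer_switched). A compact illustration of this sequential-migration pattern follows; TNormalizer and RunNormalizerChain are invented names for the sketch, not the actual YDB API.

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// One startup migration ("normalizer"); Run returns how many items it fixed.
struct TNormalizer {
    std::string Name;
    std::function<size_t()> Run;
};

// Executes the chain in registration order, mirroring the
// normalizer_finished -> normalizer_switched progression in the log.
void RunNormalizerChain(const std::vector<TNormalizer>& chain) {
    for (const auto& normalizer : chain) {
        size_t fixed = normalizer.Run();
        std::cout << normalizer.Name << ": " << fixed << " chunks found\n";
    }
}

int main() {
    // An empty tablet has nothing to migrate, hence "0 chunks found" for
    // every normalizer in the log above.
    RunNormalizerChain({
        {"Granules",            [] { return size_t(0); }},
        {"Chunks",              [] { return size_t(0); }},
        {"TablesCleaner",       [] { return size_t(0); }},
        {"CleanGranuleId",      [] { return size_t(0); }},
        {"CleanInsertionDedup", [] { return size_t(0); }},
    });
}
```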
2025-04-17T17:47:34.897724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:34.897767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:34.897793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:34.897842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:34.897883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:34.897911Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:34.898813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=73; 2025-04-17T17:47:34.898897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-17T17:47:34.898966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-17T17:47:34.899031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-17T17:47:34.899185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:34.899241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:34.899275Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:34.899418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:34.899446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.899466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.899585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:34.899616Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:34.899652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:34.899800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:34.899845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:34.899875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:34.900000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:34.900038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:34.900090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... n_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.723348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:48:15.725073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T17:48:15.726267Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T17:48:15.727320Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-04-17T17:48:15.730275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.731043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.732936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.738902Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:48:15.741436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.743201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.744240Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:423:2438] finished for tablet 9437184 2025-04-17T17:48:15.746378Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:422:2437];stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.016},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.026},{"events":["l_task_result"],"t":0.038},{"events":["f_ack"],"t":0.039},{"events":["l_ProduceResults","f_Finish"],"t":0.059},{"events":["l_ack","l_processing","l_Finish"],"t":0.06}],"full":{"a":1744912095684014,"name":"_full_task","f":1744912095684014,"d_finished":0,"c":0,"l":1744912095744640,"d":60626},"events":[{"name":"bootstrap","f":1744912095685079,"d_finished":25483,"c":1,"l":1744912095710562,"d":25483},{"a":1744912095738554,"name":"ack","f":1744912095723088,"d_finished":10232,"c":1,"l":1744912095733320,"d":16318},{"a":1744912095733842,"name":"processing","f":1744912095710667,"d_finished":15389,"c":10,"l":1744912095733324,"d":26187},{"name":"ProduceResults","f":1744912095700916,"d_finished":19305,"c":13,"l":1744912095743912,"d":19305},{"a":1744912095743920,"name":"Finish","f":1744912095743920,"d_finished":0,"c":0,"l":1744912095744640,"d":720},{"name":"task_result","f":1744912095710687,"d_finished":5000,"c":9,"l":1744912095722106,"d":5000}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.746475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:48:15.746900Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:422:2437];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.016},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.026},{"events":["l_task_result"],"t":0.038},{"events":["f_ack"],"t":0.039},{"events":["l_ProduceResults","f_Finish"],"t":0.059},{"events":["l_ack","l_processing","l_Finish"],"t":0.062}],"full":{"a":1744912095684014,"name":"_full_task","f":1744912095684014,"d_finished":0,"c":0,"l":1744912095746523,"d":62509},"events":[{"name":"bootstrap","f":1744912095685079,"d_finished":25483,"c":1,"l":1744912095710562,"d":25483},{"a":1744912095738554,"name":"ack","f":1744912095723088,"d_finished":10232,"c":1,"l":1744912095733320,"d":18201},{"a":1744912095733842,"name":"processing","f":1744912095710667,"d_finished":15389,"c":10,"l":1744912095733324,"d":28070},{"name":"ProduceResults","f":1744912095700916,"d_finished":19305,"c":13,"l":1744912095743912,"d":19305},{"a":1744912095743920,"name":"Finish","f":1744912095743920,"d_finished":0,"c":0,"l":1744912095746523,"d":2603},{"name":"task_result","f":1744912095710687,"d_finished":5000,"c":9,"l":1744912095722106,"d":5000}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T17:48:15.747003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:48:15.677888Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-17T17:48:15.747050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:48:15.747399Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:10.978581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:10.978697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:10.978746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:10.978784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:10.978826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:10.978853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:10.978925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:10.979036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:10.979403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:11.112068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:11.112127Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T17:48:11.131668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:11.131817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:11.131988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:11.140733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:11.140995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:11.141567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.141851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:11.145678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:11.147019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:11.147083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:11.147183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:11.147230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:11.147273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:11.147455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.162912Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:48:11.336798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:11.337053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.337257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:11.337495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:11.337558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.343287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.343435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:11.343637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.343687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:11.343745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:11.343792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:11.346910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.346972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:11.347019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:11.355002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.355074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.355129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.355186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.359193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:11.366994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:11.367222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:11.368218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.368376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:11.368433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.368690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:11.368755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.368926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:11.369044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:11.379194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:11.379252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:11.379449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:11.379489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:11.379780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.379840Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:11.379940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:11.379980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.380018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:11.380066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.380107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:11.380148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.380186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:11.380216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:11.380290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:11.380328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:11.380362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:11.382670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:11.382788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:11.382826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
AT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:20.067950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:20.068072Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:20.068239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:20.068302Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:20.068338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:20.068370Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:20.070057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:20.070127Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:20.070162Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:20.071702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:20.071741Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:20.071788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:20.071832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:20.071973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:20.074526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:20.075037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:20.077185Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:20.077713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:20.078076Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
1:0, at tablet# 72057594046678944 2025-04-17T17:48:20.078849Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:20.079068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:20.080039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:20.080297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:20.085013Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:20.085064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:20.085270Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:20.085319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:20.085693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:20.085740Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:20.085828Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:20.085862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:20.085899Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:20.085928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:20.085987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:20.086028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:20.086066Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:20.086098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:20.086150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:20.086200Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:20.086235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:20.086624Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:20.086721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:20.086757Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, 
txId: 1 2025-04-17T17:48:20.086797Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:48:20.086832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:20.086912Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:48:20.089525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:48:20.089994Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T17:48:20.096938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 1 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "data" Type: "Utf8" } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "timestamp" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:20.097299Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /MyRoot/OlapStore, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:20.097498Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-04-17T17:48:20.097943Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-04-17T17:48:20.269240Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-04-17T17:48:20.276165Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:48:20.287953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:20.288105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN STORE, path: /MyRoot/OlapStore 2025-04-17T17:48:20.299041Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T17:48:20.299239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:48:20.299277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:48:20.299897Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:48:20.299986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:48:20.300020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter 
[2:284:2275] TestWaitNotification: OK eventTxId 101 2025-04-17T17:48:20.300424Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:20.300605Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 215us result status StatusPathDoesNotExist 2025-04-17T17:48:20.300767Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TOlap::CreateTableTtl [GOOD] >> TOlap::CreateTableWithNullableKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:01.865221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:01.865339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:01.865377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:01.865409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:01.865446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:01.865476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:01.865536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:01.865946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:01.866332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:02.039615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:02.039688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:02.075299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:02.075428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:02.075613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:02.087073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:02.087415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:02.088046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:02.088343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:02.093561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:02.094959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:02.095026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:02.095159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:02.095212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:02.095250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:02.095465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.102282Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:48:02.244859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:02.245059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.245252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:02.245484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:02.245551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.247616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:02.247753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:02.247940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.248012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:02.248046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:02.248076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:02.249899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.249989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:02.250024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:02.251760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.251802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.251847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:02.251894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:02.255357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:02.257312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:02.257477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:02.258379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:02.258494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:02.258716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:02.258954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:02.259005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-04-17T17:48:02.259153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:02.259249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:02.261038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:02.261077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:02.261229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:02.261274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:02.261565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:02.261622Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:02.261713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:02.261743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:02.261787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:02.261815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:02.261854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:02.261899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:02.261933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:02.261964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:02.262020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:02.262052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:02.262080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:02.269793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:02.269931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:02.269975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RACE: StateWork, received event# 274137603, Sender [6:208:2210], Recipient [6:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2025-04-17T17:48:21.974524Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T17:48:21.974587Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:21.974653Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:21.974678Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-17T17:48:21.974701Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2025-04-17T17:48:21.974730Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2025-04-17T17:48:21.974798Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2025-04-17T17:48:21.974833Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:48:21.975412Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [6:589:2546], Recipient [6:125:2151]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 2173 } } 2025-04-17T17:48:21.975462Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-17T17:48:21.975528Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 2173 } } 2025-04-17T17:48:21.975555Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-04-17T17:48:21.975652Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 2173 } } 2025-04-17T17:48:21.975751Z node 6 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 2173 } } 2025-04-17T17:48:21.975790Z 
node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:48:21.979147Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:651:2600], Recipient [6:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:21.979197Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:21.979221Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:48:21.979402Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:589:2546], Recipient [6:125:2151]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-17T17:48:21.979440Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-17T17:48:21.979541Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-17T17:48:21.979591Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-04-17T17:48:21.979730Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-17T17:48:21.979996Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:48:21.980086Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-17T17:48:21.980149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:21.980222Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-17T17:48:21.980260Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T17:48:21.980310Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2025-04-17T17:48:21.980479Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:48:21.981346Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:21.981709Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:21.982040Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:48:21.982102Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:21.990070Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:48:21.990115Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:21.990255Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:48:21.990282Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:21.990377Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-17T17:48:21.990404Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:21.990514Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-17T17:48:21.990566Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:21.990610Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2025-04-17T17:48:21.990714Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:589:2546] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2025-04-17T17:48:21.991030Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:125:2151], Recipient [6:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:48:21.991074Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:48:21.991121Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-17T17:48:21.991158Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2025-04-17T17:48:21.991479Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:48:21.991512Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-04-17T17:48:21.991820Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-17T17:48:21.992405Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-04-17T17:48:21.992932Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-17T17:48:21.993264Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2025-04-17T17:48:21.993586Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:302:2293] message: TxId: 106 2025-04-17T17:48:21.993913Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-17T17:48:21.994508Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T17:48:21.994803Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-17T17:48:21.995408Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-17T17:48:21.995439Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1 2025-04-17T17:48:21.995456Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1 2025-04-17T17:48:21.995831Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-17T17:48:22.008259Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:22.008656Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:302:2293] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2025-04-17T17:48:22.009854Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T17:48:22.010170Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:616:2565] 2025-04-17T17:48:22.012152Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:618:2567], Recipient [6:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:48:22.012185Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:48:22.012441Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:47:47.550490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:47:47.550593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:47:47.550631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:47:47.550671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:47:47.550715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:47:47.550747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:47:47.550793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:47:47.550867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:47:47.551134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:47:47.685827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:47:47.685878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:47.715557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:47:47.715659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-17T17:47:47.715773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:47:47.725207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:47:47.725531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:47:47.726120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:47.726431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:47:47.737723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:47.738910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:47:47.738968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:47.739086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:47:47.739140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:47.739184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:47:47.739360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:47:47.781619Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:47:49.261557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:47:49.263492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:49.265426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:47:49.271962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:47:49.272583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:49.313099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:49.319162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:47:49.321588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:49.324719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:47:49.325020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:47:49.325619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:47:49.357525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:49.357871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:47:49.358281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:47:49.383932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:49.384226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:49.384795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:49.389001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:47:49.436472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:47:49.462668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:47:49.465974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:47:49.487876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:47:49.489948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:47:49.495437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:49.497309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:47:49.498348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:47:49.500235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:47:49.501821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:47:49.523448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-17T17:47:49.524326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:47:49.525579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:47:49.525884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:47:49.533691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:47:49.534030Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:47:49.535293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:47:49.535580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:49.536486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:47:49.536806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:49.537118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:47:49.537759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:47:49.551458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:47:49.551736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:47:49.552108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:47:49.552478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:47:49.553060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:47:49.589263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:47:49.602647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:47:49.603500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
omplete at tablet# 72057594046678944 2025-04-17T17:48:22.759337Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 105:1 2025-04-17T17:48:22.759466Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:124:2150], Recipient [7:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:48:22.759499Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:48:22.759821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-17T17:48:22.759861Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-17T17:48:22.778752Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:48:22.778812Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-04-17T17:48:22.779466Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-17T17:48:22.780090Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-04-17T17:48:22.780415Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-17T17:48:22.780801Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/2, is published: true 2025-04-17T17:48:22.781211Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:305:2296] message: TxId: 105 2025-04-17T17:48:22.781558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-17T17:48:22.781905Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T17:48:22.793315Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T17:48:22.794018Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-17T17:48:22.794055Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-17T17:48:22.794076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-17T17:48:22.794434Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:48:22.794719Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T17:48:22.819229Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:48:22.819356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:305:2296] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 105 at schemeshard: 72057594046678944 2025-04-17T17:48:22.819555Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:48:22.820110Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:530:2491] 2025-04-17T17:48:22.821213Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:532:2493], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:48:22.821248Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:48:22.821273Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-17T17:48:22.843569Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:598:2557], Recipient [7:124:2150]: {TEvModifySchemeTransaction txid# 106 TabletId# 72057594046678944} 2025-04-17T17:48:22.843923Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:48:22.876746Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:22.880882Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:22.902615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 4], parent name: MyCollection1, child name: 19700101000000Z_incremental, child id: [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-04-17T17:48:22.902976Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-04-17T17:48:22.903948Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:22.904908Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 106:1, explain: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-17T17:48:22.905271Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:2, propose status:StatusInvalidParameter, reason: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-17T17:48:22.944157Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Abort operation: IgniteOperation fail to propose a part, opId: 106:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Incremental backup is disabled on this collection, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 2025-04-17T17:48:22.945250Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir AbortPropose, opId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:22.954733Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:48:22.991291Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Incremental backup is disabled on this collection" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:22.992955Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Incremental backup is disabled on this collection, operation: BACKUP INCREMENTAL, path: 
/MyRoot/.backups/collections/MyCollection1 2025-04-17T17:48:22.993298Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T17:48:23.007644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T17:48:23.008042Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T17:48:23.020486Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:604:2563], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:23.020813Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:23.021137Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:48:23.021872Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:305:2296], Recipient [7:124:2150]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-17T17:48:23.021907Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:48:23.031682Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T17:48:23.032391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T17:48:23.032434Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:602:2561] 2025-04-17T17:48:23.043395Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:604:2563], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:48:23.043453Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:48:23.044125Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-04-17T17:48:23.056598Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:605:2564], Recipient [7:124:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:48:23.057283Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:48:23.066228Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:23.068187Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 9.54ms result status StatusSuccess 2025-04-17T17:48:23.078518Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 
3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 BackupCollectionVersion: 0 } ChildrenExist: true } Children { Name: "19700101000000Z_full" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> GroupWriteTest::ByTableName [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:08.074688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:08.074754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:08.074793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:08.074820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:08.074852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:08.074875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:08.074934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:08.074990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:08.075278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:08.155848Z node 1 :FLAT_TX_SCHEMESHARD WARN:
Cannot subscribe to console configs 2025-04-17T17:48:08.155904Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:08.169215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:08.169328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:08.169451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:08.175380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:08.175658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:08.176212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.176489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:08.180791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.182015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:08.182074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.182163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:08.182218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:08.182257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:08.182448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.188485Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:48:08.347706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:08.347923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.348125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:08.348402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:08.348457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.351637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.351774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-04-17T17:48:08.351967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.352014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:08.352058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:08.352128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:08.354630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.354684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:08.354716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:08.362171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.362267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.362315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.362357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.388967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:08.395145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:08.395362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:08.396251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.396391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:08.396436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.396670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:08.396731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.396877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:08.396940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:08.401063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:08.401107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:08.406324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.406392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:08.406748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.406817Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:08.406910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:08.406943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.406975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:08.407013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.407050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:08.407083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.407113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:08.407140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:08.407205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:08.407235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:08.407263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:08.408741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:08.408822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:08.408851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:48:23.180281Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-17T17:48:23.181890Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T17:48:23.182253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:48:23.182606Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T17:48:23.182959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:48:23.183719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-17T17:48:23.184096Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2323] message: TxId: 105 2025-04-17T17:48:23.184455Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:48:23.184822Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T17:48:23.185206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T17:48:23.189726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-17T17:48:23.199484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:48:23.199821Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:527:2498] TestWaitNotification: OK eventTxId 105 2025-04-17T17:48:23.207320Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:23.209529Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 2.52ms result status StatusSuccess 2025-04-17T17:48:23.214363Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-17T17:48:23.251092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:23.258740Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:23.274271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-17T17:48:23.274651Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2025-04-17T17:48:23.276440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-17T17:48:23.286738Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:23.287086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:23.288072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:48:23.288452Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-17T17:48:23.300348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 7, at schemeshard: 72057594046678944 2025-04-17T17:48:23.302005Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2025-04-17T17:48:23.314528Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:23.314576Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:48:23.314745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-17T17:48:23.314822Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:23.314853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:209:2211], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-04-17T17:48:23.314891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:209:2211], at schemeshard: 72057594046678944, txId: 106, path id: 7 2025-04-17T17:48:23.315193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:23.315235Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2025-04-17T17:48:23.315379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-04-17T17:48:23.315833Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:23.315903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:23.315931Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-17T17:48:23.315965Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-17T17:48:23.315997Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T17:48:23.316581Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:23.316632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:23.316650Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-17T17:48:23.316671Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 1 2025-04-17T17:48:23.316692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:48:23.316740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-17T17:48:23.330543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2025-04-17T17:48:23.330638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-04-17T17:48:23.331463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:48:23.332201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1]
|74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableWithNullableKeys [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:48:10.236716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:10.236803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:10.236852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:10.236886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:10.236921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:10.236947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:10.236995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:10.237064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:10.237364Z node 1 :FLAT_TX_SCHEMESHARD
DEBUG: TxInitSchema.Execute 2025-04-17T17:48:10.901363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:10.901595Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:11.199895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:11.201298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:11.201844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:11.290647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:11.291603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:11.298769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.299192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:11.330801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:11.331999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:11.332052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:11.332189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:11.332231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:11.332264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:11.332374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.351496Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:48:11.713928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:11.714149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.714453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:11.714657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:11.714718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.730168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.736364Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:11.742283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.742597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:11.743356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:11.743873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:11.760877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.761146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:11.761415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:11.782965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.783017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.783055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.783095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.799504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:11.807916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:11.808066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:11.808818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.808920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:11.808957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.809168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:11.809217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.809330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:11.809381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:11.810903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:11.810941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:11.811076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:11.811104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:11.811367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.811405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:11.811487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:11.811515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.811545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:11.811576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.811611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:11.811641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.811665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:11.811690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:11.811734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:11.811758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:11.811784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:11.813081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:11.813153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:11.813183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
44, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:48:25.601449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:48:25.601539Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:25.601566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-17T17:48:25.601598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T17:48:25.601616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 4 2025-04-17T17:48:25.601895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:48:25.601935Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState at schemeshard: 72057594046678944 2025-04-17T17:48:25.601974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409547 2025-04-17T17:48:25.602406Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:48:25.602491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:48:25.602519Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:48:25.602549Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:48:25.602577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T17:48:25.606047Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:48:25.606101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:48:25.606119Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:48:25.606140Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T17:48:25.606159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:48:25.607166Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:48:25.607230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:48:25.607251Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:48:25.607274Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-17T17:48:25.607296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:48:25.607352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T17:48:25.608365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-04-17T17:48:25.616875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:48:25.617846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:48:25.625014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:48:25.644690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-04-17T17:48:25.645005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409547, partId: 0 2025-04-17T17:48:25.650403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-04-17T17:48:25.650696Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:48:25.676172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:48:25.676910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:48:25.677252Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 104:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:25.677592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:48:25.683222Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:48:25.683529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:48:25.683823Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:48:25.684084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:48:25.684348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-17T17:48:25.687225Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:371:2350] message: TxId: 104 2025-04-17T17:48:25.687817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:48:25.688126Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:48:25.688441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:48:25.689165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:48:25.706308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:48:25.710376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:48:25.710426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:612:2572] 2025-04-17T17:48:25.713477Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T17:48:25.721606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[2:475:2444];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-04-17T17:48:25.752550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:48:25.764781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2025-04-17T17:48:25.769144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:48:25.769453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:48:25.770388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:48:25.810546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:48:25.811204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:48:25.813855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-17T17:48:25.820970Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/MyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:25.840597Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/MyTable" took 19.7ms result status StatusPathDoesNotExist 2025-04-17T17:48:25.846943Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/MyTable\', error: path hasn\'t been 
resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 3])" Path: "/MyRoot/MyDir/MyTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-17T17:48:25.851924Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944
2025-04-17T17:48:25.858367Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 6.99ms result status StatusPathDoesNotExist
2025-04-17T17:48:25.859026Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl
|74.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl
|74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD]
Test command err: RandomSeed# 9007233486025144076
2025-04-17T17:45:35.229833Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551}
2025-04-17T17:45:35.241963Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1}
2025-04-17T17:45:35.242012Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1}
2025-04-17T17:45:35.243477Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK}
2025-04-17T17:45:35.252072Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:45:35.253524Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK}
2025-04-17T17:47:27.855584Z 6 00h01m23.910512s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 2719
2025-04-17T17:48:27.017113Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die
2025-04-17T17:48:27.017214Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:48:27.017266Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die
2025-04-17T17:48:27.017306Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:48:27.106059Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK}
2025-04-17T17:48:27.106164Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK}
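The GroupWriteTest::ByTableName block above ends with the load tablet issuing two hard TEvCollectGarbage requests on shutdown (PerGenerationCounter# 32 and 33) and waiting for the matching TEvCollectGarbageResult acks. A minimal sketch of how such send/ack pairs can be checked mechanically, assuming only the exact line format visible above; `unmatched_collects` is an illustrative name, not part of any YDB tooling, and the regexes deliberately match the log's own spelling "recieved":

    import re
    from collections import defaultdict

    # Matches 'going to send TEvCollectGarbage {...}' records as printed above.
    SEND_RE = re.compile(
        r"going to send TEvCollectGarbage \{TabletId# (\d+) "
        r"RecordGeneration# (\d+) PerGenerationCounter# (\d+)")
    # Matches the acks; 'recieved' is copied verbatim from the log output.
    ACK_RE = re.compile(
        r"recieved TEvCollectGarbageResult \{TabletId# (\d+) "
        r"RecordGeneration# (\d+) PerGenerationCounter# (\d+) "
        r"Channel# \d+ Status# (\w+)")

    def unmatched_collects(lines):
        """Return (tablet, generation, counter) keys sent but never acked."""
        pending = defaultdict(int)
        for line in lines:
            m = SEND_RE.search(line)
            if m:
                pending[m.groups()] += 1
                continue
            m = ACK_RE.search(line)
            if m:
                pending[m.groups()[:3]] -= 1
        return [key for key, n in pending.items() if n > 0]

For the block above this returns an empty list: the sends with counters 1, 32 and 33 are each answered by a Status# OK result.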
|74.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD]
>> TOlap::AlterStore [GOOD]
>> TOlap::AlterTtl
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0]
>> Viewer::Cluster10000Tablets [GOOD]
>> Viewer::FuzzySearcherLimit1OutOf4 [GOOD]
>> Viewer::FuzzySearcherLimit2OutOf4 [GOOD]
>> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD]
|74.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
>> KqpUserConstraint::KqpReadNull-UploadNull
|74.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
|74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
>> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain
|74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
|74.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log}
|74.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer
>> TestDataErasure::DataErasureWithSplit
>> Viewer::Plan2SvgOK [FAIL]
>> Viewer::Plan2SvgBad
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD]
|74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate
>> TConsoleTests::TestGetUnknownTenantStatus
>> TOlap::AlterTtl [GOOD]
>> TJaegerTracingConfiguratorTests::DefaultConfig
|74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
|74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
|74.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD]
>> TConsoleConfigSubscriptionTests::TestAddConfigSubscription
>> TestDataErasure::DataErasureManualLaunch3Cycles
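The schemeshard test blocks below are dominated by "Change state for txid N:P A -> B" records that trace each suboperation through its state machine (for example 2 -> 3 -> 128 -> 240 for the subdomain alter below, and 129 -> 130 for the column-table drop earlier). A minimal sketch that reconstructs those paths from a captured log, assuming only that record format; `state_paths` is an illustrative name, not YDB tooling:

    import re
    from collections import defaultdict

    STATE_RE = re.compile(r"Change state for txid (\d+):(\d+) (\d+) -> (\d+)")

    def state_paths(log_text):
        """Map (txId, partId) -> ordered list of state codes visited."""
        paths = defaultdict(list)
        for tx, part, src, dst in STATE_RE.findall(log_text):
            key = (int(tx), int(part))
            if not paths[key]:
                paths[key].append(int(src))  # record the initial state once
            paths[key].append(int(dst))
        return dict(paths)

Run against the TOlap::AlterTtl block below, this would yield {(1, 0): [2, 3, 128, 240], (106, 0): [3, 128, 129]} for the transitions that appear in this excerpt.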
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T17:48:10.345075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:48:10.345316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:48:10.345380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:48:10.345418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:48:10.345461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:48:10.345492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:48:10.345558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:48:10.345654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:48:10.346034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:48:10.704928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:48:10.704989Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:48:10.732802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:48:10.733354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:48:10.733529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:48:10.781155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:48:10.781381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:48:10.782094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:48:10.782314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:48:10.785440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:48:10.792323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:48:10.792521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:48:10.793204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:48:10.793537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:48:10.793857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:48:10.794944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:48:10.905488Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:48:11.845398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:48:11.845597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:48:11.845787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:48:11.845963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:48:11.846006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:48:11.899974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:48:11.902153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:48:11.932608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.933257Z node 1
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:11.934115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:11.951633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:11.999195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.999546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:11.999835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:12.037923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:12.042360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:12.042973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:12.043563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:12.050122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:12.053969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:12.054162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:12.055152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:12.055275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:12.055315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:12.055586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:12.055650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:12.055792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:12.055865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:12.058199Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:12.058246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:12.058439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:12.058481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:12.058823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:12.058882Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:12.058979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:12.059016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:12.059058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:12.059084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:12.059117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:12.059172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:12.059204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:12.059229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:12.059289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:12.059321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:12.059349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:12.060983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:12.061077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:12.061110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cationSubscriber, SendToSchemeshard, txId 106 2025-04-17T17:48:33.476658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 106, at schemeshard: 72057594046678944 2025-04-17T17:48:33.476697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-17T17:48:33.476733Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 106, at schemeshard: 72057594046678944 2025-04-17T17:48:33.512991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 5000006 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-04-17T17:48:33.513082Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-17T17:48:33.513262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 5000006 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-04-17T17:48:33.513315Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-17T17:48:33.513440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 106:0, left await: 0, at schemeshard: 72057594046678944 2025-04-17T17:48:33.513488Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2025-04-17T17:48:33.516891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:33.517147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:33.517209Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2025-04-17T17:48:33.517305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-04-17T17:48:33.517488Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:33.519854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-04-17T17:48:33.520012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000007 2025-04-17T17:48:33.520419Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-17T17:48:33.520569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 12884904040 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:33.520655Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000007 2025-04-17T17:48:33.521589Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 128 -> 129 2025-04-17T17:48:33.521869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:48:33.521951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:48:33.535190Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=tx_controller.cpp:214;event=finished_tx;tx_id=106; FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-04-17T17:48:33.540923Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:33.540985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:48:33.541241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:48:33.541437Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:33.541489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:209:2211], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-04-17T17:48:33.541538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:209:2211], at schemeshard: 72057594046678944, txId: 106, path id: 3 2025-04-17T17:48:33.541647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:33.541706Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:48:33.541810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-17T17:48:33.545779Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:33.545928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:33.545975Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-17T17:48:33.546034Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-17T17:48:33.546084Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:48:33.546865Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:33.546960Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:48:33.546992Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-17T17:48:33.547023Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2025-04-17T17:48:33.547055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:48:33.547128Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-17T17:48:33.560770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-17T17:48:33.561904Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:48:33.562878Z node 3 :TX_TIERING ERROR: fline=manager.cpp:158;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2025-04-17T17:48:33.563219Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:48:33.579174Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-04-17T17:48:33.579245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-17T17:48:33.579433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2025-04-17T17:48:33.581223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:33.581350Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:48:33.581382Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-17T17:48:33.581504Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:48:33.581546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:48:33.581582Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:48:33.581611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:48:33.581647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-17T17:48:33.581711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2323] message: TxId: 106 2025-04-17T17:48:33.581782Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:48:33.581830Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T17:48:33.581858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-17T17:48:33.581969Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:48:33.586526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T17:48:33.586746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:551:2522]
TestWaitNotification: OK eventTxId 106
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546
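Each publication in the blocks above is acknowledged with an "AckPublish ... version: N" record; a version of 18446744073709551615 (2^64 - 1, as published for the dropped table's path in the DropColumnTable output earlier) appears to serve as a drop tombstone. A minimal sketch for pulling those acks out of a captured log, assuming only the record format shown; `publications` is an illustrative name, not YDB tooling:

    import re

    ACK_RE = re.compile(
        r"AckPublish, at schemeshard: (\d+), txId: (\d+), "
        r"pathId: \[OwnerId: (\d+), LocalPathId: (\d+)\], version: (\d+)")
    DROP_VERSION = 2**64 - 1  # 18446744073709551615, seen on dropped paths above

    def publications(log_text):
        """Yield (txId, localPathId, version, is_drop) for each acked publish."""
        for _ss, tx, _owner, local, ver in ACK_RE.findall(log_text):
            version = int(ver)
            yield int(tx), int(local), version, version == DROP_VERSION

For txId 106 above this yields versions 8 and 14 for LocalPathId 2 and 3; in the txId 104 block earlier, LocalPathId 4 is acked with the tombstone version shortly before its shard is freed.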
>> TestDataErasure::SimpleDataErasureTest
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD]
|74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator
|74.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator
|74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient
>> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD]
>> TJaegerTracingConfiguratorTests::GlobalRules
>> TestDataErasure::DataErasureWithMerge
>> Viewer::JsonAutocompleteScheme [GOOD]
>> Viewer::JsonAutocompleteEmptyColumns
>> TOlapNaming::AlterColumnTableOk [GOOD]
>> TOlapNaming::AlterColumnTableFailed
>> TOlapNaming::CreateColumnTableOk [GOOD]
>> TOlapNaming::CreateColumnTableFailed
>> Viewer::Plan2SvgBad [FAIL]
>> TJaegerTracingConfiguratorTests::GlobalRules [GOOD]
>> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling
>> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD]
>> TConsoleTests::TestSetDefaultStorageUnitsQuota
>> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD]
>> TJaegerTracingConfiguratorTests::RequestTypeThrottler
>> TOlapNaming::CreateColumnTableFailed [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates
>> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD]
>> TJaegerTracingConfiguratorTests::RequestTypeSampler
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058]
recipient: [1:108:2140] 2025-04-17T17:48:15.459050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:15.459151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:15.459207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:15.459238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:15.459286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:15.459308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:15.459365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:15.459423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:15.459691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:16.774388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:16.774773Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:16.901611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:16.901959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:16.902079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:17.020864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:17.023174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:17.030150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:17.030659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:17.056785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:17.071296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:17.071650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:17.072831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:17.073534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:17.073873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:17.077032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-04-17T17:48:17.143495Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:48:18.027125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:18.027310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:18.027463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:18.027615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:18.027667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:18.036828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:18.039211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:18.041494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:18.042115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:18.046513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:18.046828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:18.063078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:18.063128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:18.063448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:18.090737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:18.091117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:18.091699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:18.092017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:18.108492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:18.132051Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:18.138483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:18.147571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:18.148473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:18.149034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:18.152021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:18.153254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:18.155131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:18.156372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:18.163243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:18.163282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:18.163440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:18.163473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:18.163744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:18.163786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:18.163866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:18.163905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:18.163936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:18.163958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:18.163981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:18.164024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:18.164051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:18.164072Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:18.164120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:18.164149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:18.164173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:18.165501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:18.165595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:18.165628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... State, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:45.785031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:45.785066Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:45.786649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:45.786708Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:45.786746Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:45.790926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:45.791232Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:45.791509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:45.791565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:45.791707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:45.794379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:45.794558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:45.795488Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:45.795648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:45.795696Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:45.796478Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:45.796536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:45.796714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:45.796786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:45.799360Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:45.799443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:45.799626Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:45.799670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:45.800078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:45.800127Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:45.800239Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:45.800275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:45.800319Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:45.800352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:45.800390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:45.800436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:45.800476Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:45.800507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:45.800575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:45.800614Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:45.800650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:45.801139Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:45.801232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:45.801271Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:48:45.801309Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:48:45.801349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:45.801440Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:48:45.804714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:48:45.805407Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T17:48:45.809250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:45.810241Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:45.811185Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-04-17T17:48:45.812504Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-04-17T17:48:45.835573Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-04-17T17:48:45.837054Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:48:45.838915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:45.839067Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-17T17:48:45.840048Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T17:48:45.840249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:48:45.840290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:48:45.840653Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:48:45.840751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:48:45.840784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:284:2275] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T17:48:45.843742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:45.844004Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:48:45.844182Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 2025-04-17T17:48:45.846369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:45.846491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:48:45.846732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:48:45.846765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:48:45.847100Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:48:45.847184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:48:45.847215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:291:2282] TestWaitNotification: OK eventTxId 102 >> HttpRequest::AnalyzeServerless [GOOD] >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TestDataErasure::SimpleDataErasureTest [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [FAIL] Test command err: 2025-04-17T17:47:15.258670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340124734342888:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:15.264437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:47:17.082703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:18.366340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:19.048088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:19.059436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:19.059624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:19.065915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9638, node 1 2025-04-17T17:47:20.063183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:20.063209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:20.063216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:20.063328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:47:20.214597Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340124734342888:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:20.214659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:64228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:24.413111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
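The METADATA_PROVIDER messages above follow the usual poll-until-ready bootstrap pattern: the probe of //Root/.metadata/initialization/migrations keeps failing with scheme_cache_undelivered_message, LookupError, or timeout until the scheme cache comes up, and the actor reschedules itself. Below is a minimal standalone sketch of that retry loop with linear backoff; the EProbe type and WaitForPath helper are illustrative assumptions, not the actual actor-based TTableExistsActor from table_exists.cpp.

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    // Illustrative only: poll a path-existence probe with linear backoff
    // until it stops failing transiently, mirroring the repeated
    // "cannot detect path existence ... error=timeout" retries above.
    enum class EProbe { Ok, Fatal, Transient };

    bool WaitForPath(const std::function<EProbe()>& probe, int maxAttempts) {
        using namespace std::chrono_literals;
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            switch (probe()) {
                case EProbe::Ok:
                    return true;
                case EProbe::Fatal:
                    return false;
                case EProbe::Transient:
                    std::cerr << "cannot detect path existence, attempt "
                              << attempt << ", retrying\n";
                    std::this_thread::sleep_for(attempt * 100ms);  // backoff
            }
        }
        return false;  // still unavailable after maxAttempts
    }

    int main() {
        int calls = 0;
        // Succeeds on the third probe, like a scheme cache that comes up late.
        bool ok = WaitForPath([&] {
            return ++calls < 3 ? EProbe::Transient : EProbe::Ok;
        }, 10);
        std::cout << (ok ? "path is up\n" : "gave up\n");
    }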
2025-04-17T17:47:24.609963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:47:24.655055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.702629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T17:47:30.439884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340189158852860:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:30.441268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:30.446864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340189158852880:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:30.467549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:47:30.663791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340189158852882:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:47:30.763808Z node 1 :TX_PROXY ERROR: Actor# [1:7494340189158852933:2387] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:35.943159Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340211630861323:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:35.962519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:47:37.304663Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:37.636225Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:38.099142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:38.099225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:38.117609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15018, node 2 2025-04-17T17:47:39.600781Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:39.601087Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:39.601097Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:39.601616Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:47:40.655068Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494340211630861323:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:40.655141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:16157 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
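The "Transaction 281474976710661 completed, doublechecking" retry followed by the accepted "path exist, request accepts it" TX_PROXY error above is the usual idempotent-create handshake: once the CREATE RESOURCE POOL transaction lands, the creator re-describes the path and treats "already exists" as success. A self-contained sketch of the pattern follows, with an in-memory set standing in for the schemeshard; Propose and EnsureExists are hypothetical helpers, not the actual TPoolCreatorActor interface.

    #include <iostream>
    #include <set>
    #include <string>

    // Illustrative only: idempotent create with a post-commit doublecheck.
    // AlreadyExists is accepted as success, matching the
    // "path exist, request accepts it" message in the log above.
    static std::set<std::string> scheme;  // hypothetical scheme state

    enum class EStatus { Ok, AlreadyExists };

    EStatus Propose(const std::string& path) {
        // A second proposer observes AlreadyExists instead of failing.
        return scheme.insert(path).second ? EStatus::Ok
                                          : EStatus::AlreadyExists;
    }

    bool EnsureExists(const std::string& path) {
        Propose(path);  // Ok and AlreadyExists are both acceptable
        // Doublecheck: verify the path is actually visible before
        // reporting success, rather than trusting the propose status alone.
        return scheme.count(path) > 0;
    }

    int main() {
        std::cout << EnsureExists(".metadata/workload_manager/pools/default")
                  << EnsureExists(".metadata/workload_manager/pools/default")
                  << '\n';  // prints "11": both calls succeed
    }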
2025-04-17T17:47:44.857503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:45.867921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:47:45.980775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:46.082689Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2025-04-17T17:48:02.995924Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494340328393548257:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:03.050639Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:03.522155Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:03.580090Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:03.580167Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:03.586560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18367, node 3 2025-04-17T17:48:03.776776Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:03.776799Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:03.776806Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:03.776930Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
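The recurring "(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream" above is errno EAGAIN surfacing from a non-blocking socket read: the reader hit the stream before the peer had written anything. The conventional POSIX handling waits for readability with poll() and retries instead of treating EAGAIN as fatal; the sketch below shows that generic pattern with plain POSIX calls, not the ydb socket wrapper that raised the exception here.

    #include <cerrno>
    #include <cstddef>
    #include <poll.h>
    #include <unistd.h>

    // Illustrative only: read from a non-blocking fd, retrying on EAGAIN
    // (errno 11, "Resource temporarily unavailable") after poll() reports
    // readability, instead of raising an error as the log above shows.
    ssize_t ReadRetrying(int fd, void* buf, size_t len, int timeoutMs) {
        for (;;) {
            ssize_t n = ::read(fd, buf, len);
            if (n >= 0)
                return n;  // data, or clean EOF when n == 0
            if (errno != EAGAIN && errno != EWOULDBLOCK && errno != EINTR)
                return -1;  // real error
            struct pollfd pfd{fd, POLLIN, 0};
            if (::poll(&pfd, 1, timeoutMs) <= 0)
                return -1;  // timed out or poll itself failed
        }
    }

The bounded poll() timeout is the important design choice: it turns a busy transient (peer not ready yet) into a retriable wait while still letting the caller fail deterministically if the peer never shows up, which is what these test harness reads would need to avoid hanging.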
2025-04-17T17:48:07.367147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:07.381354Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:48:07.410087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:48:07.437344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:07.515913Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T17:48:07.997707Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494340328393548257:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:07.997804Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:20.886148Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:20.908787Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:20.925299Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:20.925394Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:20.927227Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6247, node 4 2025-04-17T17:48:21.066648Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:21.066671Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:21.066679Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:21.066800Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14567 (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2025-04-17T17:48:33.758764Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494340463860589333:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:33.759130Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:34.006372Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:34.019214Z node 5 
:HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:34.019340Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:34.028121Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21072, node 5 2025-04-17T17:48:34.236984Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:34.237009Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:34.237019Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:34.237169Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14862 2025-04-17T17:48:38.678593Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494340463860589333:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:38.678668Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [FAIL] >> Viewer::FloatPointJsonQuery |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2025-04-17T17:48:45.473527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:45.474069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:45.474124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e51/r3tmp/tmpKoWZjV/pdisk_1.dat 2025-04-17T17:48:45.884987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:45.938221Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:45.979641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:45.979789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:45.992304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:46.078626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:46.557838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:46.557961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:874:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:46.558106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:46.563109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:48:46.712579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:48:46.778713Z node 1 :TX_PROXY ERROR: Actor# [1:961:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:48:47.131301Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2dw4jw4y6fk5wvmdfzf6bs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY2ZmQ3OWEtZGJkNjJmYS1mOTUxY2I4Ni01NmYxODcxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: 2025-04-17T17:48:08.701259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:08.701441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:08.701598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001559/r3tmp/tmpJ1BZtI/pdisk_1.dat 2025-04-17T17:48:09.800503Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7359, node 1 2025-04-17T17:48:10.680178Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:10.680239Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:10.680269Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:10.681014Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:48:10.683440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:10.828557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:10.828680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:10.849088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31642 2025-04-17T17:48:11.759001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:24.496673Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T17:48:24.829480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:24.833377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:24.923095Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:48:24.956164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:26.209396Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.216966Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.231453Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.231593Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.231658Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.232953Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.233284Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.233341Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.233392Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:26.512055Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:26.522974Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:26.590462Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:27.124240Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:27.209820Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T17:48:27.209977Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T17:48:27.358815Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T17:48:27.360525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T17:48:27.360823Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T17:48:27.360888Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T17:48:27.360961Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T17:48:27.361049Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T17:48:27.361142Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T17:48:27.361224Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T17:48:27.365775Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T17:48:27.480728Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:48:27.483291Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1867:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:48:27.608236Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-04-17T17:48:27.674610Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2616] 2025-04-17T17:48:27.675386Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1903:2616], schemeshard id = 72075186224037897 2025-04-17T17:48:27.821863Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T17:48:28.059080Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T17:48:28.059793Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T17:48:28.060484Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T17:48:28.208757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T17:48:28.705324Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T17:48:28.706635Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T17:48:29.424903Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T17:48:30.400699Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T17:48:30.474932Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T17:48:31.450721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:48:32.192707Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:32.483161Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T17:48:32.483424Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T17:48:32.485067Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2589:2944], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T17:48:32.490799Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2590:2945] 2025-04-17T17:48:32.491612Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2590:2945], schemeshard id = 72075186224037899 2025-04-17T17:48:33.768875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2718:3243], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:33.769066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:33.834432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T17:48:34.262499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:48:34.262768Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:48:34.263075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:48:34.263225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:48:34.263354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:48:34.263483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:48:34.263599Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:48:34.263729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:48:34.263879Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2873:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17: ... 
ARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:48:37.006706Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.034006Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.057845Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.092775Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.126539Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.139909Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.175264Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.185971Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.202980Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:37.218934Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T17:48:39.216185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3597:3338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:39.404431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:39.433446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899 2025-04-17T17:48:41.218012Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.220170Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.220906Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.224966Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.226244Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.227688Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.229050Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.229698Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.231959Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:41.232325Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T17:48:43.269335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4324:3412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:43.269838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:43.365996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-17T17:48:43.548504Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.550067Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.552576Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.553671Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.555868Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.558750Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.559181Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.561007Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.562336Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T17:48:43.563711Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; waiting actualization: 0/0.000015s 2025-04-17T17:48:46.109024Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4589:4439] 2025-04-17T17:48:46.141305Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4585:3495] , Record { OperationId: "\000\000\000\000\034=1\270\222\005\007`mR7\234" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2025-04-17T17:48:46.141397Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId= =1`mR7 2025-04-17T17:48:46.141444Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId= =1`mR7 , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. 
OperationId: 000000071x66w94187c1pn4dww' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:48:40.687414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:40.687503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:40.687543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:40.687576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:40.687633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:40.687852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:40.688840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:40.689701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:40.691177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:41.014282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:41.014343Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:41.171459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:41.174694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:41.175740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:41.252406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:41.255193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:41.264260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:41.265100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:41.304134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:41.320608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:41.320991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:41.322059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:41.323320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:41.323992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:41.326722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:41.382713Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:48:42.140777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:42.141269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:42.143100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:42.146975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:42.147611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:42.172366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:42.174971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:42.178417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:42.178846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:42.179577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:42.180305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:42.209098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:42.209570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:42.209978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:42.219892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:42.219940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:42.219987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:42.220042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:42.237149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:42.239639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:42.241778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:42.255730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:42.257481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:42.257841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:42.260546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:42.260595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:42.261813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:42.280399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:42.307613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:42.308363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:42.314672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:42.315019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:42.318501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:42.318827Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:42.321014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:42.321693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:42.323118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:42.323472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:42.323511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:42.323544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:42.324521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:42.324848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:42.325537Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:42.325930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:42.326375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:42.342231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:42.343327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:42.343683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 944 2025-04-17T17:48:47.296755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1981:3649], Recipient [1:828:2711]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1983:3651] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-17T17:48:47.296787Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:48:47.296814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409551 2025-04-17T17:48:47.296903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:828:2711], Recipient [1:292:2276]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 1 Status: COMPLETED 2025-04-17T17:48:47.296939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-17T17:48:47.296990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-17T17:48:47.297028Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 62 ms, next wakeup# 599.938000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-17T17:48:47.297072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-04-17T17:48:47.307515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-04-17T17:48:47.310140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-17T17:48:47.310222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T17:48:47.310625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1989:3657], Recipient [1:292:2276]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1990:3658] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:48:47.310661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:48:47.310689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-04-17T17:48:47.310784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:292:2276]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-04-17T17:48:47.310816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:47.310847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:47.310917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:47.310959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-17T17:48:47.311011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T17:48:47.311060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T17:48:47.746244Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:292:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:47.746299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:47.746433Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:47.746455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:47.746488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:828:2711]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:47.746502Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:47.746540Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:292:2276], Recipient [1:292:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:47.746563Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 
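Aside (not captured log output): the surrounding records show the RootDataErasureManager polling the BS controller — SendRequestToBSC followed by ScheduleRequestToBSC with Interval# 1.000000s — until TEvControllerShredResponse reports Completed: true. A minimal C++ sketch of reading the Progress10k field, assuming it encodes shred progress in ten-thousandths (the responses in this run report 0, 5000, then 10000); the helper name is hypothetical, not a YDB API:

#include <cstdio>
#include <initializer_list>

// Hypothetical helper: Progress10k appears to report shred progress in units
// of 1/10000, so 5000 corresponds to 50% and 10000 to a completed shred.
static double ShredProgressPercent(unsigned progress10k) {
    return progress10k / 10000.0 * 100.0;
}

int main() {
    // Values taken from the TEvControllerShredResponse records in this log.
    for (unsigned p : {0u, 5000u, 10000u}) {
        std::printf("Progress10k=%u -> %.0f%%\n", p, ShredProgressPercent(p));
    }
    return 0;
}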
2025-04-17T17:48:47.746625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2409], Recipient [1:455:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:47.746641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:47.746696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:828:2711], Recipient [1:828:2711]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:47.746712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:47.788275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:292:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:47.788346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:47.788397Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T17:48:47.788651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:292:2276]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-04-17T17:48:47.788681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:47.788708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:47.788810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:47.788841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-17T17:48:47.788897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T17:48:47.789794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T17:48:48.176893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:828:2711]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.176972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.177041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:292:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.177065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.177113Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.177135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.177211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2409], Recipient [1:455:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.177235Z node 1 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.177301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:828:2711], Recipient [1:828:2711]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.177331Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.177395Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:292:2276], Recipient [1:292:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.177416Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.225174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:292:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:48.225277Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:48.225322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T17:48:48.225617Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:292:2276]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-17T17:48:48.225660Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:48.225694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:48.225803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:48.225873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-17T17:48:48.225958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.936000s, Timestamp# 1970-01-01T00:00:05.109000Z 2025-04-17T17:48:48.225999Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-04-17T17:48:48.235060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-17T17:48:48.235918Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:2011:3679], Recipient [1:292:2276]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:48.235986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:48.236033Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:48:48.236151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:274:2265], Recipient [1:292:2276]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-17T17:48:48.236191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-17T17:48:48.236234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] |74.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |74.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> TestDataErasure::DataErasureRun3Cycles >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:39.419215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:39.419609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:39.421031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:39.421357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:39.421686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:39.422053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:39.422795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:39.423761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:39.430412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:39.896573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:39.896879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:40.072393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:40.073198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:40.080789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:40.183495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:40.186233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-17T17:48:40.201389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:40.210482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:40.263501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:40.285217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:40.285582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:40.291589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:40.291899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:40.292205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:40.298677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:40.374679Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:48:40.932402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:40.934369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:40.936301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:40.941478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:40.942138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:40.960201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:40.963377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:40.966162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:40.966841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:40.967748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:40.968085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:40.987569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:40.987630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:40.987972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:41.001201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:41.001248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:41.001894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:41.003744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:41.033292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:41.054429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:41.054832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:41.055622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:41.055727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:41.055783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:41.055994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:41.056036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:41.056169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:41.056857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:41.088993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:41.089305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:41.090901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T17:48:41.092110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:41.100518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:41.101956Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:41.103248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:41.103649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:41.104649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:41.104678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:41.105820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:41.106154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:41.114042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:41.114382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:41.114787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:41.115174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:41.115823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:41.120576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:41.120683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:41.120711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ing# 1 shards at schemeshard 72075186233409551 2025-04-17T17:48:48.442359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:1015:2864], Recipient [1:835:2719]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409554 Status: OK 2025-04-17T17:48:48.442407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-17T17:48:48.442454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409551 2025-04-17T17:48:48.442511Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409554, shardIdx# 72075186233409551:4 in# 63 ms, next wakeup in# 10.803000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409551 2025-04-17T17:48:48.442558Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed.
Send response to root schemeshard 2025-04-17T17:48:48.442587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-04-17T17:48:48.445190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409551, NeedResponseComplete# false 2025-04-17T17:48:48.447680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409551, NeedResponseComplete# true 2025-04-17T17:48:48.447964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:835:2719], Recipient [1:293:2277]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-04-17T17:48:48.448029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-17T17:48:48.448098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-17T17:48:48.448156Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 65 ms, next wakeup# 595.801000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-17T17:48:48.448241Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. Send request to BS controller 2025-04-17T17:48:48.457559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-17T17:48:48.457655Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-17T17:48:48.458045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-04-17T17:48:48.458101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:48.458147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:48.458256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:48.458298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-17T17:48:48.458368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T17:48:48.458434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T17:48:48.880613Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.880702Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.880816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:293:2277], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.880851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.892601Z node 1
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:461:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.892682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.892759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:835:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.892786Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:48.892859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:461:2414], Recipient [1:461:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.892887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.892962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:835:2719], Recipient [1:835:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.892988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:48.931042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:48.931127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:48.931177Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-17T17:48:48.931496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-04-17T17:48:48.931534Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:48.931569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:48.931645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:48.931676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-17T17:48:48.931737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T17:48:48.931783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T17:48:49.242601Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:49.242686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:49.242784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:293:2277], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:49.242817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:49.254506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:461:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:49.254578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:49.254653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:835:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:49.254684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:49.254758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:461:2414], Recipient [1:461:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:49.254788Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:49.254859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:835:2719], Recipient [1:835:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:49.254885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:49.290580Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:49.290665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:49.290708Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-17T17:48:49.291001Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-04-17T17:48:49.291037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:49.291069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:49.291137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:49.291175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-17T17:48:49.291223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-04-17T17:48:49.298730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-17T17:48:49.299663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3577:4935], Recipient [1:293:2277]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:49.299739Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:49.299777Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:48:49.299880Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:2788:4302], Recipient [1:293:2277]: 
NKikimrScheme.TEvDataErasureInfoRequest 2025-04-17T17:48:49.299917Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-17T17:48:49.299953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TestDataErasure::DataErasureWithSplit [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TSchemeShardTTLTests::ShouldCheckQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:73:2058] recipient: [1:59:2100] 2025-04-17T17:48:33.761346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:33.761500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:33.761539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:33.761590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:33.761646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:33.761674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:33.761787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:33.761853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:33.762159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:33.866680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:33.866737Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:33.880825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:33.880952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:33.881098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:33.888008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-17T17:48:33.888237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:33.888850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:33.889082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:33.889977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:33.891094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:33.891149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:33.891240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:33.891282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:33.891323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:33.891536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:33.894333Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:148:2058] recipient: [1:16:2063] 2025-04-17T17:48:34.037831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:34.038026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:34.042334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:34.042591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:34.042654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:34.043297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:34.043415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:34.043615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:34.043669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:34.043711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:34.043741Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:34.044251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:34.044296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:34.044328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:34.044711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:34.044747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:34.044791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:34.044842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:34.052404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:34.052978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:34.053154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:34.054057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:34.055016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 74 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:34.055105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:34.055398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:34.055467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:34.055636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:34.055715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:34.056483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:34.056546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:34.056697Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:34.056733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:124:2135], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:34.057097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:34.057140Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:34.057229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:34.057269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:34.057313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:34.057341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:34.057375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:34.057436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:34.057483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:34.057514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:34.057580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:34.057618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:34.057652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:34.066923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:34.067057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:34.067095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... 
d::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:50.262750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.262777Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.274556Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:50.274619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:50.274695Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.274720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.315379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:50.315432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:50.315929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.315966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.328992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1008:2872], Recipient [1:275:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1467 Memory: 93107 Storage: 5022813 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-17T17:48:50.329051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T17:48:50.329097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.1467 2025-04-17T17:48:50.329203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T17:48:50.329239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T17:48:50.329475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1013:2874], Recipient [1:275:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2444 Memory: 93131 Storage: 5125318 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-17T17:48:50.329510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T17:48:50.329540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.2444 2025-04-17T17:48:50.329620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T17:48:50.340535Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:50.340602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:50.340688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.340719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:50.362490Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:183:2176]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:50.362559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:50.362588Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T17:48:50.362705Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T17:48:50.362734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T17:48:50.362761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 2 2025-04-17T17:48:50.362816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-17T17:48:50.362846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-17T17:48:50.362964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:5 data size 5019511 row count 49 2025-04-17T17:48:50.363045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72075186233409546:5 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 49, DataSize 5019511 2025-04-17T17:48:50.363113Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:5 with partCount# 1, rowCount# 49, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-17T17:48:50.363177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409550 2025-04-17T17:48:50.363220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 data size 5121950 row count 50 2025-04-17T17:48:50.363258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-04-17T17:48:50.363297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-17T17:48:50.363323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409551 2025-04-17T17:48:50.363379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-17T17:48:50.363556Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:186:2178], Recipient [1:183:2176]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-17T17:48:50.363590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:50.363617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:50.363661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:50.363686Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-17T17:48:50.363740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-04-17T17:48:50.363783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s 2025-04-17T17:48:50.364267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-17T17:48:50.370909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1195:3029], Recipient [1:183:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:50.370980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:50.371024Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:48:50.371188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:169:2169], Recipient [1:183:2176]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-17T17:48:50.371219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-17T17:48:50.371268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
|74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD]
>> Viewer::ServerlessWithExclusiveNodes [GOOD]
>> Viewer::SharedDoesntShowExclusiveNodes
|74.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest
>> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD]
>> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions
|74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut
|74.9%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut
|74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut
>> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD]
>> TJaegerTracingConfiguratorTests::SharedThrottlingLimits
|74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest
>> TestDataErasure::DataErasureWithMerge [GOOD]
>> TOlapNaming::AlterColumnTableFailed [GOOD]
>> TPopulatorTest::MakeDir
>> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD]
>> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD]
>> TConsoleTests::TestRestartConsoleAndPools
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:48:08.196613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1,
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:08.196704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:08.196752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:08.196789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:08.196826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:08.196855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:08.196919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:08.196989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:08.197550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:08.556488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:08.556554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:08.650144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:08.655072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:08.655428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:08.690694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:08.690877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:08.691497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.691793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:08.694821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.702703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:08.702781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.702942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:08.702994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:08.703030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:08.703144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.726740Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: 
[1:15:2062] 2025-04-17T17:48:08.909825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:08.910782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.911424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:08.912591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:08.912866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.939517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.939961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:08.940365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.940478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:08.940608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:08.940673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:08.946148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.946228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:08.946266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:08.958985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.959057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.959097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.959141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.962285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:08.971775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:08.971977Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:08.972861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:08.972982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:08.973030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.973279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:08.973367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:08.973521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:08.973595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:08.984188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:08.984369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:08.985405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:08.985545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:08.990781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:08.990989Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:08.991241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:08.991275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.991316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:08.991362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.991400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:08.991436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:08.991472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:08.991500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:08.991572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:08.991606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:08.991634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:09.000223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:09.000716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:09.000995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2025-04-17T17:48:51.334304Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2025-04-17T17:48:51.334334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2025-04-17T17:48:51.334439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:48:51.334501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:48:51.334597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:48:51.336375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2025-04-17T17:48:51.336416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2025-04-17T17:48:51.336503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2025-04-17T17:48:51.336528Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2025-04-17T17:48:51.337439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-17T17:48:51.337481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-17T17:48:51.338081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2025-04-17T17:48:51.338191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2025-04-17T17:48:51.339298Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2025-04-17T17:48:51.339335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2025-04-17T17:48:51.339497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2025-04-17T17:48:51.339526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 2025-04-17T17:48:51.339653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2025-04-17T17:48:51.339680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2025-04-17T17:48:51.339775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2025-04-17T17:48:51.339803Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2025-04-17T17:48:51.343368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T17:48:51.343423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-17T17:48:51.343579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:48:51.343607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:48:51.343687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T17:48:51.343715Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T17:48:51.343806Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-17T17:48:51.343848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-17T17:48:51.343930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-17T17:48:51.343954Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-17T17:48:51.344078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-17T17:48:51.344107Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-17T17:48:51.344175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-17T17:48:51.344199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-17T17:48:51.344329Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-17T17:48:51.344358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-17T17:48:51.345239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-17T17:48:51.345279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-17T17:48:51.345360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-17T17:48:51.345387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-17T17:48:51.345473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-17T17:48:51.345496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-17T17:48:51.345561Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-04-17T17:48:51.345587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-17T17:48:51.345664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-17T17:48:51.345690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-17T17:48:51.345789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2025-04-17T17:48:51.345814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-04-17T17:48:51.358877Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-17T17:48:51.358939Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-17T17:48:51.359101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-17T17:48:51.359135Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-17T17:48:51.359211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-17T17:48:51.359237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-04-17T17:48:51.359293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-17T17:48:51.359318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-17T17:48:51.359392Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-17T17:48:51.359420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-17T17:48:51.359475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-17T17:48:51.359500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-17T17:48:51.359568Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2025-04-17T17:48:51.359594Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2025-04-17T17:48:51.359651Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2025-04-17T17:48:51.359677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-04-17T17:48:51.371099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2025-04-17T17:48:51.371170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-04-17T17:48:51.371286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2025-04-17T17:48:51.371315Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-04-17T17:48:51.371390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2025-04-17T17:48:51.371416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-04-17T17:48:51.371486Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2025-04-17T17:48:51.371510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2025-04-17T17:48:51.371589Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2025-04-17T17:48:51.371615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-04-17T17:48:51.371661Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:43 2025-04-17T17:48:51.371686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-04-17T17:48:51.371778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2025-04-17T17:48:51.371824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-04-17T17:48:51.371999Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2025-04-17T17:48:51.373329Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:48:51.373580Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 300us result status StatusPathDoesNotExist 2025-04-17T17:48:51.373754Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:48:51.374526Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-04-17T17:48:51.374626Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 114us result status StatusPathDoesNotExist 2025-04-17T17:48:51.374712Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest
>> TPopulatorTest::MakeDir [GOOD]
>> TPopulatorTest::RemoveDir
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithMerge [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:73:2058] recipient: [1:59:2100] 2025-04-17T17:48:43.372464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:43.372527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:43.372562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout#
0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:43.372592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:43.372639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:43.372663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:43.372733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:43.372790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:43.373028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:43.787727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:43.787783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:43.834498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:43.835162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:43.836424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:43.885367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:43.886627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:43.892145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:43.893592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:43.902635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:43.913659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:43.914042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:43.914511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:43.915201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:43.915585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:43.919069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:43.937656Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:148:2058] recipient: [1:16:2063] 2025-04-17T17:48:44.505425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:44.514949Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:44.516338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:44.519137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:44.519726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:44.523707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:44.523823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:44.523976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:44.524027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:44.524067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:44.524096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:44.524537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:44.524568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:44.524596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:44.524880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:44.524912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:44.524947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:44.524990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:44.527979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:44.528338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:44.528512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:44.529257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep 
Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:44.529356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 74 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:44.529401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:44.529587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:44.529630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:44.529754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:44.529835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:44.530308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:44.530352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:44.530476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:44.530507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:124:2135], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:44.530763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:44.530797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:44.530874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:44.530906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:44.530941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:44.530965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:44.530998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:44.531043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:44.531087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:44.531112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:44.531158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:44.531189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:44.531214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:44.532781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:44.532875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:44.532908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... 1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.266704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.266829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.266860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.279201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.279290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.279378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.279407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.316139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.316204Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.316303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.316332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.327563Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.327628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.327711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.327738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.362613Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.362675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.362755Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.362781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.374554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.374626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.374720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.374750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.410657Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.410727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.411087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.411122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.422861Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1218:3031], Recipient [1:275:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2553 Memory: 94331 Storage: 10148063 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-17T17:48:52.422926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T17:48:52.422977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.2553 2025-04-17T17:48:52.423095Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 
InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T17:48:52.423135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T17:48:52.446533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.446602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:52.446696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.446729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:52.457441Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:52.457525Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:52.457559Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T17:48:52.457659Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T17:48:52.457699Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T17:48:52.457728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 1 2025-04-17T17:48:52.457802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:48:52.457841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T17:48:52.458005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 data size 10141461 row count 99 2025-04-17T17:48:52.458076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 99, DataSize 10141461 2025-04-17T17:48:52.458151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 99, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-17T17:48:52.458245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409551 2025-04-17T17:48:52.458321Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-17T17:48:52.458505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:186:2178], Recipient [1:183:2176]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-17T17:48:52.458543Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:52.458572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:52.458620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:52.458645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-17T17:48:52.458705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-04-17T17:48:52.458747Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s 2025-04-17T17:48:52.459196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-17T17:48:52.461866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1396:3186], Recipient [1:183:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:52.461929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:52.461964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:48:52.462130Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:169:2169], Recipient [1:183:2176]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-17T17:48:52.462165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-17T17:48:52.462219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 |74.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:52.001055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:52.001146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:52.001179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:52.001207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:52.001242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:52.001267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:52.001313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-04-17T17:48:52.001374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:52.001678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:52.079964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:52.080031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:52.094642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:52.094780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:52.094963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:52.101080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:52.101371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:52.101980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:52.102289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:52.106285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:52.107550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:52.107610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:52.107709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:52.107754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:52.107789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:52.107956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.114248Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:48:52.245983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:52.246285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.246492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:52.246705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:52.246752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.249053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:52.249170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:52.249339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.249391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:52.249421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:52.249451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:52.251375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.251424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:52.251455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:52.253157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.253201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.253251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:52.253302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:52.256672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:52.258505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:52.258695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:52.259596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:52.259726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:52.259787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:52.260032Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-04-17T17:48:52.260082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:52.260256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:52.260330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:52.262365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:52.262408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:52.262583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:52.262622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:52.262941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.262981Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:52.263083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:52.263115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:52.263165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:52.263199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:52.263237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:52.263294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:52.263328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:52.263355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:52.263426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:52.263467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:52.263495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:52.265214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:52.265310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:52.265347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2025-04-17T17:48:52.796730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2025-04-17T17:48:52.796872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:48:52.796955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:48:52.809432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:52.809501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:48:52.809715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:48:52.809977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:52.810020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:48:52.810082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 4 2025-04-17T17:48:52.810711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.810776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:48:52.811589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:48:52.811739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:48:52.811786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:48:52.811831Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-17T17:48:52.811869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:48:52.812948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:48:52.813032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:48:52.813063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:48:52.813089Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 
72057594046678944, LocalPathId: 4], version: 3 2025-04-17T17:48:52.813118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T17:48:52.813180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:48:52.816823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1439 } } 2025-04-17T17:48:52.816885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-17T17:48:52.817004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1439 } } 2025-04-17T17:48:52.817096Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1439 } } 2025-04-17T17:48:52.818115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 551 RawX2: 4294969792 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:48:52.818161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-17T17:48:52.818330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 551 RawX2: 4294969792 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:48:52.818378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:48:52.818474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 551 RawX2: 4294969792 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:48:52.818533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:52.818591Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.818641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:48:52.818676Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-17T17:48:52.821163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:48:52.826647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:48:52.826827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.826937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.827301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.827352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:48:52.827463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:48:52.827509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:48:52.827552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:48:52.827580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:48:52.827613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:48:52.827702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:414:2381] message: TxId: 103 2025-04-17T17:48:52.827750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:48:52.827792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:48:52.827826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:48:52.827931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:48:52.830131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:48:52.830209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:581:2518] TestWaitNotification: OK eventTxId 103 W0000 00:00:1744912132.830707 37806 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2025-04-17T17:48:52.832749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:52.833068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.833167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: 
/MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-17T17:48:52.833477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2025-04-17T17:48:52.836409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:52.836559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:09.042909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:09.043000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:09.043040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:09.043069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:09.043105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:09.043131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:09.043191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:09.043255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:09.043565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:09.238836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:09.238892Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T17:48:09.253346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:09.253463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:09.253621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:09.267490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:09.267767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:09.268374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:09.268646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:09.301944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:09.303265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:09.303330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:09.303423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:09.303460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:09.303493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:09.303651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.327664Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:48:09.495316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:09.495538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.495714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:09.495972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:09.496037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.503463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:09.503612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:09.503853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.503913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:09.503961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:09.504010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:09.511207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.511289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:09.511341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:09.513392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.513458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.513502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:09.513554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:09.516938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:09.528013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:09.528241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:09.529209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:09.529337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:09.529383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:09.529665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:09.529731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:09.529889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:09.529967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:09.533093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:09.533133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:09.533302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:09.533362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:09.533655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:09.533713Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:09.533804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:09.533834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:09.533867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:09.533903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:09.533939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:09.533972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:09.534005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:09.534031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:09.534090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:09.534129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:09.534156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:09.535823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:09.535920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:09.535951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409592 TxId: 101 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-17T17:48:52.323712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409593 TxId: 101 2025-04-17T17:48:52.323750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409593, partId: 0 2025-04-17T17:48:52.323821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409593 TxId: 101 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:48:52.348055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.348320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.348474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.348597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.348680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.348817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.348918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.349030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.357045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.357280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.357387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.357517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.357664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.357821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.357954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.358052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.363205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.363405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.363511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.363643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.363781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.363902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.364024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.364135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.365989Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:48:52.366151Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:48:52.366226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:48:52.366282Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:48:52.366328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:48:52.366372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T17:48:52.366462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2772:4037] message: TxId: 
101 2025-04-17T17:48:52.366526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:48:52.366613Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:48:52.366656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:48:52.367926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-17T17:48:52.372201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:48:52.372284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:2773:4038] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T17:48:52.376067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TestTable" AlterSchema { AddColumns { Name: "New Column" Type: "Int32" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:52.376402Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:48:52.376693Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-04-17T17:48:52.379867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "update parse error: Invalid name for column \'New Column\'. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:52.380038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. 
in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TestTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:48:52.380434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:48:52.380486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:48:52.381022Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:48:52.381147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:48:52.381194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:3566:4758] TestWaitNotification: OK eventTxId 102 >> TPopulatorTest::RemoveDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-04-17T17:48:53.504237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:53.504300Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-17T17:48:53.602221Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-04-17T17:48:53.602313Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-17T17:48:53.603607Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.603701Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.603735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.604428Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
[1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-04-17T17:48:53.604469Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-17T17:48:53.604591Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-17T17:48:53.604647Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-17T17:48:53.604682Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-17T17:48:53.604832Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:48:53.604871Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.604917Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.604946Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.605092Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:48:53.605124Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-17T17:48:53.605184Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
[1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-17T17:48:53.605221Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-17T17:48:53.605288Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-17T17:48:53.605711Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:48:53.605907Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:48:53.606612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:48:53.606650Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-17T17:48:53.606906Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:48:53.606934Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T17:48:53.619364Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: 
"/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-04-17T17:48:53.619425Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-17T17:48:53.619566Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.619607Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.619634Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.619806Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-04-17T17:48:53.619835Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-17T17:48:53.619930Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.619970Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.620031Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:53.620524Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-04-17T17:48:53.620574Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 
2025-04-17T17:48:53.620607Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-04-17T17:48:53.620777Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:48:53.620811Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-17T17:48:53.620841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-17T17:48:53.620891Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-17T17:48:53.620984Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:48:53.621009Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-17T17:48:53.621276Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:48:53.621426Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:48:53.621485Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:48:53.621508Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-17T17:48:53.621750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:48:53.621794Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> AutoConfig::GetASPoolsWith2CPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-04-17T17:48:54.178531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:54.178593Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-17T17:48:54.249316Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-04-17T17:48:54.249394Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-17T17:48:54.250667Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.250792Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.250833Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.251568Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: 
false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-04-17T17:48:54.251618Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-17T17:48:54.251745Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-17T17:48:54.251854Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-17T17:48:54.251890Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-17T17:48:54.252023Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:48:54.252053Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.252100Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.252128Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.252278Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:48:54.252313Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-17T17:48:54.252456Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-17T17:48:54.252515Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-17T17:48:54.252551Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-17T17:48:54.252959Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:48:54.253127Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:48:54.253410Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:48:54.253440Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-17T17:48:54.253617Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:48:54.253644Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T17:48:54.256252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-04-17T17:48:54.256312Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: 
owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-17T17:48:54.256460Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.256502Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.256533Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:48:54.256728Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-04-17T17:48:54.256771Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-17T17:48:5 ... 
oard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.269101Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.269142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.269725Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2025-04-17T17:48:54.269819Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2025-04-17T17:48:54.269854Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2025-04-17T17:48:54.269892Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-04-17T17:48:54.269976Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:96:2122], cookie# 101 2025-04-17T17:48:54.270031Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-04-17T17:48:54.270087Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-04-17T17:48:54.270152Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:97:2123], cookie# 101 2025-04-17T17:48:54.270210Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-04-17T17:48:54.270584Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:98:2124], cookie# 101 2025-04-17T17:48:54.270864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 101 2025-04-17T17:48:54.270955Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 101 2025-04-17T17:48:54.270998Z node 1 
:SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-17T17:48:54.271336Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 101 2025-04-17T17:48:54.271375Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-17T17:48:54.273229Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 232, preserialized size# 2 2025-04-17T17:48:54.273278Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-04-17T17:48:54.273401Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.273483Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.273519Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.273797Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 306, preserialized size# 0 2025-04-17T17:48:54.273849Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-04-17T17:48:54.273941Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-04-17T17:48:54.273987Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-04-17T17:48:54.274022Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:48:54.274102Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:96:2122], cookie# 101 2025-04-17T17:48:54.274132Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.274166Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.274249Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-17T17:48:54.274582Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:97:2123], cookie# 101 2025-04-17T17:48:54.274619Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-04-17T17:48:54.274663Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-04-17T17:48:54.274699Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-04-17T17:48:54.274734Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-04-17T17:48:54.274993Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 
2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:98:2124], cookie# 101 2025-04-17T17:48:54.275145Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:96:2122], cookie# 101 2025-04-17T17:48:54.275278Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:97:2123], cookie# 101 2025-04-17T17:48:54.275319Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-04-17T17:48:54.275535Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:98:2124], cookie# 101 2025-04-17T17:48:54.275561Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> KqpScanArrowInChanels::AllTypesColumns >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> KqpScanArrowFormat::AggregateCountStar >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |75.0%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> KqpScanArrowFormat::AllTypesColumns |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2025-04-17T17:48:33.936676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:33.937246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:33.937304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eb5/r3tmp/tmpsuxWuH/pdisk_1.dat 2025-04-17T17:48:34.512669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:34.522197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:48:34.523658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:34.524450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:48:34.531665Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-17T17:48:34.532065Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-17T17:48:34.567046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:34.574636Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-17T17:48:34.616219Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:333:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-17T17:48:34.616335Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-17T17:48:34.616467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:34.616501Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:48:34.616532Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:34.616568Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:48:34.616600Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:34.616690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T17:48:34.616901Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T17:48:34.616947Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T17:48:34.616982Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T17:48:34.617014Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T17:48:34.617143Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-17T17:48:34.631866Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-17T17:48:34.631941Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:333:2374]) 2025-04-17T17:48:34.632017Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T17:48:34.632472Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-17T17:48:34.632555Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Execute 2025-04-17T17:48:34.632591Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:34.632661Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Complete 2025-04-17T17:48:34.632786Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443331584 } 2025-04-17T17:48:34.632837Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-17T17:48:34.632879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:34.633008Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-17T17:48:34.633062Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:48:34.633097Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:34.633221Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T17:48:34.633255Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T17:48:34.633280Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T17:48:34.633305Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T17:48:34.644812Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-17T17:48:34.644880Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T17:48:34.728396Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-17T17:48:34.728484Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T17:48:34.728743Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-17T17:48:34.729320Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-17T17:48:34.729382Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-04-17T17:48:34.734714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-17T17:48:34.762456Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T17:48:34.762604Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-17T17:48:34.762645Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-17T17:48:34.762687Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-17T17:48:34.762966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:34.763027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-17T17:48:34.773964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-17T17:48:34.827318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:34.860092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:48:34.860805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:34.868902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-17T17:48:34.925449Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-17T17:48:35.049628Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T17:48:35.050045Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-17T17:48:35.053375Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-17T17:48:35.054033Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-17T17:48:35.062088Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-17T17:48:35.063788Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-17T17:48:35.073035Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-17T17:48:35.078404Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-17T17:48:35.088374Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-17T17:48:35.089668Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-17T17:48:35.090921Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-17T17:48:35.092335Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-17T17:48:35.095521Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-17T17:48:35.096279Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... TxBlockStorageResult::Execute(72075186224037888 OK) 2025-04-17T17:48:54.529125Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-04-17T17:48:54.529884Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-04-17T17:48:54.530250Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T17:48:54.530624Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-17T17:48:54.530674Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T17:48:54.530796Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-04-17T17:48:54.542758Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T17:48:54.544088Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-04-17T17:48:54.544164Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 3500 Status# 16 SEND to# [2:409:2404] Proxy marker# C1 2025-04-17T17:48:54.555065Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-04-17T17:48:54.635889Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned 2025-04-17T17:48:54.635998Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T17:48:54.636044Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-04-17T17:48:54.636285Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-04-17T17:48:54.636756Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715665 marker# C2 2025-04-17T17:48:54.636836Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 3500 
Status# 17 SEND EvProposeTransactionStatus to# [2:409:2404] Proxy 2025-04-17T17:48:54.637441Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T17:48:54.637500Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:48:54.637677Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:48:54.638091Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:48:54.638143Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:48:54.638212Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-17T17:48:54.638404Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-04-17T17:48:54.638532Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:48:54.638766Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:48:54.638844Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-17T17:48:54.639248Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:48:54.648283Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-17T17:48:54.648379Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:48:54.649251Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:48:54.649330Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:48:54.649393Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-04-17T17:48:54.649481Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:48:54.649600Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T17:48:54.649688Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-17T17:48:54.649732Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-17T17:48:54.649763Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-04-17T17:48:54.649821Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 acknowledged 2025-04-17T17:48:54.651048Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-04-17T17:48:54.653666Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-04-17T17:48:54.653744Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T17:48:54.654350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-17T17:48:54.654452Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2025-04-17T17:48:54.655394Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-04-17T17:48:54.655898Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:48:54.656982Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.656 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-17T17:48:54.657093Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-17T17:48:54.657164Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-17T17:48:54.657213Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-17T17:48:54.657259Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 TRACE ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-17T17:48:54.657322Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-17T17:48:54.657390Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-17T17:48:54.657446Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-17T17:48:54.657495Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 INFO ydb-core-tx-datashard-ut_minstep(pid=35483, 
tid=0x00007FC0C6A32D00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-17T17:48:54.657669Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 NOTE ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2025-04-17T17:48:54.657725Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 NOTE ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2025-04-17T17:48:54.657761Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Mzk4MmRjODAtNzM1ZTU4MTItYjZhM2ZlMDktYjdkNDE3YjI= 2025-04-17 17:48:54.657 NOTE ydb-core-tx-datashard-ut_minstep(pid=35483, tid=0x00007FC0C6A32D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-04-17T17:48:54.673241Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T17:48:54.673503Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-17T17:48:54.676046Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T17:48:54.676988Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T17:48:54.677533Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-04-17T17:48:54.677595Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-04-17T17:48:54.677714Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-04-17T17:48:54.677915Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-04-17T17:48:54.678049Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> KqpScanArrowFormat::SingleKey >> KqpScanArrowInChanels::AggregateNoColumn >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |75.1%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig >> TestDataErasure::DataErasureRun3Cycles [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2025-04-17T17:48:35.836335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:35.836395Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:36.307381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:42.601224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:42.601667Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:42.718932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:44.945431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:44.945489Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:44.996334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:46.140688Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:46.140749Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:46.215587Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:47.496452Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:47.496508Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:47.539395Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:49.072255Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:49.072322Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:49.122447Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:50.295249Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:50.295327Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:50.343451Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:51.775460Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:51.775533Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:51.850222Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:53.119043Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:53.119129Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:53.170800Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:54.530738Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:54.530816Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:54.571793Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:55.883202Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 268637729, Sender [11:162:2171], Recipient [11:354:2293]: {TEvControllerProposeConfigRequest Record# } 2025-04-17T17:48:55.883310Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvBlobStorage::TEvControllerProposeConfigRequest 2025-04-17T17:48:55.894071Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 269877760, Sender [11:315:2282], Recipient [11:314:2279]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936131 Status: OK ServerId: [11:407:2339] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:48:55.894171Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:48:55.929160Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:314:2279], Recipient [11:354:2293]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-gkdwnlcfnq.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 90 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 
ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-17T17:48:55.929529Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:314:2279], Recipient [11:361:2305]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-gkdwnlcfnq.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 90 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-17T17:48:55.929600Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionRequest 2025-04-17T17:48:55.929737Z node 11 :CMS_CONFIGS DEBUG: TConfigsProvider registered new subscription [11:314:2279]:1 2025-04-17T17:48:55.929867Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: check if update is required for volatile subscription [11:314:2279]:1 2025-04-17T17:48:55.929967Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: new config found for subscription [11:314:2279]:1 version= 2025-04-17T17:48:55.930108Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:314:2279]) send TEvConfigSubscriptionResponse 2025-04-17T17:48:55.933977Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273286169, Sender [11:408:2305], Recipient [11:314:2279]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionResponse { Generation: 1 Status { Code: SUCCESS } } 2025-04-17T17:48:55.934060Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionResponse 2025-04-17T17:48:55.934357Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:361:2305], Recipient [11:408:2305]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-04-17T17:48:55.934413Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-17T17:48:55.934501Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:314:2279]) send TEvConfigSubscriptionNotificationRequest: Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true 2025-04-17T17:48:55.934674Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:408:2305], Recipient [11:314:2279]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-04-17T17:48:55.934720Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-17T17:48:55.940808Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285129, Sender [11:314:2279], Recipient [11:354:2293]: NKikimr::NConsole::TEvConsole::TEvGetNodeConfigRequest { Node { NodeId: 11 Host: "ghrun-gkdwnlcfnq.auto.internal" Tenant: "" NodeType: "" } ItemKinds: 29 ItemKinds: 1 ItemKinds: 89 ItemKinds: 2 ItemKinds: 90 ItemKinds: 32 ItemKinds: 3 ItemKinds: 33 ItemKinds: 34 ItemKinds: 6 ItemKinds: 36 ItemKinds: 37 
ItemKinds: 8 ItemKinds: 38 ItemKinds: 10 ItemKinds: 39 ItemKinds: 43 ItemKinds: 73 ItemKinds: 75 ItemKinds: 46 ItemKinds: 77 ItemKinds: 80 ItemKinds: 81 ItemKinds: 52 ItemKinds: 54 ItemKinds: 25 ItemKinds: 55 ItemKinds: 26 } 2025-04-17T17:48:55.941112Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285129, Sender [11:314:2279], Recipient [11:361:2305]: NKikimr::NConsole::TEvConsole::TEvGetNodeConfigRequest { Node { NodeId: 11 Host: "ghrun-gkdwnlcfnq.auto.internal" Tenant: "" NodeType: "" } ItemKinds: 29 ItemKinds: 1 ItemKinds: 89 ItemKinds: 2 ItemKinds: 90 ItemKinds: 32 ItemKinds: 3 ItemKinds: 33 ItemKinds: 34 ItemKinds: 6 ItemKinds: 36 ItemKinds: 37 ItemKinds: 8 ItemKinds: 38 ItemKinds: 10 ItemKinds: 39 ItemKinds: 43 ItemKinds: 73 ItemKinds: 75 ItemKinds: 46 ItemKinds: 77 ItemKinds: 80 ItemKinds: 81 ItemKinds: 52 ItemKinds: 54 ItemKinds: 25 ItemKinds: 55 ItemKinds: 26 } 2025-04-17T17:48:55.941161Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvGetNodeConfigRequest 2025-04-17T17:48:55.941327Z node 11 :CMS_CONFIGS TRACE: Send TEvGetNodeConfigResponse: Status { Code: SUCCESS } Config { } 2025-04-17T17:48:55.941434Z node 11 :CMS_CONFIGS INFO: TLogSettingsConfigurator: got new config: 2025-04-17T17:48:55.941521Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GLOBAL has been changed from WARN to NOTICE 2025-04-17T17:48:55.941588Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GLOBAL has been changed from WARN to DEBUG 2025-04-17T17:48:55.941639Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT has been changed from WARN to NOTICE 2025-04-17T17:48:55.941671Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT has been changed from WARN to DEBUG 2025-04-17T17:48:55.941698Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TEST has been changed from WARN to NOTICE 2025-04-17T17:48:55.941725Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TEST has been changed from WARN to DEBUG 2025-04-17T17:48:55.941750Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component PROTOCOLS has been changed from WARN to NOTICE 2025-04-17T17:48:55.941793Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component PROTOCOLS has been changed from WARN to DEBUG 2025-04-17T17:48:55.941823Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to NOTICE 2025-04-17T17:48:55.941849Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to DEBUG 2025-04-17T17:48:55.941875Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_STATUS has been changed from WARN to NOTICE 2025-04-17T1 ... 
ZER has been changed from 0 to 10 2025-04-17T17:48:59.773478Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2025-04-17T17:48:59.773512Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2025-04-17T17:48:59.773558Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2025-04-17T17:48:59.773589Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2025-04-17T17:48:59.773617Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2025-04-17T17:48:59.773642Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2025-04-17T17:48:59.773671Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2025-04-17T17:48:59.773700Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2025-04-17T17:48:59.773725Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2025-04-17T17:48:59.773754Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2025-04-17T17:48:59.773799Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2025-04-17T17:48:59.773825Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2025-04-17T17:48:59.773852Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2025-04-17T17:48:59.773880Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2025-04-17T17:48:59.773907Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2025-04-17T17:48:59.773934Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2025-04-17T17:48:59.773959Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2025-04-17T17:48:59.773984Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2025-04-17T17:48:59.774032Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2025-04-17T17:48:59.774065Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2025-04-17T17:48:59.774089Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2025-04-17T17:48:59.774119Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component 
TX_CONVEYOR has been changed from NOTICE to ALERT 2025-04-17T17:48:59.774144Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2025-04-17T17:48:59.774168Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2025-04-17T17:48:59.778384Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2025-04-17T17:48:59.778447Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2025-04-17T17:48:59.778481Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2025-04-17T17:48:59.778517Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2025-04-17T17:48:59.778548Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2025-04-17T17:48:59.778575Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2025-04-17T17:48:59.778604Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component SSA_GRAPH_EXECUTION has been changed from NOTICE to ALERT 2025-04-17T17:48:59.778636Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component SSA_GRAPH_EXECUTION has been changed from DEBUG to ALERT 2025-04-17T17:48:59.778663Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component SSA_GRAPH_EXECUTION has been changed from 0 to 10 2025-04-17T17:48:59.778691Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2025-04-17T17:48:59.778742Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2025-04-17T17:48:59.778779Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2025-04-17T17:48:59.778813Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2025-04-17T17:48:59.778840Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2025-04-17T17:48:59.778867Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2025-04-17T17:48:59.778896Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2025-04-17T17:48:59.778924Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2025-04-17T17:48:59.778950Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2025-04-17T17:48:59.779026Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779060Z node 14 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779084Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2025-04-17T17:48:59.779114Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779144Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779169Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-04-17T17:48:59.779197Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779224Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779276Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2025-04-17T17:48:59.779308Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779335Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779361Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2025-04-17T17:48:59.779390Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779417Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779442Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2025-04-17T17:48:59.779472Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779499Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779526Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2025-04-17T17:48:59.779559Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779587Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779610Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2025-04-17T17:48:59.779639Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779664Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for 
the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779687Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2025-04-17T17:48:59.779731Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779760Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779785Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2025-04-17T17:48:59.779814Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from NOTICE to ALERT 2025-04-17T17:48:59.779839Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from DEBUG to ALERT 2025-04-17T17:48:59.779865Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2025-04-17T17:48:59.779997Z node 14 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:48:51.098704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:51.098804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:51.098854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:51.098896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:51.098942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:51.098972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:51.099026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:51.099119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:51.099483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:51.226651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:51.226714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:51.251058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:51.251181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:51.251330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:51.285369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:51.285753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:48:51.286475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:51.286831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:51.296672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:51.297911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:51.297987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:51.298110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:51.298155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:51.298214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:51.298393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.304974Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:48:51.449854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:51.450069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.450291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:51.450532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:51.450588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.452845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:51.452969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:51.453160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.453212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:51.453270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:51.453303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:51.455209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.455277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:51.455310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:51.456979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.457026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.457060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:51.457119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:51.467442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:51.469536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:51.469722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:51.470697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:51.470841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:51.470894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:51.471148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:51.471197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:51.471350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:51.471434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:51.475116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:51.475180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:51.475353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:51.475412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:51.475728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:51.475770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:51.475865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:51.475897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:51.475948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:51.475979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:51.476021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:51.476071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:51.476109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:51.476138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:51.476207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:51.476243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:51.476275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:51.478089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:51.478212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:51.478252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3409551, NeedResponseComplete# false 2025-04-17T17:48:58.859138Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:1010:2857], Recipient [1:834:2718]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409555 Status: OK 2025-04-17T17:48:58.859210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-17T17:48:58.859293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409551 2025-04-17T17:48:58.859367Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409555, shardIdx# 72075186233409551:5 in# 66 ms, next wakeup in# 8.934000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409551 2025-04-17T17:48:58.859442Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-04-17T17:48:58.859468Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-04-17T17:48:58.861544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# true 2025-04-17T17:48:58.861852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:834:2718], Recipient [1:293:2277]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-04-17T17:48:58.861895Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-17T17:48:58.861955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-17T17:48:58.862022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 67 ms, next wakeup# 593.933000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-17T17:48:58.862092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-04-17T17:48:58.871143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-17T17:48:58.871211Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-17T17:48:58.871519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-04-17T17:48:58.871565Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:58.871596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:58.871655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:58.871682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-17T17:48:58.871740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T17:48:58.871789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T17:48:59.422626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:834:2718]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.422710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.422792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.422818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.422870Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:458:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.422893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.422951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:293:2277], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.422976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.423040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:458:2410], Recipient [1:458:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.423060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.423113Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:834:2718], Recipient [1:834:2718]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.423134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.458548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:59.458625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:59.458660Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-17T17:48:59.458910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-04-17T17:48:59.458942Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:59.458974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:59.459048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:59.459080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-17T17:48:59.459141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T17:48:59.459182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T17:48:59.926510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:834:2718]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.926588Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.926659Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.926684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.926736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:458:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.926761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:48:59.926819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:834:2718], Recipient [1:834:2718]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.926846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.926916Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:293:2277], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.926943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.926993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:458:2410], Recipient [1:458:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.927019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:48:59.962473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:59.962550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T17:48:59.962587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-17T17:48:59.962828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-04-17T17:48:59.962865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T17:48:59.962898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T17:48:59.962969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T17:48:59.962999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-17T17:48:59.963046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.932000s, Timestamp# 1970-01-01T00:00:11.113000Z 2025-04-17T17:48:59.963075Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-04-17T17:48:59.967350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-17T17:48:59.967891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3599:4956], Recipient [1:293:2277]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:59.967945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:48:59.967980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:48:59.968127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:275:2266], Recipient [1:293:2277]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-17T17:48:59.968159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-17T17:48:59.968196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.1%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2025-04-17T17:48:44.528161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:44.528881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:44.528940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ec4/r3tmp/tmpuAbpTB/pdisk_1.dat 2025-04-17T17:48:46.972453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:47.003398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:48:47.005023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:47.005968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:48:47.020259Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-17T17:48:47.020329Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-17T17:48:47.056221Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:47.064297Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-17T17:48:47.109033Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:333:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-17T17:48:47.109172Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-17T17:48:47.109316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:47.109360Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:48:47.109397Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:47.109441Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:48:47.109476Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:47.109580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T17:48:47.109847Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T17:48:47.109903Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T17:48:47.109956Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T17:48:47.109998Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T17:48:47.110150Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-17T17:48:47.126903Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-17T17:48:47.127007Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:333:2374]) 2025-04-17T17:48:47.127099Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T17:48:47.127654Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-17T17:48:47.127787Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Execute 2025-04-17T17:48:47.127844Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:47.127925Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:333:2374])::Complete 2025-04-17T17:48:47.128103Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443331584 } 2025-04-17T17:48:47.128169Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-17T17:48:47.128227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:47.128414Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-17T17:48:47.128487Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T17:48:47.128526Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T17:48:47.128698Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T17:48:47.128739Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T17:48:47.128779Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T17:48:47.128814Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T17:48:47.139571Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-17T17:48:47.139671Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T17:48:47.213930Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-17T17:48:47.214090Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T17:48:47.214499Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-17T17:48:47.214940Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-17T17:48:47.215042Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-04-17T17:48:47.216064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-17T17:48:47.218240Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T17:48:47.218370Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-17T17:48:47.218457Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-17T17:48:47.218496Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-17T17:48:47.218839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:47.218906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-17T17:48:47.220039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-17T17:48:47.231522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:47.232770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:48:47.232841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:47.233747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-17T17:48:47.245103Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-17T17:48:47.271942Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T17:48:47.272056Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-17T17:48:47.272325Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-17T17:48:47.272399Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-17T17:48:47.272466Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-17T17:48:47.272642Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-17T17:48:47.273248Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-17T17:48:47.273420Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-17T17:48:47.274044Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-17T17:48:47.274417Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-17T17:48:47.274525Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-17T17:48:47.274597Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-17T17:48:47.274781Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-17T17:48:47.274869Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... 037889 2025-04-17T17:49:01.994719Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2025-04-17T17:49:01.994760Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2025-04-17T17:49:01.994938Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:49:01.995085Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:49:01.995141Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:49:01.995169Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:49:01.995200Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-17T17:49:02.006754Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:49:02.006857Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:49:02.008102Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-04-17T17:49:02.008156Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 33000 Status# 16 SEND to# [2:409:2404] Proxy marker# C1 2025-04-17T17:49:02.082609Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned 2025-04-17T17:49:02.082701Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T17:49:02.082740Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-04-17T17:49:02.082950Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2025-04-17T17:49:02.083295Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715665 marker# C2 2025-04-17T17:49:02.083363Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 33000 Status# 
17 SEND EvProposeTransactionStatus to# [2:409:2404] Proxy 2025-04-17T17:49:02.083890Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T17:49:02.083935Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:49:02.084079Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:02.084425Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:49:02.084465Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:49:02.084505Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-17T17:49:02.084666Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715665 keys extracted: 0 2025-04-17T17:49:02.084781Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:49:02.085000Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:49:02.085063Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-17T17:49:02.085408Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:49:02.087048Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2025-04-17T17:49:02.087117Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:49:02.087553Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T17:49:02.087636Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-17T17:49:02.087673Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-17T17:49:02.087697Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-04-17T17:49:02.087737Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 acknowledged 2025-04-17T17:49:02.087994Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:49:02.088056Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:49:02.088107Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-04-17T17:49:02.088210Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:49:02.088946Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-04-17T17:49:02.091446Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-04-17T17:49:02.091516Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T17:49:02.091965Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-17T17:49:02.092055Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 1 2025-04-17T17:49:02.092487Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-04-17T17:49:02.093058Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:49:02.093886Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.093 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-17T17:49:02.093988Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.093 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-17T17:49:02.094051Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.094 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-17T17:49:02.094093Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.094 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-17T17:49:02.094138Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.094 TRACE ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-17T17:49:02.098354Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.098 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-17T17:49:02.098490Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.098 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-17T17:49:02.098550Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.098 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-17T17:49:02.098606Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.098 INFO ydb-core-tx-datashard-ut_minstep(pid=35488, 
tid=0x00007F897DF50D00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43
2025-04-17T17:49:02.098845Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.098 NOTE ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060
2025-04-17T17:49:02.098909Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.098 NOTE ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE
2025-04-17T17:49:02.098950Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NzMyNzNkMTMtY2NhN2ZlNWMtNjk2NTU4NC04ZTRhNDQyZA== 2025-04-17 17:49:02.098 NOTE ydb-core-tx-datashard-ut_minstep(pid=35488, tid=0x00007F897DF50D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4
2025-04-17T17:49:02.114977Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-04-17T17:49:02.115148Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state
2025-04-17T17:49:02.117032Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480
2025-04-17T17:49:02.117936Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-04-17T17:49:02.118415Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889
2025-04-17T17:49:02.118493Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889
2025-04-17T17:49:02.118602Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2)
2025-04-17T17:49:02.118726Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2
2025-04-17T17:49:02.118839Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2
>> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD]
>> JsonProtoConversion::ProtoMapToJson [GOOD]
>> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD]
>> TConsoleTests::TestListTenants
|75.2%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log}
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD]
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD]
>> Viewer::SharedDoesntShowExclusiveNodes [GOOD]
>> Viewer::ServerlessWithExclusiveNodesCheckTable
|75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id
|75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id
>> JsonProtoConversion::JsonToProtoSingleValue [GOOD]
>> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD]
|75.2%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log}
>> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD]
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD]
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|75.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log}
|75.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id
>> TConsoleTests::TestSetConfig [GOOD]
>> TConsoleTests::TestTenantGeneration
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD]
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope
|75.2%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
|75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
|75.2%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|75.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
>> KqpScanArrowFormat::AllTypesColumns [GOOD]
>> KqpScanArrowFormat::AllTypesColumnsCellvec
|75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|75.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
>> KqpScanArrowInChanels::AllTypesColumns [GOOD]
>> KqpScanArrowInChanels::SingleKey
>> KqpScanArrowFormat::SingleKey [GOOD]
>> KqpScanArrowFormat::JoinWithParams
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1]
|75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view
|75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view
|75.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view
>> Viewer::JsonAutocompleteColumnsPOST [GOOD]
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD]
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD]
Test command err:
2025-04-17T17:47:33.946987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:114:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:47:33.947506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-17T17:47:33.947572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 8370, node 1
TClient is connected to server localhost:11767
2025-04-17T17:48:07.677654Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:48:07.678918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:48:07.679165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 22713, node 2
TClient is connected to server localhost:20295
2025-04-17T17:48:34.385687Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:335:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:48:34.386103Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:48:34.386587Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 6444, node 3
TClient is connected to server localhost:1901
2025-04-17T17:48:51.956681Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:339:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:48:51.957110Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:48:51.957421Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 2832, node 4
TClient is connected to server localhost:10993
2025-04-17T17:49:04.351900Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:49:04.352330Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:49:04.352499Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 3263, node 5
TClient is connected to server localhost:27529
>> TConsoleTests::TestListTenants [GOOD]
>> TConsoleTests::TestListTenantsExtSubdomain
>> KqpScanArrowFormat::AggregateCountStar [GOOD]
>> KqpScanArrowFormat::AggregateByColumn
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD]
Test command err:
2025-04-17T17:48:03.266492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:724:2426], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:48:03.267355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:48:03.267679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-17T17:48:03.268482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:721:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:03.269010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:03.269134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:04.399225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:05.499890Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:48:05.586757Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:48:08.380770Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 65484, node 1 TClient is connected to server localhost:64112 2025-04-17T17:48:09.282440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:09.289341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:09.294306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:09.295203Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:33.902455Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:33.931097Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:33.960120Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:33.960232Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:33.964511Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4764, node 3 2025-04-17T17:48:34.340089Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:34.340115Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:34.340125Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:34.340316Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11991 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:48:36.469102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:37.368523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:48:37.553810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2025-04-17T17:48:49.100317Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340530147244960:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:49.100363Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:49.636053Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:49.725398Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:49.725498Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:49.731181Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3333, node 4 2025-04-17T17:48:49.978699Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:49.978720Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:49.978729Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:49.978870Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8569 2025-04-17T17:48:50.635412Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:48:50.643377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:53.446750Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999873s 2025-04-17T17:48:53.447045Z node 4 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T17:48:53.447114Z node 4 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T17:48:54.102638Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494340530147244960:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:54.102737Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:48:54.352031Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340551622082117:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:48:54.352194Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:48:54.698778Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340551622082138:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:48:54.698920Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:48:54.699437Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340551622082143:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:48:54.706093Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480
2025-04-17T17:48:54.714687Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root
2025-04-17T17:48:54.714846Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys
2025-04-17T17:48:54.714860Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root
2025-04-17T17:48:54.714906Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys
2025-04-17T17:48:54.720377Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root
2025-04-17T17:48:54.720388Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root
2025-04-17T17:48:54.720517Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys
2025-04-17T17:48:54.720531Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys
2025-04-17T17:48:54.721077Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494340551622082145:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T17:48:54.793948Z node 4 :TX_PROXY ERROR: Actor# [4:7494340551622082198:2348] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:48:54.951190Z node 4 :GRPC_SERVER DEBUG: [0x51a000020a80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T17:48:54.951536Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d6e80] received request Name# SchemeOperationStatus ok# false data# ... false data# peer# 2025-04-17T17:49:03.952146Z node 5 :GRPC_SERVER DEBUG: [0x51b000270580] received request Name# Coordination/CreateNode ok# false data# peer# 2025-04-17T17:49:03.952165Z node 5 :GRPC_SERVER DEBUG: [0x51b000271a80] received request Name# Coordination/AlterNode ok# false data# peer# 2025-04-17T17:49:03.952421Z node 5 :GRPC_SERVER DEBUG: [0x51b000272180] received request Name# Coordination/DropNode ok# false data# peer# 2025-04-17T17:49:03.952436Z node 5 :GRPC_SERVER DEBUG: [0x51b000272880] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-04-17T17:49:03.952645Z node 5 :GRPC_SERVER DEBUG: [0x51b000272f80] received request Name# CreateDatabase ok# false data# peer# 2025-04-17T17:49:03.952692Z node 5 :GRPC_SERVER DEBUG: [0x51b000273d80] received request Name# GetDatabaseStatus ok# false data# peer# 2025-04-17T17:49:03.952873Z node 5 :GRPC_SERVER DEBUG: [0x51b000273680] received request Name# AlterDatabase ok# false data# peer# 2025-04-17T17:49:03.952986Z node 5 :GRPC_SERVER DEBUG: [0x51b000274480] received request Name# ListDatabases ok# false data# peer# 2025-04-17T17:49:03.953096Z node 5 :GRPC_SERVER DEBUG: [0x51b000274b80] received request Name# RemoveDatabase ok# false data# peer# 2025-04-17T17:49:03.953240Z node 5 :GRPC_SERVER DEBUG: [0x51b000275280] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-04-17T17:49:03.953338Z node 5 :GRPC_SERVER DEBUG: [0x51b000275980] received request Name# GetScaleRecommendation ok# false data# peer# 2025-04-17T17:49:03.953496Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a6980] received request Name# ListEndpoints ok# false data# peer# 2025-04-17T17:49:03.953575Z node 5 :GRPC_SERVER DEBUG: [0x51b000276080] received request Name# WhoAmI ok# false data# peer# 2025-04-17T17:49:03.953745Z node 5 :GRPC_SERVER DEBUG: [0x51b000276780] received request Name# NodeRegistration ok# false data# peer# 2025-04-17T17:49:03.953872Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a7080] received request Name# Scan ok# false data# peer# 2025-04-17T17:49:03.954062Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a7780] received request Name# GetShardLocations ok# false data# peer# 2025-04-17T17:49:03.954117Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a7e80] received request Name# DescribeTable ok# false data# peer# 2025-04-17T17:49:03.954345Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a8580] received request Name# CreateSnapshot ok# false data# peer# 2025-04-17T17:49:03.954606Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a8c80] received request Name# RefreshSnapshot ok# false data# peer# 2025-04-17T17:49:03.954868Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a9380] received request Name# DiscardSnapshot ok# false data# peer# 2025-04-17T17:49:03.955147Z node 5 :GRPC_SERVER DEBUG: [0x51b0002a9a80] received request Name# List ok# false data# peer# 
2025-04-17T17:49:03.955201Z node 5 :GRPC_SERVER DEBUG: [0x51b0002aa180] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-04-17T17:49:03.955426Z node 5 :GRPC_SERVER DEBUG: [0x51b0002aa880] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-04-17T17:49:03.955455Z node 5 :GRPC_SERVER DEBUG: [0x51b0002aaf80] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-04-17T17:49:03.955687Z node 5 :GRPC_SERVER DEBUG: [0x51b0002ab680] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-04-17T17:49:03.955744Z node 5 :GRPC_SERVER DEBUG: [0x51b0002abd80] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-04-17T17:49:03.955956Z node 5 :GRPC_SERVER DEBUG: [0x51b0002ac480] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-04-17T17:49:03.955993Z node 5 :GRPC_SERVER DEBUG: [0x51b0002ad280] received request Name# CreateStream ok# false data# peer# 2025-04-17T17:49:03.956194Z node 5 :GRPC_SERVER DEBUG: [0x51b0002ad980] received request Name# ListStreams ok# false data# peer# 2025-04-17T17:49:03.956250Z node 5 :GRPC_SERVER DEBUG: [0x51b0002ae080] received request Name# DeleteStream ok# false data# peer# 2025-04-17T17:49:03.956447Z node 5 :GRPC_SERVER DEBUG: [0x51b0002acb80] received request Name# DescribeStream ok# false data# peer# 2025-04-17T17:49:03.956493Z node 5 :GRPC_SERVER DEBUG: [0x51b0002ae780] received request Name# ListShards ok# false data# peer# 2025-04-17T17:49:03.956710Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b5580] received request Name# SetWriteQuota ok# false data# peer# 2025-04-17T17:49:03.956774Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b5c80] received request Name# UpdateStream ok# false data# peer# 2025-04-17T17:49:03.956947Z node 5 :GRPC_SERVER DEBUG: [0x51b0002aee80] received request Name# PutRecord ok# false data# peer# 2025-04-17T17:49:03.957044Z node 5 :GRPC_SERVER DEBUG: [0x51b0002af580] received request Name# PutRecords ok# false data# peer# 2025-04-17T17:49:03.957206Z node 5 :GRPC_SERVER DEBUG: [0x51b0002afc80] received request Name# GetRecords ok# false data# peer# 2025-04-17T17:49:03.957311Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b0380] received request Name# GetShardIterator ok# false data# peer# 2025-04-17T17:49:03.957444Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b0a80] received request Name# SubscribeToShard ok# false data# peer# 2025-04-17T17:49:03.957565Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b1880] received request Name# DescribeLimits ok# false data# peer# 2025-04-17T17:49:03.957698Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b1f80] received request Name# DescribeStreamSummary ok# false data# peer# 2025-04-17T17:49:03.957838Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b2680] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-04-17T17:49:03.957945Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b2d80] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-04-17T17:49:03.958093Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b3480] received request Name# UpdateShardCount ok# false data# peer# 2025-04-17T17:49:03.958207Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b3b80] received request Name# UpdateStreamMode ok# false data# peer# 2025-04-17T17:49:03.958363Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b4280] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-04-17T17:49:03.958447Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b4980] received request Name# DeregisterStreamConsumer ok# false data# peer# 
2025-04-17T17:49:03.958592Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b5080] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-04-17T17:49:03.958681Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b5780] received request Name# ListStreamConsumers ok# false data# peer# 2025-04-17T17:49:03.958830Z node 5 :GRPC_SERVER DEBUG: [0x51b0002b5e80] received request Name# AddTagsToStream ok# false data# peer# 2025-04-17T17:49:03.958914Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cac80] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-04-17T17:49:03.959157Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ca580] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-04-17T17:49:03.959354Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c9e80] received request Name# ListTagsForStream ok# false data# peer# 2025-04-17T17:49:03.959400Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b6a80] received request Name# MergeShards ok# false data# peer# 2025-04-17T17:49:03.959603Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b7880] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-17T17:49:03.959638Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b7f80] received request Name# SplitShard ok# false data# peer# 2025-04-17T17:49:03.959853Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b7180] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-17T17:49:03.959862Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b6380] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-17T17:49:03.960089Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b4e80] received request Name# SelfCheck ok# false data# peer# 2025-04-17T17:49:03.960133Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b4780] received request Name# NodeCheck ok# false data# peer# 2025-04-17T17:49:03.960340Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b2480] received request Name# CreateSession ok# false data# peer# 2025-04-17T17:49:03.960400Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b1d80] received request Name# DeleteSession ok# false data# peer# 2025-04-17T17:49:03.960600Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b1680] received request Name# AttachSession ok# false data# peer# 2025-04-17T17:49:03.960684Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b0880] received request Name# BeginTransaction ok# false data# peer# 2025-04-17T17:49:03.960838Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b0180] received request Name# CommitTransaction ok# false data# peer# 2025-04-17T17:49:03.960954Z node 5 :GRPC_SERVER DEBUG: [0x51b0004afa80] received request Name# RollbackTransaction ok# false data# peer# 2025-04-17T17:49:03.961076Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b4080] received request Name# ExecuteQuery ok# false data# peer# 2025-04-17T17:49:03.961214Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b3280] received request Name# ExecuteScript ok# false data# peer# 2025-04-17T17:49:03.961301Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b2b80] received request Name# FetchScriptResults ok# false data# peer# 2025-04-17T17:49:03.961465Z node 5 :GRPC_SERVER DEBUG: [0x51b0004af380] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-17T17:49:03.961565Z node 5 :GRPC_SERVER DEBUG: [0x51b00049f080] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-17T17:49:03.961721Z node 5 :GRPC_SERVER DEBUG: [0x51b000498e80] received request Name# RestartTablet ok# false data# peer# 2025-04-17T17:49:03.961832Z node 5 :GRPC_SERVER DEBUG: [0x51b00049e280] received request Name# CreateLogStore ok# false data# peer# 2025-04-17T17:49:03.961976Z node 5 :GRPC_SERVER DEBUG: 
[0x51b00049db80] received request Name# DescribeLogStore ok# false data# peer# 2025-04-17T17:49:03.962064Z node 5 :GRPC_SERVER DEBUG: [0x51b00049d480] received request Name# DropLogStore ok# false data# peer# 2025-04-17T17:49:03.962239Z node 5 :GRPC_SERVER DEBUG: [0x51b00049cd80] received request Name# AlterLogStore ok# false data# peer# 2025-04-17T17:49:03.962473Z node 5 :GRPC_SERVER DEBUG: [0x51b00049c680] received request Name# CreateLogTable ok# false data# peer# 2025-04-17T17:49:03.962770Z node 5 :GRPC_SERVER DEBUG: [0x51b00049bf80] received request Name# DescribeLogTable ok# false data# peer# 2025-04-17T17:49:03.963002Z node 5 :GRPC_SERVER DEBUG: [0x51b00049b880] received request Name# DropLogTable ok# false data# peer# 2025-04-17T17:49:03.963010Z node 5 :GRPC_SERVER DEBUG: [0x51b00049b180] received request Name# AlterLogTable ok# false data# peer# 2025-04-17T17:49:03.963246Z node 5 :GRPC_SERVER DEBUG: [0x51b00049e980] received request Name# Login ok# false data# peer# 2025-04-17T17:49:03.963257Z node 5 :GRPC_SERVER DEBUG: [0x51b00049aa80] received request Name# DescribeReplication ok# false data# peer# 2025-04-17T17:49:03.963493Z node 5 :GRPC_SERVER DEBUG: [0x51b00049a380] received request Name# DescribeView ok# false data# peer# >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> FolderServiceTest::TFolderService |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |75.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic >> FolderServiceTest::TFolderServiceAdapter >> TUserAccountServiceTest::Get >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> TAccessServiceTest::Authenticate >> TSchemeShardViewTest::AsyncCreateDifferentViews >> FolderServiceTest::TFolderServiceTransitional |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TServiceAccountServiceTest::Get >> TSchemeShardViewTest::DropView >> TServiceAccountServiceTest::Get [GOOD] >> TSchemeShardViewTest::EmptyName >> TAccessServiceTest::PassRequestId >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> TSchemeShardViewTest::EmptyQueryText >> 
KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TOlap::StoreStats [GOOD] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> TOlap::StoreStatsQuota >> TSchemeShardViewTest::DropView [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> TSchemeShardViewTest::AsyncDropSameView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:14.999227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:14.999355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:14.999392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:14.999426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:14.999468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:14.999513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:14.999574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:14.999648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:14.999993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:15.169344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:15.169401Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:15.216052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:15.216542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:15.216721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:15.240493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:15.240643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-17T17:49:15.241185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:15.241470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:49:15.248427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:15.249893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:15.249962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:15.250165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:49:15.250243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:15.250281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:49:15.250402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.268387Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:49:15.512388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:15.512655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.512888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:49:15.513149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:15.513216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.520339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:15.520592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:49:15.520831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.520896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:49:15.520936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:49:15.520968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:49:15.527242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.527320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:15.527357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:49:15.533330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.533402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.533442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:15.533500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:49:15.537218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:15.542537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:49:15.542747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:49:15.543768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:15.543920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:15.543967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:15.544234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:49:15.544285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:15.544441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:15.544546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:49:15.546860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:15.546908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:15.547121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:15.547162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:49:15.547531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.547574Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:49:15.547675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:15.547709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:15.547749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:15.547807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:15.547860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:49:15.547903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:15.547938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:49:15.547970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:49:15.548034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:15.548073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:49:15.548108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:49:15.550032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:15.550157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:15.550235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
4-17T17:49:15.629367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-04-17T17:49:15.629392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5
2025-04-17T17:49:15.629422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2
2025-04-17T17:49:15.629634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:49:15.629670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:15.629697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-04-17T17:49:15.629744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3
2025-04-17T17:49:15.630674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:15.630756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:15.630782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-04-17T17:49:15.630821Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5
2025-04-17T17:49:15.630855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-17T17:49:15.631700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:15.631772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:15.631800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-04-17T17:49:15.631823Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2
2025-04-17T17:49:15.631851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-04-17T17:49:15.631907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-04-17T17:49:15.635158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T17:49:15.636040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
TestModificationResult got TxId: 101, wait until txId: 101
TestModificationResults wait txId: 102
TestModificationResult got TxId: 102, wait until txId: 102
TestModificationResults wait txId: 103
TestModificationResult got TxId: 103, wait until txId: 103
TestWaitNotification wait txId: 101
2025-04-17T17:49:15.636359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-17T17:49:15.636404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
TestWaitNotification wait txId: 102
2025-04-17T17:49:15.636508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-17T17:49:15.636529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
TestWaitNotification wait txId: 103
2025-04-17T17:49:15.636588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-04-17T17:49:15.636636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-04-17T17:49:15.637165Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-17T17:49:15.637253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-17T17:49:15.637289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2325]
2025-04-17T17:49:15.637433Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-17T17:49:15.637503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-04-17T17:49:15.637546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T17:49:15.637567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:334:2325]
2025-04-17T17:49:15.637693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-04-17T17:49:15.637722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:334:2325]
TestWaitNotification: OK eventTxId 101
TestWaitNotification: OK eventTxId 102
TestWaitNotification: OK eventTxId 103
2025-04-17T17:49:15.638259Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:15.638560Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 244us result status StatusSuccess
2025-04-17T17:49:15.638992Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:15.639463Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:15.639634Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 167us result status StatusSuccess
2025-04-17T17:49:15.639885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:15.640369Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:15.640548Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 177us result status StatusSuccess
2025-04-17T17:49:15.640783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardViewTest::EmptyName [GOOD]
>> TSchemeShardViewTest::AsyncCreateSameView
>> FolderServiceTest::TFolderService [GOOD]
>> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD]
>> TConsoleTests::TestSchemeShardErrorForwarding
>> TSchemeShardViewTest::EmptyQueryText [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T17:49:15.737625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:49:15.737722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:15.737759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:49:15.737809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:49:15.737850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:49:15.737895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:49:15.737960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:15.738039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:49:15.738418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:49:15.824364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:49:15.824423Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:15.841599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:49:15.842120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:49:15.842326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:49:15.855651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:49:15.855858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:49:15.856494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:15.856714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:49:15.860077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:15.861346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:15.861400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:15.861581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:49:15.861633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:15.861667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:49:15.861789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:49:15.872113Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:49:16.008181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:16.008443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.008694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:49:16.008921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:16.008990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.013040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.013218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:49:16.013455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.013524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:49:16.013559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:49:16.013612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:49:16.016189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.016253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:16.016293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:49:16.018437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.018493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.018530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:16.018583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.022902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:16.025478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:49:16.025693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:49:16.026728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.026882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:16.026939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:16.027304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:49:16.027363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:16.027529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:16.027613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:49:16.030112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:16.030155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:16.030386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:16.030426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:49:16.030813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.030859Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:49:16.030963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:16.030998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.031044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:16.031088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.031126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:49:16.031164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.031224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:49:16.031258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:49:16.031336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:16.031372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:49:16.031405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:49:16.033233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:16.033369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:16.033403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
dReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
TestModificationResults wait txId: 102
2025-04-17T17:49:16.089688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:16.089884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView
2025-04-17T17:49:16.090014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0
2025-04-17T17:49:16.090063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:16.092654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944
2025-04-17T17:49:16.092849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView
2025-04-17T17:49:16.093063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.093130Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState
2025-04-17T17:49:16.093186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1
2025-04-17T17:49:16.093305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:16.095426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816
2025-04-17T17:49:16.095584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003
2025-04-17T17:49:16.095989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.096099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:16.096153Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003
2025-04-17T17:49:16.096309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240
2025-04-17T17:49:16.096487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:16.096568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
FAKE_COORDINATOR: Erasing txId 102
2025-04-17T17:49:16.098797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:16.098849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:16.099016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-17T17:49:16.099186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:16.099228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-04-17T17:49:16.099271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-04-17T17:49:16.099583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.099637Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-04-17T17:49:16.099758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-17T17:49:16.099791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-17T17:49:16.099828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-17T17:49:16.099860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-17T17:49:16.099898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false
2025-04-17T17:49:16.099938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-17T17:49:16.099972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-04-17T17:49:16.100031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-04-17T17:49:16.100106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-17T17:49:16.100162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-04-17T17:49:16.100201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-04-17T17:49:16.100231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-04-17T17:49:16.100936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:16.101031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:16.101066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-04-17T17:49:16.101127Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-04-17T17:49:16.101170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:16.101930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:16.102027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T17:49:16.102063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-04-17T17:49:16.102090Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-17T17:49:16.102125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-17T17:49:16.102317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-04-17T17:49:16.102798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:49:16.102845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-17T17:49:16.102910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:16.106029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T17:49:16.107042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T17:49:16.107126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-04-17T17:49:16.107364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-17T17:49:16.107405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-17T17:49:16.107810Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-17T17:49:16.107903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T17:49:16.107939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:323:2314]
TestWaitNotification: OK eventTxId 102
2025-04-17T17:49:16.108420Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:16.108606Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 228us result status StatusPathDoesNotExist
2025-04-17T17:49:16.108761Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T17:49:16.113245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:49:16.113329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:16.113364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:49:16.113404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:49:16.113448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:49:16.113489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:49:16.113547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:16.113617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:49:16.113950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:49:16.314744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:49:16.314802Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:16.330643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:49:16.331068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:49:16.331216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:49:16.345318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:49:16.345504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:49:16.346149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.346359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:49:16.352573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:16.353950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:16.354023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:16.354243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:49:16.354300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:16.354341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:49:16.354480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.368930Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:49:16.521609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:16.521850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.522059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:49:16.522309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:16.522362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.527510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.527680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:49:16.527890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.527945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:49:16.527989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:49:16.528029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:49:16.534994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.535067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:16.535102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:49:16.547304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.547389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.547432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:16.547496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.551198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:16.555091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:49:16.555292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:49:16.556296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.556449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:16.556501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:16.556745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:49:16.556789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:16.556947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:16.557030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:49:16.559465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:16.559509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:16.559688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:16.559747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:49:16.560098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.560147Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:49:16.560258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:16.560292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.560329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:16.560374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.560411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:49:16.560558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.560628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:49:16.560661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:49:16.560730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:16.560765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:49:16.560793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:49:16.568351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:16.568491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:16.568526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-17T17:49:16.568566Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-17T17:49:16.568607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:16.568730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-17T17:49:16.572154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-17T17:49:16.572677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
TestModificationResults wait txId: 101
2025-04-17T17:49:16.573286Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap
2025-04-17T17:49:16.591571Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265])
2025-04-17T17:49:16.594396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:16.594560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0
2025-04-17T17:49:16.594620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query"
2025-04-17T17:49:16.594708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944
2025-04-17T17:49:16.595447Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-17T17:49:16.600747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:16.600918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/
2025-04-17T17:49:16.601456Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
TestModificationResult got TxId: 101, wait until txId: 101
>> TSchemeShardViewTest::CreateView
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:49:16.400495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:49:16.400578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:16.400621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:49:16.400652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:49:16.400696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:49:16.400722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:49:16.400774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:16.400865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:49:16.401169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:49:16.558258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:49:16.558306Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:16.592420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:49:16.592534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:49:16.592689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:49:16.608818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:49:16.609094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:49:16.609588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.609882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:49:16.614557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:16.615923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:16.615984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:16.616109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:49:16.616152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:16.616192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:49:16.616367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.624469Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062]
2025-04-17T17:49:16.764972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:16.765173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.765351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:49:16.765619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:16.765685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.775189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.775346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:49:16.775575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.775636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:49:16.775670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:49:16.775721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:49:16.777668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.777728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:16.777761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:49:16.779675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.779738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:16.779775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:16.779830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:49:16.788906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:16.794958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:49:16.795159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:49:16.796134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:16.796272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator:
72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:16.796321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:16.796612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:16.796668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:16.796825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:16.796908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:16.803166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:16.803219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:16.803435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:16.803538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:16.803842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:16.803884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:16.803976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:16.804010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:16.804045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:16.804098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:16.804153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:16.804201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:16.804253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:16.804280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:16.804346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:16.804375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:16.804405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:16.806215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:16.806327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:16.806378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:49:16.806425Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:49:16.806463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:16.806569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:49:16.816880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:49:16.817427Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T17:49:16.818134Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Bootstrap 2025-04-17T17:49:16.856110Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Become StateWork (SchemeCache [1:275:2266]) 2025-04-17T17:49:16.863029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:16.863306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-04-17T17:49:16.863374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-04-17T17:49:16.863503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:49:16.863574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T17:49:16.863632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:16.864773Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:16.872153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-17T17:49:16.872316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-17T17:49:16.872627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:16.872691Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-04-17T17:49:16.872743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 
1/1 2025-04-17T17:49:16.872888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:16.873633Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:16.877209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T17:49:16.877351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-17T17:49:16.877686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:16.877815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:16.877864Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-17T17:49:16.877984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T17:49:16.878142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:16.878261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:49:16.883373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:16.883428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:16.883637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:16.883740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:16.883778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T17:49:16.884088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T17:49:16.884161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:16.884216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:49:16.884318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#101:0 progress is 1/1 2025-04-17T17:49:16.884358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:16.884398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:49:16.884426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:16.884458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T17:49:16.884499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:16.884533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:49:16.884573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:49:16.884642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:49:16.884688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T17:49:16.884724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-17T17:49:16.884751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:49:16.886082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:16.886201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:16.886239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:49:16.886278Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-17T17:49:16.886328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:16.890521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:16.890627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:16.890656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:49:16.890685Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:49:16.890715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:49:16.890794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, 
subscribers: 0 2025-04-17T17:49:16.893232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:49:16.894147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-04-17T17:49:12.677234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340629755069517:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:12.677944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000edc/r3tmp/tmpLQg83M/pdisk_1.dat 2025-04-17T17:49:13.219393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:13.219531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:13.221994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:13.250006Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:13.560314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:13.622646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:13.628183Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Connect to grpc://localhost:62204 2025-04-17T17:49:13.663021Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-17T17:49:13.766349Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:62204: Failed to connect to remote host: Connection refused 2025-04-17T17:49:13.778393Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-17T17:49:13.782360Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:62204: Failed to connect to remote host: Connection refused 2025-04-17T17:49:14.794490Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-17T17:49:14.810327Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 5 Not Found 2025-04-17T17:49:14.816437Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-17T17:49:14.823596Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TAccessServiceTest::Authenticate [GOOD] >> Viewer::TabletMerging [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:49:17.338319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:17.338416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:17.338458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:17.338495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:17.338540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:17.338576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:17.338633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
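An aside on the GRPC_CLIENT exchange above: the numeric statuses are standard gRPC codes (14 = UNAVAILABLE for the refused connections, 5 = NOT_FOUND for the unknown folder id), and the harness retries only the transient one before the final successful ResolveFoldersResponse. A minimal sketch of that retry shape in C++ with grpc++; the resolve_once callable is a stand-in assumption for the real FolderService stub call, not the actual ydb/library/ycloud client API.

    #include <grpcpp/grpcpp.h>
    #include <chrono>
    #include <functional>
    #include <thread>

    // Retry a unary RPC while it fails with a transient code.
    // 'resolve_once' stands in for the real FolderService call (assumption).
    grpc::Status ResolveWithRetry(const std::function<grpc::Status()>& resolve_once,
                                  int max_attempts = 3) {
        grpc::Status status;
        for (int attempt = 0; attempt < max_attempts; ++attempt) {
            status = resolve_once();
            // Status 14 in the log is UNAVAILABLE ("failed to connect ..."): retryable.
            if (status.error_code() != grpc::StatusCode::UNAVAILABLE) {
                break; // Status 5 (NOT_FOUND) and success are terminal outcomes.
            }
            std::this_thread::sleep_for(std::chrono::seconds(1));
        }
        return status;
    }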
2025-04-17T17:49:17.338726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:17.339072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:17.435953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:17.436015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:17.471330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:17.471483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:17.471668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:17.510599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:17.511039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:17.511753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:17.512110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:17.522033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:17.523418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:17.523496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:17.523633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:17.523686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:17.523734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:17.523942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.538486Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:17.837170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:17.837405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.837631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:17.837889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:17.837946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.851466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:17.851632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:17.851997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.852059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:17.852114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:17.852155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:17.854301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.854373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:17.854416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:17.856425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.856478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.856520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:17.856582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:17.860418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:17.862568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:17.862749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:17.863757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:17.863922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:17.863984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:17.864365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-17T17:49:17.864445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:17.864626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:17.864720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:17.867050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:17.867102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:17.867301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:17.867365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:17.867698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.867746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:17.867855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:17.867892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:17.867931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:17.867984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:17.868022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:17.868062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:17.868103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:17.868148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:17.868213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:17.868272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:17.868304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:17.870163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:17.870322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:17.870366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-17T17:49:17.940972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-17T17:49:17.941258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-17T17:49:17.941368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-17T17:49:17.941933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:49:17.942100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T17:49:17.942425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:17.942568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:17.942632Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-04-17T17:49:17.942773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-17T17:49:17.942938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:17.943006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:49:17.945275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:17.945321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:17.945467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:17.945630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-04-17T17:49:17.945674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T17:49:17.945740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:49:17.946076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.946126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:49:17.946253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:49:17.946291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:49:17.946334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:49:17.946368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:49:17.946412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T17:49:17.946467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:49:17.946509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:49:17.946558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:49:17.946629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:49:17.946667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T17:49:17.946701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T17:49:17.946737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T17:49:17.947531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:49:17.947680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:49:17.947722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:49:17.947774Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:49:17.947836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:17.948718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:49:17.948803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:49:17.948832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:49:17.948861Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:49:17.948909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:49:17.948997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T17:49:17.961723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:49:17.961821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:49:17.961920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:17.964910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:49:17.970653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:49:17.970818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-17T17:49:17.971185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:49:17.971230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-04-17T17:49:17.971325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:49:17.971346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:49:17.971871Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:49:17.971970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:49:17.972021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:330:2321] 2025-04-17T17:49:17.972201Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T17:49:17.972275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:49:17.972299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:330:2321] 
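One detail worth noting in the drop publication above: the deleted view's path is announced with Version: 18446744073709551615, which is exactly the maximum value of an unsigned 64-bit integer, so it orders after any live, incrementing path version. Reading it as a deletion tombstone is an inference from this log, not a statement from the schemeshard sources.

    #include <cstdint>
    #include <limits>

    // The version published for the dropped view equals uint64 max,
    // so any real path version compares below it.
    static_assert(std::numeric_limits<uint64_t>::max() == 18446744073709551615ULL,
                  "tombstone version seen in the log is 2^64 - 1");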
TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-04-17T17:49:17.972844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:17.973036Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 202us result status StatusPathDoesNotExist 2025-04-17T17:49:17.973163Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> AssignTxId::Basic [GOOD] >> TSchemeShardViewTest::ReadOnlyMode >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:49:17.543659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:17.543751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:17.543794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:17.543827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:17.543867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:17.543897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:17.543954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:17.544054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:17.544359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:17.659807Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:17.659859Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:17.687885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:17.688028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:17.688185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:17.697339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:17.697656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:17.698507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:17.698971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:17.715076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:17.716495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:17.716562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:17.716701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:17.716756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:17.716794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:17.717018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:17.744264Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:18.083875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:18.084116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.084324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:18.084536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:18.084587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.087540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:18.087737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:18.087962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.088042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:18.088098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:18.088142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:18.091145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.091209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:18.091250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:18.098588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.098688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.098730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:18.098793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:18.102746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:18.105825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:18.106032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:18.107133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:18.107264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:18.107317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:18.107619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:18.107673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:18.107828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:18.107926Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:18.110276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:18.110330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:18.110568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:18.110615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:18.110972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.111020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:18.111117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:18.111160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:18.111205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:18.111266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:18.111306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:18.111345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:18.111384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:18.111419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:18.111518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:18.111558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:18.111590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:18.113498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:18.113618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:18.113662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athStateCreate)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-17T17:49:18.159758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-17T17:49:18.159941Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:18.161753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T17:49:18.161925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-17T17:49:18.162317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:18.162437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:18.162493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-17T17:49:18.162610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T17:49:18.162780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:18.162846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:49:18.164990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:18.165040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:18.165224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:18.165356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:18.165414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T17:49:18.165459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:49:18.165865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
2025-04-17T17:49:18.165912Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:49:18.166016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:49:18.166050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:18.166090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:49:18.166129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:18.166165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T17:49:18.166224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:18.166258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:49:18.166288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:49:18.166370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:49:18.166417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T17:49:18.166458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-17T17:49:18.166486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:49:18.167275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:18.167389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:18.167433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:49:18.167484Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-17T17:49:18.167523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:18.168660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:18.168731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:18.168757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:49:18.168785Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:49:18.168834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:49:18.168908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T17:49:18.174279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:49:18.174415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-04-17T17:49:18.174694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:49:18.174749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-17T17:49:18.174875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:49:18.174899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-17T17:49:18.174948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:49:18.174983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:49:18.175452Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:49:18.175565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:49:18.175603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2293] 2025-04-17T17:49:18.175932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:49:18.176028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:49:18.176052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:302:2293] 2025-04-17T17:49:18.176203Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:49:18.176286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:49:18.176309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:302:2293] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-17T17:49:18.177090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:18.177290Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 244us result status StatusSuccess 2025-04-17T17:49:18.177641Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardViewTest::CreateView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-04-17T17:49:14.091823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340637653331851:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:14.092063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eaa/r3tmp/tmpwLm2w4/pdisk_1.dat 2025-04-17T17:49:14.655596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:14.655650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:14.658468Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:14.659662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:49:15.137129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-17T17:49:14.482033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340637575010053:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:14.483095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e95/r3tmp/tmpdyZiWI/pdisk_1.dat 2025-04-17T17:49:15.088740Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:15.147543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:15.147644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:15.159252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:15.621171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:15.658424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:15.715145Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Connect to grpc://localhost:19747 2025-04-17T17:49:15.719213Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-04-17T17:49:15.738372Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 7 Permission Denied 2025-04-17T17:49:15.739231Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-04-17T17:49:15.741472Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-04-17T17:49:14.087573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340637478959726:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:14.088472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ea8/r3tmp/tmpZ3TL6X/pdisk_1.dat 2025-04-17T17:49:14.612763Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:14.620473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:14.620549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:14.631127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:15.147290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:15.163243Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:15.268630Z node 1 :GRPC_CLIENT DEBUG: [517000013c08] Connect to grpc://localhost:32583 2025-04-17T17:49:15.352176Z node 1 :GRPC_CLIENT DEBUG: [517000013c08] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-17T17:49:15.380126Z node 1 :GRPC_CLIENT DEBUG: [517000013c08] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-04-17T17:49:15.381853Z node 1 :GRPC_CLIENT DEBUG: [517000070108] Connect to grpc://localhost:17458 2025-04-17T17:49:15.390423Z node 1 :GRPC_CLIENT DEBUG: [517000070108] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-17T17:49:15.430920Z node 1 :GRPC_CLIENT DEBUG: [517000070108] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-04-17T17:49:15.447567Z node 1 :GRPC_CLIENT DEBUG: [517000070108] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-17T17:49:15.466151Z node 1 :GRPC_CLIENT DEBUG: [517000070108] Status 5 Not Found 2025-04-17T17:49:15.466975Z node 1 :GRPC_CLIENT DEBUG: [517000013c08] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-17T17:49:15.469081Z node 1 :GRPC_CLIENT DEBUG: [517000013c08] Status 5 Not Found >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> TSchemeShardMoveTest::Reject >> TAccessServiceTest::PassRequestId [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:18.747714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:18.747793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:18.747821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:18.747847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:18.747881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:18.747913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:18.747955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:18.748007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-17T17:49:18.748266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:18.848234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:18.848290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:18.873803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:18.874356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:18.874544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:18.889834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:18.890043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:18.890743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:18.890965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:18.907466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:18.908972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:18.909050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:18.909266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:18.909320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:18.909366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:18.909491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:18.918114Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:19.137023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:19.137358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.137608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:19.137884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:19.137945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.145588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-17T17:49:19.145754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:19.146006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.146071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:19.146106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:19.146142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:19.151865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.151946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:19.152002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:19.158238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.158324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.158370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:19.158433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:19.169514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:19.172715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:19.172918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:19.173948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:19.174111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:19.174253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:19.174538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:19.174590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:19.174752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:19.174854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:19.181427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:19.181490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:19.181696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:19.181737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:19.182135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.182208Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:19.182320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:19.182357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:19.182406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:19.182479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:19.182523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:19.182568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:19.182632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:19.182669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:19.182754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:19.182791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:19.182830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:19.184722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:19.184840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:19.184875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2025-04-17T17:49:19.215979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2025-04-17T17:49:19.216091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:49:19.216169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T17:49:19.216231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:19.217118Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:19.239037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-17T17:49:19.239272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-17T17:49:19.239590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.239653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2025-04-17T17:49:19.239710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-17T17:49:19.239873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:19.240923Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:19.242792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T17:49:19.242942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T17:49:19.243443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:19.243571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:19.243626Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-17T17:49:19.243773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T17:49:19.247876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:19.247982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T17:49:19.251155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:19.251214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:19.251429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:19.251557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:19.251597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T17:49:19.251636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T17:49:19.251972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.252020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T17:49:19.252152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:49:19.252187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:49:19.252223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:49:19.252252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:49:19.252290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T17:49:19.252329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:49:19.252363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T17:49:19.252392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T17:49:19.252467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:49:19.252504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-17T17:49:19.252535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-17T17:49:19.252568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:49:19.253385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 
PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:49:19.253496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:49:19.253539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:49:19.253576Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-17T17:49:19.253615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:19.254434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:49:19.254509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:49:19.254546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:49:19.254577Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:49:19.254602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:49:19.254668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-17T17:49:19.271239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:49:19.271558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2025-04-17T17:49:19.271784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:49:19.271843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:49:19.272279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:49:19.272383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:49:19.272416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:299:2290] TestWaitNotification: OK eventTxId 101 2025-04-17T17:49:19.272890Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:19.273069Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 220us result status StatusSuccess 2025-04-17T17:49:19.273411Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-04-17T17:49:13.673407Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340633904277522:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:13.679837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cca/r3tmp/tmprQ54l2/pdisk_1.dat 2025-04-17T17:49:14.219748Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:14.229518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:14.229592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:14.230838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24173 TServer::EnableGrpc on GrpcPort 27431, node 1 2025-04-17T17:49:14.542798Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:14.542822Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:14.542836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:14.542955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24173 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:14.997766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:15.020256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:17.728148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340651084147175:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:17.728259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:18.329599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:49:18.363107Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] OnActivateExecutor 2025-04-17T17:49:18.363183Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Execute 2025-04-17T17:49:18.370656Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Complete 2025-04-17T17:49:18.370722Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Execute 2025-04-17T17:49:18.370950Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Complete 2025-04-17T17:49:18.370959Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] SwitchToWork 2025-04-17T17:49:18.411252Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:27431" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-04-17T17:49:18.411803Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:27431" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-04-17T17:49:18.411887Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:49:18.412589Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Complete 2025-04-17T17:49:18.418505Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:2:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:49:18.423212Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:49:18.423328Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveTenantResult { ReplicationId: 1 Tenant: /Root Sucess: 1 } 2025-04-17T17:49:18.423347Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888] Tenant resolved: rid# 1, tenant# /Root 2025-04-17T17:49:18.423357Z node 1 :REPLICATION_CONTROLLER INFO: [controller 72075186224037888] Discover tenant nodes: tenant# /Root 2025-04-17T17:49:18.424097Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-04-17T17:49:18.424130Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888] Create session: nodeId# 1 TClient::Ls request: /Root/replication TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912158458 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsIns... 
(TRUNCATED) 2025-04-17T17:49:18.473853Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-04-17T17:49:18.473943Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-04-17T17:49:18.474046Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-04-17T17:49:18.474153Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T17:49:18.474214Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-04-17T17:49:18.475338Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-17T17:49:18.477711Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-04-17T17:49:18.477754Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-17T17:49:18.477823Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-17T17:49:18.478396Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-04-17T17:49:18.478427Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-17T17:49:18.478460Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-17T17:49:18.479015Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-04-17T17:49:18.479041Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-17T17:49:18.479553Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-04-17T17:49:18.479959Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-04-17T17:49:18.480002Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-04-17T17:49:18.480035Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-04-17T17:49:18.480439Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-04-17T17:49:18.480464Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-04-17T17:49:18.480842Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, 
exhausted# 0
2025-04-17T17:49:18.480914Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult
2025-04-17T17:49:18.480932Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5
2025-04-17T17:49:18.480969Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0
2025-04-17T17:49:18.481401Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 }
2025-04-17T17:49:18.481424Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5
2025-04-17T17:49:18.484589Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0
2025-04-17T17:49:18.503207Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } }
2025-04-17T17:49:18.503695Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found }
2025-04-17T17:49:18.504865Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] }
2025-04-17T17:49:18.504985Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] }
2025-04-17T17:49:18.505029Z node 1 :REPLICATION_CONTROLLER ERROR: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2025-04-17T17:49:18.506910Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete >> TSchemeShardMoveTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-04-17T17:49:14.814637Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340637547965924:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:14.815196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e84/r3tmp/tmpwGKXZp/pdisk_1.dat 2025-04-17T17:49:15.540427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:15.556135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:15.556253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:15.562736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:16.191262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
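
The TxAssignTxId lines in the replication controller trace above track four counters: pending (GetTxId requests waiting for an id), assigned (versions already given a tx id), allocated (pre-allocated ids still on hand from TEvTxAllocatorClient::TEvAllocateResult, granted in batches of 5), and exhausted (the pool ran dry and a new batch must be requested). A minimal sketch of that bookkeeping follows; it is illustrative only, and the names (TTxIdPool, OnGetTxId, OnAllocateResult) are assumptions, not the actual NKikimr::NReplication controller code, which also deduplicates versions against commit-interval buckets.

// Illustrative sketch of the allocate/assign bookkeeping suggested by the
// TxAssignTxId counters above. Hypothetical names; not YDB source code.
#include <cstdint>
#include <deque>
#include <iostream>
#include <utility>
#include <vector>

class TTxIdPool {
public:
    // Ids granted by the allocator (cf. TEvTxAllocatorClient::TEvAllocateResult).
    void OnAllocateResult(const std::vector<uint64_t>& ids) {
        for (uint64_t id : ids) Allocated.push_back(id);
        Drain();
    }

    // A request for a tx id at some version step (cf. TEvGetTxId).
    void OnGetTxId(uint64_t versionStep) {
        Pending.push_back(versionStep);
        Drain();
    }

    void Report() const {
        std::cout << "pending# " << Pending.size()
                  << ", assigned# " << Assigned.size()
                  << ", allocated# " << Allocated.size()
                  << ", exhausted# " << (Allocated.empty() ? 1 : 0) << "\n";
    }

private:
    // Serve pending requests while pre-allocated ids remain.
    void Drain() {
        while (!Pending.empty() && !Allocated.empty()) {
            Assigned.push_back({Pending.front(), Allocated.front()});
            Pending.pop_front();
            Allocated.pop_front();
        }
    }

    std::deque<uint64_t> Pending;                         // version steps awaiting an id
    std::deque<uint64_t> Allocated;                       // ids not yet handed out
    std::vector<std::pair<uint64_t, uint64_t>> Assigned;  // version step -> tx id
};

int main() {
    TTxIdPool pool;
    pool.OnGetTxId(1);                                  // no ids yet: exhausted# 1
    pool.Report();
    pool.OnAllocateResult({100, 101, 102, 103, 104});   // allocator grants a batch of 5
    pool.Report();                                      // assigned# 1, allocated# 4
}
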
2025-04-17T17:49:16.230582Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Connect to grpc://localhost:15629 2025-04-17T17:49:16.335794Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-17T17:49:16.397299Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15629: Failed to connect to remote host: Connection refused 2025-04-17T17:49:16.399501Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-17T17:49:16.401102Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15629: Failed to connect to remote host: Connection refused 2025-04-17T17:49:17.417653Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-17T17:49:17.431016Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Status 5 Not Found 2025-04-17T17:49:17.433764Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-17T17:49:17.441099Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestMergeConfig >> TSchemeShardMoveTest::Replace >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::MoveIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:19.823862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:19.823947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:19.823987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:19.824019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:19.824058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:19.824103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:19.824163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:19.824233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:19.824668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:19.920457Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2025-04-17T17:49:19.920522Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:19.936817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:19.937315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:19.937481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:19.945847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:19.946040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:19.946759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:19.946978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:19.949924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:19.951266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:19.951327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:19.951535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:19.951585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:19.951640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:19.951780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:19.958786Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:20.092880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:20.093132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.093373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:20.093620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:20.093678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.096319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:20.096467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-04-17T17:49:20.096708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.096777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:20.096842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:20.096886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:20.099086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.099149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:20.099186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:20.101148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.101200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.101243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:20.101311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:20.105207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:20.109487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:20.109697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:20.110839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:20.111005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:20.111064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:20.111374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:20.111438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:20.111616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:20.111714Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:20.119414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:20.119488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:20.119680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:20.119717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:20.120013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.120064Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:20.120155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:20.120180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:20.120214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:20.120246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:20.120277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:20.120315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:20.120365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:20.120402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:20.120466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:20.120494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:20.120530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:20.121871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:20.121963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:20.121993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.416333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.416476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.416563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.416656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.416845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.416942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.417068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.417279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.417458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.417514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.417559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.444010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:20.444100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:20.451051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:20.451146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:20.451265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:20.459613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:378:2347] sender: [1:435:2058] recipient: [1:15:2062] 2025-04-17T17:49:20.508211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:20.508495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-04-17T17:49:20.508635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-04-17T17:49:20.508792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
2025-04-17T17:49:20.508876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-17T17:49:20.508930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:20.512815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-17T17:49:20.512999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-04-17T17:49:20.513212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.513275Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-04-17T17:49:20.513335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-17T17:49:20.513464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:20.515604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-17T17:49:20.515850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-04-17T17:49:20.516568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:20.516680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:20.516731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-04-17T17:49:20.516873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-17T17:49:20.517043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:20.517116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-04-17T17:49:20.519922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:20.519969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:20.520184Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:49:20.520296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:20.520336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:428:2386], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-17T17:49:20.520377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:428:2386], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-17T17:49:20.520761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:49:20.520824Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:49:20.520930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:49:20.520973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:49:20.521016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:49:20.521047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:49:20.521089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-17T17:49:20.521131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:49:20.521164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:49:20.521197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:49:20.521320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:49:20.521362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-17T17:49:20.521405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T17:49:20.521438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T17:49:20.522207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:49:20.522302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:49:20.522359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:49:20.522402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:49:20.522435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:49:20.522931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:49:20.523004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:49:20.523026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:49:20.523050Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T17:49:20.523073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:49:20.523139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T17:49:20.526645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:49:20.526752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-04-17T17:49:15.613133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340641470662980:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:15.613179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e80/r3tmp/tmpqOubdT/pdisk_1.dat 2025-04-17T17:49:16.328818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:16.378229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:16.378367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:16.383847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64549 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:16.812361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:16.936848Z node 1 :GRPC_CLIENT DEBUG: [51700008c108]{trololo} Connect to grpc://localhost:14132 2025-04-17T17:49:16.937860Z node 1 :GRPC_CLIENT DEBUG: [51700008c108]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-17T17:49:16.966701Z node 1 :GRPC_CLIENT DEBUG: [51700008c108]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies >> TSchemeShardMoveTest::ResetCachedPath >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::TwoTables >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant |75.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardMoveTest::MoveMigratedTable >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> DataShardSnapshots::VolatileSnapshotSplit >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardMoveTest::ResetCachedPath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-04-17T17:49:13.985574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340635123510171:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:13.994764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ec5/r3tmp/tmpOoV6jE/pdisk_1.dat 2025-04-17T17:49:14.608242Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:14.614029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:14.614139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:14.618730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:14.897236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
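
The PassRequestId trace above shows the gRPC client prefixing every line with its connection handle and the caller-supplied request id ("[51700008c108]{trololo} Request ..."), so one request's lifecycle can be followed through interleaved logs. A rough sketch of that correlation-tag formatting, under assumed names (TGrpcCallLogger is hypothetical, not the actual YDB gRPC client code):

// Sketch of the per-request correlation tag seen in the trace above.
#include <iostream>
#include <string>

struct TGrpcCallLogger {
    std::string Handle;    // connection handle, e.g. "51700008c108"
    std::string RequestId; // caller-supplied id, e.g. "trololo"

    void Log(const std::string& event, const std::string& payload) const {
        // Mirrors the log shape: [handle]{request-id} Event Payload
        std::cout << "[" << Handle << "]"
                  << (RequestId.empty() ? "" : "{" + RequestId + "}")
                  << " " << event << " " << payload << "\n";
    }
};

int main() {
    TGrpcCallLogger log{"51700008c108", "trololo"};
    log.Log("Request", "AuthenticateRequest { iam_token: \"**** (masked)\" }");
    log.Log("Response", "AuthenticateResponse { subject { user_account { id: \"1234\" } } }");
}
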
2025-04-17T17:49:18.623057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340653203290709:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:18.623337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ec5/r3tmp/tmpsvkAX4/pdisk_1.dat 2025-04-17T17:49:18.895705Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:18.908323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:18.908401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:18.919114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:19.259896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
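
Both node bootstraps above log the same Hive connectivity progression: VolatileState Unknown -> Disconnected -> Connecting -> Connected. A minimal sketch of that happy-path state machine; the enum and transition function are assumptions for illustration, not Hive's actual implementation:

// Sketch of the node VolatileState progression logged by HIVE above.
#include <iostream>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// Advance one step along the happy path seen in the log.
EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected;
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s;
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    while (s != EVolatileState::Connected) {
        EVolatileState n = Next(s);
        std::cout << "VolatileState: " << Name(s) << " -> " << Name(n) << "\n";
        s = n;
    }
}
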
2025-04-17T17:49:19.267736Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::Replace [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:49:21.284409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:21.284517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:21.284566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:21.284597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:21.284649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:21.284676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:21.284731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:21.284797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:21.285165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:21.364070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:21.364126Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:21.378597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:21.378726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:21.378882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:21.388495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:21.388759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:21.389398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:21.389674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:21.394563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:21.395881Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:21.395946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:21.396051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:21.396113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:21.396155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:21.396340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.403166Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:21.540853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:21.541082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.541278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:21.541475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:21.541534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.544025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:21.544165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:21.544367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.544430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:21.544524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:21.544561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:21.546611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.546683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:21.546720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:21.548604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.548656Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.548703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:21.548765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.552031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:21.557882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:21.558088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:21.559135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:21.559286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:21.559339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:21.559611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:21.559669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:21.559855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:21.559953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:21.561952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:21.561984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:21.562121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:21.562150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:21.562488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.562534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:21.562625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#1:0 progress is 1/1 2025-04-17T17:49:21.562657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.562701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:21.562752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.562791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:21.562820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.562865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:21.562891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:21.562952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:21.562985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:21.563010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:21.564613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:21.564697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:21.564726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T17:49:22.940827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1012 } } 2025-04-17T17:49:22.940909Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1012 } } FAKE_COORDINATOR: Erasing txId 103 2025-04-17T17:49:22.941931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:49:22.941975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-04-17T17:49:22.942097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:49:22.942148Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:49:22.942292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:49:22.942362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.942407Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.942444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:49:22.942497Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-17T17:49:22.949881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936902 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:49:22.949945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2025-04-17T17:49:22.950077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936902 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:49:22.950123Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:49:22.950223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 8589936902 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:49:22.950283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.950316Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-17T17:49:22.950345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:49:22.950377Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2025-04-17T17:49:22.954854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.957668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-17T17:49:22.957864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.958688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.958750Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:22.958801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-17T17:49:22.958917Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 2/3 2025-04-17T17:49:22.958955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-04-17T17:49:22.958994Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 2/3 2025-04-17T17:49:22.959029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-04-17T17:49:22.959081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-04-17T17:49:22.967250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-17T17:49:22.967652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-17T17:49:22.967697Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:22.967741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-17T17:49:22.967821Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 3/3 2025-04-17T17:49:22.967854Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-17T17:49:22.967887Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 3/3 2025-04-17T17:49:22.967915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-17T17:49:22.967942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-04-17T17:49:22.967986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-17T17:49:22.968033Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:49:22.968068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:49:22.968231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T17:49:22.968274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:49:22.968314Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-17T17:49:22.968336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-17T17:49:22.968365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T17:49:22.968390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:49:22.968412Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-17T17:49:22.968431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-17T17:49:22.968474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:49:22.968496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:49:22.969191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:49:22.969246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:49:22.969313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:49:22.969381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:49:22.969420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:49:22.969447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:49:22.969476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:22.977507Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:49:22.977974Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-04-17T17:49:23.040304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:49:23.040453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:49:23.040878Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:49:23.040978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:49:23.041026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:679:2564] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-04-17T17:49:14.700015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340636045182295:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:14.700073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eba/r3tmp/tmp6EEIbL/pdisk_1.dat 2025-04-17T17:49:15.421838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:15.426148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:15.434478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:15.443367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:15.760779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:15.774386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:19.007011Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340655666487256:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:19.051017Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eba/r3tmp/tmpZBg7wm/pdisk_1.dat 2025-04-17T17:49:19.210820Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:19.233008Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:19.233112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:19.234839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:19.466459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:19.473209Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 16821, MsgBus: 11259 2025-04-17T17:48:56.587572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340560917242325:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:56.588175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001471/r3tmp/tmpg5eWUi/pdisk_1.dat 2025-04-17T17:48:57.399002Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:57.401540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:57.401648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:57.415100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16821, node 1 2025-04-17T17:48:57.604662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:57.604683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:57.604696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:57.604810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11259 TClient is connected to server localhost:11259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:48:58.325202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:58.409067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:48:58.633629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:58.919272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.037575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:00.967971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340578097113140:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:00.968071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:01.586555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340560917242325:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:01.586645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:02.035950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.090246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.166458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.250667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.286318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.356697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.419885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.687705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340586687048413:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.687906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.688474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340586687048422:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.710532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:02.737326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340586687048424:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:02.888926Z node 1 :TX_PROXY ERROR: Actor# [1:7494340586687048481:3570] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:06.294427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 864000000000 2025-04-17T17:49:07.460423Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912147433, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 6316, MsgBus: 14785 2025-04-17T17:49:08.364833Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340610760301730:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001471/r3tmp/tmpYlKAry/pdisk_1.dat 2025-04-17T17:49:08.538342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:49:08.610653Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:08.668407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:08.668495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:08.679478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6316, node 2 2025-04-17T17:49:08.730932Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:08.730967Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:08.730976Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:08.731095Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14785 TClient is connected to server localhost:14785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:49:09.243904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:09.254780Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:09.276542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 w ... T_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:12.263055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:12.319994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:12.386809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:12.429983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:12.517373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:12.586724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:12.664848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494340627940173180:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:12.664950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:12.665869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494340627940173185:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:12.671753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:12.688507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494340627940173187:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:12.741960Z node 2 :TX_PROXY ERROR: Actor# [2:7494340627940173241:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:13.420550Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494340610760301730:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:13.420890Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:13.947562Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912153971, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 11321, MsgBus: 29956 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001471/r3tmp/tmpodo69j/pdisk_1.dat 2025-04-17T17:49:15.113216Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:15.269657Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:15.288992Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:15.289103Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:15.292105Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11321, node 3 2025-04-17T17:49:15.502883Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:15.502910Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:15.502919Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:15.503057Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29956 TClient is connected to server localhost:29956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:49:16.350351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:16.367495Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:16.385844Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:16.495495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:16.732246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:16.818827Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:19.391406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340660666041925:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:19.391521Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:19.456759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:19.514422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:19.567018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:19.613392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:19.693900Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:19.742989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:19.823265Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:19.925630Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340660666042599:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:19.925720Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340660666042604:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:19.925721Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:19.929978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:19.947081Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494340660666042606:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:20.025427Z node 3 :TX_PROXY ERROR: Actor# [3:7494340664961009957:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:21.384039Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912161419, txId: 281474976710672] shutting down 2025-04-17T17:49:21.663362Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912161692, txId: 281474976710674] shutting down >> TSchemeShardMoveTest::OneTable [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:22.190532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:22.190623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:22.190673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:22.190713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:22.190759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:22.190788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:22.190846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:22.190951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:22.191306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:22.285345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:22.285399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:22.314058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:22.314565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:22.314733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:22.322665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:22.322847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 2025-04-17T17:49:22.323482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.323673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:22.326172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.327511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:22.328224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.328446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:22.328500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:22.328546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:22.328688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.338681Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:22.507234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:22.507455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.507698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:22.507926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:22.507975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.510144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.510376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:22.510607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.510675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:22.510716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:22.510748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:22.512605Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.512660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:22.512711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:22.514560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.514606Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.514665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.514730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.518551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:22.520345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:22.520533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:22.521486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.521617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:22.521667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.521951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:22.522011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.522250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:22.522331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:22.524246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:22.524300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:22.524468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-17T17:49:22.524510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:22.524847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.524888Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:22.524973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:22.525054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.525095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:22.525130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.525168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:22.525202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.525232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:22.525288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:22.525346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:22.525380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:22.525438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:22.527287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:22.527434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:22.527486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
indRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-17T17:49:23.357891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 189500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 261 } } 2025-04-17T17:49:23.357987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 189500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 261 } } 2025-04-17T17:49:23.358017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-17T17:49:23.358100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.358209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2025-04-17T17:49:23.359747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.359900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.359934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:23.359999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-17T17:49:23.360112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:23.361302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-04-17T17:49:23.361409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2025-04-17T17:49:23.361711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.361829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-17T17:49:23.361871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-17T17:49:23.362093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2025-04-17T17:49:23.362226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-17T17:49:23.367273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:23.367323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:49:23.367641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:23.367693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-17T17:49:23.368179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.368238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:49:23.369308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T17:49:23.369404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T17:49:23.369442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-17T17:49:23.369490Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-04-17T17:49:23.369532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:49:23.369617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 105 2025-04-17T17:49:23.371288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 961 } } 2025-04-17T17:49:23.371344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-17T17:49:23.371470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 
CpuTimeUsec: 961 } } 2025-04-17T17:49:23.371575Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 961 } } 2025-04-17T17:49:23.372871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 674 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-17T17:49:23.372921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-17T17:49:23.373084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 674 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-17T17:49:23.373155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:49:23.373255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 674 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-17T17:49:23.373332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.373368Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.373402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:49:23.373447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-04-17T17:49:23.374022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T17:49:23.375539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.375674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.375949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.375994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-17T17:49:23.376106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T17:49:23.376144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:49:23.376180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T17:49:23.376211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:49:23.376259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-17T17:49:23.376316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 105 2025-04-17T17:49:23.376373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:49:23.376422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T17:49:23.376451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T17:49:23.376563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:49:23.380083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:49:23.380138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:835:2754] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:49:22.551685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:22.551800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:22.551865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:22.551905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:22.551946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:22.551974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:22.552171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:22.552251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:22.552578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:22.649087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:22.649153Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:22.662004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:22.662105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:22.662264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-17T17:49:22.675539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:22.675899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:22.676579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.676909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:22.682538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.683898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:22.683973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.684077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:22.684120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:22.684159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:22.684360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.691197Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:22.831797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:22.832001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.832218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:22.832473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:22.832528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.835287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.835449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:22.835660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.835729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:22.835815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-17T17:49:22.835853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:22.839316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.839387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:22.839447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:22.841537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.841586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.841625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.841692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.845938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:22.848154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:22.848330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:22.849355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.849475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:22.849525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.849823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:22.849895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.850081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:22.850164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:22.858922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:22.858997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:22.859178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.859227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:22.859555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.859609Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:22.859721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:22.859756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.859797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:22.859850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.859892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:22.859930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.859966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:22.860013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:22.860131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:22.860168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:22.860202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:22.862327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:22.862433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:22.862469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
and all the parts is done, operation id: 103:1 2025-04-17T17:49:23.359340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-17T17:49:23.359383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T17:49:23.359422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:49:23.359747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:49:23.359789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:49:23.359849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:49:23.359882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:49:23.359910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:49:23.363021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:49:23.363080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:508:2468] 2025-04-17T17:49:23.363171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-17T17:49:23.363754Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:23.363942Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 201us result status StatusPathDoesNotExist 2025-04-17T17:49:23.364105Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:49:23.364549Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:23.364762Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 
72057594046678944 describe path "/MyRoot/TableMove1" took 208us result status StatusSuccess 2025-04-17T17:49:23.365164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:23.365821Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:23.365984Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 208us result status StatusPathDoesNotExist 2025-04-17T17:49:23.366118Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:49:23.366584Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:23.366745Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 158us result status StatusSuccess 2025-04-17T17:49:23.367112Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:23.367760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:23.367952Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 167us result status StatusSuccess 2025-04-17T17:49:23.368330Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 
SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |75.5%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:21.520357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:21.520440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:21.520479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:21.520525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:21.520566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:21.520597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:21.520652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:21.520744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-17T17:49:21.521050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:21.610828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:21.610885Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:21.646082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:21.646540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:21.646737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:21.665951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:21.666121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:21.666701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:21.666885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:21.675244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:21.676490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:21.676550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:21.676722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:21.676765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:21.676802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:21.676924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.688584Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:21.804173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:21.804387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.804589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:21.804857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:21.804906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.806982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-17T17:49:21.807104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:21.807320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.807377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:21.807406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:21.807437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:21.809152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.809202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:21.809250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:21.810888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.810938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.810975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:21.811034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.814279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:21.815876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:21.816029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:21.816872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:21.816983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:21.817028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:21.817297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:21.817348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:21.817487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:21.817552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:21.819382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:21.819432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:21.819579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:21.819612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:21.819965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:21.820004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:21.820080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:21.820122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.820159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:21.820190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.820228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:21.820260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:21.820290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:21.820332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:21.820393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:21.820428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:21.820463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:21.822107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:21.822238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:21.822274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
233409546 Forgetting tablet 72075186233409546 2025-04-17T17:49:23.541345Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T17:49:23.541517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T17:49:23.541784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 1 Forgetting tablet 72075186233409547 2025-04-17T17:49:23.544190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:49:23.544399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-04-17T17:49:23.544989Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 2025-04-17T17:49:23.545239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:49:23.545273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-17T17:49:23.545330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2025-04-17T17:49:23.545364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-04-17T17:49:23.545393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-17T17:49:23.545417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2025-04-17T17:49:23.545445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-17T17:49:23.545469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-17T17:49:23.545498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 Forgetting tablet 72075186233409548 2025-04-17T17:49:23.548157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.548385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-04-17T17:49:23.551078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:49:23.551141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 
2025-04-17T17:49:23.552809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:49:23.552871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:49:23.552965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 4 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T17:49:23.553103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:49:23.553148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-04-17T17:49:23.553228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:23.553457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:49:23.553504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-04-17T17:49:23.555323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-17T17:49:23.556059Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-17T17:49:23.556197Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-17T17:49:23.556235Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-17T17:49:23.556894Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:23.557406Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 231us result status StatusPathDoesNotExist 2025-04-17T17:49:23.557611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:49:23.558279Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-17T17:49:23.558511Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 248us result status StatusSuccess 2025-04-17T17:49:23.558995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:23.559978Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:23.560142Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 187us result status StatusSuccess 2025-04-17T17:49:23.560555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 26 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestRemoveTenant >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:21.711364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:21.711468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:21.711524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:21.711564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:21.711607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:21.711643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:21.711739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:21.711840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:21.712184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:21.803527Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs
2025-04-17T17:49:21.803585Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:21.820311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:49:21.820641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:49:21.820777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:49:21.828217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:49:21.828380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:49:21.828903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:21.829090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:49:21.831378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:21.832446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:21.832511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:21.832657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:49:21.832694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:21.832738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:49:21.832846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.838356Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:49:21.954955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:21.955151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.955331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:49:21.955552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:21.955632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.965715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:21.965865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:49:21.966035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.966095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:49:21.966128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:49:21.966159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:49:21.968978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.969049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:21.969086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:49:21.970984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.971038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.971079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:21.971131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:49:21.974931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:21.976909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:49:21.977067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:49:21.978012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:21.978134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:21.978204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:21.978463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:49:21.978514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:21.978672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:21.978747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:49:21.980611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:21.980653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:21.980800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:21.980843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:49:21.981233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:21.981283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:49:21.981369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:21.981413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:21.981453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:21.981482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:21.981517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:49:21.981555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:21.981600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:49:21.981643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:49:21.981712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:21.981750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:49:21.981797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:49:21.989973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:21.990120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:21.990159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
t: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:23.946092Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:23.946243Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 169us result status StatusSuccess
2025-04-17T17:49:23.946575Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:23.947053Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-17T17:49:23.947301Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 254us result status StatusSuccess
2025-04-17T17:49:23.947904Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:23.948448Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-17T17:49:23.948618Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 185us result status StatusSuccess
2025-04-17T17:49:23.949177Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:49:20.699803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:49:20.699899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:20.699936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:49:20.699968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:49:20.700004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:49:20.700039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:49:20.700097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:20.700152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:49:20.700443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:49:20.785338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:49:20.785392Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:20.800823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:49:20.800940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:49:20.801094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:49:20.806983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:49:20.807243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:49:20.807808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:20.808102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:49:20.812778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:20.814032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:20.814088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:20.814211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:49:20.814253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:20.814294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:49:20.814496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.823718Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062]
2025-04-17T17:49:20.963715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:20.963907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.964096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:49:20.964373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:20.964422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.967824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:20.967980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:49:20.968178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.968233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:49:20.968337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:49:20.968375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:49:20.975083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.975174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:20.975214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:49:20.979730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.979793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.979831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:20.979895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:49:20.983261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:20.986948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:49:20.987117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:49:20.988017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:20.988149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:20.988193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:20.988452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:49:20.988500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:20.988665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:20.988748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:49:20.995036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:20.995085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:20.995254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:20.995296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:49:20.995590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:20.995628Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:49:20.995723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:20.995753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:20.995789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:20.995833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:20.995871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:49:20.995908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:20.995952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:49:20.995981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:49:20.996043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:20.996076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:49:20.996103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:49:21.003310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:21.003460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:21.003497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
7T17:49:23.813646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1
2025-04-17T17:49:23.813681Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1
2025-04-17T17:49:23.813750Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet# 72057594046678944
2025-04-17T17:49:23.813864Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129
2025-04-17T17:49:23.813982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:23.814045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-04-17T17:49:23.816102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944
2025-04-17T17:49:23.817015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944
2025-04-17T17:49:23.817255Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:23.817298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:23.817442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4]
2025-04-17T17:49:23.817601Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:23.817645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 1
2025-04-17T17:49:23.817686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 4
2025-04-17T17:49:23.818048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944
2025-04-17T17:49:23.818104Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944
2025-04-17T17:49:23.818207Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944
2025-04-17T17:49:23.818245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944
2025-04-17T17:49:23.818286Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240
2025-04-17T17:49:23.819119Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108
2025-04-17T17:49:23.819218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108
2025-04-17T17:49:23.819250Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108
2025-04-17T17:49:23.819291Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23
2025-04-17T17:49:23.819329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:23.820084Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108
2025-04-17T17:49:23.820153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108
2025-04-17T17:49:23.820176Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108
2025-04-17T17:49:23.820203Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615
2025-04-17T17:49:23.820228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-04-17T17:49:23.820297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true
2025-04-17T17:49:23.822348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944
2025-04-17T17:49:23.822402Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:23.822635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-04-17T17:49:23.822742Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1
2025-04-17T17:49:23.822783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1
2025-04-17T17:49:23.822820Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1
2025-04-17T17:49:23.822851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1
2025-04-17T17:49:23.822887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true
2025-04-17T17:49:23.822946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 108
2025-04-17T17:49:23.822984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1
2025-04-17T17:49:23.823017Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0
2025-04-17T17:49:23.823047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0
2025-04-17T17:49:23.823134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2
2025-04-17T17:49:23.825712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108
2025-04-17T17:49:23.826687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108
2025-04-17T17:49:23.827550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult
2025-04-17T17:49:23.827594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:830:2788]
TestWaitNotification: OK eventTxId 108
wait until 72075186233409546 is deleted
wait until 72075186233409547 is deleted
2025-04-17T17:49:23.828176Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546
2025-04-17T17:49:23.828240Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547
Deleted tabletId 72075186233409547
2025-04-17T17:49:23.845407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4
2025-04-17T17:49:23.845498Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944
2025-04-17T17:49:23.851000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944
2025-04-17T17:49:23.851393Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
Forgetting tablet 72075186233409546
2025-04-17T17:49:23.853664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:23.853933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-04-17T17:49:23.854402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:49:23.854448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-04-17T17:49:23.854513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:23.859747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-04-17T17:49:23.859812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-04-17T17:49:23.860254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
Deleted tabletId 72075186233409546
2025-04-17T17:49:23.860957Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:23.861112Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 182us result status StatusSuccess
2025-04-17T17:49:23.861427Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardMoveTest::Chain [GOOD]
>> TSchemeShardMoveTest::Index
>> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:49:21.954282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:49:21.954363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:21.954422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:49:21.954450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:49:21.954492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:49:21.954522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:49:21.954569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:21.954619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:49:21.954897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:49:22.031360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:49:22.031419Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:22.049119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:49:22.049216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:49:22.049344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:49:22.059916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:49:22.060269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:49:22.060913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:22.061232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:49:22.066367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:22.067719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:22.067782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:22.067919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:49:22.067967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:22.068008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:49:22.068210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.079318Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062]
2025-04-17T17:49:22.249078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:49:22.249279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.249477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:49:22.249720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:49:22.249794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.252385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:22.252518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:49:22.252684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.252744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:49:22.252827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:49:22.252863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:49:22.254731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.254800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:22.254835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:49:22.256766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.256810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.256845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:22.256908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:49:22.260488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:49:22.262382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:49:22.262542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:49:22.263523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:49:22.263629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:22.263678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:22.263967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:49:22.264029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:49:22.264220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:49:22.264309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:49:22.266214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:49:22.266267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:49:22.266425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:49:22.266487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:49:22.266763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:49:22.266804Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:49:22.266889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:22.266921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:22.266977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:49:22.267033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:22.267078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:49:22.267114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:49:22.267161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:49:22.267192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:49:22.267260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:49:22.267298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:49:22.267328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:49:22.269126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:22.269219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:49:22.269274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
SHARD DEBUG: TMoveSequence TDropParts HandleReply TEvDropSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 102:1 at tablet 72057594046678944
2025-04-17T17:49:24.091233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-17T17:49:24.091288Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240
2025-04-17T17:49:24.094018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944
2025-04-17T17:49:24.094145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944
2025-04-17T17:49:24.094197Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:49:24.094259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944
2025-04-17T17:49:24.094351Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2
2025-04-17T17:49:24.094512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2
2025-04-17T17:49:24.094561Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2
2025-04-17T17:49:24.094586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2
2025-04-17T17:49:24.094609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true
2025-04-17T17:49:24.094671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:376:2345] message: TxId: 102
2025-04-17T17:49:24.094711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2
2025-04-17T17:49:24.094749Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-04-17T17:49:24.094783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-04-17T17:49:24.094916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-04-17T17:49:24.094949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-17T17:49:24.094981Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1
2025-04-17T17:49:24.094997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1
2025-04-17T17:49:24.095043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2
2025-04-17T17:49:24.095074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-04-17T17:49:24.095395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:49:24.095435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-04-17T17:49:24.095487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-17T17:49:24.095521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-17T17:49:24.095549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-04-17T17:49:24.097639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T17:49:24.097685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:472:2428]
2025-04-17T17:49:24.097973Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 102
2025-04-17T17:49:24.101195Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:24.101418Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 257us result status StatusPathDoesNotExist
2025-04-17T17:49:24.101586Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-17T17:49:24.101959Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:24.102233Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 243us result status StatusPathDoesNotExist
2025-04-17T17:49:24.102464Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-17T17:49:24.102818Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:49:24.103014Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 208us result status StatusSuccess
2025-04-17T17:49:24.103468Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:49:24.104007Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-17T17:49:24.104164Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 171us result status StatusSuccess
2025-04-17T17:49:24.104402Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:49:22.406695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:49:22.406800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:22.406842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:49:22.406881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:49:22.406928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:49:22.406962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:49:22.407017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:22.407080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:49:22.407373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:49:22.506647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:49:22.506695Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:22.534845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Complete 2025-04-17T17:49:22.534960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:22.535122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:22.562532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:22.562883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:22.563508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.563832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:22.575684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.576933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:22.576989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.577086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:22.577137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:22.577185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:22.577378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.590583Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:22.720546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:22.720735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.720949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:22.721195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:22.721250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.725953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.726087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:22.726298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T17:49:22.726363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:22.726469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:22.726508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:22.732112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.732182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:22.732220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:22.734136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.734202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.734246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.734322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.738121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:22.740021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:22.740192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:22.741058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:22.741230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:22.741279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.741546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:22.741605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:22.741804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:22.741892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
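The boot sequence above walks a single sub-operation through schemeshard's numeric states, visible in the "Change state for txid 1:0" lines: 2 -> 3 -> 128 -> 240 (create shards, configure them, propose to the coordinator, done). A toy enum-driven version of that progression; the state labels are inferred from this trace and the mapping of numbers to phases is an assumption, not taken from YDB headers:

#include <cstdio>

// Invented labels for the numeric states printed in the log.
enum class EState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

EState Next(EState s) {
    switch (s) {
        case EState::CreateParts:    return EState::ConfigureParts;  // no shards to create, do next state
        case EState::ConfigureParts: return EState::Propose;
        case EState::Propose:        return EState::Done;            // coordinator planned the step
        case EState::Done:           return EState::Done;
    }
    return EState::Done;
}

int main() {
    EState s = EState::CreateParts;
    while (s != EState::Done) {
        EState n = Next(s);
        std::printf("Change state for txid 1:0 %d -> %d\n", (int)s, (int)n);
        s = n;
    }
}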
2025-04-17T17:49:22.743968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:22.744010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:22.744162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:22.744198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:22.744491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:22.744530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:22.744630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:22.744659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.744694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:22.744736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.744771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:22.744807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:22.744849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:22.744878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:22.744934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:22.744965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:22.744994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:22.746908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:22.747006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:22.747041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
thId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T17:49:24.430816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-17T17:49:24.430922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:49:24.431088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-17T17:49:24.431126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-17T17:49:24.431163Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-17T17:49:24.431445Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:24.431556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:24.431614Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-17T17:49:24.431656Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-17T17:49:24.434262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-17T17:49:24.434319Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-17T17:49:24.434426Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T17:49:24.434461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T17:49:24.434513Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T17:49:24.434541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T17:49:24.434570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-17T17:49:24.434665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:125:2151] message: TxId: 281474976710760 2025-04-17T17:49:24.434716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T17:49:24.434751Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-17T17:49:24.434781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-17T17:49:24.434848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-17T17:49:24.439468Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-17T17:49:24.439554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-17T17:49:24.439616Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-17T17:49:24.439692Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T17:49:24.441592Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T17:49:24.441681Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-04-17T17:49:24.441731Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T17:49:24.443503Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T17:49:24.443587Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T17:49:24.443623Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-17T17:49:24.443746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:49:24.443792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:630:2579] TestWaitNotification: OK eventTxId 102 2025-04-17T17:49:24.444367Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:24.444611Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 280us result status StatusSuccess 2025-04-17T17:49:24.445137Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady 
KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-04-17T17:47:14.297747Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340121824616707:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:14.298053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:47:18.375238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:47:18.415736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:47:18.425568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:47:18.452041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:47:18.458701Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20788, node 1 2025-04-17T17:47:19.584643Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340121824616707:2064];send_to=[0:7307199536658146131:7762515]; 
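The viewer-test log that continues below repeats the same startup pattern on nodes 2, 4, 7 and 10: metadata lookups fail with LookupError while the schema is still coming up, and the workload-service actor schedules a retry instead of failing ("Scheduled retry for error: Retry LookupError for table .metadata/workload_manager/delayed_requests"). The same retry-until-ready idea in a self-contained form; the lookup function, error condition and delay policy are made up for illustration:

#include <chrono>
#include <cstdio>
#include <optional>
#include <string>
#include <thread>

// Hypothetical lookup that fails until the metadata table exists.
std::optional<std::string> LookupTable(int attempt) {
    if (attempt < 3) return std::nullopt;  // LookupError: path not resolved yet
    return std::string("ready");
}

int main() {
    for (int attempt = 0; ; ++attempt) {
        if (auto r = LookupTable(attempt)) {
            std::printf("lookup succeeded: %s\n", r->c_str());
            break;
        }
        std::printf("Scheduled retry for error: LookupError (attempt %d)\n", attempt);
        std::this_thread::sleep_for(std::chrono::milliseconds(100));  // backoff policy is illustrative
    }
}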
2025-04-17T17:47:19.584719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:19.606289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:19.606306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:19.606312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:19.606610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:24.108196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.364114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:47:24.382688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:24.437635Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2025-04-17T17:48:01.853355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:466:2426], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:01.853789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:01.854019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:02.469856Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:02.666778Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:48:02.702566Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:48:03.443474Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 12841, node 2 TClient is connected to server localhost:29151 2025-04-17T17:48:05.359773Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:05.359842Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:05.359888Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:05.360572Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:48:46.772266Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:541:2427], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:46.772700Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:46.772778Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:47.265968Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:47.454977Z node 4 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:48:47.488509Z node 4 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:48:47.964489Z node 4 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 27590, node 4 TClient is connected to server localhost:3155 2025-04-17T17:48:48.610830Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:48.610910Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:48.610963Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:48.611274Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:49:00.683487Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:539:2427], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:00.683845Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:00.683990Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:49:01.294643Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:01.548245Z node 7 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:49:01.587081Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:49:02.182219Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 18353, node 7 TClient is connected to server localhost:32423 2025-04-17T17:49:02.784002Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:02.784094Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:02.784159Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:02.784739Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:49:16.372209Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:611:2425], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:16.372809Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:16.373105Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:49:16.897206Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:17.126365Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:49:17.162587Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:49:17.988255Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 16565, node 10 TClient is connected to server localhost:1194 2025-04-17T17:49:18.991858Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:18.991975Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:18.992067Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:18.992567Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TSchemeShardMoveTest::Index [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:22.963575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:22.963676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:22.963724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:22.963765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:22.963997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:22.964044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:22.964122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:22.964205Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:22.964574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:23.055970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:23.056028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:23.071210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:23.071582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:23.071730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:23.081081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:23.081241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:23.081849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.082081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:23.084727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:23.085878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:23.085930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:23.086104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:23.086156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:23.086217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:23.086339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.092421Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:23.205472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:23.205665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.205872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:23.206147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:23.206219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T17:49:23.211176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.211401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:23.211588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.211644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:23.211673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:23.211702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:23.213541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.213617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:23.213652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:23.215374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.215416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.215458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:23.215523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.218859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:23.220581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:23.220726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:23.221597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.221715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:23.221754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:23.222070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
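Between "TOperation DoPropose send propose to coordinator" and "TTxOperationPlanStep Execute" above, the coordinator assigns the transaction a global plan step (5000001 here), and only then does the operation move from Propose (128) to Done (240). A compressed model of that plan-step assignment; the step arithmetic is invented, and the real coordinator's allocation (PlanResolution, mediators) is more involved:

#include <cinttypes>
#include <cstdint>
#include <cstdio>

class Coordinator {
public:
    explicit Coordinator(uint64_t start) : step_(start) {}

    // Assign the next plan step to a proposed transaction, like the
    // "FAKE_COORDINATOR: Add transaction: 1 at step: 5000001" lines.
    uint64_t Plan(uint64_t txId) {
        ++step_;
        std::printf("Add transaction: %" PRIu64 " at step: %" PRIu64 "\n", txId, step_);
        return step_;
    }

private:
    uint64_t step_;
};

int main() {
    Coordinator coord(5000000);  // start value chosen so the first step matches the trace
    uint64_t step = coord.Plan(1);
    std::printf("advance: minStep %" PRIu64 "\n", step);
}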
2025-04-17T17:49:23.222122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:23.222300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:23.222372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:23.224257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:23.224299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:23.224431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:23.224467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:23.224805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.224848Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:23.224930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:23.224988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.225023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:23.225052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.225085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:23.225119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.225168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:23.225202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:23.225260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:23.225316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:23.225348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:23.227069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:23.227193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:23.227231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
p: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 675 } } 2025-04-17T17:49:25.250328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T17:49:25.250402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 675 } } 2025-04-17T17:49:25.250467Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 675 } } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:49:25.250935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936902 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:25.250966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-04-17T17:49:25.251077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936902 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:25.251117Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:49:25.251206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 8589936902 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:25.251254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:25.251291Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-17T17:49:25.251318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:49:25.251348Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2025-04-17T17:49:25.251791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:25.251817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 
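The MoveOldTableWithIndex trace interleaves TEvSchemaChanged messages from two datashards (72075186233409546 and 72075186233409547); CollectSchemaChanged tracks a "left await" counter per part and only advances the state from 129 (ProposedWaitParts) to 240 once every shard has reported. A small set-based model of that barrier; the tablet ids are reused from the log, everything else is invented:

#include <cinttypes>
#include <cstdio>
#include <set>
#include <utility>

// Barrier over datashard acks: the operation part stays in state 129 until
// the expected set of shards drains.
class SchemaChangedBarrier {
public:
    explicit SchemaChangedBarrier(std::set<uint64_t> shards) : pending_(std::move(shards)) {}

    bool OnSchemaChanged(uint64_t shard) {
        pending_.erase(shard);
        std::printf("ack from %" PRIu64 ", left await: %zu\n", shard, pending_.size());
        return pending_.empty();  // true => Change state 129 -> 240
    }

private:
    std::set<uint64_t> pending_;
};

int main() {
    SchemaChangedBarrier barrier({72075186233409546ULL, 72075186233409547ULL});
    barrier.OnSchemaChanged(72075186233409546ULL);
    if (barrier.OnSchemaChanged(72075186233409547ULL)) {
        std::printf("all shard schema changes have been received\n");
    }
}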
2025-04-17T17:49:25.251891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:25.251914Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:49:25.251951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:25.251980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:25.251999Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:25.252026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:49:25.252047Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:49:25.255651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-17T17:49:25.255774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:25.256086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-17T17:49:25.256309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:25.256454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-17T17:49:25.256489Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:25.256543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-17T17:49:25.256628Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-04-17T17:49:25.256660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-04-17T17:49:25.256688Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-04-17T17:49:25.256712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-04-17T17:49:25.256741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-04-17T17:49:25.256944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:25.256966Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:25.256990Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-17T17:49:25.257029Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-04-17T17:49:25.257045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-17T17:49:25.257066Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-04-17T17:49:25.257079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-17T17:49:25.257096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-04-17T17:49:25.257157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:379:2347] message: TxId: 102 2025-04-17T17:49:25.257200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-17T17:49:25.257237Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:49:25.257263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:49:25.257358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T17:49:25.257395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:49:25.257431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-17T17:49:25.257445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-17T17:49:25.257463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T17:49:25.257477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:49:25.257490Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-17T17:49:25.257502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-17T17:49:25.257540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:49:25.257554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:49:25.257749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:49:25.257797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:49:25.257857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:49:25.257889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
2025-04-17T17:49:25.257918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:49:25.257944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:49:25.257967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:25.262987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:49:25.263051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:478:2439] 2025-04-17T17:49:25.263143Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:49:23.295043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:23.295128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:23.295167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:23.295200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:23.295239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:23.295276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:23.295339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:23.295399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:23.295700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:23.381499Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2025-04-17T17:49:23.381542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:23.412273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:23.412390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:23.412551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:23.420193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:23.420460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:23.421405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.421677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:23.426032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:23.427567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:23.427619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:23.427721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:23.427759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:23.427794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:23.427980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.437447Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:23.553571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:23.553759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.553990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:23.554237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:23.554298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.556567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.556694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-04-17T17:49:23.556867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.556944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:23.557028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:23.557063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:23.559052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.559126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:23.559164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:23.561111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.561165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.561200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:23.561263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.564641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:23.566514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:23.566662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:23.567567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:23.567679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:23.567731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:23.567961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:23.568012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:23.568191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:23.568326Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:23.570422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:23.570466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:23.570637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:23.570680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:23.571004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:23.571049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:23.571150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:23.571182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.571224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:23.571265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.571304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:23.571340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:23.571372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:23.571394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:23.571453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:23.571485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:23.571508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:23.572895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:23.572975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:23.572998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:25.847549Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:25.847662Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 131us result status StatusSuccess 2025-04-17T17:49:25.847917Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:25.848276Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:49:25.848469Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 200us result status StatusSuccess 2025-04-17T17:49:25.849000Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-17T17:49:25.849531Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:49:25.849703Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 216us result status StatusSuccess 2025-04-17T17:49:25.850283Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> KqpStreamLookup::ReadTableDuringSplit >> KqpStreamLookup::ReadTableWithIndexDuringSplit |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> Viewer::JsonAutocompleteSchemePOST [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> Initializer::Simple |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-04-17T17:48:06.331009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:114:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:06.331484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:48:06.331542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 26912, node 1 TClient is connected to server localhost:61894 2025-04-17T17:48:43.506771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:43.507090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:43.507201Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 3837, node 2 TClient is connected to server localhost:27154 2025-04-17T17:48:59.693689Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:335:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:59.694032Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:59.694244Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19163, node 3 TClient is connected to server localhost:62164 2025-04-17T17:49:13.285385Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:339:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:13.285800Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:13.286051Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 26308, node 4 TClient is connected to server localhost:13513 2025-04-17T17:49:25.681754Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:25.682127Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:25.682275Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 18844, node 5 TClient is connected to server localhost:20970 >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> KqpScanArrowInChanels::AggregateByColumn [GOOD] |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Viewer::JsonStorageListingV2 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 12650, MsgBus: 13665 2025-04-17T17:48:57.195834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340564168533358:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:57.195898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140c/r3tmp/tmpJMgmK6/pdisk_1.dat 2025-04-17T17:48:58.042450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:58.042580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:58.048205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:58.090742Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12650, node 1 
2025-04-17T17:48:58.234675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:58.234695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:58.234701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:58.234811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13665 TClient is connected to server localhost:13665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:48:59.112959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.144222Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:48:59.161045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.341229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.515594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.608925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:02.215460Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340564168533358:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:02.215715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:02.264208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340585643371500:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.264307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.670722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.708673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.754762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.847593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.929293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:03.028706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:03.123492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:03.222740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340589938339472:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:03.222819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:03.223102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340589938339477:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:03.227267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:03.245857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:49:03.247446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340589938339479:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:03.322954Z node 1 :TX_PROXY ERROR: Actor# [1:7494340589938339536:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:07.231582Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912147216, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 62855, MsgBus: 61377 2025-04-17T17:49:08.239006Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340610076208472:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:08.239916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140c/r3tmp/tmpjZ1n7m/pdisk_1.dat 2025-04-17T17:49:08.386873Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:08.413578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:08.422364Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:08.424075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62855, node 2 2025-04-17T17:49:08.502727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:08.502754Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:08.502761Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:08.502882Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61377 TClient is connected to server localhost:61377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:08.907462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:08.914866Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:49:08.924602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.257688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.298511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.348193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.415585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.468829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.556370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340665685668608:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:20.556456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:20.556667Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340665685668613:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:20.560194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:20.576508Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494340665685668615:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:49:20.673825Z node 3 :TX_PROXY ERROR: Actor# [3:7494340665685668672:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:20.754458Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494340644210829860:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:20.754518Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:22.969457Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912162448, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 22993, MsgBus: 7033 2025-04-17T17:49:24.022527Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340680359843263:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:24.024777Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140c/r3tmp/tmp7BJx60/pdisk_1.dat 2025-04-17T17:49:24.175150Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:24.201278Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:24.201397Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:24.202745Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22993, node 4 2025-04-17T17:49:24.254615Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:24.254632Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:24.254638Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:24.254740Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7033 TClient is connected to server localhost:7033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:24.743648Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:24.761838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:24.849840Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:49:25.063925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:49:25.142027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:27.685129Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340693244746917:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:27.685263Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:27.741503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:27.774719Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:27.803865Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:27.835220Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:27.907396Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:27.941613Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:27.983796Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:28.050867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340697539714877:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:28.050995Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:28.051300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340697539714882:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:28.056501Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:28.065267Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494340697539714884:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:49:28.168951Z node 4 :TX_PROXY ERROR: Actor# [4:7494340697539714942:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:29.022645Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494340680359843263:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:29.022737Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:30.333809Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912169658, txId: 281474976715672] shutting down >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |75.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> TPopulatorTestWithResets::UpdateAck |75.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |75.7%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |75.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPopulatorTest::Boot |75.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] >> TPopulatorTestWithResets::UpdateAck [GOOD] >> OlapEstimationRowsCorrectness::TPCH21 >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> TExportToS3Tests::ExportStartTime >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-04-17T17:49:33.246489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:33.246537Z node 1 :IMPORT WARN: Table profiles were not loaded |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-04-17T17:49:33.149725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:33.149804Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-17T17:49:33.225526Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-04-17T17:49:33.225612Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-17T17:49:33.230592Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:49:33.230747Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:49:33.231518Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-04-17T17:49:33.231583Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-17T17:49:33.231739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-17T17:49:33.231914Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T17:49:33.234475Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-04-17T17:49:33.234514Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-17T17:49:33.235043Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-04-17T17:49:33.235092Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-17T17:49:33.257052Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-04-17T17:49:33.257129Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Successful handshake: replica# [1:12:2059] 2025-04-17T17:49:33.257174Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:33.257228Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-17T17:49:33.257243Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Successful handshake: replica# [1:15:2062] 2025-04-17T17:49:33.257261Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: 
[1:97:2123] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:33.257283Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-17T17:49:33.257296Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Successful handshake: replica# [1:18:2065] 2025-04-17T17:49:33.257307Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:33.257364Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-04-17T17:49:33.257426Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-04-17T17:49:33.257532Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:96:2122] 2025-04-17T17:49:33.257615Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-17T17:49:33.257765Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:96:2122] 2025-04-17T17:49:33.257826Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-04-17T17:49:33.257900Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-04-17T17:49:33.258004Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-17T17:49:33.258070Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-04-17T17:49:33.258154Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:97:2123] 2025-04-17T17:49:33.258222Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-04-17T17:49:33.258261Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-17T17:49:33.258333Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:97:2123] 2025-04-17T17:49:33.258365Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-04-17T17:49:33.258418Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-04-17T17:49:33.258471Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-17T17:49:33.258525Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-04-17T17:49:33.258620Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:98:2124] 2025-04-17T17:49:33.258654Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-04-17T17:49:33.258695Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-17T17:49:33.258745Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:98:2124] 2025-04-17T17:49:33.258765Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-04-17T17:49:33.258835Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-04-17T17:49:33.258900Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-04-17T17:49:33.258942Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-17T17:49:33.259021Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 0 2025-04-17T17:49:33.259052Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-04-17T17:49:33.259107Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 
2025-04-17T17:49:33.259147Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-04-17T17:49:33.259191Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:49:33.259232Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-04-17T17:49:33.259285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-04-17T17:49:33.259352Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 0 2025-04-17T17:49:33.259372Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-04-17T17:49:33.259396Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-17T17:49:33.259426Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-17T17:49:33.259467Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 0 2025-04-17T17:49:33.259485Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-04-17T17:49:33.259507Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:49:33.259539Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-17T17:49:33.259580Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-17T17:49:33.260241Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-04-17T17:49:33.260296Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-04-17T17:49:33.260464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 0 2025-04-17T17:49:33.260488Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-04-17T17:49:33.260518Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-17T17:49:33.260597Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-17T17:49:33.260631Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-17T17:49:33.260674Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-17T17:49:33.260879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 0 2025-04-17T17:49:33.260901Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-04-17T17:49:33.261125Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:49:33.261145Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 2025-04-17T17:49:33.261275Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 0 2025-04-17T17:49:33.261293Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-04-17T17:49:33.261418Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-17T17:49:33.261439Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestWaitNotification: OK eventTxId 100 >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> Viewer::TenantInfo5kkTablets [GOOD] |75.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest 
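
Editor's aside on the populator trace above: the log shows the populator ([1:95:2121]) pushing TEvUpdate for each path version to three replica actors, collecting TEvUpdateAck {PathId, Version} back from them, acking the version to the schemeshard once enough replicas have confirmed it ("Ack update: ack to# [1:70:2109] ... version# 4"), and discarding late confirmations ("Ack for unknown update (already acked?)"). The sketch below is a minimal, hypothetical illustration of that ack bookkeeping, not the actual NKikimr::NSchemeBoard code; the names (UpdateAckTracker, OnReplicaAck) and the majority-quorum rule are assumptions made for the example.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>
#include <set>
#include <tuple>
#include <utility>

struct UpdateAckTracker {
    size_t ReplicaCount;
    // Highest version already acked upstream, per (ownerId, localPathId).
    std::map<std::pair<uint64_t, uint64_t>, uint64_t> Acked;
    // Replicas that confirmed a still-pending (ownerId, localPathId, version).
    std::map<std::tuple<uint64_t, uint64_t, uint64_t>, std::set<int>> Pending;

    // Returns true exactly once per version: when a majority of replicas has
    // confirmed it (assumed quorum rule) and the version may be acked upstream.
    bool OnReplicaAck(uint64_t owner, uint64_t path, uint64_t version, int replica) {
        const auto key = std::make_pair(owner, path);
        const auto it = Acked.find(key);
        if (it != Acked.end() && it->second >= version) {
            return false;  // late or duplicate ack, cf. "already acked?" in the log
        }
        auto& confirms = Pending[std::make_tuple(owner, path, version)];
        confirms.insert(replica);
        if (confirms.size() * 2 > ReplicaCount) {  // majority quorum reached
            Acked[key] = version;
            Pending.erase(std::make_tuple(owner, path, version));
            return true;
        }
        return false;
    }
};

int main() {
    UpdateAckTracker tracker{3, {}, {}};
    // Version 4 of path [72057594046678944, 1], confirmed by replicas 0..2.
    std::cout << tracker.OnReplicaAck(72057594046678944ull, 1, 4, 0);  // 0: 1/3, no quorum yet
    std::cout << tracker.OnReplicaAck(72057594046678944ull, 1, 4, 1);  // 1: 2/3, ack upstream
    std::cout << tracker.OnReplicaAck(72057594046678944ull, 1, 4, 2);  // 0: already acked
    std::cout << "\n";
}

This also matches why TPopulatorTestWithResets::UpdateAck passes after replica resets: a resumed replica re-sends acks, and the tracker simply drops the ones for versions already committed.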
|75.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 25681, MsgBus: 16422 2025-04-17T17:48:56.936379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340560460586822:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00143c/r3tmp/tmpBqXnbS/pdisk_1.dat 2025-04-17T17:48:57.357830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:48:57.725391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:57.725505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:57.727391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:57.794431Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25681, node 1 2025-04-17T17:48:58.072433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:58.072465Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:58.072507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:58.072642Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16422 TClient is connected to server localhost:16422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:48:59.109231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:48:59.131222Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:48:59.140264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.368295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.533755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.623022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:01.938984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340581935424909:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:01.939080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340560460586822:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:01.939158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:01.939200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:02.300061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.377487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.422156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.471190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.520430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.568402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.659248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.816233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340586230392877:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.816373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.818285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340586230392886:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.822875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:02.834552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340586230392888:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:02.903501Z node 1 :TX_PROXY ERROR: Actor# [1:7494340586230392942:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:06.294949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:49:07.232361Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912147209, txId: 281474976710674] shutting down 864000000000 Trying to start YDB, gRPC: 23766, MsgBus: 28145 2025-04-17T17:49:08.145655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340612011770485:2079];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00143c/r3tmp/tmpsSagPw/pdisk_1.dat 2025-04-17T17:49:08.249692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:49:08.295988Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:08.306416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:08.306511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:08.315319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23766, node 2 2025-04-17T17:49:08.529407Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:08.529429Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:08.529434Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:08.529530Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28145 TClient is connected to server localhost:28145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
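
Editor's aside on the recurring WorkloadService pattern in these dumps: TPoolFetcherActor reports NOT_FOUND for pool "default", TPoolCreatorActor creates it, a concurrent creator may win the race (the TX_PROXY "error: path exist, request accepts it" message is accepted as success), and the actor then re-reads the pool ("Scheduled retry ... doublechecking"). Below is a minimal ensure-exists sketch of that idempotent create-with-retry loop, inferred from the log rather than taken from the YDB sources; EStatus, EnsurePoolExists, and the backoff values are invented for illustration.

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Hypothetical status codes standing in for the real scheme/KQP statuses.
enum class EStatus { Ok, NotFound, AlreadyExists, Retryable };

EStatus EnsurePoolExists(const std::function<EStatus()>& fetch,
                         const std::function<EStatus()>& create,
                         int maxAttempts = 5) {
    using namespace std::chrono_literals;
    auto backoff = 50ms;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        const EStatus st = fetch();
        if (st == EStatus::Ok) {
            return st;
        }
        if (st == EStatus::NotFound) {
            const EStatus created = create();
            // Losing the creation race is fine: "path exist, request accepts it".
            if (created == EStatus::Ok || created == EStatus::AlreadyExists) {
                continue;  // "doublechecking": re-fetch before declaring success
            }
        }
        std::this_thread::sleep_for(backoff);  // transient error: back off and retry
        backoff *= 2;
    }
    return EStatus::Retryable;
}

int main() {
    int fetches = 0;
    auto fetch = [&] { return ++fetches < 2 ? EStatus::NotFound : EStatus::Ok; };
    auto create = [] { return EStatus::AlreadyExists; };  // a concurrent creator won
    std::cout << (EnsurePoolExists(fetch, create) == EStatus::Ok ? "ok" : "fail") << "\n";
}

Read this way, the repeated "Resource pool default not found" WARN lines in each test's stderr are expected first-run noise, not failures: every block above ends with the pool present and the test marked [GOOD].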
2025-04-17T17:49:09.138403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:09.162984Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:49:09.182466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Ope ... proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.301167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.364946Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.448028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.536145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.593451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:20.692575Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340661782899986:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:20.692664Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:20.693001Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340661782899991:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:20.698736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:20.706411Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494340640308061035:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:20.706493Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:20.710858Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494340661782899993:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:20.811193Z node 3 :TX_PROXY ERROR: Actor# [3:7494340661782900053:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:24.811187Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912162931, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 5864, MsgBus: 63570 2025-04-17T17:49:25.781582Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340683096234695:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:25.781644Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00143c/r3tmp/tmpye0VYS/pdisk_1.dat 2025-04-17T17:49:25.927268Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5864, node 4 2025-04-17T17:49:25.962748Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:25.963075Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:25.964510Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:25.996178Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:25.996213Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:25.996222Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:25.996354Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63570 TClient is connected to server localhost:63570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:26.521904Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:26.543774Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:26.616651Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:49:26.803662Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.876229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:29.614656Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340700276105662:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:29.614775Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:29.683061Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.721309Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.757369Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.793634Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.832670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.868445Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.918046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:30.013421Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340704571073619:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:30.013537Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:30.013865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340704571073624:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:30.023848Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:30.050356Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494340704571073626:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:49:30.131243Z node 4 :TX_PROXY ERROR: Actor# [4:7494340704571073684:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:30.781891Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494340683096234695:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:30.781989Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:32.013337Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912171681, txId: 281474976715672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 28007, MsgBus: 27745 2025-04-17T17:48:56.604997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340561109433288:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:56.605680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001462/r3tmp/tmphyBEPC/pdisk_1.dat 2025-04-17T17:48:57.328356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:57.328491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:57.332847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:57.353739Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28007, node 1 2025-04-17T17:48:57.531116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:57.531133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:57.531152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:57.531250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27745 TClient is connected to server localhost:27745 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:48:58.263704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:58.322376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:48:58.340999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:58.576395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:48:58.945183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:48:59.063362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:01.115197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340582584271386:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:01.115362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:01.615038Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340561109433288:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:01.615202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:02.033817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.080948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.144378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.210836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.318679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.401206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.484715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:02.687097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340586879239371:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.687173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.687544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340586879239376:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:02.701276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:02.798271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340586879239378:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:02.892995Z node 1 :TX_PROXY ERROR: Actor# [1:7494340586879239441:3573] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:09.923533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912147818, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 18719, MsgBus: 2637 2025-04-17T17:49:10.800153Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340621432394896:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:10.800315Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001462/r3tmp/tmpNj9TVF/pdisk_1.dat 2025-04-17T17:49:11.073850Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:11.087319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:11.087427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:11.093398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18719, node 2 2025-04-17T17:49:11.306795Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:11.306818Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:11.306825Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:11.306948Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2637 TClient is connected to server localhost:2637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:12.003666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:49:12.031107Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:12.041288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:12.134261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operatio ... oposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:23.609675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:23.640020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:23.677540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:23.720221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:23.796947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:23.872365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340677150518207:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:23.872460Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340677150518212:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:23.872486Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:23.876056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:23.887139Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494340677150518214:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:49:23.961631Z node 3 :TX_PROXY ERROR: Actor# [3:7494340677150518269:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:24.464980Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494340659970646595:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:24.465065Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:27.998468Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912166130, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 65178, MsgBus: 14819 2025-04-17T17:49:28.762342Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340699758836392:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:28.762417Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001462/r3tmp/tmpCqPEuR/pdisk_1.dat 2025-04-17T17:49:28.878463Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:28.903686Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:28.903783Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:28.905053Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65178, node 4 2025-04-17T17:49:28.968671Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:28.968704Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:28.968712Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:28.968840Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14819 TClient is connected to server localhost:14819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:29.488740Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:29.496661Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:49:29.563062Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.774432Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:29.844917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:32.533283Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340716938707362:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.533396Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.600616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.634130Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.660526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.687220Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.714793Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.744784Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.779124Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.832861Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340716938708018:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.832943Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.832970Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340716938708023:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.836672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:32.846658Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494340716938708025:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:49:32.909659Z node 4 :TX_PROXY ERROR: Actor# [4:7494340716938708080:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:33.762448Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494340699758836392:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:33.762538Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:34.512204Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912174264, txId: 281474976715672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 12183, MsgBus: 62935 2025-04-17T17:48:57.338830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340563123096141:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:48:57.339038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001457/r3tmp/tmprRYqGt/pdisk_1.dat 2025-04-17T17:48:58.028798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:58.039262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:58.039353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:58.058756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12183, node 1 2025-04-17T17:48:58.282859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:58.282882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:58.282890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:58.283029Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62935 TClient is connected to server localhost:62935 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:48:59.293939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.368883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:48:59.809139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:00.030101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:00.169352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:02.346389Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340563123096141:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:02.346485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:03.303140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340588892901460:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:03.303266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:03.695834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:03.744641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:03.812771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:03.870122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:03.985416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:04.067669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:04.141706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:04.270572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340593187869428:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:04.270654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:04.271011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340593187869433:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:04.274675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:04.289911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340593187869435:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:04.362162Z node 1 :TX_PROXY ERROR: Actor# [1:7494340593187869494:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:11.365446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912148168, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 20359, MsgBus: 13011 2025-04-17T17:49:12.315632Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340629639136882:2151];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001457/r3tmp/tmprI7Evp/pdisk_1.dat 2025-04-17T17:49:12.378200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:49:12.505560Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:12.515151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:12.515236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:12.516527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20359, node 2 2025-04-17T17:49:12.614724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:12.614751Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:12.614761Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:12.614869Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13011 TClient is connected to server localhost:13011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T17:49:13.208743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:13.234758Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:49:13.256528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:13.384005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 w ... T17:49:25.827250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:25.898996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:25.968391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.014167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.086566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.179487Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340688038325102:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:26.179634Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:26.179861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494340688038325107:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:26.183594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:26.197105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494340688038325109:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:49:26.253694Z node 3 :TX_PROXY ERROR: Actor# [3:7494340688038325163:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:27.030516Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494340670858453482:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:27.030622Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:28.209120Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912167796, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 65366, MsgBus: 1367 2025-04-17T17:49:29.206777Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340701423709272:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:29.206870Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001457/r3tmp/tmpJFYgRk/pdisk_1.dat 2025-04-17T17:49:29.311861Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:29.344154Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:29.344260Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:29.345625Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65366, node 4 2025-04-17T17:49:29.419790Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:29.419812Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:29.419818Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:29.419916Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1367 TClient is connected to server localhost:1367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:29.927109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:29.936707Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:49:29.943609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:30.048489Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:30.269903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:30.347231Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:32.691940Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340714308612938:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.692027Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.741982Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.775611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.823904Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.857268Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.889708Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.925908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.995102Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:33.049734Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340718603580894:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:33.049811Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494340718603580899:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:33.049833Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:33.052880Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:33.061314Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494340718603580901:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:49:33.154032Z node 4 :TX_PROXY ERROR: Actor# [4:7494340718603580956:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:34.204358Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494340701423709272:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:34.204428Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:34.705764Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912174488, txId: 281474976715672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-04-17T17:46:05.478297Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1744911965478264 2025-04-17T17:46:06.647143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339832134021500:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:06.655594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:46:06.852282Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494339831467498395:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:06.852365Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:46:07.335545Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001865/r3tmp/tmpyvWp2z/pdisk_1.dat 2025-04-17T17:46:07.387224Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:46:07.707868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:07.934006Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:08.626680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:08.626757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:08.632408Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:46:08.650712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:08.809093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:46:08.893916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:08.938818Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 30323, node 1 2025-04-17T17:46:09.047635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:09.048086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:09.123391Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:46:09.145217Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:46:09.504316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:09.776286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001865/r3tmp/yandexCyfEbH.tmp 2025-04-17T17:46:09.776309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001865/r3tmp/yandexCyfEbH.tmp 2025-04-17T17:46:09.776536Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001865/r3tmp/yandexCyfEbH.tmp 2025-04-17T17:46:09.777921Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:46:10.635848Z INFO: TTestServer started on Port 26675 GrpcPort 30323 2025-04-17T17:46:11.592791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339832134021500:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:11.593408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:46:11.823683Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494339831467498395:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:11.826618Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:26675 PQClient connected to localhost:30323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:46:16.868379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:46:17.754544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:46:22.631346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339900853499196:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:22.631455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:22.638592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494339900853499208:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:46:22.643603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:46:22.701886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494339900853499211:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:46:22.816127Z node 1 :TX_PROXY ERROR: Actor# [1:7494339900853499301:2757] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:46:23.878546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:46:23.878781Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:46:25.688739Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494339900186975481:2324], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:46:25.716064Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MThmZmIyN2QtZDllMDlmNGUtODVhZWRhOTYtMTQ5ZDJlYzM=, ActorId: [2:7494339900186975441:2316], ActorState: ExecuteState, TraceId: 01js2dqr4cb12xtcxdmt9nst23, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:46:25.750525Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:46:25.844226Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494339900853499311:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:46:25.846103Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDVmNjIwY2EtNDJjOTQ5MDItZDU3MmY0NjctZjcwOGU4NDE=, ActorId: [1:7494339900853499169:2349], ActorState: ExecuteState, TraceId: 01js2dqr0p1928qp9dzf3dnbrg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:46:25.875207Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:46:25.962451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:46:26.924838Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494339917366844711:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:46:26.926173Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id ... :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:49:03.943475Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:49:03.943545Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:49:03.944655Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-17T17:49:03.944717Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912143944 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:49:03.944818Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session established. Init response: last_seq_no: 2 session_id: "src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2025-04-17T17:49:03.943926Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0 2025-04-17T17:49:04.952012Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494340589836550554:3206] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2025-04-17T17:49:04.980198Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494340589836550554:3206] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2025-04-17T17:49:04.980226Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494340589836550554:3206] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-04-17T17:49:30.534235Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-17T17:49:30.534303Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2025-04-17T17:49:30.535714Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-17T17:49:30.538747Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2025-04-17T17:49:30.538895Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 >>> Ready to answer: ok 2025-04-17T17:49:33.715806Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent 2025-04-17T17:49:33.716296Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session: try to update token 2025-04-17T17:49:33.716331Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-04-17T17:49:33.716850Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T17:49:33.717090Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-17T17:49:33.717446Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:49:33.717493Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:49:33.717586Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-17T17:49:33.717790Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-17T17:49:33.718020Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:49:33.718047Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:49:33.718101Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-04-17T17:49:33.718303Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-04-17T17:49:33.719215Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-04-17T17:49:33.719771Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1744912173719 2025-04-17T17:49:33.719875Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:49:33.719892Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:49:33.719904Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:49:33.719917Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:49:33.719926Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-04-17T17:49:33.719935Z node 4 :PERSQUEUE DEBUG: [PQ: 
72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-17T17:49:33.719943Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:49:33.719954Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:49:33.719968Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:49:33.720054Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:49:33.720139Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2025-04-17T17:49:33.722925Z node 4 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [4:7494340190541652229:2401] 2025-04-17T17:49:33.723045Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Topic 'rt3.dc1--test-topic' counters. CacheSize 480 CachedBlobs 3 2025-04-17T17:49:33.723051Z node 4 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 size 160 2025-04-17T17:49:33.723099Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:49:33.723139Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:49:33.723180Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-17T17:49:33.723219Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T17:49:33.723439Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-17T17:49:33.723952Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-17T17:49:33.724123Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 4000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-17T17:49:33.724155Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2025-04-17T17:49:33.724181Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session: acknowledged message 1 2025-04-17T17:49:33.724866Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0 grpc read done: success: 0 data: 2025-04-17T17:49:33.724900Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0 grpc read failed 2025-04-17T17:49:33.724924Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0 grpc closed 2025-04-17T17:49:33.724937Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0 is DEAD 2025-04-17T17:49:33.725298Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:49:33.725530Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494340589836550595:3206] destroyed 2025-04-17T17:49:33.725587Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:49:33.725713Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-04-17T17:49:33.725814Z :ERROR: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-04-17T17:49:33.725849Z :ERROR: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-04-17T17:49:33.725878Z :INFO: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session will now close 2025-04-17T17:49:33.725926Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session: aborting 2025-04-17T17:49:33.739836Z :DEBUG: [/Root] TraceId [] SessionId [src_id|900d3144-a68be053-9390fe7b-7c17c8d3_0] MessageGroupId [src_id] Write session: destroy 2025-04-17T17:49:34.271264Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715861, task: 1, CA Id [3:7494340722980538281:3485]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T17:49:34.302321Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715861, task: 1, CA Id [3:7494340722980538281:3485]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> Viewer::UseTransactionWhenExecuteDataActionQuery >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary |75.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |75.8%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |75.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |75.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TExportToS3Tests::EncryptedExport |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> IncrementalBackup::SimpleRestore >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> IncrementalBackup::SimpleBackup >> IncrementalBackup::BackupRestore >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] Test command err: Trying to start YDB, gRPC: 16788, MsgBus: 16889 2025-04-17T17:46:44.097429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494339990129748061:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:46:44.108040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fdb/r3tmp/tmpgAYW7H/pdisk_1.dat 2025-04-17T17:46:51.521828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:46:51.521913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:46:51.824916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:46:52.749904Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.123676s 2025-04-17T17:46:52.750193Z node 
1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.124190s TServer::EnableGrpc on GrpcPort 16788, node 1 2025-04-17T17:47:00.805292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339998719682773:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:00.805649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:01.728597Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494339990129748061:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:47:01.729829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:47:03.078531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:47:03.082856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:47:03.082872Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:47:03.082878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:47:03.082957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16889 TClient is connected to server localhost:16889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:47:04.945258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:47:04.959159Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:47:06.741060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340088913996419:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.741168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.741569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340088913996431:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:47:06.745210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:47:06.758113Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:47:06.758363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340088913996433:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:47:06.867037Z node 1 :TX_PROXY ERROR: Actor# [1:7494340088913996484:2358] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:47:07.458063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:47:07.829591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:07.829591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:07.829836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:07.830148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:07.830331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:07.830432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:07.830529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:07.830620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:07.830706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:07.830800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:07.830862Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:07.830905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340093208964042:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:07.831181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:07.831287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:07.831377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:07.831476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:07.831564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:07.831660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:07.831759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:07.831862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:07.832015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494340093208963991:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register ... 
tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.287028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.292799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.296964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.299447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.301493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.305892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.306949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.310901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.311923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.315590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.317785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.320883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.324057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.326525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.329999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.330808Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.335020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.336316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.340306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.342729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.345432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.349219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.350153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.356180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.356522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.363899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.371740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.375006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.377689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.382119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.383682Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.389519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.389526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.395745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.396132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.402148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.402879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.407911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.408924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.414142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.415737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.419920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.421735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.425424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.428262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.431201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:49:24.522025Z 
node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2dw5e61ntctgeq6skzwvwd", SessionId: ydb://session/3?node_id=1&id=MzFiMzVjNzAtYjY5NmY1MTktM2UzZDBmMjQtNDZiMzgzYWY=, Slow query, duration: 37.090827s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T17:49:24.798692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T17:49:24.798691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T17:49:24.799485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] >> TExportToS3Tests::EncryptedExport [GOOD] >> JsonChangeRecord::Heartbeat [GOOD] |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] >> JsonChangeRecord::DataChangeVersion [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 11768, MsgBus: 26786 2025-04-17T17:49:33.698752Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340718905093432:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:33.698818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e69/r3tmp/tmppHSekH/pdisk_1.dat 2025-04-17T17:49:33.979153Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11768, node 1 2025-04-17T17:49:34.041893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:34.042017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:34.043778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:34.043802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:34.043804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:34.043816Z 
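For readability, the DDL embedded in the KQP_SLOW_LOG entry above (escaped with \n in the log text) is the following YQL, reproduced verbatim from that entry:

CREATE TABLE t1 (
 id1 Int32 NOT NULL,
 PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
 id2 Int64 NOT NULL,
 t1_id1 Int64 NOT NULL,
 -- random_field2 Int32
 PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
 id3 Int16 NOT NULL,
 -- random_field3 Int32
 PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The 37-second duration flagged by the slow-query log is the cost of creating three column-store tables with 240 minimum partitions each under the asan build, not a failure of the statements themselves.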
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:34.043915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26786 TClient is connected to server localhost:26786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:34.449989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:34.468919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:34.575384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:34.690681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:34.769132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:36.238778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340731789997096:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.238901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.422556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.450045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.476458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.498824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.521344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.548739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.611745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.661107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340731789997759:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.661168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340731789997764:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.661181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.663881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:49:36.671767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340731789997766:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:49:36.742382Z node 1 :TX_PROXY ERROR: Actor# [1:7494340731789997823:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:37.586277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:49:37.611825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:49:37.636541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T17:49:37.659056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:49:37.685013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:49:37.715911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:34.639209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:34.639307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:34.639349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:34.639388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:34.639451Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:34.639482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:34.639543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:34.639641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:34.639911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:34.709226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:34.709316Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:34.725863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:34.726393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:34.726560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:34.735424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:34.735612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:34.736326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:34.736553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:34.739518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:34.740969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:34.741033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:34.741237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:34.741295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:34.741336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:34.741484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.748382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:34.899756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:34.900028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.900280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:34.900508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:34.900564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.903330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:34.903480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:34.903686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.903764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:34.903804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:34.903842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:34.906038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.906110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:34.906149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:34.907946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.908000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.908047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:34.908099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:34.911751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:34.913687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:34.913921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:34.914935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:34.915097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:34.915152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:34.915436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:34.915494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:34.915667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:34.915752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:34.917800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:34.917853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:34.918115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:34.918201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:34.918599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:34.918649Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:34.918753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:34.918788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:34.918832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:34.918864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:34.918900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:34.918940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:34.918975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:34.919004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:34.919082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:34.919143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:34.919181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:34.921014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:34.921136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:34.921177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 46678944, LocalPathId: 4], 18446744073709551615 2025-04-17T17:49:39.256464Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:49:39.258049Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:39.258110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:39.258305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:49:39.258426Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:39.258459Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-17T17:49:39.258496Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-17T17:49:39.259281Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:49:39.259370Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:49:39.259409Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:49:39.259460Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-17T17:49:39.259521Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:49:39.260167Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:49:39.260255Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-17T17:49:39.260282Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-17T17:49:39.260310Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 
2025-04-17T17:49:39.260339Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:49:39.260406Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-17T17:49:39.260457Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T17:49:39.263140Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:49:39.263417Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-17T17:49:39.263472Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:49:39.263511Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:49:39.263547Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:49:39.263568Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:49:39.263592Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:49:39.264841Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:49:39.264898Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:49:39.264936Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:615:2550] TestWaitNotification: OK eventTxId 103 2025-04-17T17:49:39.265770Z node 4 :EXPORT DEBUG: TExport::TTxForget, dropping export tables, info: { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:49:39.267310Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:49:39.267352Z node 4 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 103 2025-04-17T17:49:39.267399Z node 4 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T17:49:39.267446Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:49:39.267505Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 103, at schemeshard: 72057594046678944 2025-04-17T17:49:39.267559Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:49:39.267594Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710764, id# 103 2025-04-17T17:49:39.267655Z node 4 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710764 2025-04-17T17:49:39.267736Z 
node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:49:39.269337Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-103" } Internal: true } TxId: 281474976710764 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:39.269439Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/export-103, pathId: 0, opId: 281474976710764:0, at schemeshard: 72057594046678944 2025-04-17T17:49:39.269557Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710764:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:49:39.271600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710764, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:49:39.271720Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710764, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, operation: DROP DIRECTORY, path: /MyRoot/export-103 2025-04-17T17:49:39.271823Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-04-17T17:49:39.271885Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-04-17T17:49:39.271933Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:49:39.271964Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-04-17T17:49:39.272018Z node 4 :EXPORT TRACE: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-04-17T17:49:39.272113Z node 4 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 
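The sequence above shows the export cleanup being idempotent: TExport proposes ESchemeOpRmDir for /MyRoot/export-103, the schemeshard answers StatusPathDoesNotExist but includes PathDropTxId: 281474976710763, and the export state machine switches to waiting for that earlier transaction instead of treating the reply as an error. A simplified model of that decision — hypothetical types and names, not YDB's actual code:

```cpp
#include <cstdint>
#include <iostream>
#include <optional>

// Simplified statuses for a drop proposal, mirroring the log above.
enum class EStatus { Accepted, PathDoesNotExist, Other };

struct TProposeResult {
    EStatus Status;
    uint64_t TxId;                        // our own drop transaction
    std::optional<uint64_t> PathDropTxId; // tx that already dropped the path
};

// Decide which transaction (if any) the cleanup should wait for.
std::optional<uint64_t> TxToWaitFor(const TProposeResult& r) {
    switch (r.Status) {
        case EStatus::Accepted:
            return r.TxId; // normal case: wait for our own drop
        case EStatus::PathDoesNotExist:
            // Path is already gone (here dropped by txId 281474976710763);
            // subscribing to that tx keeps the cleanup idempotent.
            return r.PathDropTxId;
        default:
            return std::nullopt; // a real error, surface it to the caller
    }
}

int main() {
    TProposeResult r{EStatus::PathDoesNotExist, 281474976710764ULL, 281474976710763ULL};
    if (auto tx = TxToWaitFor(r)) {
        std::cout << "wait for txId " << *tx << "\n"; // 281474976710763
    }
}
```

The payoff is visible a few records later: the subscription to the already-completed txId 281474976710763 is answered immediately ("unknown transaction" followed by TEvNotifyTxCompletionResult), and the export finishes without a retry loop.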
2025-04-17T17:49:39.273675Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T17:49:39.273827Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-17T17:49:39.273928Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-17T17:49:39.273995Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-17T17:49:39.274049Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T17:49:39.274083Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T17:49:39.274115Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-17T17:49:39.275595Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-04-17T17:49:39.275801Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:49:39.275854Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:49:39.276223Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:49:39.276316Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:49:39.276353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1269:3144] TestWaitNotification: OK eventTxId 103 >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> KqpStreamLookup::ReadTableDuringSplit [GOOD] |75.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] |75.9%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTestsWithReboots::CreateTable >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2025-04-17T17:49:31.678981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:31.679501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:31.679565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001871/r3tmp/tmpFZoNFF/pdisk_1.dat 2025-04-17T17:49:32.236061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.326617Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:32.365853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:32.365968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:32.413289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:32.511923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.979155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:739:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.979241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.979308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.989242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:49:33.146998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:49:33.210940Z node 1 :TX_PROXY ERROR: Actor# [1:827:2672] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:40.059695Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2dxhxhcscmryeftzgwbzr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIyNjlkMTItZTJhZTFjMmMtOTMxMTlhMGQtMmVkOWQwMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:49:41.625274Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2dxsa94zmtjvp8jk9ggckk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM1YWU5ZmMtYWM3N2YxMGUtZTJjNDhlZGItOTA0OTMxZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR 2025-04-17T17:49:41.676494Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2dxsa94zmtjvp8jk9ggckk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM1YWU5ZmMtYWM3N2YxMGUtZTJjNDhlZGItOTA0OTMxZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |75.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.9%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardTTLTests::CheckCounters |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> StreamCreator::WithResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2025-04-17T17:49:31.464872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:31.465505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:31.465566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00185f/r3tmp/tmptWeJ8O/pdisk_1.dat 2025-04-17T17:49:32.228506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.319343Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:32.367157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:32.368769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:32.413286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:32.513013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:32.979241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:790:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.979361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.979432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:32.989112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:49:33.146965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:804:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:49:33.217704Z node 1 :TX_PROXY ERROR: Actor# [1:882:2715] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:41.436041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2dxhxh3gkkfq9gkn60gqc7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY4N2NlMjItNmMwNTliYi0xNWQxMDU0ZS1hMmI2M2RhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:49:41.617571Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2dxhxh3gkkfq9gkn60gqc7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY4N2NlMjItNmMwNTliYi0xNWQxMDU0ZS1hMmI2M2RhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:49:42.033173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2dxtgcbn9sqw7eppayn77f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMzYjFmMTQtOTg0MGE1ZmQtNWY4Nzk3M2ItOTEzMDViZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] |76.0%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAttributes >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:43.617840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:43.617933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:43.618008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:43.618045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:43.618085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:43.618117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:43.618210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:43.618292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:43.618625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:43.689119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:43.689168Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:43.706911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:43.707392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:43.707570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:43.716911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:43.717095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:43.717737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:43.717945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:43.724111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:43.725406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:43.725471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:43.725658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:43.725704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:43.725760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:43.725890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.732856Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:43.875996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:43.876226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.876409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:43.876689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:43.876743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.880280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:43.880422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:43.880606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.880673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:43.880724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:43.880760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:43.883060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.883121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:43.883164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:43.885661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.885710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.885777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:43.885828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:43.889491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:43.892222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:43.892371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:43.893139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:43.893294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:43.893342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:43.893599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:43.893656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:43.893840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:43.893961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:43.896253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:43.896304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:43.896486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:43.896543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:43.896870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.896918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:43.897025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:43.897060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:43.897098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:43.897126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:43.897161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:43.897205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:43.897239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:43.897266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:43.897323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:43.897382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:43.897424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:43.899065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:43.899176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:43.899222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:49:43.899275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:49:43.899310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:43.899394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:49:43.903563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:49:43.904184Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T17:49:43.905477Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T17:49:43.923800Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T17:49:43.926489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:43.926838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.926977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2025-04-17T17:49:43.927386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2025-04-17T17:49:43.928206Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:43.931105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:43.931246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-17T17:49:43.931737Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::MultiRestore |76.0%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_ttl/unittest >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> ViewerTopicDataTests::TopicDataTest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TKeyValueTest::TestCopyRangeWorks >> TSchemeShardTTLTests::TtlTiersValidation >> TSchemeShardTTLTests::ConditionalErase >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> TKeyValueTest::TestObtainLockNewApi >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> TKeyValueTest::TestWriteReadPatchRead >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:45.719068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:45.719163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.719210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:45.719261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:45.719300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:45.719335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
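The TTL suite above exercises both directions: TtlTiersValidation and AlterTableShouldSucceedOnIndexedTable accept a table whose TTLSettings carry Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 }, while CreateTableShouldFailOnUnspecifiedTTL rejects an empty TTLSettings { } with "TTL status must be specified". From the client side, the enabled case looks roughly like this — a sketch assuming the YDB C++ SDK's TTableBuilder/TTtlSettings API, with placeholder endpoint, database, and table path:

```cpp
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/datetime/base.h>
#include <util/stream/output.h>

int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2136") // placeholder endpoint
        .SetDatabase("/MyRoot");              // placeholder database
    NYdb::TDriver driver(config);
    NYdb::NTable::TTableClient client(driver);

    auto session = client.GetSession().GetValueSync().GetSession();

    // Mirrors the passing tests: TTL on "modified_at" with a one-hour
    // expiration, matching ExpireAfterSeconds: 3600 in the log.
    auto desc = NYdb::NTable::TTableBuilder()
        .AddNullableColumn("key", NYdb::EPrimitiveType::Uint64)
        .AddNullableColumn("modified_at", NYdb::EPrimitiveType::Timestamp)
        .SetPrimaryKeyColumn("key")
        .SetTtlSettings(NYdb::NTable::TTtlSettings("modified_at", TDuration::Hours(1)))
        .Build();

    auto status = session.CreateTable("/MyRoot/TTLEnabledTable", std::move(desc))
        .GetValueSync();
    Cerr << status.GetIssues().ToString() << Endl;
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}
```

Leaving SetTtlSettings out simply creates a table without TTL; the failing test instead drives the raw scheme operation with an empty TTLSettings message, a shape the builder API would not normally produce, which is why it is provoked at the scheme-op level in the test.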
2025-04-17T17:49:45.719399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.719462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:45.719759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:45.795447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:45.795501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:45.809679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:45.810079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:45.810246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:45.818494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:45.818671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:45.819225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:45.819438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:45.822355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:45.823469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:45.823536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:45.823704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:45.823746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:45.823794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:45.823919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.829922Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:45.943957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:45.944178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.944366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:45.944563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-04-17T17:49:45.944621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.946603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:45.946725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:45.946890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.946940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:45.946973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:45.947008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:45.948660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.948709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:45.948739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:45.950366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.950410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.950451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:45.950494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:45.953683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:45.955440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:45.955606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:45.956457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:45.956593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:45.956636Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:45.956886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:45.956932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:45.957081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:45.957216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:45.961045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:45.961110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:45.961282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:45.961320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:45.961677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.961735Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:45.961822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:45.961852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:45.961890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:45.961915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:45.961948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:45.961997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:45.962030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:45.962057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:45.962123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:45.962158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:45.962223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:45.963900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:45.963994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:45.964061Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, co ... ->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-17T17:49:46.265342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.265446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:46.265495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:49:46.265816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:49:46.265967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:49:46.269472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:46.269518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:46.269812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.269859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:49:46.270210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.270278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:49:46.271013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:49:46.271125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:49:46.271159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:49:46.271202Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T17:49:46.271269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:49:46.271344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:49:46.273899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:49:46.285859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1183 } } 2025-04-17T17:49:46.285898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T17:49:46.285985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1183 } } 2025-04-17T17:49:46.286057Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1183 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:49:46.286670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:46.286704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T17:49:46.286792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:46.286828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:49:46.286891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T17:49:46.286949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.286974Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.287011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:49:46.287055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:49:46.288881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.289172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.289387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.289418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:49:46.289498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:49:46.289523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:49:46.289552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:49:46.289577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:49:46.289606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:49:46.289696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] message: TxId: 102 2025-04-17T17:49:46.289744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:49:46.289777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:49:46.289801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:49:46.289893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:49:46.291006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:49:46.291042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:462:2423] TestWaitNotification: OK eventTxId 102 2025-04-17T17:49:46.291464Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:49:46.291667Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 209us result status StatusSuccess 2025-04-17T17:49:46.292027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: 
false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:45.804270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:45.804376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.804439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:45.804485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:45.804534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:45.804562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:45.804642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.804717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:45.805072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:45.894674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:45.894725Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:45.918841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:45.919308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-17T17:49:45.919501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:45.946040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:45.946209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:45.946784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:45.947004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:45.951964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:45.953025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:45.953092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:45.953362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:45.953411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:45.953449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:45.953555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.962885Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:46.069548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:46.069729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.069853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:46.070001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:46.070036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.072777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.072884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:46.073005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.073043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:46.073074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:46.073095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:46.074444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.074485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:46.074510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:46.075761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.075792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.075827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.075857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.077972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:46.079828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:46.079955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:46.080534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.080628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:46.080658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.080835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:46.080867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.080985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:46.081041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:46.082699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T17:49:46.082728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:46.082855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.082882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:46.083151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.083186Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:46.083267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.083289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.083312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.083340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.083377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:46.083411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.083435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:46.083452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:46.083499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:46.083522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:46.083551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:46.084629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.084703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.084738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6.267523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:49:46.267653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.267679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T17:49:46.267706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T17:49:46.268566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.268617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:49:46.269419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:46.269522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:46.269548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:49:46.269580Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:49:46.269608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:46.270116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:46.270164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:49:46.270206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:49:46.270230Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:49:46.270254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:49:46.270315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T17:49:46.270823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1292 } } 2025-04-17T17:49:46.270858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T17:49:46.270962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1292 } } 2025-04-17T17:49:46.271039Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1292 } } 2025-04-17T17:49:46.271828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T17:49:46.271868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T17:49:46.271983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T17:49:46.272040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:49:46.272107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T17:49:46.272165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.272195Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.272224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:49:46.272257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T17:49:46.275825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:49:46.276165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:49:46.276788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.277191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.277523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.277594Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:49:46.277735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:49:46.277785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:46.277836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:49:46.277899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:46.277938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T17:49:46.278023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 101 2025-04-17T17:49:46.278073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:49:46.278115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:49:46.278149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:49:46.278298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:49:46.279963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:49:46.280018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2313] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T17:49:46.283310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:46.283509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.283814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2025-04-17T17:49:46.285778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:46.285944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-17T17:49:46.289074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } 
Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:46.289279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.289603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2025-04-17T17:49:46.291732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:46.291847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |76.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |76.0%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-04-17T17:49:46.837541Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T17:49:46.837606Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744912186836 ErrorReason# 2025-04-17T17:49:46.845817Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T17:49:46.845905Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744912186845 ErrorReason# 2025-04-17T17:49:46.851619Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T17:49:46.851708Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. 
KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1744912186851 ErrorReason# >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestCleanUpDataOnEmptyTablet >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |76.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> StreamCreator::WithResolvedTimestamps [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] Test command err: 2025-04-17T17:49:41.115801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:41.116275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:41.116330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00172a/r3tmp/tmphBtJO2/pdisk_1.dat 2025-04-17T17:49:41.432994Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.433084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.433143Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.433260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-17T17:49:41.433299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:49:41.596413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T17:49:41.596689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.596904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:49:41.597172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:49:41.597269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.597386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.598218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.598377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:49:41.598447Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.598492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.598702Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.598751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.598855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.598934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:49:41.598980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:41.599017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:41.599125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.599647Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.599717Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.599912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.599960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.600010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.600060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:49:41.600097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:41.600198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.600668Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.600705Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.600848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.600894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.600954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.600993Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.601051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:49:41.601091Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.601144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:41.605420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:41.606013Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.606067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-17T17:49:41.606282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:49:41.607811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:49:41.607869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:49:41.607928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-17T17:49:41.608083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-17T17:49:41.608608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.608671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.608712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.608933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-17T17:49:41.608996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:49:41.609079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.609126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T17:49:41.609167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.671957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-17T17:49:41.672106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T17:49:41.672206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:41.673067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T17:49:41.673167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-17T17:49:41.714685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:41.714828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:41.727463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:41.801825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-17T17:49:41.802541Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.802591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.802641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.802797Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-17T17:49:41.802836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:49:41.802908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.803046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... eration: 1 2025-04-17T17:49:49.089510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:49.089550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-17T17:49:49.089585Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:49.089872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:49.089902Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:49.090246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:49.090294Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:49.101026Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:49.101102Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-17T17:49:49.101181Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:49.101262Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:49.101480Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [2:1007:2796], Recipient [2:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:49.101510Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:49.101541Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:49.101755Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [2:739:2610], Recipient [2:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 739 RawX2: 
8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:49.101801Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-17T17:49:49.101895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:49.101942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 0 2025-04-17T17:49:49.102089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:49.102144Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-17T17:49:49.102239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:49.102298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:49.102350Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:49.102394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-17T17:49:49.102436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-17T17:49:49.102467Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 129 -> 240 2025-04-17T17:49:49.102588Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:49.103060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:49.103096Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:49.103132Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-04-17T17:49:49.103237Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:954:2752] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-17T17:49:49.103293Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:739:2610] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-17T17:49:49.103415Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-17T17:49:49.103491Z node 2 :TX_DATASHARD 
DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:49:49.103685Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-04-17T17:49:49.103741Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T17:49:49.103905Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:409:2404], Recipient [2:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:49.103955Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:49.104013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:49.104056Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0ProgressState, operation type TxCopyTable 2025-04-17T17:49:49.104092Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:49.104122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T17:49:49.104155Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 0, blocked: 1 2025-04-17T17:49:49.104235Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-17T17:49:49.104275Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 240 -> 240 2025-04-17T17:49:49.104751Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:49.104790Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-04-17T17:49:49.104896Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:409:2404], Recipient [2:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:49.104928Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:49.104972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:49.105023Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:0 ProgressState 2025-04-17T17:49:49.105144Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:49.105184Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-17T17:49:49.105226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-17T17:49:49.105274Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-17T17:49:49.105313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-17T17:49:49.105360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: true 2025-04-17T17:49:49.105432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:934:2736] message: TxId: 281474976715662 2025-04-17T17:49:49.105485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-17T17:49:49.105534Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-17T17:49:49.105572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-17T17:49:49.105722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-17T17:49:49.105753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-04-17T17:49:49.106094Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:49.106153Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:934:2736] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-17T17:49:49.106692Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [2:941:2742], Recipient [2:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:49.106733Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:49.106763Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T17:49:49.225929Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [2:1034:2813], serverId# [2:1035:2814], sessionId# [0:0:0] 2025-04-17T17:49:49.226049Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2dy1nrfp42a2d3qmck91bd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTM1NDllYTQtZGVlMGJiM2ItNWM4NzY3YmItZTBkYzUxZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> TSchemeShardTTLTests::ConditionalErase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-04-17T17:49:44.527494Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340768322286184:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:44.527560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ad3/r3tmp/tmprzmEVN/pdisk_1.dat 2025-04-17T17:49:45.255718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:45.283712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:45.283852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:45.318754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:45.645758Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.242174s 2025-04-17T17:49:45.660853Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.257285s TClient is connected to server localhost:6446 TServer::EnableGrpc on GrpcPort 12407, node 1 2025-04-17T17:49:46.294965Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:46.294992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:46.294999Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:46.295224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:49:47.360894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:47.577440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912188040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912187480 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912188040 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-17T17:49:48.126002Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:49:48.126137Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:49:48.126147Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:49:48.126718Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:49:48.652870Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744912188040, tx_id: 281474976710658 } } } 2025-04-17T17:49:48.661781Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:49:48.676476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:49:48.677322Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T17:49:48.677348Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T17:49:48.704107Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T17:49:48.704147Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-17T17:49:48.716738Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-17T17:49:48.905361Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7494340785502156295:2347] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T17:49:48.945460Z node 1 :REPLICATION_CONTROLLER 
TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T17:49:48.945492Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-17T17:49:48.975704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:48.989652Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T17:49:48.989674Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912188040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:49:45.925231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:45.925339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.925403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:45.925438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:45.925478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:45.925508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:45.925565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.925630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-17T17:49:45.927416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:46.023322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:46.023390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:46.046057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:46.046248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:46.046432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:46.058294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:46.058682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:46.059224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.059503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:46.063327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.064166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:46.064208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.064298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:46.064328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:46.064355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:46.064486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.069912Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:46.170648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:46.170882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.171061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:46.171289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:46.171338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.173795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.173919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:46.174127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.174206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:46.174246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:46.174283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:46.176326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.176384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:46.176419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:46.177927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.177958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.177995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.178041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.180496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:46.181833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:46.181972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:46.182675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.182756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:46.182787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.182985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:46.183024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.183126Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:46.183193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:46.185016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:46.185053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:46.185198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.185235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:46.185448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.185481Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:46.185575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.185603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.185632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.185654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.185688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:46.185755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.185789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:46.185810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:46.185859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:46.185885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:46.185914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:46.187605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.187730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.187794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
.202236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 6] state 'Ready' dataSize 627 rowCount 2 cpuUsage 0 2025-04-17T17:49:50.324878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-17T17:49:50.325013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.325148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.325341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640240000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:49:50.325425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640240000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:49:50.325472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640240000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:49:50.325508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640240000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:49:50.325545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040240000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:49:50.325579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640240000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:49:50.326013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:49:50.326658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T17:49:50.327190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:49:50.327594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:49:50.327767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-17T17:49:50.328024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: 
tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-17T17:49:50.334801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.334870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:49:50.335552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.335604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T17:49:50.337432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.337469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:49:50.348742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.348799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-17T17:49:50.349413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.349445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:49:50.350347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.350506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.350578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.241000Z, at schemeshard: 72057594046678944 2025-04-17T17:49:50.350678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.350755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.350855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.350888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.241000Z, at schemeshard: 72057594046678944 2025-04-17T17:49:50.350925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.350951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.241000Z, at schemeshard: 72057594046678944 2025-04-17T17:49:50.352777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.352850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.352948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.352989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.353009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.353061Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.353095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.243000Z, at schemeshard: 72057594046678944 2025-04-17T17:49:50.353132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.353178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.353203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.243000Z, at schemeshard: 72057594046678944 2025-04-17T17:49:50.353240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.429895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2025-04-17T17:49:50.430076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-17T17:49:50.430204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:49:50.430356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-04-17T17:49:50.430437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-17T17:49:50.430491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:49:50.430532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-04-17T17:49:50.430579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2025-04-17T17:49:50.430639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2025-04-17T17:49:50.430739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409549 2025-04-17T17:49:50.430791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2025-04-17T17:49:50.430833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2025-04-17T17:49:50.430873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409548 2025-04-17T17:49:50.430926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 627 row count 2 
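Editor's note: each conditional-erase request above carries a WallClockTimestamp threshold in microseconds since the Unix epoch (1600466640240000 corresponds to 2020-09-18T23:04:00.240Z, consistent with the "run at: 2020-09-18T23:04:00..." scheduling lines), plus a ColumnUnit and batch limits. A minimal sketch of the comparison such a request implies, under the assumption that UNIT_MICROSECONDS means the TTL column stores microseconds since the epoch; this is an illustration, not the schemeshard/datashard implementation:

#include <cstddef>
#include <cstdint>
#include <vector>

// A row expires once its TTL column value is at or before the wall-clock threshold.
bool IsExpired(uint64_t columnMicroseconds, uint64_t wallClockTimestampMicroseconds) {
    return columnMicroseconds <= wallClockTimestampMicroseconds;
}

// The request's batch limits (BatchMaxBytes / BatchMinKeys / BatchMaxKeys) cap how many
// expired rows one erase pass may collect; only a max-keys cap is modelled here.
std::vector<std::size_t> CollectExpired(const std::vector<uint64_t>& column,
                                        uint64_t threshold, std::size_t maxKeys) {
    std::vector<std::size_t> out;
    for (std::size_t i = 0; i < column.size() && out.size() < maxKeys; ++i) {
        if (IsExpired(column[i], threshold)) out.push_back(i);
    }
    return out;
}

int main() {
    const uint64_t threshold = 1600466640240000ULL;  // WallClockTimestamp from the requests above
    std::vector<uint64_t> column = {1600466640239999ULL, 1600466640240001ULL};
    auto expired = CollectExpired(column, threshold, 256);  // BatchMaxKeys: 256, as in the requests
    return expired.size() == 1 ? 0 : 1;  // only the first value is at/before the threshold
}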
2025-04-17T17:49:50.430978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2025-04-17T17:49:50.431036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409550 2025-04-17T17:49:50.448950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.449026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-17T17:49:50.454023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T17:49:50.454209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:49:50.454281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944 2025-04-17T17:49:50.454345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> IncrementalBackup::MultiRestore [GOOD] >> IncrementalBackup::E2EBackupCollection >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestRenameWorks >> ViewerTopicDataTests::TopicDataTest [GOOD] >> TKeyValueTest::TestRewriteThenLastValue >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] Test command err: 2025-04-17T17:48:56.273305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1755:2432], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:56.275212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:56.276246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:48:56.276643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1758:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:56.277202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1752:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:56.277805Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:56.278127Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:56.278247Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:48:56.278842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:48:56.279044Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1761:2373], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:56.279865Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:56.280341Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:48:56.283183Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:833:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:56.283818Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:56.284130Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:48:56.864289Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:57.129382Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:48:57.187195Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:48:57.775553Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 1034, node 1 TClient is connected to server localhost:11776 2025-04-17T17:48:58.204935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:58.205013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:58.205056Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:58.205408Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:49:36.985729Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494340733350930568:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:36.985876Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:49:37.161167Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:37.187167Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:37.187281Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:37.190629Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4188, node 6 2025-04-17T17:49:37.238698Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:37.238730Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:37.238743Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:37.238966Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26257 WaitRootIsUp 'Root'... 
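Editor's note: several actors above react to a transient LookupError (the .metadata tables do not exist yet during startup) by scheduling a retry rather than failing outright. A generic sketch of that retry-with-backoff shape using only the standard library; the initial delay, multiplier, and attempt limit are assumptions for illustration, not the values YDB's workload service actually uses:

#include <chrono>
#include <functional>
#include <thread>

// Retry an operation with exponential backoff while it keeps reporting a retryable error.
bool RetryWithBackoff(const std::function<bool()>& op, int maxAttempts) {
    auto delay = std::chrono::milliseconds(100);  // assumed initial delay
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (op()) return true;            // success: the lookup finally resolved
        std::this_thread::sleep_for(delay);
        delay *= 2;                       // back off before the next scheduled retry
    }
    return false;                         // give up after maxAttempts
}

int main() {
    int calls = 0;
    // Simulated operation that starts succeeding on its third invocation.
    bool ok = RetryWithBackoff([&] { return ++calls >= 3; }, 5);
    return ok ? 0 : 1;
}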
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:37.604843Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:37.629145Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:49:37.631835Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:40.107039Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T17:49:40.107112Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T17:49:40.469799Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494340750530800446:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:40.469799Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494340750530800427:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:40.469966Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:40.474221Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:49:40.483386Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494340750530800456:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:49:40.578647Z node 6 :TX_PROXY ERROR: Actor# [6:7494340750530800507:2357] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:41.231709Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.527987Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T17:49:41.528030Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T17:49:41.965371Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T17:49:41.965406Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T17:49:41.986076Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494340733350930568:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:41.986137Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:49:45.263165Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494340769389589733:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:45.263332Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c23/r3tmp/tmpXlfoCd/pdisk_1.dat 2025-04-17T17:49:45.385605Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:45.385693Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:45.386063Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:45.402654Z node 7 :HIVE WARN: HIVE#720575940379688 ... 
edged message 13 2025-04-17T17:49:49.224442Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: acknoledged message 14 2025-04-17T17:49:49.224462Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: acknoledged message 15 2025-04-17T17:49:49.229678Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: try to update token 2025-04-17T17:49:49.229739Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Send 1 message(s) (4 left), first sequence number is 16 2025-04-17T17:49:49.235741Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session got write response: sequence_numbers: 16 offsets: 55 already_written: false write_statistics { persist_duration_ms: 3 queued_in_partition_duration_ms: 1 } 2025-04-17T17:49:49.235782Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: acknoledged message 16 2025-04-17T17:49:49.237163Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: try to update token 2025-04-17T17:49:49.237212Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Send 1 message(s) (3 left), first sequence number is 17 2025-04-17T17:49:49.245269Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: try to update token 2025-04-17T17:49:49.245321Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Send 1 message(s) (2 left), first sequence number is 18 2025-04-17T17:49:49.255601Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: try to update token 2025-04-17T17:49:49.255653Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Send 1 message(s) (1 left), first sequence number is 19 2025-04-17T17:49:49.256301Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session got write response: sequence_numbers: 17 offsets: 56 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 14 } 2025-04-17T17:49:49.256334Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: acknoledged message 17 2025-04-17T17:49:49.257777Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session got write response: sequence_numbers: 18 offsets: 57 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-04-17T17:49:49.257805Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: acknoledged message 18 2025-04-17T17:49:49.261427Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session got write response: sequence_numbers: 19 offsets: 58 already_written: false write_statistics { persist_duration_ms: 1 } 2025-04-17T17:49:49.261470Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: acknoledged message 19 2025-04-17T17:49:49.262895Z :DEBUG: 
[] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: try to update token 2025-04-17T17:49:49.262936Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Send 1 message(s) (0 left), first sequence number is 20 2025-04-17T17:49:49.310651Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session got write response: sequence_numbers: 20 offsets: 59 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 40 } 2025-04-17T17:49:49.310696Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: acknoledged message 20 2025-04-17T17:49:49.345796Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session will now close 2025-04-17T17:49:49.345891Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: aborting 2025-04-17T17:49:49.346553Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:49:49.347027Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d36b9727-3baf6f0b-f6d47817-1dd2ea7d_0] Write session: destroy 2025-04-17T17:49:49.594643Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494340786569459910:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:49.594746Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494340786569459887:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:49.594834Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:49.598050Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T17:49:49.612100Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494340786569459924:2392], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T17:49:49.683461Z node 7 :TX_PROXY ERROR: Actor# [7:7494340786569459975:2515] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:49.785786Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7494340786569459991:2396], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:49:49.788497Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MWFmYzM0NTgtMTQ4OWQwZDItYzRmNWI5NmYtNzJhZWQwMDg=, ActorId: [7:7494340786569459884:2386], ActorState: ExecuteState, TraceId: 01js2dy24refyhgxymztkm4kd5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:49:49.790070Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 2025-04-17T17:49:50.044873Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: try to update token 2025-04-17T17:49:50.045895Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session: Do CDS request 2025-04-17T17:49:50.046045Z :INFO: [] MessageGroupId [producer4] SessionId [] Start write session. Will connect to endpoint: localhost:12692 2025-04-17T17:49:50.051519Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer4" } 2025-04-17T17:49:50.058797Z :INFO: [] MessageGroupId [producer4] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912190058 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:49:50.058898Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session established. Init response: session_id: "producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0" topic: "topic1" 2025-04-17T17:49:50.062001Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write 1 messages with Id from 1 to 1 2025-04-17T17:49:50.081830Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session: close. 
Timeout = 18446744073709551 ms 2025-04-17T17:49:50.115550Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session: try to update token 2025-04-17T17:49:50.115615Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-17T17:49:50.121747Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session got write response: sequence_numbers: 1 offsets: 60 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-04-17T17:49:50.121809Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session: acknoledged message 1 2025-04-17T17:49:50.182055Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session will now close 2025-04-17T17:49:50.182160Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session: aborting 2025-04-17T17:49:50.183130Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:49:50.183191Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session is aborting and will not restart 2025-04-17T17:49:50.184245Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|d8a43c30-6a5cc343-8f35deeb-5c248e63_0] Write session: destroy 2025-04-17T17:49:50.263599Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494340769389589733:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:50.263754Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Size: 4194320 Got response:400: PathErrorUnknown Got response:400: No such partition in topic 2025-04-17T17:49:50.399677Z node 7 :PERSQUEUE ERROR: [PQ: 72075186224037889, Partition: 0, State: StateIdle] reading from too big offset - topic topic1 partition 0 client $without_consumer EndOffset 61 offset 10000 Got response:400: Bad offset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] Test command err: 2025-04-17T17:49:41.353372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:41.353920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:41.353985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016e3/r3tmp/tmpJ2txaC/pdisk_1.dat 2025-04-17T17:49:41.742914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.742975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.743012Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.743093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-17T17:49:41.743117Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:49:41.828959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T17:49:41.829203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.829351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:49:41.829497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:49:41.829554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.829620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.830148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.830266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:49:41.830312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.830337Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.830478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.830505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.830580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.830629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:49:41.830652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:41.830678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:41.830744Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.831016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.831044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.831159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.831182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.831210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.831239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:49:41.831261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:41.831316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.831548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.831572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.831643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.831668Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.831701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.831728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.831754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:49:41.831774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.831805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:41.834204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:41.834516Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.834547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
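The "Change state for txid" pairs in this setup trace (2 -> 3, 3 -> 128, and 129 -> 240 further down) are schemeshard per-part transaction states; the surrounding records name the handler that runs at each value (TCreateParts at 2, TConfigureParts at 3, TPropose at 128, ProposedWaitParts before the final hop, TDone at 240). A compact model of the progression exactly as this trace shows it — the numeric values are copied from the log, the names and transition function are otherwise illustrative:

#include <cstdio>

// Hypothetical mirror of the states named in the trace; only the numbers
// are taken from the "Change state for txid" records.
enum class ETxState : int {
    CreateParts       = 2,   // TCreateParts ProgressState
    ConfigureParts    = 3,   // NSubDomainState::TConfigureParts
    Propose           = 128, // NSubDomainState::TPropose, sends to the coordinator
    ProposedWaitParts = 129, // waits for TEvSchemaChanged from each datashard
    Done              = 240, // TDone ProgressState, ready to notify
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::ProposedWaitParts;
        default:                       return ETxState::Done;
    }
}

int main() {
    // Prints 2 -> 3, 3 -> 128, 128 -> 129, 129 -> 240, matching the hops
    // scattered through the trace above and below.
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done;) {
        ETxState n = Next(s);
        std::printf("Change state %d -> %d\n", (int)s, (int)n);
        s = n;
    }
}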
2025-04-17T17:49:41.834686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:49:41.835539Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:49:41.835574Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:49:41.835609Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-17T17:49:41.835711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-17T17:49:41.836021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.836060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.836087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.836208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-17T17:49:41.836241Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:49:41.836289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.836321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T17:49:41.836347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.877182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-17T17:49:41.877289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T17:49:41.877319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:41.877943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T17:49:41.878002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-17T17:49:41.914724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:41.914847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:41.926346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:42.000170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-17T17:49:42.000813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:42.000855Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:42.000911Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:42.001036Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-17T17:49:42.001065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:49:42.001147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:42.001269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 76715662:1, at schemeshard: 72057594046644480 2025-04-17T17:49:54.808027Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:54.808273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-17T17:49:54.808314Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:54.818973Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:54.819061Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-17T17:49:54.819146Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:54.819227Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:54.819435Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:998:2793], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:54.819464Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:54.819485Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:54.819628Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:666:2570], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:54.819662Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-17T17:49:54.819727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:54.819765Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 1 2025-04-17T17:49:54.819887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 
281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:54.819926Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-17T17:49:54.819988Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-17T17:49:54.820046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:54.820081Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-17T17:49:54.820132Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-17T17:49:54.820173Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-17T17:49:54.820201Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 129 -> 240 2025-04-17T17:49:54.820317Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:54.820756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-17T17:49:54.820782Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:54.820806Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-04-17T17:49:54.820888Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:939:2743] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-17T17:49:54.820929Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:666:2570] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-17T17:49:54.821008Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-17T17:49:54.821085Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:49:54.821258Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-04-17T17:49:54.821299Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T17:49:54.821419Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:54.821444Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:54.821480Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-17T17:49:54.821519Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1ProgressState, operation type TxCopyTable 2025-04-17T17:49:54.821551Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:54.821583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:1, name: CopyTableBarrier, done: 1, blocked: 1, parts count: 2 2025-04-17T17:49:54.821619Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 1, blocked: 1 2025-04-17T17:49:54.821715Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-17T17:49:54.821745Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 240 -> 240 2025-04-17T17:49:54.822166Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:54.822235Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-04-17T17:49:54.822336Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:54.822366Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:54.822406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-17T17:49:54.822447Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:1 ProgressState 2025-04-17T17:49:54.822564Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:54.822597Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-04-17T17:49:54.822637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-17T17:49:54.822683Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-04-17T17:49:54.822723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-17T17:49:54.822771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 2/2, is published: true 2025-04-17T17:49:54.822843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:909:2723] message: TxId: 281474976715662 2025-04-17T17:49:54.822898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-17T17:49:54.822947Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-17T17:49:54.822984Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-17T17:49:54.823046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-04-17T17:49:54.823074Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 
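The CopyTableBarrier records just above show how a two-part operation synchronizes its halves: one part finishes its work (done: 1) while the other parks on the named barrier (blocked: 1), and once done + blocked equals the parts count the parked part is released and advances 240 -> 240 into TDone. Illustrative-only bookkeeping for that accounting, with the counts taken from this trace:

#include <cstdio>

// Not the NKikimr implementation; just the arithmetic behind
// "Set barrier ... done: 1, blocked: 1, parts count: 2".
struct TOpBarrier {
    int PartsCount = 0;
    int Done = 0;    // parts that completed before reaching the barrier
    int Blocked = 0; // parts parked on the barrier

    bool AllReached() const { return Done + Blocked == PartsCount; }
};

int main() {
    TOpBarrier b{2};
    b.Done = 1;    // part :0 completed
    b.Blocked = 1; // part :1 parked on CopyTableBarrier
    std::printf("done: %d, blocked: %d, parts count: %d\n", b.Done, b.Blocked, b.PartsCount);
    if (b.AllReached()) {
        // Matches "All parts have reached barrier"; the blocked part is then
        // woken via TEvCompleteBarrier and finishes the operation.
        std::printf("All parts have reached barrier\n");
    }
}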
2025-04-17T17:49:54.823086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-04-17T17:49:54.823148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-17T17:49:54.823179Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T17:49:54.823616Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:54.823705Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:909:2723] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-17T17:49:54.824208Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:923:2730], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:54.824234Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:54.824256Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T17:49:54.941016Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [3:1025:2810], serverId# [3:1026:2811], sessionId# [0:0:0] 2025-04-17T17:49:54.941200Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2dy78d8kx8b5z9jpdtkdrm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTBmYjJlY2ItM2VkMTQ5MTItMzQwZGU2YzMtZjNjMDdkZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-04-17T17:49:55.071843Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2dy7c52wn9zwk4czndteha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDc0ZmY3MmUtNTdkNzcyZGItMTdkNjBhNjItYzRlZDI3ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-04-17T17:49:58.334451Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-17T17:49:58.336220Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> IncrementalBackup::E2EBackupCollection [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::E2EBackupCollection [GOOD] Test command err: 2025-04-17T17:49:40.822985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.823324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.823382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016ea/r3tmp/tmpiN8MEX/pdisk_1.dat 2025-04-17T17:49:41.100875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.100945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.100980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.101065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-17T17:49:41.101094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:49:41.216436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T17:49:41.216691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.216900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:49:41.217103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:49:41.217183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.217279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.217994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.218142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:49:41.218206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.218244Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.218421Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.218458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.218548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.218607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:49:41.218639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:41.218669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:41.218753Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.219127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.219164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.219283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.219320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.219360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.219398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:49:41.219443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:41.219513Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.219829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.219855Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:41.219954Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.219988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:41.220032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.220075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:41.220119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:49:41.220145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:41.220188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:41.223650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:41.224052Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:41.224100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-17T17:49:41.224257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:49:41.225340Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:49:41.225393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:49:41.225440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-17T17:49:41.225556Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-17T17:49:41.225903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.225950Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.225981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.226129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-17T17:49:41.226157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:49:41.226230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.226261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T17:49:41.226297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.275164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-17T17:49:41.275271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T17:49:41.275322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:41.275991Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T17:49:41.276088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-17T17:49:41.313245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:41.313355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:41.324423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:41.398506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-17T17:49:41.399217Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.399264Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.399308Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.399417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-17T17:49:41.399444Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:49:41.399528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.399651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... ode 3 :TX_DATASHARD INFO: 72075186224037892 Sending notify to schemeshard 72057594046644480 txId 281474976715668 state Ready TxInFly 0 2025-04-17T17:49:58.234515Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T17:49:58.234872Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:1565:3213], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:58.234910Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:58.234940Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:58.235067Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:1224:2934], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-17T17:49:58.235098Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-17T17:49:58.235156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-17T17:49:58.235189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715668, tablet: 72075186224037892, partId: 1 2025-04-17T17:49:58.235294Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480, message: Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-17T17:49:58.235331Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-17T17:49:58.235385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 
2025-04-17T17:49:58.235420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715668:1, shardIdx: 72057594046644480:5, datashard: 72075186224037892, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:58.235440Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T17:49:58.235461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715668:1, datashard: 72075186224037892, at schemeshard: 72057594046644480 2025-04-17T17:49:58.235485Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 129 -> 240 2025-04-17T17:49:58.235664Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 Constructed op# SrcTablePaths: "/Root/.backups/collections/MyCollection/19700101000002Z_incremental/Table" DstTablePath: "/Root/Table" SrcPathIds { OwnerId: 72057594046644480 LocalId: 15 } 2025-04-17T17:49:58.235801Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:58.236229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T17:49:58.236254Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:58.236275Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:1 2025-04-17T17:49:58.236323Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1224:2934] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-17T17:49:58.236453Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:58.236485Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:58.236525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T17:49:58.236567Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 ProgressState 2025-04-17T17:49:58.236688Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:58.236729Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-04-17T17:49:58.236771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-04-17T17:49:58.236834Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715668, done: 1, blocked: 1 2025-04-17T17:49:58.236930Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715668:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-17T17:49:58.236971Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 240 -> 240 2025-04-17T17:49:58.237122Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 
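The CollectSchemaChanged record above is a countdown: the schemeshard tracks which datashards still owe a TEvSchemaChanged for the operation part, and when the wait-set drains ("left await: 0") it acks every shard and moves the part 129 -> 240. A sketch of that accounting — the shard IDs appear in this document's traces, but the two-shard wait-set is our simplification:

#include <cstdio>
#include <set>

int main() {
    // Shards the operation is still waiting on (cf. "shardIdx ... datashard: ...").
    std::set<unsigned long long> awaiting = {72075186224037888ULL, 72075186224037892ULL};
    const unsigned long long reporting[] = {72075186224037888ULL, 72075186224037892ULL};

    for (unsigned long long shard : reporting) {
        awaiting.erase(shard); // CollectSchemaChanged accepts TEvSchemaChanged
        std::printf("datashard: %llu, left await: %zu\n", shard, awaiting.size());
    }
    if (awaiting.empty()) {
        // Wording as in the corresponding log record.
        std::printf("all shard schema changes has been received\n");
    }
}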
2025-04-17T17:49:58.237164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-04-17T17:49:58.237205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 1/2, is published: true 2025-04-17T17:49:58.237371Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715668 datashard 72075186224037892 state Ready 2025-04-17T17:49:58.237405Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-04-17T17:49:58.237633Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:58.237655Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:0 2025-04-17T17:49:58.237745Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:58.237765Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:58.237801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:49:58.237831Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:0 ProgressState 2025-04-17T17:49:58.237940Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:58.237959Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-04-17T17:49:58.237974Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-17T17:49:58.237995Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-04-17T17:49:58.238011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-17T17:49:58.238027Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 2/2, is published: true 2025-04-17T17:49:58.238074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1442:3110] message: TxId: 281474976715668 2025-04-17T17:49:58.238109Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-17T17:49:58.238153Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-04-17T17:49:58.238198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-04-17T17:49:58.238306Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 4 2025-04-17T17:49:58.238334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-04-17T17:49:58.238362Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-04-17T17:49:58.238377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-04-17T17:49:58.238405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 3 2025-04-17T17:49:58.238418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-04-17T17:49:58.238726Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:58.238785Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1442:3110] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-17T17:49:58.239039Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1450:3117], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:58.239061Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:58.239079Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T17:49:58.252262Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1565:3213], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:58.252330Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:49:58.252366Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T17:49:58.336847Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:49:58.336913Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:49:58.336989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:49:58.337027Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:49:58.578358Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01js2dyatcfnpxae90418d2sac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mzg3Njk5ZGMtOWVjZmVhYjktMWZjNTQ1NmEtOWQ2NGU0NTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |76.1%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |76.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |76.2%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |76.2%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TConsoleTests::TestRemoveServerlessTenant 
[GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |76.2%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |76.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |76.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |76.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |76.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |76.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 
is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:80:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:80:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:104:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:84:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:86:2115] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:90:2057] recipient: [11:86:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:89:2116] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:85:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:87:2116] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:91:2057] recipient: [12:87:2116] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:90:2117] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:110:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:86:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:88:2117] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:92:2057] recipient: [13:88:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:91:2118] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:111:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
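The !Reboot sequences running through this dump come from the KeyValue reboot-injection tests: for each interesting event type (NKikimr::TEvKeyValue::TEvExecuteTransaction, TEvIntermediate, TEvRead, TEvNotify, TEvCollect, TEvCompleteGC, NKikimr::TEvTabletPipe::TEvServerConnected), the harness restarts the tablet the first time that event is delivered, then checks that a new leader actor comes up ("new actor is[...]") and the tablet resolver is refreshed before the scenario continues. A minimal self-contained C++ sketch of that pattern follows; every name in it (Tablet, RunWithRebootOn, the event strings) is an illustrative stand-in, not YDB's actual test-runtime API, which lives in ydb/core/keyvalue/ut on top of the actor-system test runtime.

    // Hypothetical sketch of the reboot-on-event pattern; names are illustrative.
    #include <iostream>
    #include <string>
    #include <vector>

    struct Tablet {
        int generation = 1;            // bumped on every restart ("new actor is[...]")
        void Restart() { ++generation; }
    };

    // Replays a fixed event script against the tablet, restarting it the first
    // time rebootOn is seen -- mirroring log lines such as
    // "!Reboot 72057594037927937 (actor [...]) on event NKikimr::TEvKeyValue::TEvRead !".
    void RunWithRebootOn(const std::vector<std::string>& script,
                         const std::string& rebootOn) {
        Tablet tablet;
        bool rebooted = false;
        for (const std::string& ev : script) {
            if (!rebooted && ev == rebootOn) {
                std::cout << "!Reboot on event " << rebootOn << " !\n";
                tablet.Restart();      // new leader actor generation comes up
                rebooted = true;       // only the first occurrence triggers a reboot
            }
            // deliver ev to the (possibly restarted) tablet here
        }
        std::cout << "new actor generation is " << tablet.generation << "\n";
    }

    int main() {
        const std::vector<std::string> script = {
            "TEvExecuteTransaction", "TEvIntermediate", "TEvRead", "TEvNotify"};
        // One full run per event type, as in the repeated blocks of this dump.
        for (const std::string& ev : script) {
            RunWithRebootOn(script, ev);
        }
        return 0;
    }

The real tests drive one full scenario per event type, which is why the dump repeats the same boot/reboot/refresh block with steadily increasing actor identifiers.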
new actor is[14:94:2121]
Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095]
Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095]
Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061]
!Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvRead !
Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:89:2057] recipient: [15:36:2083]
Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:92:2057] recipient: [15:14:2061]
Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:91:2120]
Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:95:2057] recipient: [15:91:2120]
!Reboot 72057594037927937 (actor [15:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed!
new actor is[15:94:2121]
Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:148:2057] recipient: [15:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095]
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095]
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:49:46.033883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:49:46.033980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:46.034021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:49:46.034062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:49:46.034114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:49:46.034145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:49:46.034218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:49:46.034291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval#
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:46.034614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:46.109968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:46.110021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:46.121777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:46.121866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:46.121991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:46.126769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:46.126970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:46.127415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.127623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:46.130721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.131843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:46.131890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.131967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:46.132004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:46.132034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:46.132159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.137180Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:46.265143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:46.265370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.265555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:46.265799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:46.265847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.268123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.268249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:46.268411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.268466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:46.268502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:46.268535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:46.270325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.270377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:46.270416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:46.275291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.275350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.275400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.275439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.278618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:46.280497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:46.280660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:46.281522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.281652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:46.281727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.281987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:46.282034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-17T17:49:46.282202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:46.282279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:46.284230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:46.284273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:46.284437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.284475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:46.284758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.284812Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:46.284908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.284940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.284972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.284995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.285025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:46.285068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.285109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:46.285137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:46.285193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:46.285230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:46.285255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:46.286964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.287062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.287101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:06.838428Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:06.838583Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.848513Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:133:2156] sender: [27:242:2058] recipient: [27:15:2062] 2025-04-17T17:50:06.858820Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:06.859102Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.859336Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:06.859592Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:50:06.859674Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.862023Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:06.862151Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:06.862429Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.862508Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:06.862571Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:06.862619Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:06.864044Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.864100Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:06.864152Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:06.865467Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.865505Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.865579Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:06.865676Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 
1 ready parts: 1/1 2025-04-17T17:50:06.865870Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:06.866985Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:06.867210Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:50:06.868051Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:06.868200Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 115964119146 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:06.868263Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:06.868538Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:06.868605Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:06.868893Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:06.869012Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:06.870715Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:06.870781Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:06.871027Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:06.871085Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:209:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:06.871501Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.871567Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:50:06.871744Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:06.871796Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:06.871850Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:06.871904Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T17:50:06.871960Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:06.872020Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:06.872076Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:06.872124Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:06.872208Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:06.872274Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:06.872334Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:50:06.872848Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:06.872982Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:06.873051Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:50:06.873132Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:50:06.873209Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:06.873352Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:50:06.876364Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:50:06.877036Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T17:50:06.878304Z node 27 :TX_PROXY DEBUG: actor# [27:272:2263] Bootstrap 2025-04-17T17:50:06.916210Z node 27 :TX_PROXY DEBUG: actor# [27:272:2263] Become StateWork (SchemeCache [27:277:2268]) 2025-04-17T17:50:06.920043Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:06.920636Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:50:06.920839Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: 
"modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-17T17:50:06.921561Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-17T17:50:06.923214Z node 27 :TX_PROXY DEBUG: actor# [27:272:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:50:06.926715Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:06.926960Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-17T17:50:06.927514Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:49:45.760046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:45.760134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.760195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:45.760234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:45.760278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:45.760315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:45.760388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:45.760467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:45.760795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:45.853983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:45.854047Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:45.871771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:45.872215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:45.872396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:45.885638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:45.885847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:45.886581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:45.886809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:45.889704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:45.891018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:45.891111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:45.891305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:45.891356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:45.891403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:45.891541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:45.898767Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:49:46.051246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:46.051434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.051572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:46.051750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:46.051792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.053665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.053783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:46.053926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.053965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:46.053996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:46.054022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:46.055594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.055667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:46.055698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:46.057284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.057318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.057358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.057400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.059881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:46.062851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:46.062983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:46.063618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:46.063722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:46.063763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.063956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:46.063995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:46.064102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:46.064173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:46.065482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:46.065545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:46.065668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:46.065759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:46.066209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:46.066247Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:46.066315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.066340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.066369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:46.066393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.066418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:46.066460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:46.066494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:46.066518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:46.066564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:46.066596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:46.066631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:46.067914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.067990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:46.068035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:50:08.920782Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:08.920818Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T17:50:08.920861Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-17T17:50:08.921358Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:50:08.921402Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:50:08.922843Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:50:08.922927Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:50:08.922977Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:50:08.923013Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:50:08.923050Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:08.924134Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:50:08.924213Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:50:08.924248Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:50:08.924288Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:50:08.924323Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:50:08.924401Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T17:50:08.925875Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1058 } } 
2025-04-17T17:50:08.925918Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T17:50:08.926044Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1058 } } 2025-04-17T17:50:08.926142Z node 28 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1058 } } 2025-04-17T17:50:08.927533Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T17:50:08.927579Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T17:50:08.927695Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T17:50:08.927738Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:50:08.927821Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T17:50:08.927876Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:08.927916Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:50:08.927953Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:50:08.927996Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T17:50:08.929396Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:50:08.930354Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:50:08.931907Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:50:08.932041Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:50:08.932301Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:50:08.932344Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:50:08.932452Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:50:08.932488Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:50:08.932530Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:50:08.932563Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:50:08.932597Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T17:50:08.932660Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:334:2313] message: TxId: 101 2025-04-17T17:50:08.932701Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:50:08.932743Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:50:08.932776Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:50:08.932887Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:50:08.934585Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:50:08.934628Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:335:2314] TestWaitNotification: OK eventTxId 101 2025-04-17T17:50:08.935108Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:08.935326Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 260us result status StatusSuccess 2025-04-17T17:50:08.935793Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> TSchemeShardTTLTests::CheckCounters [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> TTicketParserTest::NebiusAuthenticationUnavailable |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T17:49:44.061276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:44.061352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:44.061392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:44.061418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:44.061446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:44.061462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:44.061494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-04-17T17:49:44.061545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:44.061795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:44.119727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:44.119797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:44.126161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:44.126312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:49:44.126410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:44.129031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:44.129168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:44.129590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:44.129797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:44.131877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:44.133102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:44.133159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:44.133263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:44.133306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:44.133342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:44.133564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.140257Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:49:44.245194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:44.245481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.245682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:44.245923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:44.245976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.248433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:44.248558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:49:44.248719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.248777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:44.248808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:44.248838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:44.250901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.250952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:44.250995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:44.252897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.252938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.252987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:44.253034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:44.256130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:44.257467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:44.257618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:44.258259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:44.258362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:44.258395Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:44.258576Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:44.258612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:44.258716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:44.258771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:44.260556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:44.260607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:44.260786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:44.260821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:44.261017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:44.261056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:44.261154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:44.261186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:44.261219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:44.261256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:44.261300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:49:44.261345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:44.261384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:49:44.261427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:49:44.261508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:49:44.261540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:49:44.261566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:49:44.263345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:44.263447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:49:44.263478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:50:11.301436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-17T17:50:11.301459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:50:11.301488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-17T17:50:11.301545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2321] message: TxId: 107 2025-04-17T17:50:11.301586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:50:11.301641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-17T17:50:11.301668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-17T17:50:11.301771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T17:50:11.301796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:50:11.302005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:50:11.302036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:50:11.302080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:11.304834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-17T17:50:11.304905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1353:3254] 2025-04-17T17:50:11.304993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-04-17T17:50:11.405780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:50:11.406530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:50:11.406697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-04-17T17:50:11.406791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, 
is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:50:11.406897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409548 2025-04-17T17:50:11.407697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-17T17:50:11.407772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:50:11.407843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409549 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-04-17T17:50:11.528698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-17T17:50:11.528812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:50:11.528929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:50:11.529070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1744925474439883 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:50:11.529164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1744925474439883 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T17:50:11.529775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T17:50:11.530218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:50:11.530683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:50:11.530732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T17:50:11.531375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T17:50:11.531409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:50:11.537742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T17:50:11.537980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:50:11.538031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-17T22:31:14.439883Z, at schemeshard: 72057594046678944 2025-04-17T17:50:11.538284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at 
schemeshard: 72057594046678944 2025-04-17T17:50:11.538345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:50:11.538580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:50:11.538622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-17T22:31:14.439883Z, at schemeshard: 72057594046678944 2025-04-17T17:50:11.538679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:50:11.561302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T17:50:11.619494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:50:11.619628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:50:11.619688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-04-17T17:50:11.619788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:50:11.619901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409548 2025-04-17T17:50:11.620070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-17T17:50:11.620117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:50:11.620160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409549 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-04-17T17:50:11.652235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T17:50:11.721435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2025-04-17T17:50:11.721575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:50:11.721649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 
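The conditional-erase requests above carry the threshold as Expiration { ColumnId ... WallClockTimestamp ... ColumnUnit ... } plus batching limits (BatchMaxBytes 512000, BatchMaxKeys 256). As a minimal sketch of the unit arithmetic involved, assuming WallClockTimestamp is microseconds since the epoch (which matches the 2025-04-17 run time) and using made-up names rather than YDB's internal API, a row is expired once its TTL column, converted to a common unit, is at or before the threshold:

#include <cstdint>
#include <iostream>

// Illustrative stand-ins for the ColumnUnit values seen in the log.
enum class EUnit { Seconds, Milliseconds, Microseconds, Nanoseconds };

// Convert a stored integral column value to microseconds since the epoch.
std::int64_t ToMicroseconds(std::int64_t value, EUnit unit) {
    switch (unit) {
        case EUnit::Seconds:      return value * 1'000'000;
        case EUnit::Milliseconds: return value * 1'000;
        case EUnit::Microseconds: return value;
        case EUnit::Nanoseconds:  return value / 1'000;
    }
    return value; // unreachable
}

bool IsExpired(std::int64_t columnValue, EUnit unit, std::int64_t wallClockUs) {
    return ToMicroseconds(columnValue, unit) <= wallClockUs;
}

int main() {
    const std::int64_t thresholdUs = 1744925474439883; // WallClockTimestamp from the log
    // A nanosecond-typed value one hour older than the threshold, matching
    // the ExpireAfterSeconds: 3600 setting shown earlier in this test.
    const std::int64_t rowNs = (thresholdUs - 3600LL * 1'000'000) * 1'000;
    std::cout << IsExpired(rowNs, EUnit::Nanoseconds, thresholdUs) << "\n"; // prints 1
}

UNIT_AUTO in these requests presumably lets the shard derive the interpretation from the column type, for example nanoseconds for the pgint8 TTL column described in the earlier TTLTableWithpgint8Column_UNIT_NANOSECONDS output.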
2025-04-17T17:50:11.721733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:50:11.721847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409548 2025-04-17T17:50:11.722027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-17T17:50:11.722071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:50:11.722111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409549 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
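The SchemeShard/NumShardsByTtlLag dumps repeated through this test show the same two shards migrating between lag buckets (900, then 1800, then 0, then 900 again) as conditional erase completes and simulated time advances. A plausible reading, and it is only an assumption about the counter's semantics rather than something taken from YDB code, is that each shard lands in the first bucket whose upper edge is greater than or equal to its lag in seconds, with the edges exactly as printed:

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <iostream>

// Bucket upper edges copied verbatim from the log; index 9 is the "inf" bucket.
constexpr std::array<std::uint64_t, 9> kEdges = {
    0, 900, 1800, 3600, 7200, 14400, 28800, 57600, 86400};

std::size_t BucketIndex(std::uint64_t lagSeconds) {
    // First edge >= lag; falling past the last edge means "inf".
    auto it = std::lower_bound(kEdges.begin(), kEdges.end(), lagSeconds);
    return static_cast<std::size_t>(it - kEdges.begin());
}

int main() {
    const char* names[] = {"0", "900", "1800", "3600", "7200",
                           "14400", "28800", "57600", "86400", "inf"};
    for (std::uint64_t lag : {0ULL, 450ULL, 1200ULL, 90000ULL})
        std::cout << "lag " << lag << "s -> bucket " << names[BucketIndex(lag)] << "\n";
}

Under that reading, a shard erased just now reports lag 0 and sits in the first bucket, and the Values vector shifting across the dumps tracks the two shards aging into later buckets and snapping back to the first one right after a successful erase.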
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:84:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:84:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:140:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> Worker::Basic >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] 2025-04-17T17:49:55.282719Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] 2025-04-17T17:50:05.582469Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
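The two KEYVALUE ERROR records above (Marker# KV73 together with KV90) show a request rejected because a single DeleteRange would exceed the 100000 deletions allowed per request. A client running into this would normally split the work across several requests; below is a toy sketch of that chunking, with a hypothetical Storage stand-in rather than the real KV tablet API:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

constexpr std::size_t kMaxDeletesPerRequest = 100000; // the cap cited by Marker# KV90

struct Storage {
    std::vector<std::string> keys;
    // Pretend each key is one deletion; the real tablet counts affected entries.
    std::size_t EraseBatch(std::size_t limit) {
        const std::size_t n = std::min(limit, keys.size());
        keys.erase(keys.begin(), keys.begin() + static_cast<std::ptrdiff_t>(n));
        return n;
    }
};

int main() {
    Storage s;
    s.keys.resize(250000, std::string("k"));
    while (!s.keys.empty()) {
        // Each loop iteration stands for one KV request kept under the cap.
        std::cout << "request deleted " << s.EraseBatch(kMaxDeletesPerRequest)
                  << " entries\n"; // 100000, 100000, 50000
    }
}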
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:449:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:452:2057] recipient: [4:451:2377] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:453:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:455:2057] recipient: [4:451:2377] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:454:2378] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:508:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:449:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:452:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:453:2057] recipient: [5:451:2377] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:455:2057] recipient: [5:451:2377] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:454:2378] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:508:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:450:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:453:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:454:2057] recipient: [6:452:2377] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:456:2057] recipient: [6:452:2377] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:455:2378] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:509:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
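The '!Reboot 72057594037927937 (actor [...]) on event NKikimr::TEvKeyValue::... !' lines running through these KeyValue tests follow one fixed pattern: for each interesting event type the harness restarts the tablet the first time that event is observed, then logs 'rebooted!', 'tablet resolver refreshed!' and the id of the new leader actor. A toy model of that injection loop (a simplified simulation, not the real NKikimr test framework):

#include <iostream>
#include <string>
#include <vector>

int main() {
    // Event names taken from the reboot markers in the log above.
    const std::vector<std::string> script = {
        "TEvServerConnected", "TEvRequest", "TEvIntermediate",
        "TEvCollect", "TEvCompleteGC"};

    for (const std::string& target : script) {
        int generation = 1;
        bool rebooted = false;
        // Replay the event sequence, injecting one restart at the target event.
        for (const std::string& ev : script) {
            if (!rebooted && ev == target) {
                ++generation; // a new leader actor takes over
                rebooted = true;
                std::cout << "!Reboot on event " << ev
                          << " -> generation " << generation << "\n";
            }
        }
        // The real tests would assert here that a new actor id was elected
        // ("new actor is[...]") and that data written before the restart is
        // still readable afterwards.
    }
}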
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:86:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:90:2057] recipient: [12:88:2117] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:92:2057] recipient: [12:88:2117] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! 
new actor is[12:91:2118] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:92:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:93:2057] recipient: [13:91:2120] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:95:2057] recipient: [13:91:2120] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:94:2121] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:148:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:90:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:94:2057] recipient: [15:92:2120] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:96:2057] recipient: [15:92:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! 
new actor is[15:95:2121] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:149:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> VDiskBalancing::TestStopOneNode_Block42 |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |76.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |76.4%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |76.4%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |76.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 5526233385540449859 SEND TEvPut with key [1:1:1:0:0:100:0] 2025-04-17T17:50:18.123680Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-17T17:50:18.124196Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-04-17T17:50:18.224504Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 15723591892515566903 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-17T17:50:18.461037Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:188:17] ServerId# [1:296:58] TabletId# 72057594037932033 PipeClientId# [3:188:17] 2025-04-17T17:50:18.461256Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:223:17] ServerId# [1:301:63] TabletId# 72057594037932033 PipeClientId# [8:223:17] 2025-04-17T17:50:18.461375Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:209:17] ServerId# [1:299:61] TabletId# 72057594037932033 PipeClientId# [6:209:17] 2025-04-17T17:50:18.461465Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:202:17] ServerId# [1:298:60] TabletId# 72057594037932033 PipeClientId# [5:202:17] 2025-04-17T17:50:18.461556Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:195:17] ServerId# [1:297:59] TabletId# 72057594037932033 PipeClientId# [4:195:17] 2025-04-17T17:50:18.461689Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:181:17] ServerId# [1:295:57] TabletId# 72057594037932033 PipeClientId# [2:181:17] 2025-04-17T17:50:18.461787Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:216:17] ServerId# [1:300:62] TabletId# 72057594037932033 PipeClientId# [7:216:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start 
compaction Finish compaction >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |76.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 705569415176809129 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-17T17:50:19.017338Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> TKeyValueTest::TestCleanUpDataOnEmptyTablet [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration >> VDiskRestart::Simple [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration [GOOD] >> TKeyValueTest::TestCleanUpDataWithMockDisk |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit |76.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> Worker::Basic [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:78:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:79:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:81:2112] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:85:2057] recipient: [8:81:2112] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:84:2113] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:138:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:80:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:82:2112] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:86:2057] recipient: [9:82:2112] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:85:2113] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:83:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:85:2115] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:89:2057] recipient: [11:85:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:88:2116] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:142:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:84:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:86:2115] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:90:2057] recipient: [12:86:2115] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:89:2116] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:143:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |76.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-04-17T17:50:15.187454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340900174370200:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:15.188102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b92/r3tmp/tmpwIQhsx/pdisk_1.dat 2025-04-17T17:50:15.966746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:15.966857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:15.970803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:15.989110Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3162 TServer::EnableGrpc on GrpcPort 20527, node 1 2025-04-17T17:50:16.508744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:16.508774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:16.508781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:16.508973Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:17.190431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:17.223014Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:50:17.478021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744912217594 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-04-17T17:50:17.604362Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handshake: worker# [1:7494340908764305617:2426] 2025-04-17T17:50:17.604439Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handshake: worker# [1:7494340908764305617:2426] 2025-04-17T17:50:17.604913Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:50:17.605140Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T17:50:17.605169Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Send handshake: worker# [1:7494340908764305617:2426] 2025-04-17T17:50:17.605254Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-17T17:50:17.605272Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494340908764305617:2426] Handshake with writer: sender# [1:7494340908764305619:2426] 2025-04-17T17:50:17.606582Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Create read session: session# 
[1:7494340908764305622:2297] 2025-04-17T17:50:17.606618Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-17T17:50:17.610391Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494340908764305617:2426] Handshake with reader: sender# [1:7494340908764305618:2426] 2025-04-17T17:50:17.618411Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:50:17.793407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-17T17:50:19.715212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340917354240393:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:19.715326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:19.720545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340917354240408:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:19.720654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340917354240409:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:19.727389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:50:19.744104Z node 1 :TX_PROXY ERROR: Actor# [1:7494340917354240415:2513] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:50:19.750840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340917354240412:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:50:19.750885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340917354240413:2382], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:50:19.845076Z node 1 :TX_PROXY ERROR: Actor# [1:7494340917354240461:2543] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:19.846700Z node 1 :TX_PROXY ERROR: Actor# [1:7494340917354240466:2547] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:20.193064Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340900174370200:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:20.193106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:20.774663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T17:50:21.164219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:50:21.587780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T17:50:22.003568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T17:50:22.402058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:50:22.971099Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-17T17:50:22.959000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:50:22.971193Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-17T17:50:22.959000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:50:22.971257Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-17T17:50:22.959000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:50:22.990454Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-04-17T17:50:22.990666Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494340930239143102:2426] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T17:50:22.990731Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:50:22.990794Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494340930239143102:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-17T17:50:22.993047Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494340930239143102:2426] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:50:22.993109Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:50:22.993162Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-04-17T17:50:22.993257Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:50:22.993312Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:50:23.138895Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:50:23.131000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:50:23.138961Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:50:23.131000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:50:23.139008Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-17T17:50:23.131000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:50:23.139092Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-04-17T17:50:23.139201Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494340930239143102:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 
Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-17T17:50:23.141046Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494340930239143102:2426] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:50:23.141099Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:50:23.141133Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-17T17:50:23.141186Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:50:23.141236Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:50:23.303038Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-17T17:50:23.296000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-17T17:50:23.303102Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-17T17:50:23.296000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:50:23.303176Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-17T17:50:23.296000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-17T17:50:23.303290Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-04-17T17:50:23.303372Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494340930239143102:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-17T17:50:23.305199Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7494340930239143102:2426] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:50:23.305252Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-17T17:50:23.305294Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7494340908764305619:2426] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 
2025-04-17T17:50:23.305352Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:50:23.305589Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-17T17:50:23.433593Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-04-17T17:50:23.433621Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7494340908764305618:2426] Leave 2025-04-17T17:50:23.433669Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494340908764305617:2426] Reader has gone: sender# [1:7494340908764305618:2426] 2025-04-17T17:50:23.433715Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340934534110582:2426] Handshake: worker# [1:7494340908764305617:2426] 2025-04-17T17:50:23.434698Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340934534110582:2426] Create read session: session# [1:7494340934534110583:2297] 2025-04-17T17:50:23.434786Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7494340908764305617:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-17T17:50:23.434801Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7494340908764305617:2426] Handshake with reader: sender# [1:7494340934534110582:2426] 2025-04-17T17:50:23.434830Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7494340934534110582:2426] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> TKeyValueTest::TestRenameWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] Test command err: 2025-04-17T17:48:34.664232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:34.664301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:34.807720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:38.270125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-17T17:48:38.552192Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:38.552668Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:38.557363Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16600737674598033720 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:38.582193Z node 2 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:48:38.824111Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:38.835535Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:38.836511Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11480861191609860849 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:39.174864Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:39.177721Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:39.177905Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3834001808414997160 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:39.318449Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:39.328135Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:39.328918Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00172c/r3tmp/tmpwVuGKk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 408039700638707953 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:39.473017Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoCon ... istered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerRequestDataSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardReadSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardIncomingReadSetSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.DefaultTimeoutMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.VolatilePlanLeaseMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.PlanAheadTimeShiftMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinPlanResolutionMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control SchemeShardControls.ForceShardSplitDataSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.DisableForceShardSplit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.ProfileSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.GuardedSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheTargetSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheReleaseRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableLocalSyncLogDataCutting was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DefaultHugeGarbagePerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control VDiskControls.HugeDefragFreeSpaceBorderPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.MaxChunksToDefragInflight was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingDryRun was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.MaxInProgressSyncCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TabletControls.MaxCommitRedoMB was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThreshold was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplier was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.LongRequestThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisks was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThresholdHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplierHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThresholdSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplierSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.BucketSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakDurationMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.MaxCommonLogChunksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.MaxCommonLogChunksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.UseNoopSchedulerHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.UseNoopSchedulerSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control BlobStorageControllerControls.EnableSelfHealWithDegraded was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TableServiceControls.EnableMergeDatashardReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TestShardControls.DisableWrites was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
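[Editor's note] The block above repeats one warning shape dozens of times, varying only the control name. When triaging a log like this it can help to collapse the run into per-group counts. A minimal sketch, assuming the log has been saved to a plain-text file; the file name and the summary format are illustrative, not part of the ya tooling:

import re
from collections import Counter

# Matches e.g. "WARNING: immediate control VDiskControls.BurstThresholdNsHDD was registered before ..."
PATTERN = re.compile(r"WARNING: immediate control (\w+)\.([\w.]+) was registered before")

counts = Counter()
with open("ya_test_stderr.txt") as fh:  # hypothetical dump of the log above
    for line in fh:
        for group, _control in PATTERN.findall(line):
            counts[group] += 1

for group, n in counts.most_common():
    print(f"{group}: {n} controls registered before TImmediateControlsConfigurator")

Run against this section it would report, for example, that VDiskControls accounts for the largest share of the warnings.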
>> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD]
>> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD]
>> EntityId::Order
>> EntityId::Order [GOOD]
>> EscapingBasics::EncloseSecretShouldWork [GOOD]
>> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD]
>> unstable_connection.py::TestUnstableConnection::test [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110]
!Reboot 72057594037927937 (actor [2:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061]
!Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110]
!Reboot 72057594037927937 (actor [3:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed!
new actor is[3:81:2111]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061]
!Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110]
!Reboot 72057594037927937 (actor [4:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed!
new actor is[4:82:2111]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061]
!Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113]
!Reboot 72057594037927937 (actor [5:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed!
new actor is[5:85:2114]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061]
!Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113]
!Reboot 72057594037927937 (actor [6:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed!
new actor is[6:85:2114]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061]
!Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113]
!Reboot 72057594037927937 (actor [7:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed!
new actor is[7:86:2114]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061]
!Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect !
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2114]
Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114]
!Reboot 72057594037927937 (actor [8:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed!
new actor is[8:87:2115]
Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:107:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061]
!Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC !
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115]
Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115]
!Reboot 72057594037927937 (actor [9:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed!
new actor is[9:88:2116]
Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:108:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061]
!Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:36:2083]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:90:2057] recipient: [10:88:2118]
Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:88:2118]
!Reboot 72057594037927937 (actor [10:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed!
new actor is[10:91:2119]
Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:145:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061]
!Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2118]
Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:88:2118]
!Reboot 72057594037927937 (actor [11:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed!
new actor is[11:91:2119]
Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:145:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061]
!Reboot 72057594037927937 (actor [12:56:209 ... recipient: [15:78:2110]
!Reboot 72057594037927937 (actor [15:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed!
new actor is[15:81:2111]
Leader for TabletID 72057594037927937 is [15:81:2111] sender: [15:135:2057] recipient: [15:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095]
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095]
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061]
!Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction !
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083]
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061]
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110]
Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110]
!Reboot 72057594037927937 (actor [16:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed!
new actor is[16:81:2111]
Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095]
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095]
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061]
!Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:77:2057] recipient: [17:36:2083]
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:14:2061]
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:81:2057] recipient: [17:79:2110]
Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110]
!Reboot 72057594037927937 (actor [17:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed!
new actor is[17:82:2111]
Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:136:2057] recipient: [17:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095]
Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095]
Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061]
!Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:36:2083]
Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:83:2057] recipient: [18:14:2061]
Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:82:2113]
Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:86:2057] recipient: [18:82:2113]
!Reboot 72057594037927937 (actor [18:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed!
new actor is[18:85:2114]
Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:139:2057] recipient: [18:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095]
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095]
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061]
!Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction !
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083]
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:14:2061]
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:82:2113]
Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113]
!Reboot 72057594037927937 (actor [19:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed!
new actor is[19:85:2114]
Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095]
Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095]
Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061]
!Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:81:2057] recipient: [20:36:2083]
Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061]
Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:85:2057] recipient: [20:83:2113]
Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:83:2113]
!Reboot 72057594037927937 (actor [20:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed!
new actor is[20:86:2114]
Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:140:2057] recipient: [20:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095]
Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095]
Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061]
!Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect !
Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:82:2057] recipient: [21:36:2083]
Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:14:2061]
Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:86:2057] recipient: [21:84:2114]
Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:88:2057] recipient: [21:84:2114]
!Reboot 72057594037927937 (actor [21:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed!
new actor is[21:87:2115]
Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:107:2057] recipient: [21:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095]
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095]
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061]
!Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC !
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083]
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061]
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115]
Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115]
!Reboot 72057594037927937 (actor [22:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed!
new actor is[22:88:2116]
Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:108:2057] recipient: [22:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061]
!Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:36:2083]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:89:2057] recipient: [23:14:2061]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:90:2057] recipient: [23:88:2118]
Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:88:2118]
!Reboot 72057594037927937 (actor [23:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed!
new actor is[23:91:2119]
Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:145:2057] recipient: [23:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061]
!Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange !
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:86:2057] recipient: [24:36:2083]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:14:2061]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:90:2057] recipient: [24:88:2118]
Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:88:2118]
!Reboot 72057594037927937 (actor [24:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed!
new actor is[24:91:2119]
Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:145:2057] recipient: [24:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061]
!Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:89:2057] recipient: [25:14:2061]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:90:2118]
Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:90:2118]
!Reboot 72057594037927937 (actor [25:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed!
new actor is[25:92:2119]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095]
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095]
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061]
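[Editor's note] The trace above comes from a reboot-injection harness: the same KeyValue scenario is replayed once per observed event type, the tablet is restarted when that event fires, and the test then checks that a new leader actor comes up ("rebooted!", "tablet resolver refreshed!", "new actor is[...]"). A toy, self-contained Python sketch of that pattern follows; every name in it is hypothetical, and the real harness is the C++ NKikimr unittest library, not this code:

from dataclasses import dataclass

@dataclass
class FakeTablet:
    """Toy stand-in for a tablet; the generation bumps on every reboot."""
    generation: int = 1

    def events(self):
        # Event types seen in the trace above (illustrative subset).
        return ["TEvServerConnected", "TEvRequest", "TEvIntermediate",
                "TEvCollect", "TEvCompleteGC", "TEvReadRange", "TEvNotify"]

    def reboot(self):
        self.generation += 1

def run_with_reboot_on(event_name: str) -> int:
    """Replay the scenario, rebooting when the named event fires."""
    tablet = FakeTablet()
    for ev in tablet.events():
        if ev == event_name:
            print(f"!Reboot on event {ev} !")
            tablet.reboot()  # corresponds to "rebooted!" / "tablet resolver refreshed!"
    print(f"new actor generation is {tablet.generation}")
    return tablet.generation

# One scenario per event type, exactly one reboot injected in each.
for name in FakeTablet().events():
    assert run_with_reboot_on(name) == 2

The point of the design is coverage: by iterating the injection point over every event type, the test exercises recovery from a restart at each step of the protocol rather than at a single arbitrary moment.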
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD]
Test command err:
2025-04-17T17:48:33.527295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340463360484053:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:48:33.527403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpokstWu/pdisk_1.dat
2025-04-17T17:48:34.152562Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:48:34.156444Z node 1 :HTTP ERROR: (#26,[::1]:20552) connection closed with error: Connection refused
2025-04-17T17:48:34.157420Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:48:34.157587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:48:34.157653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:48:34.160224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:48:45.249579Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340512367386751:2207];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:48:45.293078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmp1w0P5z/pdisk_1.dat
2025-04-17T17:48:45.419375Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:48:45.420234Z node 2 :HTTP ERROR: (#28,[::1]:15612) connection closed with error: Connection refused
2025-04-17T17:48:45.420829Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:48:45.423313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:48:45.423374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:48:45.427275Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:48:48.630540Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494340526162550049:2090];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:48:48.631336Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpyAZ0LF/pdisk_1.dat
2025-04-17T17:48:48.992232Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:48:49.036498Z node 3 :HTTP ERROR: (#26,[::1]:7311) connection closed with error: Connection refused
2025-04-17T17:48:49.045238Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:48:49.054325Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:48:49.054394Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:48:49.055827Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:48:52.944008Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340545444639875:2153];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:48:52.998020Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmp2jw0GR/pdisk_1.dat
2025-04-17T17:48:53.117398Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:48:53.136027Z node 4 :HTTP ERROR: (#28,[::1]:23483) connection closed with error: Connection refused
2025-04-17T17:48:53.142509Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:48:53.144003Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:48:53.144069Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:48:53.151217Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:48:56.592983Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494340561005577717:2208];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmp30H7KR/pdisk_1.dat
2025-04-17T17:48:56.688004Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:48:56.784723Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:48:56.789697Z node 5 :HTTP ERROR: (#30,[::1]:23028) connection closed with error: Connection refused
2025-04-17T17:48:56.791084Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:48:56.814817Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:48:56.814898Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:48:56.816599Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:01.255559Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494340582364037385:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:01.258696Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpMbZyo7/pdisk_1.dat
2025-04-17T17:49:01.567616Z node 6 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:01.579289Z node 6 :HTTP ERROR: (#32,[::1]:11396) connection closed with error: Connection refused
2025-04-17T17:49:01.579514Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:01.579882Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:01.579975Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:01.581276Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:06.134784Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494340604042277848:2082];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:06.135618Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpAA33LF/pdisk_1.dat
2025-04-17T17:49:06.353333Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:06.366551Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:06.366673Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:06.367942Z node 7 :HTTP ERROR: (#34,[::1]:22576) connection closed with error: Connection refused
2025-04-17T17:49:06.369632Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:06.371174Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:10.819405Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494340619529155046:2213];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:10.819748Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpi9oX92/pdisk_1.dat
2025-04-17T17:49:11.244841Z node 8 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:11.293472Z node 8 :HTTP ERROR: (#36,[::1]:17706) connection closed with error: Connection refused
2025-04-17T17:49:11.297221Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:11.312899Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:11.334437Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:11.339155Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:15.679768Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494340643120646904:2220];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpPmtYpB/pdisk_1.dat
2025-04-17T17:49:15.706099Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:49:15.816345Z node 9 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:15.828830Z node 9 :HTTP ERROR: (#35,[::1]:20641) connection closed with error: Connection refused
2025-04-17T17:49:15.829543Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:15.830405Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:15.830485Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:15.831962Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:20.646531Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494340665391181927:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:20.646602Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpKnycLH/pdisk_1.dat
2025-04-17T17:49:20.832042Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:20.846481Z node 10 :HTTP ERROR: (#26,[::1]:11216) connection closed with error: Connection refused
2025-04-17T17:49:20.846914Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:20.864856Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileSta ... cheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpaYJtwc/pdisk_1.dat
2025-04-17T17:49:37.816697Z node 14 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:37.833358Z node 14 :HTTP ERROR: (#34,[::1]:28900) connection closed with error: Connection refused
2025-04-17T17:49:37.833789Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:37.852496Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:37.852607Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:37.854152Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:41.152450Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7494340752836299449:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:41.152533Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmp2KxzIb/pdisk_1.dat
2025-04-17T17:49:41.255865Z node 15 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:41.275433Z node 15 :HTTP ERROR: (#36,[::1]:12938) connection closed with error: Connection refused
2025-04-17T17:49:41.275687Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:41.293551Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:41.293647Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:41.295277Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:44.925684Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7494340768547690053:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:44.925762Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpMg9HT1/pdisk_1.dat
2025-04-17T17:49:45.060040Z node 16 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:45.081586Z node 16 :HTTP ERROR: (#38,[::1]:4257) connection closed with error: Connection refused
2025-04-17T17:49:45.082118Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:45.084839Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:45.084935Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:45.086325Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:49.550873Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7494340788804801933:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:49.550954Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpQC7xFA/pdisk_1.dat
2025-04-17T17:49:49.684052Z node 17 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:49.704317Z node 17 :HTTP ERROR: (#26,[::1]:63821) connection closed with error: Connection refused
2025-04-17T17:49:49.706369Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:49.719787Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:49.719890Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:49.721212Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:54.211739Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7494340811254392741:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:54.211825Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpUB686D/pdisk_1.dat
2025-04-17T17:49:54.341366Z node 18 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:54.358706Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:54.358813Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:54.358961Z node 18 :HTTP ERROR: (#28,[::1]:28075) connection closed with error: Connection refused
2025-04-17T17:49:54.360168Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:54.361270Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:49:58.728395Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7494340827115108175:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:49:58.728519Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmp3pqiu0/pdisk_1.dat
2025-04-17T17:49:58.834869Z node 19 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:49:58.867646Z node 19 :HTTP ERROR: (#30,[::1]:2088) connection closed with error: Connection refused
2025-04-17T17:49:58.868215Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:49:58.870386Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:49:58.870507Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:49:58.872328Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:50:03.650617Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7494340847744319710:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:50:03.653374Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpxQMjzY/pdisk_1.dat
2025-04-17T17:50:03.812929Z node 20 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:50:03.845190Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:50:03.845339Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:50:03.847128Z node 20 :HTTP ERROR: (#32,[::1]:22135) connection closed with error: Connection refused
2025-04-17T17:50:03.848203Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:50:03.849770Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:50:09.338384Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7494340875093161710:2062];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:50:09.338441Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmppVhLHq/pdisk_1.dat
2025-04-17T17:50:09.568355Z node 21 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:50:09.570021Z node 21 :HTTP ERROR: (#34,[::1]:18230) connection closed with error: Connection refused
2025-04-17T17:50:09.573556Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:50:09.575745Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:50:09.575843Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:50:09.585250Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:50:15.119845Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7494340900704399941:2150];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:50:15.120695Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpqq5iSe/pdisk_1.dat
2025-04-17T17:50:15.546536Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:50:15.569784Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:50:15.569918Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:50:15.570161Z node 22 :HTTP ERROR: (#36,[::1]:62389) connection closed with error: Connection refused
2025-04-17T17:50:15.570870Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:50:15.571888Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:50:21.484606Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7494340926187939264:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:50:21.484668Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171b/r3tmp/tmpoiojRR/pdisk_1.dat
2025-04-17T17:50:21.652224Z node 23 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:50:21.691753Z node 23 :HTTP ERROR: (#38,[::1]:8704) connection closed with error: Connection refused
2025-04-17T17:50:21.692124Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused
2025-04-17T17:50:21.694709Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:50:21.694808Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:50:21.696414Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected
>> TBtreeIndexBuilder::NoNodes [GOOD]
>> TBtreeIndexBuilder::OneNode [GOOD]
>> TBtreeIndexBuilder::FewNodes [GOOD]
>> TBtreeIndexBuilder::SplitBySize [GOOD]
>> TBtreeIndexNode::TIsNullBitmap [GOOD]
>> TBtreeIndexNode::CompareTo [GOOD]
>> TBtreeIndexNode::Basics [GOOD]
>> TBtreeIndexNode::Group [GOOD]
>> TBtreeIndexNode::History [GOOD]
>> TBtreeIndexNode::OneKey [GOOD]
>> TBtreeIndexNode::Reusable [GOOD]
>> TBtreeIndexNode::CutKeys [GOOD]
>> TBtreeIndexTPart::Conf [GOOD]
>> TBtreeIndexTPart::NoNodes [GOOD]
>> TBtreeIndexTPart::OneNode [GOOD]
>> TBtreeIndexTPart::FewNodes
>> TBtreeIndexTPart::FewNodes [GOOD]
>> TBtreeIndexTPart::Erases [GOOD]
>> TBtreeIndexTPart::Groups [GOOD]
>> TBtreeIndexTPart::History
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD]
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD]
>> EntityId::Distinct [GOOD]
>> EntityId::MinId [GOOD]
>> EntityId::MaxId [GOOD]
>> TBtreeIndexTPart::History [GOOD]
>> TBtreeIndexTPart::External
>> TPartBtreeIndexIteration::NoNodes_History
>> TBtreeIndexTPart::External [GOOD]
>> TChargeBTreeIndex::NoNodes
>> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD]
>> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi
>> TChargeBTreeIndex::NoNodes [GOOD]
>> TChargeBTreeIndex::FewNodes
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot
>> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD]
>> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink
>> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD]
>> SplitterBasic::LimitExceed [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1
>> TIterator::Single
>> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular
>> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD]
>> TChargeBTreeIndex::FewNodes [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups
>> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2
>> SystemView::VSlotsFields
>> SystemView::ShowCreateTableDefaultLiteral
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD]
>> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD]
>> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD]
>> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache
>> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD]
>> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD]
>> TFlatTableExecutor_ColumnGroups::TestManyRows
>> SystemView::CollectPreparedQueries
>> SystemView::ShowCreateTablePartitionByHash
>> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD]
>> DataShardSnapshots::VolatileSnapshotCleanupOnFinish
>> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD]
>> TKeyValueTest::TestCopyRangeToLongKey
>> TIterator::Single [GOOD]
>> TIterator::SingleReverse
>> SystemView::TopPartitionsByCpuFields
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110]
!Reboot 72057594037927937 (actor [2:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... bletID 72057594037927937 is [13:56:2097] sender: [13:87:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:82:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:83:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:83:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-04-17T17:47:33.764287Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:34.215964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:34.275088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:34.275364Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:34.298347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:34.299242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:34.300269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:34.300532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:34.300982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:34.301167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:34.301737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:34.301930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:34.302380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:34.302484Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.302582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:34.303716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:34.366042Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:34.366344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:34.366468Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:34.366666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.366864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:34.366943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:34.366991Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:34.367087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:34.367152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:34.367195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:34.367233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:34.367425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.367494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:34.367554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:34.367589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:34.367690Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:34.367736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:34.367780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:34.367813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:34.367894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:34.367929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:34.367958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:34.368006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:34.368044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:34.368071Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:34.368569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-17T17:47:34.368665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-04-17T17:47:34.368748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-17T17:47:34.368837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-17T17:47:34.369034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:34.369094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:34.369130Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:34.369323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T17:47:34.369379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.369428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.369594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:34.369640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:34.369669Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:34.369838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:34.369875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:34.369909Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:34.370040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:34.370082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:34.370131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
8.055695Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-04-17T17:50:28.055744Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-04-17T17:50:28.055787Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-04-17T17:50:28.055826Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-04-17T17:50:28.055866Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-04-17T17:50:28.055906Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-04-17T17:50:28.055948Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-04-17T17:50:28.055992Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-04-17T17:50:28.056034Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-04-17T17:50:28.056075Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-04-17T17:50:28.056114Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-04-17T17:50:28.056156Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-04-17T17:50:28.056199Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-04-17T17:50:28.056247Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-04-17T17:50:28.056306Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-04-17T17:50:28.056365Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-04-17T17:50:28.056410Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-04-17T17:50:28.056470Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-04-17T17:50:28.056518Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-04-17T17:50:28.056564Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-04-17T17:50:28.056620Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-04-17T17:50:28.056664Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-04-17T17:50:28.056708Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-04-17T17:50:28.056762Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-04-17T17:50:28.056802Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-04-17T17:50:28.056853Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-04-17T17:50:28.056896Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-04-17T17:50:28.056939Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-04-17T17:50:28.056986Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-04-17T17:50:28.057030Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-04-17T17:50:28.057071Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-04-17T17:50:28.057111Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-04-17T17:50:28.057159Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-04-17T17:50:28.057198Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-04-17T17:50:28.057235Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-04-17T17:50:28.057279Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-04-17T17:50:28.057322Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-04-17T17:50:28.057363Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-04-17T17:50:28.057409Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-04-17T17:50:28.057451Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-04-17T17:50:28.057493Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-04-17T17:50:28.057533Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-04-17T17:50:28.057584Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-04-17T17:50:28.057630Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-04-17T17:50:28.057675Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-04-17T17:50:28.057716Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-04-17T17:50:28.057769Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-04-17T17:50:28.057838Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-04-17T17:50:28.057883Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-04-17T17:50:28.057921Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-04-17T17:50:28.057960Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-04-17T17:50:28.058005Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-04-17T17:50:28.058049Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-04-17T17:50:28.058091Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-04-17T17:50:28.058130Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-04-17T17:50:28.058172Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-04-17T17:50:28.058251Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-04-17T17:50:28.058300Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-04-17T17:50:28.058335Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-04-17T17:50:28.058382Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-04-17T17:50:28.058430Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-04-17T17:50:28.058476Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-04-17T17:50:28.058522Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-04-17T17:50:28.058559Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-04-17T17:50:28.058606Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-04-17T17:50:28.058667Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-04-17T17:50:28.058718Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-04-17T17:50:28.058782Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-04-17T17:50:28.058853Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-04-17T17:50:28.058912Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-04-17T17:50:28.058956Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-04-17T17:50:28.058995Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-04-17T17:50:28.059039Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-04-17T17:50:28.059082Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-04-17T17:50:28.059120Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-04-17T17:50:28.059169Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-04-17T17:50:28.059217Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-04-17T17:50:28.059255Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-04-17T17:50:28.059291Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-04-17T17:50:28.059330Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-04-17T17:50:28.059383Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-04-17T17:50:28.059423Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-04-17T17:50:28.059458Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-04-17T17:50:28.059502Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-04-17T17:50:28.570084Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T17:50:28.570890Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-04-17T17:50:28.725345Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-04-17T17:50:28.732008Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately >> YdbSdkSessions::MultiThreadSync >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> YdbSdkSessions::TestSessionPool >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> YdbSdkSessions::SessionsServerLimit >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SimpleFeatureFlags >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> 
TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::AuthUsers >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsByCpuTables >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> TKeyValueTest::TestRenameToLongKey [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> YdbSdkSessions::TestSessionPool [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> YdbSdkSessions::SessionsServerLimit [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:86:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:141:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:84:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:86:2117] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:86:2117] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2119] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:92:2057] recipient: [11:88:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2120] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Re ... 
is [18:56:2097] sender: [18:95:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:96:2057] recipient: [18:94:2123] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:98:2057] recipient: [18:94:2123] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:97:2124] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:151:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:76:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:78:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:80:2057] recipient: [21:79:2110] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:82:2057] recipient: [21:79:2110] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:81:2111] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:135:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:76:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:79:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:80:2057] recipient: [22:78:2110] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:82:2057] recipient: [22:78:2110] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:81:2111] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:135:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:77:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:81:2057] recipient: [23:79:2110] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:83:2057] recipient: [23:79:2110] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:82:2111] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:136:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:80:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:83:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:82:2113] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:86:2057] recipient: [24:82:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:85:2114] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:139:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:80:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:82:2113] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:86:2057] recipient: [25:82:2113] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! 
new actor is[25:85:2114] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:139:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:81:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:85:2057] recipient: [26:83:2113] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:87:2057] recipient: [26:83:2113] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:86:2114] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:140:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2116] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:90:2057] recipient: [27:86:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2117] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:143:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:84:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:87:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:86:2116] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:90:2057] recipient: [28:86:2116] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! 
new actor is[28:89:2117] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:143:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:85:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:87:2116] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:91:2057] recipient: [29:87:2116] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:90:2117] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:144:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSessionPool [GOOD] Test command err: 2025-04-17T17:50:32.683822Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340974679668050:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:32.686774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000efe/r3tmp/tmp97W3lY/pdisk_1.dat 2025-04-17T17:50:33.120882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:33.120960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:33.121985Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:33.125540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30712, node 1 2025-04-17T17:50:33.259051Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:33.259081Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:33.259101Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:33.259211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5971 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:33.502474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:35.227690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340987564570934:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:35.227697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340987564570946:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:35.227815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:35.231434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:35.253607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340987564570948:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:35.339506Z node 1 :TX_PROXY ERROR: Actor# [1:7494340987564571032:2666] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] Test command err: 2025-04-17T17:48:46.104612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:46.104686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:46.176611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:49.857985Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:49.858060Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:49.960036Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:53.825006Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:53.825071Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:53.884176Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:54.982842Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-17T17:48:55.141757Z node 25 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:55.142317Z node 25 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:55.142892Z node 25 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2018204208034595097 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:55.189603Z node 26 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:55.194274Z node 26 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:55.194490Z node 26 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6423884318299522346 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:55.268320Z node 22 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:55.268811Z node 22 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:55.269033Z node 22 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9496794036748364035 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:55.322548Z node 21 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:55.323007Z node 21 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:55.323203Z node 21 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13022855866787836763 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:48:55.374025Z node 27 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:48:55.375234Z node 27 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:48:55.375485Z node 27 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016f9/r3tmp/tmpT0QnYF/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2711685294231159602 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 ... 
ode 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-04-17T17:50:35.978418Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-17T17:50:35.978508Z node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-04-17T17:50:35.978594Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-17T17:50:35.978684Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-17T17:50:35.978762Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2025-04-17T17:50:35.978931Z node 163 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [163:1379:2616], msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72057594046578944 2025-04-17T17:50:35.978983Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-17T17:50:35.979067Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-04-17T17:50:35.979142Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2025-04-17T17:50:35.979239Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 12 2025-04-17T17:50:35.979323Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 1, subscribers: 1 2025-04-17T17:50:35.979398Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046578944, LocalPathId: 3], 7 2025-04-17T17:50:35.982208Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-04-17T17:50:35.982253Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 281474976715661, path id: [OwnerId: 72057594046578944, LocalPathId: 3] 2025-04-17T17:50:35.982451Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-04-17T17:50:35.982487Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:674:2235], at schemeshard: 72057594046578944, txId: 281474976715661, path id: 3 2025-04-17T17:50:35.982622Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3 2025-04-17T17:50:35.982713Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72075186233409546 2025-04-17T17:50:35.982981Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-17T17:50:35.983840Z node 163 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T17:50:35.983930Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 tablet 
72057594046578944 removed=1 2025-04-17T17:50:35.983964Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 acknowledged 2025-04-17T17:50:35.983994Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 acknowledged 2025-04-17T17:50:35.984461Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-17T17:50:35.984546Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-17T17:50:35.984594Z node 163 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 281474976715661 2025-04-17T17:50:35.984686Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 281474976715661, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], version: 7 2025-04-17T17:50:35.984764Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-17T17:50:35.984921Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 281474976715661, subscribers: 1 2025-04-17T17:50:35.984995Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [163:1917:2383] 2025-04-17T17:50:35.988691Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2025-04-17T17:50:35.988760Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-04-17T17:50:35.988837Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[163:1379:2616], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2025-04-17T17:50:35.988924Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T17:50:35.988957Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T17:50:35.989094Z node 163 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T17:50:35.989127Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:1596:2765], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-17T17:50:35.990877Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2025-04-17T17:50:35.991991Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2025-04-17T17:50:35.992073Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } 
StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> YdbSdkSessions::TestActiveSessionCountAfterBadSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:140:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:84:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:88:2057] recipient: [22:86:2116] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:90:2057] recipient: [22:86:2116] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:89:2117] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:143:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:88:2057] recipient: [23:86:2116] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:90:2057] recipient: [23:86:2116] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:89:2117] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:143:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:87:2116] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:91:2057] recipient: [24:87:2116] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:90:2117] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:144:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> YdbSdkSessions::TestMultipleSessions >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] >> Viewer::SimpleFeatureFlags [GOOD] >> SystemView::AuthUsers [GOOD] >> SystemView::AuthUsers_LockUnlock >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2025-04-17T17:50:12.645412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340886326626052:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:12.645497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cc0/r3tmp/tmpt69h9j/pdisk_1.dat 2025-04-17T17:50:13.066697Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:13.113145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:13.113450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-17T17:50:13.118253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22783, node 1 2025-04-17T17:50:13.338700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:13.338729Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:13.338737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:13.338869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:13.990394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:14.020112Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:50:14.030567Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T17:50:14.030610Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:14.030622Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:14.031407Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T17:50:14.031737Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:25777 2025-04-17T17:50:14.048268Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-17T17:50:14.069601Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-17T17:50:14.070858Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T17:50:14.070894Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T17:50:14.071047Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-17T17:50:14.076207Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 1 CANCELLED 2025-04-17T17:50:14.077093Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-04-17T17:50:17.914719Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340909955550639:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:18.041087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cc0/r3tmp/tmpCCeDCt/pdisk_1.dat 2025-04-17T17:50:18.477554Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:18.506006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:18.506105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:18.519940Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19981, node 2 2025-04-17T17:50:18.726838Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:18.726862Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:18.726870Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:18.726976Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30283 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:19.063509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:19.075020Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:50:19.081059Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T17:50:19.081100Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:19.081110Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:19.081205Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:19.081253Z node 2 :GRPC_CLIENT DEBUG: [517000041408] Connect to grpc://localhost:23325 2025-04-17T17:50:19.082707Z node 2 :GRPC_CLIENT DEBUG: [517000041408] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-17T17:50:19.092613Z node 2 :GRPC_CLIENT DEBUG: [517000041408] Status 14 Service Unavailable 2025-04-17T17:50:19.094350Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T17:50:19.094391Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T17:50:19.094460Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:19.094722Z node 2 :GRPC_CLIENT DEBUG: [517000041408] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-17T17:50:19.099702Z node 2 :GRPC_CLIENT DEBUG: [517000041408] 
Status 14 Service Unavailable 2025-04-17T17:50:19.100111Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T17:50:19.100139Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T17:50:19.876352Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-17T17:50:19.876472Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:19.876699Z node 2 :GRPC_CLIENT DEBUG: [517000041408] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-17T17:50:19.879118Z node 2 :GRPC_CLIENT DEBUG: [517000041408] Status 14 Service Unavailable 2025-04-17T17:50:19.879252Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T17:50:19.879289Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T17:50:20.876852Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-17T17:50:20.876931Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:20.877169Z node 2 :GRPC_CLIENT DEBUG: [517000041408] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAcc ... 
1 2025-04-17T17:50:35.481112Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:35.481118Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:35.481148Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:35.481262Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-17T17:50:35.483720Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-04-17T17:50:35.483888Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "" 2025-04-17T17:50:35.483915Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-17T17:50:35.484439Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T17:50:35.484466Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:35.484474Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:35.484520Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:35.484678Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-17T17:50:35.486320Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-17T17:50:35.486618Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T17:50:35.487125Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T17:50:35.487163Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:35.487172Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:35.487247Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:35.487473Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" 
resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-17T17:50:35.489549Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-17T17:50:35.489731Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T17:50:35.490223Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T17:50:35.490256Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:35.490266Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:35.490326Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( monitoring.view) 2025-04-17T17:50:35.490547Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { checks { key: 0 value { permission { name: "monitoring.view" } container_id: "gizmo" iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "monitoring.view" } container_id: "gizmo" iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-17T17:50:35.492501Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-17T17:50:35.492802Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T17:50:38.750909Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494341000206063434:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:38.751005Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cc0/r3tmp/tmpTnPz2k/pdisk_1.dat 2025-04-17T17:50:38.859764Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:38.893281Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:38.893367Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:38.894783Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13513, node 5 2025-04-17T17:50:38.940785Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:38.940824Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:38.940835Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:38.940950Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15804 WaitRootIsUp 'Root'... 
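The ticket parser lines above show the two outcome classes the cache distinguishes: gRPC status 14 "Service Unavailable" is marked as a retryable error and the ticket is refreshed on a timer, while a PERMISSION_DENIED result is cached as the permanent error "Access Denied" and a successful check is cached as a valid token. The following is a minimal standalone sketch of that classification pattern, not YDB's actual TTicketParser code; all names and the backoff policy are illustrative assumptions.

```cpp
// Standalone sketch (hypothetical names): retryable vs. permanent outcomes
// of an access-service check, mirroring the behavior visible in the log.
#include <chrono>
#include <iostream>
#include <optional>
#include <string>
#include <thread>

enum class ECheckStatus { Ok, Retryable, Permanent };

struct TCheckResult {
    ECheckStatus Status;
    std::string Message;
};

// Hypothetical stand-in for one AuthorizeRequest round trip: fails twice
// with a transient error, then succeeds.
TCheckResult CallAccessService(int attempt) {
    if (attempt < 2)
        return {ECheckStatus::Retryable, "Service Unavailable"}; // gRPC status 14
    return {ECheckStatus::Ok, "user1@as"};
}

std::optional<std::string> AuthorizeWithRetry(int maxAttempts) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        const TCheckResult r = CallAccessService(attempt);
        switch (r.Status) {
            case ECheckStatus::Ok:
                return r.Message;    // cache as a valid token
            case ECheckStatus::Permanent:
                return std::nullopt; // cache as permanent "Access Denied"
            case ECheckStatus::Retryable:
                // The real parser marks the ticket retryable and refreshes it
                // on a timer; this sketch just backs off inline.
                std::this_thread::sleep_for(
                    std::chrono::milliseconds(100 << attempt));
                break;
        }
    }
    return std::nullopt;
}

int main() {
    if (auto token = AuthorizeWithRetry(4))
        std::cout << "has now valid token of " << *token << "\n";
    else
        std::cout << "has now permanent error message 'Access Denied'\n";
}
```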
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:39.185505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:39.194758Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T17:50:39.194794Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:39.194807Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:39.194879Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-17T17:50:39.194920Z node 5 :GRPC_CLIENT DEBUG: [5170000f3508] Connect to grpc://localhost:11130 2025-04-17T17:50:39.195841Z node 5 :GRPC_CLIENT DEBUG: [5170000f3508] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-17T17:50:39.205051Z node 5 :GRPC_CLIENT DEBUG: [5170000f3508] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-17T17:50:39.205292Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T17:50:39.205789Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T17:50:39.205839Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T17:50:39.205853Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T17:50:39.205914Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-17T17:50:39.206244Z node 5 :GRPC_CLIENT DEBUG: [5170000f3508] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize 
request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-17T17:50:39.208176Z node 5 :GRPC_CLIENT DEBUG: [5170000f3508] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } } 2025-04-17T17:50:39.208370Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] Test command err: 2025-04-17T17:50:33.459996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340979094354301:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:33.460062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef2/r3tmp/tmp8CzFgX/pdisk_1.dat 2025-04-17T17:50:33.778000Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14841, node 1 2025-04-17T17:50:33.862983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:33.863186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:33.891991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:33.898113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:33.898133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:33.898140Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:33.898290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:34.185635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:36.224918Z node 1 :KQP_PROXY WARN: TraceId: "01js2dzfp028svyg9795jgx4d3", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:36.247727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340991979257211:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:36.247832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:36.248095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340991979257223:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:36.251971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:36.281848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340991979257225:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:36.388364Z node 1 :TX_PROXY ERROR: Actor# [1:7494340991979257296:2679] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:36.731872Z node 1 :KQP_PROXY WARN: TraceId: "01js2dzg5vepkwye2dqhh3h0js", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:38.193685Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340999986060974:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:38.195530Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef2/r3tmp/tmpxssRZh/pdisk_1.dat 2025-04-17T17:50:38.375678Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:38.406267Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:38.406360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:38.409721Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7477, node 4 2025-04-17T17:50:38.472621Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:38.472650Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:38.472658Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:38.472813Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
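The repeated "Active sessions limit exceeded, maximum allowed: 2" warnings above come from the server admitting only a fixed number of concurrent sessions and rejecting the rest, which is exactly what this test provokes. Below is a minimal sketch of such a counting gate, assuming nothing about the real KQP proxy implementation; the class and method names are hypothetical.

```cpp
// Sketch (hypothetical names): a server-side gate that admits at most
// MaxActive concurrent sessions and rejects the rest, producing the same
// kind of rejection the test log shows.
#include <atomic>
#include <iostream>
#include <optional>

class TSessionGate {
public:
    explicit TSessionGate(int maxActive) : MaxActive(maxActive) {}

    // Returns the new active count if admitted, std::nullopt if over limit.
    std::optional<int> TryOpen() {
        int cur = Active.load();
        while (cur < MaxActive) {
            if (Active.compare_exchange_weak(cur, cur + 1))
                return cur + 1;
        }
        return std::nullopt;
    }

    void Close() { Active.fetch_sub(1); }

private:
    const int MaxActive;
    std::atomic<int> Active{0};
};

int main() {
    TSessionGate gate(/*maxActive=*/2);
    for (int i = 1; i <= 4; ++i) {
        if (auto n = gate.TryOpen())
            std::cout << "session " << i << " admitted (" << *n << " active)\n";
        else
            std::cout << "session " << i
                      << ": Active sessions limit exceeded, maximum allowed: 2\n";
    }
}
```

A client-side session pool (as in the YdbSdkSessions tests) keeps the number of outstanding sessions at or below this server limit, which is why the pooled variants of these tests pass where unpooled over-allocation is rejected.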
2025-04-17T17:50:38.690762Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:50:41.257205Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmk8ay98ndgnynvb2b2e", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.263493Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmkfafjpvnn92bg5vm5n", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.272385Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmkrfzvj9qktnvdyz30w", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.278542Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmky6gphcpc5z4pypz5f", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.285883Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmm53rans1esk43qyx3x", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.291296Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmma5n2ghhb21ws7m4vd", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.307689Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341012870963926:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:41.307748Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341012870963934:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:41.307785Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:41.311516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:41.339587Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494341012870963940:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:50:41.419744Z node 4 :TX_PROXY ERROR: Actor# [4:7494341012870964022:2686] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.482299Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmtacrqz6jk877vrge3e", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.489909Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmth4xv3exq4ptmv7jyh", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.497603Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmtsca8xecvy9pbhx5ac", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.504157Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmtz8sz1xk2at8rzh5na", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.510768Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmv662tnw8pvzp2yqqmv", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.520112Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmvf4trk468536m6yaxe", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.529043Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmvr0812hnt454vehy27", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.535740Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmvzfhz1qshf84q5pjr8", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.541919Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmw5de74br10hqecax6d", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.547568Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmwbb28t41d7m4kvy3tf", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.553652Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmwh8p0sg06aq41wrecs", Active sessions limit exceeded, maximum allowed: 2 2025-04-17T17:50:41.559713Z node 4 :KQP_PROXY WARN: TraceId: "01js2dzmwq42trg1crtjvkeypj", Active sessions limit exceeded, maximum allowed: 2 >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:79:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:78:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:78:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: 2025-04-17T17:49:28.365554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:28.366070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:28.366131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018dd/r3tmp/tmpNOCpeQ/pdisk_1.dat 2025-04-17T17:49:28.722940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:28.772076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:28.826852Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTA2NzZkZS1jMzcwOTI3Zi04YjgzZGMxOS1iYjk1NGExYQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTA2NzZkZS1jMzcwOTI3Zi04YjgzZGMxOS1iYjk1NGExYQ== 2025-04-17T17:49:28.827463Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTA2NzZkZS1jMzcwOTI3Zi04YjgzZGMxOS1iYjk1NGExYQ==, ActorId: [1:619:2540], ActorState: unknown state, session actor bootstrapped 2025-04-17T17:49:28.827840Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTA2NzZkZS1jMzcwOTI3Zi04YjgzZGMxOS1iYjk1NGExYQ==, ActorId: [1:619:2540], ActorState: ReadyState, TraceId: 01js2dxdvv6r9kph034pjnb9p9, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-04-17T17:49:29.044448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:623:2543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:29.044561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:29.076623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:29.076739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:29.079106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.097183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:29.127226Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:49:29.128064Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:49:29.128432Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:689:2579] 2025-04-17T17:49:29.128625Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:29.178764Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:49:29.179849Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:29.180022Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:49:29.182194Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:49:29.182302Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:49:29.182384Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:49:29.182862Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:49:29.183041Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:49:29.183156Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2579] in generation 1 2025-04-17T17:49:29.183715Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:49:29.242054Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:49:29.242317Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:49:29.242437Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:705:2588] 2025-04-17T17:49:29.242467Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:49:29.242496Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:49:29.242536Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:29.242752Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:689:2579], Recipient [1:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:49:29.242795Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:49:29.242970Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 
72075186224037888 2025-04-17T17:49:29.243080Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:49:29.243160Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:49:29.243200Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:49:29.243258Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:49:29.243293Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:49:29.243321Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:49:29.243345Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:49:29.243377Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:29.276107Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:708:2590], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:29.276177Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:29.276228Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:680:2574], serverId# [1:708:2590], sessionId# [0:0:0] 2025-04-17T17:49:29.276343Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:708:2590] 2025-04-17T17:49:29.276387Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:49:29.276521Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:49:29.276736Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:49:29.276797Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:49:29.276895Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:49:29.276951Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:49:29.277027Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:49:29.277072Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:49:29.277110Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:29.277419Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:49:29.277480Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:49:29.277521Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:49:29.277566Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:49:29.277620Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:49:29.277668Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:49:29.277705Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:49:29.277735Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:49:29.277793Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:49:29.278636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:49:29.278716Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:29.278771Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:49:29.278902Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:49:29.278974Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:49:29.281255Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:709:2591], Recipient [1:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:49:29.281303Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:49:29.325698Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:724:2600], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:29.325776Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:29.325813Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:723:2599], serverId# [1:724:2600], sessionId# [0:0:0] 2025-04-17T17:49:29.325960Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:689:2579]: {TEvPlanStep step# 300 MediatorId# 72057594046382081 Tabl ... node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-17T17:50:42.247079Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:50:42.247145Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-17T17:50:42.247221Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-04-17T17:50:42.247326Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:50:42.248138Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NjRhNWJiYTItYzcyNzc4ZmMtMTYxYzFmNjgtZjczY2JkN2M=, ActorId: [13:842:2687], ActorState: ExecuteState, TraceId: 01js2dzndw6cbw4mhk6v2c49s2, Create QueryResponse for error on request, msg: 2025-04-17T17:50:42.249290Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. 
Ctx: { TraceId: 01js2dzndw6cbw4mhk6v2c49s2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjRhNWJiYTItYzcyNzc4ZmMtMTYxYzFmNjgtZjczY2JkN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:42.249669Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:950:2687], Recipient [13:905:2733]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 950 RawX2: 55834577535 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-04-17T17:50:42.249714Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:50:42.249834Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:905:2733], Recipient [13:905:2733]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T17:50:42.249869Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T17:50:42.249929Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:50:42.250092Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-17T17:50:42.250196Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-04-17T17:50:42.250242Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-17T17:50:42.250274Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-04-17T17:50:42.250303Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:50:42.250333Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:50:42.250370Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-04-17T17:50:42.250418Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2025-04-17T17:50:42.250453Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-17T17:50:42.250481Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:50:42.250508Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-17T17:50:42.250534Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-17T17:50:42.250607Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-04-17T17:50:42.250736Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 
PathId: 2 HasWrites: false 2025-04-17T17:50:42.250826Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:50:42.250904Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-17T17:50:42.250936Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-17T17:50:42.250962Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:50:42.250990Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-17T17:50:42.251050Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:50:42.251166Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-04-17T17:50:42.251200Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:50:42.251230Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:50:42.251260Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:50:42.251305Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-17T17:50:42.251331Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:50:42.251358Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-04-17T17:50:42.251418Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:50:42.251453Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-17T17:50:42.251493Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:50:42.252964Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:61:2108], Recipient [13:905:2733]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2025-04-17T17:50:42.466236Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2dznjkfyye39gw0ycvfrkm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NzVmYjZkOGEtMzU4M2NjMmUtOTgwNTYzZC1mYmU1YTZiYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:50:42.468952Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:971:2777], Recipient [13:905:2733]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T17:50:42.469226Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:50:42.469324Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-04-17T17:50:42.469399Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v500/18446744073709551615 2025-04-17T17:50:42.469526Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-17T17:50:42.469672Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:50:42.469753Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:50:42.469831Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:50:42.469895Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:50:42.469967Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-17T17:50:42.470040Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:50:42.470077Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:50:42.470106Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:50:42.470137Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:50:42.470317Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T17:50:42.470661Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:971:2777], 0} after executionsCount# 1 2025-04-17T17:50:42.470763Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:971:2777], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:50:42.470891Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:971:2777], 0} finished in read 2025-04-17T17:50:42.470992Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:50:42.471021Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:50:42.471048Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:50:42.471079Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 
2025-04-17T17:50:42.471131Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:50:42.471157Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:50:42.471192Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-17T17:50:42.471265Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:50:42.471424Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:50:42.472880Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:971:2777], Recipient [13:905:2733]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:50:42.472963Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2025-04-17T17:50:39.877017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341001344390089:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:39.877905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef0/r3tmp/tmp5LewkG/pdisk_1.dat 2025-04-17T17:50:40.199663Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64619, node 1 2025-04-17T17:50:40.228332Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:40.228364Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:40.229355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:40.229624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:40.238245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:40.280865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:40.280885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:40.280891Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:40.280975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13307 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:40.573061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:42.623251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341014229292857:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.623349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.905814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:43.290033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341018524260333:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.290140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.290625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341018524260338:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.295523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:50:43.320270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341018524260340:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:50:43.407670Z node 1 :TX_PROXY ERROR: Actor# [1:7494341018524260428:2806] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:43.568063Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2dzpjs8pa9rjktk50axe1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzIyZjFiNjktOTBlNWJjMTgtZmQ2YmIyYzctMTQ4NWIyNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:43.765336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2dzq1kbvgyg54qe389hmcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDNiZGQ0NTYtYzU1ZmU2MTYtYzQ0Y2QzMDUtNjlkM2ZlYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] Test command err: 2025-04-17T17:50:41.069673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341012452043704:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:41.069715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ee2/r3tmp/tmpglvBxg/pdisk_1.dat 2025-04-17T17:50:41.417313Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4341, node 1 2025-04-17T17:50:41.470094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:41.470396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:41.479656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:41.507827Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:41.507845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:41.507854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:41.507990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:41.772949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:43.847750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341021041979313:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.847750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341021041979322:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.848189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.852699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:43.854260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341021041979361:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.854261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341021041979359:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.854367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:43.861425Z node 1 :TX_PROXY ERROR: Actor# [1:7494341021041979367:2643] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:50:43.873339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341021041979328:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:43.873424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341021041979365:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:43.968268Z node 1 :TX_PROXY ERROR: Actor# [1:7494341021041979456:2701] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:43.972192Z node 1 :TX_PROXY ERROR: Actor# [1:7494341021041979460:2704] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] >> DbCounters::TabletsSimple >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: BASE_PERF = 6.790394605 2025-04-17T17:49:24.490575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:114:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:24.491100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:24.491164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 26188, node 1 TClient is connected to server localhost:25984 2025-04-17T17:49:33.504614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:33.504940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:33.505078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6510, node 2 TClient is connected to server localhost:14097 2025-04-17T17:49:42.251744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:335:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:42.252125Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:42.252372Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6481, node 3 TClient is connected to server localhost:23305 2025-04-17T17:49:52.158015Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:339:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:52.158305Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:52.158461Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 11258, node 4 TClient is connected to server localhost:3682 2025-04-17T17:50:02.575052Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:02.575629Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:02.575851Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 22066, node 5 TClient is connected to server localhost:4314 2025-04-17T17:50:16.103602Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:335:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:16.104068Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:16.104324Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13152, node 6 TClient is connected to server localhost:26007 2025-04-17T17:50:30.393152Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:30.393353Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:50:30.393600Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13537, node 7 TClient is connected to server localhost:4885 2025-04-17T17:50:34.376092Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494340982517451702:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:34.376193Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:50:34.612338Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:34.634656Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:34.634795Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:34.637145Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14948, node 8 2025-04-17T17:50:34.743153Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:34.743193Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:34.743213Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:34.743444Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12488 >> SystemView::TopPartitionsByCpuFields [GOOD] >> SystemView::TopPartitionsByCpuRanges >> SystemView::AuthGroups_Access >> SystemView::PartitionStatsOneSchemeShard >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 
18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 
ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 
31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 
| | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | 
ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: 
{25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} 
Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [27:56:2097] sender: [27:89:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:92:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:93:2057] recipient: [27:91:2119] Leader for TabletID 72057594037927937 is [27:94:2120] sender: [27:95:2057] recipient: [27:91:2119] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! 
new actor is[27:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:76:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:79:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:78:2110] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:82:2057] recipient: [30:78:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:81:2111] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:135:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:76:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:79:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:78:2110] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:82:2057] recipient: [31:78:2110] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:81:2111] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:135:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:77:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:79:2110] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:81:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:83:2057] recipient: [32:79:2110] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:82:2111] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:136:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:80:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:83:2057] recipient: [33:82:2113] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:86:2057] recipient: [33:82:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:85:2114] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:139:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:80:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:83:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:82:2113] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:86:2057] recipient: [34:82:2113] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! 
new actor is[34:85:2114] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:139:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:81:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:85:2057] recipient: [35:83:2113] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:87:2057] recipient: [35:83:2113] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:86:2114] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:140:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:84:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:87:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:86:2116] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:90:2057] recipient: [36:86:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:89:2117] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:143:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:84:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:87:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:86:2116] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:90:2057] recipient: [37:86:2116] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! 
new actor is[37:89:2117] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:143:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:85:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:89:2057] recipient: [38:87:2116] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:91:2057] recipient: [38:87:2116] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:90:2117] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:144:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> ShowCreateView::WithTablePathPrefix >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> SystemView::Nodes ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] Test command err: 2025-04-17T17:50:31.948787Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340966655468263:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:31.949127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f03/r3tmp/tmpnwmfuJ/pdisk_1.dat 2025-04-17T17:50:32.287695Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:32.339375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:32.339504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:32.343995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22769, node 1 2025-04-17T17:50:32.441930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:32.441952Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:32.441963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:32.442081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:32.724691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:37.024200Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340995476707475:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:37.024671Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f03/r3tmp/tmpCqCUDz/pdisk_1.dat 2025-04-17T17:50:37.240210Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:37.284688Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:37.284822Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:37.290140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13291, node 4 2025-04-17T17:50:37.406675Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:37.406707Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:37.406716Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:37.406852Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5871 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:37.619350Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:40.805851Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610565:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806439Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806647Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610671:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806736Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610673:2511], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806784Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610674:2512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806820Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610675:2513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806848Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610676:2514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806935Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610679:2517], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.806956Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610672:2510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807000Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610680:2518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807034Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610681:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807061Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610682:2520], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610667:2505], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807161Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610689:2527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807167Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610668:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807202Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610669:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807217Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610677:2515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807247Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610678:2516], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807263Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610670:2508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807313Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341008361610683:2521], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:40.807370Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFet ... ROR: Actor# [4:7494341008361611348:3130] txid# 281474976710712, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.028152Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611359:3140] txid# 281474976710718, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.028260Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611361:3142] txid# 281474976710720, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.030561Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611350:3132] txid# 281474976710714, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.035531Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611360:3141] txid# 281474976710719, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.035755Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611383:3156] txid# 281474976710721, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.035925Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611445:3204] txid# 281474976710727, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.036231Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611454:3213] txid# 281474976710733, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.036389Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611438:3197] txid# 281474976710723, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.036532Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611441:3200] txid# 281474976710725, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.036652Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611456:3215] txid# 281474976710735, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.036909Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611457:3216] txid# 281474976710736, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.037336Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611437:3196] txid# 281474976710722, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.037496Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611451:3210] txid# 281474976710730, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.037797Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611452:3211] txid# 281474976710731, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.037934Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611461:3220] txid# 281474976710738, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.038207Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611439:3198] txid# 281474976710724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.038355Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611453:3212] txid# 281474976710732, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.041287Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611449:3208] txid# 281474976710728, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.041423Z node 4 :TX_PROXY ERROR: Actor# 
[4:7494341008361611450:3209] txid# 281474976710729, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.041979Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611464:3223] txid# 281474976710741, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.042239Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611444:3203] txid# 281474976710726, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.042363Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611463:3222] txid# 281474976710740, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.042489Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611455:3214] txid# 281474976710734, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.042632Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611458:3217] txid# 281474976710737, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.044023Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611462:3221] txid# 281474976710739, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.044452Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611525:3271] txid# 281474976710742, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.045541Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611527:3273] txid# 281474976710744, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.045971Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611530:3276] txid# 281474976710747, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: 
EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.046103Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611526:3272] txid# 281474976710743, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.046233Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611528:3274] txid# 281474976710745, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:41.046470Z node 4 :TX_PROXY ERROR: Actor# [4:7494341008361611529:3275] txid# 281474976710746, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:42.020865Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494340995476707475:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:42.020950Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] >> TPart::MassCheck [GOOD] >> TPart::WreckPart >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] Test command err: 2025-04-17T17:50:45.256553Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341029694089941:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:45.256695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed4/r3tmp/tmpH3soIJ/pdisk_1.dat 2025-04-17T17:50:45.674668Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:45.683887Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:45.684057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:45.691728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6307, node 1 2025-04-17T17:50:45.846457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:45.846482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:45.846491Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:45.846612Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29839 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:46.120384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:48.244269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341042578992850:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:48.244467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:48.498956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:48.715456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341042578993026:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:48.715554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:48.715914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341042578993031:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:48.719525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:50:48.753654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341042578993033:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:50:48.851432Z node 1 :TX_PROXY ERROR: Actor# [1:7494341042578993129:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:48.963924Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2dzvwa30t9v6ee168x4qm5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJmNGNjNGMtM2U3YjllZDQtYWUwM2QyN2YtMzgyZmQwYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:49.159133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2dzw9z1cx3b4r8ndd9s6d2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjUzOGQ5MjYtZWM5Njk1MGYtNjFkYzhjZi05MWZkYzM2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... boot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:85:2114] Leader for TabletID 72057594037927937 is [22:85:2114] sender: [22:139:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:82:2113] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:86:2057] recipient: [23:82:2113] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:104:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:85:2115] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:89:2057] recipient: [25:85:2115] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:88:2116] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:142:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:83:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:86:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:85:2115] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:89:2057] recipient: [26:85:2115] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:88:2116] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:142:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2115] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:90:2057] recipient: [27:86:2115] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2116] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:107:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:86:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:90:2057] recipient: [28:89:2117] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:92:2057] recipient: [28:89:2117] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:91:2118] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:145:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:86:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:92:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:93:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:93:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] Test command err: 2025-04-17T17:50:40.372685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341009062529136:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:40.372823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ee3/r3tmp/tmpejz1gc/pdisk_1.dat 2025-04-17T17:50:40.685592Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25767, node 1 2025-04-17T17:50:40.780255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:40.780857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:40.808885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:40.824052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T17:50:40.824079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:40.824086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:40.824229Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6662 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:41.120296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:42.928066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341017652464540:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.928121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341017652464548:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.928159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.931196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:42.934249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341017652464589:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.934337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341017652464591:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.934350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:42.941881Z node 1 :TX_PROXY ERROR: Actor# [1:7494341017652464595:2648] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:50:42.965634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341017652464594:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:42.965768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341017652464554:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:43.035486Z node 1 :TX_PROXY ERROR: Actor# [1:7494341021947432007:2716] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:43.044183Z node 1 :TX_PROXY ERROR: Actor# [1:7494341021947432017:2724] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:44.650221Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341023536051626:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:44.650337Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ee3/r3tmp/tmp2r7iz8/pdisk_1.dat 2025-04-17T17:50:44.858788Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26578, node 4 2025-04-17T17:50:44.972814Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:44.972926Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:45.011210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:45.018934Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:45.018958Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:45.018968Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:45.019109Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:45.278046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:47.790613Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341036420954536:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:47.790685Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:47.822846Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:47.999860Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341036420954710:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:47.999931Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:48.000240Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341036420954715:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:48.004902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:50:48.027996Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494341040715922013:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:50:48.127489Z node 4 :TX_PROXY ERROR: Actor# [4:7494341040715922127:2793] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:49.646688Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494341023536051626:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:49.646772Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys >> TPart::WreckPart [GOOD] >> TPart::PageFailEnv >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TOlap::StoreStatsQuota [GOOD] >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> IcebergClusterProcessor::ValidateDdlCreationForHadoopWithS3 [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> SystemView::TopPartitionsByCpuTables [GOOD] >> SystemView::TopPartitionsByTliFields |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: 
[1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... or TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! 
new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:92:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:93:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:93:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:76:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:79:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:80:2057] recipient: [35:78:2110] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:82:2057] recipient: [35:78:2110] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:81:2111] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:135:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! 
new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:77:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:81:2057] recipient: [37:79:2110] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:83:2057] recipient: [37:79:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:82:2111] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:136:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:79:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:82:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:83:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:85:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:84:2113] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:138:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:79:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:81:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:82:2112] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:85:2057] recipient: [39:82:2112] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! 
new actor is[39:84:2113] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:138:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:82:2112] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:86:2057] recipient: [40:82:2112] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! new actor is[40:85:2113] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] >> Initializer::Simple [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv >> Cache::Test4 [GOOD] >> Cache::Test5 |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 
1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 
RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} 
{Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 4910b 40r} data 6206b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 50b {0, 1} | 1 2 50b {0, 4} | 2 4 50b {0, 7} | 3 6 50b {0, 10} | 4 8 50b {1, 3} | 5 10 50b {1, 6} | 6 12 50b {1, 8} | 7 14 50b {2, NULL} | 8 16 50b {2, 4} | 10 18 50b {2, 7} | 11 20 50b {2, 10} | 12 22 50b {3, 3} | 13 24 50b {3, 6} | 15 26 50b {3, 8} | 16 28 50b {4, NULL} | 17 30 50b {4, 4} | 18 32 50b {4, 7} | 19 34 50b {4, 10} | 21 36 50b {5, 3} | 22 38 50b {5, 6} | 22 39 50b {5, 7} + BTreeIndex ... 
xxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} 
Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 
Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} 
{Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::StoreStatsQuota [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:48:10.086527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:48:10.086624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:10.086692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:48:10.086731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:48:10.086772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:48:10.086801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:48:10.086862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:48:10.086936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:48:10.087278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:48:10.282116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:10.282167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:10.323547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:48:10.323958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:48:10.324109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:48:10.346661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:48:10.346862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-17T17:48:10.347539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:10.347763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:48:10.362886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:10.364237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:10.364299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:48:10.364460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:48:10.364515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:10.364566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:48:10.364685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:48:10.383132Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:48:11.353859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:48:11.363596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.364993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:48:11.382745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:48:11.383120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.402967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.403118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:48:11.403307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.403362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:48:11.403410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:48:11.403461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:48:11.405223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.405274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:48:11.405312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:48:11.410611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.410673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.410721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.410767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.414142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:48:11.422927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:48:11.423124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:48:11.428004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:48:11.428611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:48:11.429325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.438461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:48:11.439120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:48:11.440983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:48:11.447936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:48:11.514744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:48:11.515352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:48:11.517245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T17:48:11.534607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:48:11.537423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:48:11.538058Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:48:11.538164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:11.547099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.547788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:48:11.548104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.548429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:48:11.549401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:48:11.549749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:48:11.550113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:48:11.558420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:48:11.558712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:48:11.559008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:48:11.614889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:11.616446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:48:11.617413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
shot_ts=1744911557921; 2025-04-17T17:50:51.054679Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:50:51.054728Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:50:51.054767Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:50:51.054868Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:50:51.181228Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T17:50:51.403596Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T17:50:51.414020Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-04-17T17:50:51.414144Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-04-17T17:50:51.414394Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T17:50:51.414453Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:50:51.414523Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:50:51.414579Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-17T17:50:51.414646Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744911257921;tx_id=18446744073709551615;;current_snapshot_ts=1744911557921; 2025-04-17T17:50:51.414688Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:50:51.414739Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:50:51.414778Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:50:51.414885Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:50:51.540786Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T17:50:51.718333Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T17:50:51.730712Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-04-17T17:50:51.730905Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-04-17T17:50:51.730968Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T17:50:51.731058Z node 2 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2025-04-17T17:50:51.731175Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:50:51.731263Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:50:51.731384Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-17T17:50:51.731469Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744911257921;tx_id=18446744073709551615;;current_snapshot_ts=1744911557921; 2025-04-17T17:50:51.731523Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:50:51.731581Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:50:51.731626Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:50:51.731742Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:50:51.732048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:50:51.739172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-17T17:50:51.739369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=OlapStore, is column=0, is olap=1, RowCount 0, DataSize 0 2025-04-17T17:50:51.739466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: OLAP store contains 1 tables. 
2025-04-17T17:50:51.742297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Aggregated stats for pathId 3: RowCount 0, DataSize 0 2025-04-17T17:50:51.742789Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:51.742845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:50:51.746562Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:51.751687Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:51.751755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:336:2312], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-17T17:50:51.751841Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 273us result status StatusSuccess 2025-04-17T17:50:51.754326Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 102 CreateStep: 1744911556914 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1000000 data_size_soft_quota: 900000 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:51.755360Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2025-04-17T17:50:51.757114Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046678944, LocalPathId: 2]; ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T17:49:43.146846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:49:43.146932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:43.146979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:49:43.147019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:49:43.147078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:49:43.147115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:49:43.147166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:49:43.147241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:49:43.147511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:43.229499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:49:43.229603Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T17:49:43.252375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:43.255348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-17T17:49:43.255514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:49:43.284034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:49:43.286613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:49:43.287216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:43.287481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:49:43.295106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:43.296447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:43.296508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:43.296722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:49:43.296769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:43.296809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:49:43.296968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T17:49:43.315142Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T17:49:43.436533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:49:43.436798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.436988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:49:43.437200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:49:43.437270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.439842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:43.439981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-04-17T17:49:43.440186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.440278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:49:43.440312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:43.440342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:43.442368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.442416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:49:43.442452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:43.444595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.444651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.444707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:43.444747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:43.448127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:43.452465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:49:43.452651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:49:43.453502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:49:43.453617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:49:43.453660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:43.453939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:49:43.453987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:49:43.454220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:49:43.454293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:49:43.456400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:49:43.456437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:49:43.456610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:49:43.456643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:49:43.457020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:49:43.457058Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:49:43.457137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:49:43.457164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:49:43.457212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... lPathId: 3] 2025-04-17T17:50:52.237452Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:52.237487Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:205:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-04-17T17:50:52.237524Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:205:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-04-17T17:50:52.237912Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-17T17:50:52.237953Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:50:52.238944Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-17T17:50:52.239023Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-17T17:50:52.239051Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-04-17T17:50:52.239083Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:50:52.239114Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 1002 2025-04-17T17:50:52.240074Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1002 2025-04-17T17:50:52.240146Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-17T17:50:52.240171Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-04-17T17:50:52.240198Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T17:50:52.240229Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:50:52.240290Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-04-17T17:50:52.242260Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1040 } } 2025-04-17T17:50:52.242300Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-17T17:50:52.242410Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1040 } } 2025-04-17T17:50:52.242490Z node 72 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1040 } } 2025-04-17T17:50:52.243027Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 309237647629 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-17T17:50:52.243060Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-17T17:50:52.243153Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 309237647629 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-17T17:50:52.243187Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:50:52.243251Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 330 RawX2: 309237647629 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-17T17:50:52.243295Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, 
operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:52.243324Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-17T17:50:52.243355Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:50:52.243388Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2025-04-17T17:50:52.246969Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-17T17:50:52.247111Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-17T17:50:52.247571Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-17T17:50:52.247672Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-17T17:50:52.247796Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-17T17:50:52.247833Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-04-17T17:50:52.247923Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-04-17T17:50:52.247952Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-17T17:50:52.247985Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-04-17T17:50:52.248013Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-17T17:50:52.248045Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-04-17T17:50:52.248080Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-17T17:50:52.248113Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-04-17T17:50:52.248142Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-04-17T17:50:52.248258Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-04-17T17:50:52.250871Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-04-17T17:50:52.250915Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-04-17T17:50:52.251225Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-04-17T17:50:52.251313Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-04-17T17:50:52.251343Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:408:2381] TestWaitNotification: OK eventTxId 1002 2025-04-17T17:50:52.251730Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:52.251915Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 227us result status StatusSuccess 2025-04-17T17:50:52.252335Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 >> TPart::ForwardEnv [GOOD] >> TPart::WreckPartColumnGroups >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |76.7%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit |76.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/dynamic_config/ut/unittest >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-04-17T17:49:34.204370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:34.217814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:34.217880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00179f/r3tmp/tmpQIb3Uq/pdisk_1.dat 2025-04-17T17:49:35.548560Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.143141s 2025-04-17T17:49:35.548712Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.143323s TServer::EnableGrpc on GrpcPort 25332, node 1 TClient is connected to server localhost:29513 2025-04-17T17:49:36.047393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.149197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:36.168897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:36.168944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:36.168965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:36.177773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:49:36.235267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:36.235403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:36.266635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:46.802595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:681:2572], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:46.802796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:691:2577], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:46.822339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:46.907588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:49:46.971788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:695:2580], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:49:47.087012Z node 1 :TX_PROXY ERROR: Actor# [1:764:2618] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:48.268233Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:774:2627], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:49:48.294026Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGE5ZThjY2MtYmIzMGU5NDQtNjZlMzAwMDctMWQ3ZTc1OTE=, ActorId: [1:674:2566], ActorState: ExecuteState, TraceId: 01js2dxzbq0801kjy11bqcyrsa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-04-17T17:49:48.463230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-17T17:49:53.744459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:49:54.129388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:49:54.931392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480 Initialization finished 2025-04-17T17:50:05.889360Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2dyhz3937j0hb414np67gm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE3YWNmOGUtYmQ3MDllNjMtYzU2MTYyNjQtOGY2MTIzYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-17T17:50:20.179105Z node 1 :TX_PROXY ERROR: Actor# [1:1359:3053] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-04-17T17:50:20.179289Z node 1 :TX_PROXY ERROR: Actor# [1:1359:3053] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-17T17:50:30.813824Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2dzaa64yr7x1bf4a8ze1zt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ2YTYxOS1mN2IxMjg1Mi00ZDFiYjczMS1mOWZjODhlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:977 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-04-17T17:50:52.076016Z node 1 :TX_PROXY ERROR: Actor# [1:1552:3196] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-04-17T17:50:52.076223Z node 1 :TX_PROXY ERROR: Actor# [1:1552:3196] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> TSchemeShardServerLess::StorageBillingLabels >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors |76.7%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.7%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> SystemView::AuthUsers_LockUnlock [GOOD] >> SystemView::AuthUsers_Access >> TSchemeShardServerLess::StorageBilling |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> data_correctness.py::TestDataCorrectness::test [GOOD] >> TSchemeShardServerLess::Fake [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: 2025-04-17T17:49:26.215951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:26.216517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:26.216594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018e4/r3tmp/tmp1AL9UX/pdisk_1.dat 2025-04-17T17:49:26.567831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.608328Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:26.646715Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:26.647662Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:26.647864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:26.647970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:26.659497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:26.745599Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:49:26.745680Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:49:26.745909Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:49:26.856887Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:49:26.856999Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:49:26.857508Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:49:26.857586Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:49:26.857870Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:49:26.858065Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:49:26.858158Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:49:26.859937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.860391Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:49:26.861076Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:49:26.861147Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:49:26.896756Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:49:26.897813Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:49:26.898282Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:49:26.898535Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:26.942509Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:49:26.943274Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:26.943392Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:49:26.944894Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:49:26.944960Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:49:26.945019Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:49:26.945316Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:49:26.945419Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:49:26.945505Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:49:26.945956Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:49:26.982791Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:49:26.983017Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:49:26.983143Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:49:26.983176Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:49:26.983209Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:49:26.983242Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:26.983483Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:49:26.983557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:49:26.983848Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:49:26.983949Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:49:26.984048Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:49:26.984087Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:49:26.984131Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:49:26.984189Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:49:26.984233Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:49:26.984275Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:49:26.984328Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:26.984452Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:26.984484Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:26.984523Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:49:26.984885Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:49:26.984925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:49:26.985045Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:49:26.985310Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:49:26.985374Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:49:26.985469Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:49:26.985511Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:49:26.985566Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:49:26.985610Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:49:26.985647Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:26.985941Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:49:26.985980Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:49:26.986012Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:49:26.986054Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:49:26.986100Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:49:26.986135Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:49:26.986169Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:49:26.986219Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:49:26.986247Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:49:26.987004Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:49:26.987047Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:26.987088Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:49:26.987141Z node 1 :TX_DATASHARD TRACE: Prop ... 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T17:50:54.016068Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:50:54.016153Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-17T17:50:54.016291Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T17:50:54.016393Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T17:50:54.016499Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:667:2571], Recipient [13:756:2635]: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T17:50:54.016529Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:50:54.016557Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-17T17:50:54.016609Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T17:50:54.016642Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T17:50:54.016717Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:756:2635], Recipient [13:667:2571]: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:50:54.016745Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:50:54.016771Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 
dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-17T17:50:54.016812Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:50:54.017011Z node 13 :TX_DATASHARD DEBUG: Complete [3048 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:984:2782], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:50:54.017290Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:667:2571], Recipient [13:756:2635]: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:50:54.017321Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:50:54.017348Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-17T17:50:54.017408Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:50:54.017505Z node 13 :TX_DATASHARD DEBUG: Complete [3048 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:984:2782], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1760 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } 2025-04-17T17:50:54.018296Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:50:54.018613Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 932 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } 2025-04-17T17:50:54.019288Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:50:54.022012Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T17:50:54.022237Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:667:2571], Recipient [13:756:2635]: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-17T17:50:54.022344Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:50:54.022428Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-04-17T17:50:54.028378Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 
72075186224037889 2025-04-17T17:50:54.028680Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:756:2635], Recipient [13:667:2571]: {TEvReadSet step# 3048 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-17T17:50:54.028781Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:50:54.028857Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-04-17T17:50:54.228548Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T17:50:54.228648Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-04-17T17:50:54.229658Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2e012d2fvhjbdwxnr8fwqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTNlZDEyMGEtYjcyMWI5Y2ItMzk4MzdiNGQtOGZmZDBkNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-04-17T17:50:54.232493Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1108:2902], Recipient [13:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T17:50:54.232686Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:50:54.232780Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3048/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2025-04-17T17:50:54.232854Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2025-04-17T17:50:54.232973Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-17T17:50:54.233118Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:50:54.233191Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:50:54.233259Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:50:54.233316Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:50:54.233379Z node 13 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-17T17:50:54.233463Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:50:54.233495Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:50:54.233520Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:50:54.233544Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 
72075186224037888 on unit ExecuteRead 2025-04-17T17:50:54.233690Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T17:50:54.234095Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:1108:2902], 0} after executionsCount# 1 2025-04-17T17:50:54.234192Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1108:2902], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:50:54.234319Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1108:2902], 0} finished in read 2025-04-17T17:50:54.234407Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:50:54.234435Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:50:54.234459Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:50:54.234487Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:50:54.234536Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:50:54.234561Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:50:54.234599Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T17:50:54.234664Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:50:54.234831Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:50:54.236178Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1108:2902], Recipient [13:667:2571]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:50:54.236263Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |76.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:50:56.147851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:50:56.147905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.147931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:50:56.147957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:50:56.147988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:50:56.148007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:50:56.148038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.148099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:50:56.148337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:50:56.245844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:50:56.245894Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:56.259878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:50:56.259992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:50:56.260127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:50:56.265804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:50:56.266051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:56.266623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.266934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:50:56.270603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.271699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:56.271751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.271866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:50:56.271907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
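The last NOTICE above shows TTxServerlessStorageBilling short-circuiting: the billing transaction runs on every schemeshard, but only a serverless database produces a bill, so the root domain exits as a no-op. A hedged sketch of that guard — invented types and a hypothetical TryMakeBill helper, not the real implementation — could look like:

#include <optional>
#include <string>
#include <cstdio>

struct TDomainInfo {
    bool IsServerless = false;
    std::string DomainId;
};

std::optional<std::string> TryMakeBill(const TDomainInfo& domain) {
    if (!domain.IsServerless) {
        // Mirrors the NOTICE above: the root database is not serverless,
        // so there is nothing to meter and the tx completes as a no-op.
        std::printf("unable to make a bill, domain is not a serverless db, domainId: %s\n",
                    domain.DomainId.c_str());
        return std::nullopt;
    }
    return "bill-for-" + domain.DomainId; // a real implementation would meter storage here
}

int main() {
    TryMakeBill(TDomainInfo{false, "[OwnerId: 72057594046678944, LocalPathId: 1]"});
}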
2025-04-17T17:50:56.271946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:56.272106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.277813Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:50:56.412652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:56.412790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.412925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:56.413087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:50:56.413126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.414997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.415127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:56.415292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.415351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:56.415410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:56.415446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:56.417232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.417288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:56.417320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:56.418784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.418822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.418858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.418914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.421403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:56.423308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:56.423467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:50:56.424088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.424170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:56.424208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.424396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:56.424434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.424534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:56.424617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:56.426140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:56.426193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:56.426372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.426412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:56.426697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.426750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:50:56.426836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:56.426862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.426899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:56.426942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.426975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:56.427013Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.427044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:56.427069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:56.427125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:56.427157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:56.427185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:50:56.428823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:56.428913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:56.428945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tor: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-17T17:50:56.865746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:50:56.865850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:616:2544], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:50:56.865929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409549 2025-04-17T17:50:56.865966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409549, txId: 0, path id: [OwnerId: 72075186234409549, LocalPathId: 1] 2025-04-17T17:50:56.866063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409549 2025-04-17T17:50:56.866088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:708:2610], at schemeshard: 72075186234409549, txId: 0, path id: 1 2025-04-17T17:50:56.867025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2025-04-17T17:50:56.867141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-04-17T17:50:56.867290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0 2025-04-17T17:50:56.867740Z 
node 1 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2025-04-17T17:50:56.867870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-04-17T17:50:56.867902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-17T17:50:56.868005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-04-17T17:50:56.868056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-04-17T17:50:56.868094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240 2025-04-17T17:50:56.868676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:50:56.868735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:50:56.869883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.870018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.870056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-17T17:50:56.870154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:50:56.870202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:56.870233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:50:56.870260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:56.870292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-17T17:50:56.870322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:56.870369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T17:50:56.870393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-17T17:50:56.870444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T17:50:56.871994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T17:50:56.872036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T17:50:56.872519Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T17:50:56.872603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T17:50:56.872649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 106: satisfy waiter [1:856:2738] TestWaitNotification: OK eventTxId 106 2025-04-17T17:50:56.873214Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:56.873380Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 179us result status StatusSuccess 2025-04-17T17:50:56.873597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:56.874094Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-04-17T17:50:56.874251Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 159us result status StatusSuccess 2025-04-17T17:50:56.874482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2025-04-17T17:50:56.875053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:56.875219Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 134us result status StatusSuccess 2025-04-17T17:50:56.875443Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:56.875938Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:50:56.165619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:50:56.165701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.165737Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:50:56.165768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:50:56.165809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:50:56.165835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:50:56.165881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.165966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:50:56.166401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:50:56.251083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:50:56.251128Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:56.269306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:50:56.269416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:50:56.269542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:50:56.275285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:50:56.275553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:56.276177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.276461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:50:56.280327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.281508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:56.281567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.281680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:50:56.281723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:56.281753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:56.281915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.287916Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:50:56.396491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:56.396642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.396779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:56.396955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:50:56.396995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.402368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.402549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:56.402770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.402843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:56.402895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:56.402936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:56.404994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.405062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:56.405095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:56.406427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.406458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.406482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.406528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.409867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:56.411613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:56.411775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
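The FAKE_COORDINATOR lines are emitted by the unit-test harness rather than a real coordinator tablet: the stub accepts each proposed txId, advances its front step, and immediately sends the plan back (here txId 1 lands at step 5000001). Assuming that behavior — the sketch below is an invented test double, not the harness's actual code — a minimal stand-in might be:

#include <cstdint>
#include <cstdio>

struct TFakeCoordinator {
    uint64_t FrontStep = 5000000;     // first planned step comes out as 5000001
    uint64_t Plan(uint64_t txId) {    // "FAKE_COORDINATOR: Add transaction: <txId> at step: <step>"
        ++FrontStep;                  // "advance: minStep... State->FrontStep"
        std::printf("plan tx %llu at step %llu\n",
                    (unsigned long long)txId, (unsigned long long)FrontStep);
        return FrontStep;             // the schemeshard then executes the tx at this step
    }
};

int main() {
    TFakeCoordinator coordinator;
    coordinator.Plan(1);              // step 5000001, as in the trace above
}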
2025-04-17T17:50:56.412734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.412845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:56.412915Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.413139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:56.413185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.413319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:56.413406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:56.415230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:56.415273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:56.415444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.415495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:56.415805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.415857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:50:56.415952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:56.415981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.416014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:56.416052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.416087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:56.416117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.416146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:56.416169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:56.416217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:56.416253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:56.416299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
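Taken together, the "Change state for txid" lines trace the schemeshard operation pipeline through its numeric sub-states: 2 (create parts) -> 3 (configure parts) -> 128 (propose, waiting for the coordinator plan) -> 240 (done), after which "Operation and all the parts is done" fires once every part reports 1/1. A simplified model of that progression, with invented enum names, is:

#include <vector>
#include <cstdio>

enum EOpState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

bool AllPartsDone(const std::vector<EOpState>& parts) {
    for (EOpState s : parts)
        if (s != Done)
            return false;
    return true;
}

int main() {
    std::vector<EOpState> op{CreateParts};  // single-part operation, e.g. txid 1:0
    op[0] = ConfigureParts;                 // "Change state for txid 1:0 2 -> 3"
    op[0] = Propose;                        // "... 3 -> 128": wait for the coordinator plan
    op[0] = Done;                           // "... 128 -> 240" on TEvOperationPlan
    std::printf("progress %zu/%zu, done: %d\n", op.size(), op.size(), AllPartsDone(op));
}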
2025-04-17T17:50:56.418068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:56.418170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:56.418238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ecute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.120373Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-17T17:50:57.120478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:50:57.120534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:57.120585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:50:57.120617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:57.120652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-17T17:50:57.120685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:57.120720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T17:50:57.120747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-17T17:50:57.120911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:50:57.122156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:50:57.125448Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409549 Forgetting tablet 72075186234409549 2025-04-17T17:50:57.125814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-17T17:50:57.126245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:50:57.127371Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409551 2025-04-17T17:50:57.127762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-17T17:50:57.127934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:50:57.128634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:57.132161Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409550 2025-04-17T17:50:57.132643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free 
tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-17T17:50:57.132918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409551 Forgetting tablet 72075186234409550 2025-04-17T17:50:57.134905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:50:57.134952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:50:57.135074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:50:57.135686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:50:57.136479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:50:57.136533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:50:57.136601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:57.144145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-17T17:50:57.144224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549 2025-04-17T17:50:57.144496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-17T17:50:57.144533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2025-04-17T17:50:57.144629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T17:50:57.144689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2025-04-17T17:50:57.144900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:50:57.144990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T17:50:57.145306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T17:50:57.145357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T17:50:57.145849Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T17:50:57.145948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T17:50:57.145989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:936:2800] TestWaitNotification: OK eventTxId 106 
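The deletion sequence above follows a fixed handshake: the schemeshard asks the shared hive to drop each tenant tablet (TEvDeleteTablet), the hive forgets it and replies Status: OK, and every ack decrements the owning path's refcount until TTxCleanDroppedSubDomains/TTxCleanDroppedPaths can persist the removal. The sketch below models only the refcount bookkeeping (counts simplified relative to the trace, all names invented):

#include <map>
#include <vector>
#include <cstdio>

int main() {
    // Path 3 (the serverless database) is pinned by its tenant shards.
    std::map<int, int> pathRefCount{{3, 3}};
    std::vector<int> freedShardIdx{5, 7, 6};          // ack order taken from the trace above
    for (int shardIdx : freedShardIdx) {
        std::printf("shardIdx %d freed, decrement path 3 (was %d)\n",
                    shardIdx, pathRefCount[3]);
        if (--pathRefCount[3] == 0)
            std::printf("refcount zero: PersistRemoveSubDomain / PersistRemovePath for path 3\n");
    }
}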
2025-04-17T17:50:57.146623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:57.146825Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 236us result status StatusPathDoesNotExist 2025-04-17T17:50:57.147018Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:50:57.147494Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:57.147687Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 163us result status StatusPathDoesNotExist 2025-04-17T17:50:57.147818Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:50:57.148279Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:57.148453Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 181us result status StatusSuccess 2025-04-17T17:50:57.148862Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2025-04-17T17:50:57.149452Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2025-04-17T17:50:57.149529Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2025-04-17T17:50:57.149570Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2025-04-17T17:50:57.149622Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:50:56.907180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:50:56.907257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.907297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:50:56.907326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:50:56.907363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:50:56.907413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:50:56.907467Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.907543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:50:56.907860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:50:56.990642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:50:56.990692Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:57.004703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:50:57.005071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:50:57.005225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:50:57.021320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:50:57.021501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:57.022078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.022314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:50:57.024744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.025860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.025926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.026115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:50:57.026159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.026216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:57.026339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.032218Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:50:57.156247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:57.156453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.156655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:57.157072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-04-17T17:50:57.157140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.159345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.159498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:57.159683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.159745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:57.159776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:57.159806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:57.161500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.161560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:57.161598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:57.163184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.163224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.163259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.163313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.166629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:57.168019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:57.168158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:50:57.168942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.169065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.169121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.169326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:57.169357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.169487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:57.169575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:57.171240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.171287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.171450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.171483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:57.171839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.171881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:50:57.171986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.172023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.172058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.172098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.172134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:57.172164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.172190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:57.172219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:57.172270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:57.172301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:57.172329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:50:57.174001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.174109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.174147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tate::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-17T17:50:57.491789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.491920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:57.493762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-17T17:50:57.493864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-17T17:50:57.494133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.494256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.494298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-17T17:50:57.494543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T17:50:57.494598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-17T17:50:57.494686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:50:57.494789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:610:2539], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:50:57.496545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.496577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:50:57.496708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.496743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at 
schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T17:50:57.497011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.497054Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-17T17:50:57.497084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-17T17:50:57.497736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:50:57.497803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:50:57.497834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:50:57.497863Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-17T17:50:57.497892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-17T17:50:57.497953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T17:50:57.499832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.499868Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:50:57.499954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:50:57.499993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.500038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:50:57.500068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.500105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-17T17:50:57.500140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.500174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:50:57.500229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:50:57.500381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:50:57.500831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T17:50:57.502290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:50:57.502329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:50:57.502690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 
72057594046678944 2025-04-17T17:50:57.502770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:50:57.502800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:761:2643] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-17T17:50:57.505572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:57.505714Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-04-17T17:50:57.505749Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-04-17T17:50:57.505865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-17T17:50:57.505899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-17T17:50:57.507692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.507829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-04-17T17:50:57.510333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:57.510448Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-04-17T17:50:57.510485Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-04-17T17:50:57.510589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject 
Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-17T17:50:57.510624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-17T17:50:57.514931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.515050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 24919, MsgBus: 15731 2025-04-17T17:49:33.724742Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340719289957554:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:49:33.724915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e68/r3tmp/tmpfVVzFh/pdisk_1.dat 2025-04-17T17:49:33.992943Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24919, node 1 2025-04-17T17:49:34.055174Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:34.055210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:34.055224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:34.055421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:49:34.090641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:34.090770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:34.092580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15731 TClient is connected to server localhost:15731 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:49:34.513242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:49:36.143594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340732174860107:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.143600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340732174860112:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.143715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:49:36.146949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:49:36.155358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340732174860121:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:49:36.248878Z node 1 :TX_PROXY ERROR: Actor# [1:7494340732174860172:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:49:36.495116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:49:36.640285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:49:36.640453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:49:36.640680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:49:36.640784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:49:36.640889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:49:36.640973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:49:36.641050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:49:36.641142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:49:36.641264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:49:36.641360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:49:36.641437Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:49:36.641544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494340732174860442:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:49:36.643098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:49:36.643186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:49:36.643329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:49:36.643450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:49:36.643549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:49:36.643626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:49:36.643731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:49:36.643819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:49:36.643929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:49:36.644059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:49:36.644133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:49:36.644234Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494340732174860456:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:49:36.671909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494340732174860426:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:49:36.671991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494340732174860426:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:49:36.672194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;sel ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.119332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.123376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.123912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.129035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.129395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.133767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.134907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.138426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.140944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.142969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.146610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.148493Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.152253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.152451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.157012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.158117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.162364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.163759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.167018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.169359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.172386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.175236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.178322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.181203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.183330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.187413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.189296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.193403Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.194971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.199424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.199454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.204222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.205338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.209463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.211197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.214289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.216578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.219552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.221995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.223730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.227341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.228378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.232373Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.233920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.237537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T17:50:34.358978Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2dyhcra2kf78f0km8y6r55", SessionId: ydb://session/3?node_id=1&id=ZjVmODQyZjctOTY5YmYyYWQtNzkzOGM3MzEtMzg0ZmQwOTA=, Slow query, duration: 29.149868s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T17:50:35.280512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T17:50:35.280521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T17:50:35.280925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494340912563534039:9266];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T17:50:35.281248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::ForwardEnvColumnGroups >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] Test command err: 2025-04-17T17:50:42.012812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341014436679341:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:42.012983Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed6/r3tmp/tmpbH8Emo/pdisk_1.dat 2025-04-17T17:50:42.342829Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18115, node 1 2025-04-17T17:50:42.382719Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:42.383323Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:42.401426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:42.401561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:42.403437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:42.403459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:42.403489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:42.403604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:50:42.410848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:42.693132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:42.715305Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:50:47.210573Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341039283685658:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:47.210656Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed6/r3tmp/tmpiRSOyM/pdisk_1.dat 2025-04-17T17:50:47.402070Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:47.435448Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:47.435535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:47.440991Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32316, node 4 2025-04-17T17:50:47.516553Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:47.516574Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:47.516579Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:47.516682Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:47.751990Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:52.206519Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494341039283685658:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:52.206584Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:50:57.386094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:50:57.386165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:57.386208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:50:57.386238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:50:57.386300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:50:57.386340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:50:57.386400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:57.386491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:50:57.386795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:50:57.449562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:50:57.449607Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:57.464313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:50:57.464674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:50:57.464796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:50:57.471848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:50:57.472020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:57.472564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.472755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:50:57.475385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-04-17T17:50:57.476659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.476713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.476910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:50:57.476953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.476989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:57.477116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.483727Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:50:57.602060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:57.602275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.602465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:57.602661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:50:57.602697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.604440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.604547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:57.604692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.604805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:57.604830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:57.604850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:57.606261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.606317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:57.606347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:57.607711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.607746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.607778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.607834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.615232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:57.617155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:57.617284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:50:57.617966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.618050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.618098Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.618287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:57.618318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.618434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:57.618508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:57.620240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.620279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.620420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.620448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:57.620687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.620723Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-17T17:50:57.620788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.620814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.620837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.620865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.620890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:57.620924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.620948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:57.620966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:57.621021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:57.621045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:57.621065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:50:57.622397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.622516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.622548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409550 2025-04-17T17:50:57.934969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-04-17T17:50:57.935015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409550 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-04-17T17:50:57.936265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409551, partId: 0 2025-04-17T17:50:57.936335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409551 2025-04-17T17:50:57.936363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-04-17T17:50:57.936394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409551 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2025-04-17T17:50:57.936418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2025-04-17T17:50:57.937852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.941388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.941587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.941870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.941908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.941952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-17T17:50:57.942012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.942148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:57.944402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-17T17:50:57.944513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-17T17:50:57.944776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.944888Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.944927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-17T17:50:57.945111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T17:50:57.945146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-17T17:50:57.945230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:50:57.945329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:610:2539], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:50:57.947324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.947364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:50:57.947531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.947571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T17:50:57.947840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.947889Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-17T17:50:57.948028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-17T17:50:57.948794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:50:57.948888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:50:57.948924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:50:57.948953Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-17T17:50:57.948985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-17T17:50:57.949048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T17:50:57.951631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.951678Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:50:57.951777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:50:57.951811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.951850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:50:57.951879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.951912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-17T17:50:57.951950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:50:57.951996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:50:57.952063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:50:57.952213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:50:57.952646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T17:50:57.954454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:50:57.954495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:50:57.954890Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T17:50:57.954976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:50:57.955010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:761:2643] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-17T17:50:57.957068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:57.957208Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-04-17T17:50:57.957241Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-04-17T17:50:57.957391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, 
explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-04-17T17:50:57.957434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-04-17T17:50:57.959582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.959711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 >> SystemView::ShowCreateTablePartitionByHash [GOOD] >> SystemView::ShowCreateTablePartitionSettings >> SystemView::AuthGroups_Access [GOOD] >> SystemView::AuthGroups_ResultOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:50:57.395176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:50:57.395260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:57.395302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:50:57.395340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:50:57.395382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:50:57.395427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:50:57.395490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:57.395552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:50:57.395875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:50:57.476477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:50:57.476525Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T17:50:57.494862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:50:57.495132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:50:57.495243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:50:57.506201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:50:57.506389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:57.507025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.507220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:50:57.510034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.511241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.511299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.511505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:50:57.511552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.511591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:57.511708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.521074Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:50:57.656867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:57.657134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.657409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:57.657726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:50:57.657797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.660416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.660538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:57.660729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.660808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:57.660863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:57.660898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:57.662780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.662830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:57.662865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:57.664490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.664536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.664590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.664643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.668606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:57.670353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:57.670533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:50:57.671485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.671603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.671661Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.671930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:57.671985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.672140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:57.672397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:57.674387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.674435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.674605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.674648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:57.674984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.675029Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:50:57.675121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.675164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.675213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.675274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.675315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:57.675356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.675388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:57.675417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:57.675473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:57.675506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:57.675538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:50:57.677351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.677467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.677506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ecute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T17:50:58.406364Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-17T17:50:58.406493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:50:58.406535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:58.406571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T17:50:58.406604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:58.406640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-17T17:50:58.406676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T17:50:58.406715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T17:50:58.406745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-17T17:50:58.406922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:50:58.407973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:50:58.408222Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 2025-04-17T17:50:58.408476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-17T17:50:58.408772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:50:58.409317Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2025-04-17T17:50:58.410067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2025-04-17T17:50:58.413588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-17T17:50:58.413824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:50:58.415191Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 Forgetting tablet 72075186234409547 2025-04-17T17:50:58.416742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-17T17:50:58.416963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:50:58.417817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2025-04-17T17:50:58.417868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:50:58.417967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:50:58.418501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:50:58.418611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:50:58.418651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:50:58.418709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:58.422220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-17T17:50:58.422274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2025-04-17T17:50:58.422423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-17T17:50:58.422451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-04-17T17:50:58.422710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T17:50:58.422751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-04-17T17:50:58.423712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:50:58.423791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T17:50:58.424118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T17:50:58.424161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T17:50:58.424644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T17:50:58.424746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T17:50:58.424784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:933:2792] TestWaitNotification: OK eventTxId 106 2025-04-17T17:50:58.425383Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:58.425582Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 234us result status StatusPathDoesNotExist 2025-04-17T17:50:58.425747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:50:58.426299Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:58.426455Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 161us result status StatusPathDoesNotExist 2025-04-17T17:50:58.426592Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:50:58.427145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:50:58.427347Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 171us result status StatusSuccess 2025-04-17T17:50:58.427702Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2025-04-17T17:50:58.428331Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2025-04-17T17:50:58.428426Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2025-04-17T17:50:58.428471Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2025-04-17T17:50:58.428528Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> SystemView::Nodes [GOOD] >> SystemView::ConcurrentScans |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> SystemView::ShowCreateTableDefaultLiteral [GOOD] >> SystemView::ShowCreateTablePartitionAtKeys >> Secret::SimpleQueryService |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TGroupMapperTest::Mirror3dc >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge |76.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> TGroupMapperTest::Mirror3dc [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] >> SystemView::TopPartitionsByCpuRanges [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> SystemView::TopPartitionsByCpuFollowers >> TPartBtreeIndexIteration::FewNodes |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot >> ShowCreateView::WithTablePathPrefix [GOOD] >> ShowCreateView::WithSingleQuotedTablePathPrefix >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |77.0%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |77.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |77.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |77.0%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:89:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:91:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:90:2118] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:144:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:89:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:92:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:93:2057] recipient: [9:91:2121] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:95:2057] recipient: [9:91:2121] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:94:2122] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:92:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:93:2057] recipient: [10:91:2121] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:95:2057] recipient: [10:91:2121] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:94:2122] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:148:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:94:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:95:2057] recipient: [11:93:2123] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:97:2057] recipient: [11:93:2123] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:96:2124] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:150:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:1 ... 
57594037927937 is [35:99:2125] sender: [35:153:2057] recipient: [35:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:17:2064] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:95:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:98:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:99:2057] recipient: [36:97:2124] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:101:2057] recipient: [36:97:2124] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:100:2125] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:154:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:52:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:52:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:17:2064] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:98:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:101:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:102:2057] recipient: [37:100:2127] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:104:2057] recipient: [37:100:2127] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:103:2128] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:157:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:17:2064] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:98:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:101:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:102:2057] recipient: [38:100:2127] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:104:2057] recipient: [38:100:2127] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:103:2128] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:157:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:17:2064] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:99:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:102:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:103:2057] recipient: [39:101:2127] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:105:2057] recipient: [39:101:2127] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:104:2128] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:158:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:17:2064] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:100:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:103:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:104:2057] recipient: [40:102:2128] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:106:2057] recipient: [40:102:2128] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:105:2129] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:125:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:17:2064] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:101:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:103:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:105:2057] recipient: [41:104:2129] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:107:2057] recipient: [41:104:2129] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! 
new actor is[41:106:2130] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:126:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:17:2064] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:104:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:107:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:108:2057] recipient: [42:106:2132] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:110:2057] recipient: [42:106:2132] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:109:2133] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:163:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:17:2064] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:104:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:107:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:108:2057] recipient: [43:106:2132] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:110:2057] recipient: [43:106:2132] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:109:2133] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:163:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:17:2064] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:104:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:107:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:108:2057] recipient: [44:106:2132] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:110:2057] recipient: [44:106:2132] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
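A reading aid for the bracketed triples in this trace: the first field is the node id; the meaning of the remaining two fields is an assumption here, so the sketch below keeps them opaque. What the trace itself demonstrates is that a reboot preserves the node id and replaces the rest (e.g. [43:58:2099] becomes [43:109:2133] above), which this small check captures:

#include <cstdint>
#include <tuple>

// Editor's sketch, not YDB source: node id first, then two fields treated
// as an opaque per-actor identity (their exact semantics are an assumption).
struct ActorRef {
    uint32_t Node;
    uint64_t A;
    uint64_t B;
};

// True when `after` looks like a rebooted incarnation of `before`:
// same node, different actor identity -- the pattern seen across every
// "!Reboot ... rebooted!" pair in the trace.
bool IsRebootedIncarnation(const ActorRef& before, const ActorRef& after) {
    return before.Node == after.Node &&
           std::tie(before.A, before.B) != std::tie(after.A, after.B);
}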
new actor is[44:109:2133] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:163:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:17:2064] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:109:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:112:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:113:2057] recipient: [45:111:2136] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:115:2057] recipient: [45:111:2136] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:114:2137] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:168:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:17:2064] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> SystemView::ConcurrentScans [GOOD] >> SystemView::GroupsFields |77.1%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubscriberTest::InvalidNotification >> TSubscriberTest::NotifyUpdate |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |77.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.1%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> TSubscriberTest::SyncWithOutdatedReplica >> TSubscriberTest::Sync >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> TSubscriberTest::StrongNotificationAfterCommit >> TSubscriberTest::ReconnectOnFailure >> TSubscriberTest::InvalidNotification [GOOD] >> TSubscriberTest::NotifyUpdate [GOOD] |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |77.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |77.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] Test command err: 2025-04-17T17:50:46.570682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341033845549450:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:46.570783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ec6/r3tmp/tmpJABmEO/pdisk_1.dat 2025-04-17T17:50:46.905267Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:46.937916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:46.938023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:46.942880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64778, node 1 2025-04-17T17:50:47.036618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:47.036661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:47.036672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:47.036821Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16101 WaitRootIsUp 'Root'... 
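The "WaitRootIsUp 'Root'..." line just above begins an exchange completed on the following lines: a TClient::Ls request for Root, a response with "Status: 1 StatusCode: SUCCESS", then "WaitRootIsUp 'Root' success." A rough sketch of such a poll loop, with a hypothetical status callback standing in for the real TClient call (not the YDB testlib API):

#include <chrono>
#include <functional>
#include <thread>

// Poll the scheme for the root path until it reports SUCCESS
// ("Status: 1" in the Ls response below), or give up after a bound.
bool WaitRootIsUp(const std::function<int()>& lsRootStatus, int maxAttempts = 100) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (lsRootStatus() == 1) {  // 1 == SUCCESS, as printed in the response
            return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}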
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:47.292595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:51.571165Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341033845549450:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:51.571271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:56.186062Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341076259407871:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:56.186576Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ec6/r3tmp/tmpZnKaAs/pdisk_1.dat 2025-04-17T17:50:56.393075Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:56.438678Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:56.438757Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:56.456084Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62844, node 4 2025-04-17T17:50:56.574028Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:56.574053Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:56.574060Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:56.574219Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29298 WaitRootIsUp 'Root'... 
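Further below, the TSubscriberTest sync traces print lines like "Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0". They follow a majority rule over the replicas queried: with size# 3 and half# 1, a sync completes once either counter exceeds half, and is reported partial# 1 when the failures side wins. A self-contained sketch of that accounting (illustrative, not the YDB implementation; note the log itself spells failures as "faulires"):

#include <cstddef>

// Quorum bookkeeping matching the numbers printed by the subscriber traces.
struct SyncQuorum {
    size_t Size = 3;        // size#  -- replicas queried
    size_t Successes = 0;   // successes#
    size_t Failures = 0;    // faulires# (sic)

    size_t Half() const { return Size / 2; }                          // half#
    bool Done() const { return Successes > Half() || Failures > Half(); }
    bool Partial() const { return Done() && Failures > Half(); }      // partial#
};

Checked against the traces below: successes# 2 / faulires# 0 completes with partial# 0, while successes# 0 / faulires# 2 completes with partial# 1, matching the WARN line "Sync is done: cookie# 2 ... partial# 1"; a reply arriving after the quorum is decided is then logged as "Unexpected sync response".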
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:56.824402Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:01.184998Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494341076259407871:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:01.185073Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSubscriberTest::NotifyDelete >> TSubscriberCombinationsTest::MigratedPathRecreation >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain >> TSubscriberTest::Sync [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2025-04-17T17:51:07.074785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.077655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.077789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.077837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:51:07.077910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:51:07.077995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:51:07.078051Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.078098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:51:07.078134Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] 
Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.078694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:51:07.078785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2025-04-17T17:51:07.078836Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-04-17T17:51:07.076449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.078687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.078801Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.078849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:51:07.078919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:51:07.078991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:51:07.079047Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.079095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:51:07.079130Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.079281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] 2025-04-17T17:51:07.079322Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:34:2065][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] >> TSubscriberTest::NotifyDelete [GOOD] >> 
DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-04-17T17:51:07.503045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.505613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2025-04-17T17:51:07.505726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2025-04-17T17:51:07.505767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2025-04-17T17:51:07.505838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:36:2066] 2025-04-17T17:51:07.505883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:37:2066] 2025-04-17T17:51:07.505933Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.506075Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:38:2066] 2025-04-17T17:51:07.506153Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.510409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-17T17:51:07.510603Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-17T17:51:07.510727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-17T17:51:07.510806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-17T17:51:07.510866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-17T17:51:07.510911Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-17T17:51:07.510942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-17T17:51:07.511025Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-17T17:51:07.511069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:51:07.511111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-17T17:51:07.511140Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:51:07.511180Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-17T17:51:07.511205Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> TSubscriberTest::SyncPartial >> SystemView::AuthUsers_Access [GOOD] >> SystemView::AuthUsers_ResultOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2025-04-17T17:51:07.429418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.432329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:51:07.433333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-04-17T17:51:07.433386Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-04-17T17:51:07.433459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-04-17T17:51:07.433517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-04-17T17:51:07.433573Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.433733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-04-17T17:51:07.433822Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 
Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.433958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-17T17:51:07.434077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-17T17:51:07.434172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-17T17:51:07.434225Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-17T17:51:07.434282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-17T17:51:07.434431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-17T17:51:07.434472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-17T17:51:07.434538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-17T17:51:07.434588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:51:07.434639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-17T17:51:07.434676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:51:07.434726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-17T17:51:07.434750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-04-17T17:51:07.690643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.693812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.693945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.693996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# 
[1:9:2056] 2025-04-17T17:51:07.694070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:51:07.694161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:51:07.694287Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.694340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:51:07.694383Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.694804Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.694901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:51:07.694963Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.695127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.695218Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:51:07.695270Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-04-17T17:51:07.673208Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.675648Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.675752Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.675866Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-17T17:51:07.676003Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-17T17:51:07.676051Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Set up state: 
owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.676122Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:51:07.676220Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-17T17:51:07.676286Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.676741Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-17T17:51:07.676810Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.676859Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-17T17:51:07.676888Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.676926Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-17T17:51:07.676973Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.691179Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.691272Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-17T17:51:07.691333Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.691523Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:46:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.691599Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-17T17:51:07.691638Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other 
state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.691709Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:47:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:51:07.691778Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-17T17:51:07.691804Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.692358Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:51:07.692440Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:35:2065] 2025-04-17T17:51:07.692486Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Update to strong state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> AsyncIndexChangeCollector::UpsertSingleRow >> TSubscriberTest::SyncPartial [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2025-04-17T17:51:07.724194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.726441Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:51:07.726567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-04-17T17:51:07.726608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-04-17T17:51:07.726674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-04-17T17:51:07.726718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-04-17T17:51:07.726765Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.726913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-04-17T17:51:07.726974Z node 1 
:SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.727334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2025-04-17T17:51:07.727405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2025-04-17T17:51:07.727480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2025-04-17T17:51:07.727558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2066] 2025-04-17T17:51:07.729032Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Path was updated to new version: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.729144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:37:2066] 2025-04-17T17:51:07.729190Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.729252Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2066] 2025-04-17T17:51:07.729285Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::Boot [GOOD] >> SystemView::TopPartitionsByTliFields [GOOD] >> ViewQuerySplit::Basic [GOOD] >> ViewQuerySplit::WithPragmaTablePathPrefix [GOOD] >> ViewQuerySplit::WithPairedPragmaTablePathPrefix [GOOD] >> ViewQuerySplit::WithComments [GOOD] >> ViewQuerySplit::Joins [GOOD] >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> 
DataShardSnapshots::ShardRestartAfterDropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2025-04-17T17:51:08.495910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:08.497645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:08.497725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:08.497755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-17T17:51:08.497797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:51:08.497855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:51:08.497898Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:08.497929Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:51:08.497953Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:08.498110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-17T17:51:08.498254Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2025-04-17T17:51:08.498337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2025-04-17T17:51:08.498395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 1 2025-04-17T17:51:08.498468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-17T17:51:08.498517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-17T17:51:08.498585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 1 2025-04-17T17:51:08.498622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2025-04-17T17:51:08.498663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-17T17:51:08.498714Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:08.498755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:36:2065], cookie# 1 2025-04-17T17:51:08.498798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2025-04-17T17:51:08.498830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 1 2025-04-17T17:51:08.498862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2025-04-17T17:51:08.498983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 2 2025-04-17T17:51:08.499080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 2 2025-04-17T17:51:08.499107Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2025-04-17T17:51:08.499141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 2 2025-04-17T17:51:08.499185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 2 2025-04-17T17:51:08.499250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2025-04-17T17:51:08.499294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 2 2025-04-17T17:51:08.499335Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T17:51:08.499383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-17T17:51:08.499419Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:08.499465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 2 2025-04-17T17:51:08.499503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 2 2025-04-17T17:51:08.499601Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 3 2025-04-17T17:51:08.499678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 3 2025-04-17T17:51:08.499699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2025-04-17T17:51:08.499721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 3 2025-04-17T17:51:08.499747Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T17:51:08.499796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 3 2025-04-17T17:51:08.499861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:37:2065], cookie# 3 2025-04-17T17:51:08.499884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 3 2025-04-17T17:51:08.499926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-17T17:51:08.499962Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2025-04-17T17:51:07.832872Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-17T17:51:07.832946Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-17T17:51:07.833141Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-17T17:51:07.833171Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-04-17T17:51:07.833224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-17T17:51:07.833254Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-17T17:51:07.833526Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-17T17:51:07.833570Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2025-04-17T17:51:07.833690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.834129Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2068] 2025-04-17T17:51:07.834163Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2025-04-17T17:51:07.834577Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:07.834780Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:42:2068] 2025-04-17T17:51:07.834804Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2025-04-17T17:51:07.834840Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:07.834963Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:43:2068] 2025-04-17T17:51:07.834986Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2025-04-17T17:51:07.835030Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:07.835127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.835200Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-04-17T17:51:07.835273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.835308Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-04-17T17:51:07.835352Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2025-04-17T17:51:07.835404Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-04-17T17:51:07.835494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2068] 2025-04-17T17:51:07.835593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:39:2068] 2025-04-17T17:51:07.835645Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.835698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:40:2068] 2025-04-17T17:51:07.835735Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/db/dir_inside] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-04-17T17:51:07.835950Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 118 2025-04-17T17:51:07.835984Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-04-17T17:51:07.843200Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-17T17:51:07.843447Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:51:07.843533Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-04-17T17:51:07.843602Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:38:2068] 2025-04-17T17:51:07.843680Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2025-04-17T17:51:07.843963Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:35:2066], cookie# 0, event size# 117 2025-04-17T17:51:07.844005Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-04-17T17:51:07.844071Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-17T17:51:07.844183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2025-04-17T17:51:07.844234Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:42:2068] 2025-04-17T17:51:07.844289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: 
sender# [1:39:2068] 2025-04-17T17:51:07.844347Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Path was updated to new version: owner# [1:36:2067], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:08.405453Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:08.406052Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-04-17T17:51:08.406097Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-04-17T17:51:08.406120Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-04-17T17:51:08.406154Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2065] 2025-04-17T17:51:08.406223Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:36:2065] 2025-04-17T17:51:08.406258Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:34:2065][path] Set up state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:08.406301Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2065] 2025-04-17T17:51:08.406347Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:34:2065][path] Ignore empty state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> SystemView::AuthGroups_ResultOrder [GOOD] >> SystemView::AuthGroups_TableRange >> TTabletLabeledCountersAggregator::HeavyAggregation |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] |77.2%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> TTabletResolver::TabletResolvePriority [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: 2025-04-17T17:49:23.344986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:23.345560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:23.345618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001917/r3tmp/tmp2WDmhd/pdisk_1.dat 2025-04-17T17:49:23.751361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:23.795232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:23.837157Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:23.838173Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:23.838420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:23.838547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:23.850343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:23.932370Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:49:23.932455Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:49:23.932615Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:49:24.059100Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:49:24.059229Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:49:24.059896Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:49:24.060030Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:49:24.060386Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:49:24.060596Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:49:24.060696Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:49:24.062671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:24.063157Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:49:24.063915Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:49:24.063998Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:49:24.107081Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:49:24.108459Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:49:24.109090Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:49:24.109506Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:24.160847Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:49:24.161680Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:24.161818Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:49:24.163773Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:49:24.163867Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:49:24.163929Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:49:24.164361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:49:24.164537Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:49:24.164655Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:49:24.165186Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:49:24.195331Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:49:24.195600Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:49:24.195763Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:49:24.195807Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:49:24.195846Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:49:24.195912Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:24.196167Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
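The trace that follows walks the scheme transaction through the datashard's execution-unit chain (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), advancing while a unit reports progress and parking the operation when a unit is not ready. A minimal sketch of that dispatch loop, with hypothetical type and unit names standing in for the real NKikimr::NDataShard classes:

```cpp
#include <deque>
#include <functional>
#include <iostream>
#include <string>

// Execution statuses mirroring what the trace prints per unit.
enum class EStatus { Executed, DelayComplete, NotReady };

// A unit is a named pipeline step; the real units also carry
// per-operation state and side effects.
struct TUnit {
    std::string Name;
    std::function<EStatus()> Run;
};

// Advance one operation through its unit chain the way the trace does:
// keep going while a unit reports progress, stop when a unit is not
// ready (WaitForPlan waits for the coordinator's plan step).
void RunPipeline(std::deque<TUnit>& units) {
    while (!units.empty()) {
        TUnit& unit = units.front();
        const EStatus st = unit.Run();
        if (st == EStatus::NotReady) {
            std::cout << "operation is not ready to execute on unit "
                      << unit.Name << "\n";
            return;  // re-entered later, e.g. once the plan step arrives
        }
        std::cout << "executed on unit " << unit.Name
                  << (st == EStatus::DelayComplete ? " (completion deferred)" : "")
                  << "\n";
        units.pop_front();
    }
}

int main() {
    std::deque<TUnit> units{
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    };
    RunPipeline(units);
}
```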
2025-04-17T17:49:24.196207Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:49:24.196474Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:49:24.196561Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:49:24.196652Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:49:24.196683Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:49:24.196725Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:49:24.196758Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:49:24.196796Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:49:24.196831Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:49:24.196862Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:24.196978Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:24.197016Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:24.197073Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:49:24.197563Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:49:24.197619Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:49:24.197734Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:49:24.198008Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:49:24.198076Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:49:24.198221Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:49:24.198272Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:49:24.198315Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:49:24.198371Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:49:24.198415Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:24.198752Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:49:24.198795Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:49:24.198835Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:49:24.198868Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:49:24.198917Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:49:24.198951Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:49:24.199011Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:49:24.199047Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:49:24.199075Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:49:24.199960Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:49:24.200010Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:24.200045Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:49:24.200099Z node 1 :TX_DATASHARD TRACE: Prop ... ablet: 72075186224037890 2025-04-17T17:51:06.048086Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037890 2025-04-17T17:51:06.048508Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 278593539, Sender [13:833:2692], Recipient [13:667:2571]: NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3,4] } 2025-04-17T17:51:06.048609Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435091, Sender [13:667:2571], Recipient [13:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRemoveChangeRecords 2025-04-17T17:51:06.048648Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037888 2025-04-17T17:51:06.048672Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-04-17T17:51:06.048742Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037888 2025-04-17T17:51:06.048774Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-04-17T17:51:06.048991Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037890 2025-04-17T17:51:06.049248Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-04-17T17:51:06.302963Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1080:2893]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T17:51:06.916852Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T17:51:06.916950Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715671 ProcessProposeKqpTransaction 2025-04-17T17:51:06.918201Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2e0d2x2g2g94as7dqp6jwf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZTg0OWRjMDUtYWUxMDIyZjUtYzE5YThlZDItNjcyOTI4MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-04-17T17:51:06.920869Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1708:3416], Recipient [13:797:2664]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-04-17T17:51:06.921109Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T17:51:06.921195Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8042/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:51:06.921268Z node 13 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-04-17T17:51:06.921402Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-17T17:51:06.921579Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-17T17:51:06.921650Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-17T17:51:06.921726Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:51:06.921786Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:51:06.921839Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-17T17:51:06.921893Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-17T17:51:06.921918Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:51:06.921937Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T17:51:06.921957Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-17T17:51:06.922070Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-17T17:51:06.922479Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[13:1708:3416], 0} after executionsCount# 1 2025-04-17T17:51:06.922572Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1708:3416], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-04-17T17:51:06.922699Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1708:3416], 0} finished in read 2025-04-17T17:51:06.922802Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-17T17:51:06.922832Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:51:06.922858Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:51:06.922886Z node 13 
:TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:51:06.922936Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-17T17:51:06.922961Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:51:06.922997Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-17T17:51:06.923064Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:51:06.923236Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:51:06.924332Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1708:3416], Recipient [13:797:2664]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:51:06.924396Z node 13 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2025-04-17T17:51:07.513806Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T17:51:07.513898Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715672 ProcessProposeKqpTransaction 2025-04-17T17:51:07.515436Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2e0dnp9d5d5ccpkptrjqmf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MmM3M2VlYjYtM2VhOGFhNTMtZDk0ZGJiY2ItOTM5ZjEyYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-04-17T17:51:07.518172Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1739:3441], Recipient [13:1080:2893]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-04-17T17:51:07.518381Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-17T17:51:07.518465Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v8042/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:51:07.518537Z node 13 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-04-17T17:51:07.518648Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2025-04-17T17:51:07.518941Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-17T17:51:07.519007Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2025-04-17T17:51:07.519076Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-17T17:51:07.519141Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2025-04-17T17:51:07.519200Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037891 2025-04-17T17:51:07.519272Z node 13 :TX_DATASHARD 
TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-17T17:51:07.519302Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-17T17:51:07.519325Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2025-04-17T17:51:07.519349Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2025-04-17T17:51:07.519479Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-17T17:51:07.520122Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[13:1739:3441], 0} after executionsCount# 1 2025-04-17T17:51:07.520225Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1739:3441], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-04-17T17:51:07.520357Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1739:3441], 0} finished in read 2025-04-17T17:51:07.520459Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-17T17:51:07.520486Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2025-04-17T17:51:07.520510Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2025-04-17T17:51:07.520536Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2025-04-17T17:51:07.520583Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-17T17:51:07.520607Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2025-04-17T17:51:07.520649Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037891 has finished 2025-04-17T17:51:07.520722Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-17T17:51:07.520893Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2025-04-17T17:51:07.522055Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1739:3441], Recipient [13:1080:2893]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:51:07.522129Z node 13 :TX_DATASHARD TRACE: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } >> TTabletPipeTest::TestRewriteSameNode >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: !!! simulating S3 hang up -- sending SIGSTOP !!! 
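The ttl_unavailable_s3 run above simulates an S3 outage by freezing the mock server process and resuming it (the recovery message appears just below). The real test is a py3test; the following C++ sketch shows the same trick, assuming the mock S3 server is a child process whose pid we hold — SimulateS3Outage and its parameters are hypothetical:

```cpp
#include <csignal>
#include <cstdio>
#include <iostream>
#include <sys/types.h>
#include <unistd.h>

// Freeze and later resume a child process to fake an endpoint outage.
// s3Pid is assumed to be a child we spawned and are allowed to signal.
bool SimulateS3Outage(pid_t s3Pid, unsigned outageSeconds) {
    std::cout << "!!! simulating S3 hang up -- sending SIGSTOP !!!\n";
    if (kill(s3Pid, SIGSTOP) != 0) {
        perror("kill(SIGSTOP)");
        return false;
    }
    sleep(outageSeconds);  // let the system under test hit the frozen endpoint
    std::cout << "simulating S3 recovery -- sending SIGCONT\n";
    if (kill(s3Pid, SIGCONT) != 0) {
        perror("kill(SIGCONT)");
        return false;
    }
    return true;
}
```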
simulating S3 recovery -- sending SIGCONT |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> TResourceBroker::TestRealUsage |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |77.2%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |77.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> TResourceBroker::TestQueueWithConfigure |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |77.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |77.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TDataShardLocksTest::Points_OneTx >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> ViewQuerySplit::Joins [GOOD] Test command err: 2025-04-17T17:50:29.864513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340960836023524:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:29.866560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ac5/r3tmp/tmpXRwHfv/pdisk_1.dat 2025-04-17T17:50:30.295391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:30.313187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:30.313280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:30.315002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32194, node 1 2025-04-17T17:50:30.404309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:30.404340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:30.404352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
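The workload-manager lines below show the resource-pool bootstrap race: a NOT_FOUND fetch triggers a create, a concurrent creator may win ("path exist, request accepts it"), and the creator then "doublechecks" by fetching again. A minimal sketch of that ensure-exists pattern; FetchPool, CreatePool, and the status codes are hypothetical stand-ins for the real scheme operations:

```cpp
#include <iostream>
#include <string>
#include <unordered_set>

// Hypothetical status codes standing in for real scheme-op results.
enum class EStatus { Success, NotFound, AlreadyExists };

// In-memory stand-in for the scheme shard; real calls go over a pipe.
std::unordered_set<std::string> ExistingPaths;

EStatus FetchPool(const std::string& path) {
    return ExistingPaths.count(path) ? EStatus::Success : EStatus::NotFound;
}

EStatus CreatePool(const std::string& path) {
    return ExistingPaths.insert(path).second ? EStatus::Success
                                             : EStatus::AlreadyExists;
}

// Ensure-exists tolerating the race in the trace: create on NOT_FOUND,
// accept AlreadyExists as success, then re-fetch ("doublechecking").
bool EnsurePool(const std::string& path, int maxRetries) {
    for (int i = 0; i < maxRetries; ++i) {
        if (FetchPool(path) == EStatus::Success) {
            return true;  // pool is visible, nothing to do
        }
        CreatePool(path);  // AlreadyExists is fine: someone else created it
        // loop back and doublecheck that the pool is now visible
    }
    return false;
}

int main() {
    std::cout << EnsurePool("/Root/.metadata/workload_manager/pools/default", 3)
              << "\n";
}
```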
2025-04-17T17:50:30.404494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:30.691144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:30.714810Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:50:32.672856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340973720926023:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:32.673000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:32.673405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340973720926050:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:32.677726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:32.688244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340973720926052:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:32.791406Z node 1 :TX_PROXY ERROR: Actor# [1:7494340973720926103:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:33.318268Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2dzaaw4vhz40fyh9z3vsfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY4M2JjODEtYWFmYjI0OWMtNTg2MDI0ZDQtYjMyZjc5YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:33.353146Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494340978015893437:2340], owner: [1:7494340978015893433:2338], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:50:33.353892Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494340978015893437:2340], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:50:33.362004Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494340978015893437:2340], row count: 1, finished: 1 2025-04-17T17:50:33.362079Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494340978015893437:2340], owner: [1:7494340978015893433:2338], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:50:33.368445Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912233308, txId: 281474976710660] shutting down 2025-04-17T17:50:34.531505Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2dzdwe7sn17vxb2rr7nrz7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA5N2Q1NzktNjk4ZmQzMTMtOTFkNWFjZi1lMGI2ZGEzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:50:34.533860Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494340982310860777:2354], owner: [1:7494340982310860773:2352], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:50:34.534556Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494340982310860777:2354], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:50:34.534891Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494340982310860777:2354], row count: 1, finished: 1 2025-04-17T17:50:34.534927Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494340982310860777:2354], owner: [1:7494340982310860773:2352], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:50:34.536821Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912234526, txId: 281474976710662] shutting down 2025-04-17T17:50:34.864967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494340960836023524:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:34.865040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:35.653653Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2dzf0r3tj53n604t7p7rte, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdhNjVlOGQtNWMzYTgzN2UtNDhjMTE2NS02NmU2Njk5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:35.656208Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494340986605828112:2365], owner: [1:7494340986605828108:2363], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:50:35.659208Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494340986605828112:2365], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:50:35.659544Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494340986605828112:2365], row count: 2, finished: 1 2025-04-17T17:50:35.659582Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494340986605828112:2365], owner: [1:7494340986605828108:2363], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-17T17:50:35.661391Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912235652, txId: 281474976710664] shutting down 2025-04-17T17:50:36.740124Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340991402081683:2107];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:36.746577Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ac5/r3tmp/tmpRA10zF/pdisk_1.dat 2025-04-17T17:50:36.902126Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:36.932766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:36.932869Z 
node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:36.938311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28781, node 2 2025-04-17T17:50:37.106018Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:37.106048Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:37.106054Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:37.106144Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:37.289895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, su ... 
BUG: [72075186224037899] TTxTopPartitions::Execute: , partition by CPU count# 1, partition by TLI count# 1 2025-04-17T17:51:05.903352Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxTopPartitions::Complete 2025-04-17T17:51:05.947487Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [9:7494341068209989805:2078] 2025-04-17T17:51:05.989344Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] TEvSendDbCountersRequest: node id# 10, generation# 5064782804045410590, services count# 1, request size# 277 2025-04-17T17:51:05.987579Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7494341067157702635:2074] 2025-04-17T17:51:05.988543Z node 10 :SYSTEM_VIEWS DEBUG: Send counters: service id# [10:7494341067157702635:2074], processor id# 72075186224037893, database# /Root/Tenant1, generation# 5064782804045410590, node id# 10, is retrying# 0, is labeled# 0 2025-04-17T17:51:05.988955Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7494341062862735260:2063] 2025-04-17T17:51:05.989673Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvSendDbCountersResponse: service id# [10:7494341067157702635:2074], database# /Root/Tenant1, generation# 5064782804045410590 2025-04-17T17:51:06.000766Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [10:7494341062862735260:2063], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.000830Z node 10 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [10:7494341062862735260:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:51:06.001294Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [11:7494341064453685649:2063], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.001338Z node 11 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [11:7494341064453685649:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:51:06.001411Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [11:7494341068748653015:2073], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.001428Z node 11 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [11:7494341068748653015:2073], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-04-17T17:51:06.001380Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [8:7494341068531776192:2200], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.001415Z node 8 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [8:7494341068531776192:2200], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-04-17T17:51:06.002275Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [8:7494341064236808686:2063], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.002246Z node 7 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [7:7494341062265820722:2072], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.002452Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [10:7494341067157702635:2074], interval 
end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.002274Z node 7 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [7:7494341062265820722:2072], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:51:06.002486Z node 10 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [10:7494341067157702635:2074], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-04-17T17:51:06.002345Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [9:7494341063915022421:2063], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.002372Z node 9 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [9:7494341063915022421:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:51:06.002434Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [9:7494341068209989805:2078], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.002446Z node 9 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [9:7494341068209989805:2078], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-04-17T17:51:06.003325Z node 7 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [7:7494341062265820905:2133], interval end# 2025-04-17T17:51:06.000000Z, event interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.003359Z node 7 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [7:7494341062265820905:2133], query logs count# 0, processor ids count# 1, processor id to database count# 0 2025-04-17T17:51:06.002307Z node 8 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [8:7494341064236808686:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:51:06.048548Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxCollect::Execute 2025-04-17T17:51:06.048671Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistPartitionTopResults: table id# 17, partition interval end# 2025-04-17T17:51:06.000000Z, partition count# 1 2025-04-17T17:51:06.048735Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistPartitionTopResults: table id# 20, partition interval end# 2025-04-17T17:51:06.000000Z, partition count# 1 2025-04-17T17:51:06.048798Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistPartitionTopResults: table id# 18, partition interval end# 2025-04-17T18:00:00.000000Z, partition count# 1 2025-04-17T17:51:06.048850Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistPartitionTopResults: table id# 21, partition interval end# 2025-04-17T18:00:00.000000Z, partition count# 1 2025-04-17T17:51:06.048956Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] Reset: interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.053085Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxCollect::Complete 2025-04-17T17:51:06.072444Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxCollect::Execute 2025-04-17T17:51:06.072564Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 17, partition interval end# 2025-04-17T17:51:06.000000Z, partition count# 1 2025-04-17T17:51:06.072630Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 20, partition interval end# 2025-04-17T17:51:06.000000Z, partition count# 1 2025-04-17T17:51:06.072675Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] 
PersistPartitionTopResults: table id# 18, partition interval end# 2025-04-17T18:00:00.000000Z, partition count# 1 2025-04-17T17:51:06.072709Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 21, partition interval end# 2025-04-17T18:00:00.000000Z, partition count# 1 2025-04-17T17:51:06.072779Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-04-17T17:51:06.000000Z 2025-04-17T17:51:06.083152Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxCollect::Complete 2025-04-17T17:51:06.211192Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01js2e0ckyf3c7hrb623whhckr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NGI3NGQ5ZTktNjQ0YzUzZWItNjY1M2Y1OC01MzNjYmFlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:06.215889Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7494341118100397768:2449], owner: [7:7494341118100397764:2447], scan id: 0, table id: [72075186224037888:1:0:top_partitions_by_tli_one_minute] 2025-04-17T17:51:06.216585Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7494341118100397768:2449], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-17T17:51:06.218437Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1744912265000000 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 1744912265000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_BY_TLI_ONE_MINUTE , rows# 1, bytes# 63, next# 2025-04-17T17:51:06.219342Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7494341118100397768:2449], row count: 1, finished: 1 2025-04-17T17:51:06.219379Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7494341118100397768:2449], owner: [7:7494341118100397764:2447], scan id: 0, table id: [72075186224037888:1:0:top_partitions_by_tli_one_minute] 2025-04-17T17:51:06.222488Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912266209, txId: 281474976715682] shutting down 2025-04-17T17:51:06.238259Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7494341068531776192:2200], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-17T17:51:06.240019Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-17T17:51:06.240409Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:51:06.240543Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-17T17:51:06.241109Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:51:06.241322Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-17T17:51:06.241515Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:51:06.238516Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7494341068209989805:2078], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-17T17:51:06.242579Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-17T17:51:06.243287Z node 7 :HIVE WARN: 
HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:51:06.243547Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7494341068209989805:2078], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-17T17:51:06.240318Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7494341068531776192:2200], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-17T17:51:06.245811Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7494341067157702635:2074], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-17T17:51:06.246143Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7494341067157702635:2074], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-17T17:51:06.248191Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7494341068748653015:2073], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-17T17:51:06.260389Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[11:7494341068748653232:2104], Type=268959746 2025-04-17T17:51:06.262455Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7494341068748653015:2073], database# /Root/Tenant1, processor id# 72075186224037893 >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] Test command err: 2025-04-17T17:50:45.707691Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341030399779316:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:45.707741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ece/r3tmp/tmpLqPTe5/pdisk_1.dat 2025-04-17T17:50:46.111077Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3563, node 1 2025-04-17T17:50:46.144266Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:46.144309Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:46.176877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:46.177080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:46.186731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:46.186771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:46.186782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:46.186952Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:50:46.217603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23491 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:46.465887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:49.530543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649720:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.530847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649708:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.531152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.542946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649756:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.557983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649763:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.558049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649765:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.558075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649767:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.558099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649768:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.558136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649770:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.558173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649771:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.570420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649772:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.570578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649757:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.574973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649742:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.575161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649787:2508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.578241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649786:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.578918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.612043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:49.619819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649857:2532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.619920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649855:2530], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.625862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649843:2524], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.626005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.626125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649852:2528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.626238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649853:2529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.632257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649939:2560], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.632572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649940:2561], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.633573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649941:2562], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.634015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649924:2546], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.634065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649934:2555], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.634093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649935:2556], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:49.634119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341047579649936:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool ... uestId: 462 2025-04-17T17:51:04.868449Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWJiNzJmYmUtYjdjMjkwZWYtODUyZWEyZjctNTcyODA2MDQ=, ActorId: [4:7494341095626021519:2348], ActorState: ExecuteState, TraceId: 01js2e0bdv7dqqj6qepqjq3tvb, Reply query error, msg: Pending previous query completion proxyRequestId: 463 2025-04-17T17:51:04.868473Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWJiNzJmYmUtYjdjMjkwZWYtODUyZWEyZjctNTcyODA2MDQ=, ActorId: [4:7494341095626021519:2348], ActorState: ExecuteState, TraceId: 01js2e0bdv7dqqj6qepqjq3tvb, Reply query error, msg: Pending previous query completion proxyRequestId: 464 2025-04-17T17:51:04.868494Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWJiNzJmYmUtYjdjMjkwZWYtODUyZWEyZjctNTcyODA2MDQ=, ActorId: [4:7494341095626021519:2348], ActorState: ExecuteState, TraceId: 01js2e0bdv7dqqj6qepqjq3tvb, Reply query error, msg: Pending previous query completion proxyRequestId: 472 2025-04-17T17:51:04.870690Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 466 2025-04-17T17:51:04.870738Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 468 2025-04-17T17:51:04.870767Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 471 2025-04-17T17:51:04.870797Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 474 2025-04-17T17:51:04.870853Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 476 2025-04-17T17:51:04.870884Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 477 2025-04-17T17:51:04.870912Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 478 
2025-04-17T17:51:04.870969Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 479 2025-04-17T17:51:04.871001Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWRkYjY5OGUtNTNlY2JmNTktYjY1ZDQyOGItNDBjYmRlZDU=, ActorId: [4:7494341095626021539:2353], ActorState: ExecuteState, TraceId: 01js2e0bdvc44436wk0f1c7htj, Reply query error, msg: Pending previous query completion proxyRequestId: 483 2025-04-17T17:51:04.871231Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bdt5736d1qrykvbx6wz, Reply query error, msg: Pending previous query completion proxyRequestId: 467 2025-04-17T17:51:04.871266Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bdt5736d1qrykvbx6wz, Reply query error, msg: Pending previous query completion proxyRequestId: 473 2025-04-17T17:51:04.871292Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bdt5736d1qrykvbx6wz, Reply query error, msg: Pending previous query completion proxyRequestId: 480 2025-04-17T17:51:04.871324Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bdt5736d1qrykvbx6wz, Reply query error, msg: Pending previous query completion proxyRequestId: 482 2025-04-17T17:51:04.871349Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bdt5736d1qrykvbx6wz, Reply query error, msg: Pending previous query completion proxyRequestId: 492 2025-04-17T17:51:04.871375Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bdt5736d1qrykvbx6wz, Reply query error, msg: Pending previous query completion proxyRequestId: 494 2025-04-17T17:51:04.871411Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzUzNzE5MGMtNTJiNjY5OC1mYzlmOGI2Yy0zZTEyNjk3Zg==, ActorId: [4:7494341095626021522:2351], ActorState: ExecuteState, TraceId: 01js2e0bdv8f6dwj7azm8ht2ta, Reply query error, msg: Pending previous query completion proxyRequestId: 469 2025-04-17T17:51:04.871436Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzUzNzE5MGMtNTJiNjY5OC1mYzlmOGI2Yy0zZTEyNjk3Zg==, ActorId: [4:7494341095626021522:2351], ActorState: ExecuteState, TraceId: 01js2e0bdv8f6dwj7azm8ht2ta, Reply query error, msg: Pending previous query completion proxyRequestId: 489 2025-04-17T17:51:04.871461Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzUzNzE5MGMtNTJiNjY5OC1mYzlmOGI2Yy0zZTEyNjk3Zg==, ActorId: [4:7494341095626021522:2351], ActorState: ExecuteState, TraceId: 01js2e0bdv8f6dwj7azm8ht2ta, Reply query error, msg: Pending previous query completion 
proxyRequestId: 495 2025-04-17T17:51:04.871514Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWJiNzJmYmUtYjdjMjkwZWYtODUyZWEyZjctNTcyODA2MDQ=, ActorId: [4:7494341095626021519:2348], ActorState: ExecuteState, TraceId: 01js2e0bdv7dqqj6qepqjq3tvb, Reply query error, msg: Pending previous query completion proxyRequestId: 481 2025-04-17T17:51:04.871564Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWJiNzJmYmUtYjdjMjkwZWYtODUyZWEyZjctNTcyODA2MDQ=, ActorId: [4:7494341095626021519:2348], ActorState: ExecuteState, TraceId: 01js2e0bdv7dqqj6qepqjq3tvb, Reply query error, msg: Pending previous query completion proxyRequestId: 490 2025-04-17T17:51:04.871596Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzYxNWM4ODQtZjE4ZTRlMC1lMjc4MTA3NS1lYzBhOTU5Zg==, ActorId: [4:7494341095626021507:2345], ActorState: ExecuteState, TraceId: 01js2e0bdt1kntytcrq7eare39, Reply query error, msg: Pending previous query completion proxyRequestId: 484 2025-04-17T17:51:04.871629Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzYxNWM4ODQtZjE4ZTRlMC1lMjc4MTA3NS1lYzBhOTU5Zg==, ActorId: [4:7494341095626021507:2345], ActorState: ExecuteState, TraceId: 01js2e0bdt1kntytcrq7eare39, Reply query error, msg: Pending previous query completion proxyRequestId: 486 2025-04-17T17:51:04.871663Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjJkMDY2N2YtM2E2ZDg3NTEtNzcxZDk4ODMtMjhjZjdhZjg=, ActorId: [4:7494341095626021520:2349], ActorState: ExecuteState, TraceId: 01js2e0bdtd55g36hytywgjjwk, Reply query error, msg: Pending previous query completion proxyRequestId: 485 2025-04-17T17:51:04.871712Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjJkMDY2N2YtM2E2ZDg3NTEtNzcxZDk4ODMtMjhjZjdhZjg=, ActorId: [4:7494341095626021520:2349], ActorState: ExecuteState, TraceId: 01js2e0bdtd55g36hytywgjjwk, Reply query error, msg: Pending previous query completion proxyRequestId: 487 2025-04-17T17:51:04.871739Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjJkMDY2N2YtM2E2ZDg3NTEtNzcxZDk4ODMtMjhjZjdhZjg=, ActorId: [4:7494341095626021520:2349], ActorState: ExecuteState, TraceId: 01js2e0bdtd55g36hytywgjjwk, Reply query error, msg: Pending previous query completion proxyRequestId: 493 2025-04-17T17:51:04.871778Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjJkMDY2N2YtM2E2ZDg3NTEtNzcxZDk4ODMtMjhjZjdhZjg=, ActorId: [4:7494341095626021520:2349], ActorState: ExecuteState, TraceId: 01js2e0bdtd55g36hytywgjjwk, Reply query error, msg: Pending previous query completion proxyRequestId: 496 2025-04-17T17:51:04.871809Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2FlYjIyMGUtZjA3ZTQ5NGYtYzM1M2ZmYjItZTQ3ZDcxNTA=, ActorId: [4:7494341095626021521:2350], ActorState: ExecuteState, TraceId: 01js2e0bdv4nj3ge4sbtv0p44d, Reply query error, msg: Pending previous query completion proxyRequestId: 488 2025-04-17T17:51:04.871834Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2FlYjIyMGUtZjA3ZTQ5NGYtYzM1M2ZmYjItZTQ3ZDcxNTA=, ActorId: [4:7494341095626021521:2350], ActorState: ExecuteState, TraceId: 01js2e0bdv4nj3ge4sbtv0p44d, Reply query error, msg: Pending previous query completion proxyRequestId: 491 2025-04-17T17:51:04.871867Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmYzM2E0MC03YTVjNjc1YS1hYWNjNGI5MC03MTk4MzVmZA==, ActorId: [4:7494341095626021518:2347], ActorState: ExecuteState, TraceId: 01js2e0bf4bchva9b2tde76x52, Reply query error, msg: Pending previous query 
completion proxyRequestId: 497 2025-04-17T17:51:04.954437Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzYxNWM4ODQtZjE4ZTRlMC1lMjc4MTA3NS1lYzBhOTU5Zg==, ActorId: [4:7494341095626021507:2345], ActorState: ExecuteState, TraceId: 01js2e0bqs8qp788j6nbq1z7kf, Reply query error, msg: Pending previous query completion proxyRequestId: 500 2025-04-17T17:51:04.955053Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bqs8sdmkmxrng9768q1, Reply query error, msg: Pending previous query completion proxyRequestId: 505 2025-04-17T17:51:04.955443Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzUzNzE5MGMtNTJiNjY5OC1mYzlmOGI2Yy0zZTEyNjk3Zg==, ActorId: [4:7494341095626021522:2351], ActorState: ExecuteState, TraceId: 01js2e0bqs2dqehr72fntsw2c1, Reply query error, msg: Pending previous query completion proxyRequestId: 507 2025-04-17T17:51:04.955785Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjJkMDY2N2YtM2E2ZDg3NTEtNzcxZDk4ODMtMjhjZjdhZjg=, ActorId: [4:7494341095626021520:2349], ActorState: ExecuteState, TraceId: 01js2e0bqsen8dy327hg6jgg6y, Reply query error, msg: Pending previous query completion proxyRequestId: 510 2025-04-17T17:51:04.956000Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGY4OTlmMGQtNzdiMzdkYTYtMjdkNjcxYmYtOTQ3ZTFjOWI=, ActorId: [4:7494341095626021509:2346], ActorState: ExecuteState, TraceId: 01js2e0bqs8sdmkmxrng9768q1, Reply query error, msg: Pending previous query completion proxyRequestId: 509 |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TResourceBroker::TestRandomQueue [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History |77.3%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137] Leader for TabletID 9437184 is [1:165:2141] sender: [1:166:2058] recipient: [1:157:2137] Leader for TabletID 9437185 is [0:0:0] sender: [2:169:2049] recipient: [2:160:2095] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:169:2049] recipient: [2:160:2095] Leader for TabletID 9437185 is [2:181:2098] sender: [2:182:2049] recipient: [2:160:2095] Leader for TabletID 9437184 is [1:165:2141] sender: [1:209:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:181:2098] sender: [1:211:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:181:2098] sender: [2:213:2049] recipient: [2:42:2053] Leader for TabletID 9437185 is [2:181:2098] sender: [2:214:2049] recipient: [2:154:2094] Leader for TabletID 9437185 is [2:181:2098] sender: [1:219:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:181:2098] sender: [2:217:2049] recipient: [2:42:2053] Leader for TabletID 9437185 is [2:181:2098] sender: [2:220:2049] recipient: [2:218:2111] Leader for TabletID 9437185 is [2:221:2112] sender: [2:222:2049] recipient: [2:218:2111] Leader for TabletID 9437185 is [2:221:2112] sender: [1:251:2058] recipient: [1:15:2062] >> TResourceBroker::TestOverusageDifferentResources [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-04-17T17:51:11.424033Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-1 (1 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424109Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-1 (1 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424170Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-2 (2 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424212Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-3 (3 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424250Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-4 (4 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424298Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-5 (5 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424375Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-8 (8 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424409Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-9 (9 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424513Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-14 (14 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424591Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-17 (17 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424631Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-18 (18 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424766Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-25 (25 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424855Z node 
2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-29 (29 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.424895Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-30 (30 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425090Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-39 (39 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425229Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-45 (45 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425365Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-51 (51 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425431Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-52 (52 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425532Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-56 (56 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425569Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-57 (57 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425696Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-63 (63 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425789Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-66 (66 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425862Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-69 (69 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425899Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-70 (70 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425952Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-72 (72 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.425988Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-73 (73 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.426125Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-80 (80 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.426167Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-81 (81 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.428554Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-84 (84 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.428709Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-91 (91 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.428924Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-100 (100 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429082Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-107 (107 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429155Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-110 (110 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429241Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-114 (114 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429544Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-129 (129 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429586Z node 2 :RESOURCE_BROKER ERROR: 
Assigning waiting task 'task-130 (130 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429695Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-135 (135 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429835Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-141 (141 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429875Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-142 (142 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.429982Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-147 (147 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430150Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-155 (155 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430422Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-167 (167 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430477Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-168 (168 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430567Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-172 (172 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430638Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-175 (175 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430674Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-176 (176 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430707Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-177 (177 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.430815Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-182 (182 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431176Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-196 (196 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431340Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-204 (204 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431381Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-205 (205 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431433Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-207 (207 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431545Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-212 (212 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431648Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-215 (215 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431843Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-225 (225 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431897Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-227 (227 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.431956Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-228 (228 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.432093Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-233 (233 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.432155Z node 2 
:RESOURCE_BROKER ERROR: Assigning waiting task 'task-235 (235 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.432430Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-249 (249 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.432509Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-252 (252 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.432662Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-258 (258 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.432740Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-260 (260 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.432996Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-266 (266 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433080Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-269 (269 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433199Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-274 (274 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433400Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-283 (283 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433476Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-286 (286 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433517Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-287 (287 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433606Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-290 (290 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433657Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-291 (291 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433749Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-292 (292 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433915Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-299 (299 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.433989Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-302 (302 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.434120Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-308 (308 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.434341Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-316 (316 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.434425Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-319 (319 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.434513Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-323 (323 by [2:99:2134])' of unknown type 'wrong' to d ... 
25-04-17T17:51:11.474592Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-365 (365 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.474646Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-369 (369 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.474689Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-370 (370 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.474793Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-404 (404 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.474852Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-418 (418 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.474905Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-439 (439 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475000Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-452 (452 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475098Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-465 (465 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475138Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-470 (470 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475194Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-472 (472 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475240Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-493 (493 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475277Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-502 (502 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475337Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-505 (505 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475392Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-512 (512 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475439Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-533 (533 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475503Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-547 (547 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475577Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-551 (551 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475619Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-564 (564 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475660Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-573 (573 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475700Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-582 (582 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475746Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-589 (589 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475828Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-599 (599 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475866Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-608 (608 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-04-17T17:51:11.475904Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-615 (615 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.475962Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-629 (629 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476004Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-647 (647 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476118Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-685 (685 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476148Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-695 (695 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476323Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-732 (732 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476428Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-793 (793 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476488Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-799 (799 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476533Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-805 (805 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476570Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-812 (812 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476622Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-816 (816 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476713Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-900 (900 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476772Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-904 (904 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476847Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-914 (914 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.476909Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-944 (944 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477018Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-972 (972 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477113Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-8 (8 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477194Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-52 (52 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477236Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-69 (69 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477318Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-91 (91 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477376Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-110 (110 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477499Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-172 (172 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477606Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-215 (215 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-04-17T17:51:11.477683Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-269 (269 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477725Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-274 (274 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477770Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-331 (331 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477832Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-347 (347 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477869Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-356 (356 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.477969Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-402 (402 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478060Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-411 (411 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478158Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-497 (497 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478233Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-501 (501 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478260Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-528 (528 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478324Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-538 (538 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478360Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-539 (539 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478427Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-565 (565 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478454Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-571 (571 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478534Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-572 (572 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478574Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-590 (590 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478617Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-597 (597 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478659Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-607 (607 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478732Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-638 (638 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478809Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-675 (675 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478851Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-684 (684 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.478898Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-714 (714 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479048Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-802 (802 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-04-17T17:51:11.479089Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-804 (804 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479125Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-813 (813 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479167Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-814 (814 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479242Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-861 (861 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479384Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-870 (870 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479434Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-883 (883 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479471Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-893 (893 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479572Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-910 (910 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-17T17:51:11.479673Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-964 (964 by [2:99:2134])' of unknown type 'wrong' to default queue >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-04-17T17:51:11.190878Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-04-17T17:51:11.191088Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2025-04-17T17:51:11.191227Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: [1:109:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: [1:128:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:168:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:187:2057] recipient: [1:14:2061] >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |77.3%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds >> SlowTopicAutopartitioning::CDC_Write >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::SplitByLoadWithUpdates |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TPQTestSlow::TestWriteVeryBigMessage |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SystemView::GroupsFields [GOOD] >> SystemView::Describe >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TTxAllocatorClientTest::Boot >> TTxAllocatorClientTest::InitiatingRequest >> TTxAllocatorClientTest::Boot [GOOD] >> TTxAllocatorClientTest::InitiatingRequest [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::QueryStats >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-04-17T17:51:16.236950Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:51:16.237332Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:51:16.237865Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:51:16.239087Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.239480Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:51:16.249714Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.249842Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.249962Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:51:16.250116Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.250241Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.250366Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:51:16.250513Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T17:51:16.251222Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-17T17:51:16.251755Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.251849Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.251942Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-17T17:51:16.251985Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange >> TTxAllocatorClientTest::AllocateOverTheEdge |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |77.4%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-04-17T17:51:16.275061Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:51:16.275619Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:51:16.276404Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:51:16.280749Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.281382Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:51:16.292600Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.292735Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.292851Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:51:16.293016Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.293093Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:16.293193Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:51:16.293383Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-04-17T17:51:17.291212Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:51:17.291700Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:51:17.292422Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:51:17.294075Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.294739Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:51:17.307906Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.308012Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.308105Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:51:17.308254Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.308332Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.308443Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:51:17.308569Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T17:51:17.309194Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-17T17:51:17.309687Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.309776Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.309857Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-17T17:51:17.309899Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 2025-04-17T17:51:17.310076Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.310374Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.310541Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.310701Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.310814Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-17T17:51:17.311202Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.311290Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.311379Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-04-17T17:51:17.311417Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 5000 to# 10000 2025-04-17T17:51:17.311589Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.311732Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.311894Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.312146Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-17T17:51:17.312267Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-17T17:51:17.312594Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.312642Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:51:17.312745Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-04-17T17:51:17.312777Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 10000 to# 15000 2025-04-17T17:51:17.312933Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> Cdc::DocApi[PqRunner] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet >> Cdc::UuidExchange[PqRunner] >> Cdc::KeysOnlyLog[PqRunner] >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> ColumnStatistics::CountMinSketchStatistics [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] Test command err: 2025-04-17T17:51:09.652702Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 [1:7:2054] 2025-04-17T17:51:09.652980Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:8:2055] worker 0 2025-04-17T17:51:09.653041Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:9:2056] worker 1 2025-04-17T17:51:09.653065Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:10:2057] worker 2 2025-04-17T17:51:09.653086Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:11:2058] worker 3 2025-04-17T17:51:09.653119Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:12:2059] worker 4 2025-04-17T17:51:09.653144Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:13:2060] worker 5 2025-04-17T17:51:09.653164Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 
Initiator [1:7:2054] self [1:14:2061] worker 6 2025-04-17T17:51:09.653184Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:15:2062] worker 7 2025-04-17T17:51:09.653205Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:16:2063] worker 8 2025-04-17T17:51:09.653242Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:17:2064] worker 9 Sending message to [1:9:2056] from [1:7:2054] id 1 Sending message to [1:10:2057] from [1:7:2054] id 2 Sending message to [1:11:2058] from [1:7:2054] id 3 Sending message to [1:12:2059] from [1:7:2054] id 4 Sending message to [1:13:2060] from [1:7:2054] id 5 Sending message to [1:14:2061] from [1:7:2054] id 6 Sending message to [1:15:2062] from [1:7:2054] id 7 Sending message to [1:16:2063] from [1:7:2054] id 8 Sending message to [1:17:2064] from [1:7:2054] id 9 Sending message to [1:8:2055] from [1:7:2054] id 10 2025-04-17T17:51:10.416130Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [1:12:2059] 2025-04-17T17:51:10.416237Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [1:13:2060] 2025-04-17T17:51:10.416318Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [1:14:2061] 2025-04-17T17:51:10.416388Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [1:15:2062] 2025-04-17T17:51:10.416468Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [1:16:2063] 2025-04-17T17:51:10.416524Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [1:17:2064] 2025-04-17T17:51:10.416971Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [1:8:2055] 2025-04-17T17:51:10.417030Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [1:9:2056] 2025-04-17T17:51:10.417082Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [1:10:2057] 2025-04-17T17:51:10.417139Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [1:11:2058] 2025-04-17T17:51:10.417192Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:16:2063] 2025-04-17T17:51:10.418678Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:16:2063] 2025-04-17T17:51:10.458264Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:7:2054] 2025-04-17T17:51:10.482682Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:17:2064] 2025-04-17T17:51:10.484239Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:17:2064] 2025-04-17T17:51:10.517342Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:17:2064] Initiator [1:7:2054] 2025-04-17T17:51:10.540589Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:7:2054] 2025-04-17T17:51:10.540734Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:7:2054] 2025-04-17T17:51:10.546112Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [1:8:2055] 2025-04-17T17:51:10.547623Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:8:2055] 2025-04-17T17:51:10.578833Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:7:2054] 2025-04-17T17:51:10.600756Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:9:2056] 2025-04-17T17:51:10.602333Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:9:2056] 2025-04-17T17:51:10.633949Z node 1 :TABLET_AGGREGATOR 
INFO: aggregator request processed [1:9:2056] Initiator [1:7:2054] 2025-04-17T17:51:10.654574Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:10:2057] 2025-04-17T17:51:10.656134Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:10:2057] 2025-04-17T17:51:10.681487Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:7:2054] 2025-04-17T17:51:10.698113Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:11:2058] 2025-04-17T17:51:10.699678Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:11:2058] 2025-04-17T17:51:10.731515Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:7:2054] 2025-04-17T17:51:10.753159Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:12:2059] 2025-04-17T17:51:10.754629Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:12:2059] 2025-04-17T17:51:10.789384Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator [1:7:2054] 2025-04-17T17:51:10.811485Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:13:2060] 2025-04-17T17:51:10.812954Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:13:2060] 2025-04-17T17:51:10.853536Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:7:2054] 2025-04-17T17:51:10.875794Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:14:2061] 2025-04-17T17:51:10.877159Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:14:2061] 2025-04-17T17:51:10.905646Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:7:2054] 2025-04-17T17:51:10.931039Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:15:2062] 2025-04-17T17:51:10.932403Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:15:2062] 2025-04-17T17:51:10.961472Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:15:2062] Initiator [1:7:2054] 2025-04-17T17:51:10.992966Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:7:2054] 2025-04-17T17:51:10.993146Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:7:2054] 2025-04-17T17:51:10.998523Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:7:2054] 2025-04-17T17:51:10.998651Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [1:7:2054] 2025-04-17T17:51:11.003279Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:7:2054] 2025-04-17T17:51:11.003406Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:7:2054] 2025-04-17T17:51:11.008831Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:7:2054] 2025-04-17T17:51:11.008970Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:7:2054] 2025-04-17T17:51:11.014140Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:7:2054] 2025-04-17T17:51:11.014285Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:7:2054] 2025-04-17T17:51:11.019162Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:7:2054] 2025-04-17T17:51:11.019279Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:7:2054] 2025-04-17T17:51:11.027279Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:7:2054] 
2025-04-17T17:51:11.027415Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:7:2054] 2025-04-17T17:51:11.032440Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:7:2054] 2025-04-17T17:51:11.032570Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:7:2054] 2025-04-17T17:51:11.037936Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:7:2054] 2025-04-17T17:51:11.038079Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:7:2054] 2025-04-17T17:51:11.043304Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] TEST 2 10 duration 1.536024s 2025-04-17T17:51:11.380788Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:7:2054] 2025-04-17T17:51:11.381118Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:8:2055] worker 0 2025-04-17T17:51:11.381150Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:9:2056] worker 1 2025-04-17T17:51:11.381169Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:10:2057] worker 2 2025-04-17T17:51:11.381198Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:11:2058] worker 3 2025-04-17T17:51:11.381224Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:12:2059] worker 4 2025-04-17T17:51:11.381301Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:13:2060] worker 5 2025-04-17T17:51:11.381329Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:14:2061] worker 6 2025-04-17T17:51:11.381347Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:15:2062] worker 7 2025-04-17T17:51:11.381375Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:16:2063] worker 8 2025-04-17T17:51:11.381410Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:17:2064] worker 9 2025-04-17T17:51:11.381429Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:18:2065] worker 10 2025-04-17T17:51:11.381445Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:19:2066] worker 11 2025-04-17T17:51:11.381465Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:20:2067] worker 12 2025-04-17T17:51:11.381486Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:21:2068] worker 13 2025-04-17T17:51:11.381505Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:22:2069] worker 14 2025-04-17T17:51:11.381531Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:23:2070] worker 15 2025-04-17T17:51:11.381556Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:24:2071] worker 16 2025-04-17T17:51:11.381572Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:25:2072] worker 17 2025-04-17T17:51:11.381587Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:26:2073] worker 18 2025-04-17T17:51:11.381600Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:27:2074] worker 19 Sending message to [2:9:2056] from [2:7:2054] id 1 Sending message to [2:10:2057] from 
[2:7:2054] id 2 Sending message to [2:11:2058] from [2:7:2054] id 3 Sending message to [2:12:2059] from [2:7:2054] id 4 Sending message to [2:13:2060] from [2:7:2054] id 5 Sending message to [2:14:2061] from [2:7:2054] id 6 Sending message to [2:15:2062] from [2:7:2054] id 7 Sending message to [2:16:2063] from [2:7:2054] id 8 Sending message to [2:17:2064] from [2:7: ... esponse node 0 [2:7:2054] 2025-04-17T17:51:13.426839Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 11 [2:7:2054] 2025-04-17T17:51:13.426862Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 11 [2:7:2054] 2025-04-17T17:51:13.426896Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 12 [2:7:2054] 2025-04-17T17:51:13.426936Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 12 [2:7:2054] 2025-04-17T17:51:13.427015Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 13 [2:7:2054] 2025-04-17T17:51:13.427048Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 13 [2:7:2054] 2025-04-17T17:51:13.427107Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 14 [2:7:2054] 2025-04-17T17:51:13.427129Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 14 [2:7:2054] 2025-04-17T17:51:13.427183Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 15 [2:7:2054] 2025-04-17T17:51:13.427206Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 15 [2:7:2054] 2025-04-17T17:51:13.427283Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:7:2054] 2025-04-17T17:51:13.427375Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:7:2054] 2025-04-17T17:51:13.452708Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [2:7:2054] 2025-04-17T17:51:13.452866Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:7:2054] 2025-04-17T17:51:13.494767Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [2:7:2054] 2025-04-17T17:51:13.494940Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:7:2054] 2025-04-17T17:51:13.516894Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:7:2054] 2025-04-17T17:51:13.517054Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:7:2054] 2025-04-17T17:51:13.540309Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:7:2054] 2025-04-17T17:51:13.540471Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:7:2054] 2025-04-17T17:51:13.554163Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:7:2054] 2025-04-17T17:51:13.559588Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:7:2054] 2025-04-17T17:51:13.573277Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:7:2054] 2025-04-17T17:51:13.573440Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:7:2054] 2025-04-17T17:51:13.585848Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:7:2054] 2025-04-17T17:51:13.586024Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:7:2054] 2025-04-17T17:51:13.597789Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:7:2054] 2025-04-17T17:51:13.597938Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:7:2054] 2025-04-17T17:51:13.604060Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor 
got response node 8 [2:7:2054] 2025-04-17T17:51:13.604217Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:7:2054] 2025-04-17T17:51:13.609490Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:7:2054] Initiator [2:6:2053] TEST 2 20 duration 2.445733s 2025-04-17T17:51:13.999108Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 [3:7:2054] 2025-04-17T17:51:13.999238Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:7:2054] self [3:8:2055] worker 0 Sending message to [3:8:2055] from [3:7:2054] id 1 Sending message to [3:8:2055] from [3:7:2054] id 2 Sending message to [3:8:2055] from [3:7:2054] id 3 Sending message to [3:8:2055] from [3:7:2054] id 4 Sending message to [3:8:2055] from [3:7:2054] id 5 Sending message to [3:8:2055] from [3:7:2054] id 6 Sending message to [3:8:2055] from [3:7:2054] id 7 Sending message to [3:8:2055] from [3:7:2054] id 8 Sending message to [3:8:2055] from [3:7:2054] id 9 Sending message to [3:8:2055] from [3:7:2054] id 10 2025-04-17T17:51:15.021679Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:8:2055] 2025-04-17T17:51:15.021748Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:8:2055] 2025-04-17T17:51:15.021785Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:8:2055] 2025-04-17T17:51:15.021881Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:8:2055] 2025-04-17T17:51:15.021925Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:8:2055] 2025-04-17T17:51:15.021957Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:8:2055] 2025-04-17T17:51:15.021985Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:8:2055] 2025-04-17T17:51:15.022012Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:8:2055] 2025-04-17T17:51:15.022045Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:8:2055] 2025-04-17T17:51:15.022083Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:8:2055] 2025-04-17T17:51:15.022506Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:8:2055] 2025-04-17T17:51:15.023910Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:8:2055] 2025-04-17T17:51:15.053239Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:8:2055] 2025-04-17T17:51:15.054808Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:8:2055] 2025-04-17T17:51:15.088634Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:8:2055] 2025-04-17T17:51:15.089936Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:8:2055] 2025-04-17T17:51:15.133684Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:8:2055] 2025-04-17T17:51:15.137349Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:8:2055] 2025-04-17T17:51:15.167529Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:8:2055] 2025-04-17T17:51:15.168907Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:8:2055] 2025-04-17T17:51:15.206526Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:8:2055] 2025-04-17T17:51:15.207873Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:8:2055] 2025-04-17T17:51:15.237075Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:8:2055] 2025-04-17T17:51:15.238129Z node 3 
:TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:8:2055] 2025-04-17T17:51:15.268291Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:8:2055] 2025-04-17T17:51:15.269704Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:8:2055] 2025-04-17T17:51:15.301622Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [3:8:2055] 2025-04-17T17:51:15.303072Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:8:2055] 2025-04-17T17:51:15.346082Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:8:2055] 2025-04-17T17:51:15.347187Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:8:2055] 2025-04-17T17:51:15.394056Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:8:2055] Initiator [3:7:2054] 2025-04-17T17:51:15.717467Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:7:2054] 2025-04-17T17:51:15.718479Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:7:2054] 2025-04-17T17:51:15.787644Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] TEST 2 1 duration 2.070655s 2025-04-17T17:51:16.185229Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:6:2053] self [4:7:2054] worker 0 Sending message to [4:7:2054] from [4:7:2054] id 1 Sending message to [4:7:2054] from [4:7:2054] id 2 Sending message to [4:7:2054] from [4:7:2054] id 3 Sending message to [4:7:2054] from [4:7:2054] id 4 Sending message to [4:7:2054] from [4:7:2054] id 5 Sending message to [4:7:2054] from [4:7:2054] id 6 Sending message to [4:7:2054] from [4:7:2054] id 7 Sending message to [4:7:2054] from [4:7:2054] id 8 Sending message to [4:7:2054] from [4:7:2054] id 9 Sending message to [4:7:2054] from [4:7:2054] id 10 2025-04-17T17:51:17.442380Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:7:2054] 2025-04-17T17:51:17.442441Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:7:2054] 2025-04-17T17:51:17.442466Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:7:2054] 2025-04-17T17:51:17.442488Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:7:2054] 2025-04-17T17:51:17.442571Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:7:2054] 2025-04-17T17:51:17.442605Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:7:2054] 2025-04-17T17:51:17.442638Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:7:2054] 2025-04-17T17:51:17.442671Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:7:2054] 2025-04-17T17:51:17.442701Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:7:2054] 2025-04-17T17:51:17.442734Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:7:2054] 2025-04-17T17:51:17.443094Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:7:2054] 2025-04-17T17:51:17.445212Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:7:2054] 2025-04-17T17:51:17.484656Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:7:2054] 2025-04-17T17:51:17.486969Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:7:2054] 2025-04-17T17:51:17.542346Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:7:2054] 2025-04-17T17:51:17.544019Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor 
merged response node 3 [4:7:2054] 2025-04-17T17:51:17.630117Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:7:2054] 2025-04-17T17:51:17.632383Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:7:2054] 2025-04-17T17:51:17.684995Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:7:2054] 2025-04-17T17:51:17.687439Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:7:2054] 2025-04-17T17:51:17.755558Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:7:2054] 2025-04-17T17:51:17.757674Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:7:2054] 2025-04-17T17:51:17.810761Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:7:2054] 2025-04-17T17:51:17.812919Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:7:2054] 2025-04-17T17:51:17.858531Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:7:2054] 2025-04-17T17:51:17.860510Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:7:2054] 2025-04-17T17:51:17.905104Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:7:2054] 2025-04-17T17:51:17.907637Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:7:2054] 2025-04-17T17:51:17.954645Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:7:2054] 2025-04-17T17:51:17.956686Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:7:2054] 2025-04-17T17:51:18.040502Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:7:2054] Initiator [4:6:2053] TEST 2 1 duration 2.612802s >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |77.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTx >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues |77.4%| [LD] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |77.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |77.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |77.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan >> SystemView::AuthUsers_ResultOrder [GOOD] >> SystemView::AuthUsers_TableRange >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: 2025-04-17T17:48:30.619181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:30.619312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:30.619357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00153d/r3tmp/tmp1zMQgZ/pdisk_1.dat 2025-04-17T17:48:32.646243Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4527, node 1 2025-04-17T17:48:33.179218Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:33.179282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:33.179318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:33.179757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:48:33.182560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:33.272668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:33.272817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:33.293857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13419 2025-04-17T17:48:34.084441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:47.899782Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T17:48:47.965012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:47.965142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:48.028027Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:48:48.039271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:48.344076Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.344810Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.345421Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.345842Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.345930Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.346022Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.346124Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.347470Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.347622Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:48.575536Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:48.575640Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:48.612915Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:48.802420Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:48.883433Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T17:48:48.883536Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T17:48:48.949282Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T17:48:48.949484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T17:48:48.949684Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T17:48:48.949748Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T17:48:48.949814Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T17:48:48.949874Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T17:48:48.949927Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T17:48:48.949975Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T17:48:48.950586Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T17:48:48.987740Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:48:48.987874Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:48:49.002881Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1879:2605] 2025-04-17T17:48:49.017082Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1912:2622] 2025-04-17T17:48:49.023058Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1912:2622], schemeshard id = 72075186224037897 2025-04-17T17:48:49.027560Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T17:48:49.058761Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T17:48:49.058820Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T17:48:49.058899Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T17:48:49.080911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T17:48:49.093971Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T17:48:49.094128Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T17:48:49.362127Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T17:48:49.602217Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T17:48:49.714780Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T17:48:51.080227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3067], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:51.080423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:51.099159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T17:48:51.319444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:48:51.319624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:48:51.319875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:48:51.319966Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:48:51.320046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:48:51.320114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:48:51.320204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:48:51.320358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:48:51.320461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:48:51.320607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:48:51.320731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:48:51.320850Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2318:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:48:51.360648Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:48:51.360769Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-17T17:48:53.997980Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000018s 2025-04-17T17:49:06.439389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:49:06.439499Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:07.761621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:49:07.761679Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:06.690765Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T17:51:06.690854Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T17:51:06.690894Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T17:51:06.690931Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T17:51:08.390420Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T17:51:08.390526Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 214.000000s, at schemeshard: 72075186224037897 2025-04-17T17:51:08.391170Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-17T17:51:08.423729Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T17:51:09.598148Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T17:51:09.598274Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T17:51:09.598336Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T17:51:09.598392Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T17:51:09.610459Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T17:51:09.750819Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T17:51:09.782560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6964:5157], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:09.782694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6974:5162], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:09.782794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:09.852759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T17:51:09.923233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6978:5165], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T17:51:10.166733Z node 2 :TX_PROXY ERROR: Actor# [2:7076:5212] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:10.624576Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7105:5227]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:51:10.625299Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:51:10.625414Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7107:5229] 2025-04-17T17:51:10.639711Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7107:5229] 2025-04-17T17:51:10.640628Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7108:5230] 2025-04-17T17:51:10.640844Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7107:5229], server id = [2:7108:5230], tablet id = 72075186224037894, status = OK 2025-04-17T17:51:10.640950Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7108:5230], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T17:51:10.641022Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T17:51:10.641169Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T17:51:10.641262Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7105:5227], StatRequests.size() = 1 2025-04-17T17:51:14.270878Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDM4NGU4ZTAtNDdjMDk2NGMtNjY1NjJmMWUtNTcyNGExODY=, TxId: 2025-04-17T17:51:14.270970Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDM4NGU4ZTAtNDdjMDk2NGMtNjY1NjJmMWUtNTcyNGExODY=, TxId: 2025-04-17T17:51:14.290167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T17:51:14.306420Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T17:51:14.306497Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T17:51:14.351208Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T17:51:14.351272Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T17:51:14.450292Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7107:5229], schemeshard count = 1 2025-04-17T17:51:16.845941Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T17:51:16.845993Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T17:51:16.846029Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
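For reference, the statistics cleanup query that the traversal above issues against the statistics table (the RunDataQuery entry at 2025-04-17T17:51:09.750819Z), reflowed verbatim from the log for readability; the $owner_id/$local_path_id parameters are exactly the DECLAREd ones shown there:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;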
2025-04-17T17:51:16.846088Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T17:51:16.848888Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T17:51:16.875440Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T17:51:16.875938Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T17:51:16.876009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T17:51:16.876730Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T17:51:16.905077Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T17:51:16.914336Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T17:51:16.915129Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7224:5296], server id = [2:7225:5297], tablet id = 72075186224037899, status = OK 2025-04-17T17:51:16.915498Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7224:5296], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:51:17.063936Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T17:51:17.064063Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T17:51:17.064308Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T17:51:17.064514Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T17:51:17.064695Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7224:5296], server id = [2:7225:5297], tablet id = 72075186224037899 2025-04-17T17:51:17.064730Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:51:17.064893Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T17:51:17.094351Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T17:51:17.365281Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7245:5316]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T17:51:17.365478Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T17:51:17.365515Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7245:5316], StatRequests.size() = 1 2025-04-17T17:51:17.857952Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjkxYWIwNjctZjUxMzNkZWMtNjUxOWNkYzMtMTEzOWRjZDI=, TxId: 2025-04-17T17:51:17.858010Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjkxYWIwNjctZjUxMzNkZWMtNjUxOWNkYzMtMTEzOWRjZDI=, TxId: 2025-04-17T17:51:17.858995Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T17:51:17.874750Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7258:5447]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T17:51:17.875158Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:51:17.875217Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T17:51:17.877684Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:51:17.877740Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T17:51:17.877807Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T17:51:17.937292Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1
>> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD]
>> ExternalBlobsMultipleChannels::Simple
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest
>> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD]
Test command err:
RandomSeed# 11480127053319167212 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2025-04-17T17:51:20.530426Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:188:17] ServerId# [1:296:58] TabletId# 72057594037932033 PipeClientId# [3:188:17] 2025-04-17T17:51:20.530632Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:223:17] ServerId# [1:301:63] TabletId# 72057594037932033 PipeClientId# [8:223:17] 2025-04-17T17:51:20.530740Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59}
Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:209:17] ServerId# [1:299:61] TabletId# 72057594037932033 PipeClientId# [6:209:17] 2025-04-17T17:51:20.530835Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:202:17] ServerId# [1:298:60] TabletId# 72057594037932033 PipeClientId# [5:202:17] 2025-04-17T17:51:20.530930Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:195:17] ServerId# [1:297:59] TabletId# 72057594037932033 PipeClientId# [4:195:17] 2025-04-17T17:51:20.531021Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:181:17] ServerId# [1:295:57] TabletId# 72057594037932033 PipeClientId# [2:181:17] 2025-04-17T17:51:20.531166Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:216:17] ServerId# [1:300:62] TabletId# 72057594037932033 PipeClientId# [7:216:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction
>> SystemView::AuthGroups_TableRange [GOOD]
>> SystemView::AuthOwners
>> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD]
>> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex
>> SystemView::Describe [GOOD]
>> SystemView::DescribeSystemFolder
>> TDataShardLocksTest::UseLocksCache [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest
>> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD]
Test command err:
2025-04-17T17:49:25.500904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:25.501278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:25.501324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001901/r3tmp/tmpL5XnjA/pdisk_1.dat 2025-04-17T17:49:25.841890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:25.888601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:25.932146Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:25.933124Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:25.933334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:25.933442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:25.945396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:26.034878Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:49:26.034946Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:49:26.035226Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:49:26.144136Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:49:26.144267Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:49:26.144972Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:49:26.145102Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:49:26.145512Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:49:26.145726Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:49:26.145840Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:49:26.147603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.148048Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:49:26.148562Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:49:26.148607Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:49:26.174766Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:49:26.175673Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:49:26.176045Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:49:26.176216Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:26.224594Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:49:26.225322Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:26.225440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:49:26.226978Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:49:26.227045Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:49:26.227137Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:49:26.227508Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:49:26.227647Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:49:26.227736Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:49:26.238594Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:49:26.268150Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:49:26.268351Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:49:26.268464Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:49:26.268495Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:49:26.268524Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:49:26.268555Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:26.268783Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
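The ESchemeOpCreateTable proposal logged above (txid 281474976715657) carries the schema for table-1. As a sketch only, the logged CreateTable body (Name: "table-1", columns key/value of type Uint32, KeyColumnNames: "key", UniformPartitionsCount: 1) corresponds roughly to the following YQL; the WITH clause is an assumed equivalent of UniformPartitionsCount and is not taken from the log:

    CREATE TABLE `/Root/table-1` (
        key Uint32,
        value Uint32,
        PRIMARY KEY (key)
    ) WITH (UNIFORM_PARTITIONS = 1);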
2025-04-17T17:49:26.268827Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:49:26.269153Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:49:26.269249Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:49:26.269350Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:49:26.269386Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:49:26.269421Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:49:26.269467Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:49:26.269512Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:49:26.269598Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:49:26.269643Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:26.269782Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:26.269815Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:26.269859Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:49:26.270360Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:49:26.270398Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:49:26.270495Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:49:26.270702Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:49:26.270768Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:49:26.270867Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:49:26.270910Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:49:26.270958Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:49:26.270990Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:49:26.271018Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:26.271378Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:49:26.271433Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:49:26.271475Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:49:26.271520Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:49:26.271561Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:49:26.271588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:49:26.271614Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:49:26.271639Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:49:26.271664Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:49:26.272908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:49:26.272961Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:49:26.288289Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... w KQP executer: [13:993:2684] isRollback: 1 2025-04-17T17:51:21.748014Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, ActorId: [13:839:2684], ActorState: ExecuteState, TraceId: 01js2e0vzz89r3ky08wt6ad61m, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T17:51:21.748708Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Resolved key sets: 0 2025-04-17T17:51:21.748828Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:21.748877Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 1, snapshot: {0, 0} 2025-04-17T17:51:21.749025Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback, immediate: 1 2025-04-17T17:51:21.749128Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-17T17:51:21.749194Z node 13 :KQP_EXECUTER INFO: ActorId: [13:993:2684] TxId: 281474976715665. 
Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T17:51:21.749253Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-04-17T17:51:21.749303Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-17T17:51:21.749342Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-17T17:51:21.749619Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:993:2684], Recipient [13:962:2778]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 993 RawX2: 55834577532 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-04-17T17:51:21.749664Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:51:21.749790Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:962:2778], Recipient [13:962:2778]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T17:51:21.749824Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T17:51:21.749892Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:51:21.750065Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-17T17:51:21.750146Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-17T17:51:21.750221Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-17T17:51:21.750256Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-17T17:51:21.750287Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:51:21.750318Z node 13 :TX_DATASHARD TRACE: Trying to execute 
[0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:51:21.750357Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-17T17:51:21.750405Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-04-17T17:51:21.750460Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-17T17:51:21.750491Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:51:21.750515Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-17T17:51:21.750543Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-17T17:51:21.750614Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-04-17T17:51:21.750755Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-04-17T17:51:21.750845Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:51:21.750927Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-17T17:51:21.750957Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-17T17:51:21.750983Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:51:21.751015Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-17T17:51:21.751071Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:51:21.751191Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-04-17T17:51:21.751221Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:51:21.751251Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:51:21.751281Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:51:21.751326Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-17T17:51:21.751356Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:51:21.751406Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-17T17:51:21.751465Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:51:21.751500Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 
2025-04-17T17:51:21.751538Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:21.751691Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-17T17:51:21.751837Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:51:21.751941Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e0vzz89r3ky08wt6ad61m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T17:51:21.752116Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01js2e0vzz89r3ky08wt6ad61m, EndCleanup, isFinal: 0 2025-04-17T17:51:21.752332Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=MWE2YzcwMGQtMTNmMDFiMTYtYzVmMDI5M2ItYWVhOTdjNTY=, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01js2e0vzz89r3ky08wt6ad61m, Sent query response back to proxy, proxyRequestId: 8, proxyId: [13:57:2104] 2025-04-17T17:51:22.059537Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1009:2811], Recipient [13:962:2778]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:22.059670Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:22.059764Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1008:2810], serverId# [13:1009:2811], sessionId# [0:0:0] 2025-04-17T17:51:22.059994Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [13:594:2519], Recipient [13:962:2778]: NKikimr::TEvDataShard::TEvGetOpenTxs
|77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
>> Cdc::UuidExchange[PqRunner] [GOOD]
>> Cdc::UuidExchange[YdsRunner]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest
>> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD]
Test command err:
00000.000 II| FAKE_ENV: Born at 2025-04-17T17:50:29.230513Z 00000.006 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.012 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.013 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting storage for BS group 2 00000.013 II| FAKE_ENV: Starting storage for BS group 3 00000.023 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:30:2062]) priority=200 resources={1, 0}
00000.023 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.024 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:30:2062]) from queue queue_background_compaction 00000.024 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.024 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.027 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:30:2062]) (release resources {1, 0}) 00000.027 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.028 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.029 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.029 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.029 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.029 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.029 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.029 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.029 II| FAKE_ENV: All BS storage groups are stopped 00000.029 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.029 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T17:50:29.283186Z 00000.006 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.012 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [2:8:2055]) priority=0 resources={1, 0} 00000.012 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.012 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [2:8:2055]) from queue queue_background_compaction 00000.012 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.012 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [2:8:2055])) 00000.013 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [2:30:2062]) priority=200 resources={1, 0} 00000.013 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_background_compaction 00000.013 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.015 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [2:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.015 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.015 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by 
[2:30:2062]) from queue queue_compaction_gen0 00000.015 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.015 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.018 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [2:30:2062]) (release resources {1, 0}) 00000.018 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.019 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.019 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.019 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.019 II| FAKE_ENV: DS.0 gone, left {1262b, 14}, put {1282b, 15} 00000.019 II| FAKE_ENV: DS.1 gone, left {1890b, 15}, put {1890b, 15} 00000.019 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.019 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.020 II| FAKE_ENV: All BS storage groups are stopped 00000.020 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.020 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 31}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T17:50:29.308330Z 00000.005 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.011 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [3:8:2055]) priority=0 resources={1, 0} 00000.011 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.011 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [3:8:2055]) from queue queue_background_compaction 00000.011 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.011 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [3:8:2055])) 00000.013 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [3:30:2062]) priority=200 resources={1, 0} 00000.013 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_background_compaction 00000.013 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.014 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.014 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.014 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [3:30:2062]) from queue queue_compaction_gen0 00000.014 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.014 DD| RESOURCE_BROKER: Updated planned resource usage for queue 
queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.017 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [3:30:2062]) (release resources {1, 0}) 00000.017 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.018 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (2 by [3:30:2062]) priority=200 resources={1, 0} 00000.018 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_background_compaction 00000.018 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.019 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (2 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.019 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.019 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [3:30:2062]) from queue queue_compaction_gen0 00000.019 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.019 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.750000 (insert task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.043 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [3:30:2062]) (release resources {1, 0}) 00000.043 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.750000 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.044 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (3 by [3:30:2062]) priority=200 resources={1, 0} 00000.044 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_background_compaction 00000.044 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.045 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (3 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.045 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.045 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (3 by [3:30:2062]) from queue queue_compaction_gen0 00000.045 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.045 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.500000 (insert task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.048 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [3:30:2062]) (release resources {1, 0}) 00000.048 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.500000 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.049 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (4 by [3:30:2062]) priority=200 resources={1, 0} 00000.049 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_background_compaction 00000.049 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 
00000.050 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (4 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.050 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.050 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (4 by [3:30:2062]) from queue queue_compaction_gen0 00000.050 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.050 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.250000 (insert task gen0-table-101-tablet-1 (4 by [3:30:2062])) 00000.053 DD| R ... eader{1:2:97} starting compaction 00000.071 II| TABLET_EXECUTOR: Leader{1:2:98} starting Scan{15 on 101, Compact{1.2.97, eph 8}} 00000.071 II| TABLET_EXECUTOR: Leader{1:2:98} started compaction 15 00000.071 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} begin on TSubset{head 9, 1m 1p 0c} 00000.073 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} end=0, 80r seen, TFwd{fetch=1.93KiB,saved=1.93KiB,usage=1.93KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.075 II| TABLET_EXECUTOR: Leader{1:2:98} Compact 15 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 97, product {1 parts epoch 9} done 00000.079 II| TABLET_EXECUTOR: Leader{1:2:109} starting compaction 00000.079 II| TABLET_EXECUTOR: Leader{1:2:110} starting Scan{17 on 101, Compact{1.2.109, eph 9}} 00000.079 II| TABLET_EXECUTOR: Leader{1:2:110} started compaction 17 00000.079 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} begin on TSubset{head 10, 1m 1p 0c} 00000.080 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} end=0, 90r seen, TFwd{fetch=2.21KiB,saved=2.21KiB,usage=2.21KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.083 II| TABLET_EXECUTOR: Leader{1:2:111} Compact 17 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 109, product {1 parts epoch 10} done 00000.086 II| TABLET_EXECUTOR: Leader{1:2:121} starting compaction 00000.087 II| TABLET_EXECUTOR: Leader{1:2:122} starting Scan{19 on 101, Compact{1.2.121, eph 10}} 00000.087 II| TABLET_EXECUTOR: Leader{1:2:122} started compaction 19 00000.087 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} begin on TSubset{head 11, 1m 1p 0c} 00000.088 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} end=0, 100r seen, TFwd{fetch=2.48KiB,saved=2.48KiB,usage=2.48KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.093 II| TABLET_EXECUTOR: Leader{1:2:123} Compact 19 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 121, product {1 parts epoch 11} done 00000.097 II| TABLET_EXECUTOR: Leader{1:2:133} starting compaction 00000.097 II| TABLET_EXECUTOR: Leader{1:2:134} starting Scan{21 on 101, Compact{1.2.133, eph 11}} 00000.097 II| TABLET_EXECUTOR: Leader{1:2:134} started compaction 21 00000.097 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} begin on TSubset{head 12, 1m 1p 0c} 00000.099 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} end=0, 110r seen, TFwd{fetch=2.75KiB,saved=2.75KiB,usage=2.75KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.101 II| TABLET_EXECUTOR: Leader{1:2:135} Compact 21 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 
133, product {1 parts epoch 12} done 00000.103 II| TABLET_EXECUTOR: Leader{1:2:137} starting Scan{24 on 101, DummyScan} 00000.103 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} begin on TSubset{head 12, 1m 1p 0c} 00000.106 II| TABLET_EXECUTOR: Leader{1:2:146} starting compaction 00000.107 II| TABLET_EXECUTOR: Leader{1:2:147} starting Scan{25 on 101, Compact{1.2.146, eph 12}} 00000.107 II| TABLET_EXECUTOR: Leader{1:2:147} started compaction 25 00000.107 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} begin on TSubset{head 13, 1m 1p 0c} 00000.109 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} end=0, 120r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.110 II| TABLET_EXECUTOR: Leader{1:2:147} Compact 25 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 146, product {1 parts epoch 13} done 00000.113 II| TABLET_EXECUTOR: Leader{1:2:158} starting compaction 00000.113 II| TABLET_EXECUTOR: Leader{1:2:159} starting Scan{27 on 101, Compact{1.2.158, eph 13}} 00000.113 II| TABLET_EXECUTOR: Leader{1:2:159} started compaction 27 00000.114 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} begin on TSubset{head 14, 1m 1p 0c} 00000.115 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} end=0, 130r seen, TFwd{fetch=3.44KiB,saved=3.44KiB,usage=3.44KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.117 II| TABLET_EXECUTOR: Leader{1:2:160} Compact 27 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 158, product {1 parts epoch 14} done 00000.120 II| TABLET_EXECUTOR: Leader{1:2:170} starting compaction 00000.120 II| TABLET_EXECUTOR: Leader{1:2:171} starting Scan{29 on 101, Compact{1.2.170, eph 14}} 00000.120 II| TABLET_EXECUTOR: Leader{1:2:171} started compaction 29 00000.120 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} begin on TSubset{head 15, 1m 1p 0c} 00000.122 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} end=0, 140r seen, TFwd{fetch=3.87KiB,saved=3.87KiB,usage=3.87KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.127 II| TABLET_EXECUTOR: Leader{1:2:172} Compact 29 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 170, product {1 parts epoch 15} done 00000.130 II| TABLET_EXECUTOR: Leader{1:2:182} starting compaction 00000.130 II| TABLET_EXECUTOR: Leader{1:2:183} starting Scan{31 on 101, Compact{1.2.182, eph 15}} 00000.130 II| TABLET_EXECUTOR: Leader{1:2:183} started compaction 31 00000.130 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} begin on TSubset{head 16, 1m 1p 0c} 00000.133 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} end=0, 150r seen, TFwd{fetch=4.3KiB,saved=4.3KiB,usage=4.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.134 II| TABLET_EXECUTOR: Leader{1:2:183} Compact 31 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 182, product {1 parts epoch 16} done 00000.138 II| TABLET_EXECUTOR: Leader{1:2:194} starting compaction 00000.138 II| TABLET_EXECUTOR: Leader{1:2:195} starting Scan{33 on 101, Compact{1.2.194, eph 16}} 00000.138 II| TABLET_EXECUTOR: Leader{1:2:195} started compaction 33 00000.138 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} begin on TSubset{head 17, 1m 1p 0c} 00000.140 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} end=0, 160r seen, 
TFwd{fetch=4.73KiB,saved=4.73KiB,usage=4.73KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.142 II| TABLET_EXECUTOR: Leader{1:2:196} Compact 33 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 194, product {1 parts epoch 17} done 00000.145 II| TABLET_EXECUTOR: Leader{1:2:206} starting compaction 00000.145 II| TABLET_EXECUTOR: Leader{1:2:207} starting Scan{35 on 101, Compact{1.2.206, eph 17}} 00000.145 II| TABLET_EXECUTOR: Leader{1:2:207} started compaction 35 00000.145 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} begin on TSubset{head 18, 1m 1p 0c} 00000.147 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} end=0, 170r seen, TFwd{fetch=5.16KiB,saved=5.16KiB,usage=5.16KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.149 II| TABLET_EXECUTOR: Leader{1:2:207} Compact 35 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 206, product {1 parts epoch 18} done 00000.152 II| TABLET_EXECUTOR: Leader{1:2:218} starting compaction 00000.152 II| TABLET_EXECUTOR: Leader{1:2:219} starting Scan{37 on 101, Compact{1.2.218, eph 18}} 00000.153 II| TABLET_EXECUTOR: Leader{1:2:219} started compaction 37 00000.153 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} begin on TSubset{head 19, 1m 1p 0c} 00000.155 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} end=0, 180r seen, TFwd{fetch=5.59KiB,saved=5.59KiB,usage=5.59KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.162 II| TABLET_EXECUTOR: Leader{1:2:220} Compact 37 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 218, product {1 parts epoch 19} done 00000.165 II| TABLET_EXECUTOR: Leader{1:2:230} starting compaction 00000.165 II| TABLET_EXECUTOR: Leader{1:2:231} starting Scan{39 on 101, Compact{1.2.230, eph 19}} 00000.165 II| TABLET_EXECUTOR: Leader{1:2:231} started compaction 39 00000.165 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} begin on TSubset{head 20, 1m 1p 0c} 00000.167 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} end=0, 190r seen, TFwd{fetch=6.02KiB,saved=6.02KiB,usage=6.02KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.169 II| TABLET_EXECUTOR: Leader{1:2:231} Compact 39 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 230, product {1 parts epoch 20} done 00000.173 II| TABLET_EXECUTOR: Leader{1:2:242} starting compaction 00000.173 II| TABLET_EXECUTOR: Leader{1:2:243} starting Scan{41 on 101, Compact{1.2.242, eph 20}} 00000.174 II| TABLET_EXECUTOR: Leader{1:2:243} started compaction 41 00000.174 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} begin on TSubset{head 21, 1m 1p 0c} 00000.178 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} end=0, 200r seen, TFwd{fetch=6.45KiB,saved=6.45KiB,usage=6.45KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.180 II| TABLET_EXECUTOR: Leader{1:2:243} Compact 41 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 242, product {1 parts epoch 21} done 00000.183 II| TABLET_EXECUTOR: Leader{1:2:254} starting compaction 00000.184 II| TABLET_EXECUTOR: Leader{1:2:255} starting Scan{43 on 101, Compact{1.2.254, eph 21}} 00000.184 II| TABLET_EXECUTOR: Leader{1:2:255} started compaction 43 00000.184 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} begin on TSubset{head 22, 1m 1p 0c} 00000.186 II| 
TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} end=0, 210r seen, TFwd{fetch=6.88KiB,saved=6.88KiB,usage=6.88KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.188 II| TABLET_EXECUTOR: Leader{1:2:256} Compact 43 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 254, product {1 parts epoch 22} done 00000.192 II| TABLET_EXECUTOR: Leader{1:2:266} starting compaction 00000.192 II| TABLET_EXECUTOR: Leader{1:2:267} starting Scan{45 on 101, Compact{1.2.266, eph 22}} 00000.192 II| TABLET_EXECUTOR: Leader{1:2:267} started compaction 45 00000.192 II| TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} begin on TSubset{head 23, 1m 1p 0c} 00000.195 II| TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} end=0, 220r seen, TFwd{fetch=7.43KiB,saved=7.43KiB,usage=7.43KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.199 II| TABLET_EXECUTOR: Leader{1:2:268} Compact 45 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 266, product {1 parts epoch 23} done 00000.201 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} end=0, 111r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1} 00000.202 II| TABLET_EXECUTOR: Leader{1:2:270} suiciding, Waste{2:0, 8879b +(262, 99771b), 269 trc, -99771b acc} 00000.204 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.204 NN| TABLET_SAUSAGECACHE: Poison cache serviced 23 reqs hit {24 76962b} miss {0 0b} 00000.204 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.204 II| FAKE_ENV: DS.0 gone, left {27016b, 269}, put {27036b, 270} 00000.206 II| FAKE_ENV: DS.1 gone, left {111149b, 290}, put {111149b, 290} 00000.207 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.207 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.207 II| FAKE_ENV: All BS storage groups are stopped 00000.207 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.207 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 153}, stopped >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |77.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> ExternalBlobsMultipleChannels::WithCompaction >> SystemView::TopPartitionsByCpuFollowers [GOOD] >> SystemView::TabletsFields |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2025-04-17T17:51:15.172083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:15.172630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:15.172689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000831/r3tmp/tmphVejMk/pdisk_1.dat 2025-04-17T17:51:15.675690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:15.769346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:15.824202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:15.824392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:15.839170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:15.937327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:16.025167Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:51:16.026449Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:51:16.027301Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:51:16.027836Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-04-17T17:51:16.028114Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:16.087252Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:51:16.087860Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:51:16.088348Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-04-17T17:51:16.088554Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:16.098105Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:51:16.098953Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:16.099104Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:16.100942Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:51:16.101026Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:51:16.101090Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 
2025-04-17T17:51:16.101474Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:16.101669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:16.101763Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-04-17T17:51:16.102154Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:16.102341Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:16.103777Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T17:51:16.103835Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T17:51:16.103875Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T17:51:16.104165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:16.104246Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:16.104311Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-04-17T17:51:16.115402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:16.141732Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:51:16.142000Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:51:16.142161Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-04-17T17:51:16.142221Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:16.142269Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:51:16.142308Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:16.142664Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:51:16.142816Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:51:16.142948Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:16.142991Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T17:51:16.143063Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:51:16.143127Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-04-17T17:51:16.143162Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T17:51:16.143187Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T17:51:16.143242Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:51:16.143588Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:51:16.143632Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-04-17T17:51:16.143896Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:51:16.144015Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:51:16.144235Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:16.144301Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:16.144384Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:51:16.144434Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:51:16.144472Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:51:16.144508Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:51:16.144558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:16.144618Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T17:51:16.144694Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T17:51:16.145198Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:687:2582], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:16.145235Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:16.145294Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2569], serverId# [1:687:2582], sessionId# [0:0:0] 2025-04-17T17:51:16.145355Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:51:16.145386Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:16.145413Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2025-04-17T17:51:16.145438Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:51:16.145461Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:51:16.145482Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T17:51:16.145517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:51:16.145617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:687:2582] 2025-04-17T17:51:16.145661Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:51:16.145777Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:51:16.145985Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:51:16.146035Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:51:16.146154Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:51:16.146851Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 
2025-04-17T17:51:16.146911Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:51:16.146953Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:51:16.146991Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:51:16.147327Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:51:16.147369Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:51:16.147408Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:51:16.147445Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:51:16.147524Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:51:16.147557Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 720751 ... xecution status for [0:2] at 72075186224037888 is Executed 2025-04-17T17:51:24.845499Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:51:24.845528Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-17T17:51:24.845551Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:24.845577Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:51:24.845613Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:51:24.845647Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:51:24.845791Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:61:2108], Recipient [2:984:2785]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-04-17T17:51:24.857477Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e0yvtffqvqrbqd12jgx11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjgxNDhhMDktMTQ4ZDJhMzAtM2E2MWNlZGUtMTMyZjRkNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:51:24.860381Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1012:2800], Recipient [2:984:2785]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T17:51:24.860519Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:51:24.860584Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-17T17:51:24.860690Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:51:24.860741Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:51:24.860790Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:51:24.860821Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:51:24.860868Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-17T17:51:24.860908Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:51:24.860940Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:51:24.860987Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:51:24.861009Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:51:24.861141Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T17:51:24.861391Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-17T17:51:24.861435Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is DelayComplete 2025-04-17T17:51:24.861468Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:51:24.861503Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:51:24.861534Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:51:24.861575Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:51:24.861593Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:51:24.861619Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-17T17:51:24.861660Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:51:24.872450Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:24.872513Z node 2 :TX_DATASHARD TRACE: 
Complete execution for [2500:281474976715661] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:51:24.872566Z node 2 :TX_DATASHARD DEBUG: Complete write [2500 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [2:941:2726] 2025-04-17T17:51:24.872631Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-17T17:51:24.872663Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:24.872772Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:24.872814Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:51:24.872864Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T17:51:24.872941Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:24.873018Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:51:24.873069Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:51:24.873113Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1012:2800], 0} after executionsCount# 1 2025-04-17T17:51:24.873159Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1012:2800], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:51:24.873235Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1012:2800], 0} finished in read 2025-04-17T17:51:24.873408Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:984:2785], Recipient [2:674:2575]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-17T17:51:24.873438Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:51:24.873466Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-04-17T17:51:24.874338Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1012:2800], Recipient [2:984:2785]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:51:24.874399Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T17:51:24.874704Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1012:2800], Recipient [2:674:2575]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-04-17T17:51:24.874844Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T17:51:24.874897Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-04-17T17:51:24.874952Z node 2 
:TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:51:24.874978Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-04-17T17:51:24.875032Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:51:24.875070Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:51:24.875110Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2025-04-17T17:51:24.875138Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:51:24.875159Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:51:24.875179Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T17:51:24.875200Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-04-17T17:51:24.875305Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-04-17T17:51:24.875520Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-17T17:51:24.875564Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1012:2800], 1} after executionsCount# 1 2025-04-17T17:51:24.875628Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1012:2800], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:51:24.875702Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1012:2800], 1} finished in read 2025-04-17T17:51:24.875767Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:51:24.875812Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:51:24.875839Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:51:24.875865Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:51:24.875905Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:51:24.875926Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:51:24.875948Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-17T17:51:24.875972Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:51:24.876060Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:51:24.876625Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1012:2800], Recipient [2:674:2575]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-17T17:51:24.876664Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } |77.5%| 
[TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::EmptyQuery+useSink >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] >> ShowCreateView::WithSingleQuotedTablePathPrefix [GOOD] >> ShowCreateView::WithTwoTablePathPrefixes >> KqpPg::InsertFromSelect_Simple+useSink >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> KqpPg::InsertNoTargetColumns_Simple+useSink >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: 2025-04-17T17:49:28.404101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:28.404656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:28.404738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018d3/r3tmp/tmpyzMYyM/pdisk_1.dat 2025-04-17T17:49:28.812673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:28.859976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:28.900613Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:28.901470Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:28.901651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:28.901767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:28.913229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:28.996726Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:49:28.996786Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:49:28.996929Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:49:29.104737Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:49:29.104828Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:49:29.105287Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:49:29.105358Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:49:29.105594Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:49:29.105727Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:49:29.105819Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:49:29.107431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:29.107923Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:49:29.108582Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:49:29.108633Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:49:29.147728Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:49:29.148815Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:49:29.149290Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:49:29.149601Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:29.196989Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:49:29.197779Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:29.197902Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:49:29.199661Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:49:29.199750Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:49:29.199808Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:49:29.200204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:49:29.200363Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:49:29.200468Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:49:29.200918Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:49:29.244093Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:49:29.244315Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:49:29.244476Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:49:29.244513Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:49:29.244554Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:49:29.244587Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:29.244796Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:49:29.244850Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:49:29.245100Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:49:29.245219Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:49:29.245327Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:49:29.245380Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:49:29.245453Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:49:29.245490Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:49:29.245543Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:49:29.245578Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:49:29.245621Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:29.245734Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:29.245784Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:29.245826Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:49:29.246220Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:49:29.246260Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:49:29.246451Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:49:29.246639Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:49:29.246709Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:49:29.246811Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:49:29.246859Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:49:29.246900Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:49:29.246962Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:49:29.247001Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:29.247260Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:49:29.247303Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:49:29.247343Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:49:29.247386Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:49:29.247436Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:49:29.247465Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:49:29.247500Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:49:29.247530Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:49:29.247553Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:49:29.248280Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:49:29.248332Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:29.248375Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:49:29.248462Z node 1 :TX_DATASHARD TRACE: Prop ... ounter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } 2025-04-17T17:51:27.199742Z node 14 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-17T17:51:27.199933Z node 14 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-04-17T17:51:27.200043Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-17T17:51:27.200274Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:871:2684], Table: `/Root/table` ([72057594046644480:2:1]), SessionActorId: [14:843:2684]Got WRONG SHARD STATE for table `/Root/table`. ShardID=72075186224037888, Sink=[14:871:2684].{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2025-04-17T17:51:27.200456Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:864:2684], SessionActorId: [14:843:2684], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/table`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[14:843:2684]. isRollback=0 2025-04-17T17:51:27.200971Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=YzhiMGE5MmUtZGVlYjE2YTgtMzA3NDkxYWQtZjFhNGY4NQ==, ActorId: [14:843:2684], ActorState: ExecuteState, TraceId: 01js2e11b57kvq2wdxkd0pwg1n, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [14:997:2684] from: [14:864:2684] 2025-04-17T17:51:27.201243Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [14:864:2684], Recipient [14:689:2579]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback } 2025-04-17T17:51:27.201291Z node 14 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-17T17:51:27.201378Z node 14 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-04-17T17:51:27.201419Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-17T17:51:27.201607Z node 14 :KQP_EXECUTER ERROR: ActorId: [14:997:2684] TxId: 281474976715665. Ctx: { TraceId: 01js2e11b57kvq2wdxkd0pwg1n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YzhiMGE5MmUtZGVlYjE2YTgtMzA3NDkxYWQtZjFhNGY4NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-04-17T17:51:27.202011Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=YzhiMGE5MmUtZGVlYjE2YTgtMzA3NDkxYWQtZjFhNGY4NQ==, ActorId: [14:843:2684], ActorState: ExecuteState, TraceId: 01js2e11b57kvq2wdxkd0pwg1n, Create QueryResponse for error on request, msg: ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR 2025-04-17T17:51:27.203919Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [14:593:2518], Recipient [14:689:2579]: NActors::TEvents::TEvPoison 2025-04-17T17:51:27.204416Z node 14 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-04-17T17:51:27.204511Z node 14 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-17T17:51:27.224321Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [14:1001:2813], Recipient [14:1003:2814]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:51:27.230493Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [14:1001:2813], Recipient [14:1003:2814]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:51:27.230655Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [14:1001:2813], Recipient [14:1003:2814]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:51:27.234116Z node 14 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1003:2814] 2025-04-17T17:51:27.234491Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:27.242799Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:27.244367Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:27.247537Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:51:27.247689Z node 14 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:51:27.247799Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:51:27.248511Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:27.248891Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:27.249007Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:27.249107Z node 14 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2025-04-17T17:51:27.249314Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T17:51:27.249402Z node 14 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:51:27.249597Z node 14 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [14:1017:2821] 2025-04-17T17:51:27.249688Z node 14 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:27.249760Z node 14 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-04-17T17:51:27.249827Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-04-17T17:51:27.250265Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:61:2108], Recipient [14:1003:2814]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2025-04-17T17:51:27.250752Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:1003:2814], Recipient [14:1003:2814]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:51:27.250807Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:51:27.251184Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435075, Sender [14:1003:2814], Recipient [14:1003:2814]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-04-17T17:51:27.251235Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-04-17T17:51:27.251892Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:1003:2814]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 600} 2025-04-17T17:51:27.251968Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T17:51:27.252046Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 600 2025-04-17T17:51:27.252123Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:27.253662Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:27.253742Z node 14 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2025-04-17T17:51:27.253946Z node 14 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-17T17:51:27.254032Z node 14 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-04-17T17:51:27.254107Z node 14 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-04-17T17:51:27.254624Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:1003:2814], Recipient [14:903:2727]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-04-17T17:51:27.254676Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:51:27.254755Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-17T17:51:27.254894Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-04-17T17:51:27.254983Z node 14 :TX_DATASHARD NOTICE: Outdated readset for 500:281474976715663 at 72075186224037889 2025-04-17T17:51:27.255064Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T17:51:27.255154Z node 14 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 
2025-04-17T17:51:27.255390Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [14:903:2727], Recipient [14:1003:2814]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-17T17:51:27.255446Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:51:27.255530Z node 14 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-04-17T17:51:27.255641Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T17:51:27.255916Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [14:24:2071], Recipient [14:1003:2814]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 400 NextReadStep# 600 ReadStep# 600 } 2025-04-17T17:51:27.255973Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T17:51:27.256072Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 400 next step 600 2025-04-17T17:51:27.256311Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |77.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> SystemView::ShowCreateTablePartitionSettings [GOOD] >> SystemView::ShowCreateTableReadReplicas >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> KqpPg::ReadPgArray >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-04-17T17:51:11.770501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:11.770994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:11.771062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ce/r3tmp/tmp5VPa1p/pdisk_1.dat 2025-04-17T17:51:12.274033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:12.354417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:12.399864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:12.399986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:12.411329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:12.532883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:12.603668Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-17T17:51:12.603926Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:12.671452Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:12.671582Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:12.673217Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:51:12.673310Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:51:12.673382Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:51:12.673752Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:12.674012Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:12.674072Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2578] in generation 1 2025-04-17T17:51:12.675700Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-04-17T17:51:12.675885Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:12.686666Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:12.686787Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:12.688134Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T17:51:12.688201Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T17:51:12.688260Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T17:51:12.688572Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:12.688668Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:12.688740Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-17T17:51:12.702887Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:12.754757Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:51:12.754997Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:51:12.755153Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-17T17:51:12.755194Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:12.755233Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:51:12.755270Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:12.755561Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:12.755646Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T17:51:12.755706Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:51:12.755800Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-17T17:51:12.755842Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T17:51:12.755866Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T17:51:12.755901Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:51:12.756329Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:51:12.756438Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:51:12.756598Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:12.756639Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:12.756681Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:51:12.756730Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:12.756775Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T17:51:12.756851Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T17:51:12.757281Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:690:2585], sessionId# [0:0:0] 2025-04-17T17:51:12.757351Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:51:12.757377Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:12.757402Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T17:51:12.757456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:51:12.757590Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-17T17:51:12.757786Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:51:12.757885Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:51:12.759738Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:12.770965Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:51:12.771091Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:51:12.821589Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-17T17:51:12.821775Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T17:51:12.822054Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T17:51:12.822145Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T17:51:12.822754Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:51:12.835220Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T17:51:12.835322Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-17T17:51:12.976454Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:742:2620], sessionId# [0:0:0] 2025-04-17T17:51:12.976791Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-17T17:51:12.990705Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T17:51:12.990809Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:51:12.991348Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:51:12.991399Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:51:12.991446Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-17T17:51:12.991717Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:51:12.991885Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:51:12.992255Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:51:12.992291Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T17:51:12.992379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:51:12.992446Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:51:13.005236Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:51:13.005790Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:13.007303Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:13.007350Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:51:13.007385Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:51:13.007600Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T ... sender: at tablet: 72075186224037888 2025-04-17T17:51:31.574320Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:51:31.574380Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:51:31.574430Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T17:51:31.574505Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:31.574594Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-04-17T17:51:31.574623Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:51:31.575225Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-04-17T17:51:31.575296Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:51:31.588360Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:51:31.588467Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T17:51:31.588821Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:31.588913Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:51:31.588946Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T17:51:31.588984Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-17T17:51:31.589037Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:51:31.589083Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-04-17T17:51:31.589170Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:51:31.593610Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:51:31.593862Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-17T17:51:31.593927Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T17:51:31.594812Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:51:31.594985Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:51:31.595213Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T17:51:31.595258Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:51:31.596341Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-17T17:51:31.596389Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-17T17:51:31.607760Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:837:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:31.607854Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:847:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:31.607921Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:31.614068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:31.621426Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:31.621542Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:51:31.621597Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T17:51:31.811315Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:31.811453Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:51:31.811508Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T17:51:31.819529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:851:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:51:31.857622Z node 4 :TX_PROXY ERROR: Actor# [4:933:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:32.045474Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e15rp23vmyfgn99y3ca66, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MjViNjZhMmUtMzMxM2IwZTUtNWNlNjc1MDctNGUxN2Q0YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.056655Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1038:2799], serverId# [4:1039:2800], sessionId# [0:0:0] 2025-04-17T17:51:32.057067Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-17T17:51:32.057375Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744912292057275 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T17:51:32.057549Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744912292057275 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T17:51:32.057684Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-17T17:51:32.069716Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-17T17:51:32.069812Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:51:32.147795Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e1679ewpayqx2hg9t0q83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZGU0ZWVlZDMtNWUxYWJmYTEtNDY2MjIwMWUtZDBmMTM5ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:51:32.149803Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037889 2025-04-17T17:51:32.150100Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744912292149997 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T17:51:32.151739Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1744912292149997 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T17:51:32.151883Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1744912292149997 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T17:51:32.151963Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1744912292149997 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T17:51:32.152047Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-04-17T17:51:32.164542Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-17T17:51:32.164612Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:51:32.168670Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1087:2839], serverId# [4:1088:2840], sessionId# [0:0:0] 2025-04-17T17:51:32.184575Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1089:2841], serverId# [4:1090:2842], sessionId# [0:0:0] >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |77.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2025-04-17T17:51:24.006031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:24.006667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:24.006732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eb7/r3tmp/tmpUXQe6Q/pdisk_1.dat 2025-04-17T17:51:24.533208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:24.610900Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:24.664979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:24.665133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:24.676696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:24.775188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:25.269237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:25.269391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:25.269485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:25.276642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:25.474861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:51:25.571914Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:26.094171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e0zjd9b1thpayb4r9w7sy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg0MTE2YWEtOTI4MDM0YTQtNzdhODFlZTgtMjhhMjVjZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.171792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e10d03aecr50p5s27rmr0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmYmZiMjItZTkyOGQwNWEtYTZlM2QyNTktNDdlZGQ2ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.240541Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e10f205wwvx7nad2xw7gb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI4ODUzZWEtMWFlODZjZmUtMWU0ZDg3MzEtMzkxNzdkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.304355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e10h61eahj6n2q7d6bk2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI3MWNlM2MtODc2NThhY2EtM2MwNTVhMjMtNjdiOGVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.366102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2e10k643raswty0j53xj2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmFiMzA0NmYtZjgyM2I0YzItYTIwNDIxMzktNjdjNjVkMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.425405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e10n44ve0893d0gvy620q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI3MTUyYjItM2JhNzE1Y2EtMWY2NDY3M2YtMThlZjAzNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.485982Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2e10q025tsfpdhr861fprz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNiMDVlZDUtNWU5NTQzMGItMjMwZWMyN2MtOWUwOGJlZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.546542Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2e10rwaea05y4p58de985v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTJhMGZkOTYtZWY3OGIyNjUtYjZhN2ZmYjctOTA3MGRjNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.607926Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2e10tr4d4chn3qn60mn4hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QwNWMyNDUtMTY2YjNkMzItMjNjNzBlOTktZjY2MjYzZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.670090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2e10wp1aaap8rwq0rf0xm3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE4ZDA0YmYtMzVhN2Y3ZmUtNDQxNzU2YjktYzIwNmViN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.730505Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2e10ym4jffy838rk7a2k6m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQ2NGUzZDEtZTQwODcwZTItNDYwNWIwNjktYzg3YTRkMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.790842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2e110g4w48cn1dpktxwhyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdiZDcxMi1mYzcyZDY4YS1mZjFmYmQ2LTIwM2MwMTVj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.852545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2e112deb1wjv762s6nr80z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI2MDUzMjQtZjNmODhmYzQtZGNjOWQzYzUtOTk3MTAxMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.916431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2e114b5akg32t03qxze4za, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjNjdmN2UtMTI4NGYxMjktNGQ5YTc5MS05ZDA2MzhjZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:26.981922Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01js2e116b1dnes9f0re50z56x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM4OGJiZmQtOWEzZDYxZGItYjNjZTFhMDItOWM2ZGFjYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.045878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2e118d094smdrvn36jdcmw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ1OTgwYjktNTUyM2M4MTItNWU1YmU2ZjctYTIwOTczYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.108368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2e11ac2vebb1z7m0q92ve8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQyODYxN2UtZGRhNWEzNTItNmFiNmFiMGYtNjNjOGE5ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.171068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2e11ca1zsrb6gnjstfcsmd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBkZDI0ZjQtNDlhZDJkYjctNDI1OTM4OTEtOWY3NTBjMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.227933Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2e11e95ckerfn0fgxznc54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIzNTMxYjgtYzhlNGJlMzUtZDIzNGRmZDAtZjExZTY1MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.281122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01js2e11g00zx4j33xz7hdv5ff, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZjYmI3ODEtN2JlNzY3MzMtN2Q5ZTI1MmMtOWQxZWNlNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.353778Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2e11hrdnrnq1re6acfsw9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY3OWYzZDItY2E4NWM5MjYtYzRlN2I2YTUtYTVhOGRjMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.426523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2e11m18hytpe25d0hkzfz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM3ODJiZDEtNDA1MjUwNDItOTk5YmMyMDctYzNiOWJlZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.492306Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01js2e11p9075rg34jv1kk0r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q2NWQ4NWEtMTg5YzE5ZmEtMTA4MDU5NzQtN2NmY2I1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.549660Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01js2e11rb6abvqhvjgzrzp895, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M1ZTJhMTctNGUyODgyYmMtNzk3NjM0OWItMTA5MjFmMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.633183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01js2e11t56qpmd72bvthz8wmw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFjMTk3YmUtZTc0ZjMzODUtMWM5ODk0ZjItZjU4MDU4ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:27.725706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01js2e11ws07y5kzp210g6yq7p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZmYTBjNjYtNWU0ZWQ1Yi1hZGE1ZWQzYy0yNDIzZGZi, Cu ... 83Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01js2e14jg3xzxvetc90fn19yx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjUxNzk3ZmMtZWE3M2Q5OTYtMjg1NGVjNDQtOWY3MzYyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:30.503744Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01js2e14mj1mhqw2yr0n3rst13, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA4ZDQ2MmEtNDhhMDVkMjMtOGI3ZWFhZC1lZWY4NjlmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:30.576807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01js2e14pfa22eh9h7mt0dvryr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRkZmVkYjctZTJhYzlmMzgtMjY3NjYwYTctNzk2NDBkY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:30.644346Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01js2e14rq7y0r7yzehj2yzwcw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM4NDVmM2EtNTEzOGIzMTAtY2MwMjk2ZjQtNzk1NGRkZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:51:30.761875Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01js2e14ty3zb97vc47107f5sp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NlNTQzMDMtODVjYmVmMGQtNDMwMGUxYmUtZThhYWRlYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:30.886712Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01js2e14yj594fta2a4wvqk51v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAxNGRhNjQtYzhiNmY4NC1lNzMxZThhYS1hYWQyZmM4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.048665Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01js2e152m73h6av3c2t3ydp2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNkY2M1OGQtNWI1YTMwODEtOGY1MjVlNzUtNTY4MTEwNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.108865Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01js2e157f07jfpayzenyz5x4z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMyMjJkNWYtNTE5ZmJmMzgtZTViYzU0YzUtMjAwZmU5Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.165683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01js2e159b9esj00x1f5d3hz3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI4YWZhZWEtMTRjYWVmNGYtYzFiYjFlMzAtZTA2NDI4Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.217060Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01js2e15b4dxxxfwp4dgyzr69k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTUxYWQxMWEtZmY3ZmYyNmEtNWUyYTE5MGUtMmNjOTM1MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.271430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01js2e15cpa6450a8f39eny2x6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzljMWQ4ZGQtZWQ3YmMxY2QtMTI2MzhmMDgtZjM3OWY3MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.329491Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01js2e15ecd9dycqnzyac9kpjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk5ZTY1ZTQtZmJmNTkxMC04Zjk2NzFmOS03ODRjNTFiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.395078Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01js2e15g8b6actmce9jd9pjsc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ2ZjViNmItZGUxNzY1NjItNmRjMjg1OTMtM2VmODM3MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.454452Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01js2e15ja1wxty4dysgx9qcxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEyZjE2MmMtZDk5NzFkN2UtZjcwYmE4YTEtNGIwYTRiMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.515844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01js2e15m45bn9vgbdy8kxbr64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIzMjgwMDYtNDc1ZGMwNzMtMTViMzljMDEtNzY5YzBkMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.572506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01js2e15p3d2c4xc6a80g2wvah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE2NjQ4ZDktNWZiNTUwYmYtODBkZjU2NDctZTljNzMzMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.653625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01js2e15qvbte54zwrfvctxdab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzEzYmI2NWYtOTk4MDEyNDEtYjY4ODY3Ni0xY2Y4OTQ2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.720102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01js2e15tcaj7thc4vrnzvjr7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZmN2U5ZjctNWY3ODM2YmUtZjc2M2NkYTYtZjdhZTc0MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.848027Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01js2e15whbke0gcpks3416sjp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgwMWY2OGItZTE5NzFlYi02NjI3OTZhNS1jYWUyMWE1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.918123Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01js2e160f81jddcfaxj3e5jtt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRiZTg3ZDQtMjIzNzI1YjktODA3ODI5MDMtOGNlNmZkNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.005112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01js2e162n5ak8sys15pgv27ab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIyOTdlNjMtZTcwNzNmN2QtNTIzMmFlMTgtOTBkZjNlNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.080390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01js2e165c7wq6jx3x3r3svjqn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTE1ODViNmItOThlMDhkZTItNDM4NzUyNzYtOWFmZGFhODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.145585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01js2e167q2neagv9sp5r7k8dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE0MTg4ZGQtNDJiZDVjMTQtY2M3NmVkMzEtZDhlZjFhMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.269227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01js2e169yfxg2dypqqspwb1kj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFmZjc1NzEtNWIzNDhlZmQtZmJmMTk5OTMtZjZlMjllNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.332476Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01js2e16dk4rm7jqh7svsvbg5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjEwMzE2NTgtMTU2Mjk1NzUtNzU2YWZjODktOWM3N2FhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.430738Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01js2e16fkex3azqex0tw1zmg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTUyYWExZWUtYjc1OGExNDEtN2Y4MzY2NDItYTdiNTg3YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.572054Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01js2e16k4dyg82stc0fbc3v5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU3ZWRjZDctMWIzZWVhMTYtNTdjMTVhNzMtMmNiMzYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.705016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01js2e16q13x961bjrtm2s6rz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGM4NDJlNTAtZWI0YWYwZWEtN2U2YmU0MTctYjdhYThjYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.797988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01js2e16vxbh1bh00bksc3nbrz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVlODdkNjgtOWNhNjVlODctYTliNDBjYmYtNWZiZTM1MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.861710Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01js2e16y5f994hqcf1ph8x4xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZhMGZlMzAtNGRhMDUzMTItMjVlOWVjMGYtNjhkY2Q1ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.924383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01js2e17040nba5hmf6dqw7sjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc4MTkwMmEtYjZiYjBmNWMtMjlmNDliOGMtNzNkZDczMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.989267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01js2e1723fxhjzem9cbmnf08b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFiODJjZjUtMTU2YWM1MmQtZDM1YjI2NzUtMmRkZjI1MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.055996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01js2e17444rpze7zggnv7yxnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIwMDhkNzctM2U4ZmIxMmQtNDIyYjIyOTUtODlhMmUyY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.122947Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01js2e17680bm9q180cwhmnqnx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhZDU3ZDItODk0Yzc3ZmQtYTg4YTE4ZDAtM2U4YjkxYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.191075Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01js2e178a97r0nxcppkz9ctj8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYxNTMzZDEtOGQ1MDE5ODItMzIxZWQ2MTgtZTE2YWZkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.461949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01js2e17aybjewptdf6m7e57b6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjYzk4YS1lZDU2YjBiOC04YjFhNTg1Yy03YTA5ODNlZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpPg::CreateTableBulkUpsertAndRead >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> BSCStopPDisk::PDiskStop >> BSCStopPDisk::PDiskStop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 13897368698995237807 >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink >> ExternalBlobsMultipleChannels::Simple [GOOD] >> SystemView::ShowCreateTablePartitionAtKeys [GOOD] >> SystemView::ShowCreateTableColumn >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> SystemView::QueryStats [GOOD] >> SystemView::PartitionStatsTtlFields ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-04-17T17:51:28.835614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:28.836183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:28.836241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ea4/r3tmp/tmpzKk9U5/pdisk_1.dat 2025-04-17T17:51:29.438816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:29.484682Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:29.531969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:29.532093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:29.547419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:29.644288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:30.125310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:30.125435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:30.125538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:30.131802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:30.321699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:51:30.399841Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:30.778915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e14aa1zqaethdbnkxcpyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmEyOTZiYmMtY2QwM2QwZjYtNjlhMzM3ZDYtZWM1YzkyZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:30.888189Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e14za1dkbzykvga4xspcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhkMDY4NDktNmMzMmI1MDAtZGMyOTEyN2MtNjFlZTEwODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:30.949573Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e152d3wjhkvxrte7jgv3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjljNTFmYzMtNTE1ZDllNDQtNjVlZmM0Y2UtYTc2ZTc2MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.008915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e154b73jpn49v196jymnw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg0NmE2NzgtM2RkNGViMDYtYTc0MjM4NDMtNmRjZjEyN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.079950Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2e15664nc5x4sw7zxp9tp5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGViYjBlYjYtNjFiNjUyNzYtNDU4YTVkMDQtZjA0MDQxMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.155651Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e158e4fwrxfc7ab3xnpk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThiMTIzNDQtOTdiNTVkYTktZWYzYzFkNTctZDdiYjM4ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.219037Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2e15avc7vd702akd6mf36t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcxY2QzNWQtNGFlNDk1NGItMTgzOWNjN2MtYjM3NDY0MTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.283997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2e15crbg9rqd0c67yjq80b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI0ODY4ZTYtOTQ0YzU3ODAtMmJkNDhhZC1lZTZhMmQwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.379452Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2e15etab0s92jpyf59x3xx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ3NTQ0ZWYtZDYyNGFhZGUtZTdlMmU4NWEtYjI3OTlkODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.456986Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2e15ht9f35ekdmb9ne6vnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ2YjIzZWQtY2ZjYmRjZWMtOTMzNGY3NDItM2QxMTRhMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.533808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2e15m8dra9dnvh05e7xz5m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmZGQyYS03NDAxNjJmYS0zYWY5YWEyNC1lZjdkZmQ0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.591632Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2e15pke2nm7288f7rv1sdm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTUwMzYzNTQtZTlhNzNhNGUtNWE4YWIyZTMtMjEzYTA4Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.647852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2e15rd3vkpx0vhj00ya9s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgyYmYwY2YtNWVjYzFjZDgtMzA2ZGZlOWYtYmEzOTBhMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.718015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2e15t54awcg13mjkph895h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU0OGQ5ZjItYWZkYzVhODUtYWY2NGFhNTgtOGUyYWQzNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.781111Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01js2e15wbcmwc5cpgj5d7hqk2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjMwZjk5YzQtZmQyMTY1OTktZWNhNDNlMzItYWE2MGY0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.842111Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2e15yb1s57jttt6hkxnqhg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q0ODM4NDctYjBhYjQ4YjUtYWRlMWE3ZjktZWI4MWVhMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:31.959525Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2e16085hvzyts7b6nkvz0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODExNjNmMzMtOWIyNjFiZDYtNTVlMGY2MGItZmNiOWVkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.021467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2e163xc4ws37412ph3y4jc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhYjYzZS0xNzU5ZDY4Mi0zYzhkZWYyYi04NmQ4MTU2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.081588Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2e165v9qa1a3svcd0p53y6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI1MDg5NzUtZDdhNjkzYjQtZThmZTQ3ZWUtYTRlYWU0OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.158628Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01js2e167qeeqb26s4ydzxnznr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiYWRiNmMtNDM2MzBmOGUtOTczODliZTQtY2M2ZjM5Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.253542Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2e16a470fgvbyqvdbe2t60, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUzMWNkMjgtMjc1OWFlNmUtMTcxMDdjZDktZDY0ZTY0ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.348398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2e16d6cqvbrc713ke2c05d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhNTM2ZWEtZTE5ODI3Y2MtZWZkOGFhOTctMWRmYzUxNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.471169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01js2e16g4ah2wynyc9v9vg0vz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI0ODZkMmYtMjUxM2RjMmEtZjM2NjU0ZjQtN2I4Yzk4ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.539771Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01js2e16m0fh703gb9k8xyvbd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IwMWQwYy02YTRhMDdjYS0zMmI2MTJjNy02NjZhNWZiNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.608098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01js2e16p26z2668ga15ghw7sa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE1NzRlMDAtNmZjMWFhYzAtMmZhZDcwOTUtOWI1ZDViMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.678015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01js2e16r883azzv0b5ke73dcp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmUyZDA0YjYtYWJiOGRhNzUtNjNiZWQ5MmItZjc0ZTdi ... 564836Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01js2e19jbd0zhk14yxy11jszg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIxMjgxY2ItNWE0OTRhYzAtMWRmMzM2ZWEtY2MzZmMwYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.631429Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01js2e19mm438axkvdb653cr8k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE0YTVhMDQtMWM5NjdhNjktZWZmNTlhZTQtZWQ5NTNmNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.694006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01js2e19pp65v8rvartk31rm2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQ2ZGY4NmEtYzdlOTI0M2MtYjJmZTE3OGYtOTc3MjNkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.756128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01js2e19rm1btvyt5c0thnrcdf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWY3ODE1YjUtZWNiMGQ5OTMtZjc2ZjhkODYtNjU0NjdlMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:51:35.841788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01js2e19tj7csgks4r3rkv3yzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE0OWYzNzMtNDY5MWY5NWYtM2RhYjkxYTctNmQyMWRkM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.910846Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01js2e19x91pn00ywqte6bvafs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUxZDg2MmMtYmE1YmFiMC01MzZlZTJiLTJiM2JjNjIz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.111887Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01js2e1a13ezw21tezgq64hhwj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdjZWQ3MmYtZjliZGEyNTItOTQ1OTkzM2MtMTE5YmJmODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.203237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01js2e1a5t0y4xjb9cjsj37d8y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM0ZjIxMzctZmZjNzA4MDktZmUxNmMyZC1kOTdkZDcwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.270098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01js2e1a8g99vs989d40g1pbnf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU2MTIxYTYtZWZiMjY0ZTctMmI2ZmRmMGItZjcyNDA1Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.364953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01js2e1aam765csa1q9wx2szsf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGUxMTE3NTgtZjE4YTEwYmQtMjY1YTY1ZmEtNWNlNjljMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.444438Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01js2e1adq7efhm0xappjvs0g3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2I1OTAzMTYtODdkYzNiZmItZjUxNDUxYi04MzM3NzkyMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.520943Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01js2e1ag509e1fmv3kcmwzfe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM1NDk3ZDgtYWEwMjAzZDItZTFjMjQxMTctMzE2NzMzNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.591207Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01js2e1ajgbegr0ayyy7vnxa1q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFiZDJmY2UtODJmZGViZTctZTM1NjZhZTctZGZiZTBiNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.669363Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01js2e1amp89wcmvng1bh064fs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y5OWU2ZmEtODM0ZTJlMDgtODcyNjhjNGItM2ZiNTY4ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.739571Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01js2e1aq4dqh5dys82656dbat, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE4Zjk0MDEtMWNhODQ5ZTctZmFkZjU4OTQtNjllMjNkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.827080Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01js2e1asa5w1hry6d62ns6fr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM0NjhhYy1kZGZhMzgwNS0zZmIzMjIxMS04ZDExN2I1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.907284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01js2e1aw376c5j6q9mmayrsn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTIyNDhhY2YtYTU4ZTZhMTYtZDk0NzVmOWQtMjc4ZGYzNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:36.963751Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01js2e1ayh6bd94dr2t759bs0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRjODQzYjMtMzhiNjBlMTktN2VhOTAyYjEtODAzZjQyYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.060330Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01js2e1b099g51rrqjjvhe6ftb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRmODEzMTEtOTcwNzBhMGMtZWUwNDEzYzYtMmIzODNlYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.154903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01js2e1b3cf490fz1ntj1pq2xb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzUzOTcxMGEtNGMwNzQ4YTEtNDZhMzZhNzctZTczMWE2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.257027Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01js2e1b6s75avb8h38dqxjjpg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRjMDA5ODItYzA0ZTNiNGQtYTllM2U3YjctZTBhODBhOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.318133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01js2e1b9f3dw528rajpwfb107, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA2M2UxOTktNDM1YzMyODAtZDVjNmU3NmItNDI2ZjQ2MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.377068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01js2e1bbd62jeth4ry9gcke41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWEyOTA5N2QtYjFmMDQwYTktYjZmYTk5Y2QtMTRkY2RkZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.429868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01js2e1bd702deczchsp39cvty, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZlOWQwZmUtYjQyMzk3ZTgtZTNmN2E5OWMtNDc1M2E3OTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.479051Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01js2e1bev5d55a9m0bm4xkjv4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIyYTBiZGItYjUzMGJkZjgtZTY3MGNkODgtOWMzMWIwOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.535995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01js2e1bgddwnwkh5xk4wzcs43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQwMTBiNTMtOGQ4ZGJmOTYtYzM1ZWQ3YTQtMzE1NWJhNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.630617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01js2e1bjv9f7y0x55wjkvc9xf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQwOWU2NC00YTllMGViZC01NTBmZGJmMi1mMzZmOGQ3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.687311Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01js2e1bn40dc7zfeq00bfjx89, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIxZGJlNzctNDkwMTU4YWUtZmY0N2U5NGUtZGQ2NDliOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.751312Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01js2e1bpx106bj8gsr4k2d28c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDlkYjkzZDYtNzE3ZjFiMTktODRmOTE5OS03MWM2YjI0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.824171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01js2e1bry1z30fd7qd4xhd23t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg2MDAzZWEtYjUzNzVlYS1lNTQ3OWFiZS1mZmYzZTBmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.904139Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01js2e1bv7d3kt5xjc4qfnan9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzUwZTc2MjAtMjQ2MjAwNTYtZGNiMTkyMTUtNWQ0Yzc2M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:37.986309Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01js2e1bxt44x63g23gzyf70s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI3NzIxYzUtNzlkNzZhZWUtOWEzZDg3NDgtYmJkZmM1MjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.079936Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01js2e1c09f5ffqxwvxzt0a0ke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRkYWMxMTItYzUzOGNjNGYtODk2OTkzMmMtN2E5YjM5Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.279123Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01js2e1c3x2j3rqryb1t0087ek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTY4NzY4MzMtNWI3ZWI4MDEtZWE3ZjA1MDMtZGViYWNlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.386682Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01js2e1c9xes899s2na28bcc6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E5YWE4MGYtY2Q3N2ZhNzQtYTJiYzFhNy1hOGE3ODhkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.744709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01js2e1cjq8wwfmz80b0rkpe9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk4ZDMxYTYtM2ExMDhjMWUtZmNiYTFhOGItZTkzOWIxZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: 2025-04-17T17:49:26.372905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:26.373268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:26.373308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018f3/r3tmp/tmpwPE2nN/pdisk_1.dat 2025-04-17T17:49:26.722256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:26.764674Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:26.802602Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:49:26.803420Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:49:26.803646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:26.803760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:26.815207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:26.894291Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:49:26.894359Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:49:26.894517Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:49:27.037001Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:49:27.037110Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:49:27.037732Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:49:27.037873Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:49:27.038135Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:49:27.038334Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:49:27.038424Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:49:27.040159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:49:27.041605Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:49:27.042423Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:49:27.042503Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:49:27.073805Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:49:27.074940Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:49:27.075411Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:49:27.075670Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:49:27.120552Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:49:27.121384Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:49:27.121496Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:49:27.123256Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:49:27.123334Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:49:27.123481Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:49:27.123867Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:49:27.124024Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:49:27.124121Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:49:27.124584Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:49:27.172223Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:49:27.172408Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:49:27.172515Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:49:27.172552Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:49:27.172603Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:49:27.172659Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:49:27.172881Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:49:27.172924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:49:27.173178Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:49:27.173259Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:49:27.173351Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:49:27.173386Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:49:27.173416Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:49:27.173444Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:49:27.173471Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:49:27.173501Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:49:27.173541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:49:27.173641Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:27.173673Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:27.173734Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:49:27.174129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:49:27.174192Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:49:27.174288Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:49:27.174502Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:49:27.174564Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:49:27.174658Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:49:27.174699Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:49:27.174736Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:49:27.174818Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:49:27.174865Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:27.175138Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:49:27.175748Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:49:27.175783Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:49:27.175818Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:49:27.175867Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:49:27.175916Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:49:27.175963Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:49:27.175998Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:49:27.176025Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:49:27.176764Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:49:27.176811Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:49:27.176849Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:49:27.176905Z node 1 :TX_DATASHARD TRACE: Prop ... _DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-04-17T17:51:39.169602Z node 16 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-17T17:51:39.169677Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:51:39.169711Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:51:39.169747Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:51:39.169784Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:51:39.169834Z node 16 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-04-17T17:51:39.169927Z node 16 :TX_DATASHARD TRACE: Operation [0:5] at 72075186224037888 aborting because it cannot acquire locks 2025-04-17T17:51:39.170049Z node 16 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=5; 2025-04-17T17:51:39.170204Z node 16 :TX_DATASHARD INFO: Write transaction 5 at 72075186224037888 has an error: Operation is aborting because it cannot acquire locks 2025-04-17T17:51:39.170279Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:51:39.170311Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:51:39.170341Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T17:51:39.170370Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:51:39.170559Z node 16 :TX_DATASHARD ERROR: Prepare transaction failed. 
txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-17T17:51:39.170624Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-04-17T17:51:39.170670Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T17:51:39.170742Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:51:39.170805Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:51:39.170864Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:51:39.170890Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:51:39.170928Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-17T17:51:39.171038Z node 16 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T17:51:39.171123Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:51:39.171200Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2025-04-17T17:51:39.171420Z node 16 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-17T17:51:39.171554Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:39.171926Z node 16 :KQP_COMPUTE ERROR: SelfId: [16:905:2680], Table: `/Root/table` ([72057594046644480:2:1]), SessionActorId: [16:840:2680]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:905:2680].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-04-17T17:51:39.172153Z node 16 :KQP_COMPUTE ERROR: SelfId: [16:898:2680], SessionActorId: [16:840:2680], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:840:2680]. isRollback=0 2025-04-17T17:51:39.172651Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YjBiYmFkYy1mNGQwYWEzMi0yZjdkNDE5Mi1jMDY3MjE5Ng==, ActorId: [16:840:2680], ActorState: ExecuteState, TraceId: 01js2e1cv93kds4m05t3r5qkyd, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:899:2680] from: [16:898:2680] 2025-04-17T17:51:39.172914Z node 16 :KQP_EXECUTER ERROR: ActorId: [16:899:2680] TxId: 281474976715663. Ctx: { TraceId: 01js2e1cv93kds4m05t3r5qkyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YjBiYmFkYy1mNGQwYWEzMi0yZjdkNDE5Mi1jMDY3MjE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-04-17T17:51:39.173348Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [16:898:2680], Recipient [16:689:2579]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-17T17:51:39.262332Z node 16 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-17T17:51:39.262704Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YjBiYmFkYy1mNGQwYWEzMi0yZjdkNDE5Mi1jMDY3MjE5Ng==, ActorId: [16:840:2680], ActorState: ExecuteState, TraceId: 01js2e1cv93kds4m05t3r5qkyd, Create QueryResponse for error on request, msg: 2025-04-17T17:51:39.264313Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:689:2579], Recipient [16:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T17:51:39.264367Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T17:51:39.264448Z node 16 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-17T17:51:39.264638Z node 16 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-17T17:51:39.264765Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-17T17:51:39.264902Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-04-17T17:51:39.264962Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T17:51:39.264995Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-04-17T17:51:39.265028Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:51:39.265060Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:51:39.265116Z node 16 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-04-17T17:51:39.265170Z node 16 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-17T17:51:39.265207Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T17:51:39.265235Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:51:39.265263Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:51:39.265290Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:51:39.265321Z node 16 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-17T17:51:39.265440Z node 16 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 
2025-04-17T17:51:39.265533Z node 16 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-04-17T17:51:39.265638Z node 16 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:51:39.265745Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:51:39.265793Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:51:39.265876Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T17:51:39.265937Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:51:39.265979Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-04-17T17:51:39.266009Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T17:51:39.266041Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:51:39.266070Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:51:39.266120Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T17:51:39.266145Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:51:39.266287Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-17T17:51:39.266360Z node 16 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T17:51:39.266394Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:51:39.266447Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T17:51:39.266533Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:39.267306Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [16:61:2108], Recipient [16:689:2579]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2025-04-17T17:51:39.275096Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:912:2730], Recipient [16:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:39.275233Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:39.275324Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:911:2729], serverId# [16:912:2730], sessionId# [0:0:0] 2025-04-17T17:51:39.275501Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [16:593:2518], Recipient [16:689:2579]: NKikimr::TEvDataShard::TEvGetOpenTxs |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> 
ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-04-17T17:51:30.841092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:30.841665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:30.841720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e92/r3tmp/tmpoQwYA4/pdisk_1.dat 2025-04-17T17:51:31.345279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:31.418300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:31.457877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:31.457989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:31.470736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:31.560388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:32.027835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:32.027988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:32.028111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:32.041664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:32.249915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:51:32.331536Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:32.667013Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e165mbnj6jnyr7sxnvgja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjc4NjQ4MzYtNmQ4MDZjZTItYTVjMDQ3ZWYtYzdjYTAxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.757460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e16t7dsf19j851k58bzf9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzliNDhiYjEtZWQ3NDFkNDMtZGMwNTkzZWEtMjlkNTI3MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.825674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e16wtfv4zerphv4ycg6bj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBjNmRlYWYtY2I5NTRkY2EtYWQ0MTg0MjgtYTJkZDdiYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.911701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e16yze53sbgynrty7pbrc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJiM2MwMWYtZWJjZTQ1MzQtZWQzMzU0YTktYjBmNTM5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.027755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2e171n04sbqxsbfeydfgpv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM1MmY4MTUtZmQzNjIyMzAtOGY3MDcwYmEtZTNkNGI1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.099546Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e175acgq2x27vc85rj81e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyOWM1NWYtZDk1ZjEwZGUtNGE4OTZiMS03ZGY0MTVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.171383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2e177jdvjwgecebtnhqbe7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FiOGVmZjEtOGM0MjFjNDItNmRjZmI3MDItMTYyMWU4ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.241699Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2e179s8shmhz4d4sx4x49j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTEwZDdiOTgtM2FmYmNjNGQtMTUzN2ExYzMtMTZjYzBhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.312346Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2e17c00c0w8ts7hmcs9ajx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI3ZGYxMTEtMjAwNWJlOWEtYzc2OTk1ZGYtYjAwYWNiZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.384217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2e17e7egd5yr1qfa2v5dq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVhOWNiMTAtY2Q4MWUyZDktZmQ4OTdlZjEtNDhlZTJiZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.457045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2e17ge57hwtvn1wae4yzkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA3MWM0OGQtNWE1OTI4N2MtZjA4NDA1YzAtM2Y3YTA5ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.530750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2e17jq903294sfa7w9ddn8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA1NTBjZmYtMWNjNTdhMzctYTQzMDhkMzUtZmE4ZTViOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.604846Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2e17n1dqcxmbhe5jcrm5bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFiYzhiY2YtMzQwNzM4ZDItODNkYjIyOGYtOGY2MDhiMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.678893Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2e17qb0ch7gxspexhftkdg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk4NjUwNTUtYzE2YTExODgtYWI4YTA2M2YtNTA1ZmVjYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.753090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01js2e17sn9gt7pmfnwaqxksjg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhkYmJkZDQtZjZmYzM5YTItN2E3ZTdkNTMtODFkZjkxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.879430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2e17w0a3wbphqnnxkaygmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjNTEzNTEtYTQzZDJmZTctZDRkYTNjMWQtNTIzY2Q1NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.026247Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2e180mcvt4edq2qcb8q5yx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFiNmU4M2YtMzE1Y2MwNzItNjdmMzQ2MC1hYWM1YzZhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.159460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2e184m5n10mc8ztap89c6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ0MzNiMTktYmVlNjM0MDUtM2QxZGE3OGItNDhhZjViNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.219519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2e188nb8ehzt28zh0nv14y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM3NDZiOTMtYTlmZTY3MmQtZjEyYzhkY2EtNDc2YjExMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.286106Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01js2e18ahak2sh8cq1th25py0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg1NWYwOWQtNWJkOTUzMTMtYTY2ZjBlNTMtMTlmMDFlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.370880Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2e18crb3a20vyjbz9cxn9f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzczYTI1NGQtZGJmYTViYWMtMTRjN2ZkOTgtYjc4ZTBjNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.459435Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2e18f8fmfwcpncygd3fgz6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRjMDRiOWMtZGY1YWM5NzItN2E5YmJkNTYtOWI4M2FjMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.519107Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01js2e18j1ekx7wndhphcy8agq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE1ZTAyYTUtODkzZWI5NjItZGFlMjUzNWUtM2MxZjAzMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.586003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01js2e18kw2eem66bm12stvez4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZiYzQ0MDQtYWM3NTQ5MjctNzU2ZWUyZGUtNzIyNWQwNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.707792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01js2e18pc0szzk8ch632w24n7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIyMjU4Ni02NzkwMGE4OC04ZGQ1NWM0LTVmZmNjZTY5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.788670Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01js2e18ss0mp09p7rg1b8bk9w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc1MjFmMjgtMTI3ZGZmMWQtZmEwOWRmYTQtZmEzNzI3ZWI= ... 316303Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01js2e1c7mdy5vhf4kwamjv7tr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTczMjI5NDEtZmYwYThiNzktNzZmZTI1YjQtNjNlYjgyNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.393791Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01js2e1cak0mywqn7jgbbqzz3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRhZjQ3ZTgtN2U0NmQ1MDMtZmE1YjZkOTMtNWQyYWQ0OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.465038Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01js2e1ccz8pv5hmdvxh5199vk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJhNGY3MGMtODgyNmYyNDctNTRiMDQ3YWYtOWVjMGU0MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.534910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01js2e1cf7104955nsahqzmat1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmExY2Q2OGYtOGI4ODZjMTItODNiMzQwMmQtYjcwMTkxMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:51:38.605096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01js2e1che7xs2mcqbgvgycabr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY5Yzg0MTktNmEzN2MyN2UtYzQ5N2I4YzQtOTY5MDI5YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.672593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01js2e1ckk5rqy8k504swr3b60, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxMDFhMjEtOGE5OTVmNjQtYzQ2YjRhYzEtMzdkZDE1MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.741637Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01js2e1cnr22fx8jppgsg6me7a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgwMzYwMTUtZWI3NWMwMGEtMjE3MWEyMTktMjBlNjU2MGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.812085Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01js2e1cqw4c85edyy3rn4tg4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM5YWEyMTYtY2QzNWFjODUtNzA4MzMxYmItOTA1OWIyNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.901917Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01js2e1ct3625416s9xcw5sydy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRmYzQ0MTctNmZhOTBiNmQtNGQ3N2QyOWYtYWMyMjE4YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.970815Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01js2e1cww6hqmt5x88hg1qzq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFmZTM2MjUtZDljYTViYWUtZWIzMzI4YzItZTg1YWExOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.035447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01js2e1cz24tj9r1m26cz1cfa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIxMzgyMDUtNTVmZWMzYjMtOGY5OGNjZDUtZThmZjk0OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.106895Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01js2e1d11b68wr6tzkxncx4dc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU1ODI1M2QtNmMyODA5ZGUtMTA1YTQ5OTgtZjhkOTI5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.184310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01js2e1d3b7230s5vxrncsckvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE0OGEzZjgtZmY4OTE4OC01YWJlMjAzNi1hMWZiMDVkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.267025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01js2e1d5r3nn7ra76prtdtrw8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRlODM0NGEtZWVmMjQ5Y2EtMTQ3ZjdhMDUtNWM3ZTc5NTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.356568Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01js2e1d8b08ndv554qf63e1z1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRlYTRjYTItN2MwMTUyYTgtMWNiYTUyMDgtZjA1MjYxMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.490331Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01js2e1dbd5jpry2j08tzc3qr3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEzOWMxNjUtMTg3YmRiODctYzkzZTNmZDUtYzcxOTUzOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.658102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01js2e1dfyemafsnfvfzy04mct, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjY4MGNmLTIwYTIwZDEzLTc5NTIwODliLTRjOGY3NzVk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.733360Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01js2e1dmh6n4ktfw1s9nbab9x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWJhMDNiYWItOGEzYzVmYmQtNDIwYjNkMzMtY2VjMzY0NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.807747Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01js2e1dpwbgmz5sb9e0j8hpcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA4Nzg0YjktYmJjMWQ4ZGQtOWFjNGY3M2EtZmQxM2EzYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.880974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01js2e1ds70w9wm90te2v8vg5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjZjRlMmItYjBhMTk4MjAtM2YyZjk3NDMtNWVhNTg1Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.951039Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01js2e1dvg43tjt57scmh79kgh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmY1OTRmNC1iZmZlODBjMS1hYzI2Y2EyMi1iNmY3NThjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.024715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01js2e1dxp9ntthf53jc2arypf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM2NWZjMTUtOTQwODFjN2EtYzcyZjcyNWMtZmQ3MTlkZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.086516Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01js2e1dzzd371q42xqz3m1bsm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjcxMjMzOTEtZWUxZTg1MjItOWE2YmYwYTYtY2Y4YTVjMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.155243Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01js2e1e1xb7ec2ty4kqn4gg47, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA2NjkzYTctMzZmODI0N2UtYTY1ZjY3ZjUtNTZiMGFhZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.287177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01js2e1e423vbvsjxam0t6kv13, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdlZmI5YzQtNDNlM2QxZDktN2EwNjM0ZDktNzkyMDc1MmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.356925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01js2e1e861n2e78g2pazgbr84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YwOGViNTYtOGM4NDdkZDMtNjE4YmI3ZmEtMmFjYTkzYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.426475Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01js2e1eab7kzf1mvhwhqvhzdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWExODY1OWMtMWY0OTI5YWUtNGNkYWVmNmEtYzNlODkzMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.493155Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01js2e1ecj47520qkdjhhh7whz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTExMWQ3NzYtYzRiNTM4MzMtNGEyM2U4YTctYzRiNDIzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.569859Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01js2e1eem47fpbphjy22cnxca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MmIzNmYtNDMxMDY0MzMtZmUwZDM1YTUtODE5OGViZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.641111Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01js2e1eh09969js117fk9t70r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRkMmJkNjgtNmQ5ZTU3MzUtZGM0YTUwMDUtOGQ2NjQzNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.712407Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01js2e1ek84hk72cbb8s6tg1sy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY4MGI5YzUtOGY5MjQ5MWQtMmE2YjZhMjAtODFmMWVjMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.813772Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01js2e1enf81376e5saewrjb48, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY1NTYwNjctNjY5Mjc4OTEtNDYxMGI4NGMtOWVhMzIwYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.884030Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01js2e1erm6ve3x62hfgfq4vcw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY4MzhiZGQtMjJhYjI2YWQtOGExZjA0MTAtZmMxNTk2MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.952794Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01js2e1ett9g3qsmgqkb1ce145, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc2YjUzYWEtYTY1YjQwOWEtYjZmMTg0NTQtMzdhZGQzZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.027885Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01js2e1ex3achc01c6t238vv0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUxOGUzYmYtZTczNmU0MjctMzZiZTFlOTYtY2U5YmQ1NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.191517Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01js2e1ezx87hhgfmemp723f2r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFhZTYyNjgtZTdiMzhhOTMtYTM3ZmJmODEtYTM3ZjAzMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> SystemView::TabletsShards [GOOD]
>> SystemView::TabletsFollowers
>> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice
|77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest
|77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
>> ExternalBlobsMultipleChannels::WithCompaction [GOOD]
Test command err: 2025-04-17T17:51:31.077894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:31.078492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:31.078550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e81/r3tmp/tmpmI5xl1/pdisk_1.dat 2025-04-17T17:51:31.484004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:31.576749Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:31.620165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:31.620302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:31.632519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:31.725114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:32.149122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:32.149267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:32.149359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:32.157049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:32.351950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:51:32.462877Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:32.839676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e169j0mx9jk5w9ghx8pz0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2MxOGE1MjgtMzZlNWE1Ni00N2JjYTJiNS1mN2ExNmY2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:32.918629Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e16zz103xyr400kk8ajbr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMwOWRjM2MtYTQ5ZTQ1MzEtMjc1MjY4YzItZjU4ZGQwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.039434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e172a269138db9ca5hv2q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY3ODg3ODgtNzQyYzY5Y2MtZTJhNWZkMDEtOTU1YzNjMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.180554Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e175zd0h23fmc5t2sv3pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI5MmY1YTAtMjA4YWJiNDAtMWE1OGI2LWY5NTk4Yjhl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.270131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2e17ahc55j1gxvy3s7nn5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI5ZTAwMjQtYWYxNWFhLTRhNmU3N2Q5LTI5NGZlZWFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.351962Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e17d8d899mdf6325hp5ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY0ZWM5OWYtYTA1NzYyOTYtMjQ5MDEwNmUtZTZhMjYyYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.474390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2e17fr8t76d3vxpr8z6y42, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGYyNzc0YzctOThmMWIyYTctOGEzMWI1YS0yNDJiZTI0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.545190Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2e17kp02p812mahq77axgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc4YTYyM2QtZjE3OWU2ODMtZmIzYzY0ZmEtMWIyYjk2ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.623490Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2e17nt1y9t81y1hrkzckj8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTMzMGI2ZTgtYzVkZGMzYTQtMThjMzJhNTAtZTQ3Mjk1N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.713616Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2e17ra54n9ye78fhb4kmsv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI0NjJiMmUtYWY5Y2U5MDYtMjE3NjliOC01NTljNjcxZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.856402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2e17v42kj9wvt7ma5m137t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFlZGZiY2QtOTI3MGEwZWEtNGUzZWUyMDgtMzVlYzZlMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:33.980738Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2e17zq7f49fcrxpp5eycrd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg1NjQzOTUtZWQxNGUxMDctNzA4OTZjZmQtYzMyODEwYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.104164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2e183n628adxgjjpwzbnb2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzViMWNlYTQtZWEyZTQzMGQtYjRjZWI1NDYtZTg0ZTUxMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.229258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2e187dajend3qbj756929b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkzNDNmNTktZTI1YmI1MGEtODU0ZjNhYzEtMzZlNzMwNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.329988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01js2e18b6ccr2nw6qee8ctc24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU3NWU4NTgtYTBkYzc3NjctNDA1ZDkxODgtMjUzMDRiYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.416648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2e18ebf1ddr7pwvpfpvqbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZkNmJiYTUtY2MxOWU2ZWQtYzM3NGI1N2YtOWFiZWUzYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.499313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2e18h11zv0tbgm6t460hgj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlNjRlY2YtNzdmMzc1NjYtNzFhNzZlMTYtYWY3ODM5ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.581373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2e18kn11d9kahtrzpzk06w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODExYTljY2QtNGZlMTQyZGYtODk2YjY3N2UtMmYwYTNlZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.658152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2e18p7ehvhfcr8hkva9ep6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkxYWE0OWItYjBjOGNhYTctMzkxZTU3MzctMjMyMTRmZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.786553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01js2e18rperegetb81qw6w239, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTgxYjI5OWMtMWE5ZGI1YzctMWQyMDU3M2UtNjIwYzA4ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.858750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2e18wndgjhrfkmb01erzvj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM2Yzc5ZjQtY2YwYTBkMmMtYjEzZWEwOGItNzFmZWEzODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:34.937941Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2e18yvfb7mnez4mp5ce1br, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5ZGI2OGMtYjBkMjE5MjQtYjk0ZmFhNi1iY2IxZGNlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.040508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01js2e191d4w9xas76c5savpnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYzZmYzNTctYTAwYzQ1NGYtZDRiZmFjM2YtYjc1ODIzNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.129890Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01js2e194g5p8mgyfs55n33e3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWUyYzQ0MDMtZmYyYmE1ZmEtZWU5MWZlZDItZmEzYTUyYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.201008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01js2e197c3kxgx59n99ertm56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYwN2Y4MWYtNWNkN2E1NGEtN2Y0MzZkNjYtOTY0ODhjMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:35.260679Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01js2e199h61nrqbmpshwmfzp8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFiZDFlNWQtMTgwNWJkMWMtZTQyMzY3MjktMjA2NjFmNWI=, Cu ... mMTk4OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.508574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01js2e1cdm1eycfxatk69rsz68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVlZTBhLWUxYWI3OTctNjkwNDlhZDQtNjI4MDNmNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.608724Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01js2e1ch25hcx2g74km7mg2fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIxNDYzYmItY2E0YjBjOTUtYTY1ZGU5OGMtNThjNGEzY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.699473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01js2e1cm194wdc1889t6tfczt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVhZGY5YjctZWQ4Y2I4ZTQtNzAwZjM3MzMtZTgyNWE4YjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.771279Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01js2e1cpwc63we02aatnnxf3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUyZDdkYTYtZGQxZGI1MS0zZjA5MGZjMC1iZjllNTIxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.854418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01js2e1cs53jtwwfava9kvg6mj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxODdhY2UtODNhOGUzY2ItZjVjZTJlYmQtYTYyYWY0OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:38.927227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01js2e1cvrawektzy5az86jy0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVlZjBkMzMtNGUwMzQzZDQtMTQ2OWE2YWYtYTBmYzU2YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.017187Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01js2e1cy47k1900wt7c853hfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDEwZDQ5YTktMTFiODg5N2ItYzVmM2U4MWMtNDYxZWEzN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.144165Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01js2e1d0v5vfebxz2j3ey3q0c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThlMzAxOGItYTI5Y2VlZjMtMWY0YjlmNDYtZmY3YTg1OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.249257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01js2e1d4t4904kff1sx0h3w8f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2JlNzk4NGQtMzVjNzQ5ZmUtMjQ2ZDdiNGItNjE0OWRjZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.324068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01js2e1d84fe3jqa1k08ftf9rp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjExNGQ0MDQtYjczODMzNjAtODllOTg3YzMtNzdlMzNmNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.502008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01js2e1daj6cb5gf1r9qxfmm01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRlYWNiNzUtYzlmM2M5NWQtYjEyZjRkZGUtZjE4ZDQ4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.608195Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01js2e1dgkaa7kebysp75w19vn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI1NzZmYzItZWJkOTRiMWQtN2NiNzJhMjgtYzk2OWJlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.690240Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01js2e1dkjbj6nxrek1cen6ken, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk2NTA2ZWUtYmY3ODU4MDYtMWZhNzRmNzYtYWZlNjk1M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.769672Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01js2e1dp8aht3rx95sfavhny7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJhMDBjNWItMzUwZmVjZjAtM2Y0MjZkZTYtNzMxY2ZiZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.857088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01js2e1drd0abpxc2wj7n4ttrm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE3MThlYTYtMTg2NDYzMGMtZmFjMGYwNzYtOGJjY2UzZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.949744Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01js2e1dv760cp479brp5haqxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFiZTJmYzgtZjBhZGM0OWEtZWZkMWYyYWUtZmY3ZjM4Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.032468Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01js2e1dy0cjra4thj6e6xeet6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI4M2MwMDYtYmJjODdmYjctZTZhNmIzNDMtMzNlMzMyYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.120718Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01js2e1e15amfr823pjfkkehne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdiNWNiNWUtYTNlOGU4YzgtZmVlNDBhZjQtN2M0Yjk0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.191950Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01js2e1e3d9ae6mfq6v59t580e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE0ZGM3MTQtOTc1NDYzNGMtNGY2MGNjNDktMTlhNjhiZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.283095Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01js2e1e5seggjkcw3cverkyb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIzNjExYzctNjM4MTI2MjktNzZkMDY3ZjUtYjRkNjk0MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.348445Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01js2e1e8d643mj7h6aa7thyk0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdlODcxYy0zZmE4OGFjZi03NDczYWUwMS03MjU5NTgxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.417985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01js2e1ead32zmq4qcs6asktsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYzMjQ2ZWItZjg0MDc4ZWQtOTY4NTFmYi01NzM2YWIyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.493552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01js2e1ecpc8w06jsf9aj7hemc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVhM2MxYWMtNTU5N2ZjOWItMWUzNzU4OS1hMjU0NGI3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.568157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01js2e1ef03v00jgmtm6zv4nm6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI0YTYwMDEtNDMzZWFjNDUtNTJkNjM5MTItMTJkYTNhZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.645648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01js2e1ehb297e7bpgbw6vzf2t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk0MmQwNjctOWIyOGQ2NzAtMThmY2NkZTAtYzM0ZDUyMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.728832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01js2e1eksb70cbg5xvt6m03r9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYzYmRmODUtZjQyMTM4NDgtMmZmNmZmZTMtYTRhOGU1NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.807917Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01js2e1epc4wbx599pqc1bkd8k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdmOTRlMTUtYjQzYWRkNDAtZjczNGFiMDEtYWJjYWQxM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.883899Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01js2e1erv4jqk7fynnwbt359v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIyMTI5OTItZWU5YzMxODktMjZhZWU0OC04MDAzNWMyZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.954070Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01js2e1ev60hcxr7yax125nydz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTUwMzAxZC1lMDg2MzAwYy1jY2Q3MjlhZC1kOGU0NGRlOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.025832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01js2e1exceq2h27bq4t1hnrpf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzAxZmNkODAtNGNhMzJhMTEtMWVjYTY4NzgtMWQ5N2Y3MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.096654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01js2e1ezk0y93ce5f66vp3x17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTUwODA0Yi0yYzRhOWY0OC02OTE2MzNhNi0xMGEwYmE1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.163565Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01js2e1f1t37mxr5np31bbmr8j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRiNzU3NWUtYjlkMDBhYWUtOTFhMjAyMjktZjgxMzgwYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.223137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01js2e1f3x6t4q2svcqt8htqy3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRjMTFmZmEtODRlMTVlODAtNjJlMjEwODEtZjExYzI5ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.283390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01js2e1f5r5ygvrd963cr95gc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhlZTkzN2UtYjFhYTViNWUtMjg4ODg0OTgtNmIyYjhmN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:41.302700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.758249Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01js2e1ffj484marejsvsn6j8d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI0YzczNWMtZTNlZGU0MGUtMmJjYjU4YzQtNjgwZGFkNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> Cdc::UpdatesLog[PqRunner] [GOOD]
>> Cdc::UpdatesLog[YdsRunner]
>> Cdc::KeysOnlyLogDebezium [GOOD]
>> Cdc::NewAndOldImagesLog[PqRunner]
>> TEnumerationTest::TestPublish [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest
>> YdbTableSplit::SplitByLoadWithUpdates [GOOD]
Test command err: 2025-04-17T17:51:15.090307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341158920840037:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:15.090356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00145d/r3tmp/tmpAxfbZV/pdisk_1.dat 2025-04-17T17:51:15.996604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:16.109846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:16.118469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:16.118527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:16.144558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4491, node 1 2025-04-17T17:51:16.408798Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:16.408819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:16.408830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:16.408948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21593 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:16.742167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21593 2025-04-17T17:51:19.487044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341176100710070:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.487169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.910256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:20.096224Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341158920840037:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:20.096291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:20.246574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341180395677539:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.246648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.257956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912280146 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912280146 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T17:51:20.427000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341180395677651:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.427087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.427396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341180395677656:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.443519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:51:20.443721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:20.443749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-17T17:51:20.443847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:20.443866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-17T17:51:20.443927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:20.443999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-17T17:51:20.444262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 1 -> 128 2025-04-17T17:51:20.444514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:20.444534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T17:51:20.515181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341180395677695:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.515271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341180395677697:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.515318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:20.530442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-17T17:51:20.531035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:51:20.548346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:51:20.548516Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710660:2 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:51:20.548731Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710660:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:51:20.548842Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710660:3, ProgressState 2025-04-17T17:51:20.548920Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPr ... n/3?node_id=1&id=ZDBjNzZmOTYtYzRjOWNjZmMtZDkwZDg4YzEtY2NjZjExNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.231373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714021. Ctx: { TraceId: 01js2e1e5xfq8bhmfaeeykwxgq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU1ZDc1ODMtYTk3NzEyYjQtODc0NTIyMzYtNTM3MmIzNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.234094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714022. Ctx: { TraceId: 01js2e1e6292ktvzcn4jf9e347, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFlYmQ4NzYtZmUxYTRkMDgtZTZjZDQyZmEtZWEyNGY3ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.247871Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714023. Ctx: { TraceId: 01js2e1e6mcvvvwsnajyg8n4ky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM4ZTIwNWMtZDQ2YzAyYTctNzEwMTFiY2YtY2IzNDUyOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.247995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714024. Ctx: { TraceId: 01js2e1e6pa6p8vkj478r70tbc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk0NjVhYWItYjc3Y2M5NjYtNzE5Yjc4MmQtMzIwZWRiOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.251627Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714025. Ctx: { TraceId: 01js2e1e6m6cw2p92dyf67d68f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QwZDJjMDEtNmNjNjY0NGYtZGI1ZDQyMzctNjY5YmE3ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.257974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714027. Ctx: { TraceId: 01js2e1e6tamxbnw40e04c1ra2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWZlMDRlNmYtY2VmNjE0ZjctODgxZGFiMjYtNjNmMDk0MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.258124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714026. 
Ctx: { TraceId: 01js2e1e6t9zf0nyvk13aj804x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBjNzZmOTYtYzRjOWNjZmMtZDkwZDg4YzEtY2NjZjExNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.265707Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714028. Ctx: { TraceId: 01js2e1e6x2ak6sy0597572tb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkyZWM5ZWUtOTFiN2M1ZWEtMzY4ZTI5YmMtYWI4MTU4YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.273419Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714030. Ctx: { TraceId: 01js2e1e6sewy2hrm86d161b08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQyMDdiNC04MjUwZGVjZi04OTIzOGI3Ni1hMjgzMDE0Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.276216Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714031. Ctx: { TraceId: 01js2e1e75dvnmxv4kdep5fzwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFlYmQ4NzYtZmUxYTRkMDgtZTZjZDQyZmEtZWEyNGY3ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.279509Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714032. Ctx: { TraceId: 01js2e1e7a4k05084bv7tqggv0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM4ZTIwNWMtZDQ2YzAyYTctNzEwMTFiY2YtY2IzNDUyOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.281291Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714029. Ctx: { TraceId: 01js2e1e6s54fntgxyctwzg52q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAxMzk2NjQtNjEwOGU5MTUtYzIwMDJhZDMtNzI0NjFlMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.289588Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714033. Ctx: { TraceId: 01js2e1e7a1pv2exepeg0smhnd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk0NjVhYWItYjc3Y2M5NjYtNzE5Yjc4MmQtMzIwZWRiOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.298490Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714034. Ctx: { TraceId: 01js2e1e7e1vttdsvcjg1jwngy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU1ZDc1ODMtYTk3NzEyYjQtODc0NTIyMzYtNTM3MmIzNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.302529Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714035. Ctx: { TraceId: 01js2e1e8019mcqy2tkamhcn2k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBjNzZmOTYtYzRjOWNjZmMtZDkwZDg4YzEtY2NjZjExNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.323580Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714036. Ctx: { TraceId: 01js2e1e812rfefb4xsc5bbd79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QwZDJjMDEtNmNjNjY0NGYtZGI1ZDQyMzctNjY5YmE3ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.330704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714037. 
Ctx: { TraceId: 01js2e1e8b5nvmsc451s6tx4gv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWZlMDRlNmYtY2VmNjE0ZjctODgxZGFiMjYtNjNmMDk0MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.336812Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714038. Ctx: { TraceId: 01js2e1e8c4zvvym7vhfqe1y9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkyZWM5ZWUtOTFiN2M1ZWEtMzY4ZTI5YmMtYWI4MTU4YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.348792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714040. Ctx: { TraceId: 01js2e1e9q72acvgtwqv46tv1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM4ZTIwNWMtZDQ2YzAyYTctNzEwMTFiY2YtY2IzNDUyOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.350755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714039. Ctx: { TraceId: 01js2e1e9g5zxba072xmzptqr9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFlYmQ4NzYtZmUxYTRkMDgtZTZjZDQyZmEtZWEyNGY3ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.353379Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714041. Ctx: { TraceId: 01js2e1e9q1sk41f9d9qg041yw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk0NjVhYWItYjc3Y2M5NjYtNzE5Yjc4MmQtMzIwZWRiOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.354801Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714042. Ctx: { TraceId: 01js2e1e9t8565ca85d4n4s4vn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU1ZDc1ODMtYTk3NzEyYjQtODc0NTIyMzYtNTM3MmIzNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.356086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714043. Ctx: { TraceId: 01js2e1e9ffvsfnv5xcvcxa03f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQyMDdiNC04MjUwZGVjZi04OTIzOGI3Ni1hMjgzMDE0Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.357388Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714044. Ctx: { TraceId: 01js2e1e9v9pdgqdp1e5tdde84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBjNzZmOTYtYzRjOWNjZmMtZDkwZDg4YzEtY2NjZjExNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-17T17:51:40.360692Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714045. Ctx: { TraceId: 01js2e1e9z429ghawgga9pma1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAxMzk2NjQtNjEwOGU5MTUtYzIwMDJhZDMtNzI0NjFlMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912280146 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T17:51:40.367572Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714046. Ctx: { TraceId: 01js2e1eab94gfwmeyxgn1yen6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkyZWM5ZWUtOTFiN2M1ZWEtMzY4ZTI5YmMtYWI4MTU4YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.367621Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714047. Ctx: { TraceId: 01js2e1eab9d0ydt6h2rbg33ga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QwZDJjMDEtNmNjNjY0NGYtZGI1ZDQyMzctNjY5YmE3ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.377787Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714048. Ctx: { TraceId: 01js2e1eak2trx0m0m1m31vrzj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWZlMDRlNmYtY2VmNjE0ZjctODgxZGFiMjYtNjNmMDk0MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:40.383630Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714049. Ctx: { TraceId: 01js2e1eak0445432nb0r03s7y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM4ZTIwNWMtZDQ2YzAyYTctNzEwMTFiY2YtY2IzNDUyOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912280146 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) Table has 2 shards
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest
>> YdbTableSplit::SplitByLoadWithDeletes [GOOD]
Test command err: 2025-04-17T17:51:14.349009Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341153588093077:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:14.349088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001468/r3tmp/tmpCqMKSq/pdisk_1.dat 2025-04-17T17:51:14.916969Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:14.954835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:14.954993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:14.963739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1804, node 1 2025-04-17T17:51:15.486436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:15.486455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:15.486462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:15.486997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:15.770109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:29574 2025-04-17T17:51:19.039565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175062930614:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.039653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.352280Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341153588093077:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:19.352363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:19.421163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:19.706696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175062930805:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.706796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.732996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912279593 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912279593 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T17:51:19.976791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175062930913:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.977007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.978070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175062930923:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.978190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175062930924:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.978224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175062930925:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.978259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.978358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175062930927:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.985147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:51:19.985631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:19.985676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710661:1, at schemeshard: 72057594046644480 2025-04-17T17:51:19.985790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:19.985807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:51:19.985869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:19.985942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710661:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-17T17:51:19.986259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710661:3 1 -> 128 2025-04-17T17:51:19.986562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:51:19.986600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:51:20.002984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-17T17:51:20.003142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), at schemeshard: 72057594046644480 2025-04-17T17:51:20.003712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710662:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-17T17:51:20.003796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [ ... ydb://session/3?node_id=1&id=Zjg1NWE0MjItYTUxNTdhYmEtNWYzODcwYWUtMzhmMjBhZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.816365Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714004. Ctx: { TraceId: 01js2e1drqdpjjw3hyb1m57k39, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA3OTdhNzMtNmUzY2RkMTgtNmMyY2RiMGItM2MxNzE2ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.817475Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714005. 
Ctx: { TraceId: 01js2e1drq8wn55zwchv246qrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI4OTRlZDYtODRhYzAwNDUtODNhMmNmZDUtZTMxOGQ4Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.846615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714013. Ctx: { TraceId: 01js2e1dt236wse3fgz9wddpfk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA4MGM4ZmEtZGZmNjYzNGYtY2I0NjE4NjgtZDgzOTRjN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.846683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714012. Ctx: { TraceId: 01js2e1dt20xh699mtesfxh12m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA4ODQ0OWItM2ZhNjFjMWEtYzFmN2RlYmMtMzE5ZGNj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.849758Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714014. Ctx: { TraceId: 01js2e1dt2ahw3qybes9q2d0y6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmMjE2YzctNTU4MDVhYjQtZjY4ZDUxYTYtODE5Y2M2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.853913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714017. Ctx: { TraceId: 01js2e1dtabkmg38v2bjtjbwn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg0M2ZjZjItZTcyMzY1NjgtYjFjYTI1N2EtNjhmYjNhYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.854458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714015. Ctx: { TraceId: 01js2e1dta0h7dyeadpwd4drz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRiYjI5OTQtNzkyNDg2NjgtNWI2MzQ4MzgtMzBiNjRjYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.855473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714016. Ctx: { TraceId: 01js2e1dta16b5akqd4z5wqys3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjg1NWE0MjItYTUxNTdhYmEtNWYzODcwYWUtMzhmMjBhZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.856708Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714019. Ctx: { TraceId: 01js2e1dtbfdadqb21hn1eekdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA3OTdhNzMtNmUzY2RkMTgtNmMyY2RiMGItM2MxNzE2ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.857830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714018. Ctx: { TraceId: 01js2e1dtbbym1mzcdnbkqk4me, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE4YjFhYTMtOWYyNTMzYy0yMzVlYTNkOS04MGM1MDdhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.863127Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714020. Ctx: { TraceId: 01js2e1dtj7m9xv7w2p6qganev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljZmYwM2QtNTRkYWRkMjEtZjlmYzNkZmUtNmQzYTM1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.866109Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714021. 
Ctx: { TraceId: 01js2e1dtp5b42qxrnzf83ae5t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI4OTRlZDYtODRhYzAwNDUtODNhMmNmZDUtZTMxOGQ4Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.874287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714023. Ctx: { TraceId: 01js2e1dv0e3t5j88kaye68x0c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmMjE2YzctNTU4MDVhYjQtZjY4ZDUxYTYtODE5Y2M2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.875353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714022. Ctx: { TraceId: 01js2e1dv0b69npjw3qp68wy27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA4MGM4ZmEtZGZmNjYzNGYtY2I0NjE4NjgtZDgzOTRjN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.878504Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714024. Ctx: { TraceId: 01js2e1dv33ehfqbqfdaswn00y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA4ODQ0OWItM2ZhNjFjMWEtYzFmN2RlYmMtMzE5ZGNj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.880369Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714025. Ctx: { TraceId: 01js2e1dv368bwnw6f76afdj6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg0M2ZjZjItZTcyMzY1NjgtYjFjYTI1N2EtNjhmYjNhYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.884280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714026. Ctx: { TraceId: 01js2e1dva9y7vwy27a5vxk5fq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRiYjI5OTQtNzkyNDg2NjgtNWI2MzQ4MzgtMzBiNjRjYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.890921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714027. Ctx: { TraceId: 01js2e1dve6ydf3m27jyn0mfgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE4YjFhYTMtOWYyNTMzYy0yMzVlYTNkOS04MGM1MDdhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.891620Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714028. Ctx: { TraceId: 01js2e1dvh5r2q0k0w9zzrkq2k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjg1NWE0MjItYTUxNTdhYmEtNWYzODcwYWUtMzhmMjBhZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-17T17:51:39.900316Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714029. Ctx: { TraceId: 01js2e1dvrac6b04wp9rjrywzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA3OTdhNzMtNmUzY2RkMTgtNmMyY2RiMGItM2MxNzE2ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.904768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714030. Ctx: { TraceId: 01js2e1dvrfg8aap5ega51z9yp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljZmYwM2QtNTRkYWRkMjEtZjlmYzNkZmUtNmQzYTM1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.907750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714031. 
Ctx: { TraceId: 01js2e1dvyb4nyrh1md5rqqpw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI4OTRlZDYtODRhYzAwNDUtODNhMmNmZDUtZTMxOGQ4Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.908733Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714032. Ctx: { TraceId: 01js2e1dw05bmn6fhpcd443jpa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRiYjI5OTQtNzkyNDg2NjgtNWI2MzQ4MzgtMzBiNjRjYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.908964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714033. Ctx: { TraceId: 01js2e1dw10sq7xrg7azvwjp9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg0M2ZjZjItZTcyMzY1NjgtYjFjYTI1N2EtNjhmYjNhYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-04-17T17:51:39.909799Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714034. Ctx: { TraceId: 01js2e1dvza3m11dyb6btcj16x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmMjE2YzctNTU4MDVhYjQtZjY4ZDUxYTYtODE5Y2M2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912279593 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T17:51:39.910891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714035. Ctx: { TraceId: 01js2e1dw06zng0y7nq9pbny6y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA4ODQ0OWItM2ZhNjFjMWEtYzFmN2RlYmMtMzE5ZGNj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.911277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714036. Ctx: { TraceId: 01js2e1dvz64fe86cz936324gj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA4MGM4ZmEtZGZmNjYzNGYtY2I0NjE4NjgtZDgzOTRjN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.915030Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714037. Ctx: { TraceId: 01js2e1dw672brdqzjy7yy3x71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE4YjFhYTMtOWYyNTMzYy0yMzVlYTNkOS04MGM1MDdhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:39.916061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714038. Ctx: { TraceId: 01js2e1dw63t3h8d1zce1mhpg3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjg1NWE0MjItYTUxNTdhYmEtNWYzODcwYWUtMzhmMjBhZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912279593 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] >> SystemView::AuthOwners [GOOD] >> SystemView::AuthOwners_Access >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink >> SystemView::DescribeSystemFolder [GOOD] >> SystemView::DescribeAccessDenied |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> KqpProxy::PassErrroViaSessionActor >> SystemView::AuthUsers_TableRange [GOOD] >> SystemView::AuthPermissions_ResultOrder >> TCdcStreamTests::Basic >> TCdcStreamTests::VirtualTimestamps >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> TableCreation::ConcurrentTableCreation >> TSchemeShardServerLess::StorageBilling [GOOD] >> ScriptExecutionsTest::RunCheckLeaseStatus >> KqpProxy::NoLocalSessionExecution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:50:56.915732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:50:56.915824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.915862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:50:56.915902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:50:56.915970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:50:56.916015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:50:56.916075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.916144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:50:56.916512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:50:56.996641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:50:56.996707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:57.017561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:50:57.017928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:50:57.018116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:50:57.038625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:50:57.038817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:57.039501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.039724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:50:57.042990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.043865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.043906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.044038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:50:57.044077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.044103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:57.044180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.049168Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:50:57.199191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:57.199404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.199606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:57.199903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:50:57.199959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.207143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.207281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:57.207467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.207529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:57.207565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:57.207593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:57.209506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.209559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:57.209593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:57.211286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.211332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.211365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.211414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.215018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:57.217190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:57.217386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:50:57.218301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:57.218412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:57.218468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.218724Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:57.218782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:57.218917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:57.219001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:57.220851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:57.220899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:57.221068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:57.221112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:57.221450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:57.221492Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:50:57.221576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.221607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.221646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:57.221686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.221729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:57.221786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:57.221815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:57.221841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:57.221898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:57.221930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:57.221960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:50:57.223666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.223774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:57.223809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T17:51:45.499305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-17T17:51:45.499370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72075186233409549 2025-04-17T17:51:45.499498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-17T17:51:45.499633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-17T17:51:45.499697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-17T17:51:45.502081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-17T17:51:45.503419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-17T17:51:45.503691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-04-17T17:51:45.503730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-04-17T17:51:45.503930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 2] 2025-04-17T17:51:45.504125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-04-17T17:51:45.504168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:655:2567], at schemeshard: 72075186233409549, txId: 107, path id: 1 2025-04-17T17:51:45.504211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:655:2567], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-04-17T17:51:45.504701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-17T17:51:45.504751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-04-17T17:51:45.504833Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-17T17:51:45.504865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-04-17T17:51:45.504901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-17T17:51:45.505703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-17T17:51:45.505807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-17T17:51:45.505840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72075186233409549, txId: 107 2025-04-17T17:51:45.505875Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-04-17T17:51:45.505911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-04-17T17:51:45.506663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-17T17:51:45.506736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-17T17:51:45.506763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-04-17T17:51:45.506809Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:51:45.506836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-04-17T17:51:45.506894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-17T17:51:45.509447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-17T17:51:45.509499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-04-17T17:51:45.509861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-17T17:51:45.510007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-17T17:51:45.510041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:51:45.510076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-17T17:51:45.510107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:51:45.510141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-17T17:51:45.510240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:796:2677] message: TxId: 107 2025-04-17T17:51:45.510282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:51:45.510319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-17T17:51:45.510355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-17T17:51:45.510468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-04-17T17:51:45.511836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-04-17T17:51:45.511986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72075186233409549, cookie: 107 2025-04-17T17:51:45.513795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-17T17:51:45.513842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2208:4053] TestWaitNotification: OK eventTxId 107 2025-04-17T17:51:45.532633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 768 RawX2: 4294969953 } TabletId: 72075186233409552 State: 4 2025-04-17T17:51:45.532731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-04-17T17:51:45.534706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-04-17T17:51:45.535199Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2025-04-17T17:51:45.537970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-04-17T17:51:45.539270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-04-17T17:51:45.540353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-04-17T17:51:45.540404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-04-17T17:51:45.540479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-17T17:51:45.543122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2025-04-17T17:51:45.543186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-04-17T17:51:45.543743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-04-17T17:51:45.697871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-17T17:51:45.697986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-17T17:51:45.698049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-17T17:51:45.698110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-17T17:51:45.698146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-17T17:51:45.698196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-17T17:51:45.698239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-17T17:51:45.698281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:51:45.698322Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:51:45.770060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:51:45.770427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.027000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2025-04-17T17:51:45.777500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2025-04-17T17:51:45.777681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> TableCreation::MultipleTablesCreation >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, b} | 2 6 86b {2, NULL} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, b} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, NULL} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, baaaa} | 2 6 86b {2, aaa} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, baaaa} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, aaa} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 
10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0, 2), [2, 3), [3, 4), [4, 6), [6, 8), [8, 9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b 
{1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} 
| PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | P ... 
{Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 
6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + 
BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: 
{4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> KqpProxy::InvalidSessionID >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TableCreation::ConcurrentUpdateTable >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::CreateOldTable >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> ShowCreateView::WithTwoTablePathPrefixes [GOOD] >> SystemView::AuthGroups >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TableCreation::ConcurrentUpdateTable [GOOD] >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |77.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> 
SystemView::DescribeAccessDenied [GOOD] >> SystemView::CollectScanQueries >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] Test command err: 2025-04-17T17:51:46.159795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341290473407373:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:46.162507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00199b/r3tmp/tmpgH7264/pdisk_1.dat 2025-04-17T17:51:46.607250Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:46.633166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:46.633295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:46.635263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6534 TServer::EnableGrpc on GrpcPort 29680, node 1 2025-04-17T17:51:46.961807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:46.961836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:46.961843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:46.961984Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:51:47.101054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:51:47.115048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:49.340577Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:49.342013Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:49.351406Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.351456Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.351475Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:49.351496Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:51:49.351512Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:49.355192Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.358461Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.358470Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:51:49.358507Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:51:49.358746Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.358750Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:51:49.358765Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:51:49.362697Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.362713Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:51:49.362736Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:51:49.371378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-17T17:51:49.374263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:49.375969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:51:49.386031Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:51:49.386095Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:51:49.386378Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:51:49.386415Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
Subscribe on create table tx: 281474976710658 2025-04-17T17:51:49.388364Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:51:49.388391Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710660 2025-04-17T17:51:49.533363Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:51:49.580883Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-17T17:51:49.588797Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:51:49.591561Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:51:49.640661Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:51:49.664680Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:51:49.665050Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 1c18e48c-3b2a946a-8c93a2a-c658092c, Bootstrap. Database: /dc-1 2025-04-17T17:51:49.692422Z node 1 :KQP_PROXY DEBUG: Request has 18444999161399.859219s seconds to be completed 2025-04-17T17:51:49.695100Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZmU1NzJkZGQtMjczMzM1NjUtNzA4M2YyM2ItMjJkMmFkZGU=, workerId: [1:7494341303358310141:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:51:49.695223Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:49.695988Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 1c18e48c-3b2a946a-8c93a2a-c658092c, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:51:49.696544Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZmU1NzJkZGQtMjczMzM1NjUtNzA4M2YyM2ItMjJkMmFkZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7494341303358310141:2333] 2025-04-17T17:51:49.696596Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494341303358310143:2467] 2025-04-17T17:51:49.698488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341303358310144:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:49.698580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:49.702280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341303358310156:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:49.706542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:51:49.716523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341303358310158:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:51:49.819485Z node 1 :TX_PROXY ERROR: Actor# [1:7494341303358310200:2499] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:50.355230Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: p ... ater. Request: create. Transaction completed: 281474976715684. Doublechecking... 2025-04-17T17:51:55.139303Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715684. Doublechecking... 2025-04-17T17:51:55.139317Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715684. Doublechecking... 2025-04-17T17:51:55.139329Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715684. Doublechecking... 2025-04-17T17:51:55.158540Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.162641Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.162756Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.162781Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.162805Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.162827Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.162878Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.165996Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.174400Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.178420Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.181201Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.183061Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.183983Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.188586Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.189049Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.190051Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.190558Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.191109Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.192609Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.193157Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.195722Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.196340Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. 
Column diff is empty, finishing 2025-04-17T17:51:55.196805Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.200885Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-17T17:51:55.200892Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.202410Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.203344Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.208629Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.212157Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.214365Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.215729Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.218724Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.224754Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.224802Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.230590Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.230672Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.233200Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.233565Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.234640Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.235696Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-17T17:51:55.242676Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7494341331002969332:2364], selfId: [2:7494341313823098984:2268], source: [2:7494341331002969329:2363] 2025-04-17T17:51:55.244433Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjY3NGRjNjItNWE4MDlkOWEtNDM1ZDQ3MzQtNGI2ZDJjZDg=, TxId: 2025-04-17T17:51:55.244468Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjY3NGRjNjItNWE4MDlkOWEtNDM1ZDQ3MzQtNGI2ZDJjZDg=, TxId: 2025-04-17T17:51:55.244598Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, start saving rows range [0; 1) 2025-04-17T17:51:55.244655Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, Bootstrap. 
Database: /dc-1 2025-04-17T17:51:55.244969Z node 2 :KQP_PROXY DEBUG: Request has 18444999161394.306672s seconds to be completed 2025-04-17T17:51:55.246857Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=Y2Q3NjdhZmItM2EwMGQ4YS0zNzE4OTZhMS1mNzRiNzA4Mw==, workerId: [2:7494341331002969616:2378], database: /dc-1, longSession: 1, local sessions count: 4 2025-04-17T17:51:55.247086Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:55.247197Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NjY3NGRjNjItNWE4MDlkOWEtNDM1ZDQ3MzQtNGI2ZDJjZDg=, workerId: [2:7494341331002969329:2363], local sessions count: 3 2025-04-17T17:51:55.247819Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional<Timestamp>; DECLARE $items AS List<Struct<row_id: Int64, result_set: String, accumulated_size: Int64>>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-17T17:51:55.248154Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=Y2Q3NjdhZmItM2EwMGQ4YS0zNzE4OTZhMS1mNzRiNzA4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7494341331002969616:2378] 2025-04-17T17:51:55.248182Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7494341331002969618:3070] 2025-04-17T17:51:55.293550Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Y2Y4NmUwYzktZmE2YzVlOGYtYjllNWNmZmYtMzAzMzExMmM=, workerId: [2:7494341331002968967:2361], local sessions count: 2 2025-04-17T17:51:55.389491Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7494341331002969617:2379], selfId: [2:7494341313823098984:2268], source: [2:7494341331002969616:2378] 2025-04-17T17:51:55.390164Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2Q3NjdhZmItM2EwMGQ4YS0zNzE4OTZhMS1mNzRiNzA4Mw==, TxId: 2025-04-17T17:51:55.390208Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2Q3NjdhZmItM2EwMGQ4YS0zNzE4OTZhMS1mNzRiNzA4Mw==, TxId: 2025-04-17T17:51:55.390309Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, result part successfully saved 2025-04-17T17:51:55.390321Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, reply SUCCESS, issues: 2025-04-17T17:51:55.390526Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, Bootstrap.
Database: /dc-1 2025-04-17T17:51:55.390671Z node 2 :KQP_PROXY DEBUG: Request has 18444999161394.160956s seconds to be completed 2025-04-17T17:51:55.392486Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NDdmZGMzN2YtZGE3ZWFiZjgtNGFhMGE2NzMtZGI0NjEyY2M=, workerId: [2:7494341331002969657:2391], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T17:51:55.392596Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:55.392884Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Y2Q3NjdhZmItM2EwMGQ4YS0zNzE4OTZhMS1mNzRiNzA4Mw==, workerId: [2:7494341331002969616:2378], local sessions count: 2 2025-04-17T17:51:55.392970Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 15bc42d-56dc7b6f-17b302eb-5db09ca1, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:51:55.393228Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NDdmZGMzN2YtZGE3ZWFiZjgtNGFhMGE2NzMtZGI0NjEyY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7494341331002969657:2391] 2025-04-17T17:51:55.393269Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7494341331002969660:3090] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2025-04-17T17:51:46.169767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341291772656962:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:46.171714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a0/r3tmp/tmpWx2lqc/pdisk_1.dat 2025-04-17T17:51:46.700153Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:46.755189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:46.755266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:46.759220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23300 TServer::EnableGrpc on GrpcPort 12292, node 1 2025-04-17T17:51:47.166104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:47.166130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:47.166140Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-17T17:51:47.166292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:51:47.318467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:47.332653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:49.302522Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:49.305036Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:49.309380Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:49.309404Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:51:49.309433Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:49.309468Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:49.312743Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.312789Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.312808Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.313549Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.313558Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:51:49.313611Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:51:49.313715Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.313721Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:51:49.313734Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:51:49.317774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T17:51:49.321915Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Describe result: PathErrorUnknown 2025-04-17T17:51:49.321952Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:51:49.321979Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:51:49.322723Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.328168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:51:49.329357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:51:49.350281Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:51:49.350353Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-17T17:51:49.352538Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:51:49.352570Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-17T17:51:49.353161Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:51:49.353179Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:51:49.491913Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-17T17:51:49.548044Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:51:49.560581Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:51:49.570286Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:51:49.638363Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:51:49.646442Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:51:49.646855Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: b2f83a05-f9a06ac9-c11c764f-86e439f8, Bootstrap. 
Database: /dc-1 2025-04-17T17:51:49.667409Z node 1 :KQP_PROXY DEBUG: Request has 18444999161399.884235s seconds to be completed 2025-04-17T17:51:49.670142Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=MmMxMTQ3NDAtNWI0OTY4ODItN2VmOGM2ZWYtY2Y5MDhkNjA=, workerId: [1:7494341304657559684:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:51:49.670328Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:49.671415Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: b2f83a05-f9a06ac9-c11c764f-86e439f8, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:51:49.672001Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MmMxMTQ3NDAtNWI0OTY4ODItN2VmOGM2ZWYtY2Y5MDhkNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7494341304657559684:2333] 2025-04-17T17:51:49.672076Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494341304657559686:2469] 2025-04-17T17:51:49.673824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341304657559687:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:49.673907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341304657559699:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:49.673967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:49.677798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:51:49.691556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341304657559701:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:51:49.774121Z node 1 :TX_PROXY ERROR: Actor# [1:7494341304657559743:2501] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/p ... tateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:51:55.091330Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:51:55.091347Z node 2 :TX_PROXY ERROR: Actor# [2:7494341330101407536:2632] txid# 281474976715668, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-04-17T17:51:55.091412Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715673 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:51:55.091418Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:51:55.091480Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715668 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:51:55.091485Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:51:55.091512Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715671 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:51:55.091520Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:51:55.091579Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715674 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:51:55.091584Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-17T17:51:55.106657Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715669. Doublechecking... 2025-04-17T17:51:55.149642Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7494341325806440176:2366], selfId: [2:7494341312921537434:2277], source: [2:7494341325806440174:2365] 2025-04-17T17:51:55.149900Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 73a40a38-17558113-332cde55-881d9306, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzdhMmNmNGItYzlmZjA0NzgtOWY3MzVkYjItNzlkMWM2ZjA=, TxId: 2025-04-17T17:51:55.149921Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 73a40a38-17558113-332cde55-881d9306, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzdhMmNmNGItYzlmZjA0NzgtOWY3MzVkYjItNzlkMWM2ZjA=, TxId: 2025-04-17T17:51:55.150043Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 73a40a38-17558113-332cde55-881d9306, start saving rows range [0; 1) 2025-04-17T17:51:55.150109Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 73a40a38-17558113-332cde55-881d9306, Bootstrap. Database: /dc-1 2025-04-17T17:51:55.150336Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MzdhMmNmNGItYzlmZjA0NzgtOWY3MzVkYjItNzlkMWM2ZjA=, workerId: [2:7494341325806440174:2365], local sessions count: 2 2025-04-17T17:51:55.150421Z node 2 :KQP_PROXY DEBUG: Request has 18444999161394.401207s seconds to be completed 2025-04-17T17:51:55.150921Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-17T17:51:55.152187Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjIwMjFkNWYtMTY2ZTgwYzEtNDFkYTRjYjItMzUzZDgxMTM=, workerId: [2:7494341330101407624:2376], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T17:51:55.152319Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:55.152562Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 73a40a38-17558113-332cde55-881d9306, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional<Timestamp>; DECLARE $items AS List<Struct<row_id: Int64, result_set: String, accumulated_size: Int64>>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-17T17:51:55.152884Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjIwMjFkNWYtMTY2ZTgwYzEtNDFkYTRjYjItMzUzZDgxMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7494341330101407624:2376] 2025-04-17T17:51:55.152913Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7494341330101407626:2695] 2025-04-17T17:51:55.156536Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.161678Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.169403Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.171933Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.172334Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.174575Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.175144Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.178543Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:55.186297Z node 2 :KQP_PROXY DEBUG: Table test_table updater.
Column diff is empty, finishing 2025-04-17T17:51:55.219662Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NTBjNTkxMjYtZDU4ZmIxNmUtNWRjZDZlOTEtODdiNTMxZGY=, workerId: [2:7494341325806440103:2361], local sessions count: 2 2025-04-17T17:51:55.291549Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7494341330101407625:2377], selfId: [2:7494341312921537434:2277], source: [2:7494341330101407624:2376] 2025-04-17T17:51:55.291844Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 73a40a38-17558113-332cde55-881d9306, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjIwMjFkNWYtMTY2ZTgwYzEtNDFkYTRjYjItMzUzZDgxMTM=, TxId: 2025-04-17T17:51:55.291870Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 73a40a38-17558113-332cde55-881d9306, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjIwMjFkNWYtMTY2ZTgwYzEtNDFkYTRjYjItMzUzZDgxMTM=, TxId: 2025-04-17T17:51:55.292002Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 73a40a38-17558113-332cde55-881d9306, result part successfully saved 2025-04-17T17:51:55.292017Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 73a40a38-17558113-332cde55-881d9306, reply SUCCESS, issues: 2025-04-17T17:51:55.292194Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZjIwMjFkNWYtMTY2ZTgwYzEtNDFkYTRjYjItMzUzZDgxMTM=, workerId: [2:7494341330101407624:2376], local sessions count: 1 2025-04-17T17:51:55.292350Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 73a40a38-17558113-332cde55-881d9306, Bootstrap. Database: /dc-1 2025-04-17T17:51:55.292463Z node 2 :KQP_PROXY DEBUG: Request has 18444999161394.259165s seconds to be completed 2025-04-17T17:51:55.294482Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MTJkMWFiZS1mZjBiOWJhLTM1YWU5ODcxLWFlNTc0ZGU0, workerId: [2:7494341330101407670:2388], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:51:55.294645Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:55.295384Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 73a40a38-17558113-332cde55-881d9306, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:51:55.295733Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTJkMWFiZS1mZjBiOWJhLTM1YWU5ODcxLWFlNTc0ZGU0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 12, targetId: [2:7494341330101407670:2388] 2025-04-17T17:51:55.295773Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7494341330101407672:2720] >> TableCreation::CreateOldTable [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> SystemView::AuthOwners_Access [GOOD] >> SystemView::AuthOwners_ResultOrder >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2025-04-17T17:51:49.002802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341299456547571:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:49.014471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001982/r3tmp/tmpM5Wh6i/pdisk_1.dat 2025-04-17T17:51:49.447058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:49.461412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:49.461489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:49.464231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20087 TServer::EnableGrpc on GrpcPort 4479, node 1 2025-04-17T17:51:49.681069Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:49.681087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:49.681093Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:49.681201Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:51:49.801362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:51:51.681889Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:51.684247Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:51.685473Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:51.685518Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:51:51.685535Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:51.685565Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:51.685662Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.685701Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.685769Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.685938Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.686555Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:51:51.686570Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:51:51.686614Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:51:51.686738Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:51:51.686750Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:51:51.686773Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:51:51.687529Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-17T17:51:51.687541Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:51:51.687576Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:51:51.690569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T17:51:51.692154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:51:51.694480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:51:51.699114Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:51:51.699166Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-04-17T17:51:51.699225Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:51:51.699236Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:51:51.704017Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:51:51.704053Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-17T17:51:51.827030Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-17T17:51:51.860602Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:51:51.890029Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:51:51.911544Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:51:51.938530Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:51:51.952416Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:51:51.952817Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: bb197db-a721c61e-eb9dbc66-8d25415, Bootstrap. Database: /dc-1 2025-04-17T17:51:51.973077Z node 1 :KQP_PROXY DEBUG: Request has 18444999161397.578560s seconds to be completed 2025-04-17T17:51:51.975332Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZDZmM2QwNTMtY2UzNDFkNGEtNDJmM2QwMWMtMTkyNGVlODA=, workerId: [1:7494341312341450209:2332], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:51:51.975473Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:51.976208Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: bb197db-a721c61e-eb9dbc66-8d25415, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:51:51.976601Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZDZmM2QwNTMtY2UzNDFkNGEtNDJmM2QwMWMtMTkyNGVlODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7494341312341450209:2332] 2025-04-17T17:51:51.976628Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494341312341450212:2464] 2025-04-17T17:51:51.978042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341312341450223:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.978104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341312341450211:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.978219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.980811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:51:51.989204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341312341450226:2338], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:51:52.045910Z node 1 :TX_PROXY ERROR: Actor# [1:7494341316636417565:2496] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" se ... 2025-04-17T17:51:56.901824Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YjU3YTIxMjAtNGI5NzIzMjMtMTkwNDc2NjEtNzk2NjFiNTA=, workerId: [2:7494341331625256543:2351], database: dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:51:56.901950Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:56.902021Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTY0MTNkNmUtZGMyY2ZmODAtYzMwZWU0YzUtNGU2N2JhYjI=, workerId: [2:7494341331625256425:2333], local sessions count: 1 2025-04-17T17:51:56.902232Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YjU3YTIxMjAtNGI5NzIzMjMtMTkwNDc2NjEtNzk2NjFiNTA=, CurrentExecutionId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7494341331625256543:2351] 2025-04-17T17:51:56.902272Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7494341331625256544:2528] 2025-04-17T17:51:56.918270Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2e1yfp629320tyg6wxgqcq", Request has 18444999161392.633367s seconds to be completed 2025-04-17T17:51:56.919886Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2e1yfp629320tyg6wxgqcq", Created new session, sessionId: ydb://session/3?node_id=2&id=ZjFkMDg3MDMtZGE1OWQzMWQtM2Q5NjE2NzktNjRmMDZlMzI=, workerId: [2:7494341331625256559:2361], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:51:56.920015Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01js2e1yfp629320tyg6wxgqcq 2025-04-17T17:51:56.922514Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-17T17:51:56.922535Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-17T17:51:56.922562Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-17T17:51:56.925297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-04-17T17:51:56.926414Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-17T17:51:56.926450Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-04-17T17:51:56.949481Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, Bootstrap. 
Database: /dc-1 2025-04-17T17:51:56.949663Z node 2 :KQP_PROXY DEBUG: Request has 18444999161392.601975s seconds to be completed 2025-04-17T17:51:56.951368Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MjgxY2Y2ZDktY2JmM2MxMzEtY2RlNTAxMWEtZDIyM2YwZTY=, workerId: [2:7494341331625256635:2365], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T17:51:56.951488Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:56.951565Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7494341331625256422:2462], selfId: [2:7494341318740353879:2277], source: [2:7494341331625256543:2351] 2025-04-17T17:51:56.951880Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:51:56.952193Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MjgxY2Y2ZDktY2JmM2MxMzEtY2RlNTAxMWEtZDIyM2YwZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7494341331625256635:2365] 2025-04-17T17:51:56.952217Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7494341331625256639:2581] 2025-04-17T17:51:56.961687Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-04-17T17:51:57.043170Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:57.044299Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:51:57.070482Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZjFkMDg3MDMtZGE1OWQzMWQtM2Q5NjE2NzktNjRmMDZlMzI=, workerId: [2:7494341331625256559:2361], local sessions count: 2 2025-04-17T17:51:57.123804Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7494341331625256638:2366], selfId: [2:7494341318740353879:2277], source: [2:7494341331625256635:2365] 2025-04-17T17:51:57.124555Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjgxY2Y2ZDktY2JmM2MxMzEtY2RlNTAxMWEtZDIyM2YwZTY=, TxId: 2025-04-17T17:51:57.124578Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjgxY2Y2ZDktY2JmM2MxMzEtY2RlNTAxMWEtZDIyM2YwZTY=, TxId: 2025-04-17T17:51:57.124699Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, start saving rows range [0; 1) 2025-04-17T17:51:57.124767Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, Bootstrap. 
Database: /dc-1 2025-04-17T17:51:57.125071Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MjgxY2Y2ZDktY2JmM2MxMzEtY2RlNTAxMWEtZDIyM2YwZTY=, workerId: [2:7494341331625256635:2365], local sessions count: 1 2025-04-17T17:51:57.125095Z node 2 :KQP_PROXY DEBUG: Request has 18444999161392.426529s seconds to be completed 2025-04-17T17:51:57.126751Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MmQ3ZDJiMjUtNWJhNmQ1NTYtODNlMTY3OGYtZDYxNWEwYTI=, workerId: [2:7494341335920223989:2377], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:51:57.126871Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:57.127163Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-17T17:51:57.129278Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MmQ3ZDJiMjUtNWJhNmQ1NTYtODNlMTY3OGYtZDYxNWEwYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7494341335920223989:2377] 2025-04-17T17:51:57.129304Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7494341335920223991:2614] 2025-04-17T17:51:57.259665Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7494341335920223990:2378], selfId: [2:7494341318740353879:2277], source: [2:7494341335920223989:2377] 2025-04-17T17:51:57.259906Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmQ3ZDJiMjUtNWJhNmQ1NTYtODNlMTY3OGYtZDYxNWEwYTI=, TxId: 2025-04-17T17:51:57.259935Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmQ3ZDJiMjUtNWJhNmQ1NTYtODNlMTY3OGYtZDYxNWEwYTI=, TxId: 2025-04-17T17:51:57.260040Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, result part successfully saved 2025-04-17T17:51:57.260058Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, reply SUCCESS, issues: 2025-04-17T17:51:57.260522Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MmQ3ZDJiMjUtNWJhNmQ1NTYtODNlMTY3OGYtZDYxNWEwYTI=, workerId: [2:7494341335920223989:2377], local sessions count: 1 2025-04-17T17:51:57.260603Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, Bootstrap. 
Database: /dc-1 2025-04-17T17:51:57.260683Z node 2 :KQP_PROXY DEBUG: Request has 18444999161392.290943s seconds to be completed 2025-04-17T17:51:57.262342Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YjRlZWQ2YS0yNDRjOTBkLWJmYjhkODMwLWUzNTIyNDVk, workerId: [2:7494341335920224018:2387], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:51:57.262463Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:57.262656Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 6ee5def1-f00e06c3-cfc85a69-47f05fc0, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:51:57.262897Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YjRlZWQ2YS0yNDRjOTBkLWJmYjhkODMwLWUzNTIyNDVk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7494341335920224018:2387] 2025-04-17T17:51:57.262926Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7494341335920224020:2625] >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-04-17T17:51:47.221742Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341295974903733:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:47.221932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:51:47.314903Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341293849596778:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:47.315500Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00198b/r3tmp/tmpdwpC53/pdisk_1.dat 2025-04-17T17:51:48.062641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:48.062725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:48.064720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:48.064809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-17T17:51:48.076574Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:51:48.076759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:48.084340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:48.114957Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25717 2025-04-17T17:51:51.056528Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:51.059835Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:51.064383Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=Yjc2NjQ4ZWUtNzBmNzBmZDMtMTk0YzY5MTctNzRkODNhMDc=, workerId: [2:7494341311029466254:2307], database: , longSession: 1, local sessions count: 1 2025-04-17T17:51:51.064428Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:51.064613Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:51.064660Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:51.064686Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:51:51.064702Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:51.065579Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:51.065638Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.065657Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.065727Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.078289Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.081036Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.117615Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:51.124018Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:51.125774Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=Yjc2NjQ4ZWUtNzBmNzBmZDMtMTk0YzY5MTctNzRkODNhMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-04-17T17:51:51.125834Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7494341313154773516:2483] 2025-04-17T17:51:51.126321Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:51.126373Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-17T17:51:51.126400Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:51.126446Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:51.126517Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.126549Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.133593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341313154773517:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.133685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.133726Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.133758Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:51.138265Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=Yjc2NjQ4ZWUtNzBmNzBmZDMtMTk0YzY5MTctNzRkODNhMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7494341311029466254:2307] 2025-04-17T17:51:51.138319Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7494341311029466280:2119] 2025-04-17T17:51:51.155765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341311029466279:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.155829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.557357Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2e1rvj1hny7va4cmcc0qn5", Created new session, sessionId: ydb://session/3?node_id=2&id=MTJkNDhiMDktOWRmOGQ1MDgtMzA3MzNhMTUtZTEwZjcyNjA=, workerId: [2:7494341311029466293:2312], database: , longSession: 0, local sessions count: 2 2025-04-17T17:51:51.557613Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2e1rvj1hny7va4cmcc0qn5, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTJkNDhiMDktOWRmOGQ1MDgtMzA3MzNhMTUtZTEwZjcyNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7494341311029466293:2312] 2025-04-17T17:51:51.557644Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7494341311029466294:2123] 2025-04-17T17:51:51.559428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341311029466295:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.559552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.559908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341311029466300:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:51.576783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:51:51.646430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341311029466302:2317], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:51:51.840683Z node 2 :TX_PROXY ERROR: Actor# [2:7494341311029466330:2133] txid# 281474976715658, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:51.883034Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2e1rvj1hny7va4cmcc0qn5", Forwarded response to sender actor, requestId: 4, sender: [2:7494341311029466292:2311], selfId: [2:7494341293849596923:2211], source: [2:7494341311029466293:2312] 2025-04-17T17:51:51.883595Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTJkNDhiMDktOWRmOGQ1MDgtMzA3MzNhMTUtZTEwZjcyNjA=, workerId: [2:7494341311029466293:2312], local sessions count: 1 2025-04-17T17:51:51.887775Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7494341295974903752:2281], selfId: [2:7494341293849596923:2211], source: [2:7494341311029466254:2307] 2025-04-17T17:51:51.890362Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7494341300269871569:2460], selfId: [1:7494341295974903752:2281], source: [2:7494341293849596923:2211] 2025-04-17T17:51:52.215740Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341295974903733:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:52.215800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:53.330358Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341321140833504:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:53.330401Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00198b/r3tmp/tmpijKxF4/pdisk_1.dat 2025-04-17T17:51:53.472823Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:53.497029Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:53.498256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Conn ... 594046644480 PathId: 5 } 2025-04-17T17:51:56.151544Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715660 2025-04-17T17:51:56.287884Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-04-17T17:51:56.344260Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-04-17T17:51:56.352973Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715661. Doublechecking... 2025-04-17T17:51:56.377546Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:51:56.412798Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. 
Column diff is empty, finishing 2025-04-17T17:51:56.438556Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:51:56.439595Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8b611818-dc8ef270-16d45744-14798c29, Bootstrap. Database: /Root 2025-04-17T17:51:56.439759Z node 3 :KQP_PROXY DEBUG: Request has 18444999161393.111878s seconds to be completed 2025-04-17T17:51:56.441527Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=Y2JhOWI3Yy04MjkyY2Q3ZS0yYjQwNWIxMC00ZWU0MDRiNA==, workerId: [3:7494341334025736836:2358], database: /Root, longSession: 1, local sessions count: 1 2025-04-17T17:51:56.441645Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:56.442641Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8b611818-dc8ef270-16d45744-14798c29, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:51:56.443109Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=Y2JhOWI3Yy04MjkyY2Q3ZS0yYjQwNWIxMC00ZWU0MDRiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [3:7494341334025736836:2358] 2025-04-17T17:51:56.443141Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 300.000000s actor id: [3:7494341334025736838:2951] 2025-04-17T17:51:56.443732Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494341334025736839:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:56.443809Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:56.444067Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494341334025736844:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:56.447192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:2, at schemeshard: 72057594046644480 2025-04-17T17:51:56.464857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494341334025736846:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-17T17:51:56.525641Z node 3 :TX_PROXY ERROR: Actor# [3:7494341334025736907:3001] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:56.685561Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [3:7494341334025736837:2359], selfId: [3:7494341321140833653:2213], source: [3:7494341334025736836:2358] 2025-04-17T17:51:56.686001Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8b611818-dc8ef270-16d45744-14798c29, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=Y2JhOWI3Yy04MjkyY2Q3ZS0yYjQwNWIxMC00ZWU0MDRiNA==, TxId: 2025-04-17T17:51:56.686045Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8b611818-dc8ef270-16d45744-14798c29, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=Y2JhOWI3Yy04MjkyY2Q3ZS0yYjQwNWIxMC00ZWU0MDRiNA==, TxId: 2025-04-17T17:51:56.686060Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 8b611818-dc8ef270-16d45744-14798c29. Result: SUCCESS. Issues: 2025-04-17T17:51:56.686342Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=Y2JhOWI3Yy04MjkyY2Q3ZS0yYjQwNWIxMC00ZWU0MDRiNA==, workerId: [3:7494341334025736836:2358], local sessions count: 0 2025-04-17T17:51:56.688135Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZjUzZWM0OWEtOTFjZmJhZjktZjkxNmFmZi1hZjg5ZGE3Mw==, workerId: [3:7494341334025736992:2376], database: /Root, longSession: 1, local sessions count: 1 2025-04-17T17:51:56.688281Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:56.688544Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2e1xq2djx3zhterz2zcmv7, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZjUzZWM0OWEtOTFjZmJhZjktZjkxNmFmZi1hZjg5ZGE3Mw==, CurrentExecutionId: 8b611818-dc8ef270-16d45744-14798c29, CustomerSuppliedId: 01js2e1xq2djx3zhterz2zcmv7, PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [3:7494341334025736992:2376] 2025-04-17T17:51:56.688575Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [3:7494341334025736993:3044] 2025-04-17T17:51:56.708271Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2e1y937v22dk3m78533svs", Request has 18444999161392.843373s seconds to be completed 2025-04-17T17:51:56.710102Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2e1y937v22dk3m78533svs", Created new session, sessionId: ydb://session/3?node_id=3&id=ZTk1MTM1LWJjMzVhMWNhLThkZmQxYTAzLTEwYmI2Y2E=, workerId: [3:7494341334025737005:2382], database: /Root, longSession: 1, local sessions count: 2 2025-04-17T17:51:56.710269Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 01js2e1y937v22dk3m78533svs 2025-04-17T17:51:56.715711Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2e1y9bfpwwefr0g9yrjddf, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZTk1MTM1LWJjMzVhMWNhLThkZmQxYTAzLTEwYmI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 9, targetId: [3:7494341334025737005:2382] 2025-04-17T17:51:56.715750Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [3:7494341334025737008:3050] 2025-04-17T17:51:56.734372Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494341334025737012:3052], for# user@builtin, access# DescribeSchema 2025-04-17T17:51:56.734419Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494341334025737012:3052], for# user@builtin, access# DescribeSchema 2025-04-17T17:51:56.743908Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8b611818-dc8ef270-16d45744-14798c29, Bootstrap. Database: /Root 2025-04-17T17:51:56.745014Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494341334025737009:2384], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
<main>:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:51:56.745558Z node 3 :KQP_PROXY DEBUG: Request has 18444999161392.806070s seconds to be completed 2025-04-17T17:51:56.746115Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTk1MTM1LWJjMzVhMWNhLThkZmQxYTAzLTEwYmI2Y2E=, ActorId: [3:7494341334025737005:2382], ActorState: ExecuteState, TraceId: 01js2e1y9bfpwwefr0g9yrjddf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:51:56.747118Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ODlmNGU0NTMtZmUzYTQxM2UtYTIzZTBhNGItZGQ3ODljMmU=, workerId: [3:7494341334025737021:2387], database: /Root, longSession: 1, local sessions count: 3 2025-04-17T17:51:56.747240Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:56.747310Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2e1y9bfpwwefr0g9yrjddf", Forwarded response to sender actor, requestId: 9, sender: [3:7494341334025737007:2383], selfId: [3:7494341321140833653:2213], source: [3:7494341334025737005:2382] 2025-04-17T17:51:56.747368Z node 3 :KQP_PROXY DEBUG: TraceId: "01js2e1xq2djx3zhterz2zcmv7", Forwarded response to sender actor, requestId: 7, sender: [3:7494341334025736833:2949], selfId: [3:7494341321140833653:2213], source: [3:7494341334025736992:2376] 2025-04-17T17:51:56.747455Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8b611818-dc8ef270-16d45744-14798c29, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:51:56.747667Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODlmNGU0NTMtZmUzYTQxM2UtYTIzZTBhNGItZGQ3ODljMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s.
Send request to target, requestId: 11, targetId: [3:7494341334025737021:2387] 2025-04-17T17:51:56.747694Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [3:7494341334025737023:3056] >> TableCreation::SimpleTableCreation >> KqpProxy::PingNotExistedSession >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled >> KqpProxy::NodeDisconnectedTest [GOOD] >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> SystemView::AuthPermissions_ResultOrder [GOOD] >> SystemView::AuthPermissions_Selects |77.7%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] Test command err: 2025-04-17T17:51:45.461104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341284442011422:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:45.461149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019aa/r3tmp/tmpA2fl46/pdisk_1.dat 2025-04-17T17:51:45.823329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:45.876363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:45.876451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:45.878294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21674 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T17:51:46.133827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:46.178630Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:48.370467Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:48.372296Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:48.377745Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YjI1NzMwMDktNzA3NjhjMDMtZTgzOWZhY2ItMTc5MzkxNDQ=, workerId: [1:7494341297326913906:2309], database: , longSession: 0, local sessions count: 1 2025-04-17T17:51:48.377791Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:48.382274Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YjI1NzMwMDktNzA3NjhjMDMtZTgzOWZhY2ItMTc5MzkxNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7494341297326913906:2309] 2025-04-17T17:51:48.382315Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-04-17T17:51:48.382831Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:48.382863Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:51:48.382889Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:48.382942Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:48.383030Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:48.383098Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:48.383195Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:48.385861Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjI1NzMwMDktNzA3NjhjMDMtZTgzOWZhY2ItMTc5MzkxNDQ=, ActorId: [1:7494341297326913906:2309], ActorState: ReadyState, Reply query error, msg:
<main>: Error: SomeUniqTextForUt proxyRequestId: 2 2025-04-17T17:51:48.386248Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:48.386289Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:48.386348Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7494341288736979282:2282], selfId: [1:7494341284442011669:2278], source: [1:7494341297326913906:2309] 2025-04-17T17:51:48.388306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341297326913929:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:48.388387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:48.392515Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-17T17:51:48.392552Z node 1 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 2 2025-04-17T17:51:55.236406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:55.237025Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:55.237156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:51:55.239258Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:55.239693Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:55.239746Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019aa/r3tmp/tmpVIaTXx/pdisk_1.dat 2025-04-17T17:51:55.700114Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3350 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1141:2687] 2025-04-17T17:51:56.056177Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MjMzOGFjYjMtZThhOGE0Y2YtMzZhNzVhOGQtMjcxOTI5ZmU=, workerId: [3:1142:2375], database: , longSession: 1, local sessions count: 1 2025-04-17T17:51:56.056399Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=MjMzOGFjYjMtZThhOGE0Y2YtMzZhNzVhOGQtMjcxOTI5ZmU= 2025-04-17T17:51:56.057249Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MjMzOGFjYjMtZThhOGE0Y2YtMzZhNzVhOGQtMjcxOTI5ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2025-04-17T17:51:56.057397Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:56.057831Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MjMzOGFjYjMtZThhOGE0Y2YtMzZhNzVhOGQtMjcxOTI5ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1142:2375] 2025-04-17T17:51:56.057892Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:56.363608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1143:2688], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:56.363782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:56.364231Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1144:2376], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:56.364352Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:56.392468Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(3) 2025-04-17T17:51:56.392566Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 3 sessionId: ydb://session/3?node_id=3&id=MjMzOGFjYjMtZThhOGE0Y2YtMzZhNzVhOGQtMjcxOTI5ZmU= status: TIMEOUT round: 0 2025-04-17T17:51:56.392690Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-17T17:51:56.392718Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=3&id=MjMzOGFjYjMtZThhOGE0Y2YtMzZhNzVhOGQtMjcxOTI5ZmU= status: TIMEOUT round: 0 2025-04-17T17:51:56.392842Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:51:56.393021Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjMzOGFjYjMtZThhOGE0Y2YtMzZhNzVhOGQtMjcxOTI5ZmU=, ActorId: [3:1142:2375], ActorState: ExecuteState, TraceId: 01js2e1xmt05tghan77y8tx44y, Create QueryResponse for error on request, msg: 2025-04-17T17:51:56.395685Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1142:2375] 2025-04-17T17:51:56.395915Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 2 2025-04-17T17:51:56.398766Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NTJlZTk3YTYtYjdmMjVjNDktYzk5NDE5MTktODcwMzRjMTA=, workerId: [3:1165:2380], database: , longSession: 1, local sessions count: 2 2025-04-17T17:51:56.398959Z node 3 :KQP_PROXY DE ... DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YmJhMGRmNjctOTlkYTMzNGMtOGQ1NzYyNTYtOGZlZTU0NmE=, workerId: [3:1411:2518], database: , longSession: 1, local sessions count: 56 2025-04-17T17:51:57.900952Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:57.901260Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 57, sender: [2:1141:2687], trace_id: 2025-04-17T17:51:57.901373Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 57 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:57.911774Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(57) 2025-04-17T17:51:57.911839Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 57 sessionId: ydb://session/3?node_id=3&id=YmJhMGRmNjctOTlkYTMzNGMtOGQ1NzYyNTYtOGZlZTU0NmE= status: TIMEOUT round: 0 2025-04-17T17:51:57.911939Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 57, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:51:57.913701Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZmQ0MWQwZTAtZjljMGMyMjItZDRjNGVhZTItNDNmMDg2YTI=, workerId: [3:1412:2519], database: , longSession: 1, local sessions count: 57 2025-04-17T17:51:57.913845Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=ZmQ0MWQwZTAtZjljMGMyMjItZDRjNGVhZTItNDNmMDg2YTI= 2025-04-17T17:51:57.914330Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZmQ0MWQwZTAtZjljMGMyMjItZDRjNGVhZTItNDNmMDg2YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 58, targetId: [3:8678280833929343339:121] 2025-04-17T17:51:57.914375Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 58 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:57.914670Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZmQ0MWQwZTAtZjljMGMyMjItZDRjNGVhZTItNDNmMDg2YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 87, targetId: [3:1412:2519] 2025-04-17T17:51:57.914704Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 87 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:57.916076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1413:2750], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.916157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.941091Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1414:2520], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.941307Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.954510Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(87) 2025-04-17T17:51:57.954624Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 87 sessionId: ydb://session/3?node_id=3&id=ZmQ0MWQwZTAtZjljMGMyMjItZDRjNGVhZTItNDNmMDg2YTI= status: TIMEOUT round: 0 2025-04-17T17:51:57.954720Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(58) 2025-04-17T17:51:57.954757Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 58 sessionId: ydb://session/3?node_id=3&id=ZmQ0MWQwZTAtZjljMGMyMjItZDRjNGVhZTItNDNmMDg2YTI= status: TIMEOUT round: 0 2025-04-17T17:51:57.954873Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmQ0MWQwZTAtZjljMGMyMjItZDRjNGVhZTItNDNmMDg2YTI=, ActorId: [3:1412:2519], ActorState: ExecuteState, TraceId: 01js2e1zet0vmgfdja6n9syq43, Create QueryResponse for error on request, msg: 2025-04-17T17:51:57.955058Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 58, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:51:57.956827Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 87, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1412:2519] 2025-04-17T17:51:57.957028Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 58 2025-04-17T17:51:57.958743Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=OTg0NjBmNDgtM2UwNzM3ZDYtYzdlZmFjODctYjJjMjI5Njk=, workerId: [3:1419:2523], database: , longSession: 1, local sessions count: 58 2025-04-17T17:51:57.958882Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:57.959209Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 59, sender: [2:1141:2687], trace_id: 2025-04-17T17:51:57.959303Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 59 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:57.959415Z node 3 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=3&id=OTg0NjBmNDgtM2UwNzM3ZDYtYzdlZmFjODctYjJjMjI5Njk=, rpc ctrl: [0:0:0], sameNode: 0, trace_id: 2025-04-17T17:51:57.959568Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 59, sender: [2:1141:2687], selfId: [2:206:2171], source: [3:236:2127] 2025-04-17T17:51:57.961091Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MjY1MzgzY2QtNGQwMjhmZTItZDBhYzNkOWYtMzQ4Mjg0YWE=, workerId: [3:1420:2524], database: , longSession: 1, local sessions count: 59 2025-04-17T17:51:57.961228Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=MjY1MzgzY2QtNGQwMjhmZTItZDBhYzNkOWYtMzQ4Mjg0YWE= 2025-04-17T17:51:57.961587Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MjY1MzgzY2QtNGQwMjhmZTItZDBhYzNkOWYtMzQ4Mjg0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 60, targetId: [3:8678280833929343339:121] 2025-04-17T17:51:57.961625Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 60 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:57.961936Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MjY1MzgzY2QtNGQwMjhmZTItZDBhYzNkOWYtMzQ4Mjg0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 90, targetId: [3:1420:2524] 2025-04-17T17:51:57.961987Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 90 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:57.963102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1421:2752], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.963258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.989094Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1423:2525], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.989280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:58.003754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:58.003879Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:58.006841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:58.006934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:58.018620Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(90) 2025-04-17T17:51:58.018696Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 90 sessionId: ydb://session/3?node_id=3&id=MjY1MzgzY2QtNGQwMjhmZTItZDBhYzNkOWYtMzQ4Mjg0YWE= status: TIMEOUT round: 0 2025-04-17T17:51:58.018812Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(59) 2025-04-17T17:51:58.018841Z node 2 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 59 2025-04-17T17:51:58.018938Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjY1MzgzY2QtNGQwMjhmZTItZDBhYzNkOWYtMzQ4Mjg0YWE=, ActorId: [3:1420:2524], ActorState: ExecuteState, TraceId: 01js2e1zga9n81a32ca145bp9a, Create QueryResponse for error on request, msg: 2025-04-17T17:51:58.020759Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(60) 2025-04-17T17:51:58.020806Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 60 sessionId: ydb://session/3?node_id=3&id=MjY1MzgzY2QtNGQwMjhmZTItZDBhYzNkOWYtMzQ4Mjg0YWE= status: TIMEOUT round: 0 2025-04-17T17:51:58.020910Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 60, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-17T17:51:58.021105Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 90, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1420:2524] 2025-04-17T17:51:58.021261Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 60 2025-04-17T17:51:58.023102Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZWNkNWU2MDYtZGY5MTk3MjYtNjRkZmExYzgtMzEyNGY4NA==, workerId: [3:1440:2528], database: , longSession: 1, local sessions count: 60 2025-04-17T17:51:58.023287Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:58.023622Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 61, sender: [2:1141:2687], trace_id: 2025-04-17T17:51:58.023721Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 61 timeout: 0.001000s actor id: [0:0:0] 2025-04-17T17:51:58.037199Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:51:58.037669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:58.038147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:58.048616Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(61) 2025-04-17T17:51:58.048689Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 61 sessionId: ydb://session/3?node_id=3&id=ZWNkNWU2MDYtZGY5MTk3MjYtNjRkZmExYzgtMzEyNGY4NA== status: TIMEOUT round: 0 2025-04-17T17:51:58.048776Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 61, sender: [2:1141:2687], selfId: [2:206:2171], source: 
[2:206:2171] >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> TCdcStreamTests::DropStreamImplShouldFail >> KqpQueryPerf::RangeRead+QueryService >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] Test command err: 2025-04-17T17:51:46.699243Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341291768570783:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:46.700826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001996/r3tmp/tmp65ueur/pdisk_1.dat 2025-04-17T17:51:47.220713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:47.220823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:47.223425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:47.230970Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10775 TServer::EnableGrpc on GrpcPort 65457, node 1 2025-04-17T17:51:47.473803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:47.473837Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:47.473847Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:47.473975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:51:47.613676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:51:49.872315Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:49.873968Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:49.875265Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.875321Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.876555Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:49.876582Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:51:49.876596Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:49.876648Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:49.882847Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.882868Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:51:49.882907Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:51:49.883056Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.883061Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:51:49.883072Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:51:49.883373Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-17T17:51:49.883378Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:51:49.883396Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:51:49.890729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-17T17:51:49.898821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:49.901695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:51:49.907379Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:51:49.907438Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-17T17:51:49.920064Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:51:49.920103Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-17T17:51:49.920229Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:51:49.920241Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:51:50.092846Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:51:50.128328Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-17T17:51:50.137254Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:51:50.174466Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:51:50.187578Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:51:50.202073Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:51:50.211117Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: cd8376d6-daf75359-ecbd430f-82a244e, Bootstrap. Database: /dc-1 2025-04-17T17:51:50.242347Z node 1 :KQP_PROXY DEBUG: Request has 18444999161399.309293s seconds to be completed 2025-04-17T17:51:50.245286Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NzBlYTBmOGEtZTE0MjliOTktNDg3ZGVmMDQtNjgzZTUyNDM=, workerId: [1:7494341308948440855:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:51:50.245425Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:50.246677Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: cd8376d6-daf75359-ecbd430f-82a244e, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:51:50.247634Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NzBlYTBmOGEtZTE0MjliOTktNDg3ZGVmMDQtNjgzZTUyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7494341308948440855:2333] 2025-04-17T17:51:50.247674Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494341308948440857:2467] 2025-04-17T17:51:50.252442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341308948440858:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:50.252555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341308948440868:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:50.252621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:50.256240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:51:50.270151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341308948440872:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:51:50.342072Z node 1 :TX_PROXY ERROR: Actor# [1:7494341308948440914:2499] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:50.862956Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7494341308948440856:2334], selfId: [1:7494341291768571025:2278], source: [1:7494341308948440855:2333] 2025-04-17T17:51:50.86356 ... , $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:51:58.043669Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YmU2ZmQwMDgtODgzM2MzMmYtOWJhYTM0YWUtZjdkODljZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 16, targetId: [2:7494341339174254918:2394] 2025-04-17T17:51:58.043704Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 16 timeout: 300.000000s actor id: [2:7494341343469222285:2604] 2025-04-17T17:51:58.259634Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, Bootstrap. Database: /dc-1 2025-04-17T17:51:58.259928Z node 2 :KQP_PROXY DEBUG: Request has 18444999161391.291711s seconds to be completed 2025-04-17T17:51:58.262018Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YjVkOWI0My00YmQxMjY0Mi02YmU4MmQyMC1jNzdhYjg1ZQ==, workerId: [2:7494341343469222305:2420], database: /dc-1, longSession: 1, local sessions count: 4 2025-04-17T17:51:58.262196Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:58.263086Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:51:58.263472Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YjVkOWI0My00YmQxMjY0Mi02YmU4MmQyMC1jNzdhYjg1ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 18, targetId: [2:7494341343469222305:2420] 2025-04-17T17:51:58.263528Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 18 timeout: 300.000000s actor id: [2:7494341343469222307:2618] 2025-04-17T17:51:58.432763Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 16, sender: [2:7494341343469222284:2413], selfId: [2:7494341321994384766:2207], source: [2:7494341339174254918:2394] 2025-04-17T17:51:58.433111Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 75a2ce77-6fb585b1-e3ddd754-7b536714, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmU2ZmQwMDgtODgzM2MzMmYtOWJhYTM0YWUtZjdkODljZjM=, TxId: 2025-04-17T17:51:58.433186Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 75a2ce77-6fb585b1-e3ddd754-7b536714, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmU2ZmQwMDgtODgzM2MzMmYtOWJhYTM0YWUtZjdkODljZjM=, TxId: 2025-04-17T17:51:58.433200Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 75a2ce77-6fb585b1-e3ddd754-7b536714. SUCCESS. Issues: 2025-04-17T17:51:58.434241Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YmU2ZmQwMDgtODgzM2MzMmYtOWJhYTM0YWUtZjdkODljZjM=, workerId: [2:7494341339174254918:2394], local sessions count: 3 2025-04-17T17:51:58.434276Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NWY1OGI4ZjAtZWVkODgwY2UtNjkxMGJlM2MtODEwYTE3Yzg=, workerId: [2:7494341339174254795:2351], local sessions count: 2 2025-04-17T17:51:58.484524Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 18, sender: [2:7494341343469222306:2421], selfId: [2:7494341321994384766:2207], source: [2:7494341343469222305:2420] 2025-04-17T17:51:58.484699Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjVkOWI0My00YmQxMjY0Mi02YmU4MmQyMC1jNzdhYjg1ZQ==, TxId: 01js2e200a9r0yat7axzbmbz2j 2025-04-17T17:51:58.484875Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, State: Get lease info, RunDataQuery: -- TScriptLeaseUpdater::OnGetLeaseInfo DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $lease_duration AS Interval; UPDATE `.metadata/script_execution_leases` SET lease_deadline=(CurrentUtcTimestamp() + $lease_duration) WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:51:58.485153Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YjVkOWI0My00YmQxMjY0Mi02YmU4MmQyMC1jNzdhYjg1ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 19, targetId: [2:7494341343469222305:2420] 2025-04-17T17:51:58.485184Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [2:7494341343469222355:2634] 2025-04-17T17:51:58.581697Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341321994384603:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:58.581775Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:58.618575Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 19, sender: [2:7494341343469222354:2432], selfId: [2:7494341321994384766:2207], source: [2:7494341343469222305:2420] 2025-04-17T17:51:58.618851Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjVkOWI0My00YmQxMjY0Mi02YmU4MmQyMC1jNzdhYjg1ZQ==, TxId: 2025-04-17T17:51:58.618901Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, State: Update lease, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjVkOWI0My00YmQxMjY0Mi02YmU4MmQyMC1jNzdhYjg1ZQ==, TxId: 2025-04-17T17:51:58.619283Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YjVkOWI0My00YmQxMjY0Mi02YmU4MmQyMC1jNzdhYjg1ZQ==, workerId: [2:7494341343469222305:2420], local sessions count: 1 2025-04-17T17:51:58.623869Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2e204zdyqx3pj6xpx0b1qx, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=M2E2YjVkYjgtMjM4NDc5ZjgtMmY5ZjQ2MzEtN2E0NTJmOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 20, targetId: [2:7494341339174254811:2361] 2025-04-17T17:51:58.623917Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [2:7494341343469222384:2645] 2025-04-17T17:51:59.044265Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2e204zdyqx3pj6xpx0b1qx", Forwarded response to sender actor, requestId: 20, sender: [2:7494341343469222383:2440], selfId: [2:7494341321994384766:2207], source: [2:7494341339174254811:2361] 2025-04-17T17:51:59.048058Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: e22f8b15-225001f0-9d750ab0-8e1185c, Bootstrap. Start TCheckLeaseStatusQueryActor 2025-04-17T17:51:59.048131Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, Bootstrap. 
Database: /dc-1 2025-04-17T17:51:59.048317Z node 2 :KQP_PROXY DEBUG: Request has 18444999161390.503314s seconds to be completed 2025-04-17T17:51:59.050125Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OTc5ZWZjZjgtN2I4NGQzZWMtNTcyYThmYTAtY2ZhZWRhOGU=, workerId: [2:7494341347764189729:2453], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:51:59.050276Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:51:59.050587Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, RunDataQuery: -- TCheckLeaseStatusQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, execution_status, finalization_status, issues, run_script_actor_id FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:51:59.051033Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OTc5ZWZjZjgtN2I4NGQzZWMtNTcyYThmYTAtY2ZhZWRhOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [2:7494341347764189729:2453] 2025-04-17T17:51:59.051071Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [2:7494341347764189731:2669] 2025-04-17T17:51:59.262706Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:59.428635Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 22, sender: [2:7494341347764189730:2454], selfId: [2:7494341321994384766:2207], source: [2:7494341347764189729:2453] 2025-04-17T17:51:59.428860Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTc5ZWZjZjgtN2I4NGQzZWMtNTcyYThmYTAtY2ZhZWRhOGU=, TxId: 2025-04-17T17:51:59.428991Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e22f8b15-225001f0-9d750ab0-8e1185c, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTc5ZWZjZjgtN2I4NGQzZWMtNTcyYThmYTAtY2ZhZWRhOGU=, TxId: 2025-04-17T17:51:59.429077Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: e22f8b15-225001f0-9d750ab0-8e1185c, reply success 2025-04-17T17:51:59.429376Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTc5ZWZjZjgtN2I4NGQzZWMtNTcyYThmYTAtY2ZhZWRhOGU=, workerId: [2:7494341347764189729:2453], local sessions count: 1 2025-04-17T17:51:59.437136Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=M2E2YjVkYjgtMjM4NDc5ZjgtMmY5ZjQ2MzEtN2E0NTJmOWU=, workerId: [2:7494341339174254811:2361], local sessions count: 0 >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpProxy::PingNotExistedSession [GOOD] >> KqpQueryPerf::KvRead+QueryService >> TableCreation::SimpleTableCreation [GOOD] >> SystemView::AuthGroups [GOOD] >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> KqpPg::DropTablePg [GOOD] >> 
KqpPg::TableArrayInsert+useSink [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> SystemView::AuthGroupMembers >> SystemView::CollectScanQueries [GOOD] >> KqpPg::CopyTableSerialColumns+useSink >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> SystemView::CollectScriptingQueries >> Cdc::OldImageLogDebezium >> TableCreation::SimpleUpdateTable >> TSchemeShardTest::FindSubDomainPathIdActor >> KqpPg::DropTablePgMultiple >> KqpPg::TableArrayInsert-useSink >> TCdcStreamTests::CopyTableShouldNotCopyStream >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> SystemView::ShowCreateTableReadReplicas [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> TCdcStreamTests::MoveTableShouldFail >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> Cdc::HugeKey[YdsRunner] [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> SystemView::ShowCreateTableTtlSettings >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink >> TCdcStreamWithInitialScanTests::InitialScanProgress >> TCdcStreamTests::CheckSchemeLimits >> Cdc::HugeKey[TopicRunner] >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TableCreation::SimpleUpdateTable [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> Cdc::OldImageLogDebezium [GOOD] >> SystemView::AuthOwners_ResultOrder [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> 
SystemView::CollectScriptingQueries [GOOD] >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> SystemView::AuthOwners_TableRange >> Cdc::NewImageLogDebezium >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::CopyTableSerialColumns-useSink >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> KqpProxy::DatabasesCacheForServerless >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> TCdcStreamWithInitialScanTests::DropStream >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |77.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |77.7%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |77.7%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:45:55.948155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:45:55.948240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.948274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:45:55.948302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:45:55.948338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:45:55.948375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:45:55.948428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:45:55.948495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:45:55.949923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:45:56.745995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:45:56.746031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:45:56.879577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:45:56.880316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-17T17:45:56.880454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:45:57.002468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:45:57.004158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:45:57.013012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.013544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:45:57.033409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.040670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:45:57.040892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.042052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:45:57.042620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.042807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:45:57.044647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.070446Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:45:57.304223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:45:57.304480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.304664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:45:57.304869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:45:57.304928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.311766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.311873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:45:57.312027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.312068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:45:57.312101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:45:57.312152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:45:57.319096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.319159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:45:57.319197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:45:57.322309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.322361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.322400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.322453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.326258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:45:57.328019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:45:57.328194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:45:57.329138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:45:57.329266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:45:57.329313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.329557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:45:57.329604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:45:57.329753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:45:57.329820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:45:57.331698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T17:45:57.331751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:45:57.331910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:45:57.331962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:45:57.332274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:45:57.332320Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:45:57.332434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.332473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.332507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:45:57.332537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.332594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:45:57.332629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:45:57.332674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:45:57.332767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:45:57.332821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:45:57.332859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:45:57.332911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:45:57.334667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.334765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:45:57.334814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:52:05.401846Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:05.401890Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:52:05.401936Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-17T17:52:05.402569Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:52:05.402629Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:52:05.402842Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:52:05.402904Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:52:05.402977Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:52:05.403036Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:52:05.403105Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T17:52:05.403178Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:52:05.403249Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:52:05.403308Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:52:05.403528Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:52:05.403630Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T17:52:05.403700Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T17:52:05.403755Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T17:52:05.405532Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:05.405645Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:05.405695Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:52:05.405780Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T17:52:05.405848Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:52:05.411668Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 
72057594046678944, cookie: 102 2025-04-17T17:52:05.411802Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:05.411856Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:52:05.411890Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T17:52:05.411925Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:52:05.412027Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T17:52:05.422215Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:52:05.431121Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:52:05.436720Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:52:05.436810Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:52:05.437475Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:52:05.437643Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:52:05.437710Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:524:2475] TestWaitNotification: OK eventTxId 102 2025-04-17T17:52:05.438429Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:05.438741Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 356us result status StatusSuccess 2025-04-17T17:52:05.439213Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:05.439777Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:52:05.439960Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 216us result status StatusSuccess 2025-04-17T17:52:05.440409Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:05.923203Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:05.923508Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 331us result status StatusSuccess 2025-04-17T17:52:05.924015Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:06.190852Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 |77.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-04-17T17:50:31.078649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340968751428329:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:31.078724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000578/r3tmp/tmpaFRis9/pdisk_1.dat 2025-04-17T17:50:31.427937Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:31.435654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:31.435749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4652, node 1 2025-04-17T17:50:31.453914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:31.459462Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:31.459551Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:31.525801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:31.525827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:31.525837Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:31.525971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22744 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:31.816998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:31.852158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:31.872020Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494340968859398211:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:31.872065Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:31.878550Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494340968995765981:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:31.878634Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2025-04-17T17:50:31.955833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:31.955910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:31.963415Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T17:50:31.969300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:31.972100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:31.972174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:31.988090Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T17:50:32.000093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:32.184090Z node 4 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-17T17:50:32.184162Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-17T17:50:32.195953Z node 4 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [4:7494340968995765995:2078], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-17T17:50:32.208669Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-17T17:50:32.209028Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:32.233673Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-17T17:50:32.233782Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-04-17T17:50:32.236283Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-17T17:50:32.236333Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-04-17T17:50:32.236370Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-04-17T17:50:32.237276Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-17T17:50:32.237336Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-04-17T17:50:32.237381Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-04-17T17:50:32.237407Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-04-17T17:50:32.237440Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-04-17T17:50:32.237472Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-04-17T17:50:32.237538Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-04-17T17:50:32.237569Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-04-17T17:50:32.237594Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-04-17T17:50:32.237620Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-04-17T17:50:32.258341Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 
2025-04-17T17:50:32.258385Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2025-04-17T17:50:32.258416Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-04-17T17:50:32.258437Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2025-04-17T17:50:32.258491Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2025-04-17T17:50:32.258519Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 20, result count# 0 2025-04-17T17:50:32.258549Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 21, result count# 0 2025-04-17T17:50:32.258646Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-04-17T17:50:32.000000Z 2025-04-17T17:50:32.258891Z node 4 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [4:7494340968995765995:2078], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2025-04-17T17:50:32.282732Z node 4 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [4:7494340968995765995:2078], database# /Root/Tenant1, no sysview processor 2025-04-17T17:50:32.294221Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2025-04-17T17:50:32.304541Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2025-04-17T17:50:32.321326Z node 4 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-04-17T17:50:32.322305Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-04-17T17:50:32.368770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:32.386390Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T17:50:32.389864Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494340974643066377:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:32.389951Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:32.397988Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340972369723819:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:32.398048Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:32.444709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:32.444813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-04-17T17:50:32.453137Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:50:32.474786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:32.528232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:32.528316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:32.532670Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:50:32.535013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:32.540012Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2025-04-17T17:50:32.540088Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2025 ... 3, finished: 1 2025-04-17T17:51:57.585883Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494341339377442282:2428], owner: [19:7494341339377442279:2426], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:51:57.588210Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912317576, txId: 281474976710673] shutting down 2025-04-17T17:51:57.783619Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2e1z4y2myyh9pja6xq05bg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=NGU5NDZhNDYtOTY1YWQxOTMtZDMxYWQ3MzYtMjY5YWZkNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:57.786022Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7494341339377442316:2438], owner: [19:7494341339377442313:2436], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:51:57.787030Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7494341339377442316:2438], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:51:57.787428Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7494341339377442316:2438], row count: 3, finished: 1 2025-04-17T17:51:57.787456Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494341339377442316:2438], owner: [19:7494341339377442313:2436], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:51:57.790445Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912317782, txId: 281474976710675] shutting down 2025-04-17T17:51:57.984895Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2e1zb58j7s8d2z9zb3m9qw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=ZjMyYWNjMWYtNDU0Y2Y4ODMtOGI1MzVkMmUtOWJhMDBjN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:51:57.987578Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7494341339377442349:2447], owner: [19:7494341339377442346:2445], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:51:57.988400Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7494341339377442349:2447], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:51:57.988922Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7494341339377442349:2447], row count: 4, finished: 1 2025-04-17T17:51:57.988973Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494341339377442349:2447], owner: [19:7494341339377442346:2445], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:51:57.991763Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912317983, txId: 281474976710677] shutting down 2025-04-17T17:51:58.162136Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01js2e1zhdc4maenm6neevv3q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=YjFlNmY5ZWQtMmQ1MmJhODEtZjMwZTQxYzctYTgxZmNjZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:51:58.165235Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7494341343672409677:2456], owner: [19:7494341343672409674:2454], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:51:58.186960Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7494341343672409677:2456], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:51:58.201444Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7494341343672409677:2456], row count: 4, finished: 1 2025-04-17T17:51:58.201489Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494341343672409677:2456], owner: [19:7494341343672409674:2454], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:51:58.205813Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912318160, txId: 281474976710679] shutting down 2025-04-17T17:51:59.420310Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7494341347640959825:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:59.420360Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000578/r3tmp/tmpzHjzQV/pdisk_1.dat 2025-04-17T17:51:59.568601Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:59.597658Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:59.597747Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:59.599755Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4886, node 20 2025-04-17T17:51:59.689925Z node 20 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:59.689955Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:59.689965Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:59.690135Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:00.132575Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:04.426336Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7494341347640959825:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:04.538625Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:05.315755Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.467082Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494341373410764532:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:05.467097Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494341373410764523:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:05.467179Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:05.474746Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:52:05.493150Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7494341373410764537:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:52:05.573278Z node 20 :TX_PROXY ERROR: Actor# [20:7494341373410764598:2502] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:05.910961Z node 20 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e26tn231b4r3d7ppq66xq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=20&id=NzFhMTRkNGQtYWIzYjJhMzAtMWJlNGM1NmEtZTg2MTExNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:05.913699Z node 20 :SYSTEM_VIEWS INFO: Scan started, actor: [20:7494341373410764646:2370], owner: [20:7494341373410764645:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:52:05.917179Z node 20 :SYSTEM_VIEWS INFO: Scan prepared, actor: [20:7494341373410764646:2370], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:52:05.917792Z node 20 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [20:7494341373410764646:2370], row count: 4, finished: 1 2025-04-17T17:52:05.917827Z node 20 :SYSTEM_VIEWS INFO: Scan finished, actor: [20:7494341373410764646:2370], owner: [20:7494341373410764645:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:52:05.918002Z node 20 :SYSTEM_VIEWS INFO: Scan started, actor: [20:7494341373410764652:2373], owner: [20:7494341373410764645:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:52:05.920516Z node 20 :SYSTEM_VIEWS INFO: Scan prepared, actor: [20:7494341373410764652:2373], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:52:05.920958Z node 20 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [20:7494341373410764652:2373], row count: 4, finished: 1 2025-04-17T17:52:05.920993Z node 20 :SYSTEM_VIEWS INFO: Scan finished, actor: [20:7494341373410764652:2373], owner: [20:7494341373410764645:2369], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-17T17:52:05.923503Z node 20 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912325908, txId: 281474976715661] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 5721, MsgBus: 5375 2025-04-17T17:52:01.206937Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341357153881255:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:01.207890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016e1/r3tmp/tmp6QhuY5/pdisk_1.dat 2025-04-17T17:52:01.589235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:01.599114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-17T17:52:01.599231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:01.603332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5721, node 1 2025-04-17T17:52:01.698099Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:01.698124Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:01.698131Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:01.698239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5375 TClient is connected to server localhost:5375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:02.286378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:02.333270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:02.568180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:02.767619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:02.851818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:04.500668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341370038784913:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:04.500789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:05.012444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.057033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.093093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.140198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.186683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.233654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.275482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:05.346990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341374333752873:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:05.347105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:05.347709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341374333752878:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:05.351890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:05.366918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341374333752881:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:05.422864Z node 1 :TX_PROXY ERROR: Actor# [1:7494341374333752935:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:06.207602Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341357153881255:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:06.207676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-04-17T17:51:59.441127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341346197263417:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:59.441172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001963/r3tmp/tmpbIlW8r/pdisk_1.dat 2025-04-17T17:51:59.913096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:59.913191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:59.924953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:59.935099Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18749, node 1 2025-04-17T17:52:00.166683Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:00.166708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:00.166716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:00.166831Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:52:00.561816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:02.597907Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:02.605191Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-17T17:52:02.609207Z node 1 :KQP_PROXY DEBUG: Received ping session request, request_id: 2, sender: [1:7494341350492231689:2318], trace_id: 01js2e22646bhpbbqe48bd4gky 2025-04-17T17:52:02.609400Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 5.000000s actor id: [0:0:0] 2025-04-17T17:52:02.609441Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:52:02.609460Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:52:02.609477Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:02.609541Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-17T17:52:02.609655Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:02.609709Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:02.609796Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:02.609908Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:02.609947Z node 1 :KQP_PROXY DEBUG: Session not found, targetId: [2:8678280833929343339:121] requestId: 2 2025-04-17T17:52:02.614163Z node 1 :KQP_PROXY DEBUG: TraceId: "01js2e22646bhpbbqe48bd4gky", Forwarded response to sender actor, requestId: 2, sender: [1:7494341350492231689:2318], selfId: [1:7494341346197263649:2281], source: [1:7494341346197263649:2281] 2025-04-17T17:52:03.977025Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341365430918160:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:03.977107Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001963/r3tmp/tmpZaQ2gS/pdisk_1.dat 2025-04-17T17:52:04.128991Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:04.158127Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:04.158243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:04.160092Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25224 TServer::EnableGrpc on GrpcPort 18894, node 4 2025-04-17T17:52:04.409116Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:04.409145Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:04.409152Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T17:52:04.409280Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:52:04.476322Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:07.000723Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:07.001524Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:52:07.005256Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:52:07.005289Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:52:07.005335Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:07.005387Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:52:07.005441Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:07.005481Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:07.009427Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:07.009486Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:07.011056Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:52:07.011092Z node 4 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:52:07.011141Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:52:07.011228Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-17T17:52:07.011232Z node 4 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:52:07.011246Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:52:07.011284Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:52:07.011288Z node 4 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:52:07.011314Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:52:07.020391Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-17T17:52:07.023691Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:52:07.025469Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:52:07.033862Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:52:07.033915Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:52:07.034900Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:52:07.034929Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-04-17T17:52:07.035841Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:52:07.035868Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-04-17T17:52:07.174624Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:52:07.214153Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:52:07.217342Z node 4 :KQP_PROXY DEBUG: Table scri ... XY DEBUG: Received create session request, trace_id: 2025-04-17T17:52:10.125788Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 956d116b-583b7684-453be340-b1f558b6, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:52:10.126058Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=MmE0MmIyMjAtZjY4Nzk2M2EtNGVkYjJhYTktY2Q4NmRlY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 20, targetId: [4:7494341395495690548:2446] 2025-04-17T17:52:10.126088Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [4:7494341395495690550:2662] 2025-04-17T17:52:10.135422Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [4:7494341395495690549:2447], selfId: [4:7494341365430918394:2275], source: [4:7494341395495690548:2446] 2025-04-17T17:52:10.135976Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=MmE0MmIyMjAtZjY4Nzk2M2EtNGVkYjJhYTktY2Q4NmRlY2M=, TxId: 01js2e2bcfbv0r5f90xdjjn6a3 2025-04-17T17:52:10.136466Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:52:10.136875Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=MmE0MmIyMjAtZjY4Nzk2M2EtNGVkYjJhYTktY2Q4NmRlY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 21, targetId: [4:7494341395495690548:2446] 2025-04-17T17:52:10.136902Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 21 timeout: 300.000000s actor id: [4:7494341395495690571:2667] 2025-04-17T17:52:10.149873Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 21, sender: [4:7494341395495690570:2453], selfId: [4:7494341365430918394:2275], source: [4:7494341395495690548:2446] 2025-04-17T17:52:10.150276Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=MmE0MmIyMjAtZjY4Nzk2M2EtNGVkYjJhYTktY2Q4NmRlY2M=, TxId: 2025-04-17T17:52:10.150361Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MmE0MmIyMjAtZjY4Nzk2M2EtNGVkYjJhYTktY2Q4NmRlY2M=, TxId: 2025-04-17T17:52:10.150420Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 956d116b-583b7684-453be340-b1f558b6. UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-04-17T17:52:10.150847Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=MmE0MmIyMjAtZjY4Nzk2M2EtNGVkYjJhYTktY2Q4NmRlY2M=, workerId: [4:7494341395495690548:2446], local sessions count: 1 2025-04-17T17:52:10.151303Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 956d116b-583b7684-453be340-b1f558b6, successfully finalized script execution operation 2025-04-17T17:52:10.151332Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 956d116b-583b7684-453be340-b1f558b6, reply success 2025-04-17T17:52:10.159964Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2e2bdf07sgk78czm2q58q8, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=Y2FkZmE4YzItZDMxZmM3NjgtM2Q3M2E4MGMtZjVjYTM4OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [4:7494341382610788374:2361] 2025-04-17T17:52:10.160015Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [4:7494341395495690596:2674] 2025-04-17T17:52:10.365534Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:10.641582Z node 4 :KQP_PROXY DEBUG: TraceId: "01js2e2bdf07sgk78czm2q58q8", Forwarded response to sender actor, requestId: 22, sender: [4:7494341395495690595:2458], selfId: [4:7494341365430918394:2275], source: [4:7494341382610788374:2361] 2025-04-17T17:52:10.643198Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 956d116b-583b7684-453be340-b1f558b6, Bootstrap. Database: /dc-1 2025-04-17T17:52:10.643375Z node 4 :KQP_PROXY DEBUG: Request has 18444999161378.908257s seconds to be completed 2025-04-17T17:52:10.645331Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=MTc2NzNkMzgtZjJhYjY2ODEtYzY2MGZkNjktOTIxOGVhMWI=, workerId: [4:7494341395495690645:2470], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:52:10.645487Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:52:10.645726Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 956d116b-583b7684-453be340-b1f558b6, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-17T17:52:10.646030Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=MTc2NzNkMzgtZjJhYjY2ODEtYzY2MGZkNjktOTIxOGVhMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
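[Editor's note] The lease check the updater runs is short enough to restate. This is the same query as in the record above, reformatted with nothing added:

    -- TScriptLeaseUpdater::OnRunQuery (reformatted)
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;

    SELECT lease_deadline
    FROM `.metadata/script_execution_leases`
    WHERE database = $database
      AND execution_id = $execution_id
      AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL);

An empty result here is what the updater surfaces as BAD_REQUEST "No such execution" in the records below, consistent with the finalization above having already deleted the lease row.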
Send request to target, requestId: 24, targetId: [4:7494341395495690645:2470] 2025-04-17T17:52:10.646065Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [4:7494341395495690647:2699] 2025-04-17T17:52:10.846656Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 24, sender: [4:7494341395495690646:2471], selfId: [4:7494341365430918394:2275], source: [4:7494341395495690645:2470] 2025-04-17T17:52:10.846983Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=MTc2NzNkMzgtZjJhYjY2ODEtYzY2MGZkNjktOTIxOGVhMWI=, TxId: 01js2e2c2t672bnj24hzmd59rg 2025-04-17T17:52:10.847090Z node 4 :KQP_PROXY WARN: [TQueryBase] [TScriptLeaseUpdater] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Get lease info, Finish with BAD_REQUEST, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=MTc2NzNkMzgtZjJhYjY2ODEtYzY2MGZkNjktOTIxOGVhMWI=, TxId: 01js2e2c2t672bnj24hzmd59rg 2025-04-17T17:52:10.847123Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Get lease info, Rollback transaction: 01js2e2c2t672bnj24hzmd59rg 2025-04-17T17:52:10.847420Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=MTc2NzNkMzgtZjJhYjY2ODEtYzY2MGZkNjktOTIxOGVhMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 25, targetId: [4:7494341395495690645:2470] 2025-04-17T17:52:10.847443Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 25 timeout: 600.000000s actor id: [4:7494341395495690673:2711] 2025-04-17T17:52:10.848244Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 25, sender: [4:7494341395495690672:2478], selfId: [4:7494341365430918394:2275], source: [4:7494341395495690645:2470] 2025-04-17T17:52:10.848644Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 956d116b-583b7684-453be340-b1f558b6, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-04-17T17:52:10.849015Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=MTc2NzNkMzgtZjJhYjY2ODEtYzY2MGZkNjktOTIxOGVhMWI=, workerId: [4:7494341395495690645:2470], local sessions count: 1 2025-04-17T17:52:10.859734Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=Y2FkZmE4YzItZDMxZmM3NjgtM2Q3M2E4MGMtZjVjYTM4OTU=, workerId: [4:7494341382610788374:2361], local sessions count: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:51:14.737607Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:14.737712Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:51:14.761243Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:14.785669Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { 
PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:51:14.786661Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:51:14.789969Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:51:14.791992Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-17T17:51:14.793697Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:51:14.805098Z node 1 :PERSQUEUE INFO: new Cookie default|14717a62-4f3a17f3-da0a9520-8307b1a_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:14.822628Z node 1 :PERSQUEUE INFO: new Cookie default|5fb264b7-3ffbe3b9-55c12086-df762847_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:14.872585Z node 1 :PERSQUEUE INFO: new Cookie default|3a888100-92f771f2-9ffe0a3a-41ee8a15_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:243:2057] recipient: [1:99:2134] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:246:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:247:2057] recipient: [1:245:2246] Leader for TabletID 72057594037927937 is [1:248:2247] sender: [1:249:2057] recipient: [1:245:2246] 2025-04-17T17:51:14.966208Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:14.966268Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:51:14.966832Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:297:2288] 2025-04-17T17:51:14.968909Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:298:2289] 2025-04-17T17:51:14.999933Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:51:15.000026Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:297:2288] 2025-04-17T17:51:15.022428Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
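[Editor's note] The tablet configuration applied at the start of this test is easier to scan when reformatted. This is the protobuf text from the "Config applied version 1" record above, fields unchanged:

    CacheSize: 10485760
    PartitionConfig {
      MaxCountInPartition: 20000000
      MaxSizeInPartition: 104857600
      LifetimeSeconds: 0
      LowWatermark: 6291456
      SourceIdLifetimeSeconds: 3600
      MaxWriteInflightSize: 90000000
    }
    PartitionIds: 0
    PartitionIds: 1
    TopicName: "rt3.dc1--asdfgs--topic"
    Version: 1
    LocalDC: true
    Topic: "topic"
    TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic"
    Partitions { PartitionId: 0 }
    Partitions { PartitionId: 1 }
    ReadRuleGenerations: 1
    MeteringMode: METERING_MODE_RESERVED_CAPACITY
    AllPartitions { PartitionId: 0 }
    AllPartitions { PartitionId: 1 }
    Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false }

Later "Config applied" records in this test differ only in Version, ReadRuleGenerations, and Consumers.Generation.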
2025-04-17T17:51:15.022520Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:298:2289] Leader for TabletID 72057594037927937 is [1:248:2247] sender: [1:330:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:51:15.702947Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:15.703022Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-04-17T17:51:15.719676Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:15.720415Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:51:15.720968Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-04-17T17:51:15.723515Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-04-17T17:51:15.725095Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-04-17T17:51:15.726863Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-04-17T17:51:15.734567Z node 2 :PERSQUEUE INFO: new Cookie default|b13ca772-96c0d141-ecfa4a2-13250543_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:15.741190Z node 2 :PERSQUEUE INFO: new Cookie default|111e726c-56a0ac3e-9d99cd22-14172207_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:15.763554Z node 2 :PERSQUEUE INFO: new Cookie default|b2eba444-e4827999-3f668a86-a75a1e41_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvOffsets ! 
Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:242:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:244:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:246:2057] recipient: [2:245:2245] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:248:2057] recipient: [2:245:2245] 2025-04-17T17:51:15.817184Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:15.817275Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:51:15.818150Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:296:2287] 2025-04-17T17:51:15.820410Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:297:2288] 2025-04-17T17:51:15.843293Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:51:15.843384Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:296:2287] 2025-04-17T17:51:15.849328Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:51:15.849386Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:297:2288] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:247:2246] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:349:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:352:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:355:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:356:2057] recipient: [2:354:2322] Leader for TabletID 72057594037927937 is [2:357:2323] sender: [2:358:2057] recipient: [2:354:2322] 2025-04-17T17:51:17.282622Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:17.282702Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:51:17.283734Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:408:2366] 2025-04-17T17:51:17.291895Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:409:2367] 2025-04-17T17:51:17.320669Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:51:17.320763Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [2:408:2366] 2025-04-17T17:51:17.327035Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-17T17:51:17.327116Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [2:409:2367] Leader for TabletID 72057594037927937 is [2:357:2323] sender: [2:439:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient ... on 3 [53:300:2291] Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:332:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:250:2249]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:334:2057] recipient: [53:99:2134] Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:337:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:338:2057] recipient: [53:336:2312] Leader for TabletID 72057594037927937 is [53:339:2313] sender: [53:340:2057] recipient: [53:336:2312] 2025-04-17T17:52:03.129900Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:03.129957Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:52:03.130492Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:390:2356] 2025-04-17T17:52:03.132267Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:391:2357] 2025-04-17T17:52:03.150374Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:52:03.150467Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [53:390:2356] 2025-04-17T17:52:03.155898Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:52:03.155952Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [53:391:2357] !Reboot 72057594037927937 (actor [53:250:2249]) rebooted! !Reboot 72057594037927937 (actor [53:250:2249]) tablet resolver refreshed! 
new actor is[53:339:2313] Leader for TabletID 72057594037927937 is [53:339:2313] sender: [53:440:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-04-17T17:52:04.991672Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:04.991720Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-04-17T17:52:05.009508Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:05.010279Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 54 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2025-04-17T17:52:05.010819Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-04-17T17:52:05.013385Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-04-17T17:52:05.015042Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-04-17T17:52:05.016882Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-04-17T17:52:05.022344Z node 54 :PERSQUEUE INFO: new Cookie default|9c18815e-d430d0dc-c3fc9054-5a409d2d_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:05.029611Z node 54 :PERSQUEUE INFO: new Cookie default|48dffca3-5c3d79c3-925d6f4b-81dc0aee_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:05.056101Z node 54 :PERSQUEUE INFO: new Cookie default|63ec3454-72eb842b-7ef5aa5b-213045e1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:243:2057] recipient: [54:99:2134] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:245:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:247:2057] recipient: [54:246:2246] Leader for 
TabletID 72057594037927937 is [54:248:2247] sender: [54:249:2057] recipient: [54:246:2246] 2025-04-17T17:52:05.127353Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:05.127430Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:52:05.128504Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:297:2288] 2025-04-17T17:52:05.131756Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:298:2289] 2025-04-17T17:52:05.159884Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:52:05.159984Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:297:2288] 2025-04-17T17:52:05.166696Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:52:05.166789Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:298:2289] Leader for TabletID 72057594037927937 is [54:248:2247] sender: [54:330:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:108:2057] recipient: [55:101:2135] 2025-04-17T17:52:05.729271Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:05.729352Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927938 is [55:153:2174] sender: [55:154:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:177:2057] recipient: [55:14:2061] 2025-04-17T17:52:05.758449Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:05.759412Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2025-04-17T17:52:05.760127Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:183:2196] 2025-04-17T17:52:05.763642Z node 55 :PERSQUEUE INFO: 
[PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:183:2196] 2025-04-17T17:52:05.765665Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:184:2197] 2025-04-17T17:52:05.767934Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:184:2197] 2025-04-17T17:52:05.776661Z node 55 :PERSQUEUE INFO: new Cookie default|5bd007fd-3599a5aa-4aa1b39-1bacebe3_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:05.784277Z node 55 :PERSQUEUE INFO: new Cookie default|a28f8b83-e4d9f926-854f7f43-52768a48_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:05.818693Z node 55 :PERSQUEUE INFO: new Cookie default|2ce05c2-b3614dc3-5d58b75d-95077b6e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:243:2057] recipient: [55:99:2134] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:246:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:247:2057] recipient: [55:245:2246] Leader for TabletID 72057594037927937 is [55:248:2247] sender: [55:249:2057] recipient: [55:245:2246] 2025-04-17T17:52:05.903505Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:05.903566Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:52:05.906206Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:297:2288] 2025-04-17T17:52:05.909104Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:298:2289] 2025-04-17T17:52:05.934857Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:52:05.934929Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:297:2288] 2025-04-17T17:52:05.943433Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-17T17:52:05.943512Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:298:2289] Leader for TabletID 72057594037927937 is [55:248:2247] sender: [55:328:2057] recipient: [55:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2025-04-17T17:51:59.303864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341346618522430:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:59.303909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001969/r3tmp/tmpmiED73/pdisk_1.dat 2025-04-17T17:51:59.656944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:59.673016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:59.673117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:59.675085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24763 TServer::EnableGrpc on GrpcPort 9434, node 1 2025-04-17T17:51:59.850813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:59.850832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:59.850838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:59.850942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:52:00.019827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:02.151608Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:02.152982Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:52:02.160077Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:02.160124Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:02.161552Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:52:02.162321Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:52:02.162342Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:02.162431Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:02.163053Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-17T17:52:02.163059Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-17T17:52:02.163082Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-17T17:52:02.163143Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-17T17:52:02.163147Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-17T17:52:02.163172Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-17T17:52:02.164629Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-17T17:52:02.164642Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-17T17:52:02.164662Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-17T17:52:02.167698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-17T17:52:02.169319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:52:02.171170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:52:02.176741Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-17T17:52:02.176808Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-17T17:52:02.177025Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-17T17:52:02.177053Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-04-17T17:52:02.177565Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-17T17:52:02.177599Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-04-17T17:52:02.301087Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-17T17:52:02.339180Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-17T17:52:02.347527Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-17T17:52:02.355501Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-17T17:52:02.398513Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-17T17:52:02.437798Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-17T17:52:02.438254Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8efd3a51-c8f34a8d-bde2d0df-f94e31cb, Bootstrap. Database: /dc-1 2025-04-17T17:52:02.457055Z node 1 :KQP_PROXY DEBUG: Request has 18444999161387.094588s seconds to be completed 2025-04-17T17:52:02.459490Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=OTE3ZDRjMDgtOTg3NGM2OWYtNGU3OGYzMWYtNTU1ODRkYWQ=, workerId: [1:7494341359503425185:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T17:52:02.459616Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:52:02.460809Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8efd3a51-c8f34a8d-bde2d0df-f94e31cb, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-17T17:52:02.461295Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=OTE3ZDRjMDgtOTg3NGM2OWYtNGU3OGYzMWYtNTU1ODRkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
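[Editor's note] For readability, the create-operation query from the record above, reformatted; this restates the captured text rather than the YDB sources:

    -- TCreateScriptOperationQuery::OnRunQuery (reformatted)
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $run_script_actor_id AS Text;
    DECLARE $execution_status AS Int32;
    DECLARE $execution_mode AS Int32;
    DECLARE $query_text AS Text;
    DECLARE $syntax AS Int32;
    DECLARE $meta AS JsonDocument;
    DECLARE $lease_duration AS Interval;
    DECLARE $execution_meta_ttl AS Interval;

    UPSERT INTO `.metadata/script_executions`
        (database, execution_id, run_script_actor_id, execution_status,
         execution_mode, start_ts, query_text, syntax, meta, expire_at)
    VALUES ($database, $execution_id, $run_script_actor_id, $execution_status,
            $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta,
            CurrentUtcTimestamp() + $execution_meta_ttl);

    UPSERT INTO `.metadata/script_execution_leases`
        (database, execution_id, lease_deadline, lease_generation, expire_at)
    VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1,
            CurrentUtcTimestamp() + $execution_meta_ttl);

Both rows are written by one data query: the operation row and its lease row share an expire_at, and lease_generation starts at 1.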
Send request to target, requestId: 3, targetId: [1:7494341359503425185:2333] 2025-04-17T17:52:02.461351Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494341359503425187:2467] 2025-04-17T17:52:02.465363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341359503425188:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:02.465446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:02.465864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341359503425200:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:02.469274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:52:02.481785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341359503425202:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:52:02.573143Z node 1 :TX_PROXY ERROR: Actor# [1:7494341359503425244:2499] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:02.966412Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7494341359503425186:2334], selfId: [1:7494341346618522650:2278], source: [1:7494341359503425185:2333] 2025-04-17T17:52:02.9666 ... :2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:07.237353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341382803217001:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:07.237635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:07.240137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-17T17:52:07.249116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341382803217016:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:52:07.323264Z node 2 :TX_PROXY ERROR: Actor# [2:7494341382803217059:2495] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:07.470057Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7494341382803217000:2334], selfId: [2:7494341365623347136:2264], source: [2:7494341382803216999:2333] 2025-04-17T17:52:07.470379Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWYxNDZmNTEtZmI4NzFhZDEtODIyM2JlYTItNTkxNDU0OTU=, TxId: 2025-04-17T17:52:07.470404Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWYxNDZmNTEtZmI4NzFhZDEtODIyM2JlYTItNTkxNDU0OTU=, TxId: 2025-04-17T17:52:07.470416Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 3b2c7d31-6deba396-a89ef8e8-4e38d098. Result: SUCCESS. Issues: 2025-04-17T17:52:07.472776Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OGE3MzQ2YS03OTE5YmQxYi05MGE5ZjI0Ni1kYTljMTJiYg==, workerId: [2:7494341382803217116:2351], database: dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:52:07.472931Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:52:07.473388Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OWYxNDZmNTEtZmI4NzFhZDEtODIyM2JlYTItNTkxNDU0OTU=, workerId: [2:7494341382803216999:2333], local sessions count: 1 2025-04-17T17:52:07.474090Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OGE3MzQ2YS03OTE5YmQxYi05MGE5ZjI0Ni1kYTljMTJiYg==, CurrentExecutionId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7494341382803217116:2351] 2025-04-17T17:52:07.474122Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7494341382803217118:2526] 2025-04-17T17:52:07.511600Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2e28tqe2ammt9b72b21cmc", Request has 18444999161382.040057s seconds to be completed 2025-04-17T17:52:07.513555Z node 2 :KQP_PROXY DEBUG: TraceId: "01js2e28tqe2ammt9b72b21cmc", Created new session, sessionId: ydb://session/3?node_id=2&id=OTI1ZDcxMjAtYzlhZWVhOTYtZTZiZjY5NmEtMTFjNGI2NGQ=, workerId: [2:7494341382803217134:2361], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-17T17:52:07.513685Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01js2e28tqe2ammt9b72b21cmc 2025-04-17T17:52:07.517144Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-17T17:52:07.517161Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-17T17:52:07.517185Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Full table path:/dc-1/.test/test_table 2025-04-17T17:52:07.519761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:1, at schemeshard: 72057594046644480 2025-04-17T17:52:07.520892Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-17T17:52:07.520918Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976710664 2025-04-17T17:52:07.592055Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976710664. Doublechecking... 2025-04-17T17:52:07.605687Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, Bootstrap. Database: /dc-1 2025-04-17T17:52:07.606933Z node 2 :KQP_PROXY DEBUG: Request has 18444999161381.944699s seconds to be completed 2025-04-17T17:52:07.609114Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YjhlZjgxMDQtNzY5YzAyYjgtNzk2MDhkY2EtZjcxZWMxNzE=, workerId: [2:7494341382803217225:2366], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T17:52:07.609283Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:52:07.609378Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7494341382803216996:2460], selfId: [2:7494341365623347136:2264], source: [2:7494341382803217116:2351] 2025-04-17T17:52:07.610370Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-17T17:52:07.610853Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YjhlZjgxMDQtNzY5YzAyYjgtNzk2MDhkY2EtZjcxZWMxNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7494341382803217225:2366] 2025-04-17T17:52:07.610889Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7494341382803217228:2592] 2025-04-17T17:52:07.678412Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:52:07.678820Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-04-17T17:52:07.678856Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-17T17:52:07.680015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:07.680978Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710666 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-04-17T17:52:07.681000Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Subscribe on create table tx: 281474976710666 2025-04-17T17:52:07.690941Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976710666. Doublechecking... 2025-04-17T17:52:07.775926Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-17T17:52:07.797855Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7494341382803217227:2367], selfId: [2:7494341365623347136:2264], source: [2:7494341382803217225:2366] 2025-04-17T17:52:07.798026Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjhlZjgxMDQtNzY5YzAyYjgtNzk2MDhkY2EtZjcxZWMxNzE=, TxId: 2025-04-17T17:52:07.798046Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjhlZjgxMDQtNzY5YzAyYjgtNzk2MDhkY2EtZjcxZWMxNzE=, TxId: 2025-04-17T17:52:07.799646Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, start saving rows range [0; 1) 2025-04-17T17:52:07.799782Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, Bootstrap. Database: /dc-1 2025-04-17T17:52:07.800923Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YjhlZjgxMDQtNzY5YzAyYjgtNzk2MDhkY2EtZjcxZWMxNzE=, workerId: [2:7494341382803217225:2366], local sessions count: 2 2025-04-17T17:52:07.800994Z node 2 :KQP_PROXY DEBUG: Request has 18444999161381.750646s seconds to be completed 2025-04-17T17:52:07.803081Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YTE4NmIzNDUtMTI2ZWI4YTgtYzE5MjdhNjctNjA2OWFjMWE=, workerId: [2:7494341382803217292:2377], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T17:52:07.803213Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T17:52:07.803553Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 3b2c7d31-6deba396-a89ef8e8-4e38d098, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-17T17:52:07.803969Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YTE4NmIzNDUtMTI2ZWI4YTgtYzE5MjdhNjctNjA2OWFjMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
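[Editor's note] In the record above, "DECLARE $expire_at AS Optional;" and "DECLARE $items AS List>;" lost their angle-bracketed type arguments during log capture. A reformatted sketch follows; the $items element layout is inferred from the SELECT list and is an assumption, not the exact declaration:

    -- TSaveScriptExecutionResultQuery::OnRunQuery (reformatted sketch)
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $result_set_id AS Int32;
    DECLARE $expire_at AS Optional<Timestamp>;  -- type argument lost in capture; assumed
    DECLARE $items AS List<Struct<              -- element layout assumed from the SELECT below
        row_id: Int64,
        result_set: String,
        accumulated_size: Int64>>;

    UPSERT INTO `.metadata/result_sets`
    SELECT
        $database          AS database,
        $execution_id      AS execution_id,
        $result_set_id     AS result_set_id,
        T.row_id           AS row_id,
        $expire_at         AS expire_at,
        T.result_set       AS result_set,
        T.accumulated_size AS accumulated_size
    FROM AS_TABLE($items) AS T;

AS_TABLE turns the $items list parameter into a relational input, so a single query upserts the whole batch reported as "start saving rows range [0; 1)" above.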
Send request to target, requestId: 10, targetId: [2:7494341382803217292:2377] 2025-04-17T17:52:07.804035Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7494341382803217294:2636] 2025-04-17T17:52:07.808349Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTI1ZDcxMjAtYzlhZWVhOTYtZTZiZjY5NmEtMTFjNGI2NGQ=, workerId: [2:7494341382803217134:2361], local sessions count: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2025-04-17T17:50:50.417259Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341048963520616:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:50.418156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004a5/r3tmp/tmp3bwv0N/pdisk_1.dat 2025-04-17T17:50:50.853233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:50.853343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:50.856899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:50.880791Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30702, node 1 2025-04-17T17:50:50.893862Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:50.895133Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:50.942141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:50.942191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:50.942201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:50.942383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:51.282857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:51.333774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:51.382589Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494341052540483142:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:51.382651Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:51.444024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:51.481261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:51.481351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:51.481932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:51.481993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:51.484701Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T17:50:51.484729Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T17:50:51.573695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:51.577722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:51.880177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:51.898575Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341052688221656:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:51.898620Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:51.907515Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341055154480154:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:51.907570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:51.983057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:52.048129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:52.048629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:52.060477Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:50:52.073766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:52.134469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:52.134534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:52.149260Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:50:52.151361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:55.400727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:50:55.417669Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341048963520616:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:55.417728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:55.571680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341070438358465:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:55.571721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341070438358457:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:55.571796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:55.576291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-17T17:50:55.613179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341070438358471:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-17T17:50:55.682779Z node 1 :TX_PROXY ERROR: Actor# [1:7494341070438358552:3010] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:56.079616Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.203588s 2025-04-17T17:50:56.079660Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.203687s 2025-04-17T17:50:56.169332Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2e02jh5z559yq2ch47hrjq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZkYjEwZmMtMjUyNjA3YmYtNjQyOTcxMzgtNWU5MzA5N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:56.203461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:50:56.386275Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494341052540483142:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:56.386332Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:56.441815Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2e03b8dw53jz0ek2ap5rre, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZkYjEwZmMtMjUyNjA3YmYtNjQyOTcxMzgtNWU5MzA5N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:56.472453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:50:56.701088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2e03kw0m2xayqf6x4ghzb6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZkYjEwZmMtMjUyNjA3YmYtNjQyOTcxMzgtNWU5MzA5N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:56.866611Z node 1 :KQP_E ... 37841566460:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:02.323130Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:02.333231Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7494341359316403681:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:02.333334Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:02.333638Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7494341359316403693:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:02.341837Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:52:02.359172Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [31:7494341359316403695:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:52:02.438017Z node 31 :TX_PROXY ERROR: Actor# [31:7494341359316403749:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:02.567918Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2e23rs7c2s7k2ccayag155, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=YzZlNzI2YS05ZGM1MGU3ZS01ZTVhYzA5ZS1mYWMzMDQyNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:02.713808Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2e2419dbgxp4vb70shxb4y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=M2Q2MGIxYmItZWJjZmRkZGMtYTdhZjI1MGEtODc4M2UzNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:02.775913Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912322755, txId: 281474976710662] shutting down 2025-04-17T17:52:03.040572Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2e247b0rxkdewb2yxzms4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=MTRkMjI3MDAtN2MyZTNjNzItZTcwNmY0My1kYzA4MjYxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:03.044335Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7494341363611371163:2366], owner: [31:7494341363611371160:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:52:03.055381Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7494341363611371163:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:52:03.056063Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7494341363611371163:2366], row count: 2, finished: 1 2025-04-17T17:52:03.056102Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7494341363611371163:2366], owner: [31:7494341363611371160:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:52:03.060261Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912323037, txId: 281474976710664] shutting down 2025-04-17T17:52:04.334621Z node 32 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[32:7494341367546685585:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:04.334715Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004a5/r3tmp/tmp073VJ2/pdisk_1.dat 2025-04-17T17:52:04.537392Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:04.537508Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:04.540980Z node 32 :IMPORT WARN: Table profiles 
were not loaded 2025-04-17T17:52:04.557509Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24784, node 32 2025-04-17T17:52:04.619286Z node 32 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:04.619307Z node 32 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:04.619318Z node 32 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:04.619488Z node 32 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:05.015948Z node 32 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:05.035825Z node 32 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:52:05.044851Z node 32 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:09.338355Z node 32 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[32:7494341367546685585:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:09.338510Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:10.230677Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [32:7494341393316490114:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:10.230768Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [32:7494341393316490125:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:10.230830Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:10.236106Z node 32 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:52:10.253271Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [32:7494341393316490128:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:52:10.309923Z node 32 :TX_PROXY ERROR: Actor# [32:7494341393316490179:2402] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:10.451481Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e2bfm6cve4vvdqq0psga3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=NjNmYTY0YmUtNWE3ODE1ZWYtZTQ2MDc4ZjItNGQ2NWMyM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:10.675879Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e2bsv0b2zm91t70tfjjyn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=NWQxNjEyZmQtODFiY2MxNWYtNTVkMDg0NzUtMmFhM2UxODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:10.712400Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912330714, txId: 281474976715662] shutting down 2025-04-17T17:52:10.948193Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e2c055dgvx56h5y4drrqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=YzlkZmVlMmMtM2JmMTg5YjAtNGYwYTYxNzktMWRlOTk0NTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:10.950519Z node 32 :SYSTEM_VIEWS INFO: Scan started, actor: [32:7494341393316490305:2372], owner: [32:7494341393316490302:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:52:10.953123Z node 32 :SYSTEM_VIEWS INFO: Scan prepared, actor: [32:7494341393316490305:2372], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:52:10.953808Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494341393316490305:2372], row count: 2, finished: 1 2025-04-17T17:52:10.953858Z node 32 :SYSTEM_VIEWS INFO: Scan finished, actor: [32:7494341393316490305:2372], owner: [32:7494341393316490302:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:52:10.956749Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912330946, txId: 281474976715664] shutting down >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> SystemView::AuthPermissions_Selects [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] >> KqpQueryPerf::KvRead-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] Test command err: 2025-04-17T17:51:51.354388Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341313337840241:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:51.354450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001974/r3tmp/tmp9c4hbs/pdisk_1.dat 2025-04-17T17:51:51.719399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:51.788000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:51.788100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:51.789800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9529 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:51:52.029077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:52.062440Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:54.162263Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:54.165305Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:54.173219Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-17T17:51:54.176008Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:51:54.176044Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-17T17:51:54.176061Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:54.176105Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T17:51:54.176169Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:54.176204Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:54.176352Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7494341317632808101:2281], selfId: [1:7494341313337840490:2278], source: [1:7494341313337840490:2278] 2025-04-17T17:51:54.177792Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:54.177848Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:51:54.178123Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-17T17:51:54.178263Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7494341317632808101:2281], selfId: [1:7494341313337840490:2278], source: [1:7494341313337840490:2278] 2025-04-17T17:51:54.178995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341326222742733:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:54.179250Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2025-04-17T17:51:54.179260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:54.179326Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 4, sender: [1:7494341317632808101:2281], selfId: [1:7494341313337840490:2278], source: [1:7494341313337840490:2278] 2025-04-17T17:51:54.180669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341326222742757:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:54.180788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:57.568397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:57.568734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:57.568994Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001974/r3tmp/tmpyoQE0e/pdisk_1.dat 2025-04-17T17:51:57.872387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:57.897211Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:51:57.897293Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:51:57.897635Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:57.922843Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:313:2357], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:51:57.924544Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:313:2357], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-17T17:51:57.924679Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:313:2357], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:51:57.924803Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:313:2357], cacheItem# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:51:57.924916Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:313:2357], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 } 
2025-04-17T17:51:57.924987Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:313:2357], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:51:57.925048Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:313:2357], cacheItem# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:51:57.925210Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:624:2534], recipient# [2:324:2367], result# { ErrorCount: 2 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:51:57.937211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:57.937359Z node 2 :HIVE ... 
p to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to BSC_STAT_PROCESSOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NIcNodeCache::TIcNodeCacheServiceActor Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE 
Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NBsController::TBlobStorageController::TSelfHealActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-04-17T17:52:08.426722Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(20) 2025-04-17T17:52:08.426794Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 20 sessionId: ydb://session/3?node_id=2&id=N2U1YTlkODUtMTVjNzI5N2EtZDM4NDFkOTYtNzkyZTRjNDU= status: TIMEOUT round: 
0 Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER 2025-04-17T17:52:08.427008Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2U1YTlkODUtMTVjNzI5N2EtZDM4NDFkOTYtNzkyZTRjNDU=, ActorId: [2:1131:2934], ActorState: ExecuteState, TraceId: 01js2e28tc2ntb4pv9jj856vyr, Create QueryResponse for error on request, msg: 2025-04-17T17:52:08.427301Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [2:593:2518], selfId: [2:57:2104], source: [2:1131:2934] Send scheduled evet back 2025-04-17T17:52:08.427460Z node 2 :KQP_COMPILE_ACTOR NOTICE: Compilation timeout, self: [2:1134:2937], cluster: db, database: , text: "SELECT * FROM `/Root/Table`;", startTime: 2025-04-17T17:52:07.501610Z 2025-04-17T17:52:08.427549Z node 2 :KQP_COMPILE_ACTOR DEBUG: Send response, self: [2:1134:2937], owner: [2:307:2351], status: TIMEOUT, issues:
: Error: Query compilation timed out. , uid: 2f72021-84e624f0-7776d04c-a4b80966 Send captured event back Send captured event back Send captured event back Send captured event back Send captured event back 2025-04-17T17:52:09.489292Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341391263697174:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:09.489342Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001974/r3tmp/tmp2dn0xB/pdisk_1.dat 2025-04-17T17:52:09.681469Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:09.686704Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:09.686774Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:09.689206Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65214, node 3 2025-04-17T17:52:09.798577Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:09.798598Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:09.798605Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:09.798727Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:10.039558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:12.086817Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:12.087704Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-17T17:52:12.090710Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:12.090766Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 
2025-04-17T17:52:12.090778Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:52:12.090808Z node 3 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:52:12.090824Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:52:12.097463Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::Replace+QueryService+UseSink >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> KqpYql::TableUseBeforeCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25613, MsgBus: 10240 2025-04-17T17:52:02.736712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341358952287765:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:02.738132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016d4/r3tmp/tmpImrujV/pdisk_1.dat 2025-04-17T17:52:03.146970Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:03.151598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:03.151705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:03.154085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25613, node 1 2025-04-17T17:52:03.288373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:03.288392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:03.292988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:03.293132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10240 TClient is connected to server localhost:10240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:03.857757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:03.882038Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:03.894709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:04.108166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:04.304658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:04.379203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:06.110621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341376132158701:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:06.110715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:06.455333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:06.484542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:06.519160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:06.555409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:06.604528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:06.638978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:06.713478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:06.800759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341376132159374:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:06.800844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:06.800869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341376132159379:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:06.809366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:06.820516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341376132159381:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:06.885309Z node 1 :TX_PROXY ERROR: Actor# [1:7494341376132159437:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:07.737311Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341358952287765:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:07.737380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28718, MsgBus: 61364 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016d4/r3tmp/tmpSznLhC/pdisk_1.dat 2025-04-17T17:52:08.926379Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:08.941304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:08.941376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:08.942950Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:08.956130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28718, node 2 2025-04-17T17:52:09.022835Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:09.022867Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:09.022876Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:09.023004Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61364 TClient is connected to server localhost:61364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:09.535931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:09.550768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:52:09.635848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:52:09.774724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:09.859848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:11.764929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341396993919874:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:11.765017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:11.821775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:11.851987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:11.884137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:11.915087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:11.944295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:11.991095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.023465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.081511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341401288887833:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.081582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.081587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341401288887838:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.085696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:12.098042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341401288887840:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:52:12.176896Z node 2 :TX_PROXY ERROR: Actor# [2:7494341401288887894:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SystemView::AuthGroupMembers [GOOD] >> SystemView::AuthGroupMembers_Access >> KqpYql::UuidPrimaryKeyDisabled >> KqpPragma::Auth |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |77.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> KqpYql::BinaryJsonOffsetBound >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> KqpYql::ColumnNameConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Selects [GOOD] Test command err: 2025-04-17T17:50:30.428250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340963770372130:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:30.428302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0008e8/r3tmp/tmpAXuG70/pdisk_1.dat 2025-04-17T17:50:30.794591Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22321, node 1 2025-04-17T17:50:30.831364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:30.831490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:30.833637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:30.843953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:30.843989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:30.844000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:30.844138Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13071 WaitRootIsUp 'Root'... 
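Note: the NOT_FOUND warnings above are emitted while the workload service lazily creates the default resource pool under /Root/.metadata/workload_manager/pools; the subsequent TX_PROXY "path exist, request accepts it" message shows that race resolving itself. As a minimal sketch only (assuming the YQL resource pool DDL available in current YDB versions; the identifier quoting and limit values are illustrative, not taken from this run), such a pool could also be created explicitly:

    -- Illustrative only: pre-create the default pool so TPoolFetcherActor
    -- does not report NOT_FOUND on its first fetch. Limits are placeholders.
    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );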
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:31.112953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:31.136885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:33.221552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340976655274774:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.222253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340976655274763:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.222402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.225760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:50:33.240342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340976655274777:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:50:33.312530Z node 1 :TX_PROXY ERROR: Actor# [1:7494340976655274828:2394] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:33.654229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2dzcr21tny9a5ssxfg9cd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWIwNmEwNmYtMTg4MzcwYzktODQyZmFiZDYtZGE4NDIyMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:33.798878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2dzda57mxgn35ec3bjbngv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY3NDk1ZmItOTYwNDg0MGQtYWU1MDhjN2UtMjhhODdiNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:33.947316Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2dzdaj3e3nbmjsz51njfst, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdkZmE4ODEtODU3YjAzYTktYzZmM2Y0ZDctNmYwZDUwODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:33.948897Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494340976655274926:2364], owner: [1:7494340976655274922:2362], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:50:33.949791Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494340976655274926:2364], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:50:33.950371Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494340976655274926:2364], row count: 2, finished: 1 2025-04-17T17:50:33.950423Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494340976655274926:2364], owner: [1:7494340976655274922:2362], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:50:33.957133Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912233946, txId: 281474976710663] shutting down 2025-04-17T17:50:35.112062Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494340985564393345:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:35.112106Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0008e8/r3tmp/tmpRQLfFx/pdisk_1.dat 2025-04-17T17:50:35.254614Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:35.294697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:35.294798Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:35.300332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30028, node 2 
2025-04-17T17:50:35.489707Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:35.489741Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:35.489752Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:35.489883Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:35.709100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:35.789223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:50:35.789423Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:35.789511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T17:50:35.789646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T17:50:35.789773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T17:50:35.789894Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:50:35.789920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:35.790017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:50:35.790060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 
2025-04-17T17:50:35.792988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-17T17:50:35.793124Z node 2 :FLAT_TX_SCHEMESH ... ount: 1 2025-04-17T17:52:10.986614Z node 32 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-17T17:52:10.986692Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:52:10.987027Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-04-17T17:52:10.987106Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494341393432047160:2430], row count: 0, finished: 0 2025-04-17T17:52:10.987299Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:52:10.987551Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-04-17T17:52:10.987625Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494341393432047160:2430], row count: 0, finished: 0 2025-04-17T17:52:10.987732Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:52:10.987942Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false 
ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:52:10.988011Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494341393432047160:2430], row count: 2, finished: 0 2025-04-17T17:52:10.988113Z node 32 :SYSTEM_VIEWS INFO: Scan finished, actor: [32:7494341393432047160:2430], owner: [32:7494341393432047157:2428], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:52:10.990319Z node 32 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [32:7494341350482372130:2186], database# , query hash# 3187945588805523718, cpu time# 165848 2025-04-17T17:52:10.991103Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912330981, txId: 281474976710687] shutting down 2025-04-17T17:52:11.084954Z node 36 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-04-17T17:52:11.084877Z 2025-04-17T17:52:11.160774Z node 32 :KQP_EXECUTER ERROR: TxId: 281474976710690. Ctx: { TraceId: 01js2e2c7qffa0sy4mcxd9kzcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=32&id=ZWYxMmUzNWYtYzc1Zjc0NzUtNDk3NGNhYWQtZTQ5MjBhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:11.162794Z node 32 :SYSTEM_VIEWS INFO: Scan started, actor: [32:7494341397727014494:2440], owner: [32:7494341397727014490:2438], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:52:11.163809Z node 32 :SYSTEM_VIEWS INFO: Scan prepared, actor: [32:7494341397727014494:2440], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:52:11.163880Z node 32 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-17T17:52:11.163987Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:52:11.164436Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-04-17T17:52:11.164510Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494341397727014494:2440], row 
count: 0, finished: 0 2025-04-17T17:52:11.164670Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:52:11.164985Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-04-17T17:52:11.165050Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494341397727014494:2440], row count: 0, finished: 0 2025-04-17T17:52:11.165277Z node 32 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:52:11.165533Z node 32 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:52:11.165647Z node 32 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [32:7494341397727014494:2440], row count: 1, finished: 0 2025-04-17T17:52:11.165790Z node 32 :SYSTEM_VIEWS INFO: Scan finished, actor: [32:7494341397727014494:2440], owner: [32:7494341397727014490:2438], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:52:11.168776Z node 32 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [32:7494341350482372130:2186], database# , query hash# 15123460272068726277, cpu time# 154464 2025-04-17T17:52:11.169460Z node 32 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912331157, txId: 281474976710689] shutting down 2025-04-17T17:52:11.176397Z node 36 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:11.179068Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-04-17T17:52:11.179936Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:52:11.180079Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-04-17T17:52:11.177366Z node 34 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2025-04-17T17:52:11.184130Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:52:11.185277Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 36 2025-04-17T17:52:11.186032Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:52:11.190489Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-04-17T17:52:11.191338Z node 32 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:52:11.193448Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7494341355921782537:2107], Type=268959746 2025-04-17T17:52:11.193507Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7494341355921782537:2107], Type=268959746 2025-04-17T17:52:11.193532Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7494341355921782537:2107], Type=268959746 2025-04-17T17:52:11.193556Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7494341355921782537:2107], Type=268959746 2025-04-17T17:52:11.193579Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7494341355921782537:2107], Type=268959746 2025-04-17T17:52:11.193605Z node 32 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7494341355921782537:2107], Type=268959746 >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61275, MsgBus: 18300 2025-04-17T17:52:09.354984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341390911058799:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:09.356392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016d2/r3tmp/tmpUcAKvD/pdisk_1.dat 2025-04-17T17:52:09.821391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:09.821485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:09.825185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61275, node 1 2025-04-17T17:52:09.874364Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:09.874385Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:09.911567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:09.919845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:09.919865Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:09.919874Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
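Note: the SystemView::AuthPermissions_Selects trace above shows the scan actor materializing the auth_permissions system view, with each Navigate/"Got navigate" pair corresponding to listing one schemeshard path. A minimal sketch of the kind of query that drives such a scan (assuming the documented `.sys` view name and its Path/Sid/Permission columns; the filter path is illustrative):

    -- Illustrative query against the auth_permissions system view.
    SELECT Path, Sid, Permission
    FROM `/Root/.sys/auth_permissions`
    WHERE Path = '/Root/Dir1/SubDir1';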
2025-04-17T17:52:09.919980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18300 TClient is connected to server localhost:18300 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:10.431985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:10.460998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:10.576964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:10.704862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:10.778658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:12.282704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341403795962307:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.282804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.575865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.607856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.646786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.681815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.715283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.792480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.863370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:12.937936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341403795962975:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.938008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.938143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341403795962980:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:12.941974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:12.951925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341403795962982:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:13.012206Z node 1 :TX_PROXY ERROR: Actor# [1:7494341408090930333:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:14.400630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341390911058799:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:14.400905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> YdbSdkSessionsPool::Get1Session >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:50:56.040467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:50:56.040548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.040631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:50:56.040667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:50:56.040708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:50:56.040749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:50:56.040812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:50:56.040871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:50:56.041195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:50:56.113692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:50:56.113742Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:56.127712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:50:56.128148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:50:56.128321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:50:56.141722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:50:56.141914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:50:56.142590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.142820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:50:56.145814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.147081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:56.147145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.147350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:50:56.147406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:56.147441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:50:56.147587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.153890Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:50:56.257980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:50:56.258166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.258367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:50:56.258561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:50:56.258597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.260823Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.260975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:50:56.261157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.261211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:50:56.261265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:50:56.261297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:50:56.262884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.262937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:50:56.262967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:50:56.264542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.264603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.264637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.264682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.267340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:56.269012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:50:56.269236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:50:56.270142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:50:56.270287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:50:56.270348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.270630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:50:56.270694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:50:56.271040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:50:56.271123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:50:56.275868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:56.275917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:50:56.276113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.276161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:50:56.276521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:50:56.276565Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:50:56.276654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:56.276681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.276733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:50:56.276773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.276809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:50:56.276860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:50:56.276888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:50:56.276913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:50:56.276974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:50:56.277010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:50:56.277040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:50:56.285751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:56.285893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:50:56.285952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-04-17T17:50:56.667515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-17T17:50:56.667551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-17T17:50:56.667589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-04-17T17:50:56.671316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409549, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "cloud_id" Value: "CLOUD_ID_VAL" } UserAttributes { Key: "database_id" Value: "DATABASE_ID_VAL" } UserAttributes { Key: "folder_id" Value: "FOLDER_ID_VAL" } UserAttributes { Key: "label_k" Value: "v" } UserAttributes { Key: "not_a_label_x" Value: "y" } UserAttributesVersion: 2 2025-04-17T17:50:56.671456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "cloud_id" Value: "CLOUD_ID_VAL" } UserAttributes { Key: "database_id" Value: "DATABASE_ID_VAL" } UserAttributes { Key: "folder_id" Value: "FOLDER_ID_VAL" } UserAttributes { Key: "label_k" Value: "v" } UserAttributes { Key: "not_a_label_x" Value: "y" } UserAttributesVersion: 2, at schemeshard: 72075186233409549 2025-04-17T17:50:56.671743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-17T17:50:56.671961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:50:56.672000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:50:56.672187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:50:56.672228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 FAKE_COORDINATOR: Erasing txId 105 2025-04-17T17:50:56.673103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T17:50:56.673223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T17:50:56.673263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T17:50:56.673299Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-04-17T17:50:56.673339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T17:50:56.673453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T17:50:56.676045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 3 TabletID: 
72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 2 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-17T17:50:56.676123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:50:56.676204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:563:2502], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:50:56.678681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-04-17T17:50:56.678722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 0, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-04-17T17:50:56.678890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-04-17T17:50:56.678936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:660:2572], at schemeshard: 72075186233409549, txId: 0, path id: 1 2025-04-17T17:50:56.679736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409549, cookie: 0 2025-04-17T17:50:56.679973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T17:50:56.680051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T17:50:56.680289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T17:50:56.680327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T17:50:56.680727Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T17:50:56.680830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:50:56.680867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:739:2631] TestWaitNotification: OK eventTxId 105 ... 
waiting for metering 2025-04-17T17:51:02.890296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:51:02.890377Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:02.943943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:51:02.944021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:02.988916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:51:02.988977Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:23.479570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:51:23.479889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: initiate at first time, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:01:00.000000Z, set LastBillTime: 1970-01-01T00:01:00.000000Z, next retry at: 1970-01-01T00:02:00.000000Z 2025-04-17T17:51:23.482911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:51:23.602669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-17T17:51:23.602877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:51:23.603050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:51:23.702581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-17T17:51:23.702712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-17T17:51:23.702801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-17T17:51:23.757457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-17T17:51:23.757575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-17T17:51:23.757640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-17T17:51:50.821371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:51:50.821515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2025-04-17T17:51:50.821594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:51:50.918772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-17T17:51:50.918892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T17:51:50.918969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T17:51:51.002916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-17T17:51:51.003036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 
72075186233409546 2025-04-17T17:51:51.003108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-17T17:51:51.068110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-17T17:51:51.068234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-17T17:51:51.068303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-17T17:52:16.935070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:16.935452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-120-179-0","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2025-04-17T17:52:16.944895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering ... waiting for metering (done) |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> YdbSdkSessionsPool::RunSmallPlan >> YdbSdkSessionsPool::PeriodicTask10 >> YdbSdkSessionsPool::StressTestAsync10 |77.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] >> YdbSdkSessionsPool::WaitQueue1 >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |77.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> KqpYql::TableUseBeforeCreate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 18413, MsgBus: 5770 2025-04-17T17:52:15.948645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341413540523300:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:15.948705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00146c/r3tmp/tmpMCCliK/pdisk_1.dat 2025-04-17T17:52:16.477429Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:16.502352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:16.502435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:16.506075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18413, node 1 2025-04-17T17:52:16.690157Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:16.690199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:16.690206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:16.690317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5770 TClient is connected to server localhost:5770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:17.424949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.446591Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:19.808914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341430720393151:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:19.809047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.161302Z node 1 :TX_PROXY ERROR: Actor# [1:7494341435015360471:2311] txid# 281474976710658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-04-17T17:52:20.189202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341435015360479:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.189286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.207199Z node 1 :TX_PROXY ERROR: Actor# [1:7494341435015360486:2319] txid# 281474976710659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-04-17T17:52:20.218614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341435015360494:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.218703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.238994Z node 1 :TX_PROXY ERROR: Actor# [1:7494341435015360501:2327] txid# 281474976710660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2025-04-17T17:52:20.276290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341435015360509:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.276356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.290515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.502614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341435015360597:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.502688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink >> SystemView::ShowCreateTableColumn [GOOD] >> SystemView::ShowCreateTableKeyBloomFilter >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn |77.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] >> LabeledDbCounters::OneTablet [GOOD] >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 26813, MsgBus: 3731 2025-04-17T17:52:15.755622Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341413356274262:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:15.755731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001466/r3tmp/tmp6rv1Yp/pdisk_1.dat 2025-04-17T17:52:16.240187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:16.269322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:16.269427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:16.270965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26813, node 1 2025-04-17T17:52:16.449886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:16.449910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:16.449917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:16.450053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3731 TClient is connected to server localhost:3731 WaitRootIsUp 'Root'... 
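
Editorial note on the KqpYql::UuidPrimaryKeyDisabled trace above: schemeshard rejects three CREATE TABLE attempts with "Uuid as primary key is forbiden by configuration" (typo verbatim from the log), once per attempted Uuid key column — "key", "key", then "val" — and the test still finishes [GOOD] because those rejections are the expected behaviour. When triaging a run like this it can help to confirm mechanically that every rejection named the expected column. A minimal sketch, assuming the captured output is available as a Python string; the regex and the helper name are illustrative, not part of the test harness.

import re

# Matches the schemeshard rejection from the UuidPrimaryKeyDisabled trace above.
# "forbiden" is reproduced verbatim from the log, typo included.
REJECT_RE = re.compile(r"Uuid as primary key is forbiden by configuration: (\w+)")

def rejected_columns(log_text: str) -> list[str]:
    """Return the key column named in each rejection, in log order."""
    return REJECT_RE.findall(log_text)

# Expected for the trace above (hypothetical `log_text` holding it):
# rejected_columns(log_text) == ["key", "key", "val"]
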
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:17.193388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.214695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:17.232953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.425524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.621079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.709608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:19.204366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341430536145211:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:19.204484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:19.629858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.767832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.821087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.875565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.954547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.069015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.112484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.202504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341434831113185:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.202662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.202899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341434831113190:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.206595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:20.221928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341434831113192:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:20.297150Z node 1 :TX_PROXY ERROR: Actor# [1:7494341434831113250:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:20.805923Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341413356274262:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:20.806391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch |77.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |77.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |77.9%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |77.9%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> GroupWriteTest::WriteHardRateDispatcher [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29907, MsgBus: 21427 2025-04-17T17:52:15.486813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341416235893592:2085];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016c6/r3tmp/tmp1TvZlH/pdisk_1.dat 2025-04-17T17:52:15.819410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:52:16.063571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:16.063655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:16.067971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:16.093005Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29907, node 1 2025-04-17T17:52:16.228989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:16.229021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:16.229033Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:16.229142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21427 TClient is connected to server localhost:21427 WaitRootIsUp 'Root'... 
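
Editorial note on the KqpYql::TableUseBeforeCreate output just above: it ends with a three-line YQL issue chain — the type-annotation error (code 1030), the failing callable (KiReadTable!), and the root cause (table 'db.[/Root/NewTable]' not found, code 2003), with ":3:13" giving the position in the query text. Issues print one per line with a leading ":", which makes them easy to pull out of a "Test command err" block; note that the "Resource pool default not found" issue lines share the same shape and would be collected too. A minimal sketch, assuming the block is available as a string; treating "any line starting with ':' and containing 'Error:'" as an issue line is an assumption for illustration.

def extract_yql_issues(err_block: str) -> list[str]:
    """Collect YQL issue lines (': Error: ...', ':3:13: Error: ...') from a 'Test command err' block."""
    return [
        line.strip()
        for line in err_block.splitlines()
        if line.lstrip().startswith(":") and "Error:" in line
    ]

# Applied to the TableUseBeforeCreate output above, the last entry carries the
# root cause: "Cannot find table 'db.[/Root/NewTable]' ... code: 2003".
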
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:16.879822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:52:16.925083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:52:17.262172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.670867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.837801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:20.117904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341437710731793:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.118072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.489847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341416235893592:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:20.489954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:20.506164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.605939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.691707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.736089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.792084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.848980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.928454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.031249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341442005699762:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:21.031346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:21.031768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341442005699767:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:21.035632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:21.049772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341442005699769:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:21.119025Z node 1 :TX_PROXY ERROR: Actor# [1:7494341442005699829:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpProxy::DatabasesCacheForServerless [GOOD] >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14603, MsgBus: 28952 2025-04-17T17:52:15.606913Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341415585606685:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:15.607601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016bf/r3tmp/tmppli5fI/pdisk_1.dat 2025-04-17T17:52:16.057010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:16.069745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:16.069857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:16.072107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14603, node 1 2025-04-17T17:52:16.155514Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:16.155537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:16.155545Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:16.155623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28952 TClient is connected to server localhost:28952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:16.885220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
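
Editorial note on the recurring startup pattern in the KqpQueryPerf traces in this section: a WARN "Resource pool default not found" while the default workload-manager pool does not yet exist, a scheduled retry on "Transaction ... completed, doublechecking", and a TX_PROXY ERROR "path exist, request accepts it" once a concurrent creator won the race. The tests whose traces show this pattern all finish [GOOD], so none of it indicates a failure here. When scanning a run for real errors it is useful to filter this noise first. A minimal sketch; the phrase list is taken from the lines above and is not an exhaustive catalogue.

# Substrings of the known-benign startup messages seen in the traces above;
# this list is illustrative, not an exhaustive catalogue.
BENIGN = (
    "Resource pool default not found or you don't have access permissions",
    "completed, doublechecking",
    "path exist, request accepts it",
)

def interesting_errors(log_text: str) -> list[str]:
    """Keep ERROR lines that do not match any benign startup pattern."""
    return [
        line
        for line in log_text.splitlines()
        if "ERROR" in line and not any(p in line for p in BENIGN)
    ]
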
2025-04-17T17:52:16.914229Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:16.924713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:52:17.162830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:52:17.416108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.546445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:19.944614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341432765477627:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:19.944725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.257486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.295060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.335118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.376937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.452700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.550094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.606812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341415585606685:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:20.606869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:20.672267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.959048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341437060445606:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.959148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.959571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341437060445612:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.964189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:20.981995Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:52:20.982336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341437060445614:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:21.043189Z node 1 :TX_PROXY ERROR: Actor# [1:7494341441355412965:3568] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> YdbSdkSessionsPool::StressTestAsync1 |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |77.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> YdbSdkSessionsPool::RunSmallPlan [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> YdbSdkSessionsPool::StressTestSync10 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2025-04-17T17:52:19.341717Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341432450791060:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:19.342000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0008cb/r3tmp/tmpu01QDK/pdisk_1.dat 2025-04-17T17:52:20.230204Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:20.275823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:20.275919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:20.284590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32685, node 1 2025-04-17T17:52:20.515061Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637704 Duration# 0.100636s 2025-04-17T17:52:20.515106Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.100710s 2025-04-17T17:52:20.674846Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:20.674869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:20.674875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:20.675010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8578 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:21.205980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:24.334309Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341432450791060:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:24.334431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:51:45.981106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:51:45.981258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:51:45.981299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:51:45.981356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:51:45.981405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:51:45.981436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:51:45.981503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:51:45.981586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:51:45.981951Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:46.067262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:51:46.067324Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:46.085773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:46.086479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:51:46.086680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:51:46.096161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:51:46.096391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:51:46.097055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:51:46.097289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:51:46.100323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:51:46.101426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:51:46.101486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:51:46.101618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:51:46.101666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:51:46.101702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:51:46.101786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.106971Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:51:46.268812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:51:46.269118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.270165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:51:46.270492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:51:46.270565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.273300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:51:46.273434Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:51:46.273619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.273682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:51:46.273709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:51:46.273757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:51:46.289442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.289543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:51:46.289588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:51:46.295257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.295339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.295401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:51:46.295472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:51:46.311484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:51:46.315341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:51:46.315579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:51:46.316691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:51:46.316849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:51:46.316928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:51:46.317286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:51:46.317363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:51:46.317599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:51:46.317714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:51:46.320876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:51:46.320932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:51:46.321176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:51:46.321225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:51:46.321631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:51:46.321682Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:51:46.321786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:51:46.321834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:51:46.321881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:51:46.321922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:51:46.321970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:51:46.322012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:51:46.322047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:51:46.322083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:51:46.322161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:51:46.322215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:51:46.322251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:51:46.324173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:51:46.324287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:51:46.324332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3329Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3
2025-04-17T17:52:19.373347Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3
2025-04-17T17:52:19.373367Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3
2025-04-17T17:52:19.373383Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3
2025-04-17T17:52:19.373407Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false
2025-04-17T17:52:19.373890Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-04-17T17:52:19.374034Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-04-17T17:52:19.374092Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657
2025-04-17T17:52:19.374149Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5
2025-04-17T17:52:19.374234Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4
2025-04-17T17:52:19.374765Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-04-17T17:52:19.374829Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-04-17T17:52:19.374855Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657
2025-04-17T17:52:19.374890Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5
2025-04-17T17:52:19.374921Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6
2025-04-17T17:52:19.374991Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true
2025-04-17T17:52:19.382429Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657
2025-04-17T17:52:19.382631Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657
2025-04-17T17:52:19.402753Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1791 } }
2025-04-17T17:52:19.402819Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1
2025-04-17T17:52:19.402967Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1791 } }
2025-04-17T17:52:19.403149Z node 19 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1791 } }
2025-04-17T17:52:19.404337Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 748 RawX2: 81604381261 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-04-17T17:52:19.404439Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1
2025-04-17T17:52:19.404676Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 748 RawX2: 81604381261 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-04-17T17:52:19.404794Z node 19 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546
2025-04-17T17:52:19.404947Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 748 RawX2: 81604381261 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-04-17T17:52:19.405050Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546
2025-04-17T17:52:19.405108Z node 19 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-04-17T17:52:19.405173Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546
2025-04-17T17:52:19.405242Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240
2025-04-17T17:52:19.415062Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-04-17T17:52:19.415714Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-04-17T17:52:19.416184Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-04-17T17:52:19.416259Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState
2025-04-17T17:52:19.416499Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3
2025-04-17T17:52:19.416574Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3
2025-04-17T17:52:19.416651Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3
2025-04-17T17:52:19.416718Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3
2025-04-17T17:52:19.416803Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true
2025-04-17T17:52:19.416873Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3
2025-04-17T17:52:19.416946Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0
2025-04-17T17:52:19.417003Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0
2025-04-17T17:52:19.417112Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3
2025-04-17T17:52:19.417160Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1
2025-04-17T17:52:19.417181Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1
2025-04-17T17:52:19.417277Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5
2025-04-17T17:52:19.417312Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2
2025-04-17T17:52:19.417333Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2
2025-04-17T17:52:19.417361Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4
2025-04-17T17:52:22.766867Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546
2025-04-17T17:52:22.767341Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 496us result status StatusNameConflict
2025-04-17T17:52:22.767619Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
2025-04-17T17:52:25.858929Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546
2025-04-17T17:52:25.859385Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 477us result status StatusNameConflict
2025-04-17T17:52:25.859610Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
>> YdbSdkSessionsPool::WaitQueue10
|78.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut
|78.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut
|78.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD]
Test command err: 2025-04-17T17:52:01.305914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341355618531083:2209];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:01.306605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:52:01.340863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341353169738694:2225];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:01.341125Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:52:01.367894Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341353578928043:2076];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:01.368193Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:52:01.388023Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341354142124916:2138];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:01.388079Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:52:01.441700Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494341357284485706:2072];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:01.450991Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00194c/r3tmp/tmpSFv9MY/pdisk_1.dat
2025-04-17T17:52:02.366726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:52:02.409359Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:52:02.413416Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:52:02.411616Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:52:02.499629Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:52:02.660473Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:02.755910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:02.756046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:02.756997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:02.757041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:02.760738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:02.760813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:02.765823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:02.765905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:02.766012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:02.766065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:02.775254Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-17T17:52:02.775459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:02.786632Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4
2025-04-17T17:52:02.787319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:02.788588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:02.789967Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3
2025-04-17T17:52:02.790001Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5
2025-04-17T17:52:02.790745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:02.795535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:24255
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-17T17:52:03.476789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:06.337095Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341355618531083:2209];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:06.339253Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341353169738694:2225];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:06.352580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:06.353059Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:06.370235Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494341353578928043:2076];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:06.391253Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494341354142124916:2138];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:06.401099Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:06.370315Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:06.421549Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494341357284485706:2072];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:06.421603Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:07.529005Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4
2025-04-17T17:52:07.538165Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty
2025-04-17T17:52:07.561021Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes.
2025-04-17T17:52:07.561058Z node 3 :KQP_PROXY DEBUG: Updated table service config.
2025-04-17T17:52:07.561080Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4
2025-04-17T17:52:07.561131Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty
2025-04-17T17:52:07.561196Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
2025-04-17T17:52:07.561223Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
2025-04-17T17:52:07.566701Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
2025-04-17T17:52:07.566748Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
2025-04-17T17:52:07.593676Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown
2025-04-17T17:52:07.593687Z node 3 :KQP_PROXY NOTICE: Table result_sets updater. Creating table
2025-04-17T17:52:07.593731Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets
2025-04-17T17:52:07.594010Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown
2025-04-17T17:52:07.594015Z node 3 :KQP_PROXY NOTICE: Table script_executions updater. Creating table
2025-04-17T17:52:07.594035Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions
2025-04-17T17:52:07.606556Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown
2025-04-17T17:52:07.606568Z node 3 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table
2025-04-17T17:52:07.606600Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases
2025-04-17T17:52:07.617457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72057594046644480
2025-04-17T17:52:07.619773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730658:0, at schemeshard: 72057594046644480
2025-04-17T ... essor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:52:15.919395Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:15.919499Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:15.924410Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8
2025-04-17T17:52:15.926170Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:15.986966Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987116Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987230Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987294Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987346Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987429Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987481Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987533Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:15.987583Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.032497Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:16.032618Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:16.043819Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:16.330690Z node 8 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:16.365807Z node 8 :STATISTICS WARN: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false
2025-04-17T17:52:16.627865Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-04-17T17:52:16.655663Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341417858062313:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:16.655733Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:52:16.699845Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:16.699959Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:16.726400Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7
2025-04-17T17:52:16.729055Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:16.874703Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:16.874817Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:16.908521Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.908711Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.908808Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.908897Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.908980Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.909073Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.909157Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.909233Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.909302Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-17T17:52:16.920546Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:52:17.038316Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494341400477877633:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:17.038411Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:17.171930Z node 7 :STATISTICS WARN: [72075186224038895] TTxInit::Complete. EnableColumnStatistics=false
2025-04-17T17:52:17.185207Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:17.440648Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T17:52:17.658863Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:17.801437Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7494341422153030567:2532], Database: /Root/test-serverless, Start database fetching
2025-04-17T17:52:17.801636Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7494341422153030567:2532], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1
2025-04-17T17:52:20.846466Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7494341413548758355:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:20.846551Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:20.903820Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization
2025-04-17T17:52:20.904413Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7494341435023595611:2347], Start check tables existence, number paths: 2
2025-04-17T17:52:20.904613Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes
2025-04-17T17:52:20.904628Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled
2025-04-17T17:52:20.922437Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3
2025-04-17T17:52:20.922561Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7494341435023595611:2347], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown
2025-04-17T17:52:20.922630Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7494341435023595611:2347], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown
2025-04-17T17:52:20.922666Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7494341435023595611:2347], Successfully finished
2025-04-17T17:52:20.922743Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0
2025-04-17T17:52:21.656493Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494341417858062313:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:21.656577Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:22.707726Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization
2025-04-17T17:52:22.707849Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7494341443627867191:2375], Start check tables existence, number paths: 2
2025-04-17T17:52:22.708797Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes
2025-04-17T17:52:22.708815Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled
2025-04-17T17:52:22.710899Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3
2025-04-17T17:52:22.714526Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7494341443627867191:2375], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown
2025-04-17T17:52:22.714634Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7494341443627867191:2375], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown
2025-04-17T17:52:22.714682Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7494341443627867191:2375], Successfully finished
2025-04-17T17:52:22.714736Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0
2025-04-17T17:52:22.821324Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8
2025-04-17T17:52:22.821846Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-17T17:52:22.823298Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7
2025-04-17T17:52:22.823582Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-17T17:52:22.825170Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTRmOTYzY2ItZmJkMjJiOGMtOTJhMjI0OWUtZDFjODJmOTM=, ActorId: [6:7494341413362780367:2334], ActorState: ReadyState, Session closed due to explicit close event
2025-04-17T17:52:22.825222Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTRmOTYzY2ItZmJkMjJiOGMtOTJhMjI0OWUtZDFjODJmOTM=, ActorId: [6:7494341413362780367:2334], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-04-17T17:52:22.825266Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTRmOTYzY2ItZmJkMjJiOGMtOTJhMjI0OWUtZDFjODJmOTM=, ActorId: [6:7494341413362780367:2334], ActorState: ReadyState, EndCleanup, isFinal: 1
2025-04-17T17:52:22.825300Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTRmOTYzY2ItZmJkMjJiOGMtOTJhMjI0OWUtZDFjODJmOTM=, ActorId: [6:7494341413362780367:2334], ActorState: unknown state, Cleanup temp tables: 0
2025-04-17T17:52:22.825381Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTRmOTYzY2ItZmJkMjJiOGMtOTJhMjI0OWUtZDFjODJmOTM=, ActorId: [6:7494341413362780367:2334], ActorState: unknown state, Session actor destroyed
>> YdbSdkSessionsPool::WaitQueue1 [GOOD]
|78.0%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD]
Test command err: RandomSeed# 15822234513273438229
2025-04-17T17:45:35.108596Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551}
2025-04-17T17:45:35.125446Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1}
2025-04-17T17:45:35.125504Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1}
2025-04-17T17:45:35.127698Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK}
2025-04-17T17:45:35.148573Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:45:35.150681Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK}
2025-04-17T17:47:43.238138Z 5 00h01m03.203651s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 3628
2025-04-17T17:49:31.440636Z 1 00h01m05.918701s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 6023
2025-04-17T17:49:39.395415Z 1 00h01m06.142471s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 13376
2025-04-17T17:50:37.681040Z 6 00h01m07.894718s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 24083
2025-04-17T17:52:22.094087Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die
2025-04-17T17:52:22.106066Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:52:22.106208Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die
2025-04-17T17:52:22.106259Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-04-17T17:52:22.433989Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK}
2025-04-17T17:52:22.434110Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK}
>> Cdc::Write[TopicRunner] [GOOD]
>> Cdc::UpdateStream
>> YdbSdkSessionsPool::StressTestSync1
>> KqpPg::CreateNotNullPgColumn [GOOD]
>> KqpPg::CreateSequence
>> YdbSdkSessionsPool::CustomPlan
>> TContinuousBackupTests::TakeIncrementalBackup
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD]
Test command err: 2025-04-17T17:52:20.784763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341435210798961:2211];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:20.784915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005f1/r3tmp/tmpcGLDkC/pdisk_1.dat
2025-04-17T17:52:21.369242Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:21.393569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:21.393661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:21.411874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6857, node 1
2025-04-17T17:52:21.678083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:52:21.678113Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:52:21.678122Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:52:21.678288Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9275
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:52:22.187086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:25.772591Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341435210798961:2211];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:25.772691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TContinuousBackupTests::Basic
>> Cdc::NaN[PqRunner] [GOOD]
>> Cdc::NaN[YdsRunner]
|78.0%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD]
>> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount
>> KqpYql::ColumnTypeMismatch [GOOD]
>> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD]
>> KqpTx::CommitRoTx
>> TSchemeShardUserAttrsTest::MkDir
>> KqpYql::AnsiIn [GOOD]
>> SystemView::AuthOwners_TableRange [GOOD]
>> SystemView::AuthPermissions
>> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD]
|78.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut
|78.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut
|78.0%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|78.0%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|78.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut
>> Cdc::Drop[PqRunner] [GOOD]
>> Cdc::Drop[YdsRunner]
>> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink
>> TContinuousBackupTests::Basic [GOOD]
>> TContinuousBackupTests::TakeIncrementalBackup [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD]
Test command err: Trying to start YDB, gRPC: 15142, MsgBus: 5248
2025-04-17T17:52:16.602631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341417535034464:2196];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:16.603050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001455/r3tmp/tmpGHhiZB/pdisk_1.dat
2025-04-17T17:52:17.214949Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:17.219524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:17.219602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:17.223519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15142, node 1
2025-04-17T17:52:17.531668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:52:17.531696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:52:17.531703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:52:17.531845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5248
TClient is connected to server localhost:5248
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:52:18.385319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:18.419445Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T17:52:18.433656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:18.680587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T17:52:18.891682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T17:52:18.994315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:21.165917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341439009872598:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:21.166052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:21.578352Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341417535034464:2196];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:21.598089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:21.724938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T17:52:21.773011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T17:52:21.818213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T17:52:21.861317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T17:52:21.906112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T17:52:21.968512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T17:52:22.024407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T17:52:22.133021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341443304840565:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:22.133119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:22.133485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341443304840570:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:22.137450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T17:52:22.153180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341443304840572:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T17:52:22.228401Z node 1 :TX_PROXY ERROR: Actor# [1:7494341443304840627:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value.
Trying to start YDB, gRPC: 9282, MsgBus: 2344
2025-04-17T17:52:24.863159Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341454421593891:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:24.863216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001455/r3tmp/tmpQ3fOZB/pdisk_1.dat
2025-04-17T17:52:25.212506Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:25.248028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:52:25.248114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:52:25.251421Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9282, node 2
2025-04-17T17:52:25.339642Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:52:25.339668Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:52:25.339676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:52:25.339789Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2344
TClient is connected to server localhost:2344
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:52:25.778198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:25.785230Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T17:52:25.795664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:25.881101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T17:52:26.082542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T17:52:26.185323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:52:28.776808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341471601464830:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:28.776906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:28.844903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T17:52:28.907593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T17:52:28.959229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T17:52:29.009903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T17:52:29.058385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T17:52:29.136920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T17:52:29.190141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T17:52:29.286655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341475896432795:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:29.286857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:29.287250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341475896432801:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:52:29.291473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T17:52:29.303437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341475896432803:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T17:52:29.362021Z node 2 :TX_PROXY ERROR: Actor# [2:7494341475896432859:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:52:29.870243Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341454421593891:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:52:29.870425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:52:30.933339Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494341480191400508:2521], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T17:52:30.933593Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWY1ZTFjYTctZDYwNTMwOS1iNWFhMWMyYi03ZTFkMDdkNA==, ActorId: [2:7494341480191400500:2516], ActorState: ExecuteState, TraceId: 01js2e2zny2hva9aexagh1fqw8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32406, MsgBus: 62887 2025-04-17T17:52:15.758204Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341414664958288:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:15.758253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016c0/r3tmp/tmpYiVrOm/pdisk_1.dat 2025-04-17T17:52:16.254171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:16.263320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:16.263395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:16.267481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32406, node 1 2025-04-17T17:52:16.370725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:16.370746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:16.370753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:16.370875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62887 TClient is connected to server localhost:62887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:16.972364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.013463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.203117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
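
Note: the GENERIC_ERROR reported just above is a YQL type-annotation failure: the query supplies rows typed Struct<'Key':Uint64,'Value':Uint64>, while the target table's columns are Uint64? and String?, and Uint64 has no implicit conversion to String?. A minimal sketch that reproduces the same class of diagnostic — the table name and literal values are assumptions reconstructed from the error text, not taken from the test source:

    -- Columns in YDB tables are nullable by default, so the scheme types
    -- below are effectively Uint64? and String?.
    CREATE TABLE `/Root/KeyValue` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );

    -- 'Value' receives a Uint64 literal where String? is expected; type
    -- annotation fails with "Failed to convert input columns types to
    -- scheme types, code: 2031", as in the log above.
    UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, 2u);
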
2025-04-17T17:52:17.406056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.498410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:19.093394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341431844829245:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:19.093546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:19.567094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.624262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.720278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.767758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.827039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.893852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:19.963106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.078672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341436139797215:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.078786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.079186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341436139797221:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.083220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:20.097747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341436139797223:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:20.165650Z node 1 :TX_PROXY ERROR: Actor# [1:7494341436139797279:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:20.762317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341414664958288:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:20.762383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17986, MsgBus: 61244 2025-04-17T17:52:23.187186Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341449467780207:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:23.187244Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016c0/r3tmp/tmpADFTgd/pdisk_1.dat 2025-04-17T17:52:23.358516Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:23.362531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:23.362591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:23.365730Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17986, node 2 2025-04-17T17:52:23.438727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:23.438745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:23.438750Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:23.438838Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61244 TClient is connected to server localhost:61244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
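
Note: the repeated "Resource pool default not found or you don't have access permissions" warnings recurring throughout this output are a startup race rather than test failures: the workload service queries the default resource pool before its auto-creation transaction has committed, schedules a retry ("Transaction ... completed, doublechecking"), and the subsequent TX_PROXY "path exist, request accepts it" message shows the creation racing a concurrent attempt and being accepted idempotently. For reference, a hedged sketch of an explicit pool definition; the pool name and settings are illustrative values, not ones read from the log:

    -- Creating a resource pool explicitly (hypothetical name and settings;
    -- -1 is commonly used to mean "unlimited").
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = -1,
        QUEUE_SIZE = -1
    );
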
2025-04-17T17:52:23.869521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:23.879351Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:52:23.896760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:23.992928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:24.296146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:24.480001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:27.828268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341466647651136:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:27.828400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:27.909745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.001847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.119753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.168840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.224277Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341449467780207:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:28.224357Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:28.256105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.355672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.448645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.635390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341470942619116:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.635510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.636014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341470942619121:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.640657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:28.663300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341470942619123:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:52:28.737921Z node 2 :TX_PROXY ERROR: Actor# [2:7494341470942619178:3561] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:52:31.521720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:31.521822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:31.521859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:31.521892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:31.521937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:31.521986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:31.522050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:31.522115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:31.522835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:31.618765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:31.618823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:31.643922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:31.644511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:31.644701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:31.656020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:31.656205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:31.656951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:31.657188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:31.660787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-04-17T17:52:31.662269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:31.662335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:31.662525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:31.662593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:31.662640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:31.662774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.670494Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:52:31.827385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:31.827657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.827878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:31.828105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:31.828163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.839112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:31.839272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:31.839464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.839523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:31.839557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:31.839590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:31.846064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.846159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:31.846213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:31.855398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.855547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.855589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:31.855636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.861337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:31.869311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:31.869561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:31.870686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:31.870873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:31.870932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:31.871235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:31.871292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:31.871477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:31.871589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:31.874302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:31.874376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:31.874585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:31.874641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:31.875038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.875093Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-17T17:52:31.875193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:31.875237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.875284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:31.875317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.875379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:31.875436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.875491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:31.875526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:31.875598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:31.875639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:31.875675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:31.877694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:31.877842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:31.877883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
04, ready parts: 2/3, is published: true 2025-04-17T17:52:32.689292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:32.689379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:32.689413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:52:32.689443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-17T17:52:32.708420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:52:32.708537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:52:32.711620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:52:32.711781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:52:32.711847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:52:32.711896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:52:32.711941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:52:32.729038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1036 } } 2025-04-17T17:52:32.729125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-17T17:52:32.729300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1036 } } 2025-04-17T17:52:32.729426Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1036 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:52:32.730912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T17:52:32.730985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 
72075186233409546, partId: 0 2025-04-17T17:52:32.731148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T17:52:32.731203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:52:32.731304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T17:52:32.731403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:32.731453Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:52:32.731494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:52:32.731535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-17T17:52:32.738731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:52:32.739453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:52:32.739872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:52:32.739928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:52:32.740048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-17T17:52:32.740091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:52:32.740137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-17T17:52:32.740178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:52:32.740226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-17T17:52:32.740315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 104 2025-04-17T17:52:32.740391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:52:32.740445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:52:32.740483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:52:32.740627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:52:32.740694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-17T17:52:32.740723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-17T17:52:32.740768Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:52:32.740797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-17T17:52:32.740819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-17T17:52:32.740891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:52:32.741394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:52:32.741451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T17:52:32.741559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:52:32.741611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:52:32.741646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:52:32.759373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:52:32.759462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:727:2642] 2025-04-17T17:52:32.759603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-17T17:52:32.760316Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:52:32.760602Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 294us result status StatusPathDoesNotExist 2025-04-17T17:52:32.760815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:52:32.761401Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:52:32.761655Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 215us result status StatusPathDoesNotExist 2025-04-17T17:52:32.761842Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 3451, MsgBus: 11989 2025-04-17T17:52:16.289665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341420688563372:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:16.291054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00145b/r3tmp/tmpAL9vv3/pdisk_1.dat 2025-04-17T17:52:16.942676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:16.955072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:16.955181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:16.959982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3451, node 1 2025-04-17T17:52:17.158689Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:17.158710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:17.158720Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:17.158817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11989 TClient is connected to server localhost:11989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:18.158021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:18.188331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:18.491073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:18.741898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:18.829725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:20.807064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341437868434189:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.807183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:21.102678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.190962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.249277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.291557Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341420688563372:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:21.291614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:21.345036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.404279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.459487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.512898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:21.584776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341442163402151:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:21.584864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:21.585198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341442163402156:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:21.593880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:21.625210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341442163402158:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:21.722027Z node 1 :TX_PROXY ERROR: Actor# [1:7494341442163402216:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:23.231042Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=NzMyN2M2ODAtYTU0ZjE0ODItMTY1ZjY5NDYtMzJhOTYxNjI=, ActorId: [1:7494341450753337147:2516], ActorState: ExecuteState, TraceId: 01js2e2r38c1femeztvjtsrxaz, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2025-04-17T17:52:23.231094Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzMyN2M2ODAtYTU0ZjE0ODItMTY1ZjY5NDYtMzJhOTYxNjI=, ActorId: [1:7494341450753337147:2516], ActorState: ExecuteState, TraceId: 01js2e2r38c1femeztvjtsrxaz, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer Trying to start YDB, gRPC: 18365, MsgBus: 16410 2025-04-17T17:52:24.449052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341454258419530:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:24.449195Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00145b/r3tmp/tmp3Gd36p/pdisk_1.dat 2025-04-17T17:52:24.737532Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:24.753495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:24.753575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:24.763516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18365, node 2 2025-04-17T17:52:24.960100Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:24.960122Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:24.960129Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:24.960226Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16410 TClient is connected to server localhost:16410 WaitRootIsUp 'Root'... 
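
Note: KqpYql::AnsiIn, reported [GOOD] above, exercises YQL's ANSI-compatible IN semantics. A plausible sketch of the behavior under test, inferred from the test name alone — the pragma is standard YQL, but the queries are assumptions, not the test's actual statements:

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    -- Under ANSI semantics, IN over a collection containing NULL yields
    -- NULL (unknown) rather than FALSE when no element matches.
    SELECT 1 IN (2, 3, NULL);   -- NULL, not FALSE
    SELECT 2 IN (2, 3, NULL);   -- TRUE
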
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:25.600652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:25.611887Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:25.637267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:25.733380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:26.066164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:26.159293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:28.890414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341471438290351:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.890516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.944571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.987969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:29.071698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:29.160412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:29.206965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:29.291243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:29.380166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:29.449999Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341454258419530:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:29.450145Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:29.520052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341475733258330:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:29.520147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:29.520397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341475733258335:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:29.525263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:29.552385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341475733258337:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:29.613719Z node 2 :TX_PROXY ERROR: Actor# [2:7494341475733258393:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardUserAttrsTest::MkDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:52:31.154473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:31.154588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:31.154632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:31.154669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:31.154717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:31.154777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:31.154854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:31.154919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:31.155304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:31.360857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:31.360918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:31.409767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:31.410392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:31.410557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:31.462832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:31.463045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:31.463777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:31.464052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:31.468053Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:31.469719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:31.469799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:31.470010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:31.470076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:31.470127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:31.470324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.478627Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:52:31.631254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:31.631586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.631843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:31.632102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:31.632167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.635594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:31.635780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:31.636003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.636067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:31.636105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:31.636138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:31.639257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.639345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:31.639382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:31.641985Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.642047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.642101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:31.642166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.651590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:31.655818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:31.656027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:31.657097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:31.657249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:31.657306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:31.657593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:31.657659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:31.657839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:31.657912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:31.661208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:31.661266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:31.661457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:31.661501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:31.661871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:31.661922Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:31.662028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:31.662061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.662115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:31.662147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.662236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:31.662328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:31.662371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:31.662405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:31.662482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:31.662520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:31.662550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:31.664523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:31.664677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:31.664725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
33409546, at schemeshard: 72057594046678944 2025-04-17T17:52:32.746246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-04-17T17:52:32.749615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-17T17:52:32.750126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-17T17:52:32.754919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-17T17:52:32.754990Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-04-17T17:52:32.755122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-17T17:52:32.755160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-17T17:52:32.755200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-17T17:52:32.755232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-17T17:52:32.755266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-04-17T17:52:32.755371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 103 2025-04-17T17:52:32.755434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-17T17:52:32.755486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:52:32.755525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:52:32.755629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:52:32.755684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-17T17:52:32.755708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-17T17:52:32.755868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:52:32.755902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-17T17:52:32.755944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-17T17:52:32.756000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T17:52:32.756031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-17T17:52:32.756052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-17T17:52:32.756130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T17:52:32.763583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:52:32.763658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:719:2623] TestWaitNotification: OK eventTxId 103 2025-04-17T17:52:32.764410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:52:32.764788Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 314us result status StatusSuccess 2025-04-17T17:52:32.765441Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:32.766191Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:52:32.766448Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 265us result status StatusSuccess 2025-04-17T17:52:32.767055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { 
Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:32.768330Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:52:32.768609Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 255us result status StatusSuccess 2025-04-17T17:52:32.769142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 3090, MsgBus: 6908 2025-04-17T17:52:16.043625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341421713697733:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:16.043680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001469/r3tmp/tmphBAyOb/pdisk_1.dat 2025-04-17T17:52:16.531226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:16.531321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:16.539543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:16.558423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3090, node 1 2025-04-17T17:52:16.798822Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:16.798847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:16.798858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:16.798959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6908 TClient is connected to server localhost:6908 WaitRootIsUp 'Root'... 
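[Editor's note] Stepping back to the TakeIncrementalBackup report that ends just above: the two DescribeScheme responses fully determine the schema of the private /MyRoot/IncrBackupImpl target table. The following YQL sketch reconstructs an equivalent table purely for illustration; it is an assumption, not the test's actual DDL. The real table is created internally by the ESchemeOpCreateTable suboperation, is read-only via ReplicationConfig, and the __ydb column prefix is reserved, so this exact statement may be rejected if run by hand.

    -- Reconstruction of the logged PathDescription (illustrative assumption only):
    CREATE TABLE IncrBackupImpl (
        key Uint64,                         -- KeyColumnNames: "key"
        value Uint64,
        __ydb_incrBackupImpl_deleted Bool,  -- deletion marker maintained by the offload path
        PRIMARY KEY (key)
    );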
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:17.481434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.494431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:17.504071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.728534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:17.896490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:18.027137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:19.863305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341434598601379:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:19.863419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.258472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.288195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.368258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.397728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.442134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.481588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.524874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:20.597876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341438893569336:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.598022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.598925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341438893569341:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:20.603682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:20.616461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341438893569343:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:20.683096Z node 1 :TX_PROXY ERROR: Actor# [1:7494341438893569398:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:21.043907Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341421713697733:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:21.043978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:21.914765Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341443188537044:2519], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2025-04-17T17:52:21.916219Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGIyZWVmOTUtZTVjOGRjOWEtM2I4N2VjNWMtNjI1MjY3N2U=, ActorId: [1:7494341443188537036:2514], ActorState: ExecuteState, TraceId: 01js2e2pt47cdvhv0bej6sj04p, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 63648, MsgBus: 1555 2025-04-17T17:52:23.014406Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341448273348063:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:23.014811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001469/r3tmp/tmpmSC0OL/pdisk_1.dat 2025-04-17T17:52:23.234368Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:23.263851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:23.263932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:23.271379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63648, node 2 2025-04-17T17:52:23.438974Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:23.439006Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:23.439014Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:23.439149Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1555 TClient is connected to server localhost:1555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:24.297017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
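[Editor's note] The query text that produced the GENERIC_ERROR above is not included in the log; only the diagnostic "Pragma auth not supported inside Kikimr query., code: 2016" is. A hypothetical minimal reproduction is sketched below. The pragma spelling and value are assumptions based on the generic YQL auth pragma, not taken from the test source.

    -- Assumed repro: any use of the auth pragma is rejected by KQP
    -- at pre-type-annotation (code 1020/2016, as logged above).
    PRAGMA auth = "default_credentials";  -- assumed form; not supported inside a Kikimr query
    SELECT 1;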
2025-04-17T17:52:24.311073Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:52:24.323444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:24.455553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:24.722610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:24.814824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:27.716233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341465453218882:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:27.716324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:27.790254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:27.854618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:27.927369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.009529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.021291Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341448273348063:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:28.021388Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:28.131597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.241685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.324742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:28.443916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341469748186856:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.444009Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.444263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341469748186861:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:28.448389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:28.464527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341469748186863:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:52:28.529613Z node 2 :TX_PROXY ERROR: Actor# [2:7494341469748186918:3557] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:30.736397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T17:52:31.598985Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912351630, txId: 281474976715674] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 32582, MsgBus: 26531 2025-04-17T17:51:28.904409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341214499114317:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:28.909062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014df/r3tmp/tmpCcapQb/pdisk_1.dat 2025-04-17T17:51:29.435387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:29.439305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:29.439404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:29.441729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32582, node 1 2025-04-17T17:51:29.571964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:29.571991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:29.572002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:29.572129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26531 TClient is connected to server localhost:26531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:30.333552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:33.058484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341235973951459:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.058607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.125690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:33.356602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341235973951566:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.356701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.367745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:51:33.456115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341235973951645:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.456177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.458371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341235973951650:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.474793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T17:51:33.505832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341235973951652:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T17:51:33.594785Z node 1 :TX_PROXY ERROR: Actor# [1:7494341235973951704:2450] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:33.910722Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341214499114317:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:33.910998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32546, MsgBus: 8543 2025-04-17T17:51:35.033830Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341245023962001:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:35.033904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014df/r3tmp/tmpwvtCsv/pdisk_1.dat 2025-04-17T17:51:35.154552Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32546, node 2 2025-04-17T17:51:35.183701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:35.183776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:35.185542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:35.262536Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:35.262555Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:35.262560Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:35.262643Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8543 TClient is connected to server localhost:8543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
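[Editor's note] The recurring triple seen above (TPoolFetcherActor NOT_FOUND, TPoolCreatorActor "Scheduled retry ... doublechecking", then TX_PROXY "path exist, request accepts it") is the workload service lazily creating the default resource pool on first use: the first queries race against that creation, and the final "path exist" message is benign because the create request explicitly accepts an already-existing pool. For contrast, a sketch of creating a pool explicitly follows; the statement and settings are assumptions based on current YDB workload-manager DDL, not something these tests run.

    -- Sketch only (assumed DDL); the tests rely on implicit creation of `default` instead.
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- cap on queries running in the pool at once
        QUEUE_SIZE = 100              -- cap on queries waiting for admission
    );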
2025-04-17T17:51:35.647191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:35.653592Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:51:37.995800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341253613897213:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:37.995880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341253613897227:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:37.995996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:38.008740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:38.044761Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T17:51:38.045173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341253613897251:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:51:38.105224Z node 2 :TX_PROXY ERROR: Actor# [2:7494341257908864600:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:38.143610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRP ... 1Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:17.349043Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:17.352957Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4286, node 10 2025-04-17T17:52:17.467812Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:17.467837Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:17.467848Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:17.467998Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28915 TClient is connected to server localhost:28915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:18.247929Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:18.257087Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:22.090501Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494341422357499308:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:22.090594Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:22.522294Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341443832336375:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:22.522425Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:22.522487Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341443832336391:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:22.528699Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:52:22.548315Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494341443832336404:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:52:22.648693Z node 10 :TX_PROXY ERROR: Actor# [10:7494341443832336455:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:22.698842Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7494341443832336464:2340], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-04-17T17:52:22.701545Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=OTU4MDU4NC1mMzI0MTg5LTUwYzhjMjhjLWQ1N2JhODdh, ActorId: [10:7494341443832336373:2331], ActorState: ExecuteState, TraceId: 01js2e2kb0f7atwhv6r4627gjc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 8359, MsgBus: 4416 2025-04-17T17:52:23.844652Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494341448608620158:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:23.844860Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014df/r3tmp/tmpbdFQzs/pdisk_1.dat 2025-04-17T17:52:24.041123Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:24.066083Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:24.066325Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:24.080023Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8359, node 11 2025-04-17T17:52:24.174944Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:24.174977Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:24.174990Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:24.175206Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4416 TClient is connected to server localhost:4416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:25.984346Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
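The GENERIC_ERROR above (repeated for node 11 below) is the expected outcome of a pg-syntax negative test: the KQP compiler rejects a column DEFAULT that cannot be parsed as the declared type, and PostgreSQL's "invalid input syntax for type integer" surfaces through mkql_terminator.cpp. A statement of the following shape reproduces it; this is a reconstruction from the error text, not the actual test body.

    -- Hypothetical reconstruction: the DEFAULT literal does not parse as int4.
    CREATE TABLE t (
        id int4 DEFAULT 'text',  -- "text" is not a valid integer literal
        PRIMARY KEY (id)
    );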
2025-04-17T17:52:28.845908Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494341448608620158:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:28.846008Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:31.506697Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341482968359189:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:31.506697Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341482968359178:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:31.506813Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:31.513711Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:52:31.546498Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494341482968359192:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:52:31.616765Z node 11 :TX_PROXY ERROR: Actor# [11:7494341482968359243:2349] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:31.711265Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7494341482968359252:2344], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-04-17T17:52:31.714207Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=NWQxYWFlMWQtMmVkNzRiZWQtMTc2M2ZjOTUtYjA1MTNjYg==, ActorId: [11:7494341482968359176:2335], ActorState: ExecuteState, TraceId: 01js2e2txvfvgs8mv2zh1n515a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" |78.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:52:33.296958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:33.297114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:33.297174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:33.297227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:33.297288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:33.297339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:33.297427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:33.297554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:33.298033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:33.575442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:33.575512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:33.639615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:33.639783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:33.639935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:33.674828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:33.675326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:33.681012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:33.682067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:33.693907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:33.695494Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:33.695580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:33.695833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:33.701388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:33.701533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:33.701860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.727789Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:52:33.947216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:33.947565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.947811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:33.948090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:33.948154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.955331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:33.955547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:33.955791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.955848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:33.955897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:33.955945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:33.963077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.963160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:33.963210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:33.965900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.966000Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.966071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:33.966131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:33.971127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:33.975769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:33.976076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:33.977182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:33.977367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:33.977426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:33.977756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:33.977820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:33.978022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:33.978133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:33.984021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:33.984093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:33.984353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:33.984422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:33.984793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:33.984848Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:33.984965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#1:0 progress is 1/1 2025-04-17T17:52:33.985011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:33.985054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:33.985111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:33.985156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:33.985207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:33.985266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:33.985302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:33.985381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:33.985426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:33.985462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:33.987869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:33.988026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:33.988072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
.161351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:52:34.162001Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T17:52:34.162149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T17:52:34.162212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:375:2366] 2025-04-17T17:52:34.162493Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:52:34.162600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:52:34.162635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:52:34.162662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:375:2366] 2025-04-17T17:52:34.162832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:52:34.162906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:52:34.162968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:375:2366] 2025-04-17T17:52:34.163138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:52:34.163161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:375:2366] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-17T17:52:34.163833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:34.164062Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 214us result status StatusSuccess 2025-04-17T17:52:34.164602Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:34.165253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:34.165421Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 174us result status StatusSuccess 2025-04-17T17:52:34.165782Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:34.168670Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:34.168909Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 317us result status StatusSuccess 2025-04-17T17:52:34.169287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 
5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:34.169968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:34.170155Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 183us result status StatusSuccess 2025-04-17T17:52:34.170567Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:34.171378Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:34.171633Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 205us result status StatusSuccess 
2025-04-17T17:52:34.171947Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SystemView::ShowCreateTableKeyBloomFilter [FAIL] >> SystemView::ShowCreateTable >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> YdbSdkSessionsPool::WaitQueue10 [GOOD] |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |78.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.1%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> DataStreams::TestUpdateStorage >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly >> TSyncBrokerTests::ShouldProcessAfterRelease >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink >> KqpPg::CreateSequence [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> KqpPg::AlterSequence >> TSyncBrokerTests::ShouldReleaseInQueue >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] >> DataStreams::TestNonChargeableUser >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |78.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2025-04-17T17:52:36.963564Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-17T17:52:36.963741Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-04-17T17:52:36.963793Z node 1 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token released, active: 1, waiting: 1 2025-04-17T17:52:36.963840Z node 1 :BS_SYNCER DEBUG: ProcessQueue(), VDisk actor id: [0:1:2], actor id: [1:6:2053], token sent, active: 0, waiting: 1 2025-04-17T17:52:37.056864Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-04-17T17:52:37.057026Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-04-17T17:52:37.057087Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], removed from queue, active: 1, waiting: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2025-04-17T17:52:28.951193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341471162633630:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:28.951250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004ff/r3tmp/tmpL1WNrm/pdisk_1.dat 2025-04-17T17:52:29.864176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:29.864274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:29.879617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:29.886959Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17275, node 1 2025-04-17T17:52:30.344946Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:30.344971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:30.344978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:30.345119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:31.021946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:33.951968Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341471162633630:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:33.952069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> SystemView::AuthGroupMembers_Access [GOOD] >> SystemView::AuthGroupMembers_ResultOrder >> TSyncNeighborsTests::SerDes3 [GOOD] >> TTxLocatorTest::Boot |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> TTxLocatorTest::Boot [GOOD] >> TTxLocatorTest::TestZeroRange >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2025-04-17T17:52:40.359362Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:52:40.359890Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:52:40.360670Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:52:40.362535Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.363118Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:52:40.375102Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.375295Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.375430Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:52:40.375611Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.375704Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.375806Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:52:40.375971Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> TTxLocatorTest::TestZeroRange [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitRoTx_TLI >> YdbSdkSessionsPool::StressTestSync10 [GOOD] >> SlowTopicAutopartitioning::CDC_Write [GOOD] |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |78.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-04-17T17:52:40.969720Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:52:40.970251Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:52:40.970960Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:52:40.972675Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.973232Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:52:40.984701Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.984820Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.984923Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:52:40.985079Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.985160Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.985252Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:52:40.985406Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T17:52:40.986069Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#0 2025-04-17T17:52:40.986590Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.986687Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:40.986767Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-04-17T17:52:40.986802Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 0 expected SUCCESS |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |78.2%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> TTxLocatorTest::TestImposibleSize |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |78.2%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD] Test command err: 2025-04-17T17:52:27.662706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341465009945750:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:27.662779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00055f/r3tmp/tmpR2Fjs6/pdisk_1.dat 
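An aside on the TTxLocatorTest output above: the tx allocator answers each TEvAllocate with a half-open range [from, to) persisted by a TTxReserve, and a zero-length request ("requested range size#0") legally yields the empty range from# 0 to# 0. The TTxLocatorTest::TestImposibleSize run further down fits a 2^48 tx-id space: 281474976710656 = 2^48 is refused with nothing reserved, 281474976587200 = 2^48 - 123456 is refused after 123456 ids were granted, and 281474976340288 = 2^48 - 370368 is refused after 123456 + 246912 = 370368 ids were granted, while the two small requests succeed. Refusing even exactly-remaining-size requests suggests the usable space is at most 2^48 - 1; that reading is an inference from the numbers in this log, not a documented limit.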
2025-04-17T17:52:28.801745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:28.804751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:28.808621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:28.808749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:28.819172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20136, node 1 2025-04-17T17:52:29.058796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:29.058825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:29.058832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:29.058944Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:29.845606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:32.694870Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341465009945750:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:32.694968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> YdbSdkSessionsPool::CustomPlan [GOOD] >> YdbSdkSessionsPool::FailTest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> TTxLocatorTest::TestImposibleSize [GOOD] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-04-17T17:52:42.610168Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:52:42.610954Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:52:42.611784Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:52:42.613602Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.614115Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:52:42.624912Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.625038Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.625150Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:52:42.625295Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.625394Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.625492Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:52:42.625629Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T17:52:42.626355Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710656 2025-04-17T17:52:42.626527Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-04-17T17:52:42.629982Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-17T17:52:42.630736Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2108] requested range size#123456 2025-04-17T17:52:42.631272Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.631339Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.631482Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-04-17T17:52:42.631531Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2108] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-04-17T17:52:42.631906Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2112] requested range size#281474976587200 2025-04-17T17:52:42.632032Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2025-04-17T17:52:42.632065Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:78:2112] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-17T17:52:42.632470Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#246912 2025-04-17T17:52:42.632892Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.632944Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:42.633055Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-04-17T17:52:42.633088Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-04-17T17:52:42.633479Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#281474976340288 2025-04-17T17:52:42.633567Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-04-17T17:52:42.633598Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |78.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:51:14.424760Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:14.424870Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:51:14.447090Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:14.468258Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:51:14.469817Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:51:14.472518Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:51:14.474666Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-17T17:51:14.476412Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:51:14.483635Z node 1 :PERSQUEUE INFO: new Cookie default|ae4442b0-45c89dc4-89133689-b2a1689_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:14.489345Z node 1 :PERSQUEUE INFO: new Cookie default|c14dbd03-1f7cf160-f0d0b9bc-d02ab894_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:14.514344Z node 1 :PERSQUEUE INFO: new Cookie default|20514907-76f794ae-dc012dbe-29fb1e00_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:14.522687Z node 1 :PERSQUEUE INFO: new Cookie default|b3bd0b18-128ad02d-431648e6-8d6c12b5_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:14.539272Z node 1 :PERSQUEUE INFO: new Cookie default|68fcf5d5-9dd642e2-ddac98b-3431dfe3_4 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:14.554327Z node 1 :PERSQUEUE INFO: new Cookie default|b835684a-3f56e980-df0f9570-b0c3bb7f_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:51:15.108088Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:15.108209Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:182:2057] recipient: [2:181:2193] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] 2025-04-17T17:51:15.156818Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:15.156900Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
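Aside from the captured output: the PartitionConfig dump repeated in every "Config applied" record above is easier to read once you notice the byte counts are plain MiB multiples. A minimal self-contained check of those conversions (illustrative only, not YDB code):

```cpp
#include <cassert>
#include <cstdint>

int main() {
    const uint64_t MiB = 1ULL << 20;
    assert(10485760ULL  == 10 * MiB);    // CacheSize: 10 MiB
    assert(104857600ULL == 100 * MiB);   // MaxSizeInPartition: 100 MiB
    assert(6291456ULL   == 6 * MiB);     // LowWatermark: 6 MiB
    // SourceIdLifetimeSeconds: 3600 is one hour; SourceIdMaxCounts: 3 retained source ids.
    return 0;
}
```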
new actor is[2:184:2194] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:258:2057] recipient: [2:14:2061] 2025-04-17T17:51:16.989879Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:16.990946Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:51:16.991923Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:264:2256] 2025-04-17T17:51:17.002691Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:264:2256] 2025-04-17T17:51:17.004953Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:265:2257] 2025-04-17T17:51:17.006984Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:265:2257] 2025-04-17T17:51:17.024441Z node 2 :PERSQUEUE INFO: new Cookie default|9aa88ce-5a0ec986-b33e6439-e2b5172b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:17.035090Z node 2 :PERSQUEUE INFO: new Cookie default|aa05d140-8157eba7-2d587c61-d5504d6a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:17.091530Z node 2 :PERSQUEUE INFO: new Cookie default|63c55d6a-28a4d29e-1bff9d89-6a7c7252_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:17.106526Z node 2 :PERSQUEUE INFO: new Cookie default|9f9a0278-a245f35c-399d647c-b4d5a7e0_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:17.136075Z node 2 :PERSQUEUE INFO: new Cookie default|de2b909d-a447f33c-342b6f50-552624e6_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:51:17.171227Z node 2 :PERSQUEUE INFO: new Cookie default|de6fc9d-317b711a-36a4dc4d-5c17d626_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:51:17.843626Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:17.843711Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] 
Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-04-17T17:51:17.899779Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:17.899846Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-04-17T17:51:19.719773Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:19.720574Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-17T17:51:19.721487Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-04-17T17:51:19.723817Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-04-17T17:51:19.730150Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-04-17T17:51:19.732220Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-04-17T17:51:19.749081Z node 3 :PERSQUEUE INFO: new Cookie default|acf24344-1b807fce-e22cda85-8 ... 
9.509283Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 47 actor [47:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 47 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 47 Important: false } 2025-04-17T17:52:39.510538Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:185:2198] 2025-04-17T17:52:39.513652Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [47:185:2198] 2025-04-17T17:52:39.515909Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:186:2199] 2025-04-17T17:52:39.518705Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [47:186:2199] 2025-04-17T17:52:39.531368Z node 47 :PERSQUEUE INFO: new Cookie default|30b1c1dc-7ee9bb7e-a892a9ff-f271e60_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:39.538855Z node 47 :PERSQUEUE INFO: new Cookie default|7675debf-db8fe4da-19476e71-1363add4_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:39.585348Z node 47 :PERSQUEUE INFO: new Cookie default|fe156702-5143fa32-8423bbd3-aa8fbbd_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:39.598730Z node 47 :PERSQUEUE INFO: new Cookie default|b291b2e7-8625438-ae7cb315-448729cb_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:39.612284Z node 47 :PERSQUEUE INFO: new Cookie default|cd8be0a9-2e768a70-5430e0f7-d8675a2a_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:39.624864Z node 47 :PERSQUEUE INFO: new Cookie default|f3c1c7a-d2fd363e-8f206928-6f379047_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [47:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:284:2057] recipient: [47:99:2134] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:286:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:288:2057] recipient: [47:287:2281] Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:290:2057] recipient: [47:287:2281] 2025-04-17T17:52:39.700155Z node 47 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:39.700238Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:52:39.701592Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:338:2323] 2025-04-17T17:52:39.705089Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:339:2324] 2025-04-17T17:52:39.720428Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:52:39.720527Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [47:339:2324] 2025-04-17T17:52:39.724135Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:52:39.724227Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [47:338:2323] !Reboot 72057594037927937 (actor [47:107:2139]) rebooted! !Reboot 72057594037927937 (actor [47:107:2139]) tablet resolver refreshed! 
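Aside from the captured output: every reboot cycle in this test shows the same observable pattern — the harness injects "!Reboot" on a chosen event, a replacement actor takes over, and each partition re-initializes with its generation incremented while the applied config version is preserved. A toy model of just that invariant, assuming the generation counter simply increments once per reboot (the types below are hypothetical, not the YDB test harness):

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

struct Partition { uint32_t Id; uint32_t Generation; };

struct Tablet {
    uint64_t TabletId;
    uint32_t ConfigVersion = 0;
    std::vector<Partition> Partitions;

    void ApplyConfig(uint32_t version, uint32_t partitionCount) {
        ConfigVersion = version;
        Partitions.clear();
        for (uint32_t i = 0; i < partitionCount; ++i)
            Partitions.push_back({i, 2});      // first boot logs "generation 2"
    }
    void Reboot() {                            // "!Reboot ... rebooted!"
        for (auto& p : Partitions)
            ++p.Generation;                    // then "init complete ... generation 3"
    }
};

int main() {
    Tablet t{72057594037927937ULL};
    t.ApplyConfig(/*version=*/2, /*partitionCount=*/2);
    t.Reboot();
    for (const auto& p : t.Partitions)
        assert(p.Generation == 3);             // matches the post-reboot init records
    assert(t.ConfigVersion == 2);              // config survives the reboot
    return 0;
}
```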
new actor is[47:289:2282] Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:386:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:108:2057] recipient: [48:101:2135] 2025-04-17T17:52:41.652790Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:41.652877Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927938 is [48:153:2174] sender: [48:154:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:179:2057] recipient: [48:14:2061] 2025-04-17T17:52:41.679383Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:41.683329Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2025-04-17T17:52:41.684476Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:185:2198] 2025-04-17T17:52:41.687806Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:185:2198] 2025-04-17T17:52:41.689943Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:186:2199] 2025-04-17T17:52:41.692307Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:186:2199] 2025-04-17T17:52:41.705081Z node 48 :PERSQUEUE INFO: new Cookie default|ede6dbb3-8a4c9acb-4da449ae-396b7334_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:41.714336Z node 48 :PERSQUEUE INFO: new Cookie default|3a9a3bf-246fa6e3-a4229449-358649fd_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:41.751365Z node 48 :PERSQUEUE INFO: new Cookie default|fba9ccaf-d22c8b5d-3cf046c8-f48aac41_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:41.765121Z node 48 :PERSQUEUE INFO: new Cookie default|8555aadf-138e33bf-f22eb4cb-cde7304a_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:41.780493Z node 48 :PERSQUEUE INFO: new Cookie default|3c33c62b-d67504c3-73805020-204e28c0_4 generated 
for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:41.799554Z node 48 :PERSQUEUE INFO: new Cookie default|c9467c37-158197bc-71df0b32-80d4e3bd_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:108:2057] recipient: [49:101:2135] 2025-04-17T17:52:42.394137Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:42.394243Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927938 is [49:153:2174] sender: [49:154:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:177:2057] recipient: [49:14:2061] 2025-04-17T17:52:42.415915Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:52:42.416804Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2025-04-17T17:52:42.417734Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:183:2196] 2025-04-17T17:52:42.420864Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:183:2196] 2025-04-17T17:52:42.423050Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:184:2197] 2025-04-17T17:52:42.425365Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:184:2197] 2025-04-17T17:52:42.435858Z node 49 :PERSQUEUE INFO: new Cookie default|bdb8c6dd-1fd8c3c9-b097f2cf-e207f181_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:42.442440Z node 49 :PERSQUEUE INFO: new Cookie default|da75ce32-1c6463f2-18cea866-f8ea15dd_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:42.484998Z node 49 :PERSQUEUE INFO: new Cookie default|1d77a0d-88f378d3-c3a37b52-373e1570_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:42.500054Z node 49 :PERSQUEUE INFO: new Cookie default|4379cfd9-6fe7682-4b4bea7b-52c36b08_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2025-04-17T17:52:42.511552Z node 49 :PERSQUEUE INFO: new Cookie default|9c4d79a3-f3fb54f5-93c24492-9e7f7f7e_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:52:42.537197Z node 49 :PERSQUEUE INFO: new Cookie default|b4289167-5daf4e22-986c2efe-e3e376bc_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> TTxLocatorTest::TestAllocateAllByPieces ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-04-17T17:52:44.029276Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:52:44.029770Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:52:44.030546Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:52:44.032291Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.032873Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:52:44.044470Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.044614Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.044773Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:52:44.044938Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.045022Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.045140Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:52:44.045304Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T17:52:44.046995Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-04-17T17:52:44.047502Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-04-17T17:52:44.047859Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-04-17T17:52:44.048264Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-04-17T17:52:44.048909Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-04-17T17:52:44.049230Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.049377Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-04-17T17:52:44.049560Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.049887Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.049975Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.050079Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-04-17T17:52:44.050327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.050385Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-04-17T17:52:44.050598Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.050863Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.050941Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.051025Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-04-17T17:52:44.051216Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-04-17T17:52:44.051422Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.051555Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-17T17:52:44.051601Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-04-17T17:52:44.051769Z node 1 
:TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.051863Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.051995Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-17T17:52:44.052026Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 100000 to# 200000 2025-04-17T17:52:44.052148Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-17T17:52:44.052175Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 200000 to# 300000 2025-04-17T17:52:44.052273Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.052335Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.052436Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.052522Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-17T17:52:44.052579Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 300000 to# 400000 2025-04-17T17:52:44.052713Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.052781Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-04-17T17:52:44.052805Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 400000 to# 500000 2025-04-17T17:52:44.052909Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.052962Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-17T17:52:44.052984Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 500000 to# 600000 2025-04-17T17:52:44.053115Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.053151Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-17T17:52:44.053174Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 600000 to# 700000 2025-04-17T17:52:44.053301Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.053367Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-17T17:52:44.053388Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 700000 to# 800000 2025-04-17T17:52:44.053444Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-17T17:52:44.053465Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 800000 to# 900000 2025-04-17T17:52:44.053581Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.053688Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.053790Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-17T17:52:44.053825Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-17T17:52:44.061886Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#100000 2025-04-17T17:52:44.062449Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#100000 2025-04-17T17:52:44.063029Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2025-04-17T17:52:44.063217Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.063384Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.063563Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#100000 2025-04-17T17:52:44.063864Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:119:2153] requested range size#100000 2025-04-17T17:52:44.064077Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.064306Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:121:2155] requested range size#100000 2025-04-17T17:52:44.064560Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.064827Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [720575940464476 ... 
0} 2025-04-17T17:52:44.133973Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.134133Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2025-04-17T17:52:44.134162Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8300000 to# 8400000 2025-04-17T17:52:44.134327Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2025-04-17T17:52:44.134368Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8400000 to# 8500000 2025-04-17T17:52:44.134458Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.134604Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.134661Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2025-04-17T17:52:44.134683Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8500000 to# 8600000 2025-04-17T17:52:44.134826Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.134915Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2025-04-17T17:52:44.134943Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8600000 to# 8700000 2025-04-17T17:52:44.135031Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.135088Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-04-17T17:52:44.135117Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8700000 to# 8800000 2025-04-17T17:52:44.135192Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.135246Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-04-17T17:52:44.135278Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8800000 to# 8900000 2025-04-17T17:52:44.135408Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-04-17T17:52:44.135437Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:409:2443] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-17T17:52:44.139769Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2025-04-17T17:52:44.140264Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2025-04-17T17:52:44.140750Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2025-04-17T17:52:44.141112Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.141292Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2025-04-17T17:52:44.141426Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.141779Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.141896Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2025-04-17T17:52:44.142040Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.142231Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2025-04-17T17:52:44.142374Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.142663Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.142773Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2025-04-17T17:52:44.142990Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.143242Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2025-04-17T17:52:44.143409Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.143595Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.143737Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2025-04-17T17:52:44.143860Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.144092Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.144241Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2025-04-17T17:52:44.144391Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult 
{Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.144573Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-04-17T17:52:44.144608Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9000000 to# 9100000 2025-04-17T17:52:44.144708Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.144893Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-17T17:52:44.144938Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9100000 to# 9200000 2025-04-17T17:52:44.144997Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.145135Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-17T17:52:44.145163Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9200000 to# 9300000 2025-04-17T17:52:44.145237Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.145377Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-17T17:52:44.145405Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9300000 to# 9400000 2025-04-17T17:52:44.145472Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.145595Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-17T17:52:44.145619Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9400000 to# 9500000 2025-04-17T17:52:44.145764Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-04-17T17:52:44.145805Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9500000 to# 9600000 2025-04-17T17:52:44.145860Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.145974Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-17T17:52:44.146000Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9600000 to# 9700000 2025-04-17T17:52:44.146048Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.146086Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.146215Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-17T17:52:44.146256Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9700000 to# 9800000 2025-04-17T17:52:44.146315Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:44.146396Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-17T17:52:44.146435Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9800000 to# 9900000 2025-04-17T17:52:44.146543Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-17T17:52:44.146570Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] Test command err: 2025-04-17T17:52:30.487092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341480448579291:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:30.491186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004d0/r3tmp/tmpffx7nZ/pdisk_1.dat 2025-04-17T17:52:31.356352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:31.356459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:31.372336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:31.406577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10821, node 1 2025-04-17T17:52:31.642547Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:31.647640Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:31.650465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:31.738457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:31.738485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:31.738492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:31.738613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23450 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:32.147070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:35.458607Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341480448579291:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:35.458671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-04-17T17:52:45.134118Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T17:52:45.134630Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T17:52:45.135417Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T17:52:45.137263Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.137817Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T17:52:45.165367Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.165493Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.165617Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T17:52:45.165786Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.165884Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.166001Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T17:52:45.166150Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T17:52:45.166876Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#8796093022207 2025-04-17T17:52:45.167494Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.167593Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.167703Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-04-17T17:52:45.167751Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-04-17T17:52:45.171775Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#8796093022207 2025-04-17T17:52:45.172313Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.172411Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.172509Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-04-17T17:52:45.172571Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-04-17T17:52:45.173023Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#8796093022207 2025-04-17T17:52:45.173449Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
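Aside from the captured output: both allocator tests in this section exercise the same reservation rule — the tablet keeps a monotonically advancing cursor, each TEvAllocate request is granted a contiguous half-open range [from, to), and a request that does not fit in the remaining capacity is answered with status IMPOSIBLE (spelling as emitted by the component) while the cursor stays put. The rejected sizes earlier (281474976710656 with nothing reserved, then exactly remaining + 1 after 123456 and after 370368 had been taken) are consistent with a total capacity of 2^48 − 1, though that bound is inferred from the log, not confirmed. A minimal sketch under those assumptions (names are illustrative, not the real tablet code):

```cpp
#include <cstdint>
#include <iostream>
#include <optional>

struct TRange { uint64_t From; uint64_t To; };   // half-open [From, To)

class TTxRangeAllocator {
    uint64_t Cursor = 0;
    uint64_t Capacity;                           // assumed bound, inferred as 2^48 - 1
public:
    explicit TTxRangeAllocator(uint64_t capacity) : Capacity(capacity) {}

    std::optional<TRange> Allocate(uint64_t size) {
        if (size == 0 || size > Capacity - Cursor)
            return std::nullopt;                 // -> "TEvAllocateResult status# IMPOSIBLE"
        TRange r{Cursor, Cursor + size};
        Cursor = r.To;                           // "Reserved from# X Reserved to# X+size"
        return r;
    }
};

int main() {
    TTxRangeAllocator alloc((1ULL << 48) - 1);
    std::cout << alloc.Allocate(281474976710656ULL).has_value() << "\n"; // 0: rejected
    auto r = alloc.Allocate(123456);             // "from# 0 to# 123456"
    std::cout << r->From << ".." << r->To << "\n";
    r = alloc.Allocate(246912);                  // "from# 123456 to# 370368"
    std::cout << r->From << ".." << r->To << "\n";
    return 0;
}
```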
2025-04-17T17:52:45.173525Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.173625Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-04-17T17:52:45.173666Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-04-17T17:52:45.174100Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#8796093022207 2025-04-17T17:52:45.174487Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.174557Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.174669Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-04-17T17:52:45.174712Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2025-04-17T17:52:45.175126Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#8796093022207 2025-04-17T17:52:45.175485Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.175556Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.175638Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-04-17T17:52:45.175675Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-04-17T17:52:45.176080Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:91:2125] requested range size#8796093022207 2025-04-17T17:52:45.176478Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.176575Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.176656Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-04-17T17:52:45.176699Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:91:2125] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-04-17T17:52:45.177139Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:95:2129] requested range size#8796093022207 2025-04-17T17:52:45.177495Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-04-17T17:52:45.177549Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.177653Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-04-17T17:52:45.177704Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:95:2129] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-04-17T17:52:45.178231Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:99:2133] requested range size#8796093022207 2025-04-17T17:52:45.178570Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.178639Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.178723Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-04-17T17:52:45.178769Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:99:2133] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2025-04-17T17:52:45.179252Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:103:2137] requested range size#8796093022207 2025-04-17T17:52:45.179602Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.179652Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.179722Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-04-17T17:52:45.179761Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:103:2137] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-04-17T17:52:45.180262Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:107:2141] requested range size#8796093022207 2025-04-17T17:52:45.180614Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.180714Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.180807Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2025-04-17T17:52:45.180845Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:107:2141] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2025-04-17T17:52:45.181391Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#8796093022207 2025-04-17T17:52:45.181699Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
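Aside from the captured output: in TestAllocateAllByPieces every request uses the same size, 8796093022207 = 2^43 − 1, so the k-th "Reserved to#" boundary in this run is exactly k · (2^43 − 1). A quick check against the values printed above:

```cpp
#include <cassert>
#include <cstdint>

int main() {
    const uint64_t Piece = (1ULL << 43) - 1;     // 8796093022207, the requested range size
    assert(Piece == 8796093022207ULL);
    assert( 2 * Piece == 17592186044414ULL);     // "Reserved to# 17592186044414"
    assert( 4 * Piece == 35184372088828ULL);     // "Reserved to# 35184372088828"
    assert( 9 * Piece == 79164837199863ULL);     // "Reserved to# 79164837199863"
    assert(21 * Piece == 184717953466347ULL);    // "Reserved to# 184717953466347"
    return 0;
}
```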
2025-04-17T17:52:45.181775Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.181881Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2025-04-17T17:52:45.181926Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:111:2145] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2025-04-17T17:52:45.182495Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#8796093022207 2025-04-17T17:52:45.182812Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.182889Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:151:2185] requested range size#8796093022207 2025-04-17T17:52:45.193183Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.193232Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.193335Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2025-04-17T17:52:45.193373Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:151:2185] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2025-04-17T17:52:45.194002Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:155:2189] requested range size#8796093022207 2025-04-17T17:52:45.194327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.194392Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.194469Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2025-04-17T17:52:45.194507Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:155:2189] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-04-17T17:52:45.195188Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:159:2193] requested range size#8796093022207 2025-04-17T17:52:45.195553Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.195629Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.195698Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-04-17T17:52:45.195749Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 Send to Sender# [1:159:2193] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-04-17T17:52:45.196409Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:163:2197] requested range size#8796093022207 2025-04-17T17:52:45.196703Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.196792Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.196860Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-04-17T17:52:45.196907Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:163:2197] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2025-04-17T17:52:45.197583Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:167:2201] requested range size#8796093022207 2025-04-17T17:52:45.197872Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.197938Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.198015Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-04-17T17:52:45.198048Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:167:2201] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-04-17T17:52:45.207082Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:171:2205] requested range size#8796093022207 2025-04-17T17:52:45.207624Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.207690Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.207808Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-04-17T17:52:45.207854Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:171:2205] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-04-17T17:52:45.208670Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:175:2209] requested range size#8796093022207 2025-04-17T17:52:45.209053Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.209117Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.209201Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-04-17T17:52:45.209252Z node 1 :TX_ALLOCATOR DEBUG: 
tablet# 72057594046447617 Send to Sender# [1:175:2209] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-04-17T17:52:45.210054Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:179:2213] requested range size#8796093022207 2025-04-17T17:52:45.214629Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.214726Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.214856Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-04-17T17:52:45.214898Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:179:2213] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2025-04-17T17:52:45.215814Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:183:2217] requested range size#8796093022207 2025-04-17T17:52:45.216236Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.216314Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.216432Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-04-17T17:52:45.216471Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:183:2217] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-04-17T17:52:45.217287Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:187:2221] requested range size#8796093022207 2025-04-17T17:52:45.217707Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.217773Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.217860Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-04-17T17:52:45.217904Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:187:2221] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-04-17T17:52:45.220626Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:191:2225] requested range size#8796093022207 2025-04-17T17:52:45.221115Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.221173Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.221270Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-04-17T17:52:45.221327Z node 1 :TX_ALLOCATOR 
DEBUG: tablet# 72057594046447617 Send to Sender# [1:191:2225] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-04-17T17:52:45.222200Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:195:2229] requested range size#8796093022207 2025-04-17T17:52:45.222593Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.222675Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.222762Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-04-17T17:52:45.222802Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:195:2229] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2025-04-17T17:52:45.223676Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:199:2233] requested range size#31 2025-04-17T17:52:45.224033Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.224134Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T17:52:45.224225Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2025-04-17T17:52:45.224268Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:199:2233] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2025-04-17T17:52:45.225162Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:203:2237] requested range size#1 2025-04-17T17:52:45.225288Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-04-17T17:52:45.225329Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:203:2237] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 3772, MsgBus: 10408 2025-04-17T17:51:29.802886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341215892144728:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:29.809571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ca/r3tmp/tmp0D9X2W/pdisk_1.dat 2025-04-17T17:51:30.445385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:30.445541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-17T17:51:30.450913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:30.521359Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3772, node 1 2025-04-17T17:51:30.621946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:30.621972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:30.621980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:30.622133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10408 TClient is connected to server localhost:10408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:31.395580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:31.410714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:33.567746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341233072014571:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.567862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.567996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341233072014583:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:33.573317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:33.608545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341233072014585:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:51:33.711431Z node 1 :TX_PROXY ERROR: Actor# [1:7494341233072014636:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:33.768814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29720, MsgBus: 19488 2025-04-17T17:51:35.219054Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341243396973739:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:35.219102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ca/r3tmp/tmpw7y4Eg/pdisk_1.dat 2025-04-17T17:51:35.395233Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:35.395998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:35.396093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:35.405963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29720, node 2 2025-04-17T17:51:35.466632Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:35.466654Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:35.466660Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:35.466759Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19488 TClient is connected to server localhost:19488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:35.872134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:51:35.883206Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:38.453486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341256281876276:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:38.453756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:38.454048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341256281876296:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:38.456980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:38.466993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341256281876298:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:51:38.561376Z node 2 :TX_PROXY ERROR: Actor# [2:7494341256281876349:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:38.593282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12151, MsgBus: 21444 2025-04-17T17:51:40.058102Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341266664560363:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:40.058204Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ca/r3tmp/tmpF90jj5/pdisk_1.dat 2025-04-17T17:51:40.223515Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:40.275557Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:40.275657Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:40.277018Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12151, node 3 2025-04-17T17:51:40.351751Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:40.351782Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:40.351792Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:40.351948Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21444 TClient is connected to server localhost:21444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "R ... 68897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:22.386511Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:22.395947Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15169, node 10 2025-04-17T17:52:22.590978Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:22.591009Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:22.591021Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:22.591191Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26422 TClient is connected to server localhost:26422 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:23.933008Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:23.958098Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:52:26.906760Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494341442086981856:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:26.906877Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:30.814537Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341480741688204:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:30.814744Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:30.815382Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341480741688216:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:30.824195Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:52:30.856638Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494341480741688218:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:52:30.913124Z node 10 :TX_PROXY ERROR: Actor# [10:7494341480741688270:2352] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:31.004142Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:52:31.183614Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T17:52:31.399065Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7494341485036655809:2370], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-04-17T17:52:31.402415Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YzhjZDQ2ZDMtZDc2OTEwN2UtYTk5MDM4Y2UtNmFiOWY0NjE=, ActorId: [10:7494341485036655807:2369], ActorState: ExecuteState, TraceId: 01js2e3026ck9hv2989pnrnxf8, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 2429, MsgBus: 14505 2025-04-17T17:52:33.601433Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494341494023444454:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:33.601519Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ca/r3tmp/tmpKJ2Caz/pdisk_1.dat 2025-04-17T17:52:33.956532Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:34.002898Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:34.003050Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:34.011648Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2429, node 11 2025-04-17T17:52:34.153137Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:34.153162Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:34.153171Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:34.153356Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14505 TClient is connected to server localhost:14505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:35.559987Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
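
The BAD_REQUEST compile error above ("Missing not null column in input: c") is the behavior this test asserts: an INSERT whose target column list omits a NOT NULL column must fail at type annotation instead of silently writing a default. A minimal sketch of that validation rule (plain C++ as an illustration of the check, not KQP's implementation; the table schema and column names here are hypothetical):

    #include <cstdio>
    #include <set>
    #include <string>
    #include <vector>

    int main() {
        // Hypothetical schema: columns "a" and "c" are declared NOT NULL.
        std::vector<std::string> notNullColumns = {"a", "c"};
        // Hypothetical statement: INSERT INTO t (a, b) VALUES (...);
        std::set<std::string> insertTargets = {"a", "b"};

        // Every NOT NULL column must be covered by the insert's target list.
        for (const auto& col : notNullColumns) {
            if (insertTargets.count(col) == 0) {
                std::printf("Missing not null column in input: %s\n", col.c_str());
            }
        }
        return 0;
    }
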
2025-04-17T17:52:38.606347Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494341494023444454:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:38.606468Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:41.436328Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341528383183433:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.436546Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.442679Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341528383183445:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.449835Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:52:41.499993Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494341528383183447:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:52:41.606998Z node 11 :TX_PROXY ERROR: Actor# [11:7494341528383183498:2349] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:41.651585Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.839013Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.984221Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7494341528383183743:2368], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-04-17T17:52:41.987115Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=OTQ0OWM5NTYtOTgzNDlkZDEtNmU0Mzk0MmMtM2RjMDg0OTc=, ActorId: [11:7494341528383183741:2367], ActorState: ExecuteState, TraceId: 01js2e3af4129zpr3gf85d6vbg, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2025-04-17T17:52:19.747435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341432232254693:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:19.747484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00085a/r3tmp/tmp0JNJJW/pdisk_1.dat 2025-04-17T17:52:20.351399Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25673, node 1 2025-04-17T17:52:20.544633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:20.544708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:20.553679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:20.562954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:20.562980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:20.562988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:20.563125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:20.932935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:24.750547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341432232254693:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:24.750732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:35.323198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:52:35.323234Z node 1 :IMPORT WARN: Table profiles were not loaded >> TExternalDataSourceTest::ReadOnlyMode >> TExternalDataSourceTest::DropTableTwice |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |78.3%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-17T17:51:07.909201Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-17T17:51:07.909285Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-17T17:51:07.909506Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-17T17:51:07.909565Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-04-17T17:51:07.909643Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-04-17T17:51:07.909698Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-17T17:51:07.909983Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-04-17T17:51:07.910020Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-17T17:51:07.910127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:07.910602Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2068] 2025-04-17T17:51:07.910639Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2025-04-17T17:51:07.910744Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:07.910891Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:42:2068] 
2025-04-17T17:51:07.910914Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2025-04-17T17:51:07.910984Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:07.911086Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2068] 2025-04-17T17:51:07.911106Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2025-04-17T17:51:07.911141Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:07.911199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2025-04-17T17:51:07.911269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-04-17T17:51:07.911318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2025-04-17T17:51:07.911350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-04-17T17:51:07.911380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2025-04-17T17:51:07.911415Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-04-17T17:51:07.911486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2068] 2025-04-17T17:51:07.911582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:39:2068] 2025-04-17T17:51:07.911630Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:07.911682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2068] 2025-04-17T17:51:07.911731Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/tenant] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-04-17T17:51:07.911944Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 103 2025-04-17T17:51:07.911983Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-17T17:51:07.912046Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 
800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-17T17:51:07.912226Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2025-04-17T17:51:07.912285Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-04-17T17:51:07.912357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:38:2068] 2025-04-17T17:51:07.912421Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-04-17T17:51:08.405319Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-04-17T17:51:08.405370Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-17T17:51:08.405495Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-04-17T17:51:08.405522Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2025-04-17T17:51:08.405562Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-04-17T17:51:08.405792Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-17T17:51:08.405967Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-04-17T17:51:08.405995Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-04-17T17:51:08.406081Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:51:08.406473Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2068] 2025-04-17T17:51:08.406501Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2025-04-17T17:51:08.406562Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:08.406667Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# 
[3:42:2068] 2025-04-17T17:51:08.406686Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2025-04-17T17:51:08.406741Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:08.406836Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2068] 2025-04-17T17:51:08.406857Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2025-04-17T17:51:08.406885Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Subscribe: subscriber# [3:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:51:08.406949Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2025-04-17T17:51:08.406993Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2068] 2025-04-17T17:51:08.407042Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2025-04-17T17:51:08.407074Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:42:2068] 2025-04-17T17:51:08.407101Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2025-04-17T17:51:08.407150Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2068] 2025-04-17T17:51:08.407217Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:38:2068] 2025-04-17T17:51:08.407264Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:39:2068] 2025-04-17T17:51:08.407309Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:37:2068][/root/tenant] Set up state: owner# [3:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:51:08.407367Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/ ... 
omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-04-17T17:52:45.876222Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-04-17T17:52:45.876275Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2025-04-17T17:52:45.876393Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-04-17T17:52:45.876420Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2025-04-17T17:52:45.876461Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-04-17T17:52:45.876492Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2025-04-17T17:52:45.876675Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-04-17T17:52:45.876704Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2025-04-17T17:52:45.876775Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:52:45.877159Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2068] 2025-04-17T17:52:45.877190Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T17:52:45.877257Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:52:45.877385Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:42:2068] 2025-04-17T17:52:45.877410Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T17:52:45.877449Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:52:45.877575Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:43:2068] 2025-04-17T17:52:45.877598Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T17:52:45.877636Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: 
true 2025-04-17T17:52:45.877701Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2025-04-17T17:52:45.877745Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2068] 2025-04-17T17:52:45.877787Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2025-04-17T17:52:45.877821Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:42:2068] 2025-04-17T17:52:45.877855Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2025-04-17T17:52:45.877888Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:43:2068] 2025-04-17T17:52:45.877960Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2068] 2025-04-17T17:52:45.878020Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:39:2068] 2025-04-17T17:52:45.878058Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:37:2068][/Root/Tenant/table_inside] Set up state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:52:45.878108Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:40:2068] 2025-04-17T17:52:45.878145Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-04-17T17:52:46.368396Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-04-17T17:52:46.368452Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2025-04-17T17:52:46.368582Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-04-17T17:52:46.368609Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2025-04-17T17:52:46.368650Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] 
Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-04-17T17:52:46.368680Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2025-04-17T17:52:46.368845Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-04-17T17:52:46.368871Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2025-04-17T17:52:46.368939Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:52:46.369374Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2068] 2025-04-17T17:52:46.369431Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T17:52:46.369513Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:52:46.369681Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:42:2068] 2025-04-17T17:52:46.369706Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T17:52:46.369745Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:52:46.369870Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2068] 2025-04-17T17:52:46.369894Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T17:52:46.369932Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-17T17:52:46.370001Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2025-04-17T17:52:46.370051Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2068] 2025-04-17T17:52:46.370096Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2025-04-17T17:52:46.370136Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:42:2068] 2025-04-17T17:52:46.370192Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2025-04-17T17:52:46.370238Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2068] 2025-04-17T17:52:46.370319Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle 
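The node 399 records above follow a fixed per-owner sequence: TEvHandshake { Owner, Generation } answered by "Successful handshake", then TEvCommitGeneration answered by "Commit generation", and only then TEvSubscribe registering a subscriber for a path. A toy replica accepting those messages in that order; the semantics are assumed from the trace and ToyReplica and its methods are invented names, not YDB code:

```python
# Minimal sketch of the per-owner message sequence in the trace:
# Handshake -> CommitGeneration, then Subscribe for a path.
class ToyReplica:
    def __init__(self):
        self.pending = {}      # owner -> generation offered in handshake
        self.committed = {}    # owner -> committed generation
        self.subscribers = {}  # path -> set of subscriber actor ids

    def handshake(self, owner: int, generation: int) -> None:
        self.pending[owner] = generation
        print(f"Successful handshake: owner# {owner}, generation# {generation}")

    def commit_generation(self, owner: int, generation: int) -> None:
        assert self.pending.get(owner) == generation  # assumed invariant
        self.committed[owner] = generation
        print(f"Commit generation: owner# {owner}, generation# {generation}")

    def subscribe(self, subscriber: str, path: str) -> None:
        self.subscribers.setdefault(path, set()).add(subscriber)
        print(f"Subscribe: subscriber# {subscriber}, path# {path}")

r = ToyReplica()
r.handshake(owner=910, generation=1)
r.commit_generation(owner=910, generation=1)
r.subscribe("[399:41:2068]", "/Root/Tenant/table_inside")
```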
NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2068]
2025-04-17T17:52:46.370381Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:39:2068]
2025-04-17T17:52:46.370425Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:37:2068][/Root/Tenant/table_inside] Set up state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
2025-04-17T17:52:46.370481Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2068]
2025-04-17T17:52:46.370520Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
=========== argsLeft.GetSuperId() >= argsRight.GetSuperId()
>> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink [GOOD]
>> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink
>> KqpErrors::ProposeResultLost_RwTx+UseSink
>> Cdc::Drop[TopicRunner] [GOOD]
>> Cdc::DescribeStream
>> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD]
>> KqpPg::InsertFromSelect_Serial+useSink
>> TExternalDataSourceTest::DropTableTwice [GOOD]
>> TExternalDataSourceTest::ParallelCreateExternalDataSource
>> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD]
>> KqpTx::CommitRoTx_TLI [GOOD]
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD]
>> TExternalDataSourceTest::ReadOnlyMode [GOOD]
>> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD]
|78.3%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141]
2025-04-17T17:52:47.311176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:52:47.311274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:52:47.311341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:52:47.311388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:52:47.311433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:52:47.311463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:52:47.311535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:52:47.311603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:52:47.311913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:52:47.430936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-04-17T17:52:47.431012Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:52:47.476737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:52:47.476847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:52:47.477007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:52:47.557749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:52:47.558144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:52:47.558828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:52:47.559233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
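The run of ">> Suite::Test [GOOD]" lines above is the test runner's own status stream; a name with no bracketed verdict marks a test that has started but not yet finished. A hypothetical helper for pulling those verdicts out of a log like this one; the regex and the STARTED placeholder are assumptions based on the lines visible here, not an official log grammar:

```python
# Extract test verdicts from ya test-runner output of the form
# ">> Suite::Test [GOOD]" or ">> Suite::Test" (still running).
import re

STATUS_RE = re.compile(r">> (\S+)(?: \[(\w+)\])?")

def parse_statuses(text: str):
    for name, verdict in STATUS_RE.findall(text):
        yield name, verdict or "STARTED"  # "STARTED" is our own placeholder

log = ">> KqpTx::CommitRoTx_TLI [GOOD] >> Cdc::DescribeStream"
for name, verdict in parse_statuses(log):
    print(name, verdict)
# KqpTx::CommitRoTx_TLI GOOD
# Cdc::DescribeStream STARTED
```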
2025-04-17T17:52:47.561600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.562914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.562982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.563254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:47.563297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.563342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:47.563527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.570586Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:52:47.708847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:47.709072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.709321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:47.709540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:47.709601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.711781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.711928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:47.712102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.712159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:47.712184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:47.712208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:47.714691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.714750Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:47.714789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-04-17T17:52:47.716328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.716376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.716418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.716495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.719150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:47.720568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:47.720832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:47.721510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.721602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:47.721645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.721822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:47.721852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.721964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:47.722029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:47.723437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.723466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.723558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.723608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:47.723832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
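The suboperation in the trace advances through numeric states: "Change state for txid 1:0 2 -> 3" while TCreateParts runs, "3 -> 128" after TConfigureParts, and "128 -> 240" when TPropose handles TEvOperationPlan, after which TDone reports progress. A small sketch encoding that observed chain; the state names are read off the adjacent log messages, and the full meaning of the numeric codes is an inference, not taken from source:

```python
# Transition chain observed in this trace; names inferred from the
# neighbouring TCreateParts / TConfigureParts / TPropose / TDone messages.
STATE_NAMES = {2: "CreateParts", 3: "ConfigureParts", 128: "Propose", 240: "Done"}
OBSERVED = [(2, 3), (3, 128), (128, 240)]  # "2 -> 3", "3 -> 128", "128 -> 240"

for src, dst in OBSERVED:
    print(f"Change state: {src} ({STATE_NAMES[src]}) -> {dst} ({STATE_NAMES[dst]})")
```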
2025-04-17T17:52:47.723863Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:47.723936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:47.723964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.724013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:47.724032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.724057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:47.724082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.724107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:47.724126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:47.724168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:47.724192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:47.724232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:47.725418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... thId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:47.835251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:52:47.835290Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T17:52:47.835330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:47.836331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:47.836420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:47.836460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:52:47.836491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:52:47.836533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:52:47.836593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T17:52:47.838705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-17T17:52:47.838808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-17T17:52:47.839249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.839349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:47.839415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-04-17T17:52:47.839512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:52:47.839595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T17:52:47.839770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:47.839820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:52:47.840700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:52:47.842037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:52:47.843356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.843391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.843517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:52:47.843663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.843712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-17T17:52:47.843755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-17T17:52:47.843982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.844022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:52:47.844122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:52:47.844153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:52:47.844187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:52:47.844223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:52:47.844249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T17:52:47.844282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:52:47.844318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:52:47.844345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:52:47.844412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:52:47.844450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-17T17:52:47.844475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:52:47.844499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T17:52:47.844867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:47.844915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:47.844935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:52:47.844960Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:52:47.844985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:52:47.845280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:52:47.845349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:52:47.845395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:47.845592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:47.845644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:52:47.845671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:52:47.845694Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:52:47.845709Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:52:47.845763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0
2025-04-17T17:52:47.849601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
2025-04-17T17:52:47.849753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-17T17:52:47.849824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
TestModificationResult got TxId: 104, wait until txId: 104
TestWaitNotification wait txId: 104
2025-04-17T17:52:47.850152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion
2025-04-17T17:52:47.850230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104
2025-04-17T17:52:47.850721Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944
2025-04-17T17:52:47.850816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult
2025-04-17T17:52:47.850851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:390:2381]
TestWaitNotification: OK eventTxId 104
2025-04-17T17:52:47.851432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:52:47.851741Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 211us result status StatusPathDoesNotExist
2025-04-17T17:52:47.851894Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TNebiusAccessServiceTest::Authenticate
>> TNebiusAccessServiceTest::Authenticate [GOOD]
>> TNebiusAccessServiceTest::PassRequestId [GOOD]
>> YdbSdkSessionsPool::FailTest [GOOD]
|78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient:
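Two things recur in the publication records around here: an ack countdown ("Publication in-flight, count: 2", then "count: 1", then "Publication complete, notify & remove") and the value 18446744073709551615, which is exactly 2^64 - 1, i.e. a uint64 sentinel appearing where a real version or path id is absent (dropped path versions, the PathId/PathOwnerId of an unresolved path above). A toy model of the countdown, seeded with the tx 104 values from the trace; the completion logic is assumed and the names are invented, this is not YDB's code:

```python
# 18446744073709551615 recurring in the trace is uint64 max, a sentinel
# rather than a real version or path id.
SENTINEL = 18446744073709551615
assert SENTINEL == 2**64 - 1  # 0xffffffffffffffff

class ToyPublication:
    """Toy ack bookkeeping: a tx publishes (path, version) pairs and
    completes when every pair has been acknowledged (assumed logic)."""
    def __init__(self, tx_id: int, expected: dict):
        self.tx_id, self.expected = tx_id, dict(expected)

    def ack(self, path_id: int, version: int) -> None:
        if self.expected.get(path_id) == version:
            del self.expected[path_id]
        if self.expected:
            print(f"Publication in-flight, count: {len(self.expected)}, txId: {self.tx_id}")
        else:
            print(f"Publication complete, notify & remove, txId: {self.tx_id}")

pub = ToyPublication(104, {1: 11, 2: SENTINEL})  # values from the tx 104 trace
pub.ack(2, SENTINEL)  # -> in-flight, count: 1
pub.ack(1, 11)        # -> complete
```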
[1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T17:52:47.598362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:47.598471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:47.598522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:47.598564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:47.598611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:47.598639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:47.598698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:47.598772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:47.599114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:47.684025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:52:47.684101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:47.703011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:47.703623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:47.703808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:47.717668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:47.717859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:47.718616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.719275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:47.722170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.723500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.723562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.723743Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:47.723804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.723867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:47.724021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.731057Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:52:47.879452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:47.879720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.879972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:47.880278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:47.880340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.887103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.887307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:47.887551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.887638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:47.887696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:47.887733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:47.890476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.890552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:47.890593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:47.896050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.896111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.896167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.896239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.900177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:47.905879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:47.906131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:47.907333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.907494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:47.907560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.907932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:47.908015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.908203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:47.908314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:47.915737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.915788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.915962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.915999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:47.916283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.916333Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:47.916415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:47.916441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.916475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-04-17T17:52:47.916519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.916568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:47.916605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.916634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:47.916667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:47.916736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:47.916771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:47.916796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:47.918334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... T_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:48.337705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:52:48.337758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:52:48.347282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2025-04-17T17:52:48.347504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-17T17:52:48.347802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:48.347856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:48.348069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:52:48.348266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:48.348310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-17T17:52:48.348359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-04-17T17:52:48.348798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.348850Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:48.348916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2025-04-17T17:52:48.349047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:48.349804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-17T17:52:48.349909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-17T17:52:48.349999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-17T17:52:48.350041Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-17T17:52:48.350093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:52:48.351858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-17T17:52:48.351954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-17T17:52:48.351986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-17T17:52:48.352032Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-04-17T17:52:48.352075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:52:48.352166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2025-04-17T17:52:48.365942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2025-04-17T17:52:48.366190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:52:48.367706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2025-04-17T17:52:48.368464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-04-17T17:52:48.368797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:48.369045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 
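The FAKE_COORDINATOR lines in this test sketch the planning step: a transaction is added at the next step past the current front ("Add transaction: 128 at step: 5000004" alongside "State->FrontStep: 5000003"), and a plan is then sent to the tablet for that step. A deliberately naive model of such a coordinator; real step assignment is certainly more involved, and ToyCoordinator is an invented name:

```python
# Toy coordinator mirroring the FAKE_COORDINATOR lines: each planned tx
# gets the next step, and the front step advances past it (assumed).
class ToyCoordinator:
    def __init__(self, start_step: int):
        self.front_step = start_step

    def plan(self, tx_id: int) -> int:
        step = self.front_step + 1
        print(f"Add transaction: {tx_id} at step: {step}")
        print(f"advance: minStep{step} State->FrontStep: {self.front_step}")
        self.front_step = step
        print(f"Send Plan to tablet for txId: {tx_id} at step: {step}")
        return step

coord = ToyCoordinator(start_step=5000003)
coord.plan(128)  # -> step 5000004, as in the trace above
```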
133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:48.369104Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-04-17T17:52:48.369273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2025-04-17T17:52:48.369464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:52:48.369530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T17:52:48.382000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:48.382068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:48.382282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:52:48.382403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:48.382440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-17T17:52:48.382476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 FAKE_COORDINATOR: Erasing txId 128 2025-04-17T17:52:48.382897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.382959Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2025-04-17T17:52:48.383086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-04-17T17:52:48.383122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-17T17:52:48.383169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-04-17T17:52:48.383201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-17T17:52:48.383242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2025-04-17T17:52:48.383286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-17T17:52:48.383324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2025-04-17T17:52:48.383356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2025-04-17T17:52:48.383437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:52:48.383481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2025-04-17T17:52:48.383523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-17T17:52:48.383554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 
72057594046678944, LocalPathId: 4], 3
2025-04-17T17:52:48.384191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128
2025-04-17T17:52:48.384287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128
2025-04-17T17:52:48.384339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128
2025-04-17T17:52:48.384396Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9
2025-04-17T17:52:48.384440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4
2025-04-17T17:52:48.386842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128
2025-04-17T17:52:48.386935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128
2025-04-17T17:52:48.386967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128
2025-04-17T17:52:48.386997Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3
2025-04-17T17:52:48.387026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-04-17T17:52:48.387102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0
2025-04-17T17:52:48.391359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128
2025-04-17T17:52:48.391668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128
TestModificationResult got TxId: 128, wait until txId: 128
>> YdbSdkSessionsPool::PeriodicTask1 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141]
2025-04-17T17:52:47.802739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:52:47.802840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:52:47.802891Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:47.802927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:47.802976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:47.803003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:47.803058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:47.803164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:47.803493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:47.895275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:52:47.895353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:47.908602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:47.909014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:47.909145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:47.918420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:47.918631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:47.919272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.919930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:47.922864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.924376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.924557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.924769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:47.924843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.924893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:47.925058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.933037Z node 1 :HIVE INFO: [72057594037968897] started, 
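The "Got new config: QueryServiceConfig { AvailableExternalDataSources: ... }" record above is protobuf text format with a repeated string field. One quick, admittedly ad-hoc way to collect the source names from such a line when a proper protobuf parser is not at hand:

```python
# Collect the repeated AvailableExternalDataSources values from a
# protobuf text-format fragment like the one logged above.
import re

cfg = ('AvailableExternalDataSources: "ObjectStorage" '
       'AvailableExternalDataSources: "ClickHouse"')
sources = re.findall(r'AvailableExternalDataSources: "([^"]+)"', cfg)
print(sources)  # ['ObjectStorage', 'ClickHouse']
```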
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:52:48.141189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:48.141395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.141621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:48.141855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:48.141911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.148930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:48.149078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:48.149306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.149373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:48.149407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:48.149438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:48.151734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.151794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:48.151836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:48.154166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.154237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.154303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:48.154368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:48.162791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:48.167073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:48.167280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:48.168265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:48.168411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:48.168469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:48.168850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:48.168904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:48.169075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:48.169151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:48.172248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:48.172316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:48.172567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:48.172607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:48.172864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.172910Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:48.172987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:48.173010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:48.173036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:48.173076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:48.173121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:48.173167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:48.173196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:48.173223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:48.173278Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:48.173304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:48.173325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:48.174819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... SHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:52:48.253204Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-17T17:52:48.253244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:48.255047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:48.255154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:48.255185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:52:48.255216Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:52:48.255246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:52:48.255325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:52:48.257531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:52:48.257696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T17:52:48.258289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:48.258446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:48.258507Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-04-17T17:52:48.258626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-17T17:52:48.258809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:48.258917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:52:48.260178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:52:48.266209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:52:48.267629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:48.267670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:48.267828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:52:48.267926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:52:48.268025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:48.268055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T17:52:48.268103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:52:48.268136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:52:48.268424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:52:48.268473Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:52:48.268596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:52:48.268629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:52:48.268685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:52:48.268725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:52:48.268761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T17:52:48.268867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:52:48.268904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:52:48.268934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:52:48.269008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:52:48.269052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T17:52:48.269094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, 
LocalPathId: 1], 7 2025-04-17T17:52:48.269123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T17:52:48.269954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:48.270065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:48.270140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:52:48.270215Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:52:48.270268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:48.272103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:48.272206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:52:48.272234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:52:48.272260Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:52:48.272303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:52:48.272374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T17:52:48.279554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:52:48.280159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:52:48.280396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:52:48.280444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:52:48.280954Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:52:48.281060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:52:48.281114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:334:2325] TestWaitNotification: OK eventTxId 102 2025-04-17T17:52:48.281648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:48.281846Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 227us result status StatusSuccess 2025-04-17T17:52:48.282135Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-17T17:52:48.971904Z node 1 :GRPC_CLIENT DEBUG: [517000002b08] Connect to grpc://localhost:4321 2025-04-17T17:52:48.975830Z node 1 :GRPC_CLIENT DEBUG: [517000002b08] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2025-04-17T17:52:49.038916Z node 1 :GRPC_CLIENT DEBUG: [517000002b08] Status 7 Permission Denied 2025-04-17T17:52:49.046622Z node 1 :GRPC_CLIENT DEBUG: [517000002b08] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2025-04-17T17:52:49.055377Z node 1 :GRPC_CLIENT DEBUG: [517000002b08] Response AuthenticateResponse { account { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 5453, MsgBus: 26298 2025-04-17T17:52:32.124485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341487564134927:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:32.124597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001835/r3tmp/tmpwgC8FE/pdisk_1.dat 2025-04-17T17:52:32.627471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-17T17:52:32.627565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:32.628685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:32.655587Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5453, node 1 2025-04-17T17:52:32.962072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:32.962093Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:32.962100Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:32.962239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26298 TClient is connected to server localhost:26298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:34.251317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:34.298258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:34.478929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:34.698485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:34.816824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:37.127049Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341487564134927:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:37.127355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:37.284260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341509038973185:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:37.284366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:37.956657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:38.036220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:38.104538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:38.176940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:38.268796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:38.398994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:38.486582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:38.586342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341513333941153:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:38.586421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:38.586674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341513333941158:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:38.595176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:38.621212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341513333941160:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:38.710927Z node 1 :TX_PROXY ERROR: Actor# [1:7494341513333941216:3564] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62953, MsgBus: 64115 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001835/r3tmp/tmp8i75kG/pdisk_1.dat 2025-04-17T17:52:41.687404Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:41.688755Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:41.708001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:41.708094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:41.710371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62953, node 2 2025-04-17T17:52:41.879746Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:41.879774Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:41.879781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:41.879899Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64115 TClient is connected to server localhost:64115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:42.423067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:42.436348Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:52:42.450975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:42.617003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:42.804191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:42.886151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:45.283485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341544924619777:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:45.283575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:45.349689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.397172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.450936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.506936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.568689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.656201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.727387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.825185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341544924620454:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:45.825307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:45.825612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341544924620459:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:45.830067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:45.845400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341544924620461:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:52:45.904759Z node 2 :TX_PROXY ERROR: Actor# [2:7494341544924620517:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T17:52:47.619175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:47.619289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:47.619332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:47.619362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:47.619399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:47.619455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:47.619511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:47.619577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:47.619920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:47.703131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:52:47.703198Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:47.722663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:47.723185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:47.723361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:47.740433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:47.740648Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:47.741419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.742112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:47.745269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.746504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.746566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.746765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:47.746808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.746853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:47.747013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.753702Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:52:47.904615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:47.904816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.905010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:47.905233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:47.905283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.908571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.908739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:47.908955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.909021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:47.909052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:47.909082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2025-04-17T17:52:47.911167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.911234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:47.911274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:47.913136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.913183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.913216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.913268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.922865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:47.925578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:47.925821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:47.926840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:47.927003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:47.927060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.927292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:47.927339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:47.927492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:47.927581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:47.929929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:47.929973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:47.930151Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:47.930222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:47.930565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:47.930614Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:47.930702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:47.930734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.930768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:47.930813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.930857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:47.930898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:47.930951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:47.930983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:47.931051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:47.931090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:47.931119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:47.933369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
end EvNotifyTxCompletion 2025-04-17T17:52:48.791165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2025-04-17T17:52:48.791632Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-04-17T17:52:48.791756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-04-17T17:52:48.791797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:340:2331] 2025-04-17T17:52:48.791939Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-17T17:52:48.791997Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-17T17:52:48.792045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-17T17:52:48.792066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:340:2331] 2025-04-17T17:52:48.792148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-17T17:52:48.792183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:340:2331] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2025-04-17T17:52:48.792631Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:48.792818Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 227us result status StatusSuccess 2025-04-17T17:52:48.793143Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 
PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:48.793801Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:48.793944Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 160us result status StatusSuccess 2025-04-17T17:52:48.794157Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:48.796876Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:48.797028Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 185us result status StatusSuccess 2025-04-17T17:52:48.797446Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: 
true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:48.797947Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:48.798093Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 161us result status StatusSuccess 2025-04-17T17:52:48.798345Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:48.798839Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:52:48.798970Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 152us result status StatusSuccess 2025-04-17T17:52:48.799185Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: 
"MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-04-17T17:52:49.059376Z node 2 :GRPC_CLIENT DEBUG: [517000003908]{reqId} Connect to grpc://localhost:12943 2025-04-17T17:52:49.063035Z node 2 :GRPC_CLIENT DEBUG: [517000003908]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-17T17:52:49.080727Z node 2 :GRPC_CLIENT DEBUG: [517000003908]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } >> DataStreams::TestPutEmptyMessage [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> DataStreams::TestListStreamConsumers >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace |78.3%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2025-04-17T17:52:30.384638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341478914395673:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:30.384819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000455/r3tmp/tmpQIvm4x/pdisk_1.dat 2025-04-17T17:52:31.068206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:31.068288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:31.076359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:31.095448Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9226, node 1 2025-04-17T17:52:31.144626Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:31.144691Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:31.243298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:31.243322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:31.243329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:31.243467Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:31.572523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:35.386438Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341478914395673:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:35.386536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:43.206824Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341534238560848:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:43.206881Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000455/r3tmp/tmp5RpMIO/pdisk_1.dat 2025-04-17T17:52:43.567312Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:43.631211Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:43.631319Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:43.644284Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29854, node 4 2025-04-17T17:52:43.932805Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:43.932831Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:43.932837Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:43.932977Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:44.495548Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
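[Editor's note] The TTxDescribeScheme output at the top of this excerpt shows two external data sources under /MyRoot/DirA, each with SourceType "ObjectStorage", Location "https://s3.cloud.net/my_bucket", and Auth None. For orientation, the YQL below is a minimal sketch of a statement that would produce an equivalent ExternalDataSourceDescription; the test drives schemeshard through internal scheme transactions, so this exact DDL does not appear anywhere in the log.

```yql
-- Minimal sketch (not taken from the log): DDL matching the
-- ExternalDataSourceDescription fields in the describe output above.
CREATE EXTERNAL DATA SOURCE `/MyRoot/DirA/MyExternalDataSource2` WITH (
    SOURCE_TYPE = "ObjectStorage",
    LOCATION = "https://s3.cloud.net/my_bucket",
    AUTH_METHOD = "NONE"
);
```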
>> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> DataShardReadIteratorSysTables::ShouldRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2025-04-17T17:51:14.095394Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341154134224444:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:14.095449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b79/r3tmp/tmpqJtL7V/pdisk_1.dat 2025-04-17T17:51:14.386903Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:51:14.640805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:14.644750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:14.644847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:14.648574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4635, node 1 2025-04-17T17:51:14.782859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001b79/r3tmp/yandexpZr59S.tmp 2025-04-17T17:51:14.782889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001b79/r3tmp/yandexpZr59S.tmp 2025-04-17T17:51:14.783079Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001b79/r3tmp/yandexpZr59S.tmp 2025-04-17T17:51:14.783234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:51:14.881649Z INFO: TTestServer started on Port 8101 GrpcPort 4635 TClient is connected to server localhost:8101 PQClient connected to localhost:4635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:15.518024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:15.542758Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
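[Editor's note] The SlowTopicAutopartitioning::CDC_Write output below logs a CreateTable proposal for a table `origin` (key columns id and `order`, a Utf8 value column, 64 uniform partitions pinned at 64 by the partitioning policy) and then floods the CDC stream topic origin/feed/streamImpl, i.e. a changefeed named feed on that table. A rough YQL equivalent of that setup is sketched here; the FORMAT and MODE values are illustrative assumptions, since the changefeed settings are not visible in the log.

```yql
-- Hedged sketch of the schema implied by the CDC_Write log below.
-- `order` is a reserved word in YQL and must be backtick-quoted.
CREATE TABLE `origin` (
    id Uint64,
    `order` Uint64,
    value Utf8,
    PRIMARY KEY (id, `order`)
) WITH (
    UNIFORM_PARTITIONS = 64,                     -- UniformPartitionsCount: 64
    AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 64, -- MinPartitionsCount: 64
    AUTO_PARTITIONING_MAX_PARTITIONS_COUNT = 64  -- MaxPartitionsCount: 64
);

-- The topic origin/feed/streamImpl is the backing stream of a changefeed
-- named `feed`; FORMAT and MODE below are assumptions, not logged values.
ALTER TABLE `origin` ADD CHANGEFEED `feed` WITH (
    FORMAT = 'JSON',
    MODE = 'UPDATES'
);
```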
2025-04-17T17:51:15.591794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:51:15.602052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:51:15.886659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:15.906771Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:51:19.010970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341171314094425:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.011195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.014690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341175609061750:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:19.019397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:51:19.033916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341175609061752:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:51:19.102689Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341154134224444:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:19.115874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:19.464513Z node 1 :TX_PROXY ERROR: Actor# [1:7494341175609061817:2455] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:19.509298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:51:19.572962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:51:19.761941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:51:19.772298Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341175609061827:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:51:19.777616Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzgyNDBlMDMtZWVlYTEyYTItYWRmNmU3MTYtYTMzOTU3M2E=, ActorId: [1:7494341171314094423:2338], ActorState: ExecuteState, TraceId: 01js2e0scz6nabg0zgr37a4vkk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:51:19.780108Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494341179904029418:2642] === CheckClustersList. Ok 2025-04-17T17:51:25.543862Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:51:25.675943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7494341154134224843:2185]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:51:25.675982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:51:25.676026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7494341154134224843:2185], Recipient [1:7494341154134224843:2185]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:51:25.676042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:51:25.692049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494341201378866165:2785], Recipient [1:7494341154134224843:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:25.692089Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:51:25.692106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:51:25.692149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494341201378866160:2781], Recipient [1:7494341154134224843:2185]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-17T17:51:25.692169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:51:25.749788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" 
KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:51:25.750149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/origin, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:51:25.750307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/origin, opId: 281474976710672:0, schema: Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false, at schemeshard: 72057594046644480 2025-04-17T17:51:25.750767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: orig ... 1 SourceId: '\00072075186224037913' SeqNo: 2083 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353684Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2084 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353694Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2085 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353706Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2086 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353715Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2087 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353725Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2088 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353735Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2089 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353814Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2090 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353825Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2091 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353832Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2093 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353843Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2094 
partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353856Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2095 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353868Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2096 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353917Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2097 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353930Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2098 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353939Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2103 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353949Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2104 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353958Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2105 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353966Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2108 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353975Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2109 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353983Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2111 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.353992Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2113 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354001Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2116 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354013Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2121 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354025Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2123 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354036Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2125 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354046Z node 1 :PERSQUEUE DEBUG: [PQ: 
72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2126 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354058Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2128 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354067Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2131 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354076Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2132 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354088Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2133 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354101Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2134 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354116Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2135 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354131Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2137 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354141Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2138 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354151Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2140 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354160Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2141 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354169Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2142 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354198Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2144 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354216Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2146 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354227Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2147 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354237Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' 
SeqNo: 2148 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354247Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2149 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354256Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2155 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354266Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2156 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354275Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2158 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354287Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2161 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354298Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2163 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354311Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2164 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354320Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2165 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354329Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2168 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354339Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2169 partNo : 0 messageNo: 3 size 192 offset: -1 2025-04-17T17:52:05.354349Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956] got client message topic: origin/feed/streamImpl partition: 1 SourceId: '\00072075186224037913' SeqNo: 2170 partNo : 0 messageNo: 3 size 192 offset: -1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2025-04-17T17:52:18.172463Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341429986070929:2226];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:18.176313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00096b/r3tmp/tmpulqnNP/pdisk_1.dat 2025-04-17T17:52:18.611795Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6993, node 1 2025-04-17T17:52:18.643214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:18.643531Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:18.667608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:18.681152Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:18.681467Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:18.706713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:18.706737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:18.706746Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:18.706844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:19.049487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:23.398492Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341450397926039:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:23.398542Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00096b/r3tmp/tmpaYw2yg/pdisk_1.dat 2025-04-17T17:52:23.675043Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:23.729527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:23.729687Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:23.735449Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64500, node 4 2025-04-17T17:52:23.922851Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:23.922878Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:23.922886Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:23.923022Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:24.279316Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:28.402517Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494341450397926039:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:28.402608Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:38.643034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:52:38.643072Z node 4 :IMPORT WARN: Table profiles were not loaded >> DataShardReadIterator::ShouldReceiveErrorAfterSplit >> DataShardReadIterator::ShouldHandleReadAck >> DataShardReadIteratorBatchMode::SelectingColumns |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> DataShardReadIterator::ShouldReadRangeCellVec ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2025-04-17T17:52:40.906165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:40.906779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:40.906841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e5/r3tmp/tmpG5yM4r/pdisk_1.dat 2025-04-17T17:52:41.298246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.341628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:41.398076Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:52:41.399113Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:52:41.399304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:41.399418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:41.411507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:41.496800Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:52:41.496880Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:52:41.497101Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:52:41.637990Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:52:41.638108Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:52:41.638719Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:52:41.638842Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:52:41.639206Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:52:41.639420Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:52:41.639516Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:52:41.641460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.641946Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:52:41.642705Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:52:41.642778Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:52:41.678084Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:41.679330Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:41.679860Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:41.680160Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:41.739916Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:41.740867Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:41.740997Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:41.742828Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:41.742907Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:41.742968Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:41.743389Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:41.743554Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:41.743655Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:41.758716Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:41.817371Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:41.817592Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:41.817752Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:41.817788Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:41.817821Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:41.817859Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:41.818046Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:52:41.818092Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:41.818575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:41.818658Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:41.818783Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:41.818830Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:41.818867Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:41.818921Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:41.818948Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:41.818981Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:41.819019Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:41.819134Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:41.819192Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:41.819233Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:41.819616Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:41.819674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:41.819783Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:41.819951Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:41.819999Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:41.820075Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:41.820114Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:41.820146Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:41.820171Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:52:41.820210Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:41.820494Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:41.820522Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:41.820547Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:41.820595Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:52:41.820649Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:41.820669Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:41.820693Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:41.820738Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:41.820816Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:41.822477Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:41.822524Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:41.833629Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... planned 0 immediate 1 planned 0 2025-04-17T17:52:49.822556Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2025-04-17T17:52:49.822592Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2025-04-17T17:52:49.822638Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2025-04-17T17:52:49.822676Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:52:49.822700Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-17T17:52:49.822723Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2025-04-17T17:52:49.822746Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-17T17:52:49.822942Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2025-04-17T17:52:49.822965Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:52:49.823001Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-17T17:52:49.823043Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:52:49.823076Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:52:49.823133Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:52:49.823833Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1004:2803], Recipient [2:888:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:52:49.823895Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T17:52:49.823987Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-17T17:52:49.824017Z node 2 :TX_PROXY DEBUG: 
[ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T17:52:49.824076Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:1004:2803] 2025-04-17T17:52:49.824214Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-17T17:52:49.824604Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:52:49.824782Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-17T17:52:49.824816Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-17T17:52:49.824844Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-04-17T17:52:49.824899Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-17T17:52:49.825046Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-17T17:52:49.825079Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T17:52:49.825378Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:973:2775], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T17:52:49.825406Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T17:52:49.825428Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1004:2803] 2025-04-17T17:52:49.825474Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-17T17:52:49.825548Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:52:49.825683Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-04-17T17:52:49.825723Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-17T17:52:49.825746Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-04-17T17:52:49.825816Z node 2 :TX_PROXY INFO: [ReadTable [2:974:2775] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.022805s execute time: 0.203274s total time: 0.226079s 2025-04-17T17:52:49.826140Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-17T17:52:49.826213Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-04-17T17:52:49.826448Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:886:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-17T17:52:49.826674Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-17T17:52:49.826704Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2025-04-17T17:52:49.826889Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:888:2711], Recipient [2:888:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:49.826917Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:49.826961Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T17:52:49.826988Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:52:49.827019Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-04-17T17:52:49.827041Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-17T17:52:49.827071Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-04-17T17:52:49.827104Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:52:49.827129Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-04-17T17:52:49.827173Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-04-17T17:52:49.827202Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-17T17:52:49.827234Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-04-17T17:52:49.827255Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-04-17T17:52:49.827285Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:52:49.827308Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:52:49.827359Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:52:49.827382Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:52:49.827415Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-04-17T17:52:49.827440Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:49.827464Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T17:52:49.827490Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:52:49.827512Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:52:49.827573Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:52:49.827605Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-17T17:52:49.827638Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T17:52:49.827701Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:52:49.829791Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:974:2775], Recipient [2:888:2711]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2025-04-17T17:52:49.829846Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-17T17:52:49.829888Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-04-17T17:52:49.829950Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-04-17T17:52:49.830136Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:974:2775], Recipient [2:888:2711]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-04-17T17:52:49.830171Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-17T17:52:49.830296Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:888:2711]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 >> DataShardReadIterator::ShouldReverseReadMultipleKeys >> 
DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec >> KqpPg::ExplainColumnsReorder [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 22612, MsgBus: 25519 2025-04-17T17:51:28.989047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341215123655828:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:28.989121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014cb/r3tmp/tmpf4fddt/pdisk_1.dat 2025-04-17T17:51:29.832533Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:29.845390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:29.845544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:29.851117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22612, node 1 2025-04-17T17:51:30.083886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:30.083917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:30.083926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:30.084091Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25519 TClient is connected to server localhost:25519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
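
The ReadTable trace at the top of this section shows the stream-quota handshake between the TX_PROXY actor and the datashard: the shard asks with TEvStreamQuotaRequest, the proxy reserves a single-message quota (MessageQuota: 1), the shard emits one RESPONSE_DATA message and drops to MessageQuota: 0 with PendingAcks: 1, and only after TEvStreamDataAck does the cycle repeat until the scan finishes. The sketch below reproduces that accounting in a self-contained form; Shard, messageQuota, and the printed strings are illustrative stand-ins, not YDB's actual actor classes.

// Minimal sketch of the stream-quota handshake visible in the trace above
// (TEvStreamQuotaRequest -> TEvStreamQuotaResponse -> RESPONSE_DATA ->
// TEvStreamDataAck). Names are invented for illustration.
#include <cstdio>
#include <queue>

struct Shard {
    std::queue<int> rows;   // rows still to stream
    int messageQuota = 0;   // messages we may send before the next grant
    int pendingAcks = 0;    // sent but not yet acknowledged messages
};

int main() {
    Shard shard;
    for (int key = 1; key <= 5; ++key) shard.rows.push(key);

    while (!shard.rows.empty()) {
        // The proxy grants one message at a time (MessageQuota: 1 in the
        // trace) to bound how much data is in flight toward the client.
        shard.messageQuota = 1;
        std::printf("proxy: reserving quota 1 message\n");

        // Send one response message per granted quota unit.
        while (shard.messageQuota > 0 && !shard.rows.empty()) {
            std::printf("shard: send row %d, MessageQuota: %d -> %d, PendingAcks: 1\n",
                        shard.rows.front(), shard.messageQuota, shard.messageQuota - 1);
            shard.rows.pop();
            --shard.messageQuota;
            ++shard.pendingAcks;
        }

        // The proxy acknowledges receipt; only then is more quota requested.
        std::printf("proxy: TEvStreamDataAck, PendingAcks: %d -> 0\n", shard.pendingAcks);
        shard.pendingAcks = 0;
    }
    std::printf("shard: finish scan, MessageQuota: 0\n");
}

The one-message window explains why each data message in the trace is bracketed by a fresh TEvStreamQuotaRequest/TEvStreamQuotaResponse pair.
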
2025-04-17T17:51:31.181335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:33.994514Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341215123655828:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:33.994594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:34.100238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341240893460074:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:34.105275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341240893460065:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:34.105389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:34.107079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:34.122559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341240893460079:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:51:34.206386Z node 1 :TX_PROXY ERROR: Actor# [1:7494341240893460130:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31285, MsgBus: 15125 2025-04-17T17:51:35.308211Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341243159336047:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014cb/r3tmp/tmpgWmULO/pdisk_1.dat 2025-04-17T17:51:35.406441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:51:35.586911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:35.586988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:35.598023Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:35.604001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31285, node 2 2025-04-17T17:51:35.794724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:35.794747Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:35.794754Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:35.794862Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15125 TClient is connected to server localhost:15125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:36.623454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
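
The bootstrap pattern repeated across nodes in this test is: fetching the default resource pool fails with NOT_FOUND, a creator actor is started, a concurrent create reports "path exist, request accepts it", and the service schedules a retry that simply re-fetches the pool ("doublechecking"). A hedged sketch of that create-if-missing-then-recheck loop, with all types invented for illustration:

// Illustrative create-if-missing loop: NOT_FOUND -> create -> re-fetch.
// AlreadyExists from a racing creator is treated as success-in-progress.
#include <cstdio>
#include <set>
#include <string>

enum class Status { Success, NotFound, AlreadyExists };

std::set<std::string> gScheme;  // stands in for the schemeshard path table

Status FetchPool(const std::string& name) {
    return gScheme.count(name) ? Status::Success : Status::NotFound;
}

Status CreatePool(const std::string& name) {
    return gScheme.insert(name).second ? Status::Success : Status::AlreadyExists;
}

int main() {
    const std::string pool = "default";
    for (int attempt = 0; attempt < 3; ++attempt) {
        if (FetchPool(pool) == Status::Success) {
            std::printf("pool ready on attempt %d\n", attempt);
            return 0;
        }
        // NOT_FOUND: try to create; if another actor won the race we see
        // AlreadyExists, then loop and re-fetch to double-check.
        Status st = CreatePool(pool);
        std::printf("create -> %s, scheduling retry\n",
                    st == Status::Success ? "Success" : "AlreadyExists");
    }
    return 1;
}

Because the loop converges on "fetch succeeds", the WARN/ERROR lines during startup are expected noise rather than test failures.
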
2025-04-17T17:51:36.631098Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:39.323830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341260339205726:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:39.323919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:39.324294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341260339205738:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:39.328613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:51:39.343945Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:51:39.344202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341260339205740:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:51:39.402133Z node 2 :TX_PROXY ERROR: Actor# [2:7494341260339205791:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19676, MsgBus: 25436 2025-04-17T17:51:40.295092Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341264357362369:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:40.296267Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014cb/r3tmp/tmppDzPSg/pdisk_1.dat 2025-04-17T17:51:40.441037Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:40.452895Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:40.452985Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:40.457696Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19676, node 3 2025-04-17T17:51:40.609425Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:40.609456Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:40.609464Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:40.609592Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25436 TClient is connected to server localhost:25436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 7205759404664 ... u don't have access permissions } 2025-04-17T17:52:28.969902Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T17:52:28.987256Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494341470436151983:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T17:52:29.049490Z node 10 :TX_PROXY ERROR: Actor# [10:7494341474731119331:2451] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27245, MsgBus: 7591 2025-04-17T17:52:32.727156Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494341490513138933:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:32.728445Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014cb/r3tmp/tmp6G3AEG/pdisk_1.dat 2025-04-17T17:52:33.247439Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:33.323607Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:33.323775Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:33.328151Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27245, node 11 2025-04-17T17:52:33.687059Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:33.687091Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:33.687103Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:33.687360Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7591 TClient is connected to server localhost:7591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:35.109872Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
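
Each "WaitRootIsUp 'Root'..." block in these logs is the test client polling the scheme root with TClient::Ls until it answers SUCCESS. A minimal sketch of such a poll loop, assuming a hypothetical LsRootSucceeds stand-in for the real client call:

// Poll-until-ready sketch; LsRootSucceeds is a stand-in for TClient::Ls.
#include <chrono>
#include <cstdio>
#include <thread>

bool LsRootSucceeds(int attempt) {
    // Pretend the scheme root becomes visible on the third request.
    return attempt >= 3;
}

int main() {
    using namespace std::chrono_literals;
    for (int attempt = 1; attempt <= 10; ++attempt) {
        std::printf("TClient::Ls request: Root (attempt %d)\n", attempt);
        if (LsRootSucceeds(attempt)) {
            std::printf("WaitRootIsUp 'Root' success.\n");
            return 0;
        }
        std::this_thread::sleep_for(100ms);  // back off before re-asking
    }
    std::printf("WaitRootIsUp 'Root' timed out\n");
    return 1;
}
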
2025-04-17T17:52:35.126749Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:37.730751Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494341490513138933:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:37.730867Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:40.755212Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341524872877979:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:40.755355Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:40.778911Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.863755Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.968524Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341524872878161:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:40.968705Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:40.968933Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341524872878167:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:40.973904Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T17:52:41.000642Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494341524872878169:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T17:52:41.085903Z node 11 :TX_PROXY ERROR: Actor# [11:7494341529167845517:2458] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:50.523045Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:50.523472Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:50.523629Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014cb/r3tmp/tmpj8Kb03/pdisk_1.dat 2025-04-17T17:52:51.037981Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:51.092892Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:51.137018Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:51.137238Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:51.149935Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:51.276747Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:645:2553], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:51.276914Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:655:2558], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:51.277627Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:51.288416Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:52:51.485656Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:659:2561], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:52:51.548567Z node 12 :TX_PROXY ERROR: Actor# [12:730:2601] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "3f8a6a2a-809a1afe-e954bf9-755fd42e" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"4c8aa676-414f418f-8e86e9cc-d22f7e6f\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" >> TableWriter::Backup [GOOD] >> TableWriter::Restore [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] >> SystemView::AuthPermissions [GOOD] >> SystemView::AuthPermissions_Access >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 61267, MsgBus: 25028 2025-04-17T17:52:36.930448Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341506270850823:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:36.930511Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cec/r3tmp/tmps39x7i/pdisk_1.dat 2025-04-17T17:52:37.449212Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:37.460461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:37.460567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:37.466132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61267, node 1 2025-04-17T17:52:37.641886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:37.641918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:37.641928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:37.642093Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25028 TClient is connected to server localhost:25028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:38.479350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:38.506641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:38.517049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:38.720837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:39.017181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:39.104270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:40.704223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341523450721592:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:40.704313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:40.970165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.006245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.038719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.069078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.123701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.194819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.274071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.376889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341527745689565:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.377003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.377355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341527745689570:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.382638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:41.398603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341527745689572:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:41.458068Z node 1 :TX_PROXY ERROR: Actor# [1:7494341527745689626:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:41.910544Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341506270850823:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:41.910590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:42.784828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:43.595444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 7083, MsgBus: 9078 2025-04-17T17:52:45.316431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341544587509045:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:45.316489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cec/r3tmp/tmpBXRXQf/pdisk_1.dat 2025-04-17T17:52:45.498611Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:45.519792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:45.519867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:45.522020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7083, node 2 2025-04-17T17:52:45.634724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:45.634750Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:45.634756Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:45.634864Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9078 TClient is connected to server localhost:9078 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:46.088159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:46.103310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:46.185534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:46.414416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:46.491043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:49.148970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341561767379982:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:49.149085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:49.212528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:49.300735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:49.392529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:49.444184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:49.497729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:49.589006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:49.680604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:49.813821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341561767380654:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:49.813945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:49.814299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341561767380659:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:49.819953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:49.841412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341561767380661:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:52:49.897799Z node 2 :TX_PROXY ERROR: Actor# [2:7494341561767380716:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:50.316555Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341544587509045:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:50.324171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:50.926367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T17:52:37.363054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341509231097950:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:37.370402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:52:37.519477Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341509325126221:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:37.983285Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:52:37.985574Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a2/r3tmp/tmpjpPEb8/pdisk_1.dat 2025-04-17T17:52:38.102842Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:52:38.498888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:38.641098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:38.689153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:38.689277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:38.695085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:38.695209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:38.705083Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:52:38.705239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:38.711581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17349, node 1 2025-04-17T17:52:39.132208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0019a2/r3tmp/yandexos1yz0.tmp 2025-04-17T17:52:39.132230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0019a2/r3tmp/yandexos1yz0.tmp 2025-04-17T17:52:39.132452Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0019a2/r3tmp/yandexos1yz0.tmp 2025-04-17T17:52:39.132595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:52:39.249948Z INFO: TTestServer started on Port 9580 GrpcPort 17349 TClient is connected to server localhost:9580 PQClient connected to localhost:17349 === TenantModeEnabled() = 1 === Init PQ - start server on port 17349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:52:40.064096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:52:40.064355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.064533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:52:40.064904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:52:40.064980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.071624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:52:40.071840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:52:40.072076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.072137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:52:40.072162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T17:52:40.072179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
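
Reading the state numbers off this trace gives the sub-operation pipeline directly: "2 -> 3" is logged as TCreateParts hands over to TConfigureParts, "3 -> 128" as TConfigureParts hands over to TPropose, and "128 -> 240" when the planned step completes and TDone takes over. The sketch below just replays that mapping; the enum names mirror what the surrounding log lines call each phase and are not copied from YDB's actual TTxState definition.

// State transitions as observed in the trace; names inferred from the log.
#include <cstdio>

enum TxState : int {
    CreateParts    = 2,    // allocate shards (none needed here -> advance)
    ConfigureParts = 3,    // push configuration to the shards
    Propose        = 128,  // propose a plan step to the coordinator
    Done           = 240,  // operation finished, notify subscribers
};

int main() {
    const TxState path[] = {CreateParts, ConfigureParts, Propose, Done};
    for (int i = 0; i + 1 < 4; ++i)
        std::printf("Change state for txid 281474976710657:0 %d -> %d\n",
                    path[i], path[i + 1]);
}
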
2025-04-17T17:52:40.079531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.079604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:52:40.079650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T17:52:40.082885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.082929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.082953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:52:40.083250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:52:40.092472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:40.093022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:40.093046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T17:52:40.093077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:40.100804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T17:52:40.101047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:52:40.104695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912360149, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:52:40.104857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912360149 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:52:40.104886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:52:40.105192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T17:52:40.105250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:52:40.105439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:52:40.105533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:52:40.108725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:52:40.108760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:52:40.108986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:52:40.109002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494341513526065892:2399], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T17:52:40.109057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:40.109082Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T17:52:40.109177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:52:40.109190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:52:40.109223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:52:40.109237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:52:40.109253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T17:52:40.109275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:52:40.109291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T17:52:40.109300Z n ... eId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T17:52:53.946861Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494341576476978124:2440] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T17:52:53.946876Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T17:52:53.957141Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2025-04-17T17:52:53.957188Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7494341576476978127:2440], now have 1 active actors on pipe 2025-04-17T17:52:53.957322Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:52:53.957344Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-17T17:52:53.957418Z node 3 :PERSQUEUE INFO: new Cookie 123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2025-04-17T17:52:53.957510Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-17T17:52:53.957553Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:52:53.957788Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:52:53.957813Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-17T17:52:53.957924Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:52:53.958020Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0 2025-04-17T17:52:53.958843Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912373958 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:52:53.958970Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0" topic: "PQ/account/topic" 2025-04-17T17:52:53.959300Z :DEBUG: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write 1 messages with Id from 1 to 1 2025-04-17T17:52:53.959417Z :DEBUG: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session: try to update token 2025-04-17T17:52:53.959463Z :DEBUG: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Send 1 message(s) (0 left), first sequence number is 3 2025-04-17T17:52:53.959704Z :INFO: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session: close. Timeout = 10000 ms 2025-04-17T17:52:53.960652Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T17:52:53.960886Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-17T17:52:53.961300Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:52:53.961330Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-17T17:52:53.961410Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-17T17:52:53.961456Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T17:52:53.966302Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:52:53.966335Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-17T17:52:53.966377Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-04-17T17:52:53.966471Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-04-17T17:52:53.966526Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". 
Partition: 0: Cookie: 3 2025-04-17T17:52:53.966654Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-04-17T17:52:53.967531Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-04-17T17:52:53.968049Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1744912373967 2025-04-17T17:52:53.968155Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:52:53.968170Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:52:53.968185Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:52:53.968201Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:52:53.968218Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] m0000000000p123 2025-04-17T17:52:53.968228Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-17T17:52:53.968236Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:52:53.968251Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:52:53.968265Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:52:53.968301Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:52:53.968359Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2025-04-17T17:52:53.982542Z node 3 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 431 actorID [3:7494341576476977803:2404] 2025-04-17T17:52:53.982642Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:52:53.982684Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:52:53.982727Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-17T17:52:53.982910Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T17:52:53.982976Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T17:52:53.983605Z node 3 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 size 431 2025-04-17T17:52:53.983709Z :DEBUG: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 15 } 2025-04-17T17:52:53.983764Z :DEBUG: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session: acknoledged message 1 2025-04-17T17:52:54.060997Z :INFO: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session will now close 2025-04-17T17:52:54.061079Z :DEBUG: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session: aborting 2025-04-17T17:52:54.061610Z :INFO: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:52:54.061657Z :DEBUG: [] MessageGroupId [123] SessionId [123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0] Write session: destroy 2025-04-17T17:52:54.070340Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0 grpc read done: success: 0 data: 2025-04-17T17:52:54.070381Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0 grpc read failed 2025-04-17T17:52:54.070411Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0 grpc closed 2025-04-17T17:52:54.070437Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|c5052073-2b1ef0cb-cc96738c-a2ebb256_0 is DEAD 2025-04-17T17:52:54.071037Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:52:54.071426Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7494341576476978127:2440] destroyed 2025-04-17T17:52:54.071463Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:52:54.590679Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494341580771945443:2446], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:52:54.592978Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWYxMDJiNy1iOWQ3Njg1NC02NjRjNDk4NC01MDdiMjg0YQ==, ActorId: [3:7494341580771945441:2445], ActorState: ExecuteState, TraceId: 01js2e3prdct333yz7ms2da5ck, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:52:54.593415Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> DataShardReadIteratorSysTables::ShouldRead [GOOD] >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid >> TSchemeShardAuditSettings::CreateSubdomain >> SystemView::AuthGroupMembers_ResultOrder [GOOD] >> SystemView::AuthGroupMembers_TableRange |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |78.4%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TTxAllocatorClientTest::ZeroRange [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck >> TSchemeShardAuditSettings::AlterSubdomain >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD] >> DataShardReadIterator::ShouldStopWhenNodeDisconnected |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |78.4%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:52:57.641284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:57.641371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:57.641403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:57.641433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:57.641471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:57.641527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:57.641610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:57.641671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:57.641959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:57.753998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:57.754045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:57.784631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:57.785059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:57.785191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:57.797003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:57.797218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:57.797885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:57.798110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:57.800996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:57.802556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:57.802617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:57.802813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:57.802862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:57.802899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-17T17:52:57.803003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:57.809998Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:52:57.995469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:57.995725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:57.995900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:57.996130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:57.996223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.004276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.004441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:58.004687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.004742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:58.004793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:58.004914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:58.011382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.011494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:58.011550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:58.014921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.014986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.015033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:58.015078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:58.018839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:58.023883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:58.024199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:58.025855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.026152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:58.026293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:58.026749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:58.026819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:58.027119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:58.027242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:58.030454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:58.030501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.030719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.030766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:58.031109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.031151Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:58.031243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:58.031278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:58.031309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:58.031364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:58.031402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:58.031437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T17:52:58.031469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:58.031495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:58.031557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:58.031590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:58.031634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:58.033539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:58.033657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:58.033693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.408265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.408293Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-17T17:52:58.408320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:58.408725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.408808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.408833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.408857Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-17T17:52:58.408883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:52:58.408937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-17T17:52:58.411137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-17T17:52:58.411249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 
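An aside on the FAKE_COORDINATOR lines just above: the schemeshard proposes txId 112 with MinStep 0 and MaxStep 18446744073709551615, and the coordinator answers by fixing the transaction to plan step 5000013 ("Add transaction: 112 at step: 5000013" / "Send Plan to tablet ... at step: 5000013"). The sketch below models only that step-assignment contract in plain Python; the class name and the step arithmetic are illustrative guesses, not YDB's actual coordinator code.

    # Illustrative model only: pending proposed transactions are bound to a
    # monotonically increasing plan step, within each tx's [MinStep, MaxStep].
    class PlanStepCoordinator:
        def __init__(self, step_resolution=1000):
            self.step_resolution = step_resolution
            self.last_step = 0
            self.pending = []  # (tx_id, affected_tablets, min_step, max_step)

        def propose(self, tx_id, affected_tablets, min_step=0, max_step=2**64 - 1):
            self.pending.append((tx_id, affected_tablets, min_step, max_step))

        def advance(self, now_step):
            """Close the current step and emit a plan entry per affected tablet."""
            step = max(self.last_step + self.step_resolution, now_step)
            plans = []
            for tx_id, tablets, lo, hi in self.pending:
                assert lo <= step <= hi, "tx cannot be planned in this step"
                for tablet in tablets:
                    plans.append((step, tx_id, tablet))  # mediator would deliver in step order
            self.pending.clear()
            self.last_step = step
            return plans

    coord = PlanStepCoordinator()
    coord.propose(112, affected_tablets=[72057594046678944])
    print(coord.advance(now_step=5000013))  # [(5000013, 112, 72057594046678944)]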
2025-04-17T17:52:58.411721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.411820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:58.411871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-17T17:52:58.411942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.411967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-17T17:52:58.411996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2025-04-17T17:52:58.412803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T17:52:58.414106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T17:52:58.417494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.417542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:58.417669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2025-04-17T17:52:58.417830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:58.417891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 FAKE_COORDINATOR: Erasing txId 112 2025-04-17T17:52:58.420057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:58.420107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.420220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-17T17:52:58.420342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.420384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-17T17:52:58.420432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-04-17T17:52:58.420694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.420739Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 
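The "Change state for txid N:0 A -> B" lines trace each sub-operation through numeric state codes: 2 -> 3 -> 128 -> 240 for the subdomain alter earlier in this report, and 128 -> 134 -> 135 -> 240 for the extsubdomain drop here. A table-driven sketch of that progression follows; only the numeric codes come from the log, while the state labels are informed guesses from the adjacent messages (TCreateParts, TConfigureParts, TPropose, TDeleteExternalShards, TDeleteParts, TDone), not authoritative NKikimr definitions.

    # Hypothetical reconstruction of the sub-operation state machine seen in
    # the "Change state for txid ..." lines above.
    STATE_NAMES = {
        2: "CreateParts",
        3: "ConfigureParts",
        128: "Propose",
        134: "DeleteExternalShards",
        135: "DeleteParts",
        240: "Done",
    }

    TRANSITIONS = {
        "alter_subdomain": [2, 3, 128, 240],
        "drop_extsubdomain": [128, 134, 135, 240],
    }

    def replay(op_kind, tx_id="112:0"):
        path = TRANSITIONS[op_kind]
        for a, b in zip(path, path[1:]):
            print(f"Change state for txid {tx_id} {a} -> {b} "
                  f"({STATE_NAMES[a]} -> {STATE_NAMES[b]})")

    replay("drop_extsubdomain")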
2025-04-17T17:52:58.420764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2025-04-17T17:52:58.421421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.421496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.421520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.421546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-17T17:52:58.421588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:58.422453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.422531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.422552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.422576Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-17T17:52:58.422604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:52:58.422664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-17T17:52:58.427110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.427172Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2025-04-17T17:52:58.427269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-17T17:52:58.427297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T17:52:58.427324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-17T17:52:58.427346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T17:52:58.427372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2025-04-17T17:52:58.427400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T17:52:58.427424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-17T17:52:58.427443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-17T17:52:58.427488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 7] was 2
2025-04-17T17:52:58.428238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:52:58.428294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944
2025-04-17T17:52:58.428363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1
2025-04-17T17:52:58.428668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:52:58.428697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944
2025-04-17T17:52:58.428745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:52:58.429338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112
2025-04-17T17:52:58.429419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112
2025-04-17T17:52:58.432355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-17T17:52:58.432449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 112, wait until txId: 112
TestWaitNotification wait txId: 112
2025-04-17T17:52:58.432750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion
2025-04-17T17:52:58.432789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112
2025-04-17T17:52:58.433205Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944
2025-04-17T17:52:58.433343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult
2025-04-17T17:52:58.433371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:649:2640]
TestWaitNotification: OK eventTxId 112
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD]
Test command err:
2025-04-17T17:51:17.337483Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32
2025-04-17T17:51:17.338075Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19
2025-04-17T17:51:17.343441Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01
2025-04-17T17:51:17.345755Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-17T17:51:17.350725Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor
2025-04-17T17:51:17.371271Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-17T17:51:17.371404Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-17T17:51:17.371518Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28
2025-04-17T17:51:17.371686Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-17T17:51:17.371766Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-17T17:51:17.371865Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete
2025-04-17T17:51:17.372011Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24
2025-04-17T17:51:17.372717Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000
2025-04-17T17:51:17.373295Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-17T17:51:17.373393Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-17T17:51:17.373486Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000
2025-04-17T17:51:17.373525Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000
|78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest
>> DataStreams::TestUnsupported [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:56:2097]) on event
NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
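The TTxAllocatorClientTest::ZeroRange report above shows the allocation protocol in one round trip: the client asks the allocator tablet for a range ("HANDLE TEvAllocate ... requested range size#5000") and receives a reserved block ("TEvAllocateResult from# 0 to# 5000"), after which ids can be handed out locally until the block is exhausted. A self-contained sketch of that client-side behaviour follows; the RangeAllocator class and the fake reserve callback are illustrative stand-ins, not YDB's API.

    # Minimal model of a range-allocating client: one round trip reserves
    # [begin, end), after which ids are handed out with no further traffic.
    class RangeAllocator:
        def __init__(self, reserve, block_size=5000):
            self._reserve = reserve        # callable(size) -> (begin, end)
            self._block_size = block_size
            self._next = 0
            self._end = 0                  # empty range forces an initial reserve

        def allocate(self):
            if self._next == self._end:
                self._next, self._end = self._reserve(self._block_size)
            tx_id = self._next
            self._next += 1
            return tx_id

    _cursor = 0
    def fake_tablet_reserve(size):
        # Stands in for the TEvAllocate/TEvAllocateResult exchange with the tablet.
        global _cursor
        begin, _cursor = _cursor, _cursor + size
        return begin, _cursor

    alloc = RangeAllocator(fake_tablet_reserve)
    print([alloc.allocate() for _ in range(3)])  # [0, 1, 2] -- all from the block 0..5000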
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
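Reboot-style logs like this block are easier to audit mechanically than by eye. A short parser, matching the literal "!Reboot <tablet> (actor [...]) on event <Event> !" format shown here, can summarise which event types the test rebooted on; the regex and counting logic are mine, only the line format is taken from the log.

    import re
    from collections import Counter

    REBOOT_RE = re.compile(
        r"!Reboot (?P<tablet>\d+) \(actor \[[^\]]+\]\) on event (?P<event>\S+) !"
    )

    def reboot_coverage(log_text):
        """Count reboots per intercepted event type, e.g. NKikimr::TEvKeyValue::TEvRequest."""
        return Counter(m.group("event") for m in REBOOT_RE.finditer(log_text))

    sample = ("!Reboot 72057594037927937 (actor [3:56:2097]) "
              "on event NKikimr::TEvKeyValue::TEvRequest !")
    print(reboot_coverage(sample))  # Counter({'NKikimr::TEvKeyValue::TEvRequest': 1})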
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! 
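The pattern this TKeyValueTest log is exercising, restarting the tablet exactly when a chosen event type is first observed and then confirming a fresh actor comes up with the expected state, generalises to any crash-recovery test. Below is a toy harness expressing the "!Reboot ... on event X ! ... rebooted! ... tablet resolver refreshed!" cycle; the KeyValueStore here is a simplified stand-in, not the NKikimr tablet.

    # Toy crash-injection harness: reboot the store the moment a chosen event
    # fires, then inspect which state survived the restart.
    class KeyValueStore:
        def __init__(self, persisted=None):
            self.persisted = dict(persisted or {})  # only this survives a reboot
            self.generation = 1

    def run_with_reboot_on(event_name):
        store = KeyValueStore()

        def observe(event):
            nonlocal store
            if event == event_name:
                # "!Reboot ... rebooted!": a fresh actor starts from persisted state.
                reborn = KeyValueStore(store.persisted)
                reborn.generation = store.generation + 1
                store = reborn

        # One write request, with interception points around the persist step.
        observe("TEvRequest")             # request arrives
        store.persisted["key"] = "value"  # tablet persists the write
        observe("TEvIntermediate")        # post-persist processing
        return store

    for ev in ("TEvRequest", "TEvIntermediate"):
        s = run_with_reboot_on(ev)
        print(ev, "-> generation", s.generation, "survived:", s.persisted)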
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2 ... 2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [106:54:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:57:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:74:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:127:2057] recipient: [106:36:2083] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:130:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:131:2057] recipient: [106:129:2148] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:133:2057] recipient: [106:129:2148] !Reboot 72057594037927937 (actor [106:56:2097]) rebooted! !Reboot 72057594037927937 (actor [106:56:2097]) tablet resolver refreshed! new actor is[106:132:2149] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:152:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:57:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:74:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:130:2057] recipient: [107:36:2083] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:133:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:134:2057] recipient: [107:132:2151] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:136:2057] recipient: [107:132:2151] !Reboot 72057594037927937 (actor [107:56:2097]) rebooted! !Reboot 72057594037927937 (actor [107:56:2097]) tablet resolver refreshed! 
new actor is[107:135:2152] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:189:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:57:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:74:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:130:2057] recipient: [108:36:2083] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:133:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:134:2057] recipient: [108:132:2151] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:136:2057] recipient: [108:132:2151] !Reboot 72057594037927937 (actor [108:56:2097]) rebooted! !Reboot 72057594037927937 (actor [108:56:2097]) tablet resolver refreshed! new actor is[108:135:2152] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:189:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:50:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:57:2057] recipient: [109:50:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:74:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:131:2057] recipient: [109:36:2083] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:134:2057] recipient: [109:133:2151] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:135:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:137:2057] recipient: [109:133:2151] !Reboot 72057594037927937 (actor [109:56:2097]) rebooted! !Reboot 72057594037927937 (actor [109:56:2097]) tablet resolver refreshed! new actor is[109:136:2152] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:154:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:52:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:57:2057] recipient: [110:52:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:74:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:133:2057] recipient: [110:36:2083] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:135:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:137:2057] recipient: [110:136:2153] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:139:2057] recipient: [110:136:2153] !Reboot 72057594037927937 (actor [110:56:2097]) rebooted! !Reboot 72057594037927937 (actor [110:56:2097]) tablet resolver refreshed! 
new actor is[110:138:2154] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:192:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:57:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:74:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:133:2057] recipient: [111:36:2083] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:136:2057] recipient: [111:135:2153] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:137:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:139:2057] recipient: [111:135:2153] !Reboot 72057594037927937 (actor [111:56:2097]) rebooted! !Reboot 72057594037927937 (actor [111:56:2097]) tablet resolver refreshed! new actor is[111:138:2154] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:192:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:52:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:57:2057] recipient: [112:52:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:74:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:134:2057] recipient: [112:36:2083] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:136:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:138:2057] recipient: [112:137:2153] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:140:2057] recipient: [112:137:2153] !Reboot 72057594037927937 (actor [112:56:2097]) rebooted! !Reboot 72057594037927937 (actor [112:56:2097]) tablet resolver refreshed! new actor is[112:139:2154] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:193:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:57:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:74:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:137:2057] recipient: [113:36:2083] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:140:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:141:2057] recipient: [113:139:2156] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:143:2057] recipient: [113:139:2156] !Reboot 72057594037927937 (actor [113:56:2097]) rebooted! !Reboot 72057594037927937 (actor [113:56:2097]) tablet resolver refreshed! 
new actor is[113:142:2157] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:196:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:50:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:57:2057] recipient: [114:50:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:74:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:137:2057] recipient: [114:36:2083] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:140:2057] recipient: [114:139:2156] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:141:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:143:2057] recipient: [114:139:2156] !Reboot 72057594037927937 (actor [114:56:2097]) rebooted! !Reboot 72057594037927937 (actor [114:56:2097]) tablet resolver refreshed! new actor is[114:142:2157] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:196:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:57:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:74:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:138:2057] recipient: [115:36:2083] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:141:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:142:2057] recipient: [115:140:2156] Leader for TabletID 72057594037927937 is [115:143:2157] sender: [115:144:2057] recipient: [115:140:2156] !Reboot 72057594037927937 (actor [115:56:2097]) rebooted! !Reboot 72057594037927937 (actor [115:56:2097]) tablet resolver refreshed! 
new actor is[115:143:2157] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:57:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:74:2057] recipient: [116:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:52:58.081924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:58.082023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:58.082056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:58.082091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:58.082129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:58.082156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:58.082277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:58.082378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:58.082675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:58.179611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:58.179666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:58.194929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:58.195028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:58.195165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:58.201124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:58.201396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:58.201984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.202324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:58.206349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.207620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:58.207677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.207809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:58.207852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.207890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:58.208056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.216614Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:52:58.348710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:58.348854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.348968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:58.349140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:58.349177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.350938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.351042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:58.351189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.351253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:58.351299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:58.351340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:58.357776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.357841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:58.357895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:58.365870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.365924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.365973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:58.366018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:58.379530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:58.384956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:58.385221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:58.386570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.386724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:58.386802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:58.387076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:58.387137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:58.387296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:58.387390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:58.390107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:58.390155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.390372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.390418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:58.390749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.390799Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:58.390903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:58.390954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:58.391017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:58.391066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:58.391106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:58.391149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:58.391186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:58.391215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:58.391274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:58.391309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:58.391342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:58.393251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:58.393366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:58.393410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.768509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.768541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.768578Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-17T17:52:58.768616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:58.769091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.769190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.769252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.769281Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-17T17:52:58.769309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:52:58.769364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-17T17:52:58.772196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-17T17:52:58.772300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-04-17T17:52:58.773232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.773330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:58.773370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-17T17:52:58.773406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.773430Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-17T17:52:58.773555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2025-04-17T17:52:58.773667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:58.773704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-17T17:52:58.773990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T17:52:58.774215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T17:52:58.775407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:58.775444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.775551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-17T17:52:58.775682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.775718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-17T17:52:58.775780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 112, path id: 7 FAKE_COORDINATOR: Erasing txId 112 2025-04-17T17:52:58.776109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.776151Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-04-17T17:52:58.776213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-17T17:52:58.776237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T17:52:58.776264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-17T17:52:58.776285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T17:52:58.776315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-04-17T17:52:58.776341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T17:52:58.776364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-17T17:52:58.776420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-17T17:52:58.776476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T17:52:58.776502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-04-17T17:52:58.776545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
2025-04-17T17:52:58.776574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-04-17T17:52:58.777169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.777218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.777235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.777263Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-17T17:52:58.777293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:58.777779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.777851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T17:52:58.777876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-04-17T17:52:58.777901Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-17T17:52:58.777930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-17T17:52:58.777996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-04-17T17:52:58.778362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:52:58.778399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-17T17:52:58.778469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-17T17:52:58.779028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:52:58.779064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-17T17:52:58.779106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:58.780670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T17:52:58.783146Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T17:52:58.783269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:52:58.783347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-17T17:52:58.783707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-17T17:52:58.783740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-17T17:52:58.784278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-17T17:52:58.784363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-17T17:52:58.784448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:664:2655] TestWaitNotification: OK eventTxId 112 >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService |78.5%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2025-04-17T17:52:37.287839Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341509579989828:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:37.287898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001991/r3tmp/tmpzw6bL4/pdisk_1.dat 2025-04-17T17:52:38.259742Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:38.311024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:38.311126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:38.318739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:38.323669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12032, node 1 2025-04-17T17:52:38.531040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:38.531070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:38.531077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-17T17:52:38.531204Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:39.080920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:39.319542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10388 2025-04-17T17:52:39.510969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:39.679045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:52:43.205580Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341537647082911:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:43.205633Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001991/r3tmp/tmpim4XXo/pdisk_1.dat 2025-04-17T17:52:43.589955Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:43.669573Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:43.669664Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:43.675809Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32342, node 4 2025-04-17T17:52:43.846977Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:43.847004Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:43.847012Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:43.847169Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:44.292282Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:44.468748Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:6117 2025-04-17T17:52:44.699212Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:45.083610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:52:45.198535Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: 
"shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } re ... ard_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } 2025-04-17T17:52:48.205850Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494341537647082911:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:48.205991Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: 
"shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744912365012-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744912365,"finish":1744912365},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912365}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744912365142-3","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744912365,"finish":1744912365},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912365}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744912365246-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744912365,"finish":1744912366},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912366}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744912366303-5","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744912366,"finish":1744912367},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912367}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744912367333-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744912367,"finish":1744912368},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912368}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744912368358-7","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744912368,"finish":1744912369},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912369}' 2025-04-17T17:52:53.507530Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341580110277639:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:53.507642Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001991/r3tmp/tmpGfi8Nk/pdisk_1.dat 2025-04-17T17:52:53.776326Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:53.822237Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:53.822346Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:53.828203Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63028, node 7 2025-04-17T17:52:54.131749Z node 7 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:54.131775Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:54.131785Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:54.131954Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:54.490282Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:54.593008Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2785 2025-04-17T17:52:54.939188Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive |78.5%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> DataShardReadIterator::ShouldReadKeyCellVec |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> Cdc::DecimalKey [GOOD] >> Cdc::DropColumn >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight >> TNebiusAccessServiceTest::Authorize [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:52:58.913793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:58.913900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:58.913942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:58.913976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:58.914038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:58.914073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:58.914131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:58.916691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:58.917076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:59.019125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:59.019179Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:59.035427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:59.035569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:59.035723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:59.049723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:59.050142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-17T17:52:59.050887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:59.051260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:59.055479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:59.056846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:59.056914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:59.057042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:59.057084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:59.057112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:59.057274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.064151Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:52:59.209692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:59.209898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.210081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:59.212780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:59.212876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.218973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:59.219098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:59.219292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.219354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:59.219390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:59.219434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:59.221471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.221535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:59.221570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:59.227195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.227246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.227301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:59.227358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.231344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:59.233527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:59.233724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:59.234669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:59.234805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:59.234855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:59.235090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:59.235145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:59.235291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:59.235419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:59.237495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:59.237544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:59.237735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T17:52:59.237782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:59.238071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.238118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:59.238276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:59.238337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.238378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:59.238422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.238459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:59.238512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.238550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:59.238580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:59.238644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:59.238694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:59.238741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:59.240675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:59.240794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:59.240834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.410806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.410830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.410856Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-17T17:53:01.410883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:53:01.411635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.411705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.411729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.411762Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-17T17:53:01.411792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-17T17:53:01.411843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-17T17:53:01.415666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-17T17:53:01.415746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-17T17:53:01.416585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:53:01.416677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:53:01.416713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-17T17:53:01.416751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:53:01.416775Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-17T17:53:01.416862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2025-04-17T17:53:01.416993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:53:01.417031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-17T17:53:01.417900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:01.418027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2025-04-17T17:53:01.425086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:53:01.425130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:53:01.425247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-17T17:53:01.425357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:53:01.425400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-17T17:53:01.425440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-17T17:53:01.425747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-17T17:53:01.425792Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-17T17:53:01.425853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-17T17:53:01.425879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:01.425913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-17T17:53:01.425936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:01.425962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2025-04-17T17:53:01.425988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:01.426013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-17T17:53:01.426036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-17T17:53:01.426118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-17T17:53:01.426149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2025-04-17T17:53:01.426193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 
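The "Change state for txid N:0 A -> B" lines above trace a small per-operation state machine whose numeric states are opaque in the log. The mapping below merely records the transitions visible in this trace so far, with labels taken from the neighbouring ProgressState messages; it is a reading of this log, not a statement about schemeshard internals:

# Transitions printed in this trace, keyed by the operation type names as
# they appear in the log ("operation type: TxAlterSubDomain", "TDropForceUnsafe").
OBSERVED_TRANSITIONS = {
    ("TxAlterSubDomain", 2): (3, "TCreateParts -> TConfigureParts"),
    ("TxAlterSubDomain", 3): (128, "TConfigureParts -> TPropose"),
    ("TxAlterSubDomain", 128): (240, "TPropose planned -> TDone"),
    ("TDropForceUnsafe", 128): (130, "TPropose -> TDeleteParts"),
}

def next_state(op: str, state: int) -> int | None:
    """Follow one transition if this trace showed it, else None."""
    hit = OBSERVED_TRANSITIONS.get((op, state))
    return hit[0] if hit is not None else None

assert next_state("TxAlterSubDomain", 2) == 3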
2025-04-17T17:53:01.426224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2025-04-17T17:53:01.426964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.427132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.427165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.427206Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-17T17:53:01.427235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:53:01.427965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.428029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.428050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.428074Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-17T17:53:01.428100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-17T17:53:01.428188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2025-04-17T17:53:01.428344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:53:01.428372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-17T17:53:01.428480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-17T17:53:01.429037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:53:01.429067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-17T17:53:01.429137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:53:01.433826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 
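One detail worth pinning down: the Version published for the dropped path, 18446744073709551615, is exactly the largest unsigned 64-bit integer, and in this trace it behaves as a tombstone telling subscribers the path is gone. A quick check (the tombstone reading is inferred from this log alone):

# 2**64 - 1 is the Version this trace publishes for a path being removed.
DROPPED_PATH_VERSION = 2**64 - 1
assert DROPPED_PATH_VERSION == 18446744073709551615

def looks_dropped(version: int) -> bool:
    """Heuristic from this trace: a max-uint64 version marks a dropped path."""
    return version == DROPPED_PATH_VERSION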
2025-04-17T17:53:01.437881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:01.438019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:53:01.438101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-17T17:53:01.439264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-17T17:53:01.439307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-17T17:53:01.440604Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-17T17:53:01.440688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-17T17:53:01.440715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2475:4466] TestWaitNotification: OK eventTxId 175 |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:52:58.831333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:58.831420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:58.831457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:58.831488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:52:58.831526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:58.831552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:58.831600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:58.831696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:58.832019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:58.920881Z node 
1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:58.920944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:58.948479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:58.948598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:58.948738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:58.971419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:58.971774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:58.972453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:58.972785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:58.979399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.980873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:58.980943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:58.981072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:58.981118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:58.981156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:58.981377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:58.988719Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:52:59.139025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:59.139257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.139442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:59.139740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:59.139819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.142969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:59.143106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:59.143318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.143382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:59.143417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:59.143460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:59.146048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.146114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:59.146163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:59.148133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.148226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.148274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:59.148321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.152340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:59.154514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:59.154722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:59.155827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:59.155979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:59.156038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:59.156301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:59.156368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:59.156544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:59.156633Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:59.158986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:59.159038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:59.159250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:59.159296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:59.159641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:59.159692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:59.159812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:59.159849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.159890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:59.159939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.159980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:59.160023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:59.160056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:59.160086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:59.160149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:59.160188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:59.160233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:59.162136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:59.162291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:59.162326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.518681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.518714Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-17T17:53:01.518756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:53:01.519544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.519628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.519659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.519687Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-17T17:53:01.519717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-17T17:53:01.519806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-17T17:53:01.521741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-17T17:53:01.521866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-17T17:53:01.522933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:53:01.523031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:53:01.523077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-17T17:53:01.523165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:53:01.523195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-17T17:53:01.523237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2025-04-17T17:53:01.524624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:01.524911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:01.525971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-17T17:53:01.526021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:53:01.526123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-17T17:53:01.526305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:53:01.526389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2025-04-17T17:53:01.528245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:53:01.528288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:53:01.528441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-17T17:53:01.528547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:53:01.528575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-17T17:53:01.528601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-17T17:53:01.528903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-17T17:53:01.528936Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-17T17:53:01.528959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-17T17:53:01.529657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.529763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.529794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.529826Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-17T17:53:01.529857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:53:01.530567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.530639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:01.530665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:01.530693Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-17T17:53:01.530722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-17T17:53:01.530798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-17T17:53:01.533173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-17T17:53:01.533210Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-17T17:53:01.533271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-17T17:53:01.533293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:01.533316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-17T17:53:01.533334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:01.533359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-17T17:53:01.533390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:01.533412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-17T17:53:01.533434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-17T17:53:01.533491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-17T17:53:01.533887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:53:01.533920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-17T17:53:01.533972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-17T17:53:01.534571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:53:01.534602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-17T17:53:01.534644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:53:01.535182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:01.535334Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:01.537279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:53:01.537355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-17T17:53:01.538724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-17T17:53:01.538765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-17T17:53:01.540203Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-17T17:53:01.540310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-17T17:53:01.540347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2613:4604] TestWaitNotification: OK eventTxId 175 >> Yq_1::DeleteConnections >> Cdc::SupportedTypes [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2025-04-17T17:53:02.382828Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Connect to grpc://localhost:16647 2025-04-17T17:53:02.391729Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-17T17:53:02.399163Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2025-04-17T17:53:02.399979Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2025-04-17T17:53:02.401890Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Status 7 Permission Denied 2025-04-17T17:53:02.402540Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-17T17:53:02.404076Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Status 7 Permission Denied 2025-04-17T17:53:02.404412Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2025-04-17T17:53:02.405634Z node 3 :GRPC_CLIENT DEBUG: [517000004388] Status 7 Permission Denied >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD] >> DataShardReadIteratorSysTables::ShouldNotAllowArrow >> DataStreams::TestListShards1Shard [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut 
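In the TNebiusAccessServiceTest::Authorize output above, IAM tokens are never printed in the clear; each shows up as stars plus eight hex digits, e.g. "**** (717F937C)", so two different tokens stay distinguishable across requests. A sketch of that kind of masking, assuming a CRC32-style checksum (the log fixes only the 8-hex-digit shape, not the actual hash function):

import zlib

def mask_token(token: str) -> str:
    """Render a secret the way the GRPC_CLIENT lines render iam_token:
    the value is hidden and only a short fixed-width checksum is shown,
    so repeated values can be correlated without being revealed.
    CRC32 here is an assumption, not the logger's documented choice."""
    digest = zlib.crc32(token.encode("utf-8")) & 0xFFFFFFFF
    return f"**** ({digest:08X})"

print(mask_token("example-iam-token"))  # illustrative: stars plus 8 hex digits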
|78.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.6%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> Yq_1::DescribeConnection >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite |78.6%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> YdbYqlClient::ConnectDbAclIsStrictlyChecked >> YdbYqlClient::TestColumnOrder >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2025-04-17T17:52:38.291696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341512728232703:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:38.291757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001988/r3tmp/tmpJZQbYg/pdisk_1.dat 2025-04-17T17:52:39.272625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:39.272723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:39.276550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:39.310063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:39.402322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 9392, node 1 2025-04-17T17:52:39.432101Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:39.494891Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:52:39.765625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:39.765652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:39.765686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:39.765844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20586 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:40.374069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:40.579476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20586 2025-04-17T17:52:40.762042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:41.263375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" 
} records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2025-04-17T17:52:41.407484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.539806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.572537Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T17:52:41.572835Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T17:52:41.572988Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1744912361090-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1744912361090-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1744912361478-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1744912361478-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037890","source_wt":1744912361}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1744912361478-5","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1744912361478-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1744912361090-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1744912361090-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1744912361478-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744912361,"finish":1744912361},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1744912361}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1744912361478-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"us ... 4480 waiting... 2025-04-17T17:52:52.074162Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15889 2025-04-17T17:52:52.292036Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:52:52.308710Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1744912372.639470 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912372.639629 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912372.656942 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912372.657050 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912372.669468 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912372.669606 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912372.679231 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912372.679329 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T17:52:52.703105Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:52:52.917594Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 E0000 00:00:1744912373.060090 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912373.060213 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T17:52:53.083419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 E0000 00:00:1744912373.271675 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912373.271802 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T17:52:53.290132Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 E0000 00:00:1744912373.448325 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912373.448603 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912373.494603 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912373.494713 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T17:52:53.561470Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:53.611364Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-04-17T17:52:53.611396Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-04-17T17:52:53.611408Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-04-17T17:52:53.611419Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2025-04-17T17:52:53.611429Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-04-17T17:52:53.611459Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2025-04-17T17:52:53.626858Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-04-17T17:52:53.626934Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-17T17:52:53.626965Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-04-17T17:52:53.626989Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-04-17T17:52:53.627013Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-04-17T17:52:53.627041Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found E0000 00:00:1744912373.658111 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912373.658240 96635 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T17:52:57.425812Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341594721643532:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:57.426052Z node 10 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001988/r3tmp/tmpx3sksC/pdisk_1.dat 2025-04-17T17:52:57.773808Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:57.824478Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:57.824629Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:57.833470Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14935, node 10 2025-04-17T17:52:57.993079Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:57.993097Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:57.993105Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:57.993242Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:58.374340Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:58.488967Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:31761 2025-04-17T17:52:58.855304Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
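The TClient::Ls response above is protobuf text format, printed truncated. A regex heuristic for pulling single scalar fields out of such a dump (a sketch only, not a real protobuf text-format parser; ls_scalar is a hypothetical helper):

import re

def ls_scalar(response, field):
    # Match 'Field: value' or 'Field: "value"'; returns None if absent.
    m = re.search(rf'\b{field}: ("?[^"\s}}]+"?)', response)
    return m.group(1).strip('"') if m else None

# Against a fragment of the response logged above:
fragment = 'Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 }'
assert ls_scalar(fragment, "Name") == "Root"
assert ls_scalar(fragment, "PathId") == "1"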
2025-04-17T17:52:58.880601Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 E0000 00:00:1744912379.106807 97819 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912379.119437 97819 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912379.129151 97819 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912379.140310 97819 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744912379.150275 97819 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead >> GrpcConnectionStringParserTest::NoDatabaseFlag >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD] >> DataShardReadIterator::ShouldReadRangeReverse |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> TGRpcYdbTest::ExecuteQueryBadRequest >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> YdbYqlClient::DiscoveryLocationOverride >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:52:56.200080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:52:56.200180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:56.200223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:52:56.200261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
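The repeated E0000 lines above come from proto2 required-field validation: NKikimrPQ.TYdsNextToken fails to parse whenever CreationTimestamp, MaxResults, AlreadyRead, or StreamArn is absent. A sketch for tallying these errors when triaging a log like this one (the regex is inferred from the E0000 line layout shown above):

import re
from collections import Counter

# Matches the message_lite.cc error lines seen above; '.' stands in for the apostrophe.
PARSE_ERR = re.compile(r'message_lite\.cc:\d+\] (Can.t parse message of type "[^"]+")')

def count_parse_errors(log_text):
    # Tally parse failures keyed by the quoted message type.
    return Counter(m.group(1) for m in PARSE_ERR.finditer(log_text))

# For this section the counter would be dominated by a single entry:
#   Can't parse message of type "NKikimrPQ.TYdsNextToken"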
2025-04-17T17:52:56.200305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:52:56.200336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:52:56.200396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:52:56.200499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:52:56.200850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:56.300906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:52:56.300971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:56.318035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:56.318193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:52:56.318357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:52:56.330456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:52:56.330883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:52:56.331618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:56.331995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:52:56.337359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:56.339089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:56.339160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:56.339304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:52:56.339354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:56.339400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:52:56.339690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.347236Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:52:56.628081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:52:56.628345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.628594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:52:56.628927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:52:56.628996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.631822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:56.631947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:52:56.632179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.632298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:52:56.632338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:52:56.632383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:52:56.634681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.634759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:52:56.634816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:52:56.637115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.637177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.637241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:56.637319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:52:56.641303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:52:56.647527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:52:56.647769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:52:56.648916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:52:56.649062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:52:56.649127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:56.649534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:52:56.649602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:52:56.649787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:52:56.649883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:52:56.652978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:52:56.653031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:52:56.653265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:52:56.653323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:52:56.653671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:52:56.653733Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:52:56.653845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:56.653887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:56.653930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:52:56.653974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:56.654019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:52:56.654059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:52:56.654092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:52:56.654123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:52:56.654264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:52:56.654309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:52:56.654344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:52:56.656403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:56.656551Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:52:56.656589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 233409618 TxId: 175 } 2025-04-17T17:53:04.296443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2025-04-17T17:53:04.296544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2025-04-17T17:53:04.296586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2025-04-17T17:53:04.296703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-17T17:53:04.296865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:53:04.296930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-17T17:53:04.298815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-17T17:53:04.299025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:53:04.299062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:53:04.299184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-17T17:53:04.299320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:53:04.299352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-17T17:53:04.299389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-17T17:53:04.299826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-17T17:53:04.299872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-17T17:53:04.299905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-17T17:53:04.300785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:04.300871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:04.300899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, 
at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:04.300931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-17T17:53:04.300961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:53:04.301768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:04.301845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-17T17:53:04.301876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-17T17:53:04.301907Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-17T17:53:04.301936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2025-04-17T17:53:04.302005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-17T17:53:04.304156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:53:04.304202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:53:04.304228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:53:04.304573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-17T17:53:04.304619Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-17T17:53:04.304691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-17T17:53:04.304722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:04.304755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-17T17:53:04.304780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:04.304811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-17T17:53:04.304844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-17T17:53:04.304874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-17T17:53:04.304898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-17T17:53:04.305057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-17T17:53:04.306745Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 TabletID: 72075186233409619 
2025-04-17T17:53:04.307033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2025-04-17T17:53:04.307254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 2025-04-17T17:53:04.307802Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 Forgetting tablet 72075186233409619 2025-04-17T17:53:04.309144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:53:04.311615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2025-04-17T17:53:04.311856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 Forgetting tablet 72075186233409618 2025-04-17T17:53:04.313614Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 Forgetting tablet 72075186233409620 2025-04-17T17:53:04.314007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:04.314877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-17T17:53:04.315279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2025-04-17T17:53:04.315535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-17T17:53:04.315964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:53:04.316002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-17T17:53:04.316101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-17T17:53:04.316491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:53:04.316532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-17T17:53:04.316586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:53:04.323131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2025-04-17T17:53:04.323192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2025-04-17T17:53:04.323597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2025-04-17T17:53:04.323631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2025-04-17T17:53:04.325337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:75 2025-04-17T17:53:04.325386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:75 tabletId 72075186233409620 2025-04-17T17:53:04.325649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:53:04.325720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-17T17:53:04.327096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-17T17:53:04.327134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-17T17:53:04.328647Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-17T17:53:04.328771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-17T17:53:04.328805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6767:7749] TestWaitNotification: OK eventTxId 175 >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> DataShardReadIterator::ShouldNotReadAfterCancel >> TGRpcClientLowTest::GrpcRequestProxy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15830, MsgBus: 29354 2025-04-17T17:52:37.146569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341510119138139:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:37.157424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ce8/r3tmp/tmpOSa4qR/pdisk_1.dat 2025-04-17T17:52:37.691494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:37.698431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:37.698756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:37.703815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15830, node 1 2025-04-17T17:52:37.830813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:37.830836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:37.830843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:37.830960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29354 TClient is connected to server localhost:29354 WaitRootIsUp 'Root'... 
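The structured YDB log lines in the unittest output above follow a consistent shape: timestamp, node id, component, severity, message. A sketch that splits them apart for filtering (format inferred from this log; real YDB logs may carry more variants):

import re

LOG_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (?P<node>\d+) "
    r":(?P<component>\w+) (?P<severity>\w+): (?P<message>.*)"
)

line = ("2025-04-17T17:53:04.328647Z node 1 :FLAT_TX_SCHEMESHARD WARN: "
        "NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944")
m = LOG_RE.match(line)
assert m and m.group("component") == "FLAT_TX_SCHEMESHARD" and m.group("severity") == "WARN"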
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:38.557252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:38.574277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:52:38.588901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:38.767537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:39.000007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:39.088807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:41.484968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341527299008985:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.485130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:41.778461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.831411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.880244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.934871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:52:41.997546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:52:42.049074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:52:42.093126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:52:42.127264Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341510119138139:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:42.127372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:42.154699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341531593976935:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:42.154837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:42.155197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341531593976940:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:42.159333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:52:42.168274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341531593976942:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:52:42.260361Z node 1 :TX_PROXY ERROR: Actor# [1:7494341531593976999:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:44.079225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 query_phases { duration_us: 726 cpu_time_us: 726 } query_phases { duration_us: 1955 cpu_time_us: 1955 } query_phases { duration_us: 14622 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 13248 affected_shards: 1 } query_phases { duration_us: 3963 cpu_time_us: 3963 } query_phases { duration_us: 10105 cpu_time_us: 10105 } query_phases { duration_us: 5776 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 4600 } query_phases { duration_us: 1679 cpu_time_us: 1679 } query_phases { duration_us: 5214 cpu_time_us: 5214 } query_phases { duration_us: 21876 cpu_time_us: 5532 } query_phases { duration_us: 17112 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 4175 affected_shards: 2 } compilation { duration_us: 1495608 cpu_time_us: 1469571 } process_cpu_time_us: 28599 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744912365872,\"TaskId\":1,\"Host\":\"ghrun-gkdwnlcfnq\",\"ComputeTimeUs\":96}],\"CpuTimeUs\":570}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1744912365871,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":570,\"Max\":570,\"Min\":570}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1744912365871,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744912365871,\"TaskId\":2,\"Host\":\"ghrun-gkdwnlcfnq\",\"ComputeTimeUs\":120}],\"CpuTimeUs\":724}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1744912365871,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":7 ... alue\")) \'(\'\"fk1\" (Member $147 \'\"fk1\")) \'(\'\"fk3\" (Member $147 \'\"fk3\"))))) \'0 $138))\n (let $140 (OrderedFilter $137 (lambda \'($148) (And (Exists (Member $148 \'\"fk1\")) (Exists (Member $148 \'\"fk2\")) (Exists (Member $148 \'\"fk3\"))))))\n (let $141 (lambda \'($150) (Void)))\n (let $142 (ToDict $140 (lambda \'($149) (AsStruct \'(\'\"fk1\" (Member $149 \'\"fk1\")) \'(\'\"fk2\" (Member $149 \'\"fk2\")) \'(\'\"fk3\" (Member $149 \'\"fk3\")))) $141 $35))\n (let $143 (Variant (DictKeys $142) \'1 $138))\n (let $144 (Variant (== (Length $140) (Length $142)) \'2 $138))\n (let $145 (ToDict $137 (lambda \'($151) (AsStruct \'(\'\"Key\" (Member $151 \'\"Key\")))) $141 $35))\n (let $146 (Variant $145 \'\"3\" $138))\n (return (Iterator (AsList $139 $143 $144 $146)))\n))) \'(\'(\'\"_logical_id\" \'4306) \'(\'\"_id\" \'\"1476fe8b-799f16eb-46112b73-4a401690\"))))\n(let $59 (DqCnValue (TDqOutput $58 \'\"3\")))\n(let $60 (DqCnValue (TDqOutput $58 \'0)))\n(let $61 (DqCnValue (TDqOutput $58 \'2)))\n(let $62 (DqCnValue (TDqOutput $58 \'1)))\n(let $63 \'($59 $60 $61 $62))\n(let $64 (KqpTxResultBinding $54 \'\"3\" \'0))\n(let $65 (KqpPhysicalTx \'($58) $63 \'(\'($53 $64)) $3))\n(let $66 \'\"%kqp%tx_result_binding_4_3\")\n(let $67 (DqPhyStage \'() (lambda \'() (Iterator %kqp%tx_result_binding_4_3)) \'(\'(\'\"_logical_id\" \'4701) \'(\'\"_id\" \'\"7d540939-3f97da89-b7706a6e-a0836b61\"))))\n(let $68 (KqpTable \'\"/Root/TestTable/Index/indexImplTable\" \'\"72057594046644480:19\" \'\"\" \'1))\n(let $69 (KqpCnStreamLookup (TDqOutput $67 \'0) $68 \'(\'\"Key\") $55 $24))\n(let $70 \'\"%kqp%tx_result_binding_4_0\")\n(let $71 (Bool \'false))\n(let $72 (DqPhyStage \'($69) (lambda \'($152) (Map (Filter (Take $152 (Uint64 \'1)) (lambda \'($153) (Not (Contains %kqp%tx_result_binding_4_0 $153)))) (lambda \'($154) $71))) \'(\'(\'\"_logical_id\" \'4689) \'(\'\"_id\" \'\"f14681ba-19b071de-ed6243cc-9e43fa9b\"))))\n(let $73 (DqCnUnionAll (TDqOutput $72 \'0)))\n(let $74 (Bool \'true))\n(let $75 (DqPhyStage \'($73) (lambda \'($155) (block \'(\n (let $156 (lambda \'($157 $158) $71))\n (return (FromFlow (Condense (ToFlow $155) $74 $156 $156)))\n))) \'(\'(\'\"_logical_id\" \'4669) \'(\'\"_id\" \'\"df1819e1-c703a541-ef8443e8-5a89eb99\"))))\n(let $76 \'($67 $72 $75))\n(let $77 (DqCnValue (TDqOutput $75 \'0)))\n(let $78 (KqpTxResultBinding $57 \'\"4\" \'0))\n(let $79 (KqpTxResultBinding $55 \'\"4\" \'\"3\"))\n(let $80 (KqpPhysicalTx $76 \'($77) \'(\'($70 $78) \'($66 $79)) $41))\n(let $81 \'\"%kqp%tx_result_binding_4_2\")\n(let $82 \'\"%kqp%tx_result_binding_5_0\")\n(let $83 \'\"%kqp%tx_result_binding_4_1\")\n(let $84 (DqPhyStage \'() (lambda \'() (block \'(\n (let $159 (KqpEnsure $74 %kqp%tx_result_binding_4_2 \'\"2012\" (Utf8 \'\"Duplicated keys found.\")))\n (let $160 (KqpEnsure $74 %kqp%tx_result_binding_5_0 \'\"2012\" (Utf8 \'\"Conflict with existing key.\")))\n (let $161 (If (And $159 $160) %kqp%tx_result_binding_4_1 (List $11)))\n (return (ToStream (Just (PartitionByKey $161 (lambda \'($162) (Member $162 \'\"Key\")) (Void) (Void) (lambda \'($163) (FlatMap 
$163 (lambda \'($164) (Last (ForwardList (Nth $164 \'1))))))))))\n))) \'(\'(\'\"_logical_id\" \'5031) \'(\'\"_id\" \'\"30009b69-1159a69c-11ec1d26-2283295e\"))))\n(let $85 (DqCnValue (TDqOutput $84 \'0)))\n(let $86 (KqpTxResultBinding $11 \'\"4\" \'1))\n(let $87 (KqpTxResultBinding $56 \'\"4\" \'2))\n(let $88 (KqpTxResultBinding $56 \'\"5\" \'0))\n(let $89 \'(\'($83 $86) \'($81 $87) \'($82 $88)))\n(let $90 (KqpPhysicalTx \'($84) \'($85) $89 $3))\n(let $91 \'\"%kqp%tx_result_binding_6_0\")\n(let $92 %kqp%tx_result_binding_6_0)\n(let $93 (DqPhyStage \'() (lambda \'() (Iterator (AsList (ToDict (FlatMap (Map $92 (lambda \'($165) (AsStruct \'(\'\"Key\" (Member $165 \'\"Key\")) \'(\'\"fk1\" (Member $165 \'\"fk1\")) \'(\'\"fk3\" (Member $165 \'\"fk3\"))))) (lambda \'($166) (block \'(\n (let $167 (AsStruct \'(\'\"Key\" (Member $166 \'\"Key\"))))\n (return (IfPresent (Lookup $46 $167) (lambda \'($168) (Just \'($167 $168 (Or (AggrNotEquals (Member $166 \'\"fk1\") (Member $168 \'\"fk1\")) (AggrNotEquals (Member $166 \'\"fk3\") (Member $168 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $19 $44 $56)))))\n)))) (lambda \'($169) (Nth $169 \'0)) (lambda \'($170) \'((Nth $170 \'1) (Nth $170 \'2))) $35)))) \'(\'(\'\"_logical_id\" \'5182) \'(\'\"_id\" \'\"5f227f21-7b6f1373-83df44bf-567272da\"))))\n(let $94 (DqCnValue (TDqOutput $93 \'0)))\n(let $95 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $96 \'($91 $95))\n(let $97 (KqpPhysicalTx \'($93) \'($94) \'($51 $96) $3))\n(let $98 (DataSink \'\"KqpTableSink\" \'\"db\"))\n(let $99 (KqpTableSinkSettings $22 \'false \'\"upsert\" \'0 \'false \'false \'()))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator $92)) \'(\'(\'\"_logical_id\" \'5730) \'(\'\"_id\" \'\"6d6b051f-7ed947c8-8bd497a5-686c7f12\")) \'((DqSink \'0 $98 $99))))\n(let $101 \'\"%kqp%tx_result_binding_7_0\")\n(let $102 (DictType $19 (TupleType $44 $56)))\n(let $103 %kqp%tx_result_binding_7_0)\n(let $104 \'(\'(\'\"_logical_id\" \'5758) \'(\'\"_id\" \'\"3d68dac-c071d3f1-c9b6f9e0-5cc5d333\") $30))\n(let $105 (DqPhyStage \'() (lambda \'() (block \'(\n (let $171 (lambda \'($173) (block \'(\n (let $174 (Nth $173 \'1))\n (let $175 (Nth $174 \'0))\n (return (Member (Nth $173 \'0) \'\"Key\") (Member $175 \'\"fk1\") (Member $175 \'\"fk2\") (Member $175 \'\"fk3\") (Nth $174 \'1))\n ))))\n (let $172 (lambda \'($181 $182 $183 $184 $185) $181 $182 $183 $184))\n (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow (DictItems $103)) $171) (lambda \'($176 $177 $178 $179 $180) $180)) $172)))\n))) $104))\n(let $106 (DqCnUnionAll (TDqOutput $105 \'0)))\n(let $107 (lambda \'($186) (FromFlow (NarrowMap (ToFlow $186) $34))))\n(let $108 (KqpTableSinkSettings $68 \'false \'\"delete\" \'1 \'false \'false \'()))\n(let $109 (DqPhyStage \'($106) $107 \'(\'(\'\"_logical_id\" \'5744) \'(\'\"_id\" \'\"6411798d-a2ceca62-dbe02366-bc21f9df\")) \'((DqSink \'0 $98 $108))))\n(let $110 \'(\'(\'\"_logical_id\" \'5810) \'(\'\"_id\" \'\"49e1dc2a-c498cd00-a037c07b-e1558208\") $30))\n(let $111 (DqPhyStage \'() (lambda \'() (FromFlow (ExpandMap (FlatMap (Map (ToFlow $92) (lambda \'($187) (AsStruct \'(\'\"Key\" (Member $187 \'\"Key\")) \'(\'\"fk1\" (Member $187 \'\"fk1\")) \'(\'\"fk3\" (Member $187 \'\"fk3\"))))) (lambda \'($188) (block \'(\n (let $189 \'(\'\"Key\" (Member $188 \'\"Key\")))\n (let $190 \'(\'\"fk1\" (Member $188 \'\"fk1\")))\n (let $191 \'(\'\"fk3\" (Member $188 \'\"fk3\")))\n (return (IfPresent (Lookup $103 (AsStruct $189)) (lambda \'($192) (If (Nth $192 \'1) (Just (AsStruct $189 $190 \'(\'\"fk2\" (Member (Nth $192 \'0) 
\'\"fk2\")) $191)) (Nothing (OptionalType $29)))) (Just (AsStruct $189 $190 $47 $191))))\n)))) $26))) $110))\n(let $112 (DqCnUnionAll (TDqOutput $111 \'0)))\n(let $113 (KqpTableSinkSettings $68 \'false \'\"\" \'2 \'false \'false \'()))\n(let $114 (DqPhyStage \'($112) $107 \'(\'(\'\"_logical_id\" \'5772) \'(\'\"_id\" \'\"f81717d5-def18ead-e80fd69e-58abb07a\")) \'((DqSink \'0 $98 $113))))\n(let $115 \'($100 $105 $109 $111 $114))\n(let $116 (KqpTxResultBinding $102 \'\"7\" \'0))\n(let $117 (KqpPhysicalTx $115 \'() \'($96 \'($101 $116)) \'($40 \'(\'\"with_effects\"))))\n(let $118 \'($4 $17 $42 $52 $65 $80 $90 $97 $117))\n(return (KqpPhysicalQuery $118 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1148857 total_cpu_time_us: 1122845 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":17},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":5,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"Indexes\\\":[{\\\"Name\\\":\\\"Index\\\",\\\"Type\\\":2,\\\"State\\\":1,\\\"SchemaVersion\\\":1,\\\"LocalPathId\\\":18,\\\"PathOwnerId\\\":8716544,\\\"KeyColumns\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\"]}],\\\"SecondaryGlobalIndexMetadata\\\":[{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable/Index/indexImplTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":19},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgre
ss\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\",\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744912383\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"93a02f8d-1e1cd125-d6d35b09-45ee6326\",\"version\":\"1.0\"}" 2025-04-17T17:53:03.233140Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:53:04.448102Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpPg::TableArrayInsert-useSink [GOOD] >> KqpPg::Returning+useSink >> YdbOlapStore::LogNonExistingRequest >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow >> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> GrpcConnectionStringParserTest::IncorrectConnectionString >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::CopyTables >> SystemView::AuthPermissions_Access [GOOD] >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> YdbYqlClient::DiscoveryLocationOverride [GOOD] >> YdbYqlClient::DeleteTableWithDeletedIndex >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink >> 
DataShardReadIterator::ShouldReadRangeReverse [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow |78.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams+useSink >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture >> Cdc::Alter [GOOD] >> Cdc::AddColumn >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: 2025-04-17T17:52:55.577782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:55.578708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:55.578870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:52:55.579916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:55.580578Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:55.580634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00171c/r3tmp/tmpKGzayx/pdisk_1.dat 2025-04-17T17:52:56.058860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:56.335306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.479600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:56.479739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:56.501218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:56.501317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:56.522216Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:52:56.523031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:56.523464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:56.879720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:58.174397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1598:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:58.174531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1607:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:58.174613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:52:58.181051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:52:58.876120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1612:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:52:59.129526Z node 1 :TX_PROXY ERROR: Actor# [1:1764:3058] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:52:59.522614Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-17T17:52:59.522877Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-17T17:52:59.523005Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-17T17:52:59.523184Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-17T17:52:59.523448Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-17T17:52:59.523608Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-17T17:52:59.523734Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-04-17T17:52:59.523859Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create compute task: 1 2025-04-17T17:52:59.524033Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. 
Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:52:59.524089Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T17:52:59.524438Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1793:2959] 2025-04-17T17:52:59.524502Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1793:2959], channels: 0 2025-04-17T17:52:59.524558Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T17:52:59.524595Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-17T17:52:59.524642Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1793:2959] 2025-04-17T17:52:59.524682Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1793:2959], channels: 0 2025-04-17T17:52:59.524740Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:1793:2959], 2025-04-17T17:52:59.524812Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. 
Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1793:2959], 2025-04-17T17:52:59.524872Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlNjhiMjQtYzk0YTMxOTQtN2M0YzcyYzctYjYyNjA1YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-17T17:52:59.535267Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2e3t9sfc9b31s7b1nbbcng, Database: , DatabaseId: /Root, SessionId: ydb:// ... abase: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1846:2476], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T17:53:11.392171Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.392240Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.393162Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1846:2476], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 686 Tasks { TaskId: 1 CpuTimeUs: 442 ComputeCpuTimeUs: 9 BuildCpuTimeUs: 433 HostName: "ghrun-gkdwnlcfnq" NodeId: 4 CreateTimeMs: 1744912391380 UpdateTimeMs: 1744912391381 } MaxMemoryUsage: 1048576 } 2025-04-17T17:53:11.393300Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.393346Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.410545Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1846:2476], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 865 Tasks { TaskId: 1 CpuTimeUs: 564 OutputRows: 3 OutputBytes: 12 IngressRows: 3 ComputeCpuTimeUs: 131 BuildCpuTimeUs: 433 HostName: "ghrun-gkdwnlcfnq" NodeId: 4 StartTimeMs: 1744912391408 CreateTimeMs: 1744912391380 UpdateTimeMs: 1744912391408 } MaxMemoryUsage: 1048576 } 2025-04-17T17:53:11.410659Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.410750Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.411249Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1845:3102], finished: 0 2025-04-17T17:53:11.411371Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 1, to: [3:1845:3102] 2025-04-17T17:53:11.431101Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1846:2476], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 2991 DurationUs: 22000 Tasks { TaskId: 1 CpuTimeUs: 637 FinishTimeMs: 1744912391430 OutputRows: 3 OutputBytes: 12 IngressRows: 3 ComputeCpuTimeUs: 204 BuildCpuTimeUs: 433 WaitInputTimeUs: 21033 HostName: "ghrun-gkdwnlcfnq" NodeId: 4 StartTimeMs: 1744912391408 CreateTimeMs: 1744912391380 CurrentWaitOutputTimeUs: 32 UpdateTimeMs: 1744912391430 } MaxMemoryUsage: 1048576 } 2025-04-17T17:53:11.431231Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. 
Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.431291Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1846:2476], CA [3:1844:3102], 2025-04-17T17:53:11.431575Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1845:3102], finished: 1 2025-04-17T17:53:11.431618Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 2, to: [3:1845:3102] 2025-04-17T17:53:11.432558Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1844:3102], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1525 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 846 FinishTimeMs: 1744912391432 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 237 BuildCpuTimeUs: 609 HostName: "ghrun-gkdwnlcfnq" NodeId: 3 CreateTimeMs: 1744912391379 UpdateTimeMs: 1744912391432 } MaxMemoryUsage: 1048576 } 2025-04-17T17:53:11.432668Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1844:3102] 2025-04-17T17:53:11.432761Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1846:2476], 2025-04-17T17:53:11.432805Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:1846:2476], 2025-04-17T17:53:11.433203Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1846:2476], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 7375 DurationUs: 23000 Tasks { TaskId: 1 CpuTimeUs: 639 FinishTimeMs: 1744912391431 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 206 BuildCpuTimeUs: 433 WaitInputTimeUs: 21033 HostName: "ghrun-gkdwnlcfnq" NodeId: 4 StartTimeMs: 1744912391408 CreateTimeMs: 1744912391380 UpdateTimeMs: 1744912391431 } MaxMemoryUsage: 1048576 } 2025-04-17T17:53:11.433270Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [4:1846:2476] 2025-04-17T17:53:11.433447Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:53:11.433525Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-17T17:53:11.433587Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1836:3102] TxId: 281474976715663. Ctx: { TraceId: 01js2e471h4x7e75c077pr2bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTlmYWVlZWEtZTVhMTUzOGYtM2RhMGZkNzktNjg3NDY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.008900s ReadRows: 3 ReadBytes: 24 ru: 5 rate limiter was not found force flag: 1 { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> LocalityOperation::LocksFromAnotherTenants+UseSink >> SystemView::AuthGroupMembers_TableRange [GOOD] >> SystemView::AuthEffectivePermissions >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash [GOOD] >> TDatabaseQuotas::DisableWritesToDatabase >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD] >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestBusySession >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> SystemView::PartitionStatsTtlFields [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> YdbYqlClient::CreateTableWithUniformPartitions >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> KqpPg::Returning+useSink [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> YdbYqlClient::CopyTables [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> SystemView::PartitionStatsLocksFields >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> DataShardReadIterator::ShouldForbidDuplicatedReadId >> KqpPg::Returning-useSink >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> Yq_1::DescribeConnection [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> Cdc::DropIndex [GOOD] >> Yq_1::DeleteConnections [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> Cdc::AddColumn [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> Yq_1::DeleteQuery >> Cdc::AddColumn_TopicAutoPartitioning >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink >> Cdc::DisableStream >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 >> 
Yq_1::Create_And_Modify_The_Same_Connection >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Access [GOOD] Test command err: 2025-04-17T17:50:47.931241Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341035286118862:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:47.931854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00052b/r3tmp/tmpZYhmHR/pdisk_1.dat 2025-04-17T17:50:48.360691Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:48.378905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:48.378994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:48.398021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16538, node 1 2025-04-17T17:50:48.445808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:48.445837Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:48.445848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:48.445974Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:48.726949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:48.832517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:50:48.832777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:48.832835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T17:50:48.832980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T17:50:48.833099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T17:50:48.833183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:50:48.833199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:48.833254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:50:48.833312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T17:50:48.836704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-17T17:50:48.836886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-04-17T17:50:48.837075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:50:48.837122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] waiting... 
2025-04-17T17:50:48.837269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:50:48.837351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:50:48.837366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494341039581086748:2387], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-17T17:50:48.837390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494341039581086748:2387], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-17T17:50:48.837451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:48.837492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:50:48.837517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-17T17:50:48.837539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2025-04-17T17:50:48.845772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:50:48.847929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:50:48.848283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:50:48.848295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-17T17:50:48.848315Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-17T17:50:48.848339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T17:50:48.848917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:50:48.848964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T17:50:48.848970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-17T17:50:48.848980Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 
72057594046644480, LocalPathId: 2], version: 2 2025-04-17T17:50:48.848990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T17:50:48.849029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-17T17:50:48.849169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:50:48.849183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-17T17:50:48.849216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:50:48.849275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-04-17T17:50:48.849343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:50:48.856409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-17T17:50:48.856491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-17T17:50:48.856779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912248898, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:50:48.856942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912248898 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:50:48.856971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-17T17:50:48.857153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-17T17:50:48.857202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-17T17:50:48.857360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:50:48.857441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-0 ... 
5-04-17T17:53:07.045719Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 4, finished: 0 2025-04-17T17:53:07.047568Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:07.051764Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [72057594046644480:5:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [workload_manager] }] } 2025-04-17T17:53:07.051842Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 0, finished: 0 2025-04-17T17:53:07.052122Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:07.054616Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [72057594046644480:6:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [pools] }] } 2025-04-17T17:53:07.054691Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 0, finished: 0 2025-04-17T17:53:07.054780Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:07.055025Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools TableId: [72057594046644480:7:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [default] }] } 2025-04-17T17:53:07.055065Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 0, finished: 0 2025-04-17T17:53:07.055222Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:07.055496Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:53:07.055603Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 6, finished: 0 2025-04-17T17:53:07.055953Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:07.057691Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:53:07.057748Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 1, finished: 0 2025-04-17T17:53:07.058034Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:07.059129Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:53:07.059203Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 0, finished: 0 2025-04-17T17:53:07.059633Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:07.060384Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:53:07.060426Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341639140966501:2435], row count: 0, finished: 0 2025-04-17T17:53:07.060649Z node 41 :SYSTEM_VIEWS INFO: Scan finished, actor: [41:7494341639140966501:2435], owner: [41:7494341639140966498:2433], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-17T17:53:07.061706Z node 41 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [41:7494341591896324121:2208], database# , query hash# 12107705915200741666, cpu time# 138921 2025-04-17T17:53:07.062651Z node 41 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912387034, txId: 281474976710692] shutting down 2025-04-17T17:53:07.077345Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 43 2025-04-17T17:53:07.078335Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:07.081207Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 44 2025-04-17T17:53:07.081736Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:07.084029Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 45 2025-04-17T17:53:07.084817Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:07.084150Z node 45 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:53:07.085294Z node 43 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:53:07.093425Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 42 2025-04-17T17:53:07.094429Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:07.095581Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341600636730562:2099], Type=268959746 2025-04-17T17:53:07.095635Z node 41 :HIVE WARN: HIVE#72057594037968897 
THive::Handle::TEvUndelivered Sender=[42:7494341600636730562:2099], Type=268959746 2025-04-17T17:53:07.095675Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341600636730562:2099], Type=268959746 2025-04-17T17:53:07.095701Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341600636730562:2099], Type=268959746 2025-04-17T17:53:07.095730Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341600636730562:2099], Type=268959746 2025-04-17T17:53:07.095763Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341600636730562:2099], Type=268959746 2025-04-17T17:53:07.095809Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341594101234534:2105], Type=268959746 2025-04-17T17:53:07.095842Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341594101234534:2105], Type=268959746 2025-04-17T17:53:07.095867Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341594101234534:2105], Type=268959746 2025-04-17T17:53:07.095897Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341594101234534:2105], Type=268959746 |78.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |78.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] Test command err: 2025-04-17T17:49:40.617438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.617902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.617942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001714/r3tmp/tmpd5bcSD/pdisk_1.dat 2025-04-17T17:49:40.890125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:40.890201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:40.890229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:40.890293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-17T17:49:40.890315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:49:40.979046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T17:49:40.979225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:40.979353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:49:40.979546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:49:40.979590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:40.979655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:40.980137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:40.980229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:49:40.980257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:40.980278Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:40.980390Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:40.980415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:40.980467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:40.980509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:49:40.980534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:49:40.980563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:49:40.980626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:40.980879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:40.980902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:40.980998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:40.981022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:40.981047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:40.981075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:49:40.981098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:49:40.981142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:40.981346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:40.981362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T17:49:40.981422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:49:40.981445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:49:40.981474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:40.981492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:49:40.981519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:49:40.981537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:49:40.981573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:49:40.983795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:49:40.984100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:49:40.984129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-17T17:49:40.984258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:49:40.985227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:49:40.985271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:49:40.985303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-17T17:49:40.985393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-17T17:49:40.985671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:40.985700Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:40.985737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:40.985836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-17T17:49:40.985855Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:49:40.985895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:40.985925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T17:49:40.985948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.019367Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-17T17:49:41.019444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T17:49:41.019469Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:41.019967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T17:49:41.020018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-17T17:49:41.056711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:49:41.056807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:49:41.067930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:49:41.141245Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-17T17:49:41.141927Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.141968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:49:41.142008Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:49:41.142141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-17T17:49:41.142174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:49:41.142268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:49:41.142376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 4046644480 2025-04-17T17:52:15.143728Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:3 ProgressState 2025-04-17T17:52:15.143794Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:52:15.143821Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-04-17T17:52:15.143844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-04-17T17:52:15.143871Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-04-17T17:52:15.143894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-04-17T17:52:15.143919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 5/7, is published: true 2025-04-17T17:52:15.144187Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:52:15.144219Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:52:15.144271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:6, at schemeshard: 72057594046644480 2025-04-17T17:52:15.144308Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:6 ProgressState 2025-04-17T17:52:15.144365Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:52:15.144389Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-04-17T17:52:15.144412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-04-17T17:52:15.144438Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-04-17T17:52:15.144462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-04-17T17:52:15.144489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 6/7, is published: true 2025-04-17T17:52:15.144681Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:52:15.144708Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:52:15.144767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:4, at schemeshard: 72057594046644480 2025-04-17T17:52:15.144812Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:4 ProgressState 2025-04-17T17:52:15.144873Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:52:15.144895Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-04-17T17:52:15.144917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-04-17T17:52:15.144955Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-04-17T17:52:15.144975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-04-17T17:52:15.144999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 7/7, is published: true 2025-04-17T17:52:15.145072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1253:2962] message: TxId: 281474976715668 2025-04-17T17:52:15.145133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-04-17T17:52:15.145191Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-04-17T17:52:15.145233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-04-17T17:52:15.145322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 17] was 2 2025-04-17T17:52:15.145360Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-04-17T17:52:15.145382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-04-17T17:52:15.145415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 18] was 2 2025-04-17T17:52:15.145439Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:2 2025-04-17T17:52:15.145458Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:2 2025-04-17T17:52:15.145484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 19] was 3 2025-04-17T17:52:15.145504Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:3 2025-04-17T17:52:15.145529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:3 2025-04-17T17:52:15.145608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 20] was 3 2025-04-17T17:52:15.145641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-04-17T17:52:15.145693Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 281474976715668:4 2025-04-17T17:52:15.145714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:4 2025-04-17T17:52:15.145767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 21] was 3 2025-04-17T17:52:15.145796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-04-17T17:52:15.145822Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:5 2025-04-17T17:52:15.145845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:5 2025-04-17T17:52:15.145893Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 22] was 3 2025-04-17T17:52:15.145918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-17T17:52:15.145942Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:6 2025-04-17T17:52:15.145962Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:6 2025-04-17T17:52:15.146014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 23] was 3 2025-04-17T17:52:15.146039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-04-17T17:52:15.146616Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:52:15.146746Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:52:15.146886Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:52:15.146978Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:52:15.147070Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1253:2962] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-17T17:52:15.147464Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1260:2968], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:52:15.147500Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:52:15.147527Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T17:52:15.299754Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [3:1555:3203], serverId# [3:1556:3204], sessionId# [0:0:0] 2025-04-17T17:52:15.299901Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01js2e2g9j8p4wv07t5yvwdekx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTJlYjkxMTYtYjQwODc5NWQtODc0Mjc5MC04ZDkxN2NhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } 2025-04-17T17:52:15.479658Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [3:1584:3220], serverId# [3:1585:3221], sessionId# [0:0:0] 2025-04-17T17:52:15.479871Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2e2gekfnvcskcft7ydkx2w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MThlZTcyOTgtNTdiNWM5Y2YtZGI1MDc5OGUtZTg2ZTk3ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 11 } items { uint32_value: 101 } }, { items { uint32_value: 21 } items { uint32_value: 201 } }, { items { uint32_value: 31 } items { uint32_value: 301 } }, { items { uint32_value: 41 } items { uint32_value: 401 } }, { items { uint32_value: 51 } items { uint32_value: 501 } } 2025-04-17T17:52:15.638909Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1613:3237], serverId# [3:1614:3238], sessionId# [0:0:0] 2025-04-17T17:52:15.639073Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2e2gm3969zrmy5hqg2nf52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjhiMTQzYmItYjU4MDJiYTItOThmNTUxZTktNjkzNDZjMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 12 } items { uint32_value: 102 } }, { items { uint32_value: 22 } items { uint32_value: 202 } }, { items { uint32_value: 32 } items { uint32_value: 302 } }, { items { uint32_value: 42 } items { uint32_value: 402 } }, { items { uint32_value: 52 } items { uint32_value: 502 } } 2025-04-17T17:52:15.841856Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [3:1642:3254], serverId# [3:1643:3255], sessionId# [0:0:0] 2025-04-17T17:52:15.842067Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2e2gs307d2xsqadnyjdbma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGM3ODU3YjktZTI3M2ZjNjgtNTRlMGQyNDEtMjBkNjQxNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 13 } items { uint32_value: 103 } }, { items { uint32_value: 23 } items { uint32_value: 203 } }, { items { uint32_value: 33 } items { uint32_value: 303 } }, { items { uint32_value: 43 } items { uint32_value: 403 } }, { items { uint32_value: 53 } items { uint32_value: 503 } } >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ResolvedTimestamps >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> SystemView::ShowCreateTable [GOOD] >> SystemView::QueryStatsFields >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> YdbYqlClient::BuildInfo >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> LocalityOperation::LocksFromAnotherTenants+UseSink [GOOD] >> LocalityOperation::LocksFromAnotherTenants-UseSink >> YdbQueryService::TestCreateAndAttachSession >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-04-17T17:47:34.053547Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:34.420962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:34.455083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:34.456446Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:34.497136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2025-04-17T17:47:34.497859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:34.501622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:34.502032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:34.502325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:34.502556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:34.502733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:34.503059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:34.503320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:34.503576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.504221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:34.504400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:34.532254Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:34.532366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:34.532423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:34.532564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.532679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:34.532734Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:34.532767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:34.532838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:34.532897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:34.532928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:34.532948Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:34.533085Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.533136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:34.533162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:34.533184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:34.533246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:34.533288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:34.533316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:34.533365Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:34.533436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:34.533468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:34.533489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:34.533519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-17T17:47:34.533547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:34.533566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:34.533932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2025-04-17T17:47:34.534033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-17T17:47:34.534150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-04-17T17:47:34.534285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-04-17T17:47:34.534493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:34.534572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:34.534623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:34.534843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:34.534902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.534937Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.535071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:34.535114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:34.535143Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:34.535344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:34.535400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:34.535438Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:34.535589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:34.535631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:34.535677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_rang
e:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range
:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:
[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744912055385;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744912055385;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-17T17:53:13.464023Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11069:12696];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T17:53:13.466216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11069:12696];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2025-04-17T17:48:34.372492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:34.372556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:34.442575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:45.982885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:45.982943Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:46.035827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:47.321964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:47.322035Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:47.378047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:50.254191Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:50.254249Z node 4 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T17:48:50.294132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:53.891436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:53.891500Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:53.969346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:55.167448Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:55.167508Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:55.218299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:55.802103Z node 6 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 7 2025-04-17T17:48:55.802353Z node 6 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 7 2025-04-17T17:48:55.802661Z node 6 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 7 2025-04-17T17:48:55.802904Z node 7 :TX_PROXY WARN: actor# [7:345:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-17T17:48:56.823083Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:56.823150Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:56.873275Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:57.476290Z node 8 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:114} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? true selector_config: [] 2025-04-17T17:48:58.464583Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:58.464649Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:58.570164Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:59.881560Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-17T17:49:00.050623Z node 11 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:49:00.051185Z node 11 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016fc/r3tmp/tmprJdcfo/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-04-17T17:49:00.056126Z node 11 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016fc/r3tmp/tmprJdcfo/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016fc/r3tmp/tmprJdcfo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4230437761185287579 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:49:00.060377Z node 11 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 
TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016fc/r3tmp/tmprJdcfo/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:49:00.128107Z node 10 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:49:00.128568Z node 10 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016fc/r3tmp/tmprJdcfo/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T17:49:00.128769Z node 10 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016fc/r3tmp/tmprJdcfo/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016fc/r3tmp/tmprJdcfo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14474627567621486201 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:49:00.131461Z node 10 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 
LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 ... 00-s[16/16]o)]} 2025-04-17T17:52:07.690285Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:07.690666Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:12.623344Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:12.623704Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:18.049896Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:18.050047Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:23.716209Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:23.716561Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:23.880489Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:23.881007Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:29.009899Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:29.010245Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:34.680097Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:34.680451Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:40.702107Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:40.702501Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:46.221538Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:46.221806Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:52.115261Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:52.115591Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:57.790802Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:52:57.791144Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:53:03.102009Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:53:03.102352Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:53:08.506651Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:53:08.507017Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:53:13.767075Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-17T17:53:13.767306Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink >> YdbYqlClient::TestConstraintViolation [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV2PDiskIdFilter >> YdbTableBulkUpsertOlap::UpsertCsvBug ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-04-17T17:53:05.018743Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341629232993292:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:05.021654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a57/r3tmp/tmp5Lufof/pdisk_1.dat 2025-04-17T17:53:05.552999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-17T17:53:05.553088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:05.557922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:05.585753Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17706, node 1 2025-04-17T17:53:05.846689Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:05.846713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:05.846725Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:05.846859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:06.197346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:06.228579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:53:08.314569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341642117896244:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:08.314671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:08.653082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:08.857923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341642117896422:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:08.858027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:08.859681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341642117896427:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:08.863745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:08.891318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341642117896429:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:53:08.971237Z node 1 :TX_PROXY ERROR: Actor# [1:7494341642117896510:2809] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:09.098688Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2e44qk1gk4pybfndp5y1ph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q0ZmEzNDktN2ZiYjUxNTEtODVjZTk0OWUtZDE0ODQ2MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:09.320785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2e45064wz5w5wv0cst6nav, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q0ZmEzNDktN2ZiYjUxNTEtODVjZTk0OWUtZDE0ODQ2MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:10.778802Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341649729946667:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:10.778888Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a57/r3tmp/tmpFf90mx/pdisk_1.dat 2025-04-17T17:53:11.000230Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4272, node 4 2025-04-17T17:53:11.114350Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:11.114476Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:11.145023Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:11.209545Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:11.209572Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:11.209582Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:11.209717Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:11.460826Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:14.124402Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341666909816885:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:14.124479Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341666909816879:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:14.124750Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:14.127871Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:14.174125Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494341666909816893:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:53:14.274277Z node 4 :TX_PROXY ERROR: Actor# [4:7494341666909816977:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:15.921983Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341675074739783:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:15.922057Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a57/r3tmp/tmpfHqfdG/pdisk_1.dat 2025-04-17T17:53:16.057640Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:16.091535Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:16.091611Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:16.093827Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16083, node 7 2025-04-17T17:53:16.187589Z node 7 :NET_CLASSIFIER WARN: distributable config is ... 6Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZWFkYmRmZTQtZTk1ZWNkYWMtN2U3OTM0MzAtODY4N2UxNWM=, ActorId: [7:7494341687959642682:2334], ActorState: ExecuteState, TraceId: 01js2e4efe27r3yzg2rsx5ghtt, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-04-17T17:53:18.834376Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZWFkYmRmZTQtZTk1ZWNkYWMtN2U3OTM0MzAtODY4N2UxNWM=, ActorId: [7:7494341687959642682:2334], ActorState: ExecuteState, TraceId: 01js2e4efe27r3yzg2rsx5ghtt, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-04-17T17:53:18.834424Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZWFkYmRmZTQtZTk1ZWNkYWMtN2U3OTM0MzAtODY4N2UxNWM=, ActorId: [7:7494341687959642682:2334], ActorState: ExecuteState, TraceId: 01js2e4efe27r3yzg2rsx5ghtt, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-04-17T17:53:18.835375Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494341687959642728:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:18.835440Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:18.836948Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZWFkYmRmZTQtZTk1ZWNkYWMtN2U3OTM0MzAtODY4N2UxNWM=, ActorId: [7:7494341687959642682:2334], ActorState: ExecuteState, TraceId: 01js2e4efe27r3yzg2rsx5ghtt, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-04-17T17:53:18.837592Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494341687959642742:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:18.837684Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:18.840426Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:18.879288Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494341687959642705:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:53:18.947738Z node 7 :TX_PROXY ERROR: Actor# [7:7494341687959642803:2691] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:20.516740Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341695884776209:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:20.516812Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a57/r3tmp/tmpAxsuyf/pdisk_1.dat 2025-04-17T17:53:20.745712Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:20.774515Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:20.774601Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:20.779705Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13553, node 10 2025-04-17T17:53:20.864539Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:20.864565Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:20.864574Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:20.864766Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:21.124854Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:23.719057Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341708769679112:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:23.719121Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:23.744334Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:23.858661Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341708769679287:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:23.858803Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:23.859256Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341708769679292:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:23.863430Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:23.883820Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494341708769679294:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:53:23.985546Z node 10 :TX_PROXY ERROR: Actor# [10:7494341708769679370:2788] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:24.048646Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2e4kch3hny1e6drwscne24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDgxYjQ3NDQtZjllYWVmYTEtMTM5ZWZhZTEtODZjYTNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:24.133617Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2e4kkber4p753kxpg1vnv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDgxYjQ3NDQtZjllYWVmYTEtMTM5ZWZhZTEtODZjYTNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:24.136895Z node 10 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T17:53:24.147737Z node 10 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T17:53:24.147968Z node 10 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T17:53:24.148195Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7494341713064646726:2335], Table: `Root/Test` ([72057594046644480:2:1]), SessionActorId: [10:7494341708769679108:2335]Got CONSTRAINT VIOLATION for table `Root/Test`. ShardID=72075186224037888, Sink=[10:7494341713064646726:2335].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T17:53:24.148752Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7494341713064646719:2335], SessionActorId: [10:7494341708769679108:2335], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[10:7494341708769679108:2335]. isRollback=0 2025-04-17T17:53:24.149036Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NDgxYjQ3NDQtZjllYWVmYTEtMTM5ZWZhZTEtODZjYTNjOA==, ActorId: [10:7494341708769679108:2335], ActorState: ExecuteState, TraceId: 01js2e4kkber4p753kxpg1vnv7, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [10:7494341713064646720:2335] from: [10:7494341713064646719:2335] 2025-04-17T17:53:24.149159Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7494341713064646720:2335] TxId: 281474976710662. Ctx: { TraceId: 01js2e4kkber4p753kxpg1vnv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDgxYjQ3NDQtZjllYWVmYTEtMTM5ZWZhZTEtODZjYTNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T17:53:24.149403Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NDgxYjQ3NDQtZjllYWVmYTEtMTM5ZWZhZTEtODZjYTNjOA==, ActorId: [10:7494341708769679108:2335], ActorState: ExecuteState, TraceId: 01js2e4kkber4p753kxpg1vnv7, Create QueryResponse for error on request, msg: >> YdbTableBulkUpsertOlap::UpsertArrowBatch >> SystemView::AuthEffectivePermissions [GOOD] >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] Test command err: 2025-04-17T17:53:05.918512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341628549308064:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:05.918556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a46/r3tmp/tmpAKVfEm/pdisk_1.dat 2025-04-17T17:53:06.669760Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:06.677083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:06.677197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:06.687765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3341, node 1 2025-04-17T17:53:06.774370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:06.774400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:06.774407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:06.774521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:07.125581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:11.168415Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341656686961116:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:11.168694Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a46/r3tmp/tmpZoyG65/pdisk_1.dat 2025-04-17T17:53:11.454507Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:11.477276Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:11.477338Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:11.482580Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8189, node 4 2025-04-17T17:53:11.623067Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:11.623088Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:11.623102Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:11.623243Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:11.843807Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:11.875359Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:53:14.166401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:14.444190Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-17T17:53:14.500253Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-04-17T17:53:16.224951Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341678129617354:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:16.225195Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a46/r3tmp/tmpSvKXCV/pdisk_1.dat 2025-04-17T17:53:16.357212Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:16.383811Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:16.383883Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:16.388258Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26321, node 7 2025-04-17T17:53:16.450897Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:16.450917Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:16.450924Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:16.451034Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:16.652374Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:18.898655Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:20.682117Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341693825688871:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:20.682211Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a46/r3tmp/tmpvvhIFO/pdisk_1.dat 2025-04-17T17:53:20.901627Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:20.940965Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:20.941066Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:20.943719Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20684, node 10 2025-04-17T17:53:21.063677Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:21.063725Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:21.063736Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:21.063933Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:21.395559Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:24.328862Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> YdbQueryService::TestCreateAndAttachSession [GOOD] >> YdbQueryService::TestForbidExecuteWithoutAttach >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests >> YdbOlapStore::LogNonExistingRequest [GOOD] >> YdbOlapStore::LogNonExistingUserId >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> Cdc::DisableStream [GOOD] >> Cdc::InitialScan >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> KqpRm::Reduce >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthEffectivePermissions [GOOD] Test command err: 2025-04-17T17:50:49.513404Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341043951125803:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:49.513465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004d1/r3tmp/tmpZOb18L/pdisk_1.dat 2025-04-17T17:50:49.886003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:49.925384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:49.925500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:49.929862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31160, node 1 2025-04-17T17:50:49.997486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:49.997515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:49.997523Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:49.997658Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32380 TClient is connected to server localhost:32380 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:50.570044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:52.819677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341056836028812:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:52.819799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:52.822947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341056836028820:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:52.826955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:52.851574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341056836028826:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:52.915590Z node 1 :TX_PROXY ERROR: Actor# [1:7494341056836028901:2743] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:53.294053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-17T17:50:54.094652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:50:54.513428Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341043951125803:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:54.513493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:54.566469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:50:55.215837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:50:55.747524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:50:56.297727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:50:56.722126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:50:56.790610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:50:59.785727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-17T17:51:02.492288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710750:0, at schemeshard: 72057594046644480 2025-04-17T17:51:02.557051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710751:3, at schemeshard: 72057594046644480 2025-04-17T17:51:02.816549Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494341099785705708:3142], owner: [1:7494341099785705705:3140], scan id: 0, table id: [1:0:0:show_create] 
2025-04-17T17:51:02.817746Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494341099785705708:3142] 2025-04-17T17:51:02.836283Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494341099785705708:3142], row count: 1, finished: 1 2025-04-17T17:51:02.836325Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494341099785705708:3142], owner: [1:7494341099785705705:3140], scan id: 0, table id: [1:0:0:show_create] 2025-04-17T17:51:05.905790Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494341114402640167:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:05.905858Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004d1/r3tmp/tmpO99mv5/pdisk_1.dat 2025-04-17T17:51:06.143746Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28271, node 6 2025-04-17T17:51:06.279688Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:06.279800Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:06.285169Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:06.326834Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:06.326870Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:06.326878Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:06.327018Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15089 TClient is connected to server localhost:15089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:06.880249Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:09.954128Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494341131582510508:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:09.954229Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:09.954497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494341131582510522:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or ... esolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:53:23.676729Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341708555949565:2387], row count: 5, finished: 0 2025-04-17T17:53:23.679444Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:23.683875Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:53:23.683954Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341708555949565:2387], row count: 1, finished: 0 2025-04-17T17:53:23.684170Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:23.684885Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:53:23.684945Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341708555949565:2387], row count: 1, finished: 0 2025-04-17T17:53:23.685243Z node 41 :SYSTEM_VIEWS INFO: Scan finished, actor: [41:7494341708555949565:2387], owner: [41:7494341708555949561:2385], scan id: 0, table id: [72057594046644480:1:0:auth_effective_permissions] 2025-04-17T17:53:23.687240Z node 41 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [41:7494341674196209434:2209], database# , query hash# 11342553055430868283, cpu time# 184570 2025-04-17T17:53:23.687919Z node 41 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912403652, txId: 281474976715676] shutting down 2025-04-17T17:53:23.887383Z node 41 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01js2e4k94cpswxdd06sckdgqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=41&id=ZDkyNjhlOTgtYTgyZTEzYTktNmJlOGUwMDYtMmNiODcwZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:23.889644Z node 41 :SYSTEM_VIEWS INFO: Scan started, actor: [41:7494341708555949620:2399], owner: [41:7494341708555949616:2397], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-04-17T17:53:23.893759Z node 41 :SYSTEM_VIEWS INFO: Scan prepared, actor: [41:7494341708555949620:2399], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-17T17:53:23.893807Z node 41 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root/Tenant1 tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-17T17:53:23.893903Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:23.894293Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [72075186224037888:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 }] Groups: [] } Children [Dir2,Table1] }] } 2025-04-17T17:53:23.894397Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341708555949620:2399], row count: 1, finished: 0 2025-04-17T17:53:23.894517Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:23.894900Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [72075186224037888:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-17T17:53:23.894999Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan 
batch, actor: [41:7494341708555949620:2399], row count: 2, finished: 0 2025-04-17T17:53:23.895683Z node 41 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:53:23.897360Z node 41 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [72075186224037888:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:53:23.897421Z node 41 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [41:7494341708555949620:2399], row count: 1, finished: 0 2025-04-17T17:53:23.897483Z node 41 :SYSTEM_VIEWS INFO: Scan finished, actor: [41:7494341708555949620:2399], owner: [41:7494341708555949616:2397], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-04-17T17:53:23.899590Z node 41 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [41:7494341674196209434:2209], database# , query hash# 17325808444334437222, cpu time# 130751 2025-04-17T17:53:23.900300Z node 41 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912403885, txId: 281474976715678] shutting down 2025-04-17T17:53:23.912682Z node 43 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:53:23.912829Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 43 2025-04-17T17:53:23.913695Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:23.913857Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 44 2025-04-17T17:53:23.914685Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:23.915171Z node 44 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:53:23.916657Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 42 2025-04-17T17:53:23.919382Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:23.919649Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 45 2025-04-17T17:53:23.920465Z node 41 :HIVE WARN: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:23.921039Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341675481787265:2101], Type=268959746 2025-04-17T17:53:23.921070Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341675481787265:2101], Type=268959746 
2025-04-17T17:53:23.921096Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341675481787265:2101], Type=268959746 2025-04-17T17:53:23.921118Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341675481787265:2101], Type=268959746 2025-04-17T17:53:23.921142Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341675481787265:2101], Type=268959746 2025-04-17T17:53:23.921165Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341680537120308:2107], Type=268959746 2025-04-17T17:53:23.921187Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341680537120308:2107], Type=268959746 2025-04-17T17:53:23.921211Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341680537120308:2107], Type=268959746 2025-04-17T17:53:23.921237Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341680537120308:2107], Type=268959746 2025-04-17T17:53:23.921263Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341680537120308:2107], Type=268959746 2025-04-17T17:53:23.924323Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[45:7494341675481787265:2101], Type=268959746 2025-04-17T17:53:23.924378Z node 41 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[42:7494341680537120308:2107], Type=268959746 ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: rows by tier: {'__DEFAULT': 90000}, portions: 7 rows by tier: {'__DEFAULT': 90000}, portions: 7 rows by tier: {'__DEFAULT': 90000}, portions: 7 rows by tier: {'__DEFAULT': 90000}, portions: 7 rows by tier: {'__DEFAULT': 90000}, portions: 7 rows by tier: {'__DEFAULT': 90000}, portions: 7 rows by tier: {'__DEFAULT': 100000}, portions: 7 >> SystemView::QueryStatsFields [GOOD] >> SystemView::QueryStatsAllTables >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> KqpRm::Reduce [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-04-17T17:53:29.293829Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T17:53:29.294388Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/001735/r3tmp/tmpLlPnmx/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-04-17T17:53:29.295005Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/001735/r3tmp/tmpLlPnmx/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001735/r3tmp/tmpLlPnmx/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3265920627732790505 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T17:53:29.343987Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T17:53:29.344306Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T17:53:29.360079Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T17:53:29.360241Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T17:53:29.360454Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T17:53:29.360525Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T17:53:29.360622Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T17:53:29.360657Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T17:53:29.360692Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T17:53:29.360712Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-17T17:53:29.360914Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T17:53:29.376178Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744912409 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T17:53:29.376528Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T17:53:29.376621Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744912409 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T17:53:29.376864Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T17:53:29.377059Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T17:53:29.377092Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T17:53:29.377215Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744912409 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T17:53:29.377392Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T17:53:29.377425Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T17:53:29.377506Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744912409 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T17:53:29.377616Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T17:53:29.378164Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T17:53:29.378495Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T17:53:29.378753Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T17:53:29.379160Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T17:53:29.379335Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T17:53:29.379430Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T17:53:29.379643Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T17:53:29.379763Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 
2025-04-17T17:53:29.379834Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T17:53:29.379911Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T17:53:29.382816Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T17:53:29.382885Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T17:53:29.382941Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T17:53:29.382997Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T17:53:29.383040Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:463:2341])) 2025-04-17T17:53:29.383243Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T17:53:29.383469Z node 1 :RESOURCE_BROKER DEBUG: Update task kqp-1-1-1 (1 by [1:463:2341]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2025-04-17T17:53:29.383520Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T17:53:29.383561Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:463:2341])) 2025-04-17T17:53:29.383598Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. 
>> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> SystemView::PartitionStatsLocksFields [GOOD] >> SystemView::PartitionStatsFields >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg >> Secret::SimpleQueryService [GOOD] >> ResultFormatter::Pg [GOOD] >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks >> TGRpcYdbTest::ExecuteQueryCache [GOOD] >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> YdbQueryService::TestCreateDropAttachSession >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard >> TDatabaseQuotas::DisableWritesToDatabase [GOOD] >> TGRpcAuthentication::InvalidPassword >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2025-04-17T17:53:05.358741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341628167504705:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:05.358790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a47/r3tmp/tmp5cmWmp/pdisk_1.dat 2025-04-17T17:53:06.034080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:06.037555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:06.037688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:06.052996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64226, node 1 2025-04-17T17:53:06.407866Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:06.407888Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:06.407894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:06.408030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11854 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:06.749133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:10.489038Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341650633246665:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:10.489811Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a47/r3tmp/tmpxlg9Ya/pdisk_1.dat 2025-04-17T17:53:10.699302Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20764, node 4 2025-04-17T17:53:10.831091Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:10.831631Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:10.841057Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:10.860643Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:10.860667Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:10.860674Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:10.860794Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28019 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:11.181280Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:15.458550Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341672437030628:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a47/r3tmp/tmpequwcA/pdisk_1.dat 2025-04-17T17:53:15.559987Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:15.664530Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:15.739417Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:15.739508Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:15.754018Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25896, node 7 2025-04-17T17:53:15.834686Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:15.834731Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:15.834745Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:15.834903Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:16.105328Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:18.689689Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494341685321933406:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:18.689835Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:18.690436Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494341685321933418:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:18.697980Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:18.734401Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494341685321933420:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:53:18.824762Z node 7 :TX_PROXY ERROR: Actor# [7:7494341685321933495:2673] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:20.817240Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341695266968471:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:20.817993Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a47/r3tmp/tmpetTuMn/pdisk_1.dat 2025-04-17T17:53:20.994339Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:21.022983Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:21.023073Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:21.025892Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22972, node 10 2025-04-17T17:53:21.111486Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:21.112511Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:21.112523Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:21.112676Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:21.370711Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:24.387642Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341712446838731:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:24.387715Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:24.388159Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341712446838743:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:24.392415Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:24.415886Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494341712446838745:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:53:24.513914Z node 10 :TX_PROXY ERROR: Actor# [10:7494341712446838816:2684] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:26.257329Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341719765457209:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:26.257413Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a47/r3tmp/tmp3L2td3/pdisk_1.dat 2025-04-17T17:53:26.494778Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10097, node 13 2025-04-17T17:53:26.640163Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:26.640277Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:26.663589Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:26.684745Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:26.684768Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:26.684777Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:26.684935Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:26.977364Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:30.060064Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341736945327423:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:30.060161Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:30.061367Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341736945327435:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:30.065821Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:30.088594Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494341736945327437:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:53:30.150337Z node 13 :TX_PROXY ERROR: Actor# [13:7494341736945327510:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] Test command err: 2025-04-17T17:53:05.054733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341629269233726:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:05.054785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a51/r3tmp/tmp7rUAQC/pdisk_1.dat 2025-04-17T17:53:05.714833Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65491, node 1 2025-04-17T17:53:05.993300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:05.993397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:06.112531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:06.272019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:06.272038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:06.272046Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:06.272188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:06.609543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:10.434073Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341651599778650:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:10.434224Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a51/r3tmp/tmpOORkMN/pdisk_1.dat 2025-04-17T17:53:10.596926Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:10.623359Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:10.623443Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:10.629721Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9905, node 4 2025-04-17T17:53:10.827947Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:10.827972Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:10.827978Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:10.828095Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:11.075655Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:11.090768Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:53:15.203365Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341675078934141:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:15.203427Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a51/r3tmp/tmpApEqbS/pdisk_1.dat 2025-04-17T17:53:15.373882Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:15.400170Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:15.400241Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:15.403332Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29663, node 7 2025-04-17T17:53:15.515742Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:15.515765Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:15.515774Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:15.515907Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:15.773398Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:9203 2025-04-17T17:53:16.071393Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:16.102472Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:16.610593Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494341678314676353:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:16.610689Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:16.615194Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:16.615281Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:16.618900Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-17T17:53:16.619837Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:16.732354Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:16.753841Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:17.270690Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494341679542090275:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:17.270753Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:17.361747Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:17.361829Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:17.368301Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-17T17:53:17.377895Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:18.877900Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:53:19.051054Z node 7 :FLAT_TX_S ... Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:53:23.386742Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8990 2025-04-17T17:53:23.846567Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:23.876164Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:24.384011Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494341713009119753:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:24.384074Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:24.418301Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:24.418385Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:24.423983Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-17T17:53:24.425468Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:24.500464Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:24.537398Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:25.060370Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494341715049887212:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:25.061274Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:25.150472Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:25.150565Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:25.154159Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-04-17T17:53:25.159464Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:26.801464Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:53:26.916230Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:53:26.988396Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341719432655581:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:26.988494Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:26.988557Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341719432655592:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:26.994226Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-17T17:53:27.019143Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494341719432655595:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T17:53:27.104154Z node 10 :TX_PROXY ERROR: Actor# [10:7494341723727622968:3405] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:27.212772Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2e4pea4k4zfy7h78sn1jk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGVmMTBlMmItZmE4MWJhZmItYjQ2NDc2NDctNzU0MTcxZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:27.344532Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2e4pp5f9zw5y488afy2m2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGVmMTBlMmItZmE4MWJhZmItYjQ2NDc2NDctNzU0MTcxZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:27.568167Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2e4pt8fdtztnc6tt4k8a7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGVmMTBlMmItZmE4MWJhZmItYjQ2NDc2NDctNzU0MTcxZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:27.642374Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494341702252784490:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:27.642462Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:28.881950Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01js2e4pt8fdtztnc6tt4k8a7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGVmMTBlMmItZmE4MWJhZmItYjQ2NDc2NDctNzU0MTcxZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:28.899206Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7494341728022590448:2360] TxId: 281474976715669. Ctx: { TraceId: 01js2e4pt8fdtztnc6tt4k8a7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGVmMTBlMmItZmE4MWJhZmItYjQ2NDc2NDctNzU0MTcxZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. Tx canceled, actorId: [10:7494341728022590448:2360], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2025-04-17T17:53:28.899477Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NGVmMTBlMmItZmE4MWJhZmItYjQ2NDc2NDctNzU0MTcxZTE=, ActorId: [10:7494341719432655397:2360], ActorState: ExecuteState, TraceId: 01js2e4pt8fdtztnc6tt4k8a7j, Create QueryResponse for error on request, msg: 2025-04-17T17:53:28.900546Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2e4pt8fdtztnc6tt4k8a7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGVmMTBlMmItZmE4MWJhZmItYjQ2NDc2NDctNzU0MTcxZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:53:28.916675Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-17T17:53:28.917183Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:28.919368Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-17T17:53:28.919840Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:29.384580Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7494341713009119753:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:29.384677Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:29.748857Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:53:29.750908Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7494341732229756798:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:29.807562Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7494341732229756798:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:29.927031Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7494341732229756798:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:30.060937Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494341715049887212:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:30.061017Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:30.196286Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7494341732229756798:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |78.7%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink >> TBlobStorageProxyTest::TestPartialGetBlock >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-04-17T17:53:03.438674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341623264315150:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:03.449379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 17:53:03.818068021 99793 dns_resolver.cc:162] no server name supplied in dns URI E0417 17:53:03.818224956 99793 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T17:53:04.452406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:04.815756Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23714: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23714 } ] 2025-04-17T17:53:04.822680Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23714: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23714 2025-04-17T17:53:05.458566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:06.463943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:06.712713Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23714: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23714 } ] 2025-04-17T17:53:07.374798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:53:07.387007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494341640444184635:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:07.463355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:07.474173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494341640444184635:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:07.654565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494341640444184635:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ade/r3tmp/tmpLCSQhq/pdisk_1.dat 2025-04-17T17:53:07.958349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494341640444184635:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 23714, node 1 TClient is connected to server localhost:28428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:08.422219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:53:08.435986Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341623264315150:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:08.436050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-17T17:53:08.777122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:08.777150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:08.777160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:08.777323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:53:08.782551Z node 1 :IMPORT WARN: Table profiles were not loaded E0417 17:53:08.810885870 99878 dns_resolver.cc:162] no server name supplied in dns URI E0417 17:53:08.811052568 99878 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T17:53:09.253521Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:53:09.258537Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T17:53:09.258598Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T17:53:09.258617Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T17:53:09.258617Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T17:53:09.258635Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T17:53:09.258641Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T17:53:09.263406Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". 
Create session OK 2025-04-17T17:53:09.263441Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T17:53:09.263453Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T17:53:09.264234Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T17:53:09.264255Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T17:53:09.264261Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T17:53:09.264444Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T17:53:09.264454Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T17:53:09.264459Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T17:53:09.265054Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T17:53:09.265055Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T17:53:09.265067Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T17:53:09.265067Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T17:53:09.265071Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T17:53:09.265072Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T17:53:09.267078Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T17:53:09.267092Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T17:53:09.267097Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T17:53:09.268064Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T17:53:09.268086Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T17:53:09.268092Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T17:53:09.268793Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T17:53:09.268804Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T17:53:09.268809Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T17:53:09.269446Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T17:53:09.269461Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T17:53:09.269470Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T17:53:09.270235Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T17:53:09.270252Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T17:53:09.270257Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T17:53:09.276490Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T17:53:09.276515Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T17:53:09.276546Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". 
Create session OK 2025-04-17T17:53:09.276559Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T17:53:09.276565Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T17:53:09.287875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:09.291424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:53:09.293770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... : default. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-04-17T17:53:29.514021Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980793:2627], TxId: 281474976715706, task: 2. Add data: 234 / 234 2025-04-17T17:53:29.514066Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980793:2627], TxId: 281474976715706, task: 2. Send data=234, closed=1, bufferActorId=[4:7494341734586980784:2509] 2025-04-17T17:53:29.514086Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980790:2627], TxId: 281474976715706, task: 2. Ctx: { TraceId : 01js2e4rp2armwhkvekrn4wz7c. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjE2MzQzMDktNjQwNWY5YjYtNWYzYTBiYTMtYjNiYjY2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 234 2025-04-17T17:53:29.514111Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-17T17:53:29.514121Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. Tasks execution finished 2025-04-17T17:53:29.514130Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980790:2627], TxId: 281474976715706, task: 2. Ctx: { TraceId : 01js2e4rp2armwhkvekrn4wz7c. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjE2MzQzMDktNjQwNWY5YjYtNWYzYTBiYTMtYjNiYjY2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Waiting finish of sink[0] 2025-04-17T17:53:29.514145Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980789:2626], TxId: 281474976715706, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjE2MzQzMDktNjQwNWY5YjYtNWYzYTBiYTMtYjNiYjY2ZWU=. TraceId : 01js2e4rp2armwhkvekrn4wz7c. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-17T17:53:29.514172Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980789:2626], TxId: 281474976715706, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjE2MzQzMDktNjQwNWY5YjYtNWYzYTBiYTMtYjNiYjY2ZWU=. TraceId : 01js2e4rp2armwhkvekrn4wz7c. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:53:29.514290Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:10:1]). 
lockId=281474976715700 [4:7494341734586980795:2509] 2025-04-17T17:53:29.514322Z node 4 :KQP_COMPUTE DEBUG: Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]Open: token=0 2025-04-17T17:53:29.514345Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], ProcessRequestQueue [72057594046644480:10:1] NOT READY queue=1 2025-04-17T17:53:29.514393Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]Write: token=0 2025-04-17T17:53:29.514495Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]Close: token=0 2025-04-17T17:53:29.514573Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980793:2627], TxId: 281474976715706, task: 2. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7494341734586980784:2509] 2025-04-17T17:53:29.514585Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980793:2627], TxId: 281474976715706, task: 2. Finished 2025-04-17T17:53:29.514601Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980790:2627], TxId: 281474976715706, task: 2. Ctx: { TraceId : 01js2e4rp2armwhkvekrn4wz7c. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjE2MzQzMDktNjQwNWY5YjYtNWYzYTBiYTMtYjNiYjY2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:53:29.514626Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-17T17:53:29.514633Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. Tasks execution finished 2025-04-17T17:53:29.514646Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980790:2627], TxId: 281474976715706, task: 2. Ctx: { TraceId : 01js2e4rp2armwhkvekrn4wz7c. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjE2MzQzMDktNjQwNWY5YjYtNWYzYTBiYTMtYjNiYjY2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T17:53:29.514714Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. pass away 2025-04-17T17:53:29.514785Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715706;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T17:53:29.515083Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1. Tasks execution finished 2025-04-17T17:53:29.515147Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980789:2626], TxId: 281474976715706, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjE2MzQzMDktNjQwNWY5YjYtNWYzYTBiYTMtYjNiYjY2ZWU=. TraceId : 01js2e4rp2armwhkvekrn4wz7c. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T17:53:29.515251Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1. pass away 2025-04-17T17:53:29.515320Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715706;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T17:53:29.515580Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Start prepare for distributed commit 2025-04-17T17:53:29.515593Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]SetPrepare; txId=281474976715706 2025-04-17T17:53:29.515608Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Flush data 2025-04-17T17:53:29.515730Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]Send EvWrite to ShardID=72075186224037895, isPrepare=1, isImmediateCommit=0, TxId=281474976715706, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715700 DataShard: 72075186224037895 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 10, Size=324, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1 2025-04-17T17:53:29.515813Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Send EvWrite (external) to ShardID=72075186224037892, isPrepare=1, isImmediateCommit=0, TxId=281474976715706, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715700 DataShard: 72075186224037892 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 7, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2025-04-17T17:53:29.516687Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]Recv EvWriteResult from ShardID=72075186224037895, Status=STATUS_PREPARED, TxId=281474976715706, Locks= , Cookie=1 2025-04-17T17:53:29.516738Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Flush data 2025-04-17T17:53:29.516766Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Recv EvWriteResult (external) from ShardID=72075186224037892, Status=STATUS_PREPARED, TxId=281474976715706, Locks= , Cookie=0 2025-04-17T17:53:29.516780Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Got prepared result TxId=281474976715706, TabletId=72075186224037892, Cookie=0 2025-04-17T17:53:29.516800Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Start distributed commit with TxId=281474976715706 2025-04-17T17:53:29.516813Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]SetDistributedCommit; txId=281474976715706 2025-04-17T17:53:29.516837Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2025-04-17T17:53:29.517306Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Got transaction status, status: 16 2025-04-17T17:53:29.523816Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Got transaction status, status: 17 2025-04-17T17:53:29.527544Z node 4 :KQP_COMPUTE 
DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Recv EvWriteResult (external) from ShardID=72075186224037892, Status=STATUS_COMPLETED, TxId=281474976715706, Locks= , Cookie=0 2025-04-17T17:53:29.527579Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Got completed result TxId=281474976715706, TabletId=72075186224037892, Cookie=0, Locks= 2025-04-17T17:53:29.527623Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]Recv EvWriteResult from ShardID=72075186224037895, Status=STATUS_COMPLETED, TxId=281474976715706, Locks= , Cookie=0 2025-04-17T17:53:29.527641Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980795:2509], Table: `Root/yq/connections` ([72057594046644480:10:1]), SessionActorId: [4:7494341717407110745:2509]Got completed result TxId=281474976715706, TabletId=72075186224037895, Cookie=0, Mode=2, Locks= 2025-04-17T17:53:29.527654Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341734586980784:2509], SessionActorId: [4:7494341717407110745:2509], Committed TxId=281474976715706 2025-04-17T17:53:29.579093Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:28462: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:28462 E0417 17:53:30.258131563 105107 dns_resolver.cc:162] no server name supplied in dns URI E0417 17:53:30.258290243 105107 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T17:53:30.274576Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:28462: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:28462 2025-04-17T17:53:30.578453Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:28462: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:28462 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: 2025-04-17T17:51:03.338364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:03.338904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:03.338966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b20/r3tmp/tmpLKEbh4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8489, node 1 TClient is connected to server localhost:62238 2025-04-17T17:51:03.960691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:04.015640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:04.019101Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:04.019164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:04.019195Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:04.019738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:51:04.057548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:04.057677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:04.069610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-17T17:51:15.972177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:15.972333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:818:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:15.972427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:15.978068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:51:15.999632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:822:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:51:16.047254Z node 1 :TX_PROXY ERROR: Actor# [1:873:2716] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:16.283798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-17T17:51:17.248864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:51:18.027498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-17T17:51:19.456883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T17:51:20.405948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T17:51:21.230075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T17:51:22.887618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-17T17:51:23.470308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:51:27.008698Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2e0pfw87mqdkyx0etkqkpk", SessionId: ydb://session/3?node_id=1&id=MmNiNmZjOS05MjQ3ZmNmNC1kMGI2ZWUzNC0yYjYyYTU0, Slow query, duration: 11.039325s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`", parameters: 0b REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-17T17:51:41.851778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:51:41.851827Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = 
`200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-17T17:52:05.306467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715719. Ctx: { TraceId: 01js2e264p4340mq1dypp3xcqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ3NmEyMjgtMjg4OWJkMC01M2VhYjQ0OS1mYTMwMjlkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T17:52:29.590778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715737:0, at schemeshard: 72057594046644480 2025-04-17T17:52:31.527181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480 2025-04-17T17:52:33.629637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715755:0, at schemeshard: 72057594046644480 2025-04-17T17:52:34.395612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715758:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T17:52:48.314210Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715770. Ctx: { TraceId: 01js2e3g90d5sm4f4qt5a294jb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI0MWM0Zi01YzJkYzdlMS1hMTUwYTZmOS05NDIzMWFkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-17T17:53:27.591868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715810. Ctx: { TraceId: 01js2e4px39z1rj9z9h3ps4g0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU5NmYzMTEtZDFlOGFiMDAtMjYzM2ZkYmItZDc0M2YwNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] Test command err: 2025-04-17T17:53:04.630760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341623696311599:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:04.630819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5e/r3tmp/tmp8aMsix/pdisk_1.dat 2025-04-17T17:53:05.242042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:05.242150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:05.245932Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:05.254545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61573, node 1 2025-04-17T17:53:05.702894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:05.702922Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:05.702929Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:05.703049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64597 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:06.253176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:06.358867Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:40734 Call 2025-04-17T17:53:06.376504Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:40744 2025-04-17T17:53:08.494472Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:40758 Call Call 2025-04-17T17:53:08.596925Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:40774 2025-04-17T17:53:08.619529Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:40780 2025-04-17T17:53:08.623297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:10.358653Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341650272035423:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:10.358726Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5e/r3tmp/tmpRJ057C/pdisk_1.dat 2025-04-17T17:53:10.536534Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:10.569830Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:10.569909Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:10.573890Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9533, node 4 
2025-04-17T17:53:10.678907Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:10.678930Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:10.678941Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:10.679057Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:10.936813Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:13.513167Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:13.514263Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:13.514295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:13.516344Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1 2025-04-17T17:53:13.590376Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912393637, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:13.657999Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-17T17:53:13.671976Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-2, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:53:13.672706Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:13.676843Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /Root/Table-2 2025-04-17T17:53:13.734025Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912393777, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:13.756327Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 
281474976715659, done: 0, blocked: 1 2025-04-17T17:53:13.758700Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-17T17:53:13.789220Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-3, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:13.789702Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:13.789720Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-4, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-17T17:53:13.789984Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:13.792647Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, dst path: /Root/Table-3, dst path: /Root/Table-4 2025-04-17T17:53:13.835416Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912393882, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:13.861888Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 2 2025-04-17T17:53:13.864498Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-17T17:53:13.864643Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-04-17T17:53:13.876997Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-5, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T17:53:13.877501Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:13.877520Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-6, opId: 281474976715661:1, at schemeshard: 72057594046644480 2025-04-17T17:53:13.877696Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:13.877713Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-7, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-04-17T17:53:13.877926Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:13.877944Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-8, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T17:53:13.878130Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , ... 
3:17.156222Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:17.196107Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:17.196188Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:17.198989Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20015, node 7 2025-04-17T17:53:17.419073Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:17.419095Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:17.419102Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:17.419232Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:17.659100Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:14438 2025-04-17T17:53:20.223454Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14438 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744912400343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) 2025-04-17T17:53:20.566770Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14438 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744912400343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) 2025-04-17T17:53:22.322302Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341703705078777:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:22.322401Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5e/r3tmp/tmphyv2aC/pdisk_1.dat 2025-04-17T17:53:22.582421Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23372, node 10 2025-04-17T17:53:22.670834Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:22.670979Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:22.727464Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:22.805189Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:22.805212Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:22.805224Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:22.805395Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63095 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:23.107512Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:26.097966Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:26.220372Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:53:26.277289Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:27.960997Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341724785511112:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:27.961070Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5e/r3tmp/tmpLQbGoV/pdisk_1.dat 2025-04-17T17:53:28.253640Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:28.313459Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:28.313568Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:28.318839Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15970, node 13 2025-04-17T17:53:28.531054Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:28.531084Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:28.531097Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:28.531289Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25205 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:28.876926Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:31.894936Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:32.037368Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys >> SystemView::ShowCreateTableTtlSettings [GOOD] >> SystemView::ShowCreateTableTemporary >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne >> KqpPg::SelectIndex+useSink [GOOD] >> KqpPg::SelectIndex-useSink >> TBlobStorageProxyTest::TestDoubleFailure >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> TBlobStorageProxyTest::TestNormal >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> Cdc::InitialScan [GOOD] >> Cdc::InitialScanDebezium >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] Test command err: 2025-04-17T17:53:05.489808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341630219416890:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:05.490409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001a4c/r3tmp/tmp9HWJGy/pdisk_1.dat 2025-04-17T17:53:06.014841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:06.040707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:06.040794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:06.048824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20167, node 1 2025-04-17T17:53:06.351158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:06.351184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:06.351195Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:06.351330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:06.675741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:10.685866Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341653575766858:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:10.685921Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4c/r3tmp/tmpHVbWxI/pdisk_1.dat 2025-04-17T17:53:10.912251Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12918, node 4 2025-04-17T17:53:11.095098Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:11.095180Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:11.113259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:11.163314Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:11.163340Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:11.163349Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:11.163507Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:11.512021Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:15.751631Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341671718441710:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:15.751699Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4c/r3tmp/tmpzFlYFc/pdisk_1.dat 2025-04-17T17:53:15.926711Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:15.954916Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:15.954992Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:15.957984Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30903, node 7 2025-04-17T17:53:16.022864Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:16.022889Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:16.022898Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:16.023041Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:16.333576Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:7286 2025-04-17T17:53:16.663564Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:16.664066Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:16.664091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:16.668876Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-04-17T17:53:16.676982Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912396724, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:16.679019Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-17T17:53:16.679069Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-04-17T17:53:16.679685Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-17T17:53:16.683576Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:53:16.684179Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:16.684205Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:16.685479Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2025-04-17T17:53:17.191915Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494341680647952885:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:17.191989Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:17.263024Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:17.263150Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:17.272415Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-17T17:53:17.274015Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:17.586724Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912397634, ... 
494341705788410489:2274] 1744912403519254 != 1744912403519257 2025-04-17T17:53:23.909569Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912403955, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:23.918434Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-17T17:53:23.918618Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-17T17:53:23.921131Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-17T17:53:25.815304Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:25.816567Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:25.819022Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:25.819189Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976710660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-04-17T17:53:25.819428Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:25.819483Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-17T17:53:25.820506Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:25.829291Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2025-04-17T17:53:25.835550Z node 10 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923082178336}: tablet 72075186224037891 could not find a group for channel 2 pool name_ydb_ut_tenant_kind_hdd 2025-04-17T17:53:25.835600Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923082178336}: tablet 72075186224037891 wasn't changed 2025-04-17T17:53:25.973630Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912406010, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:25.994387Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-17T17:53:25.994521Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 2025-04-17T17:53:25.994537Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:2 
2025-04-17T17:53:26.015615Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-17T17:53:26.016471Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:28.799803Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341730639683011:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:28.802153Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4c/r3tmp/tmpZXVYbM/pdisk_1.dat 2025-04-17T17:53:29.051778Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2879, node 13 2025-04-17T17:53:29.140381Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:29.140504Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:29.148594Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:29.153577Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:29.153607Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:29.153618Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:29.153777Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:29.495794Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:20449 2025-04-17T17:53:29.976429Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:29.976811Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:29.976843Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:29.980721Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2025-04-17T17:53:29.990171Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912410038, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:29.992103Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-17T17:53:29.992168Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-04-17T17:53:29.993337Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-17T17:53:29.997886Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:53:29.998461Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:29.998490Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T17:53:30.000037Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-04-17T17:53:30.511564Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7494341735977215294:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:30.511627Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:30.527371Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:30.527475Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:30.543077Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-17T17:53:30.556046Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:30.866420Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912410913, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:30.869790Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-17T17:53:30.869954Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-17T17:53:30.872526Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-17T17:53:33.331557Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:33.332945Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:33.332972Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:33.341273Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/Table-1 2025-04-17T17:53:33.433535Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912413470, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:53:33.451606Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-17T17:53:33.484666Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-17T17:53:33.485202Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-04-17T17:53:32.504239Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001c20/r3tmp/tmp3Dsi4N//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-17T17:53:32.528265Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] Test command err: 2025-04-17T17:53:07.518584Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341637210208429:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:07.538583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a44/r3tmp/tmpRI5arA/pdisk_1.dat 2025-04-17T17:53:07.938400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:07.952482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:07.952610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:07.966080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27850, node 1 2025-04-17T17:53:08.110080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:08.110104Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:08.110112Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:08.110439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:08.486806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-17T17:53:10.831218Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:10.852968Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:12.688653Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341662032560204:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:12.689581Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a44/r3tmp/tmpTUSKBJ/pdisk_1.dat 2025-04-17T17:53:12.887309Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:12.933045Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:12.933145Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:12.948115Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31663, node 4 2025-04-17T17:53:13.230627Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:13.230650Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:13.230659Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:13.230794Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:13.461020Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:13.543248Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:13.551416Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:17.451003Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341681514906628:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:17.451045Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a44/r3tmp/tmphy5kBc/pdisk_1.dat 2025-04-17T17:53:17.625964Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:17.666795Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:17.666883Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:17.671932Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23760, node 7 2025-04-17T17:53:17.782670Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:17.782691Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:17.782703Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:17.782841Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11701 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:18.062568Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-17T17:53:20.918045Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:20.927593Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:20.937151Z node 7 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {UNAUTHORIZED, 0} 2025-04-17T17:53:20.949201Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:20.959760Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:22.523117Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341704019959905:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:22.523166Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a44/r3tmp/tmpl9H3y5/pdisk_1.dat 2025-04-17T17:53:22.853973Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:22.887796Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:22.887878Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:22.893495Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9232, node 10 2025-04-17T17:53:23.091868Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:23.091896Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:23.091906Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:23.092068Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:23.370576Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-17T17:53:26.667345Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:26.680483Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:26.688221Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:26.704701Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:26.717015Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-17T17:53:28.427388Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341730070806082:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:28.427483Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a44/r3tmp/tmpmezp4M/pdisk_1.dat 2025-04-17T17:53:28.852449Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:28.891476Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:28.891584Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:28.899533Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30425, node 13 2025-04-17T17:53:29.007959Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:29.007988Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:29.007997Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:29.008200Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13233 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:29.293861Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:33.415102Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494341730070806082:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:33.415665Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> YdbQueryService::TestCreateDropAttachSession [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> TGRpcAuthentication::InvalidPassword [GOOD] >> TGRpcAuthentication::DisableLoginAuthentication >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> TBlobStorageProxyTest::TestDoubleGroups >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TBlobStorageProxyTest::TestVPutVGet ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2025-04-17T17:52:26.749536Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341463011955674:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:26.758125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005ab/r3tmp/tmpVEgbZd/pdisk_1.dat 2025-04-17T17:52:27.755788Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:27.790628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:27.812819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:27.812918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:27.836995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5370, node 1 2025-04-17T17:52:28.294205Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:28.294231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:28.294239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:28.294399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:28.878976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
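The YdbSdkSessionsPool::StressTestAsync1 output in this block comes from a stress test that drives a bounded client-side session pool from many concurrent asynchronous callers. The sketch below reproduces the general shape of such a test using only the standard library; the pool type, its limit, and the workload sizes are assumptions for illustration, not the YDB SDK's real session pool API.

#include <chrono>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>
#include <vector>

// Toy bounded session pool: at most maxSessions workers hold a "session"
// at once and the rest block, similar in spirit to an SDK session pool.
class TSessionPool {
public:
    explicit TSessionPool(int maxSessions) : Free_(maxSessions) {}

    void Acquire() {
        std::unique_lock<std::mutex> lock(Mutex_);
        CanAcquire_.wait(lock, [this] { return Free_ > 0; });
        --Free_;
    }

    void Release() {
        {
            std::lock_guard<std::mutex> lock(Mutex_);
            ++Free_;
        }
        CanAcquire_.notify_one();
    }

private:
    std::mutex Mutex_;
    std::condition_variable CanAcquire_;
    int Free_;
};

int main() {
    TSessionPool pool(/*maxSessions=*/50);  // pool limit is an assumption
    std::vector<std::thread> workers;
    for (int i = 0; i < 200; ++i) {         // far more workers than sessions
        workers.emplace_back([&pool] {
            for (int q = 0; q < 100; ++q) {
                pool.Acquire();
                // A real test would run a query here; we just hold the session.
                std::this_thread::sleep_for(std::chrono::microseconds(50));
                pool.Release();
            }
        });
    }
    for (auto& worker : workers) {
        worker.join();
    }
    std::cout << "stress finished\n";
    return 0;
}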
2025-04-17T17:52:31.726974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341463011955674:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:31.727108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:42.734576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:52:42.734608Z node 1 :IMPORT WARN: Table profiles were not loaded >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::DescribeTableOptions >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> SystemView::PartitionStatsFields [FAIL] >> SystemView::PDisksFields >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> Yq_1::DeleteQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2025-04-17T17:53:36.700120Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001c0d/r3tmp/tmpG9nT2d//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-17T17:53:36.700733Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001c0d/r3tmp/tmpG9nT2d//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-17T17:53:36.762944Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# 
true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:36.771248Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:39.525953Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001c0d/r3tmp/tmp8PLQc1//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-17T17:53:39.526732Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001c0d/r3tmp/tmp8PLQc1//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-17T17:53:39.530946Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:39.531122Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink >> TBlobStorageProxyTest::TestInFlightPuts >> TGRpcAuthentication::DisableLoginAuthentication [GOOD] >> TGRpcAuthentication::NoConnectRights >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> YdbS3Internal::BadRequests >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> YdbYqlClient::AlterTableAddIndex >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> DataShardReadIterator::ShouldReadKeyPrefix3 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> DataShardReadIterator::TryWriteManyRows+Commit >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest >> TBlobStorageProxyTest::TestHugeCollectGarbage |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |78.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.8%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-04-17T17:53:04.577296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341625435035220:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:04.577348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 17:53:05.168824515 100100 dns_resolver.cc:162] no server name supplied in dns URI E0417 17:53:05.168947946 100100 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T17:53:05.246910Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32373: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32373 } ] 2025-04-17T17:53:05.578896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:06.246783Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32373: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32373 } ] 2025-04-17T17:53:06.276814Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32373: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:32373 2025-04-17T17:53:06.578534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:07.586258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:08.050204Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32373: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32373 } ] 2025-04-17T17:53:08.590700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:08.962034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:53:08.963873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494341642614904786:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:09.058948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494341642614904786:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ad4/r3tmp/tmp4Uy1OX/pdisk_1.dat 2025-04-17T17:53:09.201539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494341642614904786:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:09.321818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:09.321910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:09.333236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32373, node 1 2025-04-17T17:53:09.577609Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341625435035220:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:09.577696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:09.660319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:09.774338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:09.777594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:09.777613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:09.777621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:09.777745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration E0417 17:53:10.164082401 100259 dns_resolver.cc:162] no server name supplied in dns URI E0417 17:53:10.164257365 100259 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T17:53:10.847636Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:53:10.847789Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T17:53:10.847810Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T17:53:10.847817Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T17:53:10.850954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". 
Create session OK 2025-04-17T17:53:10.850987Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T17:53:10.850993Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T17:53:10.852143Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T17:53:10.852156Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T17:53:10.852162Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T17:53:10.858909Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T17:53:10.858951Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T17:53:10.858960Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T17:53:10.872414Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T17:53:10.872452Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T17:53:10.872459Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T17:53:10.876225Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T17:53:10.876248Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T17:53:10.876252Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T17:53:10.888988Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T17:53:10.889023Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T17:53:10.889029Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T17:53:10.891746Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T17:53:10.891773Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T17:53:10.891778Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T17:53:10.892631Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T17:53:10.892648Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T17:53:10.892675Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T17:53:10.892684Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T17:53:10.892699Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T17:53:10.893412Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T17:53:10.893425Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T17:53:10.893429Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T17:53:10.894565Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2025-04-17T17:53:10.894576Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T17:53:10.894581Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T17:53:10.898509Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T17:53:10.898524Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T17:53:10.898529Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T17:53:10.911181Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T17:53:10.911208Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T17:53:10.911215Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T17:53:10.921371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, s ... rrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T17:53:39.687965Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7494341775307683336 RawX2: 4503616807242792 } } DstEndpoint { ActorId { RawX1: 7494341775307683337 RawX2: 4503616807242793 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7494341775307683337 RawX2: 4503616807242793 } } DstEndpoint { ActorId { RawX1: 7494341775307683332 RawX2: 4503616807242283 } } InMemory: true } 2025-04-17T17:53:39.687987Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7494341775307683336:3112] 2025-04-17T17:53:39.688058Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T17:53:39.688167Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7494341775307683336 RawX2: 4503616807242792 } } DstEndpoint { ActorId { RawX1: 7494341775307683337 RawX2: 4503616807242793 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7494341775307683337 RawX2: 4503616807242793 } } DstEndpoint { ActorId { RawX1: 7494341775307683332 RawX2: 4503616807242283 } } InMemory: true } 2025-04-17T17:53:39.688200Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:53:39.688842Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. Recv TEvReadResult from ShardID=72075186224037891, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-17T17:53:39.688870Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. Taken 0 locks 2025-04-17T17:53:39.688887Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. new data for read #0 seqno = 1 finished = 1 2025-04-17T17:53:39.688908Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683336:3112], TxId: 281474976715803, task: 1. Ctx: { TraceId : 01js2e52fz6rx20q131dnmntd2. SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-17T17:53:39.688932Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683336:3112], TxId: 281474976715803, task: 1. Ctx: { TraceId : 01js2e52fz6rx20q131dnmntd2. SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:53:39.688949Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-17T17:53:39.688974Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. enter pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-17T17:53:39.688993Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. exit pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-17T17:53:39.689010Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. returned 0 rows; processed 0 rows 2025-04-17T17:53:39.689046Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. dropping batch for read #0 2025-04-17T17:53:39.689061Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. effective maxinflight 1024 sorted 0 2025-04-17T17:53:39.689072Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. Scheduled table scans, in flight: 0 shards. 
pending shards to read: 0, 2025-04-17T17:53:39.689089Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1, CA Id [4:7494341775307683336:3112]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-17T17:53:39.689166Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683336:3112], TxId: 281474976715803, task: 1. Ctx: { TraceId : 01js2e52fz6rx20q131dnmntd2. SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T17:53:39.689192Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-17T17:53:39.689214Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 2. Finish input channelId: 1, from: [4:7494341775307683336:3112] 2025-04-17T17:53:39.689241Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:53:39.689276Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T17:53:39.689288Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683336:3112], TxId: 281474976715803, task: 1. Ctx: { TraceId : 01js2e52fz6rx20q131dnmntd2. SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-17T17:53:39.689305Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683336:3112], TxId: 281474976715803, task: 1. Ctx: { TraceId : 01js2e52fz6rx20q131dnmntd2. SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:53:39.689325Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1. Tasks execution finished 2025-04-17T17:53:39.689339Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683336:3112], TxId: 281474976715803, task: 1. Ctx: { TraceId : 01js2e52fz6rx20q131dnmntd2. SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-17T17:53:39.689445Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 1. pass away 2025-04-17T17:53:39.689552Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715803;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T17:53:39.689885Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:53:39.689926Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-17T17:53:39.689935Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 2. Tasks execution finished 2025-04-17T17:53:39.689945Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494341775307683337:3113], TxId: 281474976715803, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGUwZDdjMmItYTYxYjA5MWMtMzNlOGQ2ZjItZDYzMjk4NjA=. CustomerSuppliedId : . TraceId : 01js2e52fz6rx20q131dnmntd2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T17:53:39.689992Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715803, task: 2. pass away 2025-04-17T17:53:39.690037Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715803;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T17:53:39.728644Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqued0blqheeqdbaa63" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:664: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } 2025-04-17T17:53:39.808382Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:18402: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:18402 2025-04-17T17:53:40.784497Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:18402: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:18402 >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> Cdc::RenameTable [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |78.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> KqpPg::CheckPgAutoParams-useSink [GOOD] |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> TBlobStorageProxyTest::TestProxySimpleDiscover |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2025-04-17T17:53:45.254448Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001c05/r3tmp/tmpHWDq9Z//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-17T17:53:45.257997Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001c05/r3tmp/tmpHWDq9Z//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-17T17:53:45.289654Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# 
true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:45.289891Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> Cdc::AddStream [GOOD] >> Cdc::AwsRegion >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |78.9%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 13768, MsgBus: 25848 2025-04-17T17:51:36.831841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341246778422885:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:36.831926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014b7/r3tmp/tmpqQPPic/pdisk_1.dat 2025-04-17T17:51:37.273057Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13768, node 1 2025-04-17T17:51:37.326993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:37.327098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:37.328239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:37.389980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:37.390001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:37.390007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:37.390099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25848 TClient is connected to server localhost:25848 WaitRootIsUp 'Root'... 
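A note on the BS_PDISK CRIT entries above: both VDisks fail local recovery because the pdisk.dat files under vdisk_bad_0/ and vdisk_bad_1/ carry no format magic, so the PDisk refuses to initialize ("Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey"), Yard::Init fails, and recovery finishes with status# CORRUPTED. The vdisk_bad_* paths suggest a deliberately unformatted fixture rather than real data loss. The sketch below illustrates the general shape of such a format probe; the magic constant and its placement are invented for illustration and are not YDB's actual on-disk layout, only the 4096-byte sector size is taken from the SectorSize# 4096 field in the config dump.

    // Hypothetical format probe: read the first sector of a pdisk data file
    // and look for a format marker. kAssumedMagic is NOT YDB's real magic;
    // only kSectorSize mirrors the SectorSize# 4096 value logged above.
    #include <array>
    #include <cstring>
    #include <fstream>
    #include <iostream>
    #include <string>

    constexpr std::size_t kSectorSize = 4096;
    constexpr char kAssumedMagic[8] = {'P', 'D', 'I', 'S', 'K', 'F', 'M', 'T'};

    static bool HasFormatMagic(const std::string& path) {
        std::ifstream f(path, std::ios::binary);
        std::array<char, kSectorSize> sector{};
        if (!f.read(sector.data(), sector.size())) {
            return false;  // short read: file missing or never formatted
        }
        return std::memcmp(sector.data(), kAssumedMagic, sizeof(kAssumedMagic)) == 0;
    }

    int main(int argc, char** argv) {
        if (argc < 2) {
            std::cerr << "usage: probe <pdisk.dat>" << std::endl;
            return 2;
        }
        if (HasFormatMagic(argv[1])) {
            std::cout << "format magic present" << std::endl;
            return 0;
        }
        std::cout << "format is incomplete: magic sector not present" << std::endl;
        return 1;
    }

A probe of this shape, pointed at an unformatted file like the tmpHWDq9Z fixtures, reports the marker missing, which is the condition blobstorage_pdisk_actor.cpp:413 escalates to CRIT.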
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:38.083003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:38.114502Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:51:40.553092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:40.735510Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-17T17:51:40.832091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:51:40.904085Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-17T17:51:41.036920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.118391Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-17T17:51:41.209692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.310851Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-17T17:51:41.367240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 f f t t 2025-04-17T17:51:41.513631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 f f t t 2025-04-17T17:51:41.662725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.720252Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-04-17T17:51:41.776001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.833160Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341246778422885:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:41.833218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:41.850301Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-04-17T17:51:41.906598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.972506Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-17T17:51:42.041555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.092746Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-17T17:51:42.158460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-17T17:51:42.274253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 
72057594046644480 2025-04-17T17:51:42.317737Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-17T17:51:42.365519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.421441Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-17T17:51:42.458296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.508143Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-17T17:51:42.547907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.638932Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-17T17:51:42.678354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.757220Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-17T17:51:42.806700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.853805Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-17T17:51:42.901710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710723:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.945035Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-17T17:51:42.997317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710727:0, at schemeshard: 72057594046644480 2025-04-17T17:51:43.049143Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-17T17:51:43.094379Z node 1 :FLAT_ ... 
ndelivered;self_id=[14:7494341757581830301:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:35.917498Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014b7/r3tmp/tmpdwTr4V/pdisk_1.dat 2025-04-17T17:53:36.377578Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:36.404861Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:36.405011Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:36.411805Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5603, node 14 2025-04-17T17:53:36.483341Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:36.483378Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:36.483395Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:36.483597Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20001 TClient is connected to server localhost:20001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:37.687407Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:37.700686Z node 14 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:53:40.917388Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7494341757581830301:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:40.917491Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:41.865274Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7494341783351634737:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:41.865411Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:41.865999Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7494341783351634757:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:41.875110Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:41.898163Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7494341783351634759:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:53:41.996378Z node 14 :TX_PROXY ERROR: Actor# [14:7494341783351634812:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:42.036495Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:42.429849Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:53:43.260992Z node 14 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [14:7494341791941569755:2400], owner: [14:7494341783351634709:2322], statement id: 0 2025-04-17T17:53:43.261518Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZDI5YzBjNDAtNWVkNzk5YTUtNDc4MTkwMGMtZGFkOWZjOGE=, ActorId: [14:7494341791941569753:2399], ActorState: ExecuteState, TraceId: 01js2e56apbqaf2m29s5ekfk3w, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:53:43.573418Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7494341791941569784:2410], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
<main>: Error: At function: PgSetItem
<main>:1:1: Error: At function: PgWhere
<main>:2:55: Error: At function: PgOp
<main>:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-17T17:53:43.573682Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NmViYjg1MDItMTRjZDEzZjUtMjllMWQwYWEtMTdhNTc3MzE=, ActorId: [14:7494341791941569781:2408], ActorState: ExecuteState, TraceId: 01js2e56kne2s62ygy1t8mfdx3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:53:43.620012Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7494341791941569797:2416], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
<main>: Error: At function: PgSetItem
<main>:1:1: Error: At function: PgWhere
<main>:2:57: Error: At function: PgAnd
<main>:2:67: Error: At function: PgOp
<main>:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-17T17:53:43.620350Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NTIwMTEwYzItNWE1NTc0ZjItOWUxMTMxYmEtNTM3NDg5Nzk=, ActorId: [14:7494341791941569794:2414], ActorState: ExecuteState, TraceId: 01js2e56n52kfev58pmzkqkpk3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:53:43.721334Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2e56pg1bjq3jqe3k9r5j94, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZjAwN2Q5MmQtYTMzY2UyMjctNzMwY2RlZGUtYzQ3NTllYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-17T17:53:43.721676Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZjAwN2Q5MmQtYTMzY2UyMjctNzMwY2RlZGUtYzQ3NTllYTQ=, ActorId: [14:7494341791941569807:2420], ActorState: ExecuteState, TraceId: 01js2e56pg1bjq3jqe3k9r5j94, Create QueryResponse for error on request, msg: 2025-04-17T17:53:43.770916Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:53:43.972662Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T17:53:44.105053Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7494341796236537282:2448], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiWriteTable!
<main>:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-04-17T17:53:44.106421Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZTVjMGRlMjAtNjYzZjRmNWItMTNhMDYzNzctMzEyYWY3NDI=, ActorId: [14:7494341796236537279:2446], ActorState: ExecuteState, TraceId: 01js2e57448thxjsg9tpr32ms6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:53:44.152026Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7494341796236537294:2454], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiWriteTable!
<main>:1:1: Error: Failed to convert type: List> to List>
<main>:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-04-17T17:53:44.155200Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZTM2MGMzNzctZDkxNTNmNzEtNjE1MTA3ZGEtYzlkYmQ2OWM=, ActorId: [14:7494341796236537291:2452], ActorState: ExecuteState, TraceId: 01js2e575r5h4mdtb15kzfyzse, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:53:44.755424Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2e57794n184rx3avgp0pb0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZmNlMjE4ODItMjMxZDZkZjUtNzU3MWI5NS02ODdkN2U4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-17T17:53:44.756261Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZmNlMjE4ODItMjMxZDZkZjUtNzU3MWI5NS02ODdkN2U4Yg==, ActorId: [14:7494341796236537303:2458], ActorState: ExecuteState, TraceId: 01js2e57794n184rx3avgp0pb0, Create QueryResponse for error on request, msg: 2025-04-17T17:53:44.906524Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:53:45.798735Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2025-04-17T17:53:45.834964Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 >> TTableProfileTests::DescribeTableOptions [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> TGRpcAuthentication::NoConnectRights [GOOD] |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> SystemView::PDisksFields [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TBlobStorageProxyTest::TestBlock |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |78.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD] Test command err: 2025-04-17T17:53:16.768486Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341677285683469:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:16.769735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3e/r3tmp/tmpiUrpUK/pdisk_1.dat 2025-04-17T17:53:17.264414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:17.264555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:17.269776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:17.329410Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18277, node 1 2025-04-17T17:53:17.397791Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:53:17.401929Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:53:17.481642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:17.481664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:17.481678Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:17.481788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:17.788844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744919597187814 Nodes { NodeId: 1024 Host: "localhost" Port: 23045 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744919597187814 } Nodes { NodeId: 1 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 2 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 3 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-17T17:53:21.441181Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341696850094611:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:21.441703Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3e/r3tmp/tmp3xmcKJ/pdisk_1.dat 2025-04-17T17:53:21.616350Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13090, node 4 2025-04-17T17:53:21.723417Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:21.723444Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:21.723463Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:21.723583Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:53:21.757494Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:21.757585Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:21.797678Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
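The Expire values in the registration replies above (e.g. Expire: 1744919597187814) are most plausibly microseconds since the Unix epoch; that is an inference from their magnitude, not something the log states. Decoded under that assumption the value lands at 2025-04-17T19:53:17Z, exactly two hours after the 17:53:17Z timestamps around the registration, consistent with a two-hour node lease. A minimal decoder:

    // Decode an Expire field, assuming (not stated in the log) that it is
    // microseconds since the Unix epoch.
    #include <cstdint>
    #include <ctime>
    #include <iostream>

    int main() {
        const std::int64_t expireUs = 1744919597187814LL;  // value from the log
        std::time_t secs = static_cast<std::time_t>(expireUs / 1000000);
        char buf[32];
        std::strftime(buf, sizeof(buf), "%Y-%m-%dT%H:%M:%SZ", std::gmtime(&secs));
        std::cout << buf << std::endl;  // prints 2025-04-17T19:53:17Z
    }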
2025-04-17T17:53:22.044612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744919601609464 Nodes { NodeId: 1024 Host: "localhost" Port: 14237 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744919601609464 } Nodes { NodeId: 4 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 5 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 6 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-17T17:53:26.641152Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341720555631030:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:26.641241Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3e/r3tmp/tmpIuebUx/pdisk_1.dat 2025-04-17T17:53:26.900365Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:26.951231Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:26.951342Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:26.957627Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19990, node 7 2025-04-17T17:53:27.071675Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:27.071701Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:27.071711Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:27.071868Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:27.371076Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8240 2025-04-17T17:53:27.787024Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:27.813858Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:28.338999Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494341727973962117:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:28.361261Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:28.378558Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:28.378649Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:28.391053Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-17T17:53:28.410809Z node 7 :HIVE WARN: HIVE#720575940 ... quests } 2025-04-17T17:53:32.774856Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:7494341745153832380:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:33.886928Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341748540132173:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:33.903062Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3e/r3tmp/tmpOiDhIn/pdisk_1.dat 2025-04-17T17:53:34.193726Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:34.257273Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:34.257370Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:34.261264Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27694, node 10 2025-04-17T17:53:34.380430Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:34.380456Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:34.380466Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:34.380629Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:34.653915Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12613 2025-04-17T17:53:35.059307Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:35.107628Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
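The repeated "Scheduled retry for error" entries around this point (Retry LookupError for table .metadata/workload_manager/delayed_requests) show the workload service backing off while its .metadata tables are still being created, then succeeding once the schema exists. Below is a minimal sketch of that scheduled-retry shape; the attempt count and delays are invented, since the log does not reveal the actual policy.

    // Scheduled retry with exponential backoff, in the spirit of the
    // "Scheduled retry for error" lines above. Parameters are illustrative.
    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    static bool RetryWithBackoff(const std::function<bool()>& attempt,
                                 int maxAttempts,
                                 std::chrono::milliseconds delay) {
        for (int i = 1; i <= maxAttempts; ++i) {
            if (attempt()) {
                return true;
            }
            std::cerr << "Scheduled retry " << i << " in " << delay.count()
                      << "ms" << std::endl;
            std::this_thread::sleep_for(delay);
            delay *= 2;  // exponential backoff
        }
        return false;
    }

    int main() {
        int calls = 0;
        // Stand-in for the metadata lookup that fails with LookupError while
        // the path is still being created, then starts succeeding.
        auto lookup = [&calls] { return ++calls >= 3; };
        std::cout << (RetryWithBackoff(lookup, 5, std::chrono::milliseconds(100))
                          ? "resolved"
                          : "gave up")
                  << std::endl;
    }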
2025-04-17T17:53:35.618353Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494341759868469871:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:35.618428Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:35.705893Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:35.706008Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:35.710664Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-17T17:53:35.711597Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12613 2025-04-17T17:53:36.110722Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12613 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1744912416270 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ke... 
(TRUNCATED) 2025-04-17T17:53:36.675219Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-17T17:53:36.676041Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:41.189869Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341783832426038:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:41.189936Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3e/r3tmp/tmpmJjgoy/pdisk_1.dat 2025-04-17T17:53:41.403705Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:41.445999Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:41.446104Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:41.448895Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21241, node 13 2025-04-17T17:53:41.528281Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:41.528306Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:41.528318Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:41.528517Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:41.878438Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28154 2025-04-17T17:53:42.486046Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:42.544497Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
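Throughout this run the HIVE warnings walk every node through the same volatile-state sequence: Unknown -> Disconnected -> Connecting -> Connected, plus Connected -> Disconnected when TTxStatus kills the node. The sketch below encodes only the transitions visible in this log; the real Hive state machine may well allow others.

    // Volatile-state transitions as observed in the HIVE lines of this log.
    // The table is reconstructed from the log only, not from Hive sources.
    #include <iostream>
    #include <map>
    #include <set>

    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    static bool IsObservedTransition(EVolatileState from, EVolatileState to) {
        static const std::map<EVolatileState, std::set<EVolatileState>> observed = {
            {EVolatileState::Unknown,      {EVolatileState::Disconnected}},
            {EVolatileState::Disconnected, {EVolatileState::Connecting}},
            {EVolatileState::Connecting,   {EVolatileState::Connected}},
            {EVolatileState::Connected,    {EVolatileState::Disconnected}},
        };
        const auto it = observed.find(from);
        return it != observed.end() && it->second.count(to) > 0;
    }

    int main() {
        std::cout << IsObservedTransition(EVolatileState::Connecting,
                                          EVolatileState::Connected)   // 1
                  << IsObservedTransition(EVolatileState::Unknown,
                                          EVolatileState::Connected)   // 0
                  << std::endl;
    }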
2025-04-17T17:53:43.055666Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7494341792931520185:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:43.055828Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:53:43.069379Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:43.069510Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:43.098018Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-17T17:53:43.124948Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28154 2025-04-17T17:53:43.642572Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-17T17:53:43.643111Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:53:44.056662Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:45.057471Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:46.058918Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:47.062843Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> YdbS3Internal::BadRequests [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcAuthentication::NoConnectRights [GOOD] Test command err: 2025-04-17T17:53:09.571867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341646760804203:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:09.571912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a40/r3tmp/tmp6IeKYE/pdisk_1.dat 2025-04-17T17:53:09.988103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:10.012571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:10.012644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:10.017646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected TServer::EnableGrpc on GrpcPort 8356, node 1 2025-04-17T17:53:10.111288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:10.111309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:10.111321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:10.111452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:10.389726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:15990 TClient is connected to server localhost:15990 2025-04-17T17:53:10.990653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:53:12.972335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341659645707131:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:12.972478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:12.973115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341659645707158:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:12.976590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T17:53:13.043576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341659645707160:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T17:53:13.105402Z node 1 :TX_PROXY ERROR: Actor# [1:7494341663940674542:2711] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:15990 2025-04-17T17:53:13.643710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:53:18.221096Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:446:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:18.221433Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:53:18.221493Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a40/r3tmp/tmpejjK9r/pdisk_1.dat 2025-04-17T17:53:18.777448Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:53:18.845289Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:18.845418Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:18.863743Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:19.146846Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [4:1004:2808], Recipient [4:552:2466]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:19.146927Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:19.146962Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:53:19.147047Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [4:1001:2806], Recipient [4:552:2466]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:53:19.147073Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:53:19.213953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:53:19.214214Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:53:19.214305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T17:53:19.214474Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T17:53:19.214694Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T17:53:19.214808Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:53:19.214847Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2025-04-17T17:53:19.214910Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T17:53:19.214958Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:53:19.215022Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T17:53:19.218643Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-17T17:53:19.218849Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2025-04-17T17:53:19.218919Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T17:53:19.218973Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-04-17T17:53:19.219394Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:552:2466], Recipient [4:552:2466]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:53:19.219446Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:53:19.219644Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:53:19.219694Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:53:19.219914Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:53:19.220032Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:53:19.220086Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:776:2622], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-17T17:53:19.220140Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:776:2622], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2025-04-17T17:53:19.220668Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:53:19.220781Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-17T17:53:19.220838Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-17T17:53:19.220894Z node 4 ... 
TxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T17:53:31.087642Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:1639:2457], Recipient [4:552:2466]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037890 TableLocalId: 3 Generation: 1 Round: 2 TableStats { DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2050 LastUpdateTime: 2050 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T17:53:31.087707Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T17:53:31.087787Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 75 rowCount 1 cpuUsage 0 2025-04-17T17:53:31.087946Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2050 LastUpdateTime: 2050 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T17:53:31.088009Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T17:53:31.175257Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2e4teyd0v252scfsfw8azy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Njk1YTFiNzctYTg0YzcwY2ItMjdiYTZjZjEtMWM4YTI2NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:53:32.908767Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494341747213732857:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:32.908827Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a40/r3tmp/tmpNzUDJu/pdisk_1.dat 2025-04-17T17:53:33.175002Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:33.195443Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:33.195526Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:33.203671Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64215, node 6 2025-04-17T17:53:33.330857Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:33.330887Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:33.330894Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:33.331026Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:33.652007Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:11739 2025-04-17T17:53:38.146599Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494341772668921650:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:38.146664Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a40/r3tmp/tmph9fq1o/pdisk_1.dat 2025-04-17T17:53:38.293462Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:38.330099Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:38.330205Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:38.332731Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26306, node 9 2025-04-17T17:53:38.507035Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:38.507073Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:38.507084Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:38.507246Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:38.831032Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:43.484148Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494341795192247184:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:43.484216Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a40/r3tmp/tmpFNmo2y/pdisk_1.dat 2025-04-17T17:53:43.752628Z node 12 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27793, node 12 2025-04-17T17:53:43.904274Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:43.904378Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:43.932879Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:44.035855Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:44.035883Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:44.035893Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:44.036074Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:44.549681Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:16806 >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3a/r3tmp/tmphSCqlI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6970, node 1 TClient is connected to server localhost:18085 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:53:27.452159Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341723288562925:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:27.452244Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3a/r3tmp/tmpZTUplt/pdisk_1.dat 2025-04-17T17:53:27.668012Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29611, node 4 2025-04-17T17:53:27.779639Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:27.779756Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:27.782926Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:27.782950Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:27.782967Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:27.783121Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:53:27.784647Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:53:28.077653Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:30.872011Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341736173465848:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:30.872137Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:31.172541Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:31.385445Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341740468433311:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:31.385548Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:31.386081Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494341740468433316:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:31.390505Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:31.430389Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494341740468433318:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:53:31.535476Z node 4 :TX_PROXY ERROR: Actor# [4:7494341740468433396:2802] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:31.672041Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e4tqr2msbmj1ph5bk083y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmYwMzc0MzAtZWZjNGNiNDYtNDg2NDRmMjQtOWFmNmI1YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:31.730050Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:53:31.825122Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:53:32.054326Z node 4 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:53:33.826584Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341750470900737:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:33.829395Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3a/r3tmp/tmpV5RClT/pdisk_1.dat 2025-04-17T17:53:34.050451Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:34.063265Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:34.063326Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:34.068015Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32410, node 7 2025-04-17T17:53:34.207028Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:34.207054Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:34.207061Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:34.207183Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5842 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:34.470608Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:37.168014Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:39.075975Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341778244051134:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:39.076050Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3a/r3tmp/tmp1X4siE/pdisk_1.dat 2025-04-17T17:53:39.204690Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:39.236756Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:39.236823Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:39.241159Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16384, node 10 2025-04-17T17:53:39.359172Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:39.359198Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:39.359206Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:39.359327Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8216 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:39.638022Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:42.390336Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:44.441546Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341796154955934:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:44.441596Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3a/r3tmp/tmpZBl2fw/pdisk_1.dat 2025-04-17T17:53:44.856304Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:44.882085Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:44.882161Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:44.892290Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13499, node 13 2025-04-17T17:53:45.014951Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:45.014979Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:45.014988Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:45.015121Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16924 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:45.316794Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:48.601191Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbS3Internal::BadRequests [GOOD] Test command err: 2025-04-17T17:53:22.644630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341704709575713:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:22.645280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3b/r3tmp/tmp1Xs1BO/pdisk_1.dat 2025-04-17T17:53:23.146239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:23.146337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:23.153156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29272, node 1 
2025-04-17T17:53:23.192369Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:23.193230Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:53:23.208718Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:53:23.261632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:23.261680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:23.261694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:23.261805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:23.627075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:25.948449Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson 2025-04-17T17:53:27.562996Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341724337610194:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:27.563951Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3b/r3tmp/tmppBFpV7/pdisk_1.dat 2025-04-17T17:53:27.771895Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:27.793465Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:27.793584Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:27.797758Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21773, node 4 2025-04-17T17:53:27.879420Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:27.879448Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:27.879456Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:27.879591Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:28.115651Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:32.821578Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494341744228199681:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:32.822655Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3b/r3tmp/tmpP1dOsD/pdisk_1.dat 2025-04-17T17:53:33.011796Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:33.049940Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:33.050022Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:33.052127Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22054, node 7 2025-04-17T17:53:33.170440Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:33.170465Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:33.170474Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:33.170651Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:33.480129Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:38.064344Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494341769864717359:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:38.064403Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3b/r3tmp/tmpHs0qkS/pdisk_1.dat 2025-04-17T17:53:38.270450Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:38.310447Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:38.310558Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:38.316596Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2867, node 10 2025-04-17T17:53:38.451122Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:38.451146Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:38.451156Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:38.451360Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:38.763542Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:41.781767Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:53:41.786732Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-17T17:53:41.792625Z node 10 :KQP_PROXY DEBUG: TraceId: "01js2e521y3gdgc2asgp0yscx1", Request has 18444999161287.759051s seconds to be completed 2025-04-17T17:53:41.797060Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM= 2025-04-17T17:53:41.797154Z node 10 :KQP_PROXY DEBUG: TraceId: "01js2e521y3gdgc2asgp0yscx1", Created new session, sessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, workerId: [10:7494341782749620279:2332], database: , longSession: 1, local sessions count: 1 2025-04-17T17:53:41.797193Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-17T17:53:41.797406Z node 10 :KQP_PROXY DEBUG: Received create session request, trace_id: 01js2e521y3gdgc2asgp0yscx1 2025-04-17T17:53:41.797464Z node 10 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T17:53:41.797497Z node 10 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T17:53:41.797529Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T17:53:41.797575Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-17T17:53:41.797638Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:53:41.797692Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:53:41.798376Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, ActorId: [10:7494341782749620279:2332], ActorState: unknown state, session actor bootstrapped 2025-04-17T17:53:41.798536Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:53:41.798583Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:53:41.798608Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T17:53:41.823102Z node 10 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, rpc ctrl: [10:7494341782749620280:2333], sameNode: 1, trace_id: 2025-04-17T17:53:41.823148Z node 10 :KQP_PROXY TRACE: Attach local session: [10:7494341782749620279:2332] to rpc: [10:7494341782749620280:2333] on same node 2025-04-17T17:53:41.835875Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, ActorId: [10:7494341782749620279:2332], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T17:53:41.835934Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, ActorId: [10:7494341782749620279:2332], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T17:53:41.835978Z node 10 
:KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, ActorId: [10:7494341782749620279:2332], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T17:53:41.836015Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, ActorId: [10:7494341782749620279:2332], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T17:53:41.836078Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, ActorId: [10:7494341782749620279:2332], ActorState: unknown state, Session actor destroyed 2025-04-17T17:53:41.836406Z node 10 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM=, workerId: [10:7494341782749620279:2332], local sessions count: 0 2025-04-17T17:53:41.848344Z node 10 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [10:7494341782749620283:2335], trace_id: 2025-04-17T17:53:41.848510Z node 10 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=10&id=MzhkOTZhMjgtMzUwYTZmMjUtMjFjNGZlMTItZDgyNmY4MGM= 2025-04-17T17:53:41.848627Z node 10 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [10:7494341782749620283:2335], selfId: [10:7494341769864717581:2280], source: [10:7494341769864717581:2280] 2025-04-17T17:53:43.561068Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341794789093844:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:43.561159Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3b/r3tmp/tmpJXQYO1/pdisk_1.dat 2025-04-17T17:53:43.757830Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:43.812588Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:43.812694Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:43.817125Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15510, node 13 2025-04-17T17:53:43.965831Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:43.965853Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:43.965868Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:43.966005Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31856 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:44.292748Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:44.310607Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:53:47.837385Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:48.567517Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494341794789093844:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:48.567610Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:48.671368Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341816263933350:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.675582Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341816263933326:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.686437Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.697167Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:48.737860Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494341816263933353:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:53:48.807530Z node 13 :TX_PROXY ERROR: Actor# [13:7494341816263933467:4215] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:49.373800Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e5bjsd1na9apxkx20cth5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2Y4ZWZiODctN2YxMTNkMjItYjIwYTE3OGUtMjg2ZTg1MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |78.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> YdbOlapStore::LogNonExistingUserId [GOOD] >> YdbOlapStore::LogPagingBefore >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Cdc::InitialScanAndLimits >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> TGRpcCmsTest::AlterRemoveTest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TGRpcCmsTest::SimpleTenantsTest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-04-17T17:53:48.456569Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bd7/r3tmp/tmpEtWdfc//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-17T17:53:48.470738Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 
BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:50.970867Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bd7/r3tmp/tmpEtWdfc//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-17T17:53:50.978470Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 
HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:52.342501Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bd7/r3tmp/tmpEtWdfc//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-17T17:53:52.381968Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 
LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:53.647256Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bd7/r3tmp/tmpEtWdfc//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-17T17:53:53.650061Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 
LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:55.013794Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bd7/r3tmp/tmpEtWdfc//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-17T17:53:55.026358Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> KqpBatchUpdate::ManyPartitions_1 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> KqpBatchUpdate::MultiStatement |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] |79.0%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::AwsRegion [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> KqpBatchUpdate::Returning >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] Test command err: 2025-04-17T17:53:26.665201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341721555633723:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:26.665316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a39/r3tmp/tmpslxCP5/pdisk_1.dat 2025-04-17T17:53:27.076564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:27.076659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:27.080445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:27.081302Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28621, node 1 2025-04-17T17:53:27.214545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:27.214572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:27.214582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:27.214714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10544 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:27.535851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:29.934958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 SUCCESS 2025-04-17T17:53:30.108656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341738735503894:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:30.108740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341738735503903:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:30.108818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:30.112788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:30.131338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341738735503908:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:53:30.216207Z node 1 :TX_PROXY ERROR: Actor# [1:7494341738735503998:2818] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:30.634940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2e4sfqc9ebx5ey3sht53z5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGU1M2ZhMi1iYjlmZTdhMS0zYzBkOTc1Yi03YjNiMzJlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:30.677115Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912410675, txId: 281474976710661] shutting down 2025-04-17T17:53:30.739580Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:53:30.743908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 BAD_REQUEST 2025-04-17T17:53:30.918761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:53:30.920495Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found SUCCESS 2025-04-17T17:53:31.105848Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T17:53:32.781086Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494341746140887225:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:32.781144Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a39/r3tmp/tmpOulHZA/pdisk_1.dat 2025-04-17T17:53:33.030092Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4022, node 4 2025-04-17T17:53:33.106858Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:33.106952Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:33.126619Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:33.142601Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:33.142711Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:33.142718Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:33.142849Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12586 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:33.373369Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:35.776568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:35.797291Z node 4 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [4:7494341759025790218:2339] 2025-04-17T17:53:35.797544Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:53:35.816520Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:53:35.816616Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:53:35.818199Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:53:35.818240Z node 4 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:53:35.818281Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:53:35.818616Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:53:35.818669Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:53:35.818694Z node 4 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [4:7494341759025790242:2339] in generation 1 2025-04-17T17:53:35.821444Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:53:35.821485Z node 4 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:53:35.821542Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:53:35.821581Z node 4 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [4:7494341759025790245:2340] 2025-04-17T17:53:35.821595Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:53:35.821601Z node 4 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:53:35.821614Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:53:35.821710Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:53:35.821767Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037888 2025-04-17T17:53:35.821783Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:53:35.821796Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:53:35.821813Z node 4 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:53:35.821828Z no ... 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:45.279470Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:48.603436Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341813976763092:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.603535Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.673152Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:48.795893Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341813976763262:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.795988Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.796092Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494341813976763267:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.799607Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:48.821685Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494341813976763269:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:53:48.892633Z node 10 :TX_PROXY ERROR: Actor# [10:7494341813976763340:2808] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:48.990863Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e5bqv8raf34qgsb3nh6m5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzFjZWY3YmQtZjRmYzM2OTAtNWRmNjZlMTEtYWMzMjNlMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:49.113587Z node 10 :TX_PROXY WARN: [AlterTableAddIndex [10:7494341818271730706:2370] TxId# 281474976715663] Access check failed 2025-04-17T17:53:49.158171Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:53:49.300068Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:53:49.452438Z node 10 :TX_PROXY ERROR: [AlterTableAddIndex [10:7494341818271731079:2384] TxId# 281474976715665] Unable to navigate: Root/WrongPath status: PathErrorUnknown 2025-04-17T17:53:49.490287Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494341796796892867:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:49.490362Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:49.659576Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-04-17T17:53:51.582582Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494341825746949753:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:51.582651Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a39/r3tmp/tmpiT5r3X/pdisk_1.dat 2025-04-17T17:53:51.834629Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15733, node 13 2025-04-17T17:53:51.926983Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:51.927098Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:51.957933Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:52.018859Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:52.018890Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:52.018899Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T17:53:52.019052Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:52.336245Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:56.203563Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341847221787339:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:56.203692Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:56.217334Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:56.371179Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341847221787503:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:56.371304Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:56.371722Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341847221787508:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:56.376546Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:56.410976Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494341847221787510:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:53:56.484245Z node 13 :TX_PROXY ERROR: Actor# [13:7494341847221787583:2808] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:56.575061Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2e5k4j9ccej6vqzgy97gta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzFjMDU1OGQtNWI2MjcwZDQtZGJjZTk5OTctOWI3NzgzNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:56.583463Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494341825746949753:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:56.583534Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:56.654339Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:53:56.781050Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:53:56.937114Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2025-04-17T17:53:55.116805Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341845749162452:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:55.117368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001789/r3tmp/tmplsYXxd/pdisk_1.dat 2025-04-17T17:53:55.859290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:55.881341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:55.881440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:55.885950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10900, node 1 2025-04-17T17:53:56.162328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:56.162349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:56.162378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:56.162487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14216 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:56.494796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:56.593728Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7494341850044130518:2314], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-17T17:53:56.593771Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-17T17:53:56.593799Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.593814Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.594305Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-17T17:53:56.594449Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744912436590969) 2025-04-17T17:53:56.594862Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744912436590969 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-17T17:53:56.595045Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-17T17:53:56.597896Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-17T17:53:56.598869Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436590969&action=1" } } } 2025-04-17T17:53:56.599000Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.599057Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T17:53:56.599204Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { 
ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T17:53:56.599665Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-17T17:53:56.599797Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T17:53:56.604721Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-17T17:53:56.604775Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.604844Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7494341850044130523:2210], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.604883Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.604908Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.604917Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.604953Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-17T17:53:56.604968Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-17T17:53:56.605042Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-17T17:53:56.608630Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T17:53:56.608662Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.608670Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.608675Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.608892Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341850044130532:2315], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436590969&action=1" } UserToken: "" } 2025-04-17T17:53:56.608911Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:56.609166Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436590969&action=1" } } 2025-04-17T17:53:56.609222Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-17T17:53:56.609247Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744912436590969 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:56.612147Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T17:53:56.612357Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.612393Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 
2025-04-17T17:53:56.612400Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-17T17:53:56.623916Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-17T17:53:56.625230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T17:53:56.628329Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-17T17:53:56.628391Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-17T17:53:56.635983Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-17T17:53:56.647598Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-17T17:53:56.648024Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912436680 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T17:53:56.648041Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T17:53:56.648067Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainCreated 2025-04-17T17:53:56.648143Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435074, Sender [1:7494341850044130542:2210], Recipient [1:7494341845749162887:221 ... 
9, Sender [1:7494341850044130685:2210], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.676712Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.676734Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.676743Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.676764Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-17T17:53:56.676785Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=2 2025-04-17T17:53:56.677411Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912436680 ParentPathId: 2 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 0 TimeCastBucketsPerMediator: 0 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T17:53:56.677429Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-04-17T17:53:56.677533Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-17T17:53:56.680143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-17T17:53:56.680462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:56.682828Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T17:53:56.682848Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.684047Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-17T17:53:56.684485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-17T17:53:56.685265Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T17:53:56.685308Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with 
NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-17T17:53:56.685431Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-17T17:53:56.685463Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2025-04-17T17:53:56.685535Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7494341850044130623:2210], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-17T17:53:56.685568Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-04-17T17:53:56.685597Z node 1 :CMS_TENANTS DEBUG: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2025-04-17T17:53:56.687034Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341850044130715:2323], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436670727&action=2" } UserToken: "" } 2025-04-17T17:53:56.687047Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:56.687234Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436670727&action=2" } } 2025-04-17T17:53:56.695188Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-04-17T17:53:56.707102Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-04-17T17:53:56.707122Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T17:53:56.707158Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:56.707228Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7494341850044130695:2210], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:56.707252Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:56.707266Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.707274Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.707313Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-17T17:53:56.707345Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744912436670727 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:56.707441Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744912436670727 issue= 2025-04-17T17:53:56.709315Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-17T17:53:56.709387Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-17T17:53:56.709409Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.709556Z node 
1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494341845749162801:2209], Recipient [1:7494341845749162887:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T17:53:56.709571Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T17:53:56.709585Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.709592Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.709612Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-17T17:53:56.709636Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744912436670727 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:56.715840Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T17:53:56.715898Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.715931Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T17:53:56.716080Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T17:53:56.716620Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-17T17:53:56.716689Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-17T17:53:56.720391Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-17T17:53:56.720486Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7494341850044130795:2210], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-17T17:53:56.720512Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-17T17:53:56.720527Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.720540Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.720585Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-17T17:53:56.720606Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-17T17:53:56.729624Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T17:53:56.729656Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.729663Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.729673Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.729751Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744912436670727 2025-04-17T17:53:56.729762Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database 
txid=1744912436670727 issue= 2025-04-17T17:53:56.729777Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744912436670727 issue= 2025-04-17T17:53:56.729791Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-17T17:53:56.729873Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744912436670727 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:56.736150Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-17T17:53:56.736245Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.747264Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341850044130817:2325], Recipient [1:7494341845749162887:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436670727&action=2" } UserToken: "" } 2025-04-17T17:53:56.747294Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:56.747435Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436670727&action=2" ready: true status: SUCCESS } } >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> KqpBatchUpdate::SimplePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: 2025-04-17T17:51:23.147531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:23.147973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:23.148017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001456/r3tmp/tmpLzfTaP/pdisk_1.dat 2025-04-17T17:51:23.649689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:23.700371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:23.740950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:23.741143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:23.757824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:23.855453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:23.910171Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:51:23.910706Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:23.970800Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:23.970949Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:23.972659Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:51:23.972741Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:51:23.972825Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:51:23.973242Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:23.973384Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:23.973482Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:51:23.986816Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:24.042363Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:51:24.042661Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:51:24.042844Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:51:24.042902Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:24.042946Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:51:24.043029Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:24.043601Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:51:24.043712Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:51:24.043818Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:24.043856Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:24.043905Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:51:24.043954Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:24.044081Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:51:24.044603Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:51:24.044933Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:51:24.045028Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:51:24.047512Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:24.058511Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:51:24.058645Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:51:24.248633Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:51:24.253818Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:51:24.253922Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:24.259998Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:24.260083Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:51:24.260153Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:51:24.260479Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:51:24.260657Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:51:24.261634Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:24.261734Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:51:24.266016Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:51:24.266520Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T17:51:24.268160Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T17:51:24.268228Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:24.268853Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T17:51:24.268929Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:24.282370Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:24.282452Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:24.282533Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:51:24.282653Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:51:24.282727Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T17:51:24.282849Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:24.284325Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:684:2580][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-17T17:51:24.288563Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:24.299343Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:51:24.300104Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T17:51:24.300176Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:51:29.357616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:29.357963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:29.362428Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001456/r3tmp/tmpqr28Nm/pdisk_1.dat 2025-04-17T17:51:29.885670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:29.935098Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:29.977403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:29.977550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:29.991096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:51:30.088111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:30.123909Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:677:2578] 2025-04-17T17:51:30.124155Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:30.177535Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:30.177685Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:30.179646Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:51:30.179741Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:51:30.179807Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:51:30.180213Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:30.180429Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-0 ... 
:53:58.058238Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message topic: Table/Stream2/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 2 partNo : 0 messageNo: 1 size 323 offset: -1 2025-04-17T17:53:58.058425Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 2 partNo 0 2025-04-17T17:53:58.059255Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 438 count 1 nextOffset 1 batches 1 2025-04-17T17:53:58.059752Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream2/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 426 WTime 2505 2025-04-17T17:53:58.059890Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:53:58.059924Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:53:58.059960Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:53:58.059993Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:53:58.060024Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] m0000000000p72075186224037888 2025-04-17T17:53:58.060053Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000001_00000| 2025-04-17T17:53:58.060082Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:53:58.060114Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:53:58.060144Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:53:58.060276Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:53:58.060506Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 427 2025-04-17T17:53:58.060617Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:53:58.060683Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 426 2025-04-17T17:53:58.062428Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 427 actorID [21:793:2658] 2025-04-17T17:53:58.062738Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 size 427 2025-04-17T17:53:58.063058Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 426 actorID [21:977:2770] 2025-04-17T17:53:58.063172Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 size 426 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-17T17:53:58.064804Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:53:58.064921Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-17T17:53:58.065910Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 0 max time lag 0ms effective offset 0 2025-04-17T17:53:58.066014Z node 21 :PERSQUEUE DEBUG: waiting read cookie 0 partition 0 user $without_consumer offset 0 count 10000 size 26214400 timeout 0 2025-04-17T17:53:58.066166Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:53:58.066310Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 0 partition 0 read timeout for $without_consumer offset 0 2025-04-17T17:53:58.066459Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:53:58.078758Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 341 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:53:58.078975Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:53:58.079165Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream2/streamImpl', Partition: 0, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2025-04-17T17:53:58.079627Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 342 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:53:58.079690Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:53:58.079748Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream1/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-17T17:53:58.079939Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-17T17:53:58.080067Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-17T17:53:58.080291Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T17:53:58.080480Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T17:53:58.080603Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-17T17:53:58.080740Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-17T17:53:58.080890Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-04-17T17:53:58.080982Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:53:58.081443Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037891][21:1173:2812] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 2 Offset: 0 WriteTimestampMS: 2505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-17T17:53:58.081584Z node 21 :PERSQUEUE DEBUG: Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp done, result 2505 queuesize 0 startOffset 0 2025-04-17T17:53:58.081780Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][21:1172:2713] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 1 Offset: 0 WriteTimestampMS: 2505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-17T17:53:58.081919Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:1030:2812] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-17T17:53:58.082107Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:875:2713] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-17T17:53:58.084278Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-17T17:53:58.084399Z node 21 
:TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-04-17T17:53:58.101611Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 1, at tablet# 72075186224037888 2025-04-17T17:53:58.101837Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-17T17:53:58.101922Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037888 2025-04-17T17:53:58.115132Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-17T17:53:58.410406Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:53:58.410481Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-17T17:53:58.410641Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-17T17:53:58.410750Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-17T17:53:58.410912Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-17T17:53:58.411006Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:53:58.411779Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2025-04-17T17:53:58.415645Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:53:58.415783Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-17T17:53:58.416776Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-17T17:53:58.416907Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-17T17:53:58.417061Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-17T17:53:58.417151Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:53:58.419695Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> TTopicApiDescribes::DescribeConsumer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> TTopicApiDescribes::GetPartitionDescribe |79.1%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |79.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-04-17T17:54:01.192678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:01.193205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:01.193263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001929/r3tmp/tmp18KnD0/pdisk_1.dat 2025-04-17T17:54:01.909735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:01.973330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:02.019800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:02.019961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:02.035239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:02.138390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:02.588618Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-17T17:54:02.588755Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-17T17:54:02.697288Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.108145s, errors=0 2025-04-17T17:54:02.697388Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14686, MsgBus: 6722 2025-04-17T17:51:29.358852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341218586376833:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:29.359129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014dd/r3tmp/tmpzJTYek/pdisk_1.dat 2025-04-17T17:51:29.871079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:29.876920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:29.877064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:29.879451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14686, node 1 2025-04-17T17:51:30.057518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:30.057545Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:30.057563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:30.057667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6722 TClient is connected to server localhost:6722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:31.076608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:51:31.092247Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2025-04-17T17:51:33.457463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:33.876492Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:33.892178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:51:33.969612Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:34.009888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341240061213992:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:34.010035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:34.010711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341240061214004:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:34.019415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:51:34.034675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341240061214006:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:51:34.101407Z node 1 :TX_PROXY ERROR: Actor# [1:7494341240061214057:2458] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:51:34.350489Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341218586376833:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:34.350559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; f f t t 18 2025-04-17T17:51:34.724476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:51:34.845789Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:34.861679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-04-17T17:51:35.250135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T17:51:35.309251Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:35.317171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:51:35.395350Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-04-17T17:51:35.981269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T17:51:36.101384Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:36.109427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T17:51:36.173149Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-04-17T17:51:36.587360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:51:36.657920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T17:51:36.708479Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2025-04-17T17:51:37.129352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T17:51:37.200364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T17:51:37.265635Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2025-04-17T17:51:37.654643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-17T17:51:37.711153Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:37.717036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-17T17:51:37.790730Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2025-04-17T17:51:38.268546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-17T17:51:38.350072Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:38.356521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-17T17:51:38.445310Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2025-04-17T17:51:38.854532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T17:51:38.914712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2025-04-17T17:51:39.293054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:51:39.387732Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T17:51:39.391715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-17T17:51:39.490047Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill varchar 0 varchar 0 varchar 1 ... 
age: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:39.455939Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:39.853461Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4592, MsgBus: 20109 2025-04-17T17:53:41.812683Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494341786569784743:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:41.812732Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014dd/r3tmp/tmpWD3RmK/pdisk_1.dat 2025-04-17T17:53:42.008592Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:42.052164Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:42.052298Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:42.054502Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4592, node 11 2025-04-17T17:53:42.126763Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:42.126798Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:42.126814Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:42.127014Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20109 TClient is connected to server localhost:20109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:42.964956Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:53:42.981139Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:53:46.818373Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494341786569784743:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:46.818489Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:47.690385Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341812339589027:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:47.690564Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:47.691126Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494341812339589059:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:47.697130Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:47.713559Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494341812339589061:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:53:47.808418Z node 11 :TX_PROXY ERROR: Actor# [11:7494341812339589116:2343] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:47.863309Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:53:48.695524Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11745, MsgBus: 17660 2025-04-17T17:53:51.030750Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494341829638749434:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:51.031455Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014dd/r3tmp/tmpNe8UeD/pdisk_1.dat 2025-04-17T17:53:51.303560Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:51.350971Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:51.351097Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:51.353207Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11745, node 12 2025-04-17T17:53:51.422892Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:51.422921Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:51.422935Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:51.423113Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17660 TClient is connected to server localhost:17660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:53:52.465472Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:52.477919Z node 12 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:53:56.030981Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7494341829638749434:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:56.031098Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:57.865322Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494341855408553873:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:57.865449Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:57.865785Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494341855408553885:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:57.873166Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:57.900760Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494341855408553887:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:53:57.995596Z node 12 :TX_PROXY ERROR: Actor# [12:7494341855408553938:2346] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:58.057596Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 >> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2025-04-17T17:53:52.748799Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bdc/r3tmp/tmpwpZ0ch//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-17T17:53:52.758216Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 
LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip# 0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:54.397208Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bdc/r3tmp/tmpwpZ0ch//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-17T17:53:54.439898Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0
LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip# 0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:56.468513Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bdc/r3tmp/tmpwpZ0ch//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-17T17:53:56.492024Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true
EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip# 0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:53:58.817092Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bdc/r3tmp/tmpwpZ0ch//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 4 2025-04-17T17:53:58.817276Z :BS_LOCALRECOVERY CRIT: PDiskId# 4 VDISK[0:_:0:3:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip# 0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:54:01.032401Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bdc/r3tmp/tmpwpZ0ch//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 5 2025-04-17T17:54:01.032775Z :BS_LOCALRECOVERY CRIT: PDiskId# 5 VDISK[0:_:0:4:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# 
[18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T17:54:02.946020Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bdc/r3tmp/tmpwpZ0ch//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 6 2025-04-17T17:54:03.002310Z :BS_LOCALRECOVERY CRIT: PDiskId# 6 VDISK[0:_:0:5:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip# 0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0
HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2025-04-17T17:53:55.180467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341846870138228:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:55.180593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001766/r3tmp/tmpkRbdnf/pdisk_1.dat 2025-04-17T17:53:55.902224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:55.902319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:55.907124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:55.923241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13904, node 1 2025-04-17T17:53:56.001414Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:53:56.001444Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:53:56.299411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:56.299438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:56.299450Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:56.299584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:53:56.734223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:56.842163Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7494341851165106187:2314], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-17T17:53:56.843752Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-17T17:53:56.843784Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.843798Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.843948Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-17T17:53:56.844084Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744912436842202) 2025-04-17T17:53:56.844652Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744912436842202 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-17T17:53:56.845422Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-17T17:53:56.850861Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-17T17:53:56.851837Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436842202&action=1" } } } 2025-04-17T17:53:56.851989Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.852052Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T17:53:56.852191Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T17:53:56.852716Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-17T17:53:56.852865Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T17:53:56.863301Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-17T17:53:56.863360Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.863447Z node 1 :CMS_TENANTS TRACE: StateWork, 
received event# 2146435079, Sender [1:7494341851165106192:2198], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.863467Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:56.863508Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.863518Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.863568Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-17T17:53:56.865712Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-17T17:53:56.865785Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-17T17:53:56.890871Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341851165106199:2315], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436842202&action=1" } UserToken: "" } 2025-04-17T17:53:56.890923Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:56.891138Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912436842202&action=1" } } 2025-04-17T17:53:56.891572Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T17:53:56.891611Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:56.891629Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.891637Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:56.891714Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-17T17:53:56.891735Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744912436842202 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:56.902851Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T17:53:56.903081Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:56.903119Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-17T17:53:56.903133Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-17T17:53:56.907730Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-17T17:53:56.909332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T17:53:56.926614Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 
281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-17T17:53:56.926679Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-17T17:53:56.929848Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-17T17:53:56.945877Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-17T17:53:56.946360Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912436974 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T17:53:56.946370Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T17:53:56.946405Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainCr ... 
nse { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912438363902&action=2" } } 2025-04-17T17:53:58.491414Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-04-17T17:53:58.491436Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T17:53:58.491482Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:58.491543Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7494341859755041445:2198], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:58.491558Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:58.491572Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:58.491579Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:58.491615Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-17T17:53:58.491635Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744912438363902 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:58.491693Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744912438363902 issue= 2025-04-17T17:53:58.492911Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-17T17:53:58.499714Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-17T17:53:58.499803Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-17T17:53:58.499819Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:58.500019Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494341846870138459:2194], Recipient [1:7494341846870138570:2198]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T17:53:58.500032Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T17:53:58.500047Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:58.500059Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:58.500079Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-17T17:53:58.500112Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744912438363902 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:58.504556Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-17T17:53:58.504583Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-17T17:53:58.504594Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-17T17:53:58.504604Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-17T17:53:58.504614Z node 3 :HIVE WARN: 
HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-17T17:53:58.504628Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-17T17:53:58.504648Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-17T17:53:58.504665Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-17T17:53:58.504678Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-17T17:53:58.511605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-17T17:53:58.552311Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341859755041516:2412], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912438363902&action=2" } UserToken: "" } 2025-04-17T17:53:58.552340Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:58.552509Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912438363902&action=2" } } 2025-04-17T17:53:58.572299Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-17T17:53:58.583499Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T17:53:58.583562Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:58.583587Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T17:53:58.583703Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T17:53:58.584584Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-17T17:53:58.584683Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-17T17:53:58.592791Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-17T17:53:58.592895Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7494341859755041552:2198], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-17T17:53:58.592921Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-17T17:53:58.592933Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:58.592942Z node 1 :CMS_TENANTS TRACE: 
TTxProcessor(tenants) starts new tx 2025-04-17T17:53:58.592972Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-17T17:53:58.592990Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-17T17:53:58.625628Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341859755041568:2415], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912438363902&action=2" } UserToken: "" } 2025-04-17T17:53:58.625678Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:58.625832Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912438363902&action=2" } } 2025-04-17T17:53:58.634854Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T17:53:58.634894Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:58.634903Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:58.634909Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:58.634963Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744912438363902 2025-04-17T17:53:58.634974Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744912438363902 issue= 2025-04-17T17:53:58.634986Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744912438363902 issue= 2025-04-17T17:53:58.635004Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-17T17:53:58.635090Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744912438363902 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:58.666893Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-17T17:53:58.666979Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:58.683259Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341859755041575:2417], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912438363902&action=2" } UserToken: "" } 2025-04-17T17:53:58.683288Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:58.683441Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912438363902&action=2" ready: true status: SUCCESS } } 2025-04-17T17:53:58.685747Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494341859755041578:2419], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-04-17T17:53:58.685773Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T17:53:58.685901Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true 
status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-04-17T17:53:58.734765Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7494341859755041581:2420], Recipient [1:7494341846870138570:2198]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-04-17T17:53:58.734802Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-04-17T17:53:58.735016Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-04-17T17:53:58.740543Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T17:53:58.740735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:54:02.037753Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494341852055247704:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:02.037840Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-04-17T17:53:55.670360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341846812840911:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:55.670434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001775/r3tmp/tmpjvqqxC/pdisk_1.dat 2025-04-17T17:53:56.356551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:56.356642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:56.357030Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:56.369031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2102, node 1 2025-04-17T17:53:56.592508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:56.592525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:56.592531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:56.592647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11414 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:56.917174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:11414 2025-04-17T17:53:57.249362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:57.362354Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7494341855402776325:2314], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-17T17:53:57.362400Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-17T17:53:57.362456Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:57.362472Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:57.362607Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2025-04-17T17:53:57.362792Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744912437360748) 2025-04-17T17:53:57.363338Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744912437360748 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-17T17:53:57.363528Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-17T17:53:57.373508Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-17T17:53:57.374400Z node 1 :CMS_TENANTS TRACE: Send: 
NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912437360748&action=1" } } } 2025-04-17T17:53:57.374524Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:57.374585Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T17:53:57.374706Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T17:53:57.378946Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-17T17:53:57.379073Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T17:53:57.384460Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341855402776336:2315], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912437360748&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-17T17:53:57.384489Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:57.384724Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912437360748&action=1" } } 2025-04-17T17:53:57.388395Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-17T17:53:57.388449Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:57.388519Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7494341855402776330:2199], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:57.388534Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-17T17:53:57.388547Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:57.388555Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:57.388589Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-17T17:53:57.388621Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-17T17:53:57.388671Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-17T17:53:57.392487Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T17:53:57.392520Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:57.392528Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:57.392534Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new 
tx 2025-04-17T17:53:57.392591Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-17T17:53:57.392618Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744912437360748 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T17:53:57.397159Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T17:53:57.397557Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:57.397590Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-17T17:53:57.397598Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-17T17:53:57.402249Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" DatabaseName: "Root" 2025-04-17T17:53:57.403508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-17T17:53:57.407177Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-17T17:53:57.407242Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715659 2025-04-17T17:53:57.417367Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715659 2025-04-17T17:53:57.448813Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2025-04-17T17:53:57.449361Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744912437464 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user-1@builtin" ACL: "" EffectiveACL: "\n\032\010\001\020\377\377\003\032\016user-1@builtin \003(\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } Pa ... 
processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:59.456070Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } } 2025-04-17T17:53:59.463450Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715663 2025-04-17T17:53:59.513420Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-17T17:53:59.514084Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341863992711864:2452], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } UserToken: "" } 2025-04-17T17:53:59.514111Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:59.514264Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } } 2025-04-17T17:53:59.535516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-17T17:53:59.560519Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715663 2025-04-17T17:53:59.560542Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T17:53:59.560580Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:59.560644Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7494341863992711810:2199], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:59.560671Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-17T17:53:59.560688Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:59.560696Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:59.560736Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-17T17:53:59.560756Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744912439432391 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-17T17:53:59.560809Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744912439432391 issue=AccessDenied: Access denied for request 2025-04-17T17:53:59.561823Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-17T17:53:59.561876Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-17T17:53:59.561887Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-17T17:53:59.561897Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 
72075186224037890 not found 2025-04-17T17:53:59.561909Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-17T17:53:59.561919Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-17T17:53:59.561928Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-17T17:53:59.561938Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-17T17:53:59.561947Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-17T17:53:59.568474Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-17T17:53:59.596965Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341863992711901:2454], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } UserToken: "" } 2025-04-17T17:53:59.596991Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:59.597121Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } } 2025-04-17T17:53:59.612060Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-17T17:53:59.612161Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-17T17:53:59.612176Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:59.612359Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494341851107808584:2195], Recipient [1:7494341851107808734:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T17:53:59.612375Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T17:53:59.612393Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:59.612401Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:59.612428Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-17T17:53:59.612448Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744912439432391 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-17T17:53:59.618667Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T17:53:59.618726Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:59.622337Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T17:53:59.622447Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T17:53:59.622899Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: 
"/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-04-17T17:53:59.622964Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-04-17T17:53:59.637474Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-04-17T17:53:59.637599Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7494341863992711921:2199], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-17T17:53:59.637641Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-17T17:53:59.637656Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:59.637665Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:59.637704Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-17T17:53:59.637721Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-17T17:53:59.650963Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T17:53:59.650992Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T17:53:59.650999Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:59.651006Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T17:53:59.651056Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744912439432391 2025-04-17T17:53:59.651069Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744912439432391 issue=AccessDenied: Access denied for request 2025-04-17T17:53:59.651085Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744912439432391 issue=AccessDenied: Access denied for request 2025-04-17T17:53:59.651095Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-17T17:53:59.651145Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744912439432391 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-17T17:53:59.659906Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494341863992711933:2457], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } UserToken: "" } 2025-04-17T17:53:59.659936Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:59.660088Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } } 2025-04-17T17:53:59.687599Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-17T17:53:59.687641Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T17:53:59.734392Z node 1 :CMS_TENANTS TRACE: StateWork, 
received event# 273285131, Sender [1:7494341863992711942:2460], Recipient [1:7494341851107808734:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" } UserToken: "" } 2025-04-17T17:53:59.734421Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T17:53:59.734573Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744912439432391&action=2" ready: true status: SUCCESS } } 2025-04-17T17:53:59.746562Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T17:53:59.746729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T17:54:02.518292Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494341854725280246:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:02.518363Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-04-17T17:54:01.206010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:01.211195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:01.211272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00193c/r3tmp/tmpAr89nL/pdisk_1.dat 2025-04-17T17:54:02.001952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:02.064611Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:02.111908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:02.112089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:02.126012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:02.230816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:02.669045Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-17T17:54:02.669181Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-17T17:54:02.673173Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-04-17T17:54:02.673256Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T17:54:02.673314Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T17:54:02.673371Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T17:54:02.673403Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T17:54:02.673430Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T17:54:02.676560Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=ODg0ODI3ZTItZjdiZDhhZjEtZjQzZDk5ZTItMjlkODBiY2I= 2025-04-17T17:54:02.678512Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=NGVhMWJjNjUtMWYwMzY0NDctMzY5MmU2MGEtZjFkYWUxOTc= 2025-04-17T17:54:02.680045Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=ZmJmYjgyMGEtZWIyYWY5YTEtODYyMjZhYjktZTdhODc1MTE= 2025-04-17T17:54:02.681439Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=MmNhZDU5YmUtMWFlOTRiZGUtY2RlZGU3NmUtNjZjZDM3NzM= 2025-04-17T17:54:02.683139Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=ZDg2MzBiYTAtODUyNjUyNjMtNzM1ZmNjODItZmEzNTI0Yzk= 2025-04-17T17:54:02.687790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:02.687931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:02.688003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:02.688065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:02.688123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:02.693451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:02.693562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:02.701282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:54:02.832623Z node 1 :TX_PROXY ERROR: Actor# [1:792:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:54:02.833379Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:54:02.834101Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:54:02.834488Z node 1 :TX_PROXY ERROR: Actor# [1:802:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:54:03.051545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:54:03.051652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:54:03.051728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:54:03.051819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:54:03.051905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:54:03.089748Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:03.899905Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1744912443.899853s, errors=0 2025-04-17T17:54:03.900345Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744912443899 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:54:03.914573Z node 1 :TX_PROXY ERROR: Actor# [1:973:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:03.989263Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1744912443.989209s, errors=0 2025-04-17T17:54:03.989590Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744912443989 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:54:04.005199Z node 1 :TX_PROXY ERROR: Actor# [1:1024:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:04.074668Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1744912444.074622s, errors=0 2025-04-17T17:54:04.074997Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744912444074 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:54:04.088925Z node 1 :TX_PROXY ERROR: Actor# [1:1075:2823] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:04.163780Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1744912444.163736s, errors=0 2025-04-17T17:54:04.164137Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744912444163 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:54:04.181860Z node 1 :TX_PROXY ERROR: Actor# [1:1126:2845] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:04.248473Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1744912444.248392s, errors=0 2025-04-17T17:54:04.248805Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744912444248 OperationsOK: 4 OperationsError: 0 } 2025-04-17T17:54:04.248876Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.575966s, oks# 20, errors# 0 2025-04-17T17:54:04.248999Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> KqpBatchUpdate::MultiStatement [GOOD] >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx >> TIcNodeCache::GetNodesInfoTest >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanUpdatedRows |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> KqpBatchUpdate::Returning [GOOD] >> TTopicApiDescribes::GetLocalDescribe >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 12884, MsgBus: 27873 2025-04-17T17:53:59.101219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341861033456156:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:59.101266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ca9/r3tmp/tmpXZZLSG/pdisk_1.dat 2025-04-17T17:53:59.610544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:59.610654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:59.613813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-17T17:53:59.657490Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12884, node 1 2025-04-17T17:53:59.901730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:59.901764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:59.901775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:59.901906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27873 TClient is connected to server localhost:27873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:01.064653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:01.129187Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:54:01.149293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:01.364690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:01.588094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:01.660275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:03.457316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341878213326902:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:03.457431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:03.793986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:03.831563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:03.912799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:03.962863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:03.992969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.046797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.101903Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341861033456156:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:04.101968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:04.106398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.220814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341882508294870:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:04.220900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:04.221272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341882508294875:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:04.225137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:04.241908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341882508294877:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:04.307110Z node 1 :TX_PROXY ERROR: Actor# [1:7494341882508294932:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:06.587204Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341891098229883:2525], status: GENERIC_ERROR, issues:
:5:32: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:54:06.587855Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmVlZDNkNDgtMjQwZjk2ZjctZjFlODFjNy0yY2RiOWU5NA==, ActorId: [1:7494341891098229874:2519], ActorState: ExecuteState, TraceId: 01js2e5x1wcekqgg8g2fy3h1yz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:54:06.602280Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341891098229895:2527], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:54:06.603399Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmVlZDNkNDgtMjQwZjk2ZjctZjFlODFjNy0yY2RiOWU5NA==, ActorId: [1:7494341891098229874:2519], ActorState: ExecuteState, TraceId: 01js2e5x414q0qrs54px2r6epw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:54:06.626216Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341891098229899:2529], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:54:06.627409Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmVlZDNkNDgtMjQwZjk2ZjctZjFlODFjNy0yY2RiOWU5NA==, ActorId: [1:7494341891098229874:2519], ActorState: ExecuteState, TraceId: 01js2e5x4j84cxks0w8n13bwyr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:54:06.652089Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341891098229928:2531], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:54:06.652203Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmVlZDNkNDgtMjQwZjk2ZjctZjFlODFjNy0yY2RiOWU5NA==, ActorId: [1:7494341891098229874:2519], ActorState: ExecuteState, TraceId: 01js2e5x5j5mtrsh3xddp4ft9a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:54:06.667884Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341891098229932:2533], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T17:54:06.668102Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmVlZDNkNDgtMjQwZjk2ZjctZjFlODFjNy0yY2RiOWU5NA==, ActorId: [1:7494341891098229874:2519], ActorState: ExecuteState, TraceId: 01js2e5x625rpj4fg08bq03ajh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TTxDataShardUploadRows::TestUploadRows >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |79.2%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 24901, MsgBus: 11329 2025-04-17T17:54:00.198672Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341864770113918:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:00.198713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d2b/r3tmp/tmpVtZEOB/pdisk_1.dat 2025-04-17T17:54:00.903301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:00.907852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:00.907949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:00.910659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24901, node 1 2025-04-17T17:54:01.034677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:01.034699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:01.034709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:01.034845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11329 TClient is connected to server localhost:11329 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:01.670894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:01.698632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:01.910645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:02.097918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:02.164844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:04.427914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341881949984822:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:04.428036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:05.020976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:05.186785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:05.198392Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341864770113918:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:05.198455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:05.269176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:05.373193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:05.430042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:05.521973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:05.659032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:05.772078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341886244952799:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:05.772195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:05.772668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341886244952804:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:05.776789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:05.791102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341886244952806:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:05.887361Z node 1 :TX_PROXY ERROR: Actor# [1:7494341886244952863:3568] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:07.203913Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341894834887810:2523], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with RETURNING 2025-04-17T17:54:07.205136Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA5YmY4MjItN2M5MzRlMjktZWNlODQ3MC1iOTY4MDgxNg==, ActorId: [1:7494341894834887799:2516], ActorState: ExecuteState, TraceId: 01js2e5xppadxedzew2pad5jrc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |79.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> TBlobStorageProxyTest::TestSingleFailure [GOOD] |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> TColumnShardTestSchema::HotTiersAfterTtl >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |79.2%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-04-17T17:54:08.268322Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001bbe/r3tmp/tmpCeOOXT//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-17T17:54:08.321730Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> TColumnShardTestSchema::ForgetAfterFail >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> TColumnShardTestSchema::ColdTiers >> TWebLoginService::AuditLogLoginSuccess >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |79.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::TestExternalLogin >> SystemView::ShowCreateTableTemporary [GOOD] >> SystemView::StoragePoolsFields >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2025-04-17T17:53:27.154427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341724938380944:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:27.154483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a38/r3tmp/tmpgxY8UH/pdisk_1.dat 2025-04-17T17:53:27.588024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:27.599475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:27.599598Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:27.606533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14032, node 1 2025-04-17T17:53:27.767509Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:27.767534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:27.767543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:27.767661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:28.082096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8086 2025-04-17T17:53:28.463099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
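The ESchemeOpCreateColumnStore proposal just above is the schema setup for the OLAP bulk-upsert test whose output this block reproduces. As a rough sketch of how a comparable column-oriented table is created through the public YDB C++ SDK — the endpoint, database, table path, and column set below are hypothetical, not taken from this run, and SDK header paths vary between releases:

```cpp
// Sketch under stated assumptions: creates a column table of the kind the
// OLAP tests exercise. Connection details and schema are hypothetical.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <iostream>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2136")  // hypothetical endpoint
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);

    auto session = client.CreateSession().GetValueSync().GetSession();
    // STORE = COLUMN requests a column-oriented table, the path serviced by
    // the ESchemeOpCreateColumnStore/ColumnTable operations seen in this log.
    auto status = session.ExecuteSchemeQuery(R"(
        CREATE TABLE `/Root/OlapStore/Logs` (
            timestamp   Timestamp NOT NULL,
            resource_id Utf8      NOT NULL,
            message     Utf8,
            PRIMARY KEY (timestamp, resource_id)
        ) WITH (STORE = COLUMN);
    )").GetValueSync();

    if (!status.IsSuccess()) {
        std::cerr << status.GetIssues().ToString().c_str() << std::endl;
    }
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}
```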
2025-04-17T17:53:28.617527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:53:28.655855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:53:28.656022Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-04-17T17:53:28.665930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:53:28.666163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:53:28.666449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:53:28.666535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:53:28.666623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:53:28.666702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:53:28.666759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:53:28.666815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:53:28.666924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:53:28.667002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:53:28.667142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:53:28.667303Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494341729233349307:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:53:28.669909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:53:28.707463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:53:28.707700Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-17T17:53:28.716448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:53:28.716541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:53:28.716993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:53:28.717161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:53:28.717340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:53:28.717523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:53:28.717691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:53:28.717847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:53:28.718019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:53:28.718159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:53:28.718520Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:53:28.718692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341729233349290:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:53:28.723405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7494341729233349288:2320];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:53:28.760191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7494341729233349288:2320];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:53:28.760356Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-17T17:53:28.771456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341729233349288:2320];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:53:28.771526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341729233349288:2320];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:53:28.771791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341729233349288:2320];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:53:28.771992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[ ... 
37888 loaded tx from db 1744912451968:281474976715658 keys extracted: 0 2025-04-17T17:54:11.935348Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:11.935494Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:11.935637Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:11.935693Z node 13 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:54:11.936211Z node 13 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:54:11.936618Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:11.941767Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744912451967 2025-04-17T17:54:11.941808Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:11.946978Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744912451968} 2025-04-17T17:54:11.947039Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:11.947079Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:11.947114Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:11.947144Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:54:11.947197Z node 13 :TX_DATASHARD DEBUG: Complete [1744912451968 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7494341895164268010:2200], exec latency: 1 ms, propose latency: 11 ms 2025-04-17T17:54:11.947226Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-17T17:54:11.947274Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:11.947368Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744912451975 2025-04-17T17:54:11.953111Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-17T17:54:11.953170Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-04-17T17:54:12.001263Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:7494341912344138076:2758], serverId# [13:7494341912344138077:2759], sessionId# [0:0:0] 2025-04-17T17:54:12.001415Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-17T17:54:12.022307Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-17T17:54:12.022354Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 SUCCESS Upsert done: 0.062547s 2025-04-17T17:54:12.041392Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341916639105381:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.041499Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494341916639105386:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.041566Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.046889Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:54:12.059643Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:12.086273Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:12.089719Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494341916639105395:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:54:12.174300Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494341895164267652:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:12.174386Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:12.190828Z node 13 :TX_PROXY ERROR: Actor# [13:7494341916639105466:2819] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:12.339229Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:54:12.339350Z node 13 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715661 at tablet 72075186224037888 2025-04-17T17:54:12.356626Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:54:12.364715Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715661 at step 1744912452402 at tablet 72075186224037888 { Transactions { TxId: 281474976715661 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912452402 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:54:12.364747Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:12.364886Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:12.364903Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:12.364927Z node 13 :TX_DATASHARD DEBUG: Found ready operation [1744912452402:281474976715661] in PlanQueue unit at 72075186224037888 2025-04-17T17:54:12.365090Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744912452402:281474976715661 keys extracted: 0 2025-04-17T17:54:12.365415Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:12.372905Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744912452402} 2025-04-17T17:54:12.372958Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:12.373005Z node 13 :TX_DATASHARD DEBUG: Complete [1744912452402 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7494341916639105500:2836], exec latency: 0 ms, propose latency: 7 ms 2025-04-17T17:54:12.373032Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:12.375406Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e62e2cc9sr1jx3r70bcpq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZmFmNTE2ZWMtMjMxZjA0Mi1lODhjMTE3LTU1OTljNTRk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:54:12.381655Z node 13 :TX_DATASHARD INFO: Start scan, at: [13:7494341916639105532:2149], tablet: [13:7494341912344137974:2340], scanId: 4, table: /Root/LogsX, gen: 1, deadline: 2025-04-17T18:04:12.379212Z 2025-04-17T17:54:12.381769Z node 13 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [13:7494341916639105532:2149], scanId: 4, table: /Root/LogsX, gen: 1, tablet: [13:7494341912344137974:2340], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2025-04-17T17:54:12.381790Z node 13 :TX_DATASHARD DEBUG: Wakeup driver at: [13:7494341916639105532:2149] 2025-04-17T17:54:12.383265Z node 13 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2025-04-17T17:54:12.383301Z node 13 :TX_DATASHARD DEBUG: TableRanges is over, at: [13:7494341916639105532:2149], scanId: 4, table: /Root/LogsX 2025-04-17T17:54:12.383344Z node 13 :TX_DATASHARD DEBUG: Finish scan, at: [13:7494341916639105532:2149], scanId: 4, table: /Root/LogsX, reason: 0, abortEvent: 2025-04-17T17:54:12.383379Z node 13 :TX_DATASHARD DEBUG: Send ScanData, from: [13:7494341916639105532:2149], to: [13:7494341916639105525:2360], scanId: 4, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2025-04-17T17:54:12.383647Z node 13 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T17:54:12.383746Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:12.383770Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:12.383787Z node 13 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:12.383836Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:12.396658Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912452402, txId: 281474976715661] shutting down 2025-04-17T17:54:12.958421Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e62t711x3wrnh294vmfxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2JjODFlNjktNzljYWU3M2QtMmZjMTI5YzctYzA1NDU5ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8016;columns=9; 2025-04-17T17:54:13.010142Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-04-17T17:54:13.025481Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLogout >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2025-04-17T17:54:07.712453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:07.712891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:07.712940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d6/r3tmp/tmptu9k8g/pdisk_1.dat 2025-04-17T17:54:08.104629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:08.172455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:08.223090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:08.223210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:08.239369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:08.336787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:08.379700Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:54:08.380774Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:54:08.381273Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:54:08.381517Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:08.430626Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:54:08.431404Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:08.431496Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:08.433146Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:54:08.433229Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:54:08.433294Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:54:08.433669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:08.433803Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:08.433881Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:54:08.444718Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:08.478398Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:54:08.478573Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:08.478695Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:54:08.478730Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:08.478764Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:54:08.478800Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:08.479042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:08.479093Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:08.479376Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:54:08.479472Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:54:08.479574Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:08.479625Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:08.479681Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:54:08.479731Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:08.479780Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:08.479809Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:08.479855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:08.479995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:08.480032Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:08.480076Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:54:08.480474Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:54:08.480540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:54:08.480651Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:54:08.480869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:54:08.480945Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:54:08.481024Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:54:08.481067Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:54:08.481105Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:54:08.481139Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
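The TRACE records above show a scheme transaction stepping through the datashard's execution units — CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan — with each unit reporting a status (Executed, DelayComplete, or "not ready to execute"). A compressed illustration of that pipeline shape follows; the unit and status names mirror the log, but the code itself is invented for illustration and is not YDB's datashard implementation:

```cpp
// Illustrative sketch only: the execution-unit pipeline traced above.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Run;
};

int main() {
    std::vector<TUnit> pipeline = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        // DelayComplete statuses defer their side effects to the commit
        // callback ("Complete execution for ... on unit ..." in the log).
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        // WaitForPlan parks the operation until a TEvPlanStep arrives.
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    };

    for (const auto& unit : pipeline) {
        std::cout << "Trying to execute on unit " << unit.Name << '\n';
        if (unit.Run() == EStatus::NotReady) {
            // Re-entered later, when the external event fires.
            std::cout << "Not ready; waiting on " << unit.Name << '\n';
            break;
        }
        std::cout << "Advance execution plan past " << unit.Name << '\n';
    }
    return 0;
}
```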
2025-04-17T17:54:08.481177Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:54:08.481434Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:54:08.481469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:54:08.481500Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:54:08.481540Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:54:08.481589Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:54:08.481618Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:54:08.481645Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:54:08.481686Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:54:08.481712Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:54:08.484715Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:54:08.484776Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:08.498751Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:54:08.498837Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:54:08.498871Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:54:08.498912Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:54:08.498984Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:54:08.666268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:08.666335Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:08.666388Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:54:08.666822Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:54:08.666861Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:54:08.666978Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:54:08.667024Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:54:08.667065Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:54:08.667111Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:54:08.681681Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:54:08.681788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:08.682846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:08.682897Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:08.682967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:0 ... t 72075186224037890 to execution unit CompleteOperation 2025-04-17T17:54:10.606455Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:54:10.606609Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2025-04-17T17:54:10.606634Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2025-04-17T17:54:10.606657Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:54:10.606680Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:54:10.606712Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2025-04-17T17:54:10.606732Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:54:10.606751Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2025-04-17T17:54:10.606776Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:10.606797Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T17:54:10.606821Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:54:10.606843Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:54:10.618704Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:10.618757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:10.618793Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:54:10.618844Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 72075186224037890 at tablet 72075186224037890 send result to client [1:1129:2907], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:54:10.618885Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:54:14.308761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:14.309074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:14.309232Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d6/r3tmp/tmp0bJq3Z/pdisk_1.dat 2025-04-17T17:54:14.648487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:14.688429Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:14.729284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:14.729431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:14.740967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:14.825827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:14.852017Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-17T17:54:14.852203Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:14.905776Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:14.905932Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:14.907817Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:54:14.907934Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:54:14.908010Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:54:14.908423Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:14.908623Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:14.908751Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-17T17:54:14.921083Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:14.921180Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:54:14.921322Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:14.921420Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-17T17:54:14.921468Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:14.921512Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:54:14.921553Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:14.921953Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:54:14.922089Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:54:14.922260Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:14.922309Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:14.922358Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:14.922406Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:14.922517Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-04-17T17:54:14.923008Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:54:14.923260Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:54:14.923352Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:54:14.925176Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:14.936509Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:54:14.936641Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:54:15.109090Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-17T17:54:15.109545Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:54:15.109597Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:15.109978Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:15.110033Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:15.110084Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:54:15.110399Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:54:15.110562Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:15.110738Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:15.110808Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:54:15.111273Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:54:15.111665Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
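This block's test, TTxDataShardUploadRows::UploadRowsToReplicatedTable, ends just below with the datashard refusing the write ("Can't execute bulk upsert at replicated table"). For context, a hedged sketch of the client-side BulkUpsert call that such a shard-level rejection bounces — the endpoint, table path, and row layout are hypothetical:

```cpp
// Sketch under stated assumptions: a BulkUpsert of the kind the datashard
// rejects for replicated tables. Connection details and schema hypothetical.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <iostream>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2136")  // hypothetical endpoint
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);

    // Build a list of structs, one struct per row.
    NYdb::TValueBuilder rows;
    rows.BeginList();
    rows.AddListItem()
        .BeginStruct()
        .AddMember("key").Uint64(1)
        .AddMember("value").Utf8("v1")
        .EndStruct();
    rows.EndList();

    // Aimed at a replica of a replicated table, this fails with an error
    // like the "Can't execute bulk upsert at replicated table" record below.
    auto result = client.BulkUpsert("/Root/replica", rows.Build()).GetValueSync();
    if (!result.IsSuccess()) {
        std::cerr << result.GetIssues().ToString().c_str() << std::endl;
    }
    driver.Stop(true);
    return 0;
}
```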
2025-04-17T17:54:15.118452Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T17:54:15.118551Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:15.119337Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T17:54:15.119428Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:15.120321Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:15.120371Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:15.120435Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:54:15.120511Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:15.120576Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T17:54:15.120674Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:15.126896Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:15.128895Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:54:15.129594Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T17:54:15.129667Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:54:15.144066Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-17T17:54:15.144225Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> TSchemeShardLoginTest::BanUnbanUser >> TWebLoginService::AuditLogLogout [GOOD] |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |79.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFull >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141]
2025-04-17T17:54:14.168681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:54:14.168760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:54:14.168796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:54:14.168826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:54:14.168876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:54:14.168909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:54:14.168968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:54:14.169044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:54:14.169356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:54:14.258636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:54:14.258700Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:54:14.274054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:54:14.274216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:54:14.274397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:54:14.286510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:54:14.286705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:54:14.287351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:54:14.288639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:54:14.291865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:14.293219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:54:14.293297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:14.293439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:54:14.293491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:54:14.293529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:54:14.293743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.301141Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062]
2025-04-17T17:54:14.445060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:54:14.445303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.445526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:54:14.445785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:54:14.445840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.449030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:54:14.449225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:54:14.449483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.449550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:54:14.449587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:54:14.449639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:54:14.452079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.452137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:54:14.452177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:54:14.454429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.454487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.454540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:54:14.454602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:54:14.457840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:54:14.460820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:54:14.460988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:54:14.462001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:54:14.462154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:54:14.462238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:54:14.462508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:54:14.462560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:54:14.462737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:54:14.462854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:54:14.466298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:54:14.466356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:54:14.466526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:14.466570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:54:14.466849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:14.466887Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:54:14.466973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:54:14.466998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:54:14.467043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:54:14.467066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:54:14.467094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:54:14.467121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:54:14.467149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:54:14.467166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:54:14.467219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:54:14.467251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:54:14.467278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:54:14.468531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:54:14.468620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:54:14.468644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 94046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:54:17.797685Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-17T17:54:17.797728Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-17T17:54:17.797773Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:54:17.797869Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-17T17:54:17.801156Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-17T17:54:17.801697Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
TestModificationResults wait txId: 101
2025-04-17T17:54:17.805034Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:54:17.814751Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944
2025-04-17T17:54:17.814924Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-17T17:54:17.814971Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-17T17:54:17.815026Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-17T17:54:17.815071Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-17T17:54:17.815180Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:54:17.815250Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-04-17T17:54:17.815301Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-17T17:54:17.815355Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0
2025-04-17T17:54:17.815398Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0
2025-04-17T17:54:17.815437Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4
2025-04-17T17:54:17.815779Z node 4 :TX_PROXY DEBUG: actor# [4:271:2262] Bootstrap
2025-04-17T17:54:17.859100Z node 4 :TX_PROXY DEBUG: actor# [4:271:2262] Become StateWork (SchemeCache [4:277:2268])
2025-04-17T17:54:17.866837Z node 4 :TX_PROXY DEBUG: actor# [4:271:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-17T17:54:17.880069Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:54:17.880219Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot
2025-04-17T17:54:17.880676Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:54:17.880740Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:54:17.880936Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:17.880987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-04-17T17:54:17.881477Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
2025-04-17T17:54:17.881739Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101
2025-04-17T17:54:17.881841Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101
2025-04-17T17:54:17.881880Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-04-17T17:54:17.881933Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4
2025-04-17T17:54:17.881978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:54:17.882097Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0
2025-04-17T17:54:17.887275Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
TestModificationResult got TxId: 101, wait until txId: 101
2025-04-17T17:54:17.887789Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login
2025-04-17T17:54:17.889778Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944
2025-04-17T17:54:17.889833Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944
2025-04-17T17:54:18.035747Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1NjU4LCJpYXQiOjE3NDQ5MTI0NTgsInN1YiI6InVzZXIxIn0.Skv3GPVCK93rlxZRHVU8sjLtTutu8RSdeMF8WFbqVVKYulZ_ZwaF-_vlCAAfhb9aNmmIQhmVBNv6XyqDntFHumzXqrUhVimOKb44G27Js0dmNfoB6HimgAn4GiBO_9z3kmvdC-OXhrEc1fjzsSjb4xPx9N4sf3C0GVrAWJZy-XxLVraSANJU8xLMrplC5ijGSHcFtn1PDBkYOtvUk4pGpg6th4S8OS9J8ZACufPqU2PKGlJ1cC32xCTDd-e-jHPsxK6calUh16a6Telzb-Ckot--zzznYxh_RnHp-ob6YEvdL4xQivbqr7MO2rMCZ65xRbTYB6HpUSoC2RMK97NmkQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1NjU4LCJpYXQiOjE3NDQ5MTI0NTgsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944
2025-04-17T17:54:18.035908Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:54:18.035967Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:54:18.036231Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:18.036289Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1
2025-04-17T17:54:18.037862Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0
2025-04-17T17:54:18.038580Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:54:18.038787Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 229us result status StatusSuccess
2025-04-17T17:54:18.039529Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0TQHWRoCasqo8qi0lskZ\np/Rm290jA+JS/9yzqm5Py2E+kBe8rISdhXISNxEHFl/QXgpUxX/SpoPMDnXL8ssa\nSBdlYAxYE+yeKU4DfeIMLMiAw9K2BvTQyVcidGxec1PuEg+DFpwIvTHkjGtTK7Wd\ntGnb4EpxQ/PPKt1K2048R9MLRg9e3PjykhuZ+IL41ZT1Xt+iZEY1fCIOMe6A/ydP\nRf/T70cXcddUZnwI3X4RFB0Wir37cHJYcovptd1ouTG8JKUWXry4I5fkGtTiTomj\nqNKX0rUg3QMuwTuLUlSsKKi8omzYD5CRwY9OAhnVvy5YbofipBsZ2TWdES4kYSvg\nywIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744998858023 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:54:18.040085Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout
2025-04-17T17:54:18.040472Z node 4 :HTTP ERROR: Logout: No ydb_session_id cookie
2025-04-17T17:54:18.041060Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout
2025-04-17T17:54:18.041809Z node 4 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format
2025-04-17T17:54:18.041889Z node 4 :HTTP ERROR: Logout: Token is not in correct format
2025-04-17T17:54:18.042304Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout
AUDIT LOG buffer(4):
2025-04-17T17:54:17.776031Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted
2025-04-17T17:54:17.814597Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1
2025-04-17T17:54:18.036749Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1NjU4LCJpYXQiOjE3NDQ5MTI0NTgsInN1YiI6InVzZXIxIn0.**, login_user_level=admin
2025-04-17T17:54:18.043621Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1NjU4LCJpYXQiOjE3NDQ5MTI0NTgsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS
AUDIT LOG checked line:
2025-04-17T17:54:18.043621Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1NjU4LCJpYXQiOjE3NDQ5MTI0NTgsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS
>> TCdcStreamTests::DropTableWithIndexWithStream [GOOD]
|79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD]
>> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD]
>> DataShardReadIteratorState::ShouldCalculateQuota [GOOD]
>> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion
>> TSchemeShardLoginTest::BanUnbanUser [GOOD]
>> TSchemeShardLoginTest::BanUserWithWaiting
>> TTopicApiDescribes::GetPartitionDescribe [GOOD]
>> TTopicApiDescribes::DescribeConsumer [GOOD]
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD]
Test command err:
2025-04-17T17:54:10.979363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:54:10.980030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:54:10.980090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c1/r3tmp/tmpGKQaEO/pdisk_1.dat
2025-04-17T17:54:11.371599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T17:54:11.420831Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:54:11.463508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:54:11.463664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:54:11.475160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:54:11.559142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T17:54:11.598953Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-04-17T17:54:11.599297Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:54:11.744130Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:54:11.744256Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T17:54:11.745977Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-17T17:54:11.746079Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-17T17:54:11.746137Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-17T17:54:11.746567Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T17:54:11.746712Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T17:54:11.746790Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-04-17T17:54:11.759960Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T17:54:11.803733Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-17T17:54:11.803938Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T17:54:11.804047Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-04-17T17:54:11.804096Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-17T17:54:11.804135Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-17T17:54:11.804169Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:54:11.804587Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-17T17:54:11.804694Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-17T17:54:11.804806Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:54:11.804849Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:54:11.804891Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-17T17:54:11.804948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:54:11.805041Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0]
2025-04-17T17:54:11.805449Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-17T17:54:11.805668Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-17T17:54:11.805739Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-17T17:54:11.807353Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:54:11.818768Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-17T17:54:11.818885Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-17T17:54:12.004377Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-04-17T17:54:12.009289Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-17T17:54:12.009369Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:54:12.010046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:54:12.010100Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:54:12.010149Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-04-17T17:54:12.011788Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-04-17T17:54:12.011949Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-17T17:54:12.012802Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:54:12.012874Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-04-17T17:54:12.013288Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-04-17T17:54:12.013653Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:54:12.015249Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-04-17T17:54:12.015299Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:54:12.015844Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-04-17T17:54:12.015920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:54:12.017120Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:54:12.017172Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-17T17:54:12.017222Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-04-17T17:54:12.017288Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-04-17T17:54:12.017338Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-04-17T17:54:12.017418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:54:12.025257Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:54:12.033980Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-04-17T17:54:12.034729Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-04-17T17:54:12.034801Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-17T17:54:12.060456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:12.060582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:12.060677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:12.066299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T17:54:12.072518Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:54:12.237737Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:54:12.240562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T17:54:12.334917Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:54:12.668711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e62ed1sghf05dk3xm8te3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhkZmIzOGUtMjhiMjQ2ZDEtZDBlN2UyNWEtZWM5NjVjMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T17:54:12.674625Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0]
2025-04-17T17:54:12.675116Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888
2025-04-17T17:54:12.675279Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... aChangedResult
2025-04-17T17:54:18.687908Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready
2025-04-17T17:54:18.687963Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-17T17:54:18.689089Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:953:2764], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 953 RawX2: 8589937356 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\271\003\000\000\000\000\000\000\021\314\n\000\000\002\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8
2025-04-17T17:54:18.689144Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-17T17:54:18.689217Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-17T17:54:18.689389Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0]
2025-04-17T17:54:18.689460Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx
2025-04-17T17:54:18.689510Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-17T17:54:18.689548Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx
2025-04-17T17:54:18.689579Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies
2025-04-17T17:54:18.689609Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies
2025-04-17T17:54:18.689644Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615
2025-04-17T17:54:18.689695Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888
2025-04-17T17:54:18.689730Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-17T17:54:18.689750Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies
2025-04-17T17:54:18.689769Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot
2025-04-17T17:54:18.689789Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot
2025-04-17T17:54:18.689812Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-17T17:54:18.689829Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot
2025-04-17T17:54:18.689849Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance
2025-04-17T17:54:18.689867Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance
2025-04-17T17:54:18.689906Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:953:2764] for [0:281474976715665] at 72075186224037888
2025-04-17T17:54:18.689933Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue
2025-04-17T17:54:18.690081Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:953:2764], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665
2025-04-17T17:54:18.690113Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending
2025-04-17T17:54:18.694257Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:953:2764], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true
2025-04-17T17:54:18.694317Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse
2025-04-17T17:54:18.694426Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:54:18.694455Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:54:18.694518Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:54:18.694559Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0
2025-04-17T17:54:18.694606Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance
2025-04-17T17:54:18.694644Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance
2025-04-17T17:54:18.694697Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888
2025-04-17T17:54:18.694737Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-17T17:54:18.694777Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit WaitForStreamClearance
2025-04-17T17:54:18.694815Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan
2025-04-17T17:54:18.694849Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan
2025-04-17T17:54:18.695084Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue
2025-04-17T17:54:18.695113Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0
2025-04-17T17:54:18.695148Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888
2025-04-17T17:54:18.695187Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-17T17:54:18.695221Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-17T17:54:18.695744Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:959:2769], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor
2025-04-17T17:54:18.695786Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor
2025-04-17T17:54:18.695975Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1
2025-04-17T17:54:18.696447Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0
2025-04-17T17:54:18.696607Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0
2025-04-17T17:54:18.696647Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0
2025-04-17T17:54:18.718744Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-17T17:54:18.718821Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:54:18.719344Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888
2025-04-17T17:54:18.719394Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888
2025-04-17T17:54:18.719621Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:946:2757], Recipient [2:666:2570]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:946:2757] ServerId: [2:948:2759] }
2025-04-17T17:54:18.719689Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-17T17:54:18.719763Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T17:54:18.719794Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T17:54:18.719849Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:54:18.719883Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0
2025-04-17T17:54:18.719919Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan
2025-04-17T17:54:18.719949Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan
2025-04-17T17:54:18.719989Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0
2025-04-17T17:54:18.720036Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-17T17:54:18.720067Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan
2025-04-17T17:54:18.720091Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose
2025-04-17T17:54:18.720118Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose
2025-04-17T17:54:18.720157Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE
2025-04-17T17:54:18.720219Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete
2025-04-17T17:54:18.720250Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose
2025-04-17T17:54:18.720289Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations
2025-04-17T17:54:18.720329Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations
2025-04-17T17:54:18.720374Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-17T17:54:18.720400Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations
2025-04-17T17:54:18.720433Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished
2025-04-17T17:54:18.720470Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:54:18.720496Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888
2025-04-17T17:54:18.720524Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-17T17:54:18.720552Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-17T17:54:18.720618Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:54:18.720668Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose
2025-04-17T17:54:18.720711Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41
>> PartitionStats::CollectorOverload [GOOD]
>> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:51:46.040104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:51:46.040212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:51:46.040256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:51:46.040294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:51:46.040353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:51:46.040384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:51:46.040448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:51:46.040524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:51:46.040817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:51:46.128004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:51:46.128072Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:51:46.150157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:51:46.150308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:51:46.150447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:51:46.157774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:51:46.158037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:51:46.158613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:51:46.158855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:51:46.163147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:51:46.164276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:51:46.164350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:51:46.164462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:51:46.164536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:51:46.164589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:51:46.164775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.172211Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062]
2025-04-17T17:51:46.330313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:51:46.330594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.330862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:51:46.331175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:51:46.331238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.340933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:51:46.341131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:51:46.341361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.341427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:51:46.341466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:51:46.341514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:51:46.347152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.347227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:51:46.347262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:51:46.355220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.355309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.355372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:51:46.355447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:51:46.374775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:51:46.379821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:51:46.380069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:51:46.381122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:51:46.381276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:51:46.381335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:51:46.381632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:51:46.381690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:51:46.381885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:51:46.381972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:51:46.384583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:51:46.384627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:51:46.384787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:51:46.384826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:51:46.385211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:51:46.385261Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:51:46.385349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:51:46.385401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:51:46.385440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:51:46.385470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:51:46.385507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:51:46.385562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:51:46.385603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:51:46.385632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:51:46.385717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:51:46.385756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:51:46.385787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:51:46.388277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:51:46.388390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:51:46.388424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944
2025-04-17T17:54:18.700747Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:54:18.701262Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5
2025-04-17T17:54:18.701520Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5
2025-04-17T17:54:18.701603Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5
2025-04-17T17:54:18.701680Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5
2025-04-17T17:54:18.701753Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5
2025-04-17T17:54:18.701832Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false
2025-04-17T17:54:18.702824Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-17T17:54:18.702923Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-17T17:54:18.702967Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-17T17:54:18.704938Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-17T17:54:18.705030Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-17T17:54:18.705062Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-17T17:54:18.705276Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-17T17:54:18.705346Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-17T17:54:18.705389Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-17T17:54:18.705446Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615
2025-04-17T17:54:18.705486Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-04-17T17:54:18.705612Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true
2025-04-17T17:54:18.707013Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-17T17:54:18.707076Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:54:18.707412Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-17T17:54:18.707557Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5
2025-04-17T17:54:18.707618Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5
2025-04-17T17:54:18.707691Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5
2025-04-17T17:54:18.707726Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5
2025-04-17T17:54:18.707766Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true
2025-04-17T17:54:18.707877Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [19:379:2347] message: TxId: 103
2025-04-17T17:54:18.707986Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5
2025-04-17T17:54:18.708106Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0
2025-04-17T17:54:18.708174Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0
2025-04-17T17:54:18.708341Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-17T17:54:18.708422Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1
2025-04-17T17:54:18.708451Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1
2025-04-17T17:54:18.708487Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-17T17:54:18.708512Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2
2025-04-17T17:54:18.708536Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2
2025-04-17T17:54:18.708580Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-04-17T17:54:18.708609Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3
2025-04-17T17:54:18.708634Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3
2025-04-17T17:54:18.708668Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2
2025-04-17T17:54:18.708692Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4
2025-04-17T17:54:18.708715Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4
2025-04-17T17:54:18.708773Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1
2025-04-17T17:54:18.709959Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:54:18.710064Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944
2025-04-17T17:54:18.710209Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1
2025-04-17T17:54:18.710311Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944
2025-04-17T17:54:18.710361Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2
2025-04-17T17:54:18.710813Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:18.716629Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:18.716769Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:18.716871Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:18.716914Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:18.719392Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:18.719700Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-04-17T17:54:18.719785Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [19:760:2664]
2025-04-17T17:54:18.720189Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 103
2025-04-17T17:54:18.720801Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-17T17:54:18.721128Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 376us result status StatusPathDoesNotExist
2025-04-17T17:54:18.721360Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path
hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:54:18.722171Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:54:18.723789Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 1.64ms result status StatusPathDoesNotExist 2025-04-17T17:54:18.724070Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-04-17T17:54:05.888560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341886642904477:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:05.888611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:05.997507Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341886351354794:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:06.006745Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:06.476065Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:54:06.488519Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001618/r3tmp/tmpjJQxCu/pdisk_1.dat 2025-04-17T17:54:06.994309Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:07.082451Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:07.094160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:07.094833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:07.096138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:07.096186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:07.099804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:07.100614Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:54:07.101652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:07.125327Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27085, node 1 2025-04-17T17:54:07.172590Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:54:07.172607Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:54:07.239407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001618/r3tmp/yandexd6Rvu2.tmp 2025-04-17T17:54:07.239431Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001618/r3tmp/yandexd6Rvu2.tmp 2025-04-17T17:54:07.239585Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001618/r3tmp/yandexd6Rvu2.tmp 2025-04-17T17:54:07.239733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:54:07.299407Z INFO: TTestServer started on Port 10678 GrpcPort 27085 TClient is connected to server localhost:10678 PQClient connected to localhost:27085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
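[Editor's note] The HIVE warnings above walk each node through VolatileState transitions (Unknown -> Disconnected -> Connecting -> Connected). As an illustration only, not YDB's actual Hive code, a minimal C++ sketch of a node-state holder that logs transitions in the same "old -> new" form might look like this (all names here are hypothetical):

#include <cstdio>

// Hypothetical sketch mirroring the VolatileState transitions
// visible in the HIVE log lines above.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* ToString(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

struct TNode {
    int Id;
    EVolatileState State = EVolatileState::Unknown;

    void ChangeState(EVolatileState next) {
        // Print the transition in the same "old -> new" shape as the log.
        std::printf("Node(%d) VolatileState: %s -> %s\n",
                    Id, ToString(State), ToString(next));
        State = next;
    }
};

int main() {
    TNode node{1};
    node.ChangeState(EVolatileState::Disconnected);
    node.ChangeState(EVolatileState::Connecting);
    node.ChangeState(EVolatileState::Connected);
}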
2025-04-17T17:54:07.690242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:54:07.836896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:54:10.251329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341907826191438:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:10.251411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341907826191425:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:10.251551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:10.268531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-17T17:54:10.346620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341907826191455:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-17T17:54:10.639515Z node 2 :TX_PROXY ERROR: Actor# [2:7494341907826191482:2180] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:10.690638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:10.703003Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494341907826191489:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:10.703313Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2M3Y2MyNzQtZDM5OGU1Y2UtNzk0NDJlYWYtZGIxZDNjYzM=, ActorId: [2:7494341907826191423:2313], ActorState: ExecuteState, TraceId: 01js2e60ns34xy0ha167r8dzhw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:10.705871Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:10.702423Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341908117741908:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:10.704156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGNmZmRhZjYtNjczMjJkZTUtZDc1NDUyYWItYThkZWYyZTE=, ActorId: [1:7494341908117741831:2337], ActorState: ExecuteState, TraceId: 01js2e60rc4erce89skkev8878, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:10.705862Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:10.774315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:10.892424Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341886642904477:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:10.892486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:10.914967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:54:10.998398Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341886351354794:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:10.998489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:11.155539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e61e42h20gkrpszxe4qk0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRlYTJjZDMtYmI4M2Y0ZTYtMWVlODlkNWYtNDA3MzVkOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494341912412709680:3099] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic ... 
7898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:17.369136Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] has a tx writes info 2025-04-17T17:54:17.378344Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 2 2025-04-17T17:54:17.383748Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7494341938182515192:2559] 2025-04-17T17:54:17.385289Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:17.385331Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895] has a tx writes info 2025-04-17T17:54:17.386156Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7494341938182515193:2560] 2025-04-17T17:54:17.397456Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7494341937890963720:2470] 2025-04-17T17:54:17.399731Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.399754Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7494341937890963684:2465] 2025-04-17T17:54:17.402583Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.402622Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7494341937890963683:2464] 2025-04-17T17:54:17.399269Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:5:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.399318Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7494341938182515144:2555] 2025-04-17T17:54:17.399532Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:10:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.399558Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7494341938182515148:2556] 2025-04-17T17:54:17.408701Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7494341937890963723:2473] 2025-04-17T17:54:17.416065Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2025-04-17T17:54:17.416107Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2025-04-17T17:54:17.418226Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
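[Editor's note] Each PERSQUEUE partition above is first reported as "bootstrapping N" in StateInit and later as "init complete ... partition N generation 2". A self-contained sketch of tracking that per-partition lifecycle, under the assumption that a simple map from partition id to state/generation suffices (this is not the real PQ tablet code):

#include <cstdio>
#include <map>

// Hypothetical per-partition init tracker mirroring the
// "bootstrapping N" / "init complete ... partition N generation G"
// sequence in the PERSQUEUE log lines above.
enum class EPartitionState { StateInit, StateIdle };

struct TPartitionInfo {
    EPartitionState State = EPartitionState::StateInit;
    unsigned Generation = 0;
};

class TTopicPartitions {
public:
    void Bootstrap(unsigned partition) {
        Partitions[partition] = TPartitionInfo{};
        std::printf("bootstrapping %u\n", partition);
    }
    void InitComplete(unsigned partition, unsigned generation) {
        auto& p = Partitions.at(partition);  // must have been bootstrapped
        p.State = EPartitionState::StateIdle;
        p.Generation = generation;
        std::printf("init complete for partition %u generation %u\n",
                    partition, generation);
    }
private:
    std::map<unsigned, TPartitionInfo> Partitions;
};

int main() {
    TTopicPartitions topic;
    topic.Bootstrap(14);
    topic.Bootstrap(11);
    topic.InitComplete(11, 2);
    topic.InitComplete(14, 2);
}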
2025-04-17T17:54:17.418276Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7494341938182515193:2560] 2025-04-17T17:54:17.417964Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:17.417989Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] has a tx writes info 2025-04-17T17:54:17.418677Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:17.418696Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-04-17T17:54:17.420124Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.420171Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7494341938182515192:2559] 2025-04-17T17:54:17.419444Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7494341937890963774:2481] 2025-04-17T17:54:17.422018Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7494341937890963776:2483] 2025-04-17T17:54:17.426666Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2025-04-17T17:54:17.428705Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.428764Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7494341937890963723:2473] 2025-04-17T17:54:17.429448Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7494341937890963773:2480] 2025-04-17T17:54:17.432285Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7494341937890963775:2482] 2025-04-17T17:54:17.437668Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.437708Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7494341937890963720:2470] 2025-04-17T17:54:17.441861Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.441900Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7494341937890963774:2481] 2025-04-17T17:54:17.442114Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
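[Editor's note] The repeated "Initializing EndWriteTimestamp skipped because already initialized" lines show an idempotent initialization step: a second attempt is a no-op. A generic sketch of that guard, illustrative only:

#include <cstdio>
#include <optional>

// Illustrative idempotent-initialization guard: the second call is a
// no-op, as in the "skipped because already initialized" log lines.
class TEndWriteTimestamp {
public:
    void Init(long long value) {
        if (Value) {
            std::printf("Initializing skipped because already initialized.\n");
            return;
        }
        Value = value;
        std::printf("Initialized to %lld\n", *Value);
    }
private:
    std::optional<long long> Value;
};

int main() {
    TEndWriteTimestamp ts;
    ts.Init(1744912457);
    ts.Init(1744912999);  // skipped
}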
2025-04-17T17:54:17.442129Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7494341937890963776:2483] 2025-04-17T17:54:17.444868Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-04-17T17:54:17.449086Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.449112Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7494341937890963773:2480] 2025-04-17T17:54:17.449308Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:54:17.449325Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7494341937890963775:2482] 2025-04-17T17:54:17.453478Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2025-04-17T17:54:17.453503Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-04-17T17:54:17.811159Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-17T17:54:17.811259Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2025-04-17T17:54:17.811290Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7494341938182515345:2580]: Bootstrap 2025-04-17T17:54:17.812012Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341938182515345:2580]: Request location 2025-04-17T17:54:17.812889Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341938182515347:2581] connected; active server actors: 1 2025-04-17T17:54:17.813095Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-17T17:54:17.813305Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341938182515345:2580]: Got location Got response: 2025-04-17T17:54:17.814340Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341938182515347:2581] disconnected; active server actors: 1 operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-04-17T17:54:17.814369Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341938182515347:2581] disconnected no session 2025-04-17T17:54:17.815262Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-17T17:54:17.815331Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-04-17T17:54:17.815362Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7494341938182515348:2582]: Bootstrap 2025-04-17T17:54:17.816616Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341938182515348:2582]: Request location 
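[Editor's note] The DescribePartition exchange above returns an operation whose DescribePartitionResult carries partition_id, active, and partition_location { node_id, generation }. The field names below are copied from the protobuf text printed in the log; the surrounding C++ types are a hypothetical mirror of that shape, not the Ydb.Topic client API:

#include <cstdio>

// Hypothetical mirror of the response shape printed in the log:
// partition { partition_id active partition_location { node_id generation } }
struct TPartitionLocation {
    int NodeId = 0;
    int Generation = 0;
};

struct TDescribePartitionResult {
    int PartitionId = 0;
    bool Active = false;
    TPartitionLocation Location;
};

void Print(const TDescribePartitionResult& r) {
    std::printf("partition { partition_id: %d active: %s "
                "partition_location { node_id: %d generation: %d } }\n",
                r.PartitionId, r.Active ? "true" : "false",
                r.Location.NodeId, r.Location.Generation);
}

int main() {
    // Values taken from the successful partition 1 response above.
    Print(TDescribePartitionResult{1, true, {1, 2}});
}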
2025-04-17T17:54:17.817247Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341938182515351:2584] connected; active server actors: 1 2025-04-17T17:54:17.817613Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744912457 nanos: 331000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2025-04-17T17:54:17.821742Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341938182515348:2582]: Got location 2025-04-17T17:54:17.827317Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341938182515351:2584] disconnected; active server actors: 1 2025-04-17T17:54:17.827350Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341938182515351:2584] disconnected no session 2025-04-17T17:54:17.825943Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-17T17:54:17.826020Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-17T17:54:17.826051Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7494341938182515353:2585]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: 2025-04-17T17:54:10.972753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:10.973246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:10.973300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ab/r3tmp/tmpo0YlPH/pdisk_1.dat 2025-04-17T17:54:11.341929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:11.380686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:11.419481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:11.419607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:11.431053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:11.526742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:11.592142Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:54:11.593250Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:54:11.593789Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:54:11.594061Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:11.681966Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:54:11.682690Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:11.682782Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:11.684865Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:54:11.684942Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:54:11.685071Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:54:11.685456Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:11.685582Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:11.685651Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:54:11.696665Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:11.722524Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:54:11.722720Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:11.722839Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:54:11.722881Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:11.722914Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:54:11.722946Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:11.723231Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:11.723280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:11.723558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:54:11.723644Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:54:11.723781Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:11.723848Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:11.723905Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:54:11.723956Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:11.723991Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:11.724019Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:11.724072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:11.724217Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:11.724255Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:11.724290Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:54:11.724658Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:54:11.724713Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:54:11.724805Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:54:11.725027Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:54:11.725084Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:54:11.725158Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:54:11.725202Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:54:11.725241Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:54:11.725271Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
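[Editor's note] The TX_DATASHARD trace lines walk one operation through a chain of execution units (CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan), each step returning a status such as Executed, DelayComplete, or Continue before the plan advances. A simplified, hypothetical pipeline in the same spirit, not the actual datashard engine:

#include <cstdio>
#include <functional>
#include <string>
#include <vector>

// Status values echo the trace output above: Executed and DelayComplete
// both advance the plan (DelayComplete defers completion work), while
// Continue means the unit is not ready yet (e.g. WaitForPlan before a
// plan step arrives).
enum class EStatus { Executed, DelayComplete, Continue };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;
};

void RunPlan(const std::vector<TUnit>& units) {
    for (const auto& unit : units) {
        EStatus st = unit.Execute();
        if (st == EStatus::Continue) {
            std::printf("operation is not ready to execute on unit %s\n",
                        unit.Name.c_str());
            return;  // wait for an external event, then re-run
        }
        std::printf("executing on unit %s done, advancing\n",
                    unit.Name.c_str());
    }
}

int main() {
    RunPlan({
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::Continue; }},
    });
}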
2025-04-17T17:54:11.725300Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:54:11.725545Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:54:11.725578Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:54:11.725610Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:54:11.725650Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:54:11.725704Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:54:11.725732Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:54:11.725776Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:54:11.725815Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:54:11.725839Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:54:11.727135Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:54:11.727194Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:11.738412Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:54:11.738496Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:54:11.738535Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:54:11.738576Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:54:11.738647Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:54:11.899117Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:11.899183Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:11.899231Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:54:11.899620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:54:11.899662Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:54:11.899798Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:54:11.899839Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:54:11.899879Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:54:11.899915Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:54:11.906432Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:54:11.906545Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:11.908540Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:11.908590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:11.908670Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:1 ... 19.776671Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715668, MessageQuota: 0 2025-04-17T17:54:19.857325Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-17T17:54:19.857385Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2025-04-17T17:54:19.857565Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:971:2777], Recipient [2:971:2777]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:19.857598Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:19.857651Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:19.857682Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:19.857712Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2025-04-17T17:54:19.857737Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2025-04-17T17:54:19.857768Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2025-04-17T17:54:19.857824Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-04-17T17:54:19.857861Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2025-04-17T17:54:19.857895Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:54:19.857917Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:19.858086Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2025-04-17T17:54:19.858115Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:54:19.858143Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit 
CompletedOperations 2025-04-17T17:54:19.858172Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:54:19.858230Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-04-17T17:54:19.858259Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:54:19.858291Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2025-04-17T17:54:19.858333Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:19.858365Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:19.858400Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:54:19.858454Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:54:19.871090Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:19.871185Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:19.871227Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:19.871320Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1162:2940], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:19.871385Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:19.871837Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1162:2940], Recipient [2:974:2779]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2025-04-17T17:54:19.871894Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T17:54:19.871990Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2025-04-17T17:54:19.872049Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:19.872082Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:19.872249Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:974:2779], Recipient [2:974:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:19.872285Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:19.872352Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T17:54:19.872393Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:19.872436Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-04-17T17:54:19.872468Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2025-04-17T17:54:19.872503Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2025-04-17T17:54:19.872544Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-17T17:54:19.872578Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-17T17:54:19.872610Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2025-04-17T17:54:19.872641Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-04-17T17:54:19.872896Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2025-04-17T17:54:19.872931Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:19.872960Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-17T17:54:19.872990Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2025-04-17T17:54:19.873027Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:54:19.873895Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1183:2959], Recipient [2:974:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:54:19.873942Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T17:54:19.874257Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-04-17T17:54:19.875062Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T17:54:19.876767Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-04-17T17:54:19.876822Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-04-17T17:54:19.944839Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-17T17:54:19.944905Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-04-17T17:54:19.945128Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:974:2779], Recipient [2:974:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:19.945174Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:19.945244Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T17:54:19.945283Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:19.945328Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2025-04-17T17:54:19.945361Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-04-17T17:54:19.945400Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2025-04-17T17:54:19.945445Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-17T17:54:19.945491Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2025-04-17T17:54:19.945523Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2025-04-17T17:54:19.945554Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:54:19.945756Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2025-04-17T17:54:19.945790Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2025-04-17T17:54:19.945819Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:54:19.945849Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:54:19.945882Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-17T17:54:19.945906Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:54:19.945933Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2025-04-17T17:54:19.945962Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:19.945990Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T17:54:19.946024Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:54:19.946054Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:54:19.957627Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:19.957717Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:19.957770Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:54:19.957844Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1162:2940], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T17:54:19.957900Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2025-04-17T17:54:04.358682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341882239211775:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:04.358731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:04.545614Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341883492431304:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:04.545660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:04.895201Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:54:04.910923Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001649/r3tmp/tmpAYONFz/pdisk_1.dat 2025-04-17T17:54:05.458626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:05.630370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:05.689531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:05.698252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:05.698336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:05.709174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:05.709254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:05.720982Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:54:05.721099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:05.731093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64847, node 1 2025-04-17T17:54:06.083129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001649/r3tmp/yandexuoKKOh.tmp 2025-04-17T17:54:06.083150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001649/r3tmp/yandexuoKKOh.tmp 2025-04-17T17:54:06.083336Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001649/r3tmp/yandexuoKKOh.tmp 2025-04-17T17:54:06.083558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:54:06.243023Z INFO: TTestServer started on Port 11151 GrpcPort 64847 TClient is connected to server localhost:11151 PQClient connected to localhost:64847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:06.845787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:54:06.939682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:54:09.362322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341882239211775:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:09.362396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:09.546293Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341883492431304:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:09.546361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:09.659135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341904967268048:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:09.659212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341904967268061:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:09.659258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:09.665939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:54:09.718364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341904967268064:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:54:09.829341Z node 2 :TX_PROXY ERROR: Actor# [2:7494341904967268091:2181] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:10.193905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:10.207844Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341903714049433:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:10.209418Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzVmMWVjZTMtZGExNzNjODMtMTkzMDcxNTItYTJhMzM4NWU=, ActorId: [1:7494341903714049390:2337], ActorState: ExecuteState, TraceId: 01js2e6080bgz3hw73pf6jd6gc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:10.207656Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494341904967268098:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:10.209372Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTVhZWRiNWMtYjBhN2FlMWEtOTc0MzRlN2EtYTI3Y2Q1NjE=, ActorId: [2:7494341904967268046:2313], ActorState: ExecuteState, TraceId: 01js2e603m54g0h6vq7axp80rf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:10.211792Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:10.212406Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:10.333703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:10.488390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:54:10.791158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2e612h1q2c2vctgyfgjspx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjYzZThlZTMtNGVjZjI2MjYtZjQ5MjQzMjUtN2NmNGJlNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494341908009017208:3100] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPS ... 
te_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 11 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744912458 nanos: 98000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1744912458 nanos: 119000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 12 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744912458 nanos: 225000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1744912458 nanos: 253000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744912458 nanos: 181000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1744912458 nanos: 247000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744912458 nanos: 93000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1744912458 nanos: 158000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } } } } 2025-04-17T17:54:18.609216Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341942368757992:2638] connected; active server actors: 1 2025-04-17T17:54:18.609466Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-04-17T17:54:18.607820Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-17T17:54:18.609482Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-17T17:54:18.609495Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-04-17T17:54:18.607920Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2025-04-17T17:54:18.609512Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-17T17:54:18.608590Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341942368757990:2637]: Request location 2025-04-17T17:54:18.609522Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-04-17T17:54:18.610257Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341942368757990:2637]: Got location 2025-04-17T17:54:18.609531Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-17T17:54:18.609542Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 
72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-04-17T17:54:18.609553Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-04-17T17:54:18.609575Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-17T17:54:18.609584Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-04-17T17:54:18.609593Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-17T17:54:18.609603Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-04-17T17:54:18.609612Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-17T17:54:18.609622Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-17T17:54:18.609630Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-17T17:54:18.612239Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341942368757992:2638] disconnected; active server actors: 1 2025-04-17T17:54:18.612258Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341942368757992:2638] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744912457505 tx_id: 281474976710679 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 
13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-04-17T17:54:18.619147Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-17T17:54:18.619219Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744912457505 tx_id: 281474976710679 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2025-04-17T17:54:18.630457Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-17T17:54:18.630548Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2025-04-17T17:54:06.573365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:06.573767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:06.573827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d8/r3tmp/tmpYEiWjP/pdisk_1.dat 2025-04-17T17:54:06.989578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.047774Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:07.107744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:07.107870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:07.120226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:07.204841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.260916Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-17T17:54:07.261212Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:07.313753Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:07.313923Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:07.315728Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:54:07.315848Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:54:07.315903Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:54:07.316346Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:07.316669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:07.316760Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2578] in generation 1 2025-04-17T17:54:07.318586Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-04-17T17:54:07.318834Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:07.328841Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:07.328989Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:07.330500Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T17:54:07.330579Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T17:54:07.330642Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T17:54:07.330970Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:07.331110Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:07.331187Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-17T17:54:07.342279Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:07.381807Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:54:07.382034Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:07.382202Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-17T17:54:07.382255Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:07.382290Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:54:07.382328Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:07.382754Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:07.382811Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T17:54:07.382902Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:07.382992Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-17T17:54:07.383072Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T17:54:07.383104Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T17:54:07.383129Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:07.383559Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:54:07.383702Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:54:07.383867Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:07.383922Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:07.383977Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:07.384073Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:07.384125Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T17:54:07.384182Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T17:54:07.384685Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:690:2585], sessionId# [0:0:0] 2025-04-17T17:54:07.384778Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:07.384813Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:07.384842Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T17:54:07.384877Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:07.385031Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-17T17:54:07.385273Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:54:07.385366Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:54:07.387484Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:07.399106Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:54:07.399237Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:54:07.443744Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-17T17:54:07.443944Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T17:54:07.444527Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T17:54:07.444701Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T17:54:07.445255Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:54:07.456397Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T17:54:07.456513Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-17T17:54:07.592110Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:742:2620], sessionId# [0:0:0] 2025-04-17T17:54:07.592315Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-17T17:54:07.595678Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T17:54:07.595779Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:07.596370Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:07.596426Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:07.596481Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-17T17:54:07.596783Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:54:07.596954Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:07.597391Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:54:07.597435Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T17:54:07.597551Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:07.597626Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:54:07.599807Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:54:07.600258Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:07.601770Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:07.601813Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:07.601851Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:54:07.602093Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:19.906956Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2025-04-17T17:54:19.906995Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2025-04-17T17:54:19.907040Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-04-17T17:54:19.907176Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:54:19.907212Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2025-04-17T17:54:19.907256Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:19.907289Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:19.907330Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:54:19.907352Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:19.907376Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-17T17:54:19.918845Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-17T17:54:19.918920Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2025-04-17T17:54:19.918976Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2025-04-17T17:54:20.134239Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T17:54:20.134352Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-04-17T17:54:20.135232Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01js2e6a4k1a7ykw5yd7d28kr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTRmNzQ1MzYtYzFjOGNjYWUtZmU3NDAwNzUtYmY3ODkzZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:20.138126Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1115:2908], Recipient [3:670:2574]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T17:54:20.138318Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:20.138377Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2025-04-17T17:54:20.138419Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-17T17:54:20.138479Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:20.138588Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T17:54:20.138637Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:54:20.138725Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:20.138762Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:20.138818Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-17T17:54:20.138867Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T17:54:20.138898Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:20.138920Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:20.138951Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:20.139080Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T17:54:20.139341Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1115:2908], 0} after executionsCount# 1 2025-04-17T17:54:20.139398Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1115:2908], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:20.139514Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1115:2908], 0} finished in read 2025-04-17T17:54:20.139573Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T17:54:20.139603Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:20.139644Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:20.139669Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:20.139713Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T17:54:20.139738Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:20.139762Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-17T17:54:20.139814Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:54:20.139924Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:54:20.141245Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1115:2908], Recipient [3:670:2574]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:54:20.141322Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2025-04-17T17:54:20.359116Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T17:54:20.359192Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-04-17T17:54:20.360087Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e6abj4bn394mrfgxzz3ft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2M2ZDFiMTctOTg2ZTE5ZDEtYmYzYWIxN2QtZDVhNTY4ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:20.362646Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1146:2933], Recipient [3:911:2738]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-04-17T17:54:20.362836Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T17:54:20.362893Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2025-04-17T17:54:20.362945Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-17T17:54:20.363019Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-17T17:54:20.363166Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-17T17:54:20.363217Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-17T17:54:20.363256Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:20.363294Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:54:20.363348Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-17T17:54:20.363395Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
2025-04-17T17:54:20.363422Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:20.363462Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T17:54:20.363489Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-17T17:54:20.363600Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-17T17:54:20.363929Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1146:2933], 0} after executionsCount# 1 2025-04-17T17:54:20.363985Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1146:2933], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:20.364131Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1146:2933], 0} finished in read 2025-04-17T17:54:20.364199Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-17T17:54:20.364229Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:54:20.364254Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:54:20.364280Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:54:20.364346Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-17T17:54:20.364372Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:54:20.364403Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-17T17:54:20.364440Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:54:20.364542Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:54:20.365985Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1146:2933], Recipient [3:911:2738]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:54:20.366045Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> TTopicApiDescribes::DescribeTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2025-04-17T17:54:08.520858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341900803300308:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:08.520943Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:08.572130Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341902790956451:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:08.572168Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:08.874531Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:54:08.905527Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015f1/r3tmp/tmpk8b2uP/pdisk_1.dat 2025-04-17T17:54:09.339172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:09.339244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:09.345285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:09.345353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:09.346762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:09.353136Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:54:09.358642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:09.403244Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3879, node 1 2025-04-17T17:54:09.440685Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:54:09.440707Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:54:09.489904Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015f1/r3tmp/yandexwRIh8k.tmp 2025-04-17T17:54:09.489927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0015f1/r3tmp/yandexwRIh8k.tmp 2025-04-17T17:54:09.490048Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015f1/r3tmp/yandexwRIh8k.tmp 2025-04-17T17:54:09.490197Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:54:09.550514Z INFO: TTestServer started on Port 63247 GrpcPort 3879 TClient is connected to server localhost:63247 PQClient connected to localhost:3879 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:09.880811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:09.909070Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:09.933356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:54:12.206005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341917983170573:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.206134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.206960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341917983170606:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.209747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:54:12.231834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341917983170608:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:54:12.503617Z node 1 :TX_PROXY ERROR: Actor# [1:7494341917983170698:2771] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:12.552439Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341917983170711:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:12.556035Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494341919970826050:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:12.554707Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODM4OTdmNDYtZWM2MmE4MzQtMTU5Yjc5N2YtMzU0NmQzNjE=, ActorId: [1:7494341917983170567:2336], ActorState: ExecuteState, TraceId: 01js2e62k4fx7hpnznweghrzzp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:12.556900Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:12.557121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:12.557233Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Njk5NGNiODUtOGI2NWJjODUtNGM0MzQyYTgtYmYxYTlhMGY=, ActorId: [2:7494341919970826011:2312], ActorState: ExecuteState, TraceId: 01js2e62qb4warqe6x3dcpt24n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:12.557536Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:12.677632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:12.869408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:54:13.084235Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2e63b475ehwzy6169gwsm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQwM2NkYWYtYjQ5YmMwNTctOGExMGY3Mi0zNjdiOTM0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7494341922278138453:3097] 2025-04-17T17:54:13.517880Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341900803300308:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:13.517957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:13.572826Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341902790956451:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:13.572879Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc ... nected TabletId 72057594046644480, NodeId 1, Generation 2 ===Query complete 2025-04-17T17:54:18.979522Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7494341945740630466:2410] 2025-04-17T17:54:18.983081Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] bootstrapping 4 [1:7494341943752975920:2503] 2025-04-17T17:54:18.998825Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7494341943752975921:2504] 2025-04-17T17:54:19.001142Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7494341943752975921:2504] 2025-04-17T17:54:18.999738Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7494341945740630466:2410] 2025-04-17T17:54:19.006389Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7494341943752975920:2503] 2025-04-17T17:54:19.008028Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7494341945740630465:2409] 2025-04-17T17:54:19.023183Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7494341945740630478:2412] 2025-04-17T17:54:19.008153Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7494341943752975923:2505] 2025-04-17T17:54:19.010086Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7494341943752975923:2505] 2025-04-17T17:54:19.025377Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7494341945740630465:2409] 2025-04-17T17:54:19.027998Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7494341945740630478:2412] 2025-04-17T17:54:19.024910Z node 1 :PERSQUEUE 
INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7494341943752975917:2500] 2025-04-17T17:54:19.027664Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7494341943752975918:2501] 2025-04-17T17:54:19.029568Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7494341943752975918:2501] 2025-04-17T17:54:19.030897Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7494341943752975917:2500] 2025-04-17T17:54:19.033079Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [1:7494341943752975945:2510] 2025-04-17T17:54:19.035347Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7494341945740630481:2415] 2025-04-17T17:54:19.037305Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7494341945740630481:2415] 2025-04-17T17:54:19.037037Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7494341943752975945:2510] 2025-04-17T17:54:19.055559Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [1:7494341943752975930:2509] 2025-04-17T17:54:19.047776Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7494341945740630479:2413] 2025-04-17T17:54:19.049794Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7494341945740630479:2413] 2025-04-17T17:54:19.057762Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7494341943752975930:2509] 2025-04-17T17:54:19.051589Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7494341945740630480:2414] 2025-04-17T17:54:19.053776Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7494341945740630480:2414] 2025-04-17T17:54:19.097550Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7494341945740630498:2418] 2025-04-17T17:54:19.099772Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7494341945740630498:2418] 2025-04-17T17:54:19.107633Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7494341950035597809:2421] 2025-04-17T17:54:19.109810Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7494341950035597809:2421] 2025-04-17T17:54:19.309865Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:19.311081Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:19.321420Z node 2 :PERSQUEUE NOTICE: 
[PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:19.322055Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:19.325978Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:19.345576Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:19.368184Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:54:19.369489Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig Create topic result: 1 2025-04-17T17:54:19.493763Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341948047943482:3978]: Request location 2025-04-17T17:54:19.505989Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341948047943498:3987] connected; active server actors: 1 2025-04-17T17:54:19.507902Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-04-17T17:54:19.507926Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-17T17:54:19.508444Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341948047943482:3978]: Got location 2025-04-17T17:54:19.507937Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2025-04-17T17:54:19.507982Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-17T17:54:19.507995Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2025-04-17T17:54:19.508010Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-17T17:54:19.508021Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2025-04-17T17:54:19.508032Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2025-04-17T17:54:19.508040Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2025-04-17T17:54:19.508048Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2025-04-17T17:54:19.508057Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2025-04-17T17:54:19.508067Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] 
addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2025-04-17T17:54:19.508074Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2025-04-17T17:54:19.508083Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2025-04-17T17:54:19.508091Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2025-04-17T17:54:19.511586Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341948047943507:3991]: Request location 2025-04-17T17:54:19.518286Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341948047943498:3987] disconnected; active server actors: 1 2025-04-17T17:54:19.518337Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341948047943498:3987] disconnected no session 2025-04-17T17:54:19.518378Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341948047943517:3998] connected; active server actors: 1 2025-04-17T17:54:19.518441Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-17T17:54:19.518459Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-17T17:54:19.518469Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-17T17:54:19.519538Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341948047943507:3991]: Got location 2025-04-17T17:54:19.520273Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341948047943559:4034]: Request location 2025-04-17T17:54:19.520444Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341948047943517:3998] disconnected; active server actors: 1 2025-04-17T17:54:19.520467Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341948047943517:3998] disconnected no session 2025-04-17T17:54:19.522866Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341948047943561:4036] connected; active server actors: 1 >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled |79.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] >> TS3WrapperTests::HeadObject >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> TS3WrapperTests::UploadUnknownPart >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] >> 
TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> TS3WrapperTests::HeadObject [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2025-04-17T17:54:08.408556Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341899058494393:2251];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:08.408628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:08.454406Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341902816611522:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:08.456589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001613/r3tmp/tmpetIGN4/pdisk_1.dat 2025-04-17T17:54:08.791470Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:54:08.806901Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:54:09.101457Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:09.111560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:09.111665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:09.112616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:09.112666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:09.116972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:09.117452Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:54:09.118353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9449, node 1 2025-04-17T17:54:09.304684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001613/r3tmp/yandexx0lmUB.tmp 2025-04-17T17:54:09.323898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001613/r3tmp/yandexx0lmUB.tmp 2025-04-17T17:54:09.336225Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001613/r3tmp/yandexx0lmUB.tmp 2025-04-17T17:54:09.358361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:54:09.481426Z INFO: TTestServer started on Port 26202 GrpcPort 9449 TClient is connected to server localhost:26202 PQClient connected to localhost:9449 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:09.825467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:54:09.916403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:54:12.799180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341919996481111:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.799276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341919996481086:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.799361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.809451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:54:12.852105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341919996481116:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:54:12.927736Z node 2 :TX_PROXY ERROR: Actor# [2:7494341919996481143:2180] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:13.231820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:13.236303Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341916238364519:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:13.238224Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWM0NTA2ZTgtOTkwYmU2OTEtZGZiMThmNjQtNThlNjQwYTM=, ActorId: [1:7494341916238364493:2338], ActorState: ExecuteState, TraceId: 01js2e63925h8kb2mkwh5cg2wv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:13.240620Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:13.245978Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494341919996481150:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:13.246352Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTc1MGU0OGQtNWEyMzIzZDgtY2Q4MzUyYmQtYmYwZTQxNTE=, ActorId: [2:7494341919996481084:2313], ActorState: ExecuteState, TraceId: 01js2e635a0e2ct5371590sbmf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:13.246710Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:13.377718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:13.403147Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341899058494393:2251];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:13.403214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:13.450554Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341902816611522:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:13.450762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:13.579309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:54:13.914988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2e644081v4mx68mkh0t3td, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhmNmQyNS01ZTRhYWU1NC1jYTgyNTFjMC1mMWM5ZTRj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494341920533332285:3088] === CheckClustersList. 
Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-17T17:54:20.110260Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-17T17:54:20.229112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01js2e6ab3ez58r7qt0v4bc74c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlN2UxZjQtYTEzNzg3YWItYzMzYmNiNDYtOTU2YTM4ODg=, CurrentExe ... } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1744912460 nanos: 690000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } topic_stats { min_last_write_time { seconds: 1744912460 nanos: 702000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-04-17T17:54:21.402091Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-17T17:54:21.402205Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2025-04-17T17:54:21.402315Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-04-17T17:54:21.402844Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341954893072676:2616]: Request location 2025-04-17T17:54:21.404447Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7494341954893072676:2616]: Got location 2025-04-17T17:54:21.403515Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341954893072678:2617] connected; active server actors: 1 2025-04-17T17:54:21.403971Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-04-17T17:54:21.404007Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-17T17:54:21.404020Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-04-17T17:54:21.404037Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-17T17:54:21.404058Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-04-17T17:54:21.404073Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-17T17:54:21.404085Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 
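The `===Run query` step earlier in this line registers the topic through declared parameters; with the substitutions the log reports (topic = topic-x, dc = dc1) it reduces to the pair of UPSERTs sketched below. The `$version` value is never printed in the log, so the literal here is only a placeholder:

UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc)
    VALUES ("topic-x", "dc1");
UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version)
    VALUES ("Topics", 1);  -- $version not shown in the log; 1 is a placeholder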
2025-04-17T17:54:21.404131Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-04-17T17:54:21.404144Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-17T17:54:21.404156Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-04-17T17:54:21.404169Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-17T17:54:21.404182Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-04-17T17:54:21.404195Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-17T17:54:21.404206Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-17T17:54:21.404218Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-17T17:54:21.405953Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341954893072678:2617] disconnected; active server actors: 1 2025-04-17T17:54:21.405980Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7494341954893072678:2617] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744912460424 tx_id: 281474976710679 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true 
partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location 2025-04-17T17:54:21.410345Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-17T17:54:21.410433Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" 2025-04-17T17:54:21.410530Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744912460424 tx_id: 281474976710679 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2025-04-17T17:54:21.416372Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-17T17:54:21.416472Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-17T17:54:21.416563Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TS3WrapperTests::UploadUnknownPart [GOOD] |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> TIcNodeCache::GetNodesInfoTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> 
TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:54:14.495094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:14.495188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:14.495235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:14.495273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:14.495320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:14.495352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:14.495434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:14.495544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:14.495931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:14.596640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:14.596721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:14.619334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:14.619509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:14.619737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:14.634952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:14.635270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:14.635924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:14.636224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:14.645382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:14.646814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:14.646892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:14.647003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:14.647047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:14.647083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:14.647292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.657691Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:54:14.792902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:14.793072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.793236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:14.793459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:14.793513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.798010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:14.798144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:14.798351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.798417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:14.798447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:14.798502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:14.800663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.800826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:14.800861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:14.805278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.805334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.805396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:14.805451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-04-17T17:54:14.807990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:14.809901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:14.810064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:14.811158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:14.811299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:14.811355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:14.811670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:14.811736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:14.811892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:14.811988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:14.820552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:14.820607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:14.820789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:14.820827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:14.821209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:14.821274Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:14.821381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:14.821424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:14.821461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:14.821499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:14.821534Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:14.821571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:14.821603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:14.821630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:14.821714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:14.821753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:14.821783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:14.823768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:14.823876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:14.823910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... DbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:23.301538Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-17T17:54:23.301565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:54:23.301589Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:54:23.301620Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-17T17:54:23.301646Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-04-17T17:54:23.304209Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSuccess TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:23.304323Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-04-17T17:54:23.304573Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:23.304637Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:23.304792Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.304830Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:360:2336], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-17T17:54:23.305219Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:54:23.305352Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:54:23.305388Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:54:23.305419Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-17T17:54:23.305450Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:23.305546Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T17:54:23.311023Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:54:23.312225Z node 5 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [5:313:2300] sender: [5:406:2058] recipient: [5:102:2137] Leader for TabletID 72057594046678944 is [5:313:2300] sender: [5:409:2058] recipient: [5:15:2062] Leader for TabletID 72057594046678944 is [5:313:2300] sender: [5:410:2058] recipient: [5:408:2379] Leader for TabletID 72057594046678944 is [5:411:2380] sender: [5:412:2058] recipient: [5:408:2379] 2025-04-17T17:54:23.357270Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:23.357380Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:23.357420Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:23.357460Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:23.357500Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:23.357544Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:23.357600Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:23.357678Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:23.358077Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:23.378625Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:23.380022Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:23.380209Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:23.380344Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:23.380394Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:23.380626Z node 5 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2025-04-17T17:54:23.381372Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:23.381499Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.381585Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.382008Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.382093Z node 5 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T17:54:23.382332Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.382424Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.382499Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.382599Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.382687Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.382834Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.383109Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.383230Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.383628Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.383711Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.383868Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.383955Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.384057Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.384305Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.384389Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.384532Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.384768Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.384947Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.384997Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.385062Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 
2025-04-17T17:54:23.399467Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:23.399623Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.400206Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:23.400273Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:23.400321Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:23.400565Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:411:2380] sender: [5:466:2058] recipient: [5:15:2062] 2025-04-17T17:54:23.433551Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T17:54:23.433615Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-17T17:54:23.619995Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1NjYzLCJpYXQiOjE3NDQ5MTI0NjMsInN1YiI6InVzZXIxIn0.QQtHrzuW52qGLpx_Ts4qBu_A47SGGo0cJDy3erGzftHseZQyNwRWdMphYqkxNfYkLjG7evx2TToW7K6VcO6wZnmzTC7dseRTKXHtZ9lQqT_zynXPFeUy0trK0ThHVUGvhKkHCM-7BANQmdJIBBdaJq0FJKzWwq_po2Cu0cfCDdG4NkThTaMNIKqrqx9pJx8818a3omiz0XUdS65dmPgAKs01ONc87uZOIKyvRqzvm4MoouIEVkMJiXI6bNpErbMunfVsaIU1vcq7R6R_Xa4yLjjCr6H1E-GRLpnTjpAcnJ9PCUkAIsdu9zXZnNt0XS1-qQeJZd4jfgoRTTvS9WvSuQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1NjYzLCJpYXQiOjE3NDQ5MTI0NjMsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-17T17:54:23.620162Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:23.620211Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:23.620415Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.620456Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:460:2418], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-17T17:54:23.621004Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 |79.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> DataShardVolatile::DistributedWrite >> DataShardVolatile::DistributedWriteThenImmediateUpsert ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2025-04-17T17:54:24.306516Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 38AC6262-5378-4F97-B52E-B0399E997227, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:7500 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
F21CA8AA-B7B7-402B-9457-CD4195C38167 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2025-04-17T17:54:24.316682Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 38AC6262-5378-4F97-B52E-B0399E997227, response# ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2025-04-17T17:54:24.200675Z node 1 :S3_WRAPPER NOTICE: Request: uuid# E82582CB-89F4-4AE5-BAD1-0ED000964235, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:25181 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B6959CE2-C492-4D03-962C-21B8642A6336 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-17T17:54:24.208288Z node 1 :S3_WRAPPER NOTICE: Response: uuid# E82582CB-89F4-4AE5-BAD1-0ED000964235, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-17T17:54:24.208684Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 66990252-52E2-4C62-AAE4-A06AD29B00C6, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:25181 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 86599494-593A-48C3-AA0E-85DEC253A47C amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-17T17:54:24.211858Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 66990252-52E2-4C62-AAE4-A06AD29B00C6, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:23.405994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:23.406062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:23.406100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:23.406129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:23.406164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:23.406206Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:23.406278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:23.406368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:23.406936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:23.488548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:23.488604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:23.504368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:23.504837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:23.505004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:23.518920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:23.519073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:23.519565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:23.519801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:23.522688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.523919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:23.523966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.524119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:23.524157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:23.524185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:23.524278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.529465Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:23.657944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:23.658242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.658463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:23.658708Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:23.658761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.661225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:23.661350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:23.661519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.661578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:23.661614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:23.661645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:23.663668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.663732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:23.663768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:23.665528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.665582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.665625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:23.665687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.669416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:23.671463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:23.671649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:23.672579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:23.672713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:23.672770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:23.673073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:23.673122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:23.673302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:23.673374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:23.675463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:23.675502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:23.675682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.675732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:23.676111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.676160Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:23.676268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:23.676299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.676335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:23.676364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.676395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:23.676433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.676476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:23.676507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:23.676576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:23.676613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:23.676654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:23.678434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:23.678575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:23.678627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... schemeshard: 72057594046678944 2025-04-17T17:54:24.101090Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T17:54:24.101286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.101396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.101494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-04-17T17:54:24.101540Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2025-04-17T17:54:24.101582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T17:54:24.101616Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2025-04-17T17:54:24.101638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:54:24.101741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.101813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.102001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2025-04-17T17:54:24.106847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:54:24.107263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.107419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.107873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.107953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.108244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.108359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.108474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.108645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.108732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.108934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.109329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.109608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.109685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.109735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:24.109974Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:54:24.116087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:54:24.116270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T17:54:24.118107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:568:2499], Recipient [1:568:2499]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-17T17:54:24.118169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-17T17:54:24.118576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:24.118665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:24.118754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:24.118802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:24.118841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:24.118880Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:54:24.119166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:604:2499], Recipient [1:568:2499]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T17:54:24.119201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T17:54:24.119231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:568:2499] sender: [1:623:2058] recipient: [1:15:2062] 2025-04-17T17:54:24.174128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:622:2541], Recipient [1:568:2499]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:54:24.174210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:54:24.174352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:24.174597Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 191us result status StatusSuccess 
2025-04-17T17:54:24.175028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:24.175718Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:624:2542], Recipient [1:568:2499]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-04-17T17:54:24.175769Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-17T17:54:24.175804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2025-04-17T17:54:24.175848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:54:24.175911Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-17T17:54:24.176117Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:625:2543], Recipient [1:568:2499]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:54:24.176148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:54:24.176230Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:24.176377Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 151us result status StatusSuccess 2025-04-17T17:54:24.176756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" 
PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] Test command err: 2025-04-17T17:54:11.942747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:11.943331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:11.943404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001597/r3tmp/tmpB3pAXK/pdisk_1.dat 2025-04-17T17:54:12.324680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:12.368651Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:12.420460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:12.420599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:12.432624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:12.516097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:12.578002Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2579] 2025-04-17T17:54:12.578313Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:12.627778Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-04-17T17:54:12.628028Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:12.637912Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:12.638064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:12.639867Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:54:12.639978Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:54:12.640048Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:54:12.640494Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:12.640654Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:12.640728Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:720:2579] in generation 1 2025-04-17T17:54:12.641513Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:12.641633Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:12.643082Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T17:54:12.643151Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T17:54:12.643196Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T17:54:12.643522Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:12.643774Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:12.643847Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:726:2581] in generation 1 2025-04-17T17:54:12.645791Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:690:2583] 2025-04-17T17:54:12.645999Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:12.657139Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:12.657409Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:12.658889Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-17T17:54:12.658960Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-17T17:54:12.659018Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-17T17:54:12.659368Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:12.659842Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:692:2585] 2025-04-17T17:54:12.660044Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:12.668190Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:12.668281Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:748:2583] in generation 1 2025-04-17T17:54:12.669640Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:12.669737Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:12.671178Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-17T17:54:12.671258Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-17T17:54:12.671336Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-17T17:54:12.671637Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:12.671762Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:12.671816Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:753:2585] in generation 1 2025-04-17T17:54:12.682898Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:12.724305Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:54:12.724526Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:12.724656Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:758:2621] 2025-04-17T17:54:12.724710Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:12.724757Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:54:12.724796Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:12.725167Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:12.725207Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T17:54:12.725265Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:12.725322Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:759:2622] 
2025-04-17T17:54:12.725349Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T17:54:12.725373Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T17:54:12.725404Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:12.725883Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:54:12.725991Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:54:12.726074Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:12.726106Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-17T17:54:12.726164Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:12.726262Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:760:2623] 2025-04-17T17:54:12.726299Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-17T17:54:12.726321Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-17T17:54:12.726342Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T17:54:12.726400Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:12.726427Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-17T17:54:12.726475Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:12.726522Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:761:2624] 2025-04-17T17:54:12.726543Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T17:54:12.726582Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-17T17:54:12.726617Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:54:12.727291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:12.727356Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:12.727429Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:12.727482Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:12.727534Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T17:54:12.727618Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T17:54:12.727842Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:724:2605], sessionId# [0:0:0] 2025-04-17T17:54:12.727899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:12.727926Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:12.727972Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T17:54:12.728017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:12.728250Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:54:12.728488Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:54:12.728571Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:54:12.729055Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:715:2598], sessionId# [0:0:0] 2025-04-17T17:54:12.729144Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-17T17:54:12.729225Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-17T17:54:12.729263Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-17T17:54:12.729308Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove ... 72075186224037888 2025-04-17T17:54:22.573075Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:22.585467Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:54:22.585593Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:54:22.751241Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-17T17:54:22.751961Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:54:22.752021Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:22.752246Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:22.752294Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:22.752372Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:54:22.752663Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:54:22.752849Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:54:22.756863Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:22.756990Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:54:22.757560Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:54:22.758043Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T17:54:22.759893Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T17:54:22.759945Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:22.760631Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T17:54:22.760754Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:22.761665Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:22.761712Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:22.761773Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:54:22.761844Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:22.761906Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T17:54:22.762056Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:22.765249Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:22.768257Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T17:54:22.768323Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:54:22.768820Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:54:22.778465Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:22.778572Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:22.778639Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:22.787186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:54:22.796261Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:22.990380Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:22.993912Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:54:23.112169Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:23.525575Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e6cxrc3wk11pk4jvq1qah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGEwMWNiZWMtMzUyNzE5NDMtNmNjZDE2NWEtZjAxMjAxZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:23.532511Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-17T17:54:23.532977Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T17:54:23.533180Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-04-17T17:54:23.546940Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:23.743529Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e6dqn8vgfrptxbs44yvas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmUxYjk0MWUtZmNmNWFkYzAtNmRhNTczLTEyZGU2Y2Uw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:23.745944Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2025-04-17T17:54:23.752977Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-17T17:54:23.764484Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-17T17:54:23.764579Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:23.764682Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-17T17:54:23.765523Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-17T17:54:23.765599Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:23.835839Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e6dwp2esz3ccghqxqb04q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmUxYjk0MWUtZmNmNWFkYzAtNmRhNTczLTEyZGU2Y2Uw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:23.839238Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-04-17T17:54:23.839444Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-17T17:54:23.849982Z node 3 :TX_DATASHARD INFO: Write transaction 5 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-04-17T17:54:23.850317Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. 
txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T17:54:23.850541Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T17:54:23.850630Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:23.850925Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:913:2693], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:863:2693]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:913:2693].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T17:54:23.851650Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:906:2693], SessionActorId: [3:863:2693], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:863:2693]. isRollback=0 2025-04-17T17:54:23.852133Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmUxYjk0MWUtZmNmNWFkYzAtNmRhNTczLTEyZGU2Y2Uw, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01js2e6dwp2esz3ccghqxqb04q, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:907:2693] from: [3:906:2693] 2025-04-17T17:54:23.852382Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:907:2693] TxId: 281474976715662. Ctx: { TraceId: 01js2e6dwp2esz3ccghqxqb04q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmUxYjk0MWUtZmNmNWFkYzAtNmRhNTczLTEyZGU2Y2Uw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T17:54:23.852759Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-17T17:54:23.852816Z node 3 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-04-17T17:54:23.852985Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:23.853142Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmUxYjk0MWUtZmNmNWFkYzAtNmRhNTczLTEyZGU2Y2Uw, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01js2e6dwp2esz3ccghqxqb04q, Create QueryResponse for error on request, msg: >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] |79.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> DataShardReadIteratorBatchMode::RangeFull [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2025-04-17T17:54:08.155212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341899600908115:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:08.155278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:08.295425Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341901607673782:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:08.295489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:08.568479Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:54:08.579532Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001604/r3tmp/tmppBLa8v/pdisk_1.dat 2025-04-17T17:54:08.980632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:09.009331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:09.009415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:09.010624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:09.010684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:09.020847Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:54:09.021006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-17T17:54:09.026777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26381, node 1 2025-04-17T17:54:09.278777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001604/r3tmp/yandexd2PkSo.tmp 2025-04-17T17:54:09.278806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001604/r3tmp/yandexd2PkSo.tmp 2025-04-17T17:54:09.278931Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001604/r3tmp/yandexd2PkSo.tmp 2025-04-17T17:54:09.279033Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:54:09.334033Z INFO: TTestServer started on Port 13369 GrpcPort 26381 TClient is connected to server localhost:13369 PQClient connected to localhost:26381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:09.689472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:54:09.786620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:54:10.015251Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:12.609823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341918787543352:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.609948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.610063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494341918787543364:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.612173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341916780778180:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.612231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341916780778170:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.612466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:12.628687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:54:12.644252Z node 2 :TX_PROXY ERROR: Actor# [2:7494341918787543368:2174] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:54:12.658309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341916780778185:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:54:12.658403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494341918787543367:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:54:12.733771Z node 2 :TX_PROXY ERROR: Actor# [2:7494341918787543394:2180] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:12.747554Z node 1 :TX_PROXY ERROR: Actor# [1:7494341916780778283:2759] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:12.982794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:12.984507Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494341916780778293:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:12.986343Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzE5N2E1ZTktNWY4NzRjMjYtOGQ1ZWQ0NWMtNTFiNWMxZWQ=, ActorId: [1:7494341916780778168:2338], ActorState: ExecuteState, TraceId: 01js2e62yx9rcetx5ks731e2bf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:12.986387Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494341918787543401:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:54:12.988401Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:12.986753Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWQwNjBmMjgtN2NjYmQwNmItYTg5M2VkMy1iM2VkMzAxNQ==, ActorId: [2:7494341918787543350:2313], ActorState: ExecuteState, TraceId: 01js2e62zbezfmcstc9epfhbta, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:54:12.989184Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:54:13.076678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:13.158304Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341899600908115:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:13.158394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:13.240612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:13.282297Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341901607673782:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:13.282393Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:54:13.593671Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. 
Ctx: { TraceId: 01js2e63qnf4y3rqymte6s7nyt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM1Mjc3Mi1hYWJmMjY4Ny1lZGViMTdiYi1lOTdiM2Y5NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494341921075746055:3101] === CheckClustersList. Ok >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2025-04-17T17:54:24.544246Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000808/r3tmp/tmpgqnbBx/pdisk_1.dat 2025-04-17T17:54:24.647475Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.647566Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.652803Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.652889Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.657366Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:25.214939Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:549:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1296:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:25.215095Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.215137Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.215163Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.215189Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.215215Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# 
[72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.215242Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.215276Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:8:0:0:1296:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:25.215344Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG33 2025-04-17T17:54:25.215393Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG32 2025-04-17T17:54:25.215434Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG33 2025-04-17T17:54:25.215464Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG32 2025-04-17T17:54:25.215497Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG33 2025-04-17T17:54:25.215522Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG32 2025-04-17T17:54:25.215706Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:3] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.215771Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:2] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.215819Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:1] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.218760Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:25.218934Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-17T17:54:25.219032Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 
2025-04-17T17:54:25.219100Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-17T17:54:25.219155Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:25.219356Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.066 sample PartId# [72057594037932033:2:8:0:0:1296:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.067 sample PartId# [72057594037932033:2:8:0:0:1296:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.067 sample PartId# [72057594037932033:2:8:0:0:1296:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.051 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.185 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.281 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:25.275436Z node 1 :BS_PROXY_PUT INFO: [f913878b3da83702] bootstrap ActorId# [1:594:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:25.275620Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.275671Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.275707Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.275733Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.275772Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.275807Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.275849Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:25.275922Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-04-17T17:54:25.275969Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-04-17T17:54:25.276013Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-04-17T17:54:25.276042Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 1 to# 1 
blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-04-17T17:54:25.276075Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-04-17T17:54:25.276102Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-04-17T17:54:25.276251Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.276320Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.276367Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.290886Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-17T17:54:25.291153Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:25.291249Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutL ... 
own Marker# BPG51 2025-04-17T17:54:25.358694Z node 1 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:25.358837Z node 1 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-17T17:54:25.358881Z node 1 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-17T17:54:25.359001Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:599:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.361772Z node 1 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-17T17:54:25.361899Z node 1 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-17T17:54:25.361967Z node 1 :BS_PROXY_PUT INFO: [efc53170c63234c6] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:25.362096Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.643 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.449 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:25.362640Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-17T17:54:25.362690Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-17T17:54:25.362783Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-17T17:54:25.363382Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/6873/000808/r3tmp/tmpgqnbBx//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-17T17:54:25.364312Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-17T17:54:25.364357Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-17T17:54:25.366314Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:610:2105] Create Queue# [2:612:2106] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:25.366459Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:610:2105] Create Queue# [2:613:2107] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:25.366576Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:610:2105] Create Queue# [2:614:2108] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:25.366689Z 
node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:610:2105] Create Queue# [2:615:2109] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:25.366816Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:610:2105] Create Queue# [2:616:2110] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:25.366950Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:610:2105] Create Queue# [2:617:2111] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:25.367071Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:610:2105] Create Queue# [2:618:2112] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:25.367098Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-17T17:54:25.368191Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:25.368432Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:25.368497Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:25.368648Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:25.368728Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:25.368799Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:25.368848Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:25.368873Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-17T17:54:25.368906Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 
2025-04-17T17:54:25.369042Z node 2 :BS_PROXY_BLOCK DEBUG: [c85e1a21dcb31b54] bootstrap ActorId# [2:619:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-17T17:54:25.369098Z node 2 :BS_PROXY_BLOCK DEBUG: [c85e1a21dcb31b54] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-17T17:54:25.369277Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:612:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 5703791402319364857 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-17T17:54:25.370646Z node 2 :BS_PROXY_BLOCK DEBUG: [c85e1a21dcb31b54] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-17T17:54:25.370711Z node 2 :BS_PROXY_BLOCK DEBUG: [c85e1a21dcb31b54] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-17T17:54:25.370997Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-17T17:54:25.371191Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-17T17:54:25.371502Z node 1 :BS_PROXY_PUT INFO: [29f8d54199d206dd] bootstrap ActorId# [1:620:2513] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:25.371649Z node 1 :BS_PROXY_PUT DEBUG: [29f8d54199d206dd] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:25.371702Z node 1 :BS_PROXY_PUT DEBUG: [29f8d54199d206dd] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:25.371766Z node 1 :BS_PROXY_PUT DEBUG: [29f8d54199d206dd] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-04-17T17:54:25.371811Z node 1 :BS_PROXY_PUT DEBUG: [29f8d54199d206dd] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-04-17T17:54:25.371964Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:599:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:25.372184Z node 1 :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T17:54:25.372442Z node 1 :BS_PROXY_PUT INFO: [29f8d54199d206dd] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-17T17:54:25.372524Z node 1 :BS_PROXY_PUT ERROR: [29f8d54199d206dd] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-04-17T17:54:25.372580Z node 1 :BS_PROXY_PUT NOTICE: [29f8d54199d206dd] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:25.372689Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.626 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } ] } 2025-04-17T17:54:25.373065Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:612:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings |79.4%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2025-04-17T17:54:24.320582Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.321810Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.321902Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.323511Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.324675Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:24.330315Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0008dd/r3tmp/tmplUR91M/pdisk_1.dat Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2025-04-17T17:54:24.958568Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0008dd/r3tmp/tmpV7p1v0//new_pdisk.dat": no such file. PDiskId# 1001 2025-04-17T17:54:24.959096Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0008dd/r3tmp/tmpV7p1v0//new_pdisk.dat": no such file. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0008dd/r3tmp/tmpV7p1v0//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11843937974341841282 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1001 2025-04-17T17:54:24.990478Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] bootstrap ActorId# [1:543:2461] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:344:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:24.990650Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:344:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:24.990688Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:344:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:24.990716Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:344:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:24.990743Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:344:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:24.990778Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:344:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:24.990803Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:344:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:24.990839Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:344:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:24.990903Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:344:1] Marker# BPG33 2025-04-17T17:54:24.990951Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:344:1] Marker# BPG32 2025-04-17T17:54:24.990988Z node 1 :BS_PROXY_PUT DEBUG: 
[e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:344:2] Marker# BPG33 2025-04-17T17:54:24.991014Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:344:2] Marker# BPG32 2025-04-17T17:54:24.991041Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:344:3] Marker# BPG33 2025-04-17T17:54:24.991066Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:344:3] Marker# BPG32 2025-04-17T17:54:24.991203Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:344:3] FDS# 344 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:24.991265Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:344:2] FDS# 344 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:24.991309Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:344:1] FDS# 344 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:24.993387Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:344:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82708 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:24.993567Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:344:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82708 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-17T17:54:24.993655Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:344:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82708 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:24.993719Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:344:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-17T17:54:24.993780Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:344:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:24.993961Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.01 sample PartId# [72057594037932033:2:8:0:0:344:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.011 sample PartId# 
[72057594037932033:2:8:0:0:344:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.011 sample PartId# [72057594037932033:2:8:0:0:344:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.13 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.274 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.355 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } >> TBlobStorageWardenTest::TestDeleteStoragePool >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> BindQueue::Basic |79.4%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> TBlobStorageWardenTest::TestHttpMonPage |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2025-04-17T17:52:55.433451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:55.434044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:55.434130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00111d/r3tmp/tmpYNCuUs/pdisk_1.dat 2025-04-17T17:52:55.863746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:55.919079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:55.961094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:55.961254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:55.973442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:56.056190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.107498Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:56.108734Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:56.109225Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:56.109498Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:56.212608Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:56.213416Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:56.213512Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:56.215339Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:56.215419Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:56.215480Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:56.215852Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:56.215982Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:56.216045Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:56.226746Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:56.263097Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:56.263320Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:56.263467Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:56.263506Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:56.263545Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:56.263584Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.263851Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.263918Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.264196Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:56.264309Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:56.264465Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:56.264534Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:56.264595Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:56.264638Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:56.264698Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:56.264735Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:56.264799Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:56.264961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.269412Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.269500Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:56.270024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:56.270085Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:56.270304Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:56.270610Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:56.270677Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:56.270778Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:56.270834Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:56.270877Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:56.270920Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:52:56.270961Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.271295Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:56.271342Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:56.271383Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:56.271436Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.271506Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:56.271539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:56.271575Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:56.271610Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.271636Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:56.272930Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:56.272981Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:56.284651Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:56.284806Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.284846Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.284895Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:56.284976Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:56.443469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.443520Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.443555Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:56.443887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:56.443931Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:56.444032Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.444069Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:56.444109Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:56.444138Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:56.454304Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:56.454386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.455103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.455135Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.455178Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... de 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037888 has finished 2025-04-17T17:54:25.088040Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:25.088097Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:54:25.088154Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:25.088208Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:25.088428Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:884:2711], Recipient [13:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:25.088467Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:25.088515Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:25.088551Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:25.088582Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:25.088617Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-17T17:54:25.088650Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2025-04-17T17:54:25.088682Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-17T17:54:25.088710Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2025-04-17T17:54:25.088740Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2025-04-17T17:54:25.088774Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2025-04-17T17:54:25.088904Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-04-17T17:54:25.088953Z node 13 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-17T17:54:25.088984Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2025-04-17T17:54:25.089012Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:25.089041Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:54:25.089083Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2025-04-17T17:54:25.089119Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2025-04-17T17:54:25.089158Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2025-04-17T17:54:25.089200Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-17T17:54:25.089225Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:25.089250Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-17T17:54:25.089277Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-17T17:54:25.089372Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-17T17:54:25.089399Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-17T17:54:25.089437Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-17T17:54:25.089473Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-17T17:54:25.089500Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-17T17:54:25.089528Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-17T17:54:25.089553Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:54:25.089577Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:25.089712Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2025-04-17T17:54:25.089741Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:54:25.089777Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:54:25.089812Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:54:25.089843Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-17T17:54:25.089868Z node 13 :TX_DATASHARD TRACE: Advance execution plan 
for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:54:25.089895Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2025-04-17T17:54:25.089926Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:25.089960Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:25.089993Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:54:25.090030Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:54:25.101880Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-17T17:54:25.102029Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:25.102105Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:54:25.102225Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1081:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:25.102318Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:25.102761Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-17T17:54:25.102813Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:25.102846Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:25.102889Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1081:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:25.102936Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:25.104549Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:594:2519], Recipient [13:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-17T17:54:25.104726Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:25.104841Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:25.104976Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T17:54:25.105042Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:54:25.105104Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:25.105162Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:25.105211Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-17T17:54:25.105271Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
2025-04-17T17:54:25.105306Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:25.105331Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:25.105358Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:25.105515Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2025-04-17T17:54:25.105920Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2025-04-17T17:54:25.106007Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:594:2519], 1} after executionsCount# 1 2025-04-17T17:54:25.106131Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:594:2519], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:25.107848Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:594:2519], 1} finished in read 2025-04-17T17:54:25.107972Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T17:54:25.108010Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:25.108039Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:25.108071Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:25.108130Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T17:54:25.108158Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:25.108199Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-17T17:54:25.108277Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:54:25.108466Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> TS3WrapperTests::AbortUnknownUpload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] Test command err: 2025-04-17T17:52:55.387965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:55.388633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:55.388715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001077/r3tmp/tmpiC8O1h/pdisk_1.dat 2025-04-17T17:52:55.812382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:55.864421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:55.906208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:55.906384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:55.918406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:56.009937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.055486Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:56.056680Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:56.057116Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:56.057374Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:56.112596Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:56.113451Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:56.113586Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:56.115619Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:56.115710Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:56.115781Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:56.116169Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:56.116339Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:56.116418Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:56.134892Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:56.208477Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:56.208697Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:56.208831Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:56.208886Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:56.208925Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:56.208975Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.209221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.209285Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.209548Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:56.209650Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:56.209776Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:56.209833Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:56.209888Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:56.209922Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:56.209968Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:56.210005Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:56.210049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:56.210206Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.210254Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.210302Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:56.210707Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:56.210764Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:56.210888Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:56.211186Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:56.211253Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:56.211353Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:56.211403Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:56.211443Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:56.211478Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:52:56.211512Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.211816Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:56.211860Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:56.211901Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:56.211948Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.212002Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:56.212057Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:56.212104Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:56.212139Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.212164Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:56.213609Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:56.213662Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:56.226507Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:56.226591Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.226633Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.226678Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:56.226764Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:56.379920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.379976Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.380010Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:56.380406Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:56.380539Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:56.380652Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.380704Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:56.380766Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:56.380805Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:56.384584Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:56.384664Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.385494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.385530Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.385577Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... -04-17T17:54:25.915643Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.915668Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropIndexNotice 2025-04-17T17:54:25.915690Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveTable 2025-04-17T17:54:25.915715Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveTable 2025-04-17T17:54:25.915740Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.915763Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveTable 2025-04-17T17:54:25.915785Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveIndex 2025-04-17T17:54:25.915807Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveIndex 2025-04-17T17:54:25.915828Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.915850Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveIndex 2025-04-17T17:54:25.915892Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateCdcStream 2025-04-17T17:54:25.915921Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateCdcStream 2025-04-17T17:54:25.915947Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.915973Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateCdcStream 2025-04-17T17:54:25.915995Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit AlterCdcStream 2025-04-17T17:54:25.916021Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit AlterCdcStream 2025-04-17T17:54:25.916051Z node 13 :TX_DATASHARD TRACE: Execution status 
for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.916075Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit AlterCdcStream 2025-04-17T17:54:25.916096Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit DropCdcStream 2025-04-17T17:54:25.916120Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit DropCdcStream 2025-04-17T17:54:25.916144Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.916168Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropCdcStream 2025-04-17T17:54:25.916188Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateIncrementalRestoreSrc 2025-04-17T17:54:25.916215Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateIncrementalRestoreSrc 2025-04-17T17:54:25.916242Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.916265Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateIncrementalRestoreSrc 2025-04-17T17:54:25.916286Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompleteOperation 2025-04-17T17:54:25.916309Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:54:25.916689Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is DelayComplete 2025-04-17T17:54:25.916752Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompleteOperation 2025-04-17T17:54:25.916814Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:54:25.916871Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:54:25.916911Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-17T17:54:25.916934Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:54:25.916968Z node 13 :TX_DATASHARD TRACE: Execution plan for [2500:281474976715663] at 72075186224037890 has finished 2025-04-17T17:54:25.917030Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:25.917095Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T17:54:25.917153Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T17:54:25.917208Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T17:54:25.919898Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [13:24:2071], Recipient [13:985:2787]: {TEvRegisterTabletResult TabletId# 72075186224037890 Entry# 2000} 2025-04-17T17:54:25.919955Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 
2025-04-17T17:54:25.920020Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 2000 2025-04-17T17:54:25.920087Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:54:25.920807Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2500} 2025-04-17T17:54:25.920907Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:25.922847Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T17:54:25.922903Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CreateTable 2025-04-17T17:54:25.922970Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T17:54:25.923041Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-17T17:54:25.923098Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-04-17T17:54:25.923167Z node 13 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [13:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:25.923243Z node 13 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-17T17:54:25.923383Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T17:54:25.924910Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:686:2582], Recipient [13:884:2711]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:54:25.924950Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T17:54:25.925063Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:686:2582], Recipient [13:667:2571]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:54:25.925086Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:25.925223Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:686:2582], Recipient [13:985:2787]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:54:25.925246Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T17:54:25.925628Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [13:24:2071], Recipient [13:985:2787]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2500 ReadStep# 2500 } 2025-04-17T17:54:25.925666Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T17:54:25.925720Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2500 2025-04-17T17:54:25.925995Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1027:2820], Recipient [13:985:2787]: NKikimr::TEvTabletPipe::TEvClientConnected { 
TabletId: 72057594046644480 Status: OK ServerId: [13:1030:2823] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T17:54:25.926033Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T17:54:25.927503Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:410:2405], Recipient [13:985:2787]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715663 2025-04-17T17:54:25.927557Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T17:54:25.927643Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037890 state Ready 2025-04-17T17:54:25.927746Z node 13 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-17T17:54:25.933849Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1045:2832], Recipient [13:985:2787]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:25.933925Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:25.933981Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1044:2831], serverId# [13:1045:2832], sessionId# [0:0:0] 2025-04-17T17:54:25.934153Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [13:1043:2830], Recipient [13:985:2787]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:54:25.935558Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:594:2519], Recipient [13:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72075186224037888 TableId: 2 SchemaVersion: 1111 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:54:25.935867Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:25.935986Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:54:25.936172Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1047:2834], Recipient [13:985:2787]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:25.936216Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:25.936286Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1046:2833], serverId# [13:1047:2834], sessionId# [0:0:0] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] |79.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2025-04-17T17:54:26.980608Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.982553Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.983312Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): 
FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.984251Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.984983Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.985758Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.986433Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000671/r3tmp/tmp7AlYuV/pdisk_1.dat 2025-04-17T17:54:27.628852Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1293:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.629033Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.629083Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.629113Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.629153Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.629182Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.629210Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.629252Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1293:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.629330Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1293:1] Marker# BPG33 2025-04-17T17:54:27.629373Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1293:1] Marker# BPG32 2025-04-17T17:54:27.629415Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1293:2] Marker# BPG33 2025-04-17T17:54:27.629440Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1293:2] Marker# BPG32 2025-04-17T17:54:27.629487Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] 
partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1293:3] Marker# BPG33 2025-04-17T17:54:27.629510Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1293:3] Marker# BPG32 2025-04-17T17:54:27.629704Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1293:3] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.629780Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1293:2] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.629829Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1293:1] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.639137Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1293:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:27.639315Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1293:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-17T17:54:27.639379Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1293:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:27.639434Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1293:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-17T17:54:27.639479Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1293:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:27.639635Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.132 sample PartId# [72057594037932033:2:8:0:0:1293:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.133 sample PartId# [72057594037932033:2:8:0:0:1293:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.133 sample PartId# [72057594037932033:2:8:0:0:1293:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 10.468 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 10.608 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } 
TEvVPutResult{ TimestampMs# 10.67 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:27.662359Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2025-04-17T17:54:27.664603Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2025-04-17T17:54:27.664904Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2025-04-17T17:54:27.665018Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2025-04-17T17:54:26.575748Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0007e2/r3tmp/tmpQbwyd0/pdisk_1.dat 2025-04-17T17:54:26.665371Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.665488Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.673916Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.673999Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.683071Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 
2025-04-17T17:54:27.249983Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:549:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1296:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.250159Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.250220Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.250249Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.250281Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.250306Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.250333Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.250373Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:8:0:0:1296:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.250444Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG33 2025-04-17T17:54:27.250494Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG32 2025-04-17T17:54:27.250535Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG33 2025-04-17T17:54:27.250561Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG32 2025-04-17T17:54:27.250592Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG33 2025-04-17T17:54:27.250619Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG32 2025-04-17T17:54:27.250811Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:3] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.250887Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:2] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.250933Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:1] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.271235Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:2] {MsgQoS 
MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:27.271591Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-17T17:54:27.271712Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:27.271783Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-17T17:54:27.271841Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:27.272007Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.166 sample PartId# [72057594037932033:2:8:0:0:1296:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.166 sample PartId# [72057594037932033:2:8:0:0:1296:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.167 sample PartId# [72057594037932033:2:8:0:0:1296:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 21.522 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 21.835 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 21.946 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:27.350784Z node 1 :BS_PROXY_PUT INFO: [f913878b3da83702] bootstrap ActorId# [1:594:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.350920Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.350959Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.350982Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.351007Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.351028Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# 
ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.351049Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.351081Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.351139Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-04-17T17:54:27.351178Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-04-17T17:54:27.351214Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-04-17T17:54:27.351237Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-04-17T17:54:27.351262Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-04-17T17:54:27.351283Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-04-17T17:54:27.351440Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.351501Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.351541Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.357535Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-17T17:54:27.357799Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:27.357898Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntP ... 
0] Create Queue# [1:600:2504] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.400955Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:595:2500] Create Queue# [1:601:2505] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.401056Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:595:2500] Create Queue# [1:602:2506] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.401155Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:595:2500] Create Queue# [1:603:2507] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.401255Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:595:2500] Create Queue# [1:604:2508] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.401356Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:595:2500] Create Queue# [1:605:2509] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.401384Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-17T17:54:27.401910Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.402001Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.402108Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.406278Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.406469Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.406537Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.406588Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} 
Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.406613Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-17T17:54:27.406645Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-17T17:54:27.406688Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-17T17:54:27.407431Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [1:606:2510] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.407542Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.407595Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.407637Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-17T17:54:27.407663Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-17T17:54:27.407761Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:599:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.416354Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-17T17:54:27.416468Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-17T17:54:27.416514Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:27.416615Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.46 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 9.089 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:27.417124Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-17T17:54:27.417166Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-17T17:54:27.417254Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# 
DSP17 2025-04-17T17:54:27.417759Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/6873/0007e2/r3tmp/tmpQbwyd0//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-17T17:54:27.418691Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-17T17:54:27.418734Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-17T17:54:27.420549Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:608:2105] Create Queue# [2:610:2106] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.420670Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:608:2105] Create Queue# [2:611:2107] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.420781Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:608:2105] Create Queue# [2:612:2108] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.420883Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:608:2105] Create Queue# [2:613:2109] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.420990Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:608:2105] Create Queue# [2:614:2110] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.421096Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:608:2105] Create Queue# [2:615:2111] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.421203Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:608:2105] Create Queue# [2:616:2112] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.421227Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-17T17:54:27.426465Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.426616Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.426812Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.426873Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.426926Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 
127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.427051Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.427144Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.427177Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-17T17:54:27.427204Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-17T17:54:27.427411Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:610:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2025-04-17T17:54:26.740342Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.742118Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.743249Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.744099Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.744697Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.745316Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.745907Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006a8/r3tmp/tmphF2LzA/pdisk_1.dat 2025-04-17T17:54:27.572373Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap 
ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1291:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.572523Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.572558Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.572579Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.572603Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.572629Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.572647Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.572673Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1291:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.572736Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG33 2025-04-17T17:54:27.572802Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG32 2025-04-17T17:54:27.572837Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG33 2025-04-17T17:54:27.572863Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG32 2025-04-17T17:54:27.572892Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG33 2025-04-17T17:54:27.572910Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG32 2025-04-17T17:54:27.573047Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:3] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.573104Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:2] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.573149Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:1] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.577431Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# 
IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-17T17:54:27.577711Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:27.577806Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:27.577894Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-17T17:54:27.577955Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:27.578167Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.953 sample PartId# [72057594037932033:2:8:0:0:1291:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.953 sample PartId# [72057594037932033:2:8:0:0:1291:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.954 sample PartId# [72057594037932033:2:8:0:0:1291:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 5.3 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 5.524 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 5.615 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:27.642919Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:525:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.643081Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.643123Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.643152Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.643178Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.643204Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.643230Z node 1 :BS_PROXY_PUT 
DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.643273Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.643363Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-04-17T17:54:27.643405Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-04-17T17:54:27.643447Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-04-17T17:54:27.643472Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-04-17T17:54:27.643500Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-04-17T17:54:27.643525Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-04-17T17:54:27.643703Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.643764Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.643809Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.646485Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:27.646673Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 ... 
Y_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:10:0:0:238:1] Marker# BPG32 2025-04-17T17:54:27.686710Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG33 2025-04-17T17:54:27.686736Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG32 2025-04-17T17:54:27.686763Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG33 2025-04-17T17:54:27.686789Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG32 2025-04-17T17:54:27.686935Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:3] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.686999Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:2] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.687045Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:1] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.700803Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 11 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 12 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-17T17:54:27.700992Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-17T17:54:27.701047Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:27.701099Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Result# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-17T17:54:27.701141Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:27.701265Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ 
TEvVPut{ TimestampMs# 1.038 sample PartId# [72057594037932033:2:10:0:0:238:3] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.039 sample PartId# [72057594037932033:2:10:0:0:238:2] QueryCount# 1 VDiskId# [2000000:1:0:2:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.039 sample PartId# [72057594037932033:2:10:0:0:238:1] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 14.833 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 14.979 VDiskId# [2000000:1:0:2:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 15.035 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:27.705404Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T17:54:27.705478Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T17:54:27.707363Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:530:2503] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.707506Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:531:2504] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.707635Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:532:2505] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.707723Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:533:2506] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.707803Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:534:2507] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.707868Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:535:2508] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.707932Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:536:2509] targetNodeId# 1 Marker# DSP01 2025-04-17T17:54:27.707949Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-17T17:54:27.708408Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.708507Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.708552Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.708596Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 
MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.708639Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.708672Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.708705Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:27.708720Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-17T17:54:27.708740Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-17T17:54:27.708828Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] bootstrap ActorId# [1:537:2510] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-17T17:54:27.708859Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-17T17:54:27.709017Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 2687731018470054797 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-17T17:54:27.709755Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-17T17:54:27.709794Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-17T17:54:27.710029Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2025-04-17T17:54:27.710912Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-17T17:54:27.711578Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] bootstrap ActorId# [1:539:2512] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-17T17:54:27.711631Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-17T17:54:27.711811Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 16052456455717933015 MsgQoS { 
ExtQueueId: PutTabletLog } cookie# 0 2025-04-17T17:54:27.712536Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-17T17:54:27.712589Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-17T17:54:27.712912Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] bootstrap ActorId# [1:540:2513] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-17T17:54:27.712962Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-17T17:54:27.713093Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 6231241553533389705 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-17T17:54:27.713743Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-17T17:54:27.713790Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Result# TEvBlockResult {Status# OK} Marker# DSPB04 |79.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2025-04-17T17:54:28.022931Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 3A200E76-E75F-4391-8C76-810E73459D87, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:16448 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01C55963-2E75-4F26-AD26-458E1F375C7C amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2025-04-17T17:54:28.035055Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 3A200E76-E75F-4391-8C76-810E73459D87, response# >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingBetween >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] |79.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink >> TS3WrapperTests::PutObject [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2025-04-17T17:54:29.490385Z node 1 :S3_WRAPPER NOTICE: Request: uuid# C5CD0B5D-AE7C-4A8E-B41A-5698E2439902, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:10280 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 89AF38EB-2CA1-46E2-8C1C-B9EA0346F979 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-17T17:54:29.495168Z node 1 :S3_WRAPPER NOTICE: Response: uuid# C5CD0B5D-AE7C-4A8E-B41A-5698E2439902, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2025-04-17T17:54:27.790160Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:27.792017Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:27.792666Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:27.793494Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:27.794205Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:27.795153Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:27.795794Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000566/r3tmp/tmpP7SnU4/pdisk_1.dat 2025-04-17T17:54:28.863807Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:28.869091Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:28.869893Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:28.871316Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:28.871419Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:28.871467Z 
node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000566/r3tmp/tmpKSL9yH/pdisk_1.dat >> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2025-04-17T17:54:26.992193Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.993755Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.994610Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.995483Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.996126Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.996764Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T17:54:26.997470Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000605/r3tmp/tmp7fVhjJ/pdisk_1.dat 2025-04-17T17:54:27.670941Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1293:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.671097Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.671132Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.671154Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.671175Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.671197Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 3 part# 
1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.671221Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1293:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.671254Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1293:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.671333Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1293:1] Marker# BPG33 2025-04-17T17:54:27.671373Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1293:1] Marker# BPG32 2025-04-17T17:54:27.671410Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1293:2] Marker# BPG33 2025-04-17T17:54:27.671432Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1293:2] Marker# BPG32 2025-04-17T17:54:27.671459Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1293:3] Marker# BPG33 2025-04-17T17:54:27.671481Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1293:3] Marker# BPG32 2025-04-17T17:54:27.671671Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1293:3] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.671741Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1293:2] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.671784Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1293:1] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.683014Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1293:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:27.683229Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1293:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-17T17:54:27.683326Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1293:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-17T17:54:27.683420Z node 1 :BS_PROXY_PUT DEBUG: 
[084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1293:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-17T17:54:27.683485Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1293:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:27.683689Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.024 sample PartId# [72057594037932033:2:8:0:0:1293:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.025 sample PartId# [72057594037932033:2:8:0:0:1293:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.025 sample PartId# [72057594037932033:2:8:0:0:1293:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 12.31 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 12.472 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 12.586 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-17T17:54:27.754771Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:525:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:27.754925Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.754967Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.754993Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.755019Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.755043Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.755068Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:27.755100Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:27.755172Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-04-17T17:54:27.755234Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-04-17T17:54:27.755279Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-04-17T17:54:27.755304Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# 
BPG32 2025-04-17T17:54:27.755331Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-04-17T17:54:27.755356Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-04-17T17:54:27.755515Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.755598Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.755644Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:27.778799Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-17T17:54:27.779002Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BP ... 
wn Marker# BPG51 2025-04-17T17:54:29.381330Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:29.381387Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-17T17:54:29.381426Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-17T17:54:29.381550Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:29.384730Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-17T17:54:29.384943Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-17T17:54:29.385010Z node 2 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:29.385135Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.554 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 3.758 VDiskId# [82000002:1:0:0:0] NodeId# 2 Status# OK } ] } 2025-04-17T17:54:29.385688Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-17T17:54:29.385733Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-17T17:54:29.385821Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-17T17:54:29.386457Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/6873/000605/r3tmp/tmp4w0rjy//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-17T17:54:29.387346Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-17T17:54:29.387393Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-17T17:54:29.389313Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:607:2106] targetNodeId# 2 Marker# DSP01 2025-04-17T17:54:29.389467Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:608:2107] targetNodeId# 2 Marker# DSP01 2025-04-17T17:54:29.389580Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:609:2108] targetNodeId# 2 Marker# DSP01 2025-04-17T17:54:29.389708Z node 
3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:610:2109] targetNodeId# 2 Marker# DSP01 2025-04-17T17:54:29.389814Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:611:2110] targetNodeId# 2 Marker# DSP01 2025-04-17T17:54:29.389927Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:612:2111] targetNodeId# 2 Marker# DSP01 2025-04-17T17:54:29.390050Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:613:2112] targetNodeId# 2 Marker# DSP01 2025-04-17T17:54:29.390077Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-17T17:54:29.391216Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:29.391397Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:29.391491Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:29.391699Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:29.391769Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:29.391822Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:29.391874Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-17T17:54:29.391901Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-17T17:54:29.391932Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 
2025-04-17T17:54:29.392073Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [3:614:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-17T17:54:29.392124Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2025-04-17T17:54:29.392301Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:607:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 16091412869073006631 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-17T17:54:29.393579Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2025-04-17T17:54:29.393671Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-17T17:54:29.393998Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-17T17:54:29.394194Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-17T17:54:29.394551Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [2:615:2513] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-17T17:54:29.394704Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T17:54:29.394752Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T17:54:29.394813Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-04-17T17:54:29.394853Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-04-17T17:54:29.395002Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T17:54:29.395230Z node 2 :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T17:54:29.395519Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-17T17:54:29.395621Z node 2 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-04-17T17:54:29.395675Z node 2 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T17:54:29.395777Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.632 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } ] } 2025-04-17T17:54:29.396136Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:607:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0
>> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD]
>> DataShardVolatile::DistributedWriteThenSplit
>> TBackupTests::ShouldSucceedOnMultiShardTable[Raw]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43
>> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd]
>> TBackupTests::ShouldSucceedOnSingleShardTable[Raw]
>> TBackupTests::BackupUuidColumn[Zstd]
|79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest
>> TBackupTests::ShouldSucceedOnLargeData[Zstd]
>> DataShardVolatile::DistributedWrite [GOOD]
>> DataShardVolatile::DistributedWriteBrokenLock
>> SystemView::StoragePoolsRanges [GOOD]
>> SystemView::SystemViewFailOps
>> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink [GOOD]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD]
Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00144c/r3tmp/tmpPlCpDI/pdisk_1.dat 2025-04-17T17:51:20.443439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:20.526334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:20.526447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:20.529698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16981, node 1 2025-04-17T17:51:20.594730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:20.704392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:20.704412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:20.704417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:20.704561Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:51:20.785344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:20.818758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:20.852507Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7494341181119407570:2308] 2025-04-17T17:51:20.852853Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:20.870894Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:51:20.870994Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:20.872764Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:51:20.872804Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:51:20.872830Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:51:20.873232Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:20.873286Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:20.873310Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7494341181119407584:2308] in generation 1 2025-04-17T17:51:20.907082Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:20.988855Z node 1 :TX_DATASHARD INFO: Switched to work state
WaitScheme tabletId 72075186224037888 2025-04-17T17:51:20.989020Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:51:20.989059Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7494341181119407589:2309] 2025-04-17T17:51:20.989083Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:20.989092Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:51:20.989113Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:20.989538Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:51:20.989614Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:51:20.989636Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:20.989653Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:20.989679Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:51:20.989692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:20.998861Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7494341181119407562:2299], serverId# [1:7494341181119407586:2310], sessionId# [0:0:0] 2025-04-17T17:51:20.999064Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:51:20.999421Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:51:20.999532Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-17T17:51:21.006744Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:21.007571Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:51:21.007649Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:51:21.012443Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7494341185414374899:2320], serverId# [1:7494341185414374901:2322], sessionId# [0:0:0] 2025-04-17T17:51:21.017357Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744912281056 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912281056 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:51:21.017407Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:21.017530Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:21.017590Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:21.017609Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:51:21.017630Z 
node 1 :TX_DATASHARD DEBUG: Found ready operation [1744912281056:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-17T17:51:21.017889Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744912281056:281474976710657 keys extracted: 0 2025-04-17T17:51:21.018019Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:51:21.018834Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:21.018917Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:51:21.021328Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:51:21.021823Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:21.026153Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744912281055 2025-04-17T17:51:21.026201Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:21.026253Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744912281056} 2025-04-17T17:51:21.026308Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:21.026355Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744912281063 2025-04-17T17:51:21.028082Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:21.028106Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:21.028155Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:51:21.028196Z node 1 :TX_DATASHARD DEBUG: Complete [1744912281056 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7494341181119407416:2207], exec latency: 3 ms, propose latency: 10 ms 2025-04-17T17:51:21.028232Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-17T17:51:21.028272Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:21.029758Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7494341181119407589:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-17T17:51:21.035954Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-17T17:51:21.036015Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:51:21.053262Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:21.053394Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:51:21.053501Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 
2025-04-17T17:51:21.053525Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-17T17:51:21.053532Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-17T17:51:21.075975Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:51:21.120188Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:51:21.123777Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-17T17:51:21.124065Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:51:21.124357Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-17T17:51:21.124379Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:51:21.124401Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-17T17:51:21.124417Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:51:21.124441Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:21.124485Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-17T17:51:21.126953Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7494341185414374951:2362], now have 1 active actors on pipe 2025-04-17T17:51:21.126980Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7494341185414375006:2311], now have 1 active actors on pipe 2025-04-17T17:51:21.171213Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvPersQueu ... ASHARD INFO: OnTabletDead: 72075186224037892 2025-04-17T17:54:27.994573Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037891 not found 2025-04-17T17:54:27.995112Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037892 not found 2025-04-17T17:54:28.022799Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [24:1266:2645], now have 1 active actors on pipe ... release register requests ... 
wait for merge tx notification 2025-04-17T17:54:28.045059Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:28.045221Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:28.045751Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:54:28.045872Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:54:28.045987Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:54:28.046115Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037893 2025-04-17T17:54:28.046209Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:54:28.046278Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:54:28.046392Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:54:28.046611Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:54:28.048087Z node 24 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-17T17:54:28.051651Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:28.051796Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:28.052768Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2025-04-17T17:54:28.052891Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-04-17T17:54:28.053047Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-17T17:54:28.053134Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:54:28.053904Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:54:28.065528Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:54:28.065822Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:54:28.066151Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:28.066309Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:28.066546Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:54:28.066929Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1317:3041] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|725aeba4-f229c5ce-466e3720-2384930e_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2025-04-17T17:54:28.067149Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1314:3041] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-17T17:54:28.067411Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1317:3041] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-17T17:54:28.067852Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:28.067898Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:28.068015Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-17T17:54:28.068145Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:28.068178Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:28.068277Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2025-04-17T17:54:28.068529Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2025-04-17T17:54:28.068701Z node 24 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-17T17:54:28.068946Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-17T17:54:28.069885Z node 24 :PERSQUEUE DEBUG: 
[PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-04-17T17:54:28.071419Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6505 2025-04-17T17:54:28.071780Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:54:28.071886Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:54:28.071992Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:54:28.072096Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:54:28.072196Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-04-17T17:54:28.072256Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-17T17:54:28.072285Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:54:28.072347Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:54:28.072456Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:54:28.072629Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:54:28.072837Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 93 2025-04-17T17:54:28.074436Z node 24 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 93 actorID [24:1286:3021] 2025-04-17T17:54:28.074740Z node 24 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 size 93 2025-04-17T17:54:28.085237Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:54:28.085440Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:54:28.085635Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2025-04-17T17:54:28.086132Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T17:54:28.086551Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1317:3041] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-17T17:54:28.086694Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1314:3041] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-17T17:54:28.086911Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2025-04-17T17:54:28.086997Z node 24 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2025-04-17T17:54:28.102454Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-17T17:54:28.514397Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:28.514481Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:28.514629Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-04-17T17:54:28.514668Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-04-17T17:54:28.514725Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
2025-04-17T17:54:28.514758Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:54:28.514888Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
>> TBackupTests::BackupUuidColumn[Raw]
>> BindQueue::Basic [GOOD]
>> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD]
|79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest
>> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD]
>> TBackupTests::BackupUuidColumn[Zstd] [GOOD]
>> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD]
>> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD]
>> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD]
>> TBackupTests::ShouldSucceedOnLargeData[Raw]
>> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD]
>> DataShardReadIteratorBatchMode::RangeFromNonInclusive
>> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch
>> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD]
Test command err: Pick Disable nodeId# 85 Pick Disable nodeId# 83 Enable nodeId# 83 Delete nodeId# 18 Pick Enable nodeId# 85 Pick Disable nodeId# 43 Delete nodeId# 63 Pick Pick Pick Pick Disable nodeId# 45 Pick Pick Enable nodeId# 45 Delete nodeId# 1 Disable nodeId# 25 Disable nodeId# 15 Delete nodeId# 10 Enable nodeId# 43 Enable nodeId# 25 Pick Pick Disable nodeId# 54 Pick Disable nodeId# 100 Add nodeId# 101 Pick Add nodeId# 102 Enable nodeId# 54 Enable nodeId# 15 Pick Pick Enable nodeId# 100 Add nodeId# 103 Delete nodeId# 41 Add nodeId# 104 Delete nodeId# 93 Delete nodeId# 35 Add nodeId# 105 Delete nodeId# 50 Disable nodeId# 88 Delete nodeId# 77 Enable nodeId# 88 Disable nodeId# 85 Pick Delete nodeId# 91 Pick Enable nodeId# 85 Add nodeId# 106 Pick Add nodeId# 107 Add nodeId# 108 Pick Pick Pick Delete nodeId# 92 Pick Pick Pick Delete nodeId# 57 Pick Delete nodeId# 95 Add nodeId# 109 Disable nodeId# 90 Add nodeId# 110 Add nodeId# 111 Delete nodeId# 97 Disable nodeId# 38 Add nodeId# 112 Disable nodeId# 6 Pick Add nodeId# 113 Pick Enable nodeId# 6 Delete nodeId# 70 Enable nodeId# 90 Delete nodeId# 66 Delete nodeId# 32 Disable nodeId# 11 Delete nodeId# 29 Pick Add nodeId# 114 Pick Disable nodeId# 33 Pick Enable nodeId# 38 Delete nodeId# 44 Enable nodeId# 33 Enable nodeId# 11 Delete nodeId# 30 Disable nodeId# 108 Enable nodeId# 108 Delete nodeId# 39 Delete nodeId# 19 Delete nodeId# 25 Delete nodeId# 113 Add nodeId# 115 Delete nodeId# 28 Add nodeId# 116 Add nodeId# 117 Delete nodeId# 88 Delete nodeId# 8 Disable nodeId# 23 Pick Enable nodeId# 23 Pick Delete nodeId# 94 Disable nodeId# 60 Pick Disable nodeId# 111 Delete nodeId# 112 Disable nodeId# 103 Pick Enable nodeId# 103 Pick Delete nodeId# 102 Delete nodeId# 49 Add nodeId# 118 Enable nodeId# 111 Enable nodeId# 60 Add nodeId# 119 Delete nodeId# 80 Add nodeId# 120 Pick Disable nodeId# 98 Enable nodeId# 98 Pick Pick Pick Pick Pick Delete nodeId# 110 Delete nodeId# 11 Delete nodeId# 34 Delete nodeId# 107 Disable nodeId# 64 Add nodeId# 121 Delete nodeId# 38 Disable nodeId# 21 Add nodeId# 122 Add nodeId# 123 Pick Delete nodeId# 48 Delete nodeId# 17 Add nodeId# 124 Pick Enable nodeId# 21 Delete nodeId# 15 Add nodeId# 125 Disable nodeId# 47 Enable nodeId# 64 Pick Disable nodeId# 7 Disable nodeId# 119 Pick Enable nodeId# 119 Disable nodeId# 98 Disable nodeId# 52 Add nodeId# 126 Enable nodeId# 7 Pick Enable nodeId# 98 Disable
nodeId# 59 Delete nodeId# 55 Disable nodeId# 125 Delete nodeId# 54 Delete nodeId# 67 Enable nodeId# 59 Enable nodeId# 47 Add nodeId# 127 Delete nodeId# 36 Delete nodeId# 103 Enable nodeId# 52 Delete nodeId# 4 Delete nodeId# 98 Disable nodeId# 121 Add nodeId# 128 Disable nodeId# 119 Disable nodeId# 69 Disable nodeId# 51 Enable nodeId# 121 Delete nodeId# 74 Enable nodeId# 69 Add nodeId# 129 Add nodeId# 130 Disable nodeId# 78 Enable nodeId# 119 Enable nodeId# 51 Add nodeId# 131 Pick Disable nodeId# 65 Add nodeId# 132 Disable nodeId# 23 Enable nodeId# 65 Add nodeId# 133 Enable nodeId# 125 Enable nodeId# 23 Disable nodeId# 56 Pick Pick Delete nodeId# 37 Pick Pick Pick Disable nodeId# 61 Enable nodeId# 61 Add nodeId# 134 Disable nodeId# 2 Enable nodeId# 2 Add nodeId# 135 Disable nodeId# 86 Add nodeId# 136 Enable nodeId# 78 Delete nodeId# 131 Enable nodeId# 86 Pick Disable nodeId# 65 Enable nodeId# 65 Add nodeId# 137 Pick Disable nodeId# 83 Add nodeId# 138 Pick Add nodeId# 139 Add nodeId# 140 Add nodeId# 141 Delete nodeId# 27 Add nodeId# 142 Pick Enable nodeId# 83 Delete nodeId# 90 Add nodeId# 143 Add nodeId# 144 Disable nodeId# 134 Pick Add nodeId# 145 Add nodeId# 146 Add nodeId# 147 Pick Pick Add nodeId# 148 Delete nodeId# 76 Delete nodeId# 86 Disable nodeId# 129 Add nodeId# 149 Add nodeId# 150 Disable nodeId# 150 Pick Delete nodeId# 117 Enable nodeId# 150 Enable nodeId# 134 Add nodeId# 151 Delete nodeId# 16 Disable nodeId# 43 Add nodeId# 152 Delete nodeId# 5 Add nodeId# 153 Disable nodeId# 149 Delete nodeId# 143 Pick Add nodeId# 154 Disable nodeId# 82 Pick Pick Disable nodeId# 96 Disable nodeId# 53 Enable nodeId# 43 Delete nodeId# 21 Delete nodeId# 150 Pick Pick Pick Delete nodeId# 128 Add nodeId# 155 Add nodeId# 156 Delete nodeId# 154 Enable nodeId# 96 Pick Add nodeId# 157 Enable nodeId# 53 Enable nodeId# 56 Enable nodeId# 149 Enable nodeId# 129 Enable nodeId# 82 Add nodeId# 158 Disable nodeId# 33 Disable nodeId# 78 Pick Disable nodeId# 151 Enable nodeId# 151 Disable nodeId# 142 Disable nodeId# 12 Disable nodeId# 56 Delete nodeId# 144 Enable nodeId# 78 Delete nodeId# 42 Enable nodeId# 56 Enable nodeId# 142 Add nodeId# 159 Add nodeId# 160 Disable nodeId# 9 Add nodeId# 161 Add nodeId# 162 Add nodeId# 163 Enable nodeId# 9 Disable nodeId# 45 Pick Pick Delete nodeId# 3 Pick Disable nodeId# 156 Enable nodeId# 45 Add nodeId# 164 Disable nodeId# 100 Delete nodeId# 101 Pick Add nodeId# 165 Pick Delete nodeId# 163 Delete nodeId# 165 Enable nodeId# 156 Pick Pick Pick Enable nodeId# 12 Delete nodeId# 155 Enable nodeId# 33 Delete nodeId# 106 Delete nodeId# 127 Pick Disable nodeId# 160 Enable nodeId# 100 Add nodeId# 166 Delete nodeId# 136 Disable nodeId# 105 Pick Pick Pick Pick Enable nodeId# 105 Disable nodeId# 114 Pick Disable nodeId# 137 Add nodeId# 167 Pick Enable nodeId# 160 Enable nodeId# 114 Disable nodeId# 126 Delete nodeId# 149 Pick Enable nodeId# 126 Pick Delete nodeId# 156 Enable nodeId# 137 Pick Disable nodeId# 120 Disable nodeId# 167 Pick Enable nodeId# 167 Add nodeId# 168 Delete nodeId# 96 Disable nodeId# 52 Pick Delete nodeId# 52 Add nodeId# 169 Pick Enable nodeId# 120 Disable nodeId# 162 Add nodeId# 170 Enable nodeId# 162 Disable nodeId# 148 Enable nodeId# 148 Add nodeId# 171 Pick Add nodeId# 172 Delete nodeId# 20 Pick Delete nodeId# 167 Add nodeId# 173 Add nodeId# 174 Add nodeId# 175 Disable nodeId# 169 Enable nodeId# 169 Delete nodeId# 121 Delete nodeId# 99 Disable nodeId# 68 Add nodeId# 176 Add nodeId# 177 Delete nodeId# 148 Delete nodeId# 126 Enable nodeId# 68 Delete nodeId# 122 Delete 
nodeId# 6 Pick Disable nodeId# 100 Disable nodeId# 145 Add nodeId# 178 Pick Add nodeId# 179 Disable nodeId# 56 Pick Enable nodeId# 145 Delete nodeId# 172 Add nodeId# 180 Add nodeId# 181 Enable nodeId# 56 Add nodeId# 182 Disable nodeId# 75 Disable nodeId# 173 Delete nodeId# 115 Pick Enable nodeId# 75 Add nodeId# 183 Pick Disable nodeId# 109 Disable nodeId# 104 Enable nodeId# 173 Add nodeId# 184 Enable nodeId# 100 Add nodeId# 185 Delete nodeId# 183 Disable nodeId# 75 Delete nodeId# 133 Add nodeId# 186 Delete nodeId# 72 Disable nodeId# 62 Enable nodeId# 62 Pick Delete nodeId# 83 Delete nodeId# 171 Delete nodeId# 2 Add nodeId# 187 Add nodeId# 188 Add nodeId# 189 Add nodeId# 190 Disable nodeId# 130 Add nodeId# 191 Add nodeId# 192 Delete nodeId# 56 Delete nodeId# 132 Disable nodeId# 116 Add nodeId# 193 Enable nodeId# 116 Delete nodeId# 170 Add nodeId# 194 Enable nodeId# 109 Delete nodeId# 175 Add nodeId# 195 Delete nodeId# 189 Pick Pick Add nodeId# 196 Delete nodeId# 168 Add nodeId# 197 Pick Enable nodeId# 104 Add nodeId# 198 Delete nodeId# 73 Enable nodeId# 130 Disable nodeId# 87 Delete nodeId# 135 Add nodeId# 199 Disable nodeId# 119 Pick Delete nodeId# 195 Enable nodeId# 87 Delete nodeId# 140 Add nodeId# 200 Enable nodeId# 75 Delete nodeId# 187 Enable nodeId# 119 Pick Disable nodeId# 198 Add nodeId# 201 Add nodeId# 202 Disable nodeId# 123 Delete nodeId# 46 Pick Disable nodeId# 24 Delete nodeId# 89 Add nodeId# 203 Delete nodeId# 142 Pick Pick Delete nodeId# 177 Delete nodeId# 129 Delete nodeId# 53 Pick Add nodeId# 204 Delete nodeId# 40 Add nodeId# 205 Add nodeId# 206 Enable nodeId# 123 Add nodeId# 207 Disable nodeId# 43 Disable nodeId# 181 Pick Add nodeId# 208 Disable nodeId# 196 Delete nodeId# 31 Enable nodeId# 196 Disable nodeId# 87 Disable nodeId# 134 Enable nodeId# 198 Add nodeId# 209 Delete nodeId# 111 Pick Pick Add nodeId# 210 Pick Disable nodeId# 200 Pick Pick Disable nodeId# 146 Disable nodeId# 182 Enable nodeId# 146 Add nodeId# 211 Delete nodeId# 119 Enable nodeId# 200 Enable nodeId# 182 Disable nodeId# 176 Pick Add nodeId# 212 Delete nodeId# 181 Pick Enable nodeId# 87 Disable nodeId# 9 Pick Add nodeId# 213 Enable nodeId# 134 Disable nodeId# 69 Add nodeId# 214 Pick Enable nodeId# 69 Enable nodeId# 24 Pick Enable nodeId# 9 Enable nodeId# 176 Disable nodeId# 212 Delete nodeId# 188 Enable nodeId# 43 Delete nodeId# 84 Delete nodeId# 43 Enable nodeId# 212 Add nodeId# 215 Disable nodeId# 104 Add nodeId# 216 Pick Enable nodeId# 104 Pick Disable nodeId# 206 Enable nodeId# 206 Disable nodeId# 204 Enable nodeId# 204 Delete nodeId# 130 Pick Delete nodeId# 178 Disable nodeId# 68 Disable nodeId# 158 Add nodeId# 217 Enable nodeId# 68 Pick Disable nodeId# 45 Delete nodeId# 204 Disable nodeId# 139 Delete nodeId# 215 Add nodeId# 218 Enable nodeId# 139 Delete nodeId# 87 Pick Enable nodeId# 158 Enable nodeId# 45 Add nodeId# 219 Add nodeId# 220 Disable nodeId# 184 Pick Disable nodeId# 197 Pick Disable nodeId# 203 Disable nodeId# 158 Enable nodeId# 203 Disable nodeId# 193 Enable nodeId# 197 Enable nodeId# 193 Enable nodeId# 158 Pick Enable nodeId# 184 Disable nodeId# 65 Delete nodeId# 186 Enable nodeId# 65 Add nodeId# 221 Pick Delete nodeId# 190 Delete nodeId# 200 Disable nodeId# 211 Delete nodeId# 207 Pick Enable nodeId# 211 Pick Disable nodeId# 64 Pick Delete nodeId# 201 Enable nodeId# 64 Disable nodeId# 138 Delete nodeId# 184 Delete nodeId# 173 Delete nodeId# 193 Pick Disable nodeId# 139 Delete nodeId# 217 Pick Pick Disable nodeId# 104 Add nodeId# 222 Delete nodeId# 216 Add nodeId# 223 Disable nodeId# 
192 Add nodeId# 224 Enable nodeId# 104 Enable nodeId# 139 Disable nodeId# 65 Enable nodeId# 65 Disable nodeId# 60 Add nodeId# 225 Pick Enable nodeId# 60 Disable nodeId# 198 Pick Add nodeId# 226 Disable nodeId# 45 Add nodeId# 227 Delete nodeId# 199 Enable nodeId# 45 Delete nodeId# 176 Pick Delete nodeId# 23 Enable nodeId# 198 Add nodeId# 228 Pick Add nodeId# 229 Enable nodeId# 192 Add nodeId# 230 Enable nodeId# 138 Pick Delete nodeId# 205 Disable nodeId# 69 Enable nodeId# 69 Disable nodeId# 60 Pick Delete nodeId# 229 Delete nodeId# 151 Enable nodeId# 60 Pick Add nodeId# 231 Add nodeId# 232 Delete nodeId# 79 Add nodeId# 233 Pick Disable nodeId# 69 Disable nodeId# 191 Delete nodeId# 58 Disable nodeId# 47 Disable nodeId# 202 Enable nodeId# 47 Add nodeId# 234 Pick Add nodeId# 235 Delete nodeId# 146 Add nodeId# 236 Enable nodeId# 202 Disable nodeId# 125 Add nodeId# 237 Pick Pick Disable nodeId# 202 Add nodeId# 238 Delete nodeId# 9 Enable nodeId# 125 Pick Delete nodeId# 61 Enable nodeId# 69 Disable nodeId# 7 Disable nodeId# 33 Add nodeId# 239 Enable nodeId# 202 Add nodeId# 240 Enable nodeId# 7 Delete nodeId# 134 Delete nodeId# 7 Add nodeId# 241 Add nodeId# 242 Add nodeId# 243 Add nodeId# 244 Add nodeId# 245 Enable nodeId# 33 Pick Enable nodeId# 191 Pick Disable nodeId# 108 Disable nodeId# 180 Pick Delete nodeId# 114 Pick Add nodeId# 246 Pick Disable nodeId# 226 Enable nodeId# 226 Delete nodeId# 180 Disable nodeId# 197 Enable nodeId# 108 Add nodeId# 247 Disable nodeId# 227 Delete nodeId# 24 Pick Pick Disable nodeId# 208 Delete nodeId# 116 Pick Add nodeId# 248 Pick Add nodeId# 249 Pick Pick Pick Disable nodeId# 85 Delete nodeId# 182 Pick Disable nodeId# 45 Delete nodeId# 196 Pick Enable nodeId# 197 Enable nodeId# 227 Delete nodeId# 208 Add nodeId# 250 Delete nodeId# 220 Delete nodeId# 104 Disable nodeId# 250 Disable nodeId# 185 Enable nodeId# 45 Enable nodeId# 250 Add nodeId# 251 Pick Disable nodeId# 145 Disable nodeId# 161 Enable nodeId# 145 Enable nodeId# 85 Enable nodeId# 185 Disable nodeId# 238 Delete nodeId# 124 Delete nodeId# 153 Delete nodeId# 227 Enable nodeId# 161 Delete nodeId# 120 Delete nodeId# 118 Add nodeId# 252 Add nodeId# 253 Add nodeId# 254 Disable nodeId# 212 Add nodeId# 255 Pick Pick Delete nodeId# 247 Disable nodeId# 152 Enable nodeId# 238 Disable nodeId# 45 Delete nodeId# 236 Add nodeId# 256 Pick Enable nodeId# 152 Delete nodeId# 158 Delete nodeId# 234 Pick Enable nodeId# 45 Enable nodeId# 212 Pick Add nodeId# 257 Delete nodeId# 141 Disable nodeId# ... 
Id# 20272 Delete nodeId# 20298 Enable nodeId# 20286 Pick Disable nodeId# 20297 Pick Pick Delete nodeId# 20265 Delete nodeId# 20302 Disable nodeId# 20291 Enable nodeId# 20272 Add nodeId# 20303 Delete nodeId# 20272 Delete nodeId# 20264 Pick Add nodeId# 20304 Disable nodeId# 20280 Pick Delete nodeId# 20304 Enable nodeId# 20291 Enable nodeId# 20297 Pick Disable nodeId# 20291 Delete nodeId# 20258 Delete nodeId# 20303 Pick Delete nodeId# 20301 Pick Delete nodeId# 20297 Pick Delete nodeId# 20286 Disable nodeId# 20287 Add nodeId# 20305 Delete nodeId# 20253 Pick Enable nodeId# 20280 Delete nodeId# 20305 Delete nodeId# 20274 Enable nodeId# 20242 Disable nodeId# 20242 Delete nodeId# 20260 Add nodeId# 20306 Delete nodeId# 20293 Pick Delete nodeId# 20280 Enable nodeId# 20291 Add nodeId# 20307 Enable nodeId# 20287 Disable nodeId# 20290 Delete nodeId# 20299 Disable nodeId# 20287 Disable nodeId# 20306 Add nodeId# 20308 Enable nodeId# 20242 Pick Pick Add nodeId# 20309 Disable nodeId# 20276 Add nodeId# 20310 Delete nodeId# 20310 Add nodeId# 20311 Delete nodeId# 20296 Enable nodeId# 20290 Add nodeId# 20312 Delete nodeId# 20306 Enable nodeId# 20287 Enable nodeId# 20276 Add nodeId# 20313 Add nodeId# 20314 Disable nodeId# 20311 Enable nodeId# 20311 Delete nodeId# 20276 Disable nodeId# 20312 Delete nodeId# 20290 Pick Pick Delete nodeId# 20242 Enable nodeId# 20312 Delete nodeId# 20313 Disable nodeId# 20312 Add nodeId# 20315 Pick Delete nodeId# 20308 Disable nodeId# 20285 Enable nodeId# 20312 Enable nodeId# 20285 Pick Disable nodeId# 20312 Enable nodeId# 20312 Pick Pick Disable nodeId# 20315 Add nodeId# 20316 Disable nodeId# 20312 Add nodeId# 20317 Disable nodeId# 20309 Enable nodeId# 20315 Add nodeId# 20318 Disable nodeId# 20287 Add nodeId# 20319 Disable nodeId# 20311 Delete nodeId# 20316 Delete nodeId# 20285 Add nodeId# 20320 Delete nodeId# 20287 Disable nodeId# 20291 Add nodeId# 20321 Disable nodeId# 20320 Pick Pick Add nodeId# 20322 Add nodeId# 20323 Pick Pick Add nodeId# 20324 Delete nodeId# 20321 Delete nodeId# 20314 Disable nodeId# 20318 Disable nodeId# 20317 Disable nodeId# 20323 Disable nodeId# 20315 Enable nodeId# 20315 Disable nodeId# 20315 Add nodeId# 20325 Delete nodeId# 20322 Disable nodeId# 20325 Add nodeId# 20326 Pick Disable nodeId# 20324 Enable nodeId# 20315 Disable nodeId# 20315 Pick Pick Pick Pick Delete nodeId# 20319 Pick Delete nodeId# 20291 Enable nodeId# 20311 Add nodeId# 20327 Add nodeId# 20328 Pick Enable nodeId# 20315 Delete nodeId# 20320 Enable nodeId# 20318 Pick Pick Add nodeId# 20329 Enable nodeId# 20312 Delete nodeId# 20312 Add nodeId# 20330 Enable nodeId# 20323 Disable nodeId# 20327 Add nodeId# 20331 Disable nodeId# 20328 Delete nodeId# 20325 Pick Add nodeId# 20332 Pick Disable nodeId# 20332 Enable nodeId# 20327 Add nodeId# 20333 Add nodeId# 20334 Add nodeId# 20335 Delete nodeId# 20329 Enable nodeId# 20328 Delete nodeId# 20332 Enable nodeId# 20324 Delete nodeId# 20327 Add nodeId# 20336 Delete nodeId# 20331 Pick Disable nodeId# 20323 Add nodeId# 20337 Disable nodeId# 20335 Enable nodeId# 20323 Delete nodeId# 20337 Add nodeId# 20338 Add nodeId# 20339 Pick Add nodeId# 20340 Add nodeId# 20341 Enable nodeId# 20317 Pick Delete nodeId# 20328 Add nodeId# 20342 Add nodeId# 20343 Add nodeId# 20344 Pick Disable nodeId# 20326 Add nodeId# 20345 Disable nodeId# 20338 Disable nodeId# 20323 Add nodeId# 20346 Disable nodeId# 20346 Enable nodeId# 20326 Add nodeId# 20347 Pick Pick Pick Delete nodeId# 20336 Delete nodeId# 20345 Pick Enable nodeId# 20309 Disable nodeId# 20318 Delete nodeId# 20324 Pick 
Add nodeId# 20348 Add nodeId# 20349 Pick Delete nodeId# 20326 Disable nodeId# 20307 Disable nodeId# 20309 Delete nodeId# 20342 Enable nodeId# 20309 Disable nodeId# 20317 Pick Enable nodeId# 20323 Add nodeId# 20350 Delete nodeId# 20343 Delete nodeId# 20349 Enable nodeId# 20307 Delete nodeId# 20317 Delete nodeId# 20323 Enable nodeId# 20338 Pick Add nodeId# 20351 Enable nodeId# 20346 Delete nodeId# 20334 Pick Disable nodeId# 20339 Enable nodeId# 20318 Pick Enable nodeId# 20339 Disable nodeId# 20333 Delete nodeId# 20346 Disable nodeId# 20318 Disable nodeId# 20350 Enable nodeId# 20335 Enable nodeId# 20333 Disable nodeId# 20348 Add nodeId# 20352 Disable nodeId# 20347 Delete nodeId# 20348 Delete nodeId# 20341 Add nodeId# 20353 Disable nodeId# 20315 Enable nodeId# 20350 Add nodeId# 20354 Add nodeId# 20355 Disable nodeId# 20355 Pick Add nodeId# 20356 Pick Disable nodeId# 20344 Pick Enable nodeId# 20344 Enable nodeId# 20315 Enable nodeId# 20318 Add nodeId# 20357 Disable nodeId# 20318 Enable nodeId# 20347 Pick Enable nodeId# 20318 Delete nodeId# 20350 Delete nodeId# 20330 Disable nodeId# 20309 Disable nodeId# 20354 Enable nodeId# 20355 Add nodeId# 20358 Disable nodeId# 20347 Add nodeId# 20359 Pick Disable nodeId# 20339 Pick Pick Disable nodeId# 20355 Enable nodeId# 20309 Disable nodeId# 20340 Disable nodeId# 20309 Pick Pick Enable nodeId# 20339 Delete nodeId# 20318 Pick Pick Disable nodeId# 20351 Disable nodeId# 20353 Disable nodeId# 20344 Delete nodeId# 20359 Enable nodeId# 20355 Pick Pick Delete nodeId# 20356 Delete nodeId# 20344 Pick Add nodeId# 20360 Disable nodeId# 20352 Pick Delete nodeId# 20340 Enable nodeId# 20309 Add nodeId# 20361 Add nodeId# 20362 Add nodeId# 20363 Disable nodeId# 20355 Enable nodeId# 20353 Enable nodeId# 20347 Delete nodeId# 20355 Enable nodeId# 20354 Add nodeId# 20364 Disable nodeId# 20309 Disable nodeId# 20361 Delete nodeId# 20307 Add nodeId# 20365 Add nodeId# 20366 Add nodeId# 20367 Enable nodeId# 20309 Add nodeId# 20368 Enable nodeId# 20361 Add nodeId# 20369 Pick Delete nodeId# 20315 Pick Enable nodeId# 20351 Add nodeId# 20370 Enable nodeId# 20352 Pick Delete nodeId# 20338 Pick Delete nodeId# 20358 Delete nodeId# 20367 Add nodeId# 20371 Add nodeId# 20372 Delete nodeId# 20365 Pick Delete nodeId# 20339 Disable nodeId# 20352 Enable nodeId# 20352 Add nodeId# 20373 Disable nodeId# 20309 Pick Pick Pick Enable nodeId# 20309 Delete nodeId# 20368 Disable nodeId# 20370 Enable nodeId# 20370 Add nodeId# 20374 Pick Delete nodeId# 20362 Pick Pick Disable nodeId# 20357 Add nodeId# 20375 Disable nodeId# 20373 Disable nodeId# 20366 Add nodeId# 20376 Enable nodeId# 20357 Add nodeId# 20377 Disable nodeId# 20333 Add nodeId# 20378 Disable nodeId# 20370 Enable nodeId# 20366 Pick Pick Disable nodeId# 20376 Enable nodeId# 20333 Pick Delete nodeId# 20364 Disable nodeId# 20363 Disable nodeId# 20335 Add nodeId# 20379 Delete nodeId# 20333 Pick Add nodeId# 20380 Delete nodeId# 20357 Enable nodeId# 20373 Add nodeId# 20381 Pick Pick Add nodeId# 20382 Pick Add nodeId# 20383 Enable nodeId# 20363 Disable nodeId# 20353 Disable nodeId# 20363 Pick Pick Add nodeId# 20384 Disable nodeId# 20360 Disable nodeId# 20373 Pick Delete nodeId# 20361 Pick Disable nodeId# 20354 Pick Disable nodeId# 20374 Delete nodeId# 20372 Add nodeId# 20385 Enable nodeId# 20374 Enable nodeId# 20335 Add nodeId# 20386 Add nodeId# 20387 Add nodeId# 20388 Disable nodeId# 20382 Disable nodeId# 20311 Pick Add nodeId# 20389 Delete nodeId# 20374 Add nodeId# 20390 Pick Enable nodeId# 20370 Pick Add nodeId# 20391 Pick Enable nodeId# 20354 
Delete nodeId# 20384 Disable nodeId# 20377 Delete nodeId# 20387 Add nodeId# 20392 Enable nodeId# 20353 Delete nodeId# 20335 Enable nodeId# 20360 Add nodeId# 20393 Pick Delete nodeId# 20369 Pick Add nodeId# 20394 Disable nodeId# 20391 Disable nodeId# 20381 Pick Add nodeId# 20395 Disable nodeId# 20309 Delete nodeId# 20392 Add nodeId# 20396 Delete nodeId# 20377 Delete nodeId# 20386 Pick Delete nodeId# 20378 Enable nodeId# 20311 Disable nodeId# 20353 Enable nodeId# 20391 Pick Delete nodeId# 20390 Disable nodeId# 20379 Pick Enable nodeId# 20363 Delete nodeId# 20393 Add nodeId# 20397 Enable nodeId# 20309 Disable nodeId# 20354 Disable nodeId# 20351 Add nodeId# 20398 Pick Pick Add nodeId# 20399 Delete nodeId# 20383 Pick Pick Disable nodeId# 20371 Disable nodeId# 20385 Enable nodeId# 20382 Pick Pick Pick Pick Delete nodeId# 20370 Pick Disable nodeId# 20347 Enable nodeId# 20354 Pick Delete nodeId# 20385 Delete nodeId# 20388 Disable nodeId# 20309 Delete nodeId# 20375 Delete nodeId# 20380 Add nodeId# 20400 Add nodeId# 20401 Delete nodeId# 20352 Enable nodeId# 20373 Enable nodeId# 20379 Enable nodeId# 20376 Delete nodeId# 20382 Delete nodeId# 20371 Delete nodeId# 20354 Add nodeId# 20402 Enable nodeId# 20353 Pick Disable nodeId# 20379 Delete nodeId# 20381 Enable nodeId# 20309 Pick Delete nodeId# 20399 Delete nodeId# 20400 Pick Delete nodeId# 20402 Pick Add nodeId# 20403 Delete nodeId# 20363 Disable nodeId# 20397 Disable nodeId# 20311 Disable nodeId# 20391 Enable nodeId# 20397 Pick Disable nodeId# 20398 Delete nodeId# 20311 Disable nodeId# 20360 Delete nodeId# 20366 Disable nodeId# 20397 Delete nodeId# 20394 Delete nodeId# 20373 Enable nodeId# 20351 Disable nodeId# 20401 Pick Pick Pick Delete nodeId# 20353 Disable nodeId# 20403 Disable nodeId# 20396 Delete nodeId# 20389 Enable nodeId# 20401 Add nodeId# 20404 Enable nodeId# 20396 Enable nodeId# 20397 Disable nodeId# 20395 Add nodeId# 20405 Delete nodeId# 20351 Add nodeId# 20406 Pick Disable nodeId# 20406 Enable nodeId# 20391 Disable nodeId# 20391 Delete nodeId# 20398 Pick Add nodeId# 20407 Delete nodeId# 20401 Pick Delete nodeId# 20360 Enable nodeId# 20391 Delete nodeId# 20397 Add nodeId# 20408 Enable nodeId# 20403 Pick Disable nodeId# 20309 Disable nodeId# 20407 Enable nodeId# 20395 Enable nodeId# 20379 Pick Pick Disable nodeId# 20376 Disable nodeId# 20403 Add nodeId# 20409 Pick Disable nodeId# 20408 Enable nodeId# 20376 Enable nodeId# 20407 Pick Delete nodeId# 20376 Add nodeId# 20410 Delete nodeId# 20396 Delete nodeId# 20395 Disable nodeId# 20410 Enable nodeId# 20406 Delete nodeId# 20391 Enable nodeId# 20309 Enable nodeId# 20410 Add nodeId# 20411 Enable nodeId# 20403 Add nodeId# 20412 Pick Enable nodeId# 20408 Add nodeId# 20413 Delete nodeId# 20379 Delete nodeId# 20410 Enable nodeId# 20347 Disable nodeId# 20408 Delete nodeId# 20406 Pick Disable nodeId# 20347 Add nodeId# 20414 Pick Delete nodeId# 20414 Add nodeId# 20415 Add nodeId# 20416 Pick Delete nodeId# 20309 Enable nodeId# 20347 Delete nodeId# 20412 Pick Disable nodeId# 20413 Disable nodeId# 20411 Disable nodeId# 20405 Enable nodeId# 20411 Add nodeId# 20417 Pick Delete nodeId# 20405 Pick Pick Add nodeId# 20418 Delete nodeId# 20407 Pick Delete nodeId# 20409 Disable nodeId# 20411 Add nodeId# 20419 Enable nodeId# 20408 Delete nodeId# 20418 Disable nodeId# 20417 Pick Add nodeId# 20420 Disable nodeId# 20420 Disable nodeId# 20404 Enable nodeId# 20411 Delete nodeId# 20413 Add nodeId# 20421 Disable nodeId# 20421 Disable nodeId# 20416 Add nodeId# 20422 Disable nodeId# 20415 Delete nodeId# 20347 Disable 
nodeId# 20411 Delete nodeId# 20417 Pick Enable nodeId# 20404 Pick Add nodeId# 20423 Delete nodeId# 20419 Delete nodeId# 20422 Pick Enable nodeId# 20411 Enable nodeId# 20415 Disable nodeId# 20408 Disable nodeId# 20404 Enable nodeId# 20420 Pick Pick Enable nodeId# 20408 Disable nodeId# 20423 Enable nodeId# 20421 Add nodeId# 20424 Enable nodeId# 20423 Delete nodeId# 20404 Disable nodeId# 20420 Enable nodeId# 20420 Disable nodeId# 20411 Pick Disable nodeId# 20415 Pick Add nodeId# 20425 Delete nodeId# 20415 Add nodeId# 20426 Pick Delete nodeId# 20426 Add nodeId# 20427 Disable nodeId# 20423 Disable nodeId# 20427 Pick Pick Enable nodeId# 20423 Add nodeId# 20428 Add nodeId# 20429 Disable nodeId# 20423 Enable nodeId# 20423 Enable nodeId# 20411 Add nodeId# 20430 Delete nodeId# 20429 Pick Add nodeId# 20431 Delete nodeId# 20416 Delete nodeId# 20425 Delete nodeId# 20408 Enable nodeId# 20427 Delete nodeId# 20424 Pick Disable nodeId# 20423 Enable nodeId# 20423 Pick Pick Add nodeId# 20432 Add nodeId# 20433 Delete nodeId# 20403 Pick Delete nodeId# 20411 Pick Disable nodeId# 20430 Enable nodeId# 20430 Add nodeId# 20434 Delete nodeId# 20423 Add nodeId# 20435 Add nodeId# 20436 Disable nodeId# 20421 Disable nodeId# 20432 Add nodeId# 20437 Enable nodeId# 20432 Enable nodeId# 20421 Add nodeId# 20438 Disable nodeId# 20420 Disable nodeId# 20430 Pick
>> TS3WrapperTests::MultipartUpload
>> TBackupTests::BackupUuidColumn[Raw] [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk100
|79.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
|79.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
>> TS3WrapperTests::MultipartUpload [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:54:31.760750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:31.760844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.760879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:31.760964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:31.761015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:31.761047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:31.761101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.761174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10,
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:31.761476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:31.846749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:31.846803Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:31.862074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:31.862225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:31.862404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:31.869068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:31.869356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:31.870008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.870332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:31.874528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.875908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:31.875970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.876072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:31.876116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:31.876160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:31.876364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.882855Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:54:31.995588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:31.995835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.996056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:31.996288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:31.996346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.998513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.998678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:31.998857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.998934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:31.998984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:31.999016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:32.008325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.008441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.008479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:32.015232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.015303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.015341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.015393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.025284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.029374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:32.029607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:32.031058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.031229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.031282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.031597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:32.031656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.031911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:32.031998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:32.034432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.034488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.034738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.034811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:32.035150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.035193Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:32.035304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.035337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.035375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.035404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.035439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:32.035477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.035524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:32.035555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:32.035631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:32.035678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:32.035725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:32.037661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.037777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.037812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
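
The records above trace a schemeshard operation through its lifecycle: "Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240" once the coordinator delivers TEvOperationPlan; the backup transaction below additionally passes through 129 while waiting on datashards. A minimal sketch of that progression, not YDB's actual code: the numeric values come from the trace itself, while the state names are assumptions inferred from the log messages (TCreateParts, TConfigureParts, TPropose, TProposedWaitParts, TDone).

// Sketch of the operation state progression visible in the trace.
#include <cstdint>
#include <stdexcept>

enum class EOpState : uint8_t {
    CreateParts       = 2,   // "Change state for txid 1:0 2 -> 3"
    ConfigureParts    = 3,   // "3 -> 128"
    Propose           = 128, // "128 -> 240" (or "128 -> 129" for ops that wait on shards)
    ProposedWaitParts = 129, // e.g. TBackup waiting for TEvSchemaChanged from datashards
    Done              = 240,
};

// Advance one step; waitParts models operations (like backup) that
// must collect shard acknowledgements before completing.
EOpState Next(EOpState s, bool waitParts) {
    switch (s) {
        case EOpState::CreateParts:       return EOpState::ConfigureParts;
        case EOpState::ConfigureParts:    return EOpState::Propose;
        case EOpState::Propose:           return waitParts ? EOpState::ProposedWaitParts
                                                           : EOpState::Done;
        case EOpState::ProposedWaitParts: return EOpState::Done;
        case EOpState::Done:              return EOpState::Done;
    }
    throw std::logic_error("unknown state");
}

Under these assumptions, Next(EOpState::Propose, /*waitParts=*/true) yields 129, matching the "128 -> 129" transition logged for backup txId 102 below.
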
INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.357941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.358112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.360209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:54:32.360376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T17:54:32.361091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.361228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.361300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:54:32.361425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:54:32.361592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:32.399532Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:417:2388], attempt# 0 2025-04-17T17:54:32.424483Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:417:2388], sender# [1:416:2387] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:54:32.431785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.431863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:32.432132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.432202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:54:32.432618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.432674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:54:32.433313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.433496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.433566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:54:32.433636Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:54:32.433691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:32.433799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:4998 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BD26A74F-1245-4818-8CA5-2CBC50D0F900 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-17T17:54:32.447394Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:417:2388], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-04-17T17:54:32.451999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:4998 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FF3B5A89-7E1A-4932-9BA3-A57635B1E1ED amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-17T17:54:32.453028Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:417:2388], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-17T17:54:32.453099Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:416:2387] 2025-04-17T17:54:32.453250Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:417:2388], sender# [1:416:2387], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4998 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4FE47EE8-6976-4340-81E3-52844556F472 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-17T17:54:32.457198Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:417:2388], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-17T17:54:32.457257Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:417:2388], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:54:32.457391Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:416:2387], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:54:32.475215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.475280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:54:32.475449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.475580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.475644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.475737Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.475800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:54:32.475851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:54:32.475996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.477898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.478197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.478241Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:54:32.478362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:32.478398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.478436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
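
Each "REQUEST: PUT ..." block above is the backup export actor writing one object (metadata.json, scheme.pb, data_00.csv.zst) to the S3 mock via aws-sdk-cpp (visible in the user-agent header), then receiving the ETag back in HandleMetadata/HandleScheme/HandleData. A minimal standalone sketch of the same kind of upload against such a mock endpoint; the bucket name and payload here are hypothetical, and the port differs per run (this trace happens to show localhost:4998).

#include <aws/core/Aws.h>
#include <aws/core/client/ClientConfiguration.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/PutObjectRequest.h>
#include <iostream>

int main() {
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        Aws::Client::ClientConfiguration cfg;
        cfg.endpointOverride = "localhost:4998"; // S3 mock address from the trace; varies per run
        cfg.scheme = Aws::Http::Scheme::HTTP;    // the mock speaks plain HTTP

        Aws::S3::S3Client s3(cfg,
            Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
            /*useVirtualAddressing=*/false);     // path-style addressing for a local mock

        Aws::S3::Model::PutObjectRequest req;
        req.SetBucket("test_bucket");            // hypothetical bucket name
        req.SetKey("metadata.json");
        auto body = Aws::MakeShared<Aws::StringStream>("backup-sketch");
        *body << R"({"version": 0})";            // placeholder payload
        req.SetBody(body);

        auto outcome = s3.PutObject(req);
        if (outcome.IsSuccess()) {
            // The export actor logs this value as PutObjectResult { ETag: ... }
            std::cout << "ETag: " << outcome.GetResult().GetETag() << std::endl;
        } else {
            std::cerr << outcome.GetError().GetMessage() << std::endl;
        }
    }
    Aws::ShutdownAPI(options);
    return 0;
}
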
2025-04-17T17:54:32.478482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.478524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:54:32.478616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2317] message: TxId: 102 2025-04-17T17:54:32.478667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.478725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:54:32.478759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:54:32.478876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:32.490311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:54:32.490392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:402:2374] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:23.601785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:23.601877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:23.601933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:23.601977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:23.602026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:23.602054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:23.602152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:23.602849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:23.603209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:23.688736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:23.688794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:23.711880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-17T17:54:23.712311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:23.712499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:23.729958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:23.730223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:23.730946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:23.731196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:23.736160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.737543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:23.737611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.737783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:23.737897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:23.737961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:23.738107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.744930Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:23.887560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:23.887829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.888077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:23.888371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:23.888433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.891213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:23.891374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:23.891615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.891680Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:23.891715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:23.891749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:23.894000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.894077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:23.894116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:23.897998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.898056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.898099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:23.898227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.907437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:23.909525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:23.909706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:23.911104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:23.911255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:23.911305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:23.911663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:23.911727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:23.911931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:23.912024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:23.914387Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:23.914443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:23.914657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:23.914699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:23.915087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:23.915132Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:23.915231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:23.915263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.915320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:23.915358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.915395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:23.915434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:23.915484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:23.915522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:23.915607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:23.915653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:23.915701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:23.917602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:23.917739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:23.917781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:54:32.562266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.562346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.562727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.562832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T17:54:32.562915Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T17:54:32.563132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-17T17:54:32.563269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.563360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.563418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-17T17:54:32.563456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:54:32.563586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.563692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.563906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-17T17:54:32.564203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.564319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.564676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.564751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.564956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.565080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.565165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.565371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.565447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 
2025-04-17T17:54:32.565600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.565799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.565960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.566012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.566065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.566341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:54:32.572316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:54:32.572457Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T17:54:32.573828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:1015:2959], Recipient [1:1015:2959]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-17T17:54:32.573871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-17T17:54:32.575271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.575326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.575678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1015:2959], Recipient [1:1015:2959]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:54:32.575726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:54:32.576335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:32.576395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.576444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:32.576478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:54:32.577470Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1051:2959], Recipient [1:1015:2959]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T17:54:32.577515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T17:54:32.577556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1015:2959] sender: [1:1071:2058] recipient: [1:15:2062] 2025-04-17T17:54:32.617486Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1070:3003], Recipient [1:1015:2959]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-17T17:54:32.617555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:54:32.617693Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:54:32.618024Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 317us result status StatusSuccess 2025-04-17T17:54:32.618868Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } 
TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:31.579405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:31.579543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.579601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:31.579640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:31.579710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:31.579744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:31.579818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.579934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:31.580254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:31.777591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:31.777649Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T17:54:31.798938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:31.799422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:31.799609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:31.807998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:31.808186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:31.808738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.808953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:31.812232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.813385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:31.813471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.813653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:31.813697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:31.813741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:31.813882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.820085Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:31.929466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:31.929723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.929979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:31.930261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:31.930320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.932581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.932714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:31.932907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.932960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:31.932992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:31.933043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:31.935174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.935242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:31.935279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:31.937031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.937093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.937121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:31.937157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:31.945411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:31.947040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:31.947192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:31.948067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.948193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:31.948229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:31.948446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:31.948486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:31.948615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:31.948708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:31.950492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:31.950529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:31.950793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.950859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:31.951220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.951276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:31.951372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:31.951401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:31.951439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:31.951475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:31.951537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:31.951580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:31.951612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:31.951639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:31.951710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:31.951758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:31.951804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:31.953399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:31.953520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:31.953574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1.csv / / 11 2025-04-17T17:54:32.448509Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:481:2440], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-04-17T17:54:32.449967Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-04-17T17:54:32.450012Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:483:2441], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:54:32.450279Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:482:2439], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:27910 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F2C085C7-44F9-440B-9309-19D98C111290 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-04-17T17:54:32.459533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.459632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:32.459928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.460070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:54:32.460535Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:481:2440], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-17T17:54:32.460647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.460710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:54:32.461210Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:480:2438] 2025-04-17T17:54:32.461290Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:481:2440], sender# [1:480:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-17T17:54:32.462475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.462625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.462685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:54:32.462745Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:54:32.462812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:54:32.462926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:27910 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 376B28B0-8F70-4D0E-AE80-F4C4427FD5C4 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-17T17:54:32.465114Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:481:2440], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-17T17:54:32.465163Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:481:2440], success# 1, error# , multipart# 0, uploadId# (empty maybe) FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:54:32.473683Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:480:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:54:32.477819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:54:32.502483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.502558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T17:54:32.502729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.502837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.502907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.503073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 
72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.503884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.503923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:54:32.504041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.504132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.504185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.504251Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.504294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:54:32.504341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:54:32.504365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:54:32.504630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.507297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.507527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.507858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.507902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:54:32.508006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:32.508044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.508075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:32.508114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.508145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:54:32.508209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:372:2339] message: TxId: 102 2025-04-17T17:54:32.508244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.508282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:54:32.508306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:54:32.508411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:32.509716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:54:32.509756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:456:2416] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:54:31.579461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:31.579562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.579603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:31.579686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:31.579732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:31.579770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:31.579840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.579908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:31.580225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:31.806837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:31.806896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:31.821536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:31.821668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-17T17:54:31.821845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:31.832489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:31.832905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:31.833611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.833949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:31.838674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.839811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:31.839862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.839951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:31.839987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:31.840015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:31.840170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.845949Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:54:31.992954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:31.993199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.993438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:31.993701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:31.993765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.996582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.996766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:31.996992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.997066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:31.997110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:31.997148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:31.999703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.999771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:31.999815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:32.008791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.008861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.008906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.008975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.012907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.015492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:32.015700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:32.016721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.016858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.016907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.017194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:32.017265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.017438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:32.017536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:32.020786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T17:54:32.020845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.021087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.021149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:32.021444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.021491Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:32.021597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.021632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.021674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.021711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.021762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:32.021806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.021840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:32.021873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:32.021954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:32.021994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:32.022042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:32.023917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.024012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.024043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
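Both setup traces show the same two-phase exchange with the (here fake) coordinator: schemeshard sends a propose message Transaction { TxId, MinStep, MaxStep } and the operation waits in state 128 until a plan step comes back ("Add transaction: 1 at step: 5000001", then TTxOperationPlanStep Execute). The rough sketch below models only the step assignment, under assumed names; the real coordinator derives steps from its plan-resolution clock, whereas this toy version just picks the next free step at or after MinStep.

#include <algorithm>
#include <cstdint>

// Toy model of plan-step assignment; all names are assumptions for
// illustration, not the actual YDB coordinator code.
struct TProposal {
    uint64_t TxId;
    uint64_t MinStep; // 0 in the traces above
    uint64_t MaxStep; // 18446744073709551615, i.e. effectively no deadline
};

struct TCoordinator {
    uint64_t FrontStep = 0; // last planned step ("State->FrontStep" in the log)

    // Returns the step this transaction is planned at; schemeshard receives
    // it as TEvOperationPlan and moves the operation 128 -> 240.
    uint64_t Plan(const TProposal& p) {
        FrontStep = std::max(FrontStep + 1, p.MinStep);
        return FrontStep;
    }
};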
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.375676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.375827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.377807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:54:32.377931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T17:54:32.378617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.378739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.378798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:54:32.378902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:54:32.379028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:32.388240Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:417:2388], attempt# 0 2025-04-17T17:54:32.411144Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:417:2388], sender# [1:416:2387] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:54:32.417326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.417397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:32.417685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.417735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:54:32.418237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.418300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:54:32.418873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.419036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.419112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:54:32.419206Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:54:32.419248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:32.419327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:54:32.422303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:19703 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FA4420AC-0534-4CD7-9CD9-1FF447F8E445 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-17T17:54:32.447407Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:417:2388], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:19703 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DF605063-D9D1-4F54-B9AF-8D7739A9A44E amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-17T17:54:32.467299Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:417:2388], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-17T17:54:32.467399Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:416:2387] 2025-04-17T17:54:32.467555Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:417:2388], sender# [1:416:2387], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:19703 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BFAD970A-FFE5-4EF3-BBAF-1F3FAA49D119 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-17T17:54:32.475320Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:417:2388], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-17T17:54:32.475399Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:417:2388], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:54:32.475597Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:416:2387], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:54:32.492171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.492249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:54:32.492420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.492556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:32.492683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.492723Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.492778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:54:32.492839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:54:32.493019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.495311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.495706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.495751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:54:32.495884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:32.495923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.495970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
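One detail worth noticing in the PUT exchanges above: the Content-MD5 request header and the ETag in the PutObjectResult are the same 16-byte MD5 digest in two encodings, base64 in the header and lowercase hex in the ETag. For data_00.csv, the base64 value "bj4KQf2rit2DOGLxvSlUww==" decodes to exactly the logged ETag 6e3e0a41fdab8add833862f1bd2954c3. A self-contained sketch of that check follows (illustration only; the tests perform their own verification elsewhere):

#include <cassert>
#include <cstdint>
#include <string>

// Decodes a base64 string and returns the bytes as lowercase hex.
static std::string Base64ToHex(const std::string& b64) {
    static const std::string abc =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    static const char* digits = "0123456789abcdef";
    std::string hex;
    uint32_t buf = 0;
    int bits = 0;
    for (char c : b64) {
        if (c == '=') break;               // padding: no more payload bits
        buf = (buf << 6) | uint32_t(abc.find(c));
        bits += 6;
        if (bits >= 8) {
            bits -= 8;
            uint8_t byte = uint8_t(buf >> bits);
            hex += digits[byte >> 4];
            hex += digits[byte & 15];
        }
    }
    return hex;
}

int main() {
    // Content-MD5 header vs. ETag from the data_00.csv PUT logged above.
    assert(Base64ToHex("bj4KQf2rit2DOGLxvSlUww==") ==
           "6e3e0a41fdab8add833862f1bd2954c3");
}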
2025-04-17T17:54:32.496007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.496042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:54:32.496139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2317] message: TxId: 102 2025-04-17T17:54:32.496206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.496248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:54:32.496278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:54:32.496397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:32.498610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:54:32.498664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:402:2374] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:31.835456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:31.835559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.835596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:31.835651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:31.835700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:31.835726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:31.835831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.835893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:31.836161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:31.918897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:31.918958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:31.940085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:31.940487Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:31.940631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:31.952860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:31.953034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:31.953654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:31.953867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:31.956633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.957839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:31.957897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:31.958067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:31.958135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:31.958173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:31.958317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:31.964031Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:32.074474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:32.074691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.074866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:32.075097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:32.075171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.077427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.077554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:32.077726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.077784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:32.077819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:32.077847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:32.079355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.079402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.079431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:32.080640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.080678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.080719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.080765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.089426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.091664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:32.091895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:32.093048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.093210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.093259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.093557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:32.093629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.093788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:32.093860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:32.096730Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.096777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.097027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.097110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:32.097492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.097547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:32.097646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.097698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.097737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.097767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.097819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:32.097864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.097895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:32.097923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:32.097990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:32.098023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:32.098074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:32.099813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.099927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.099962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
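The publish/ack bookkeeping repeated in each trace ("TTxPublishToSchemeBoard Send, to populator ...", then "Handle TEvUpdateAck ... Version: 3 ... cookie: 1") pairs a versioned path description with an ack keyed by the txId cookie; "is published" flips to true once every awaited (pathId, version) for the transaction has been acknowledged at that version or newer. A sketch of that bookkeeping under assumed names, not the NKikimr code:

#include <cstdint>
#include <map>

struct TPublication {
    uint64_t PathId;
    uint64_t Version; // "Version: 3" in the trace
};

struct TPublicationTracker {
    // cookie (txId) -> publications still awaiting acknowledgement
    std::multimap<uint64_t, TPublication> InFlight;

    void Publish(uint64_t txId, TPublication p) { InFlight.emplace(txId, p); }

    // Called on TEvUpdateAck{LocalPathId, Version, cookie}; returns true once
    // nothing is left in flight for this txId ("is published: true").
    bool Ack(uint64_t txId, uint64_t pathId, uint64_t version) {
        auto range = InFlight.equal_range(txId);
        for (auto it = range.first; it != range.second; ++it) {
            if (it->second.PathId == pathId && it->second.Version <= version) {
                InFlight.erase(it);
                break;
            }
        }
        return InFlight.count(txId) == 0;
    }
};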
FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.420518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.420677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.422533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:54:32.422683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T17:54:32.423328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.423477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.423552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:54:32.423668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:54:32.423801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:32.444823Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2385], attempt# 0 2025-04-17T17:54:32.467049Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2385], sender# [1:413:2382] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:12588 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EF7A4509-FDB2-48D2-A6D8-61E37341C470 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-17T17:54:32.474970Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-04-17T17:54:32.477269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.477352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:32.477763Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.477814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:54:32.478653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.478743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.479587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.479695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:32.479736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:54:32.479816Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:54:32.479903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:32.480006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:12588 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 190A0237-3F19-4A33-84C2-F9279346DA63 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:54:32.481716Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-04-17T17:54:32.482333Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-04-17T17:54:32.482565Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2385], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-17T17:54:32.484223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12588 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 44C23001-62B8-4D64-BB73-25D8DF81C9DE amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-04-17T17:54:32.486119Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] 
HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-04-17T17:54:32.486211Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2385], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:54:32.486368Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:54:32.495741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-17T17:54:32.495818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:54:32.496020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-17T17:54:32.496151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-17T17:54:32.496213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.496252Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.496292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:54:32.496337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:54:32.496519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.498829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.499150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.499196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:54:32.499298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:32.499336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.499399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-04-17T17:54:32.499497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.499559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:54:32.499638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T17:54:32.499706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:32.499750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:54:32.499795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:54:32.499925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:32.506948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:54:32.507020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2025-04-17T17:54:33.442977Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D91728F3-4C30-489B-922A-E53BBC82E804, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:8178 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 31C6FB90-5E6F-4401-9AAF-EBD91190F791 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-04-17T17:54:33.448783Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D91728F3-4C30-489B-922A-E53BBC82E804, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-04-17T17:54:33.449135Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B565325D-143C-4429-B6F5-79F6B5367DAD, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8178 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 96B8F1AA-D4FC-4030-8278-34667F7D38C2 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2025-04-17T17:54:33.452574Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B565325D-143C-4429-B6F5-79F6B5367DAD, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-17T17:54:33.453079Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 5110AA8D-1063-4DBB-9D9F-DC4C9A2E085C, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8178 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
664A2453-FED3-4B1B-9F48-5F1D917E9A81 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2025-04-17T17:54:33.457107Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 5110AA8D-1063-4DBB-9D9F-DC4C9A2E085C, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-17T17:54:33.457629Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D59035F0-F50B-466D-9175-4E6ED600F399, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:8178 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 580064C6-05D9-477D-93AE-BD391FB60489 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-17T17:54:33.460823Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D59035F0-F50B-466D-9175-4E6ED600F399, response# GetObjectResult { } >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:32.412012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:32.412100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:32.412137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:32.412204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:32.412261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:32.412292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:32.412406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:32.412483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:32.412808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:32.501980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:32.502040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:32.518075Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:32.518566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:32.518742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:32.527889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:32.528122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:32.528857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.529087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:32.533504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.534974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.535041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.535235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:32.535327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.535369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:32.535538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.542936Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:32.682748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:32.683025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.683277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:32.683546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:32.683605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.686669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.686837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:32.687028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T17:54:32.687085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:32.687131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:32.687169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:32.689455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.689518Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.689556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:32.691782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.691840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.691883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.691931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.695608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.697880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:32.698105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:32.699192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.699370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.699418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.699748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:32.699818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.700004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:32.700092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-17T17:54:32.702535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.702580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.702835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.702910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:32.703295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.703338Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:32.703458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.703492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.703545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.703578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.703626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:32.703676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.703711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:32.703742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:32.703813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:32.703852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:32.703917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:32.705680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.705793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.705827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:33.000007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:54:33.000135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:33.001594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:54:33.001707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T17:54:33.002278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.002387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:33.002430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:54:33.002515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:54:33.002611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:33.014397Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2385], attempt# 0 2025-04-17T17:54:33.028373Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2385], sender# [1:413:2382] 2025-04-17T17:54:33.033424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:33.033517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:33.033806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.033885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:54:33.034555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.034621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:19062 Accept: */* Connection: Upgrade, 
HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AE5A4B53-066F-48F1-B8CF-2B143656B96E amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-17T17:54:33.035597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:33.035702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:33.035750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:54:33.035794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:54:33.035846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:33.035927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:54:33.036402Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:19062 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FE7C5035-C648-4C1F-A40A-91BA1DB91541 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-17T17:54:33.040766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:54:33.041273Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-04-17T17:54:33.041346Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-04-17T17:54:33.041442Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2385], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:19062 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F7A4497D-53E2-40D9-8857-0BC8D63E24DE amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-04-17T17:54:33.044225Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-04-17T17:54:33.044310Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2385], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:54:33.044474Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:54:33.065586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-17T17:54:33.065674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:54:33.065836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-17T17:54:33.065925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-17T17:54:33.065980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.066013Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.066064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:54:33.066100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:54:33.066321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:33.068829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.068977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.069069Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:54:33.069165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:33.069199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:33.069235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-04-17T17:54:33.069316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:33.069373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:54:33.069446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T17:54:33.069488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:33.069522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:54:33.069574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:54:33.069689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:33.072365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:54:33.072415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> TS3WrapperTests::GetObject >> TS3WrapperTests::CopyPartUpload >> TS3WrapperTests::AbortMultipartUpload >> TS3WrapperTests::GetUnknownObject >> TS3WrapperTests::HeadUnknownObject >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TS3WrapperTests::CompleteUnknownUpload >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TS3WrapperTests::GetObject [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> DataShardVolatile::DistributedWriteThenReadIterator >> TS3WrapperTests::CopyPartUpload [GOOD] >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TS3WrapperTests::GetUnknownObject [GOOD] >> TS3WrapperTests::AbortMultipartUpload [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] |79.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |79.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |79.6%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2025-04-17T17:54:36.098447Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 30701B34-C07E-46E3-92FB-7C5D5D609B25, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:11703 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 595E9B8F-C7DB-4C8A-A9EE-A7E02C3A232D amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2025-04-17T17:54:36.104357Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 30701B34-C07E-46E3-92FB-7C5D5D609B25, response# ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2025-04-17T17:54:35.963987Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0045F32F-4507-4FDA-96EF-5852C4DB13C8, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:4567 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CECC97A3-CE94-49C3-A8DD-A557A6269BC5 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-17T17:54:35.969000Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0045F32F-4507-4FDA-96EF-5852C4DB13C8, response# No response body. 
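The TS3WrapperTests::MultipartUpload and TS3WrapperTests::CompleteUnknownUpload traces above exercise the S3 multipart protocol end to end: CreateMultipartUpload mints an UploadId, each UploadPart returns an ETag, and CompleteMultipartUpload must echo those ETags back (completing with an UploadId the server never issued produces the empty response logged above). What follows is a minimal client-side sketch of that sequence with aws-sdk-cpp, the SDK named in the traces' user-agent header. It is illustrative only: the endpoint, port, bucket, and key are placeholders rather than values from the test run, and a real run against a path-style mock like S3_MOCK would likely also need path-style addressing and dummy credentials configured.

#include <aws/core/Aws.h>
#include <aws/core/client/ClientConfiguration.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/CreateMultipartUploadRequest.h>
#include <aws/s3/model/UploadPartRequest.h>
#include <aws/s3/model/CompleteMultipartUploadRequest.h>
#include <aws/s3/model/CompletedMultipartUpload.h>
#include <aws/s3/model/CompletedPart.h>
#include <iostream>

int main() {
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        Aws::Client::ClientConfiguration cfg;
        cfg.endpointOverride = "localhost:8178";  // placeholder mock endpoint
        cfg.scheme = Aws::Http::Scheme::HTTP;
        Aws::S3::S3Client client(cfg);

        // Step 1: CreateMultipartUpload. The server returns an UploadId that
        // scopes all subsequent part uploads (the mock above returns "1").
        Aws::S3::Model::CreateMultipartUploadRequest create;
        create.SetBucket("TEST");
        create.SetKey("key");
        auto created = client.CreateMultipartUpload(create);
        if (!created.IsSuccess()) return 1;
        const Aws::String uploadId = created.GetResult().GetUploadId();

        // Step 2: UploadPart. Part numbers start at 1; the returned ETag must
        // be retained because it is required to complete the upload.
        Aws::S3::Model::UploadPartRequest part;
        part.SetBucket("TEST");
        part.SetKey("key");
        part.SetUploadId(uploadId);
        part.SetPartNumber(1);
        auto body = Aws::MakeShared<Aws::StringStream>("part-body");
        *body << "test";
        part.SetBody(body);
        auto uploaded = client.UploadPart(part);
        if (!uploaded.IsSuccess()) return 1;

        // Step 3: CompleteMultipartUpload, listing every (PartNumber, ETag)
        // pair. Completing with an unknown UploadId fails, which is exactly
        // the case CompleteUnknownUpload drives through the mock.
        Aws::S3::Model::CompletedPart done;
        done.SetPartNumber(1);
        done.SetETag(uploaded.GetResult().GetETag());
        Aws::S3::Model::CompletedMultipartUpload parts;
        parts.AddParts(done);
        Aws::S3::Model::CompleteMultipartUploadRequest complete;
        complete.SetBucket("TEST");
        complete.SetKey("key");
        complete.SetUploadId(uploadId);
        complete.SetMultipartUpload(parts);
        auto completed = client.CompleteMultipartUpload(complete);
        std::cout << (completed.IsSuccess() ? "completed" : "failed") << "\n";
    }
    Aws::ShutdownAPI(options);
    return 0;
}

The ordering matters: until CompleteMultipartUpload succeeds the object is not visible to readers, which is why the MultipartUpload trace only issues its ranged GET after the completion response arrives.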
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2025-04-17T17:54:35.978069Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 850FD911-0A52-4422-813F-ABD622BB5519, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:15784 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1E81AD9D-646E-4564-A3CC-C976C54A045E amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-17T17:54:35.983858Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 850FD911-0A52-4422-813F-ABD622BB5519, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-17T17:54:35.984212Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 2C6C9735-EA84-47A3-A629-36E8808B5084, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:15784 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D7F28D7A-E979-42B0-A3AF-D8C51DBA4053 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-17T17:54:35.986896Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 2C6C9735-EA84-47A3-A629-36E8808B5084, response# GetObjectResult { } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2025-04-17T17:54:35.972223Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 95CE1579-7E20-43B4-A1AA-D0844E336698, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:9146 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 82D91A32-B041-439E-9871-4874AE7A9BF8 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-17T17:54:35.976157Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 95CE1579-7E20-43B4-A1AA-D0844E336698, response# No response body. 
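GetUnknownObject and HeadUnknownObject both end with "No response body.": the wrapper logs an empty payload because the mock answers a missing key with a bodiless error status. Below is a minimal caller-side sketch of the same HeadObject probe and ranged GetObject, again with aws-sdk-cpp; the endpoint and names are placeholders, and how the mock's 404 maps onto SDK error types is an assumption that may vary by SDK version.

#include <aws/core/Aws.h>
#include <aws/core/client/ClientConfiguration.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/HeadObjectRequest.h>
#include <aws/s3/model/GetObjectRequest.h>
#include <iostream>

int main() {
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        Aws::Client::ClientConfiguration cfg;
        cfg.endpointOverride = "localhost:9146";  // placeholder mock endpoint
        cfg.scheme = Aws::Http::Scheme::HTTP;
        Aws::S3::S3Client client(cfg);

        // HEAD is the cheap existence probe: same status codes as GET but no
        // body on the wire, matching the bodiless 404 in the trace above.
        Aws::S3::Model::HeadObjectRequest head;
        head.SetBucket("TEST");
        head.SetKey("key");
        auto headOut = client.HeadObject(head);
        if (!headOut.IsSuccess()) {
            // Assumption: the mock's 404 surfaces here as a not-found error.
            std::cerr << "head failed: " << headOut.GetError().GetMessage() << "\n";
        }

        // Ranged GET, mirroring the "range: bytes=0-3" header in the traces.
        Aws::S3::Model::GetObjectRequest get;
        get.SetBucket("TEST");
        get.SetKey("key");
        get.SetRange("bytes=0-3");
        auto getOut = client.GetObject(get);
        if (getOut.IsSuccess()) {
            std::cout << getOut.GetResult().GetBody().rdbuf() << "\n";
        } else {
            std::cerr << "get failed: " << getOut.GetError().GetMessage() << "\n";
        }
    }
    Aws::ShutdownAPI(options);
    return 0;
}

Probing with HeadObject before a ranged GetObject is the usual pattern when a caller only needs to confirm existence or size, since a failed HEAD costs no payload transfer.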
>> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-04-17T17:54:35.969678Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 5D2970A2-57E5-443E-A377-59B36C7E2DFB, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:8161 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F18886D0-8C55-430D-94FD-0E22071967AD amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-17T17:54:35.974281Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 5D2970A2-57E5-443E-A377-59B36C7E2DFB, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-17T17:54:35.974628Z node 1 :S3_WRAPPER NOTICE: Request: uuid# CCDA53A6-43C3-4F3B-9B64-3380FACEA54E, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:8161 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B54812BB-FA74-4138-9C33-57CE42FFAF9D amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-04-17T17:54:35.977752Z node 1 :S3_WRAPPER NOTICE: Response: uuid# CCDA53A6-43C3-4F3B-9B64-3380FACEA54E, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-04-17T17:54:35.978127Z node 1 :S3_WRAPPER NOTICE: Request: uuid# E13D3567-1BF2-4DD1-9B24-D20E5E8337E9, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8161 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1CB872B6-58FE-4299-AF68-3FF8B33ECA00 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-04-17T17:54:35.981451Z node 1 :S3_WRAPPER NOTICE: Response: uuid# E13D3567-1BF2-4DD1-9B24-D20E5E8337E9, response# UploadPartCopyResult { } 2025-04-17T17:54:35.981803Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 6ADC92A4-8D0B-48D8-B094-851A50F6571D, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8161 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C06BFD83-1A46-4F17-8812-26DB747D901E amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 
2025-04-17T17:54:35.985181Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 6ADC92A4-8D0B-48D8-B094-851A50F6571D, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-04-17T17:54:35.985437Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B20F8CEE-85CE-40A7-8CE1-7408CD5DBAF0, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:8161 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A6D0AAEA-1365-4D51-A967-0B276FDF8FB2 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-04-17T17:54:35.988324Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B20F8CEE-85CE-40A7-8CE1-7408CD5DBAF0, response# GetObjectResult { } >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:33.821146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:33.821273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:33.821333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:33.821372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:33.821438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:33.821468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:33.821542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:33.821652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:33.821940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:33.912466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:33.912521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:33.928205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-17T17:54:33.928615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:33.928765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:33.937627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:33.937822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:33.938503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.938719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:33.941992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.943420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:33.943515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.943693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:33.943739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:33.943784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:33.943912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.950594Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:34.059959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:34.060189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.060465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:34.060688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:34.060744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.063206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:34.063324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:34.063517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T17:54:34.063569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:34.063602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:34.063656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:34.065623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.065680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:34.065713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:34.067601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.067647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.067687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:34.067732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:34.071246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:34.073143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:34.073352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:34.074316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:34.074476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:34.074523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:34.074789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:34.074842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:34.074993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:34.075077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-17T17:54:34.077109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:34.077151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:34.077378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:34.077443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:34.077797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.077846Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:34.077931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:34.077969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:34.078002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:34.078030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:34.078075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:34.078115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:34.078149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:34.078195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:34.078258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:34.078306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:34.078351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:34.080117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:34.080245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:34.080291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:54:34.512057Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:481:2440], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:61140 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2166EB1D-8CB7-417B-A804-89AEF648BFE5 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2025-04-17T17:54:34.513962Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-04-17T17:54:34.514006Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:483:2441], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:54:34.514120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.514201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:34.514644Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:482:2439], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:61140 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 051AB886-9DEE-45DB-B6D5-725D47595117 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-04-17T17:54:34.522546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:34.522642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:54:34.522672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:54:34.522719Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:54:34.522763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:54:34.522834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:54:34.523103Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# 
[1:481:2440], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-17T17:54:34.523308Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:480:2438] 2025-04-17T17:54:34.523436Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:481:2440], sender# [1:480:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:61140 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2143A84B-557E-4E44-A038-5E0810930F82 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-17T17:54:34.529640Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:481:2440], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-17T17:54:34.529703Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:481:2440], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:54:34.530042Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:480:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:54:34.538608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:54:34.562476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:34.562550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:54:34.562708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:34.562809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:34.562879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:34.563119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 
72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:34.563698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:34.563728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T17:54:34.563820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:34.563896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-17T17:54:34.563935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:34.563961Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.563998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:54:34.564038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T17:54:34.564059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:54:34.564163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:34.567163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.567463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.567825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:54:34.567860Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:54:34.567948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:34.567979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:34.568011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:54:34.568035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:34.568067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:54:34.568148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:372:2339] message: TxId: 102 2025-04-17T17:54:34.568199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:54:34.568236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:54:34.568271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:54:34.568377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:34.569906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:54:34.569949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:456:2416] TestWaitNotification: OK eventTxId 102 >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2025-04-17T17:54:35.986474Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 1E4BD842-A5EF-4E7F-BEA3-178D56FFB55C, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:1072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0F928688-9F44-4ACC-A746-CB28AF01B61C amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-04-17T17:54:35.991265Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 1E4BD842-A5EF-4E7F-BEA3-178D56FFB55C, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-04-17T17:54:35.991603Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 9BF70F99-51F5-428C-94AB-771AA40B579C, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:1072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A64602F1-5DC9-4F18-A9C5-14B94F7C4777 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2025-04-17T17:54:35.994520Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 9BF70F99-51F5-428C-94AB-771AA40B579C, response# AbortMultipartUploadResult { } 2025-04-17T17:54:35.994789Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A4627707-7EC3-417E-B850-6DDD810EDD32, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key 
HTTP/1.1 HEADERS: Host: localhost:1072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0EE6678B-C636-42D9-94A4-5B91E046F1D9 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-17T17:54:35.997020Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A4627707-7EC3-417E-B850-6DDD810EDD32, response# No response body. |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |79.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:36.819016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:36.819128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:36.819178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:36.819215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:36.819270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:36.819301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:36.819368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:36.819451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:36.819792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:36.903527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:36.903590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:36.920188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:36.920655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:36.920816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:36.929012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
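Note on the S3 exchange above: TS3WrapperTests::AbortMultipartUpload drives the standard three-call sequence. CreateMultipartUpload hands back an UploadId ("1" in the trace), AbortMultipartUpload discards it, and the follow-up HeadObject returns no body because the aborted upload never materialized an object. Below is a minimal aws-sdk-cpp sketch of the same sequence against a local mock; it is an illustration only, not the test's internal TS3Wrapper API. Bucket "TEST", key "key" and port 1072 are taken from the trace; the signing policy and path-style flag are assumptions about a typical mock setup.

#include <aws/core/Aws.h>
#include <aws/core/auth/AWSAuthSigner.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/CreateMultipartUploadRequest.h>
#include <aws/s3/model/AbortMultipartUploadRequest.h>
#include <aws/s3/model/HeadObjectRequest.h>
#include <iostream>

int main() {
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        // Point the client at the local mock, as the test does.
        Aws::Client::ClientConfiguration cfg;
        cfg.endpointOverride = "localhost:1072";
        cfg.scheme = Aws::Http::Scheme::HTTP;
        // Path-style addressing ("/TEST/key"), matching the mock's request log.
        Aws::S3::S3Client s3(cfg,
            Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
            /*useVirtualAddressing=*/false);

        // 1. CreateMultipartUpload: the server allocates an UploadId.
        Aws::S3::Model::CreateMultipartUploadRequest createReq;
        createReq.WithBucket("TEST").WithKey("key");
        auto created = s3.CreateMultipartUpload(createReq);
        if (!created.IsSuccess()) return 1;
        Aws::String uploadId = created.GetResult().GetUploadId();

        // 2. AbortMultipartUpload: discard it before any part is uploaded.
        Aws::S3::Model::AbortMultipartUploadRequest abortReq;
        abortReq.WithBucket("TEST").WithKey("key").WithUploadId(uploadId);
        s3.AbortMultipartUpload(abortReq);

        // 3. HeadObject should find nothing: the abort left no object behind.
        Aws::S3::Model::HeadObjectRequest headReq;
        headReq.WithBucket("TEST").WithKey("key");
        std::cout << "exists: " << s3.HeadObject(headReq).IsSuccess() << "\n"; // 0
    }
    Aws::ShutdownAPI(options);
    return 0;
}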
2025-04-17T17:54:36.929198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:36.929876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:36.930084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:36.932987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:36.934320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:36.934392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:36.934574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:36.934626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:36.934666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:36.934818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:36.941617Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:37.070727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:37.070982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:37.071228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:37.071498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:37.071556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:37.073979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:37.074109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:37.074333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:37.074401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:37.074438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:37.074474Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:37.076429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:37.076495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:37.076535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:37.078299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:37.078339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:37.078377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:37.078451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:37.082020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:37.084149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:37.084289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:37.084968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:37.085070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:37.085107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:37.085416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:37.085464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:37.085606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:37.085662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:37.087694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:37.087740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
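The numeric transitions in this trace ("2 -> 3", "3 -> 128", "128 -> 240" here, and "129 -> 240" in the backup operation earlier) are schemeshard sub-operation states. Matching each transition against the unit name logged just before it suggests the mapping sketched below; it is reconstructed from this log alone, not from the YDB sources, so treat the names and values as inferred.

// Hypothetical reconstruction from the trace above; the real YDB enum may differ.
enum class ETxState : unsigned {
    CreateParts       = 2,   // "TCreateParts ... ProgressState" precedes "2 -> 3"
    ConfigureParts    = 3,   // "TConfigureParts ... ProgressState" precedes "3 -> 128"
    Propose           = 128, // "TPropose ProgressState" precedes "128 -> 240"
    ProposedWaitParts = 129, // backup op above moves "129 -> 240" once all shards ack
    Done              = 240, // "TDone opId# ... ProgressState" follows both paths
};

The split between the small pre-plan values (2, 3) and the 128+ range mirrors the two phases visible in the trace: shard creation and configuration happen before the coordinator plan step, while proposal and completion happen after it.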
2025-04-17T17:54:37.087928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:37.087970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:37.088395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:37.088468Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:37.088558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:37.088591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:37.088627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:37.088654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:37.088689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:37.088727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:37.088782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:37.088811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:37.088878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:37.088919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:37.088949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:37.090801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:37.090920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:37.090959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 11 } 2025-04-17T17:54:37.859985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T17:54:37.860034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:54:37.860123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:54:37.860155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:54:37.860182Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:54:37.860223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:54:37.860285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T17:54:37.860324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:54:37.860737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:218:2217], Recipient [1:285:2272]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 2025-04-17T17:54:37.860796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-04-17T17:54:37.860857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T17:54:37.861994Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-04-17T17:54:37.862052Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-17T17:54:37.863155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:54:37.864773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:54:37.864810Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:54:37.864908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:54:37.865294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:54:37.866995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:54:37.867038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:54:37.867164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:4 2025-04-17T17:54:37.867430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:1033:2896], Recipient [1:285:2272]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1033:2896] ServerId: [1:1035:2898] } 2025-04-17T17:54:37.867479Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T17:54:37.867511Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T17:54:37.867818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:54:37.867859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:54:37.868426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1049:2912], Recipient [1:285:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:37.868476Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:37.868531Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:54:37.868696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:552:2487], Recipient [1:285:2272]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-04-17T17:54:37.868727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:54:37.868794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T17:54:37.868891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:54:37.868932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1047:2910] 2025-04-17T17:54:37.869112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:1049:2912], Recipient [1:285:2272]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:54:37.869148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:54:37.869184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-17T17:54:37.869854Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1050:2913], Recipient [1:285:2272]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:54:37.869903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:54:37.870002Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:37.870235Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 176us result status StatusSuccess 2025-04-17T17:54:37.870649Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:37.871472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:1051:2914], Recipient [1:285:2272]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-04-17T17:54:37.871522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-17T17:54:37.871559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2025-04-17T17:54:37.871589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:54:37.872057Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1052:2915], Recipient [1:285:2272]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:54:37.872101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:54:37.872184Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:37.875354Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 203us result status StatusSuccess 2025-04-17T17:54:37.875784Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink |79.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SystemView::SystemViewFailOps [GOOD] |79.6%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2025-04-17T17:52:55.898071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:55.899349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:55.899420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001068/r3tmp/tmpmJyihi/pdisk_1.dat 2025-04-17T17:52:56.364272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.443851Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:56.500052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:56.500207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:56.515452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:56.617172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.695188Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:56.696264Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:56.696733Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:56.696997Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:56.759564Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:56.760322Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:56.760447Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:56.770688Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:56.770803Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:56.770871Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:56.771288Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:56.771458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:56.771540Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:56.782837Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:56.829489Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:56.829698Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:56.829828Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:56.829868Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:56.829911Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:56.829945Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.830252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.830308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.830591Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:56.830690Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:56.830910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:56.830971Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:56.831015Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:56.831048Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:56.831103Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:56.831138Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:56.831183Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:56.831355Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.831395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.831450Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:56.831845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:56.831893Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:56.832001Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:56.832243Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:56.832305Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:56.832410Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:56.832496Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:56.832542Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:56.832573Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
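The repeating "Trying to execute ... on unit X" / "Execution status ... is Executed" / "Advance execution plan" lines are the datashard's execution-unit pipeline: every operation carries an ordered plan of units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, ...), and the executor loops over it, either advancing to the next unit, parking the operation until an external event such as the plan step, or deferring side effects to the Complete phase ("DelayComplete"). A simplified model of that loop follows, with invented names (TUnit, RunPlan) and deliberately reduced semantics; this is a sketch of the pattern, not YDB's actual classes.

#include <deque>
#include <functional>
#include <iostream>
#include <string>

// Status values mirror the ones printed in the trace.
enum class EStatus { Executed, DelayComplete, WaitForPlan };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;
};

void RunPlan(std::deque<TUnit> plan) {
    std::deque<std::string> delayed;
    while (!plan.empty()) {
        TUnit unit = plan.front();
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        EStatus st = unit.Execute();
        if (st == EStatus::WaitForPlan) {
            // Parked until TEvPlanStep arrives, as with WaitForPlan above.
            std::cout << "operation not ready on unit " << unit.Name << "\n";
            break;
        }
        if (st == EStatus::DelayComplete) {
            delayed.push_back(unit.Name); // side effects run in the Complete phase
        }
        std::cout << "Advance execution plan past unit " << unit.Name << "\n";
        plan.pop_front();
    }
    // Matches the trace: "Complete execution ... on unit StoreSchemeTx / FinishPropose"
    // fires even while the operation is parked on WaitForPlan.
    for (const std::string& name : delayed) {
        std::cout << "Complete execution on unit " << name << "\n";
    }
}

int main() {
    RunPlan({
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::WaitForPlan; }},
    });
}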
2025-04-17T17:52:56.832604Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.832937Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:56.832973Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:56.833005Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:56.833055Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.833110Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:56.833136Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:56.833165Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:56.833211Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.833238Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:56.839026Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:56.839093Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:56.850830Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:56.850908Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.850941Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.850998Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:56.851085Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:57.024342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.024396Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.024468Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:57.024824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:57.024860Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:57.024984Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:57.025023Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:57.025057Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:57.025094Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:57.041323Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:57.041412Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:57.042477Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.042523Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.042576Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037888 has finished 2025-04-17T17:54:37.391474Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:37.391544Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:54:37.391627Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:37.391694Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:37.391947Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:885:2711], Recipient [15:885:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:37.391989Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:37.392048Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:37.392086Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:37.392123Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:37.392161Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-04-17T17:54:37.392196Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-04-17T17:54:37.392231Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:54:37.392263Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-04-17T17:54:37.392296Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-04-17T17:54:37.392332Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-04-17T17:54:37.392502Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-17T17:54:37.392557Z node 15 :TX_DATASHARD TRACE: 
Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:54:37.392632Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-04-17T17:54:37.392665Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:37.392698Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:54:37.392746Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-04-17T17:54:37.392789Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-04-17T17:54:37.392835Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-04-17T17:54:37.392878Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:54:37.392908Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:37.392964Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-17T17:54:37.392996Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-17T17:54:37.393115Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-17T17:54:37.393148Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-17T17:54:37.393188Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-17T17:54:37.393229Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-17T17:54:37.393258Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:54:37.393286Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-17T17:54:37.393316Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:54:37.393348Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:37.393519Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-04-17T17:54:37.393559Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:54:37.393597Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:54:37.393638Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:54:37.393676Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:54:37.393705Z node 15 :TX_DATASHARD TRACE: Advance execution plan 
for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:54:37.393734Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-04-17T17:54:37.393779Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:37.393815Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:37.393855Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:54:37.393931Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:54:37.415938Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-17T17:54:37.416110Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:37.416190Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:54:37.416341Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1049:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:37.416449Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:37.416875Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-17T17:54:37.416929Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:37.416963Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:37.417010Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1049:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:37.417049Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:37.418967Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2025-04-17T17:54:37.419193Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:37.419323Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:37.419524Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:54:37.419600Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:54:37.419679Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:37.419742Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:37.419797Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-17T17:54:37.419871Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
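The huge quota figures in the read-iterator lines below ("quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583") are not corruption: an unlimited quota is encoded as the maximum unsigned 64-bit value, and each batch subtracts what it consumed, so sending 2 rows / 32 bytes leaves 2^64 - 1 minus 2 and minus 32 respectively, exactly the numbers printed. A short check of that arithmetic (variable names are illustrative):

#include <cstdint>
#include <iostream>
#include <limits>

int main() {
    // An unlimited quota is represented as the max uint64, as in the trace.
    std::uint64_t quotaRows  = std::numeric_limits<std::uint64_t>::max();
    std::uint64_t quotaBytes = std::numeric_limits<std::uint64_t>::max();
    quotaRows  -= 2;  // rowCount# 2
    quotaBytes -= 32; // bytes# 32
    std::cout << quotaRows  << "\n"; // 18446744073709551613
    std::cout << quotaBytes << "\n"; // 18446744073709551583
}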
2025-04-17T17:54:37.419908Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:37.419940Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:37.419974Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:37.420166Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2025-04-17T17:54:37.420691Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-17T17:54:37.420789Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 1} after executionsCount# 1 2025-04-17T17:54:37.420874Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:37.421239Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 1} finished in read 2025-04-17T17:54:37.421360Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:54:37.421397Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:37.421429Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:37.421459Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:37.421515Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:54:37.421541Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:37.421582Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-17T17:54:37.421653Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:54:37.421885Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive |79.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log}
|79.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain
>> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45
>> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD]
>> TGRpcStreamingTest::ClientNeverWrites
>> TComputeScheduler::TTotalLimits [GOOD]
>> TKqpScanData::DifferentNumberOfInputAndResultColumns
>> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD]
>> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD]
>> TGRpcStreamingTest::ClientDisconnects
>> TGRpcStreamingTest::ReadFinish
|79.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::TTotalLimits [GOOD]
Test command err: 1610 1600 1610 1600
|79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD]
|79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview
|79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview
|79.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview
|79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest
>> DataShardVolatile::DistributedWriteThenReadIterator [GOOD]
>> DataShardVolatile::DistributedWriteThenReadIteratorStream
>> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD]
>> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD]
>> KqpPg::TableDeleteAllData+useSink [GOOD]
>> KqpPg::TableDeleteAllData-useSink
>> TKqpScanData::EmptyColumns
>> TComputeScheduler::QueryLimits
>> TKqpScanData::EmptyColumns [GOOD]
>> TComputeScheduler::QueryLimits [GOOD]
>> TComputeScheduler::ResourceWeight
>> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD]
>> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink
>> TKqpScanData::FailOnUnsupportedPgType
>> TComputeScheduler::ResourceWeight [GOOD]
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest
>> TKqpScanData::FailOnUnsupportedPgType [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD]
Test command err: 2025-04-17T17:52:55.665087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:55.665677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:55.665744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00106d/r3tmp/tmpGvWaxa/pdisk_1.dat 2025-04-17T17:52:56.090937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.183215Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:56.236094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:56.236244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:56.250350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:56.340006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.385916Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:56.392141Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:56.392626Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:56.392856Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:56.447427Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:56.448284Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:56.448391Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:56.453233Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:56.453311Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:56.453359Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:56.453749Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:56.453881Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:56.454031Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:56.464851Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:56.496741Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:56.496930Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:56.497050Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:56.497089Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:56.497146Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:56.497195Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.497427Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.497478Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.497737Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:56.497821Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:56.497922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:56.497978Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:56.498029Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:56.498067Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:56.498122Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:56.498157Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:56.498301Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:56.498483Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.498525Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.498570Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:56.498974Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:56.499021Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:56.499142Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:56.499391Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:56.499455Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:56.499555Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:56.499609Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:56.499652Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:56.499690Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:52:56.499725Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.500046Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:56.500092Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:56.500133Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:56.500206Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.500256Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:56.500289Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:56.500321Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:56.500356Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.500386Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:56.501811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:56.501866Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:56.512482Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:56.512547Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.512574Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.512605Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:56.512692Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:56.672224Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.672288Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.672331Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:56.672788Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:56.672827Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:56.672952Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.673002Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:56.673047Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:56.673120Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:56.677815Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:56.677892Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.679462Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.679508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.679563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... :2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.713378Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.713577Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709543002, quota bytes left# 18446744073709000383, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.713694Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.713727Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.713757Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.713992Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542903, quota bytes left# 18446744073708994047, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.714105Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.714137Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.714167Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.714440Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542804, quota bytes left# 18446744073708987711, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.714532Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.714568Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.714596Z 
node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.714792Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542705, quota bytes left# 18446744073708981375, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.714893Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.714926Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.714961Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.715177Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542606, quota bytes left# 18446744073708975039, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.715310Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.715351Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.715390Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.715611Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542507, quota bytes left# 18446744073708968703, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.715703Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.715736Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.715764Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.716009Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542408, quota bytes left# 18446744073708962367, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.716148Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.716184Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.716214Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.716512Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542309, quota bytes left# 18446744073708956031, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.716625Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender 
[15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.716661Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.716695Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.716912Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542210, quota bytes left# 18446744073708949695, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.717037Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.717070Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.717105Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.717311Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542111, quota bytes left# 18446744073708943359, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.717438Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.717474Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.717510Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.717714Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542012, quota bytes left# 18446744073708937023, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.717805Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.717838Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.717864Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.718065Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541913, quota bytes left# 18446744073708930687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.718195Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.718234Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.718262Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.718478Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 
6336, quota rows left# 18446744073709541814, quota bytes left# 18446744073708924351, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.718616Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.718648Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.718675Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.718871Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541715, quota bytes left# 18446744073708918015, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.718967Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.719005Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.719033Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.719227Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541616, quota bytes left# 18446744073708911679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.719355Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:40.719387Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-04-17T17:54:40.719414Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-04-17T17:54:40.719529Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 1, bytes# 64, quota rows left# 18446744073709541615, quota bytes left# 18446744073708911615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:40.719590Z node 15 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[15:594:2519], 1} finished in ReadContinue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:39.516028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:39.516111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:39.516150Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:39.516216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:39.516262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:39.516298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:39.516362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:39.516455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:39.516723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:39.584720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:39.584780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:39.597343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:39.597729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:39.597843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:39.606022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:39.606237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:39.606838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.607055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:39.610594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.612210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:39.612279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.612493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:39.612549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:39.612606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:39.612765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.619995Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:39.743229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:39.743466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.743698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:39.743912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:39.743967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.746387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.746531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:39.746707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.746767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:39.746803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:39.746835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:39.748959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.749031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:39.749074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:39.750916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.750966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.751018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.751075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.754743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:39.756768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:39.756950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-04-17T17:54:39.757887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.758016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:39.758063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.758361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:39.758422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.758606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:39.758679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:39.760913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:39.760959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:39.761127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.761183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:39.761554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.761598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:39.761687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:39.761731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.761766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:39.761793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.761830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:39.761865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.761914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:39.761944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:39.762008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:39.762046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:39.762078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-04-17T17:54:39.763846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:39.763966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:39.764011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 94046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:41.670134Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-17T17:54:41.670218Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-17T17:54:41.670555Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-17T17:54:41.670648Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-17T17:54:41.670732Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-17T17:54:41.671072Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 
16975298 UsedReserveSize: 16975298 2025-04-17T17:54:41.671171Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-17T17:54:41.671378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-17T17:54:41.684862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T17:54:41.695268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:41.695441Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 147us result status StatusSuccess 2025-04-17T17:54:41.695863Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:42.284776Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-17T17:54:42.284849Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-17T17:54:42.285206Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-17T17:54:42.285289Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 
2025-04-17T17:54:42.285365Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-17T17:54:42.285737Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-04-17T17:54:42.285832Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-17T17:54:42.286053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-17T17:54:42.301144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T17:54:42.311638Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:42.311762Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 139us result status StatusSuccess 2025-04-17T17:54:42.312052Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:42.353653Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:42.353893Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/Topic1" took 251us result status StatusSuccess 2025-04-17T17:54:42.354291Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::QueryLimits [GOOD] Test command err: 800 800 800 800 |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::ResourceWeight [GOOD] Test command err: 510 500 1510 1500 990 1000 1000 1000 |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |79.7%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TKqpScanData::UnboxedValueSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2025-04-17T17:52:54.790592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:54.791110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:54.791170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00113f/r3tmp/tmpMsZlS6/pdisk_1.dat 2025-04-17T17:52:55.208785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:55.251649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:55.303825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:55.303960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:55.316039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:55.406495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:55.449904Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:55.451102Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:55.451573Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:55.451842Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:55.500524Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:55.501420Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:55.501515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:55.503346Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:55.503427Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:55.503481Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:55.503885Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:55.504069Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:55.504166Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:55.515064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:55.565398Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:55.565606Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:55.565779Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:55.565815Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:55.565846Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:55.565879Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:55.566129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.566784Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.567127Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:55.567228Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:55.567357Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:55.567408Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:55.567459Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:55.567491Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:55.567525Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:55.567558Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:55.567607Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:55.567747Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.567789Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.567828Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:55.568208Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:55.568294Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:55.568413Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:55.568696Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:55.568778Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:55.568854Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:55.568906Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:55.568946Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:55.568979Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:52:55.569032Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:55.569350Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:55.569410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:55.569444Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:55.569473Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:55.569515Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:55.569544Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:55.569577Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:55.569630Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:55.569658Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:55.571910Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:55.571959Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:55.587684Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:55.587782Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:55.587823Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:55.587865Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:55.587993Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:55.764770Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.764828Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.764862Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:55.765345Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:55.765389Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:55.765498Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:55.765541Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:55.765575Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:55.765602Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:55.769648Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:55.769715Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:55.770509Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.770548Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.770596Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... ed 0 immediate 0 planned 0 2025-04-17T17:54:41.992794Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:54:41.992854Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:41.992914Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:41.993139Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:885:2711], Recipient [15:885:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:41.993179Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:41.993227Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:41.993261Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:41.993297Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:41.993334Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-17T17:54:41.993375Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-17T17:54:41.993412Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:41.993445Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-17T17:54:41.993475Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-17T17:54:41.993509Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-17T17:54:41.993639Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-17T17:54:41.993685Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:41.993715Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-17T17:54:41.993746Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:41.993777Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:54:41.993819Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-17T17:54:41.993855Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-17T17:54:41.993891Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-17T17:54:41.993932Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:41.993957Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:41.993981Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-17T17:54:41.994005Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-17T17:54:41.994095Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-17T17:54:41.994123Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-17T17:54:41.994161Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-17T17:54:41.994218Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-17T17:54:41.994242Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:41.994267Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-17T17:54:41.994294Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:54:41.994321Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:41.994453Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-17T17:54:41.994483Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:54:41.994517Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:54:41.994552Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:54:41.994589Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:41.994616Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:54:41.994643Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-04-17T17:54:41.994678Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:41.994713Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:41.994749Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:54:41.994784Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:54:42.005866Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-17T17:54:42.005994Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:42.006063Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:54:42.006155Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:42.006259Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:42.006593Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-17T17:54:42.006629Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:42.006648Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:42.006677Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:42.006704Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:42.008309Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-17T17:54:42.008487Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:42.008602Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:42.008732Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.008800Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:54:42.008861Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:42.008912Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:42.008963Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-17T17:54:42.009023Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.009045Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:42.009063Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:42.009081Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:42.009217Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-17T17:54:42.009624Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:54:42.009694Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-17T17:54:42.009752Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 10} after executionsCount# 1 2025-04-17T17:54:42.009845Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:42.010039Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 10} finished in read 2025-04-17T17:54:42.010121Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.010141Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:42.010159Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:42.010198Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:42.010237Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.010254Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:42.010280Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T17:54:42.010331Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:54:42.010490Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TGRpcStreamingTest::ReadFinish [GOOD] >> TKqpScanData::UnboxedValueSize [GOOD] >> TSchemeShardSubDomainTest::Delete >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:39.572727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:39.572851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:39.572892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:39.572943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:39.573009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:39.573043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:39.573114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:39.573203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:39.573535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:39.659315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:39.659382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:39.675710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:39.676195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:39.676371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:39.685248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:39.685428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:39.686044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.686273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:39.689022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.690308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:39.690373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.690566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:39.690616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:39.690660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:39.690788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.697224Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:39.819354Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:39.819621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.819846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:39.820101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:39.820158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.822668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.822809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:39.823015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.823084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:39.823121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:39.823157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:39.825311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.825383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:39.825425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:39.827693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.827746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.827791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.827858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.831669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:39.834018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:39.834215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:39.835201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.835388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:39.835455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.835776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:39.835843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.836031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:39.836134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:39.838392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:39.838440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:39.838611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.838652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:39.839040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.839089Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:39.839184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:39.839234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.839280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:39.839319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.839358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:39.839400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.839468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:39.839503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:39.839570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-04-17T17:54:39.839611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:39.839653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:39.841557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:39.841682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:39.841721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 0s, InflightLimit# 10 2025-04-17T17:54:44.022249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:44.022283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:44.022314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:44.022552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:44.022605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:44.022670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:44.022930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:44.036705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:44.038045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:44.038192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:44.038417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:44.038457Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:44.038757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:44.039497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-17T17:54:44.039607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:54:44.039696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.039773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.040141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:44.040330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 
2025-04-17T17:54:44.040416Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T17:54:44.040600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-17T17:54:44.040729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.041002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:44.041042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:54:44.041134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:44.041270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:44.041473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-17T17:54:44.041733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.041822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.042868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.043051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.043170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.043209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.043247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:44.051131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:44.051204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:44.051527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:44.051588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:44.051633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:44.051801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:760:2714] sender: [1:812:2058] recipient: [1:15:2062] 2025-04-17T17:54:44.097728Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:44.098010Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 317us result status StatusSuccess 2025-04-17T17:54:44.099672Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:44.102284Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:44.102507Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 241us result status StatusSuccess 2025-04-17T17:54:44.102989Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2025-04-17T17:52:56.311824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:56.312385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:56.312466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00105b/r3tmp/tmpF6S3VB/pdisk_1.dat 2025-04-17T17:52:56.795724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.861074Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:56.902611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:56.902910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:56.915443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:57.022862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:57.100950Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:57.102016Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:57.102554Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:57.102837Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:57.155719Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:57.156513Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:57.156626Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:57.158452Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:57.158531Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:57.158583Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:57.159009Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:57.159141Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:57.159216Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:57.170757Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:57.194127Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:57.194325Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:57.194437Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:57.194470Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:57.194505Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:57.194539Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:57.194756Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.194821Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.195071Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:57.195155Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:57.195272Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:57.195326Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:57.195385Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:57.195421Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:57.195465Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:57.195496Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:57.195538Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:57.195670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.195710Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.195751Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:57.196104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:57.196159Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:57.196257Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:57.196524Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:57.196586Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:57.196687Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:57.196745Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:57.196782Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:57.196814Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:52:57.196845Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:57.197123Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:57.197160Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:57.197193Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:57.197239Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:57.197288Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:57.197320Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:57.197348Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:57.197386Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:57.197425Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:57.199686Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:57.199741Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:57.210865Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:57.210937Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:57.210980Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:57.211015Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:57.211115Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:57.375581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.375636Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.375670Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:57.376133Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:57.376179Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:57.376302Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:57.376347Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:57.376385Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:57.376433Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:57.384050Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:57.384127Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:57.385004Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.385051Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.385105Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... planned 0 immediate 0 planned 0 2025-04-17T17:54:42.513737Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:54:42.513802Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:42.513856Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:42.514121Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:885:2711], Recipient [15:885:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:42.514162Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:42.514241Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T17:54:42.514283Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:54:42.514323Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:42.514367Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-17T17:54:42.514408Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-17T17:54:42.514444Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:42.514476Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-17T17:54:42.514511Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-17T17:54:42.514546Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-17T17:54:42.514686Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-17T17:54:42.514733Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:42.514762Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-17T17:54:42.514792Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:42.514820Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:54:42.514863Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-17T17:54:42.514911Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-17T17:54:42.514963Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-17T17:54:42.515019Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:42.515051Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:42.515079Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-17T17:54:42.515108Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-17T17:54:42.515217Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-17T17:54:42.515278Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-17T17:54:42.515328Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-17T17:54:42.515375Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-17T17:54:42.515438Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:42.515472Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-17T17:54:42.515507Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:54:42.515541Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:42.515713Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-17T17:54:42.515746Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:54:42.515792Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:54:42.515837Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:54:42.515876Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-17T17:54:42.515904Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:54:42.515934Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-04-17T17:54:42.515979Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:42.516025Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:54:42.516070Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:54:42.516108Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:54:42.527515Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-17T17:54:42.527684Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:42.527770Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:54:42.527883Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:42.527982Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:42.528452Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-17T17:54:42.528506Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:54:42.528538Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:54:42.528586Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1090:2867], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:42.528631Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:54:42.530398Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-17T17:54:42.530633Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:42.530758Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:42.530906Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.530978Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:54:42.531055Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:42.531119Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:42.531175Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-17T17:54:42.531248Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.531277Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:42.531305Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:42.531333Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:42.531533Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-17T17:54:42.532039Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:54:42.532129Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-17T17:54:42.532228Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 3} after executionsCount# 1 2025-04-17T17:54:42.532325Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:42.532595Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} finished in read 2025-04-17T17:54:42.532703Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.532740Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:42.532770Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:42.532801Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:42.532859Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:42.532887Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:42.532930Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T17:54:42.533000Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:54:42.533206Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools
------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest
>> TGRpcStreamingTest::ClientDisconnects [GOOD]
Test command err:
2025-04-17T17:54:41.891346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342041589471468:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:41.891403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dd0/r3tmp/tmpVx41OI/pdisk_1.dat 2025-04-17T17:54:42.688896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:42.689028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:42.704335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:42.757127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:42.927891Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.151626s 2025-04-17T17:54:42.927992Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.151757s 2025-04-17T17:54:43.188092Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:44120 2025-04-17T17:54:43.188347Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:44120 2025-04-17T17:54:43.188489Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7494342050179406615:2266] peer# ipv6:[::1]:44120 2025-04-17T17:54:43.188520Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-17T17:54:43.188816Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-04-17T17:54:43.188841Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# unknown (finish done)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest
>> TGRpcStreamingTest::ReadFinish [GOOD]
Test command err:
2025-04-17T17:54:41.891337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342044400452032:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:41.891424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dbd/r3tmp/tmpVJhpp4/pdisk_1.dat 2025-04-17T17:54:42.692968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:42.693072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:42.704201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:42.752319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:42.927938Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.152195s 2025-04-17T17:54:42.928006Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.152337s 2025-04-17T17:54:43.187778Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream accepted Name# Session ok# true peer# ipv6:[::1]:51394 2025-04-17T17:54:43.188294Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade attach Name# Session actor# [1:7494342052990387178:2266] peer# ipv6:[::1]:51394 2025-04-17T17:54:43.188368Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade read Name# Session peer# ipv6:[::1]:51394 2025-04-17T17:54:43.188483Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade finish Name# Session peer# ipv6:[::1]:51394 grpc status# (0) message# 2025-04-17T17:54:43.188868Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] read finished Name# Session ok# false data# peer# ipv6:[::1]:51394 2025-04-17T17:54:43.188916Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream done notification Name# Session ok# true peer# ipv6:[::1]:51394 2025-04-17T17:54:43.188972Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream finished Name# Session ok# true peer# ipv6:[::1]:51394 grpc status# (0) message# 2025-04-17T17:54:43.189044Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] deregistering request Name# Session peer# ipv6:[::1]:51394 (finish done) 2025-04-17T17:54:43.189135Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46
------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest
>> TGRpcStreamingTest::ClientNeverWrites [GOOD]
Test command err:
2025-04-17T17:54:41.891266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342040610330655:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:41.891346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dbe/r3tmp/tmpuzXaXN/pdisk_1.dat 2025-04-17T17:54:42.689064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:42.689160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:42.704197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:42.775685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:42.927922Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.151314s 2025-04-17T17:54:42.927990Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.151397s 2025-04-17T17:54:43.187761Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream accepted Name# Session ok# true peer# ipv6:[::1]:34856 2025-04-17T17:54:43.188305Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade attach Name# Session actor# [1:7494342049200265801:2266] peer# ipv6:[::1]:34856 2025-04-17T17:54:43.188351Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade read Name# Session peer# ipv6:[::1]:34856 2025-04-17T17:54:43.188458Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade write Name# Session data# peer# ipv6:[::1]:34856 2025-04-17T17:54:43.188824Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade finish Name# Session peer# ipv6:[::1]:34856 grpc status# (0) message# 2025-04-17T17:54:43.188860Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] write finished Name# Session ok# true peer# ipv6:[::1]:34856 2025-04-17T17:54:43.188893Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-17T17:54:43.189146Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] read finished Name# Session ok# false data# peer# ipv6:[::1]:34856 2025-04-17T17:54:43.189191Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream done notification Name# Session ok# true peer# ipv6:[::1]:34856 2025-04-17T17:54:43.189198Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-17T17:54:43.189228Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream finished Name# Session ok# true peer# ipv6:[::1]:34856 grpc status# (0) message# 2025-04-17T17:54:43.189233Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-17T17:54:43.189311Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] deregistering request Name# Session peer# ipv6:[::1]:34856 (finish done)
>> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD]
>> TSchemeShardSubDomainTest::Delete [GOOD]
>> Cdc::EnqueueRequestProcessSend [GOOD]
>> Cdc::InitialScanAndResolvedTimestamps
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD]
>> TSchemeShardSubDomainTest::ForceDropTwice
>> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD]
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest
>> TSchemeShardSubDomainTest::Delete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:45.322137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:45.322233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.322273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:45.322303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:45.322361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:45.322389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:45.322445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.322532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:45.322816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:45.411971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:45.412037Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:45.431682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:45.432043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:45.432211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:45.441565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:45.441795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:45.442487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.442732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:45.455854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.457423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.457485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.457677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:45.457723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.457759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:45.457879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.464540Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:45.569160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:45.569403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.569620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:45.569846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:45.569895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.575042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.575181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:45.575361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.575453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:45.575490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:45.575525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:45.577406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T17:54:45.577455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:45.577491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:45.583838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.583900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.583939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.584004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.587972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:45.589960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:45.590157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:45.591174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.591307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.591363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.591719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:45.591768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.591922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:45.592025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:45.594167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.594234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.594420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.594475Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:45.594873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.594918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:45.595001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.595033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.595093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.595125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.595158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:45.595205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.595241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:45.595270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:45.595340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:45.595391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:45.595420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:45.597161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.597266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.597300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:54:45.733252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:54:45.733279Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:54:45.733306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:54:45.733380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T17:54:45.737492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:54:45.737565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:54:45.737597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:54:45.738364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:54:45.738506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:54:45.738611Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-17T17:54:45.739861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.740260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:45.741712Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T17:54:45.742082Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-17T17:54:45.742559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T17:54:45.742815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-17T17:54:45.743688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:54:45.743839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:54:45.744222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:54:45.744261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:54:45.744355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:54:45.744916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:54:45.744963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:54:45.745012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:45.746341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:54:45.746379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T17:54:45.748580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:54:45.748627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T17:54:45.748712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:54:45.748746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:54:45.748889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:54:45.749027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T17:54:45.749262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:54:45.749301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:54:45.749701Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:54:45.749815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:54:45.749861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:491:2445] TestWaitNotification: OK eventTxId 101 2025-04-17T17:54:45.750385Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:45.750591Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 243us result status StatusPathDoesNotExist 2025-04-17T17:54:45.750759Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:54:45.751249Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:45.751451Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 194us result status StatusSuccess 2025-04-17T17:54:45.751799Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-17T17:54:45.752332Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-17T17:54:45.752424Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-17T17:54:45.752497Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-17T17:54:45.752905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:45.753109Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 179us result status StatusSuccess 2025-04-17T17:54:45.753408Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:45.597144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:45.597207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.597235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:45.597286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:45.597370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:45.597444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:45.597518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.597607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:45.597937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Execute 2025-04-17T17:54:45.670011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:45.670065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:45.685612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:45.685929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:45.686070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:45.693776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:45.693954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:45.694529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.694716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:45.697261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.698602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.698660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.698835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:45.698896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.698943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:45.699061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.705507Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:45.823878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:45.824150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.824406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:45.824643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:45.824698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.827675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.827842Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:45.828094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.828169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:45.828211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:45.828249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:45.830538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.830602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:45.830662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:45.832861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.832918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.832968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.833055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.836786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:45.838702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:45.838914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:45.839876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.840005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.840055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.840339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:45.840386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.840526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-17T17:54:45.840630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:45.842604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.842653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.842805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.842859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:45.843174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.843215Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:45.843297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.843327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.843409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.843443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.843481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:45.843515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.843547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:45.843574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:45.843628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:45.843663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:45.843713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:45.845005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.845079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.845103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T17:54:45.845130Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T17:54:45.845161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:45.845248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T17:54:45.848723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T17:54:45.849249Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-17T17:54:45.849884Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T17:54:45.866231Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T17:54:45.868409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:45.868654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.868738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-04-17T17:54:45.869553Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:54:45.872617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.872813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-04-17T17:54:45.873480Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-04-17T17:54:45.876356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:45.876621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.876716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-04-17T17:54:45.880118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain 
request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.880285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-17T17:54:45.880592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T17:54:45.880687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-17T17:54:45.880792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:54:45.880826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:54:45.881208Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T17:54:45.881311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T17:54:45.881339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:288:2279] 2025-04-17T17:54:45.881492Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:54:45.881520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:54:45.881534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:288:2279] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-17T17:54:45.881926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:45.882069Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 164us result status StatusPathDoesNotExist 2025-04-17T17:54:45.882237Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:54:45.882625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:45.882782Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 133us result status StatusPathDoesNotExist 2025-04-17T17:54:45.882897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:54:45.883268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:45.883442Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 175us result status StatusSuccess 2025-04-17T17:54:45.883792Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:54:45.559457Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:45.559560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.559605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:45.559640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:45.559682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:45.559711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:45.559767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.559873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:45.560160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:45.633686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:45.633738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:45.648643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:45.648773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:45.648936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:45.655528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:45.655842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:45.656485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.656791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:45.660775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.662001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.662059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.662209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:45.662253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.662292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:45.662482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.669078Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:54:45.773986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:45.774247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.774477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:45.774701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:45.774755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.777551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.777674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:45.777820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.777867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:45.777905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:45.777939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:45.780251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.780307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:45.780348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:45.781952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.782008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.782038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.782112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.790899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:45.793062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:45.793240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:45.793986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.794103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.794159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.794420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:45.794476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.794603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:45.794708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:45.797810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.797856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.798014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.798046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:45.798343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.798396Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:45.798486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.798524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.798573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.798610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.798646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:45.798701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.798734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:45.798764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-04-17T17:54:45.798830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:45.798882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:45.798929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:45.800420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.800518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.800565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:54:46.140163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:46.140217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:46.140362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:54:46.140501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:46.140548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T17:54:46.140624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-17T17:54:46.140912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.140960Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:54:46.141062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:54:46.141096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:54:46.141152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:54:46.141189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:54:46.141227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T17:54:46.141266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:54:46.141306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:54:46.141337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:54:46.141549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-04-17T17:54:46.141606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 101, publications: 2, subscribers: 1 2025-04-17T17:54:46.141657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-04-17T17:54:46.141689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T17:54:46.142431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:54:46.142542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:54:46.142587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:54:46.142626Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-17T17:54:46.142668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:46.143641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:54:46.143726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:54:46.143753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:54:46.143783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T17:54:46.143810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-17T17:54:46.143894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-17T17:54:46.143951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:920:2753] 2025-04-17T17:54:46.149226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:54:46.150667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:54:46.150762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:54:46.150806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:921:2754] TestWaitNotification: OK eventTxId 101 2025-04-17T17:54:46.151329Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-04-17T17:54:46.151514Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 208us result status StatusSuccess 2025-04-17T17:54:46.151841Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.152276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:46.152426Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 134us result status StatusSuccess 2025-04-17T17:54:46.152655Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.152971Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:46.153080Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 125us result status StatusSuccess 2025-04-17T17:54:46.153412Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::Join |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] >> KqpSysColV1::InnerJoinSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:43.547886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:43.547980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
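The two StatusInvalidParameter rejections earlier in this run (txId 100 and txId 101) exercise the schemeshard's symmetry check on subdomain processing params: a CreateSubDomain proposal must declare coordinators and mediators together or omit both, which is why the later successful describes show matched sets (3 coordinators + 3 mediators per subdomain). A minimal sketch of that validation rule, in Python and independent of the actual TSchemeShard code — the function name and shape here are illustrative, not a YDB API:

def validate_subdomain(coordinators: int, mediators: int) -> tuple[bool, str]:
    """Mirrors the check behind the 'Malformed subdomain request' lines above:
    coordinators and mediators must be requested together, or both omitted."""
    if coordinators and not mediators:
        return False, "cant create subdomain with coordinators, but no mediators"
    if mediators and not coordinators:
        return False, "cant create subdomain with mediators, but no coordinators"
    return True, "ok"

# The two rejected proposals from the log:
assert validate_subdomain(coordinators=1, mediators=0)[0] is False  # txId 100, USER_1
assert validate_subdomain(coordinators=0, mediators=1)[0] is False  # txId 101, USER_2
# The CreateSubDomainsInSeparateDir case requests both and is accepted:
assert validate_subdomain(coordinators=3, mediators=3)[0] is True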
2025-04-17T17:54:43.548029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:43.548065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:43.548114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:43.548144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:43.548216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:43.548352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:43.548667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:43.623402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:43.623461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:43.639066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:43.639579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:43.639750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:43.649599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:43.649837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:43.650578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:43.650813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:43.654564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:43.656224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:43.656298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:43.656514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:43.656584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:43.656633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:43.656794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.665566Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:43.820114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:43.820408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.820673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:43.820959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:43.821037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.825225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:43.825364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:43.825587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.825656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:43.825696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:43.825734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:43.828063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.828133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:43.828175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:43.830052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.830114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.830157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:43.830257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:43.834143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:43.836216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:43.836396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-04-17T17:54:43.837413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:43.837537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:43.837594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:43.837898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:43.837964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:43.838161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:43.838289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:43.840471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:43.840516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:43.840709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:43.840767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:43.841181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:43.841236Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:43.841334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:43.841369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:43.841407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:43.841437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:43.841477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:43.841524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:43.841578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:43.841613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:43.841683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:43.841724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:43.841770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-04-17T17:54:43.843740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:43.843874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:43.843928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.676267Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-17T17:54:45.676356Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-17T17:54:45.677198Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-04-17T17:54:45.677321Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-17T17:54:45.677561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-17T17:54:45.692706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T17:54:45.704063Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.704279Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 242us result status StatusSuccess 2025-04-17T17:54:45.704719Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.292357Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-17T17:54:46.292434Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-17T17:54:46.293242Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-04-17T17:54:46.293343Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-17T17:54:46.293594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-17T17:54:46.307190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T17:54:46.317724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.317905Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 210us result status StatusSuccess 2025-04-17T17:54:46.318372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
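In the metering test above, the topic is created with LifetimeSeconds: 11, WriteSpeedInBytesPerSecond: 17 and 3 partitions, and because MeteringMode is METERING_MODE_REQUEST_UNITS every periodic stats round reports ReserveSize: 0 with AccountSize equal to the actual DataSize (16975298). A rough sketch of the two accounting modes, assuming reserved storage is write speed × retention per partition (consistent with YDB's documented reserved-capacity model, but the semantics and constants below are read off this log, not taken from any API):

def topic_storage_accounting(mode: str, data_size: int,
                             partitions: int = 3,
                             lifetime_s: int = 11,
                             write_speed_bps: int = 17) -> dict:
    """Approximate the DiskSpaceUsage.Topics figures seen in the stats reports."""
    # Reservation only matters in capacity mode: bytes held per partition
    # for the full retention window at the configured write speed.
    reserve = partitions * lifetime_s * write_speed_bps
    if mode == "METERING_MODE_REQUEST_UNITS":
        # Pay-per-use: nothing is reserved, the real data size is accounted.
        return {"ReserveSize": 0, "AccountSize": data_size, "DataSize": data_size}
    # METERING_MODE_RESERVED_CAPACITY (semantics approximated here):
    # the reservation is billed even when actual data stays below it.
    return {"ReserveSize": reserve,
            "AccountSize": max(reserve, data_size),
            "DataSize": data_size}

# Matches the repeated report above: DataSize 16975298, UsedReserveSize 0.
print(topic_storage_accounting("METERING_MODE_REQUEST_UNITS", 16975298))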
2025-04-17T17:54:46.360397Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:46.360635Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 291us result status StatusSuccess 2025-04-17T17:54:46.361060Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.361860Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:632:2553] connected; active server actors: 1 2025-04-17T17:54:46.379557Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-04-17T17:54:46.379956Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-17T17:54:46.382161Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.382346Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 206us result status StatusSuccess 2025-04-17T17:54:46.382747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.382964Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-04-17T17:54:46.383943Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-04-17T17:54:46.417831Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:679:2588] connected; active server actors: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:45.895260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:45.895343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.895399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:45.895435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:45.895499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:45.895540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:45.895609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.895687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:45.895994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:45.978676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:45.978739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:45.994342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:45.994778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:45.994960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:46.003347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:46.003581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:46.004245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:46.004447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:46.007131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:46.008415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:46.008471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:46.008654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:46.008710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:46.008758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:46.008883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.015116Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:46.127452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:46.127666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.127837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:46.128022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:46.128063Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.129874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:46.130008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:46.130136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.130202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:46.130234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:46.130260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:46.131723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.131769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:46.131795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:46.133150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.133182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.133213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:46.133258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.135869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:46.137501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:46.137689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:46.138698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:46.138820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.138878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-17T17:54:46.139158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:46.139212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:46.139386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:46.139490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:46.141634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:46.141685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:46.141843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:46.141903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:46.142300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.142350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:46.142447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:46.142487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.142544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:46.142581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.142636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:46.142671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.142706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:46.142737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:46.142824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:46.142868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:46.142901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:46.144650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:46.144758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:46.144794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rd: 72057594046678944, cookie: 103 2025-04-17T17:54:46.628808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.628953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.629237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.629288Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:54:46.629380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:54:46.629417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:54:46.629467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:54:46.629501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:54:46.629546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:54:46.629615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:488:2444] message: TxId: 103 2025-04-17T17:54:46.629663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:54:46.629702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:54:46.629736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:54:46.629862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T17:54:46.631834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:54:46.631873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:489:2445] TestWaitNotification: OK eventTxId 103 2025-04-17T17:54:46.632383Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:46.632572Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 231us result status StatusSuccess 2025-04-17T17:54:46.633062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.633721Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:46.633972Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 231us result status StatusSuccess 2025-04-17T17:54:46.634433Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.635042Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:46.635189Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 154us result status StatusSuccess 2025-04-17T17:54:46.635495Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.635957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:46.636152Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 193us result status StatusSuccess 2025-04-17T17:54:46.636545Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> KqpSysColV1::StreamInnerJoinTables |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |79.8%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive >> KqpSysColV1::UpdateAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:18.583058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:18.583124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:18.583162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:18.583210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:18.583260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:18.583296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:18.583360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:18.583458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:18.583782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:18.681202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:18.681260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:18.711694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:18.712137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:18.712299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:18.723805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:18.723969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:18.724583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:18.724798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:18.729871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:18.731214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:18.731284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:18.731458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:18.731498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:18.731529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:18.731678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.739897Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:18.889863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:18.890109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.893055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:18.893338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, 
opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:18.893404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.906492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:18.906644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:18.906856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.906915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:18.906954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:18.907008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:18.909975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.910039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:18.910076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:18.917725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.917833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.917884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:18.917970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:18.921855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:18.924480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:18.924692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:18.925817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:18.925955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-17T17:54:18.926053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:18.926377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:18.926438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:18.926687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:18.926780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:18.929275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:18.929357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:18.929563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:18.929605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:18.930049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:18.930118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:18.930240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:18.930287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:18.930339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:18.930369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:18.930406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:18.930445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:18.930487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:18.930520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:18.930588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:18.930630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:18.930665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:18.932572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:18.932697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-17T17:54:18.932756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... val# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:45.194714Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.194772Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:45.194818Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:45.194864Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:45.194910Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:45.194984Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.195093Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:45.195505Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:45.217900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:45.219669Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:45.219918Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:45.220074Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:45.220124Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:45.220488Z node 5 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:45.221420Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.221550Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.221669Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.222138Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.222267Z node 5 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T17:54:45.224479Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.224607Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.224713Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.224861Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.224954Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.225115Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, 
read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.225470Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.225612Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.226062Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.226195Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.226406Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.226521Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.226638Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.226949Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.227060Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.227223Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.227503Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.227712Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.227771Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.227830Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.238397Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.238491Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.239055Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:45.239128Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.239186Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:45.239846Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:375:2344] sender: [5:432:2058] recipient: [5:15:2062] 2025-04-17T17:54:45.283177Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T17:54:45.283252Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-17T17:54:45.329380Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-04-17T17:54:45.329551Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.329598Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.329796Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.329860Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:426:2384], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-17T17:54:45.330592Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2025-04-17T17:54:47.331588Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T17:54:47.347381Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1Njg3LCJpYXQiOjE3NDQ5MTI0ODcsInN1YiI6InVzZXIxIn0.ec1Co99gKK2PaPiy4Wfn7yPfIQ0s9G0yoOeVl79Xr4nBRjimknBibTxbe4NFomRUisFbKdK0JK9KErPfkDI4sDwVYDm1rEpmbpXBnpa13_deyEw4g5W6Qcz-yD52uKjS4R_J5QTwwvhvFBSdx7FII96Bn4ECvZAr3JFyNFjCTVQW_LV-tuYYCoSstU4e4yxTVqeoJ-ANyP-AwJqoH42qGp-25xdg46HApDrs8jTcO7dl6vuKMKOueRLWtETGXAKQrYeEiXJao1qAZBnHnUi3obcTEidODD13hj8M3r4Tr3lOwh1JqsmZCQWfjEDDh8jgnmojj5J9NZdhab0PL_XxWw" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU1Njg3LCJpYXQiOjE3NDQ5MTI0ODcsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-17T17:54:47.348143Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:47.348402Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 283us result status StatusSuccess 2025-04-17T17:54:47.348937Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwzddlC5Zok1PIAWO2WDn\nqXfL80zKrq9fftME2GpIidKqmHMs3ywlTJQVU1QIaTaaL7aKN0qhf9xbhlpYuCgb\n439MP5IBGi3/s/kSoatn+OTJZfVua7V6exfpxarWkOnA2b3i+VzZiIg7xCUonG2k\nIQvsl/kwP33iizH276QVt3SB2yES6R0cRouO9IoIsXQBWUfeTUpFSLJkQML9j1kv\n0nni/qOOVso8rxJ10kvF3JHRk3kttZPDybUP2LkSJv1qhT3hTMHoug+P2ryW6zFr\nT51kFgpcNxgsAO69Q+BV7iXQBleoatSn3qWAVbKUeiTVp64vgLH0qc7XgxjuTDcU\nOQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744998882942 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsSwiIlKge/WrpBW5iQMF\nDhggHpVXBF00MdAjsjqJlZfjxK9YLnrRLa6rpFPs/sQlM5mqVgk4C0d3va+aZ6vC\nsLLeM/kKpcKZy1l+njR+l/PgjqyShTf8pXMs1zJ1x12eWSgN/ikLMYrESBGomt4W\n5ugN1oz/YKYz94T4HtpRXXC9U2SqIHQYeoq7z0SaDfVKvL16RPNUV6KxZorJuZrC\nV5bHRydcFypf2OA55KDTSOlEvXZAlTHIemi0p9jxkJ5TuvrC3uFb6J067s6Phmf8\nW9l57SC/AzkvDM78Dakm1Aku0lbgtb/WBb9sKDm4CjRlhrtoCh8MYo80DqgGNO0A\nbwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744998883113 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsThBw2sYpZTl6NYIB7gG\nspALedkE+mkjyc7KHeGpYcbie7wHK4hSwlIstUMTFGPVdDgyLGwe3MDrYya+Wrfq\nY9sypPOtB+efkGHLvr0CEvsIsdqnQFA6WwSV5X/Hu/Uz61BL3UfjqnJn+yg21pRm\n/q2WoGUAzq6GWQLq4fB9ZEaH17BAbP7Q4Db4P+BlQBDQ6BjR8PUGXujLwU5B7nWO\nA7FfjUAjlhAYPWy9WwQIhBkfcKmGVCmygsKUna7ygGLt7UazG/c2v8AE5ohfJULF\nUuNTaQlrTULZH2To9rllN57btO+9PQxunNBDmxBPOGTFQhigdTlm35QPR5UQtoRv\nLQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744998885326 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:46.693259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:46.693331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:46.693358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:46.693382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:46.693439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:46.693462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:46.693502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:46.693581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-17T17:54:46.693811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:46.770646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:46.770702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:46.784827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:46.785289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:46.785455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:46.793282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:46.793494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:46.794152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:46.794390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:46.798751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:46.800464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:46.800550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:46.800782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:46.800836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:46.800895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:46.801037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.809084Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:46.924187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:46.924432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.924634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:46.924851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:46.924899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.928942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:46.929101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:46.929298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.929352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:46.929388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:46.929421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:46.931300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.931350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:46.931399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:46.933100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.933149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.933189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:46.933254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.936338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:46.938709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:46.938959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:46.939973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:46.940123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:46.940178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:46.940470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:46.940531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:46.940670Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:46.940781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:46.943553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:46.943616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:46.943795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:46.943830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:46.944155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:46.944196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:46.944270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:46.944294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.944338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:46.944362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.944389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:46.944414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:46.944437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:46.944457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:46.944509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:46.944548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:46.944579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:46.946122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:46.946265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:46.946301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8944
2025-04-17T17:54:47.241241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944
2025-04-17T17:54:47.241270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-04-17T17:54:47.241284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944
2025-04-17T17:54:47.242558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:47.242853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-17T17:54:47.243192Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550
2025-04-17T17:54:47.243476Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-04-17T17:54:47.244195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944
2025-04-17T17:54:47.244555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7
Forgetting tablet 72075186233409550
2025-04-17T17:54:47.245719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-04-17T17:54:47.245938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
Forgetting tablet 72075186233409546
2025-04-17T17:54:47.247601Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-04-17T17:54:47.247741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-04-17T17:54:47.247920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-04-17T17:54:47.248587Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551
2025-04-17T17:54:47.248859Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
2025-04-17T17:54:47.249894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944
2025-04-17T17:54:47.250090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-17T17:54:47.250780Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549
2025-04-17T17:54:47.251041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-04-17T17:54:47.251205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
Forgetting tablet 72075186233409548
2025-04-17T17:54:47.253277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944
2025-04-17T17:54:47.253452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
Forgetting tablet 72075186233409551
Forgetting tablet 72075186233409547
Forgetting tablet 72075186233409549
2025-04-17T17:54:47.256113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:54:47.256166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-17T17:54:47.256305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-17T17:54:47.257877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T17:54:47.257927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-17T17:54:47.257994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:54:47.258695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5
2025-04-17T17:54:47.258743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550
2025-04-17T17:54:47.262527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-04-17T17:54:47.262606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-04-17T17:54:47.262770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3
2025-04-17T17:54:47.262788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-04-17T17:54:47.262884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6
2025-04-17T17:54:47.262907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551
2025-04-17T17:54:47.265983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-04-17T17:54:47.266040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-04-17T17:54:47.266212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4
2025-04-17T17:54:47.266263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549
2025-04-17T17:54:47.266346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-17T17:54:47.267618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestWaitNotification wait txId: 102
2025-04-17T17:54:47.267795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-17T17:54:47.267825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
TestWaitNotification wait txId: 103
2025-04-17T17:54:47.267913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-04-17T17:54:47.267950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-04-17T17:54:47.268278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-17T17:54:47.268374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T17:54:47.268405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:668:2570]
2025-04-17T17:54:47.268462Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-04-17T17:54:47.268558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-04-17T17:54:47.268581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:668:2570]
TestWaitNotification: OK eventTxId 102
TestWaitNotification: OK eventTxId 103
2025-04-17T17:54:47.269077Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:54:47.269270Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusPathDoesNotExist
2025-04-17T17:54:47.269462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-17T17:54:47.269970Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:54:47.270149Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess
2025-04-17T17:54:47.270657Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
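The StatusPathDoesNotExist result above is the externally visible outcome the test asserts: once the subdomain is dropped and TTxCleanDroppedPaths has run, describing /MyRoot/USER_0 resolves only as far as the /MyRoot prefix. For readers who want to reproduce the same check against a running cluster rather than inside the unit test, a minimal sketch using the ydb Python SDK (the endpoint, database, and error class here are assumptions, not part of the test output):

import ydb  # assumes the ydb Python SDK is installed

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")  # assumed local endpoint
driver.wait(timeout=5)
try:
    entry = driver.scheme_client.describe_path("/MyRoot/USER_0")
    print("path still exists:", entry.name)
except ydb.SchemeError as err:
    # A dropped path surfaces as a scheme error, matching the
    # StatusPathDoesNotExist / LastExistedPrefixPath "/MyRoot" seen in the log.
    print("path is gone:", err)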
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD]
Test command err:
2025-04-17T17:51:20.057925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341178208659390:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:51:20.057972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001435/r3tmp/tmphVy8VN/pdisk_1.dat
2025-04-17T17:51:20.878792Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:51:20.926142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:51:20.938379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:51:20.955591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12104, node 1
2025-04-17T17:51:21.312027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:51:21.312055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:51:21.312068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:51:21.312165Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T17:51:21.508714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T17:51:21.554457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480
2025-04-17T17:51:21.631457Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7494341182503627312:2309]
2025-04-17T17:51:21.631809Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:51:21.658084Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:51:21.658138Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T17:51:21.660027Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-17T17:51:21.660072Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-17T17:51:21.660103Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-17T17:51:21.660467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T17:51:21.660521Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T17:51:21.660550Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7494341182503627328:2309] in generation 1
2025-04-17T17:51:21.674678Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T17:51:21.787270Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-17T17:51:21.787416Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T17:51:21.787452Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7494341182503627331:2310]
2025-04-17T17:51:21.787473Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-17T17:51:21.787485Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-17T17:51:21.787494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:51:21.787662Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-17T17:51:21.787721Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-17T17:51:21.787735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:51:21.787746Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:51:21.787767Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-17T17:51:21.787781Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:51:21.789886Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7494341182503627308:2305], serverId# [1:7494341182503627326:2313], sessionId# [0:0:0]
2025-04-17T17:51:21.789981Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-17T17:51:21.790215Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1
2025-04-17T17:51:21.790280Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888
2025-04-17T17:51:21.793579Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:51:21.793975Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-17T17:51:21.794022Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-17T17:51:21.796964Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7494341182503627345:2324], serverId# [1:7494341182503627346:2325], sessionId# [0:0:0]
2025-04-17T17:51:21.801694Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744912281840 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912281840 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-17T17:51:21.801734Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:51:21.801882Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:51:21.801898Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-04-17T17:51:21.801917Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744912281840:281474976710657] in PlanQueue unit at 72075186224037888
2025-04-17T17:51:21.802151Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744912281840:281474976710657 keys extracted: 0
2025-04-17T17:51:21.802514Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-17T17:51:21.802681Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T17:51:21.802736Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-04-17T17:51:21.805543Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-04-17T17:51:21.805940Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T17:51:21.810741Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744912281840}
2025-04-17T17:51:21.810807Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:51:21.811281Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744912281839
2025-04-17T17:51:21.811310Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:51:21.811356Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:51:21.811455Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744912281854
2025-04-17T17:51:21.811491Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T17:51:21.811518Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-17T17:51:21.811546Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-04-17T17:51:21.811603Z node 1 :TX_DATASHARD DEBUG: Complete [1744912281840 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7494341178208659809:2190], exec latency: 3 ms, propose latency: 9 ms
2025-04-17T17:51:21.811630Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0
2025-04-17T17:51:21.811679Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:51:21.813067Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7494341182503627331:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender
2025-04-17T17:51:21.825992Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready
2025-04-17T17:51:21.826056Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-17T17:51:21.838285Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T17:51:21.838393Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-17T17:51:21.838497Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2
2025-04-17T17:51:21.838522Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658
2025-04-17T17:51:21.838531Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888
2025-04-17T17:51:21.839688Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-17T17:51:21.868548Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo
2025-04-17T17:51:21.868842Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast
2025-04-17T17:51:21.869474Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-04-17T17:51:21.869734Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info
2025-04-17T17:51:21.869771Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-04-17T17:51:21.869785Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config
2025-04-17T17:51:21.869798Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0
2025-04-17T17:51:21.869826Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-17T17:51:21.869851Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info
2025-04-17T17:51:21.889694Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7494341182503627400:2369], now have 1 active actors on pipe
2025-04-17T17:51:21.910438Z node 1 :PERSQUEUE DEBUG: [P ...
ed on disk
2025-04-17T17:54:44.974373Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 9 requestId: cookie: 5
2025-04-17T17:54:44.974827Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2693] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 5 Offset: 4 WriteTimestampMS: 8969 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 5 } } }
2025-04-17T17:54:44.975071Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:850:2693] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 }
2025-04-17T17:54:44.975326Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888
2025-04-17T17:54:44.975446Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037888
2025-04-17T17:54:44.976347Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888
... unblocking updates
... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR
... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR
... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR
... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR
2025-04-17T17:54:45.084670Z node 27 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888
2025-04-17T17:54:45.084817Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:54:45.084970Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888
2025-04-17T17:54:45.085237Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888
2025-04-17T17:54:45.089175Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888
2025-04-17T17:54:45.089348Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 }
2025-04-17T17:54:45.089472Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T17:54:45.089574Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 12000 from mediator time cast
2025-04-17T17:54:45.089830Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:684:2580] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] }
2025-04-17T17:54:45.090109Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:850:2693] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] }
2025-04-17T17:54:45.090675Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888
2025-04-17T17:54:45.091077Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:850:2693], at tablet# 72075186224037888
2025-04-17T17:54:45.091175Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888
2025-04-17T17:54:45.091480Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:850:2693] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] }
2025-04-17T17:54:45.092324Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2693] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] }
2025-04-17T17:54:45.092914Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId:
2025-04-17T17:54:45.093045Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0
2025-04-17T17:54:45.093234Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6
2025-04-17T17:54:45.093494Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId:
2025-04-17T17:54:45.093541Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0
2025-04-17T17:54:45.093643Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1
2025-04-17T17:54:45.093928Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0
2025-04-17T17:54:45.094123Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0
2025-04-17T17:54:45.094411Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0
2025-04-17T17:54:45.095443Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1
2025-04-17T17:54:45.097113Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 0 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000| size 93 WTime 8979
2025-04-17T17:54:45.097540Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest ===
2025-04-17T17:54:45.097658Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ----------------
2025-04-17T17:54:45.097765Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001)
2025-04-17T17:54:45.097878Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write -----------------
2025-04-17T17:54:45.098003Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889
2025-04-17T17:54:45.098082Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000005_00000_0000000001_00000|
2025-04-17T17:54:45.098117Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000
2025-04-17T17:54:45.098225Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ----------------
2025-04-17T17:54:45.098349Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] ===========================
2025-04-17T17:54:45.098589Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV
2025-04-17T17:54:45.099714Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93
2025-04-17T17:54:45.101144Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 5 count 1 size 93 actorID [27:797:2661]
2025-04-17T17:54:45.101406Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 size 93
2025-04-17T17:54:45.112309Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0
2025-04-17T17:54:45.112526Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0
2025-04-17T17:54:45.112672Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk
2025-04-17T17:54:45.113118Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6
2025-04-17T17:54:45.113543Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2693] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } }
2025-04-17T17:54:45.113752Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:850:2693] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 }
2025-04-17T17:54:45.113983Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888
2025-04-17T17:54:45.114088Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037888
2025-04-17T17:54:45.114947Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888
... checking the update is logged before the new resolved timestamp
>>>>> GetRecords path=/Root/Table/Stream partitionId=0
2025-04-17T17:54:45.225053Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId:
2025-04-17T17:54:45.225243Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0
2025-04-17T17:54:45.225572Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0
2025-04-17T17:54:45.225708Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 6
2025-04-17T17:54:45.225912Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 8. All data is from uncompacted head.
2025-04-17T17:54:45.226015Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-04-17T17:54:45.227710Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
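The records of Kind: CdcHeartbeat above are the mechanism behind resolved timestamps: when mediator time advances, the datashard persists a heartbeat change record and the CDC change sender writes it into the stream's PQ partition, which is why the test can check that the displaced upsert is logged before the next resolved timestamp. A hedged sketch of enabling the same feature on a user table via YQL through the ydb Python SDK (the table and changefeed names, the interval, and the local endpoint are assumptions, not taken from this test):

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

# RESOLVED_TIMESTAMPS controls how often heartbeat records (Kind: CdcHeartbeat
# in the log above) are emitted into the changefeed's stream partition.
ddl = """
ALTER TABLE `Table` ADD CHANGEFEED `Stream` WITH (
    FORMAT = 'JSON',
    MODE = 'UPDATES',
    VIRTUAL_TIMESTAMPS = TRUE,
    RESOLVED_TIMESTAMPS = Interval('PT10S')
)
"""
pool.retry_operation_sync(lambda session: session.execute_scheme(ddl))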
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T17:54:36.365655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:54:36.365729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:54:36.365787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:54:36.365823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:54:36.365862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:54:36.365898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:54:36.365969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:54:36.366039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:54:36.370766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:54:36.442860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T17:54:36.442922Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:54:36.457451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T17:54:36.457797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T17:54:36.457934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T17:54:36.465821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T17:54:36.466032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T17:54:36.466781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T17:54:36.467018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T17:54:36.470069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:36.471582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:54:36.471648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:36.471832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:54:36.471889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:54:36.471942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:54:36.472106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.478944Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T17:54:36.661960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T17:54:36.662264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.662542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T17:54:36.662802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T17:54:36.662864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.665741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T17:54:36.665898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T17:54:36.666122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.666217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T17:54:36.666258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T17:54:36.666298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T17:54:36.669259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.669338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T17:54:36.669383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T17:54:36.674383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.674448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.674490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:54:36.674558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T17:54:36.678266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T17:54:36.681115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T17:54:36.681305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T17:54:36.682140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T17:54:36.682270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T17:54:36.682315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:54:36.682578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T17:54:36.682625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T17:54:36.682767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T17:54:36.682835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T17:54:36.684751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:54:36.684784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:54:36.684914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:36.684943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T17:54:36.685223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T17:54:36.685256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T17:54:36.685334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:54:36.685366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:54:36.685403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T17:54:36.685426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:54:36.685456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T17:54:36.685485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T17:54:36.685523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T17:54:36.685552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T17:54:36.685600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T17:54:36.685631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T17:54:36.685659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T17:54:36.687209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:54:36.687341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T17:54:36.687379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
t for Tables, read records: 2, at schemeshard: 72057594046678944
2025-04-17T17:54:47.085210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0
2025-04-17T17:54:47.085269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0
2025-04-17T17:54:47.085345Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0
2025-04-17T17:54:47.085627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944
2025-04-17T17:54:47.085790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.085893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944
2025-04-17T17:54:47.085939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944
2025-04-17T17:54:47.085978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-17T17:54:47.086013Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944
2025-04-17T17:54:47.086034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-04-17T17:54:47.086534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944
2025-04-17T17:54:47.086682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.086941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944
2025-04-17T17:54:47.087333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.087454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.087807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.087877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.088066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.088152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.088239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.088454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.088534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.088696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.088938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.089083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.089143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.089202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944
2025-04-17T17:54:47.089432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-04-17T17:54:47.095926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-04-17T17:54:47.096104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop
2025-04-17T17:54:47.098521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:1138:3070], Recipient [1:1138:3070]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling
2025-04-17T17:54:47.098577Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling
2025-04-17T17:54:47.100374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T17:54:47.100443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T17:54:47.101076Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1138:3070], Recipient [1:1138:3070]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-17T17:54:47.101126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-17T17:54:47.101274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T17:54:47.101310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T17:54:47.101348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T17:54:47.101409Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-04-17T17:54:47.102251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1174:3070], Recipient [1:1138:3070]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult
2025-04-17T17:54:47.102282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult
2025-04-17T17:54:47.102353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594046678944 is [1:1138:3070] sender: [1:1194:2058] recipient: [1:15:2062]
2025-04-17T17:54:47.141150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1193:3114], Recipient [1:1138:3070]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }
2025-04-17T17:54:47.141229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme
2025-04-17T17:54:47.141374Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-17T17:54:47.141683Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 308us result status StatusSuccess
2025-04-17T17:54:47.142491Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 19110 Memory: 141368 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
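The boot log above prints the two knobs this test exercises: StatsBatchTimeout# 0.100000s and StatsMaxBatchSize# 100. ShouldPersistByBatchSize drives enough per-shard stat updates that the size limit fires before the timer does, so the batch is persisted by size. As an illustration of that flush rule only (a toy model, not the schemeshard implementation):

import time

class StatsBatcher:
    # Toy model of the StatsMaxBatchSize / StatsBatchTimeout rule named in the
    # log above; the real logic lives inside the schemeshard tablet.
    def __init__(self, max_batch_size=100, batch_timeout_s=0.1):
        self.max_batch_size = max_batch_size
        self.batch_timeout_s = batch_timeout_s
        self.batch = []
        self.first_arrival = None

    def add(self, stat_update):
        if not self.batch:
            self.first_arrival = time.monotonic()
        self.batch.append(stat_update)
        if len(self.batch) >= self.max_batch_size:
            return self.flush()  # persist by batch size: the path this test exercises
        return None  # otherwise the timeout path flushes it later

    def timeout_due(self):
        return bool(self.batch) and time.monotonic() - self.first_arrival >= self.batch_timeout_s

    def flush(self):
        persisted, self.batch, self.first_arrival = self.batch, [], None
        return persisted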
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 19110 Memory: 141368 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD] Test command err: 2025-04-17T17:53:03.773925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:03.774669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:03.774739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00103d/r3tmp/tmpZ6ezK9/pdisk_1.dat 2025-04-17T17:53:04.205795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:53:04.248755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:04.291511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:04.291622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:04.303183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:04.390789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:53:04.443597Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:53:04.444621Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:53:04.445049Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:53:04.445282Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:53:04.499319Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:53:04.500068Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:53:04.500182Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:53:04.501877Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:53:04.501958Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:53:04.502007Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:53:04.502367Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:53:04.502516Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:53:04.502606Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:53:04.513421Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:53:04.548763Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:53:04.548974Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:53:04.549104Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:53:04.549136Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:53:04.549181Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:53:04.549221Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:53:04.549459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:04.549517Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:04.549788Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:53:04.549878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:53:04.549987Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:53:04.550037Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:53:04.550087Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:53:04.550121Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:53:04.554147Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:53:04.554284Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:53:04.554382Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:53:04.554584Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:04.554633Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:04.554678Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:53:04.555091Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:53:04.555133Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:53:04.555235Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:53:04.555472Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:53:04.555527Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:53:04.555609Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:53:04.555655Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:53:04.555701Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:53:04.555749Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:53:04.555802Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:53:04.556052Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:53:04.556082Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:53:04.556132Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:53:04.556181Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:53:04.556225Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:53:04.556248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:53:04.556278Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:53:04.556305Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:53:04.556325Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:53:04.557727Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:53:04.557780Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:53:04.569328Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:53:04.569406Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:53:04.569439Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:53:04.569479Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:53:04.569559Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:53:04.724252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:04.724313Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:04.724383Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:53:04.724829Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:53:04.724872Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:53:04.724970Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:53:04.725013Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:53:04.725050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:53:04.725081Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:53:04.730769Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:53:04.730865Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:53:04.731772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:04.731813Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:04.731874Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:53:0 ... 2025-04-17T17:54:45.698422Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:45.698471Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:45.698530Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T17:54:45.698549Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:45.698577Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T17:54:45.698638Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:54:45.698714Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2025-04-17T17:54:45.699021Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:45.699069Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:45.699121Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:45.699155Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:54:45.699178Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2025-04-17T17:54:45.699202Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:45.699314Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-17T17:54:45.699740Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-17T17:54:45.699783Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 3} after 
executionsCount# 2 2025-04-17T17:54:45.699817Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:45.699940Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} finished in read 2025-04-17T17:54:45.699997Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T17:54:45.700023Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:45.700047Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:45.700071Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:45.700117Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T17:54:45.700143Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:45.700165Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-17T17:54:45.700190Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:45.700231Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:54:45.700313Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:45.700375Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:45.701000Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=MjkzZmFhOTMtZjY2MDdjNGMtYzMwYTRhZjEtN2ViNzA2ZjA=, workerId: [15:1163:2930], local sessions count: 0 2025-04-17T17:54:45.702435Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-17T17:54:45.702636Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:45.702758Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:45.702890Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-17T17:54:45.703021Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:54:45.703084Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:45.703137Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:45.703195Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2025-04-17T17:54:45.703264Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-17T17:54:45.703288Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2025-04-17T17:54:45.703308Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:45.703327Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:45.703509Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-17T17:54:45.703909Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-17T17:54:45.703987Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 4} after executionsCount# 1 2025-04-17T17:54:45.704081Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:45.704313Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 4} finished in read 2025-04-17T17:54:45.704408Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-17T17:54:45.704453Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:45.704479Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:45.704503Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:45.704568Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-17T17:54:45.704594Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:45.704632Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2025-04-17T17:54:45.704709Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:54:45.705691Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-17T17:54:45.705858Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:45.705965Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:45.706092Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-17T17:54:45.706172Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:54:45.706252Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:45.706315Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:45.706382Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
2025-04-17T17:54:45.706443Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-17T17:54:45.706467Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:45.706488Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:45.706508Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:45.706646Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-17T17:54:45.707018Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-04-17T17:54:45.707093Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 5} after executionsCount# 1 2025-04-17T17:54:45.707182Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:54:45.707417Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 5} finished in read 2025-04-17T17:54:45.707512Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-17T17:54:45.707538Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:45.707559Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:45.707582Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:45.707624Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-17T17:54:45.707644Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:45.707675Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2025-04-17T17:54:45.707737Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:47.210154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:47.210259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:47.210294Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:47.210325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:47.210374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:47.210410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:47.210462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:47.210526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:47.210800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:47.292658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:47.292721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:47.307524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:47.307917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:47.308075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:47.316855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:47.317074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:47.317635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:47.317888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:47.325198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:47.326491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:47.326554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:47.326754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:47.326819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:47.326863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:47.327006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.333711Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:47.452506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:47.452743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.452948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:47.453172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:47.453221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.459072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:47.459202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:47.459413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.459484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:47.459517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:47.459548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:47.461447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.461507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:47.461535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:47.463186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.463229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.463267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:47.463328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.466722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:47.468468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:47.468645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-04-17T17:54:47.469600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:47.469727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:47.469779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:47.470049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:47.470101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:47.470289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:47.470387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:47.472343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:47.472383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:47.472586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:47.472623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:47.472966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.473004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:47.473108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:47.473136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.473184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:47.473212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.473246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:47.473283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.473314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:47.473337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:47.473393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:47.473424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:47.473452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-04-17T17:54:47.475108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:47.475213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:47.475243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 7594046316545 2025-04-17T17:54:47.518003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T17:54:47.518129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T17:54:47.518466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:47.518568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:47.518609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T17:54:47.518861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T17:54:47.518917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T17:54:47.519061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:47.519130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:54:47.519188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T17:54:47.520919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:47.520953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:47.521090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:47.521191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:47.521234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T17:54:47.521292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T17:54:47.521554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.521605Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T17:54:47.521716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:54:47.521762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:54:47.521800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T17:54:47.521842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:54:47.521880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T17:54:47.521915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T17:54:47.521946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T17:54:47.521971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T17:54:47.522029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:54:47.522068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-17T17:54:47.522116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T17:54:47.522154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T17:54:47.522760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:54:47.522832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:54:47.522869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:54:47.522908Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:54:47.522942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:47.523586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:54:47.523672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T17:54:47.523700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T17:54:47.523726Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:54:47.523750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:54:47.523807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-17T17:54:47.526678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T17:54:47.526776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-17T17:54:47.526991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T17:54:47.527034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-17T17:54:47.527432Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T17:54:47.527515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T17:54:47.527566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 100 2025-04-17T17:54:47.528157Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:47.528339Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 226us result status StatusSuccess 2025-04-17T17:54:47.528713Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:47.529169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:47.529342Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 149us result status StatusSuccess 2025-04-17T17:54:47.529667Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> TBoardSubscriberTest::NotAvailableByShutdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] Test command err: 2025-04-17T17:53:04.968182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:04.968765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:04.968836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001029/r3tmp/tmpR3f0zh/pdisk_1.dat 2025-04-17T17:53:05.402431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:53:05.444617Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:05.485415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:05.485567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:05.499729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:53:05.584498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:53:05.651579Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:53:05.652677Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:53:05.653181Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:53:05.653437Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:53:05.712142Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:53:05.713050Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:53:05.713173Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:53:05.716078Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:53:05.716176Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:53:05.716235Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:53:05.716696Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:53:05.716868Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:53:05.716959Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:53:05.733277Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:53:05.766244Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:53:05.766445Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:53:05.766556Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:53:05.766589Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:53:05.766622Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:53:05.766663Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:53:05.766865Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:05.766975Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:05.767208Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:53:05.767294Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:53:05.767390Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:53:05.767442Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:53:05.767488Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:53:05.767523Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:53:05.767571Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:53:05.767604Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:53:05.767645Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:53:05.767756Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:05.767794Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:05.767871Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:53:05.768249Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:53:05.768295Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:53:05.768411Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:53:05.768657Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:53:05.768712Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:53:05.768793Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:53:05.768838Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:53:05.768875Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:53:05.768910Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:53:05.768939Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:53:05.769188Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:53:05.769220Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:53:05.769254Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:53:05.769295Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:53:05.769342Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:53:05.769403Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:53:05.769436Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:53:05.769477Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:53:05.769500Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:53:05.773375Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:53:05.773458Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:53:05.784758Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:53:05.784848Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:53:05.784922Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:53:05.784965Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:53:05.785049Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:53:05.943196Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:05.943266Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:53:05.943310Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:53:05.943700Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:53:05.943742Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:53:05.943851Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:53:05.943894Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:53:05.943939Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:53:05.943976Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:53:05.948510Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:53:05.948606Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:53:05.949501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:05.949544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:53:05.949596Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:53:0 ... teState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-17T17:54:46.604662Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3001} 2025-04-17T17:54:46.604724Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:46.604756Z node 15 :TX_DATASHARD TRACE: Complete execution for [3001:281474976715667] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:54:46.604810Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:46.605965Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [15:1168:2935], Recipient [15:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:46.606017Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:46.606058Z node 15 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [15:1167:2934], serverId# [15:1168:2935], sessionId# [0:0:0] 2025-04-17T17:54:46.606973Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2025-04-17T17:54:46.607204Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:54:46.607306Z node 15 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-17T17:54:46.607397Z node 15 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2025-04-17T17:54:46.607502Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-17T17:54:46.607688Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:54:46.607762Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 
executing on unit CheckRead 2025-04-17T17:54:46.607829Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:54:46.607894Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:54:46.607972Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-17T17:54:46.608043Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:54:46.608075Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:54:46.608103Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:54:46.608134Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:46.608296Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-17T17:54:46.608822Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2025-04-17T17:54:46.608872Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2025-04-17T17:54:46.608984Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:54:46.632823Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [15:1063:2855], Recipient [15:667:2571]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:54:46.632958Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:54:46.633048Z node 15 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2025-04-17T17:54:46.633244Z node 15 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:54:46.633504Z node 15 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1157:2910], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:46.633671Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:54:46.633764Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2025-04-17T17:54:46.634311Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1157:2910] TxId: 281474976715667. Ctx: { TraceId: 01js2e73ye6a3t4brv0t9p2zqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YzdjNGI1NzMtY2ZiMDU2YjEtYjAxNmY3MWItYzI4NzhlN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-17T17:54:46.634598Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1157:2910] TxId: 281474976715667. 
Ctx: { TraceId: 01js2e73ye6a3t4brv0t9p2zqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YzdjNGI1NzMtY2ZiMDU2YjEtYjAxNmY3MWItYzI4NzhlN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:54:46.634706Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1157:2910] TxId: 281474976715667. Ctx: { TraceId: 01js2e73ye6a3t4brv0t9p2zqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YzdjNGI1NzMtY2ZiMDU2YjEtYjAxNmY3MWItYzI4NzhlN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T17:54:46.634944Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:46.635034Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:46.635389Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:46.636490Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:46.636567Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:54:46.636666Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2025-04-17T17:54:46.636764Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:54:46.636994Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-17T17:54:46.637609Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-17T17:54:46.637703Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 1} after executionsCount# 2 2025-04-17T17:54:46.637796Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2025-04-17T17:54:46.638103Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:54:46.638172Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:54:46.638309Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:54:46.638373Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:54:46.638435Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:54:46.638464Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:54:46.638524Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-17T17:54:46.638597Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 
planned 0 2025-04-17T17:54:46.638664Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:54:46.638724Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:46.638794Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:46.639240Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [15:594:2519], selfId: [15:57:2104], source: [15:1135:2910] 2025-04-17T17:54:46.639779Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:46.639921Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 2 2025-04-17T17:54:46.640195Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 2 2025-04-17T17:54:46.640421Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2025-04-17T17:54:46.641615Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T17:54:46.641694Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 4 2025-04-17T17:54:46.641869Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 4 2025-04-17T17:54:46.642004Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2025-04-17T17:54:46.642140Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[15:594:2519], 1} finished in ReadContinue 2025-04-17T17:54:46.642440Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=YzdjNGI1NzMtY2ZiMDU2YjEtYjAxNmY3MWItYzI4NzhlN2Y=, workerId: [15:1135:2910], local sessions count: 0 2025-04-17T17:54:46.642678Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1063:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriberTest::SimpleSubscriber |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::ReconnectReplica |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |79.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::ReconnectReplica [GOOD] |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TKesusTest::TestRegisterProxy >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> TKesusTest::TestKesusConfig >> KqpSystemView::PartitionStatsRange1 >> TKesusTest::TestAcquireWaiterDowngrade >> TKesusTest::TestAcquireSemaphoreTimeout |79.9%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSysColV1::SelectRange >> TKesusTest::TestQuoterAccountResourcesBurst >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> SystemView::QueryStatsAllTables [GOOD] >> SystemView::QueryStatsRetries >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TKesusTest::TestQuoterHDRRParametersValidation >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TKesusTest::TestUnregisterProxy |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> 
TKesusTest::TestAcquireWaiterRelease >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::SystemViewFailOps [GOOD] Test command err: 2025-04-17T17:50:30.834586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340964211912802:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:30.834633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005bd/r3tmp/tmpj1leyf/pdisk_1.dat 2025-04-17T17:50:31.203792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:31.213263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:31.213354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12309, node 1 2025-04-17T17:50:31.220755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:31.234696Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:31.234726Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:31.293764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:31.293798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:31.293807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:31.294192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19193 TClient is connected to server localhost:19193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:31.736954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:33.711100Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-17T17:50:33.711140Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-17T17:50:33.741947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340977096815813:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.741956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340977096815825:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.742049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.745643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:33.770866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340977096815827:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:33.840364Z node 1 :TX_PROXY ERROR: Actor# [1:7494340977096815911:2758] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:33.841940Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-17T17:50:33.842070Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000357368 2025-04-17T17:50:33.842111Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7494340977096815784:2336], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01js2dzd8b4syb8pjce6y1pkqr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYzNzQ2ODgtNmFjMGU2MmMtYTAwOTI4MjEtOTUxMGY3ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-17T17:50:33.842261Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-17T17:50:33.842319Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7494340977096815784:2336], queueSize: 1 2025-04-17T17:50:33.842968Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7494340977096815784:2336], compileActor: [1:7494340977096815930:2349] 
2025-04-17T17:50:34.104558Z node 1 :KQP_YQL INFO: TraceId: 01js2dzd8b4syb8pjce6y1pkqr, SessionId: CompileActor 2025-04-17 17:50:34.103 INFO ydb-core-sys_view-ut(pid=58414, tid=0x00007F4120C10640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-17T17:50:34.105554Z node 1 :KQP_YQL TRACE: TraceId: 01js2dzd8b4syb8pjce6y1pkqr, SessionId: CompileActor 2025-04-17 17:50:34.104 TRACE ydb-core-sys_view-ut(pid=58414, tid=0x00007F4120C10640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-17T17:50:34.105912Z node 1 :KQP_YQL DEBUG: TraceId: 01js2dzd8b4syb8pjce6y1pkqr, SessionId: CompileActor 2025-04-17 17:50:34.105 DEBUG ydb-core-sys_view-ut(pid=58414, tid=0x00007F4120C10640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 263us 2025-04-17T17:50:34.106578Z node 1 :KQP_YQL DEBUG: TraceId: 01js2dzd8b4syb8pjce6y1pkqr, SessionId: CompileActor 2025-04-17 17:50:34.106 DEBUG ydb-core-sys_view-ut(pid=58414, tid=0x00007F4120C10640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-17T17:50:34.107910Z node 1 :KQP_YQL DEBUG: TraceId: 01js2dzd8b4syb8pjce6y1pkqr, SessionId: CompileActor 2025-04-17 17:50:34.107 DEBUG ydb-core-sys_view-ut(pid=58414, tid=0x00007F4120C10640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-17T17:50:34.108538Z node 1 :KQP_YQL TRACE: TraceId: 01js2dzd8b4syb8pjce6y1pkqr, SessionId: CompileActor 2025-04-17 17:50:34.108 TRACE ydb-core-sys_view-ut(pid=58414, tid=0 ... 
id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:54:30.221787Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7494341994693866627:2377], row count: 2, finished: 1 2025-04-17T17:54:30.221846Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7494341994693866627:2377], owner: [27:7494341994693866624:2375], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-17T17:54:30.225613Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912470201, txId: 281474976715666] shutting down 2025-04-17T17:54:30.491624Z node 27 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01js2e6m6w3dmqcbfyc2y1eyqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=YmMyY2RhZTItOWViNTNiMjctZjY5ZDM0ZjktNTY0NTIxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:30.495133Z node 27 :SYSTEM_VIEWS INFO: Scan started, actor: [27:7494341994693866659:2386], owner: [27:7494341994693866656:2384], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-17T17:54:30.498536Z node 27 :SYSTEM_VIEWS INFO: Scan prepared, actor: [27:7494341994693866659:2386], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:54:30.500056Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7494341994693866659:2386], row count: 3, finished: 1 2025-04-17T17:54:30.500107Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7494341994693866659:2386], owner: [27:7494341994693866656:2384], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-17T17:54:30.504105Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912470481, txId: 281474976715668] shutting down 2025-04-17T17:54:32.592307Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7494342003637443482:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:32.592436Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005bd/r3tmp/tmpLh3XOZ/pdisk_1.dat 2025-04-17T17:54:32.768630Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:32.816571Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:32.816741Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:32.822314Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9430, node 28 2025-04-17T17:54:32.903131Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:32.903166Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:32.903196Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:32.903458Z node 28 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:13688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:33.331225Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:37.592559Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7494342003637443482:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:37.592630Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:37.665334Z node 28 :TX_PROXY ERROR: Actor# [28:7494342025112280822:2525] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.sys\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:37.699776Z node 28 :TX_PROXY ERROR: Actor# [28:7494342025112280831:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:37.739373Z node 28 :TX_PROXY ERROR: Actor# [28:7494342025112280840:2539] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:37.778119Z node 28 :TX_PROXY ERROR: Actor# [28:7494342025112280848:2545] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:37.798055Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7494342025112280857:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:37.798171Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:37.823517Z node 28 :TX_PROXY ERROR: Actor# [28:7494342025112280870:2555] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Execution, code: 1060
:2:28: Error: Executing DROP TABLE
: Error: Incorrect scheme found while performing Kikimr operation., code: 2003
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:37.885036Z node 28 :TX_PROXY ERROR: [ReadTable [28:7494342025112280883:2342] TxId# 281474976710663] Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats] 2025-04-17T17:54:37.903815Z node 28 :TX_PROXY ERROR: [ReadTable [28:7494342025112280883:2342] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: /Root/.sys/partition_stats
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats], code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats]
: Error: Bulk upsert to table '/Root/.sys/partition_stats' is not supported. Table is a system view 2025-04-17T17:54:37.991294Z node 28 :TX_PROXY ERROR: Actor# [28:7494342025112280892:2569] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.sys\', error: path part \'.sys\' is reserved by the system" severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path part '.sys' is reserved by the system 2025-04-17T17:54:38.024529Z node 28 :TX_PROXY ERROR: Actor# [28:7494342029407248197:2576] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.sys\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:38.038980Z node 28 :TX_PROXY ERROR: Actor# [28:7494342029407248206:2583] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.sys\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:38.055205Z node 28 :TX_PROXY ERROR: Actor# [28:7494342029407248214:2589] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-17T17:54:38.081770Z node 28 :TX_PROXY ERROR: Actor# [28:7494342029407248222:2595] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> KqpSysColV1::InnerJoinSelect [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TKesusTest::TestAllocatesResources [GOOD] >> TKesusTest::TestSessionDetach |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 12092, MsgBus: 18037 2025-04-17T17:54:47.734684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342067279726945:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:47.734753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c57/r3tmp/tmpWQLyZv/pdisk_1.dat 2025-04-17T17:54:48.075852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:48.095714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:48.095813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:48.097469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12092, node 1 2025-04-17T17:54:48.212069Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:48.212114Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:48.212124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:48.212264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18037 TClient is connected to server localhost:18037 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:48.898660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:48.959000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.162572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.306224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.371507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.800942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342080164630599:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:50.801074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.173988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.203760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.273858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.315865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.346442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.392777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.432768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.498728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342084459598562:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.498812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.499300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342084459598567:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.502266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:51.510651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342084459598569:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:51.570320Z node 1 :TX_PROXY ERROR: Actor# [1:7494342084459598624:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:52.734890Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342067279726945:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:52.734945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:53.108658Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912493128, txId: 281474976710672] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] >> KqpSystemView::PartitionStatsRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 24618, MsgBus: 22223 2025-04-17T17:54:47.548940Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342067877936518:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:47.548987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c59/r3tmp/tmpxXxqlK/pdisk_1.dat 2025-04-17T17:54:47.955782Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:48.001316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:48.001445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24618, node 1 2025-04-17T17:54:48.004470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:48.134495Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:48.134511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:48.134517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:48.134595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22223 TClient is connected to server localhost:22223 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:48.695007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:48.744830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:48.880758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.047725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.125540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.844615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342080762840182:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:50.844754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.145976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.173983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.198052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.225905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.254292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.329043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.402266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.476854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342085057808146:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.476970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342085057808151:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.476976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.480760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:51.495042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342085057808153:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:51.555993Z node 1 :TX_PROXY ERROR: Actor# [1:7494342085057808208:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:52.554853Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342067877936518:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:52.561224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-04-17T17:54:52.050883Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:52.051045Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.072728Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.073071Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.104269Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:52.104854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=2900122721005518190, session=0, seqNo=0) 2025-04-17T17:54:52.105039Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:52.117462Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=2900122721005518190, session=1) 2025-04-17T17:54:52.117883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=1205985641372061675, session=0, seqNo=0) 2025-04-17T17:54:52.118039Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:52.130427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=1205985641372061675, session=2) 2025-04-17T17:54:52.130867Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-17T17:54:52.131054Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:52.131175Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:52.147540Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:54:52.147891Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:52.148229Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:52.148333Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" 
queue: next order #2 session 2 2025-04-17T17:54:52.161218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:54:52.161318Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:54:52.162011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=12079979193460405874, name="Lock1") 2025-04-17T17:54:52.162137Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=12079979193460405874) 2025-04-17T17:54:52.670744Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:52.670843Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.697987Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.698104Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.734862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:52.735317Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=1439162352970861997, session=0, seqNo=0) 2025-04-17T17:54:52.735468Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:52.754622Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=1439162352970861997, session=1) 2025-04-17T17:54:52.754945Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=6317124811032692967, session=0, seqNo=0) 2025-04-17T17:54:52.755063Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:52.768871Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=6317124811032692967, session=2) 2025-04-17T17:54:52.769169Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:52.769328Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:52.769433Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:52.785971Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-04-17T17:54:52.786298Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:52.786617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:52.800057Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=222) 2025-04-17T17:54:52.800133Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=333) 2025-04-17T17:54:52.800629Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:148:2172], cookie=13015570211933163644, name="Lock1") 2025-04-17T17:54:52.800707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:148:2172], 
cookie=13015570211933163644) 2025-04-17T17:54:52.801099Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:151:2175], cookie=6952138407157549379, name="Lock1") 2025-04-17T17:54:52.801167Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:151:2175], cookie=6952138407157549379) 2025-04-17T17:54:53.199149Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.199280Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.216557Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.216670Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.230796Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.231228Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=3063433886567273653, session=0, seqNo=0) 2025-04-17T17:54:53.231393Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:53.253853Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=3063433886567273653, session=1) 2025-04-17T17:54:53.254147Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=5477620132422518388, session=0, seqNo=0) 2025-04-17T17:54:53.254289Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:53.267062Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=5477620132422518388, session=2) 2025-04-17T17:54:53.267748Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:53.267891Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:53.267970Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:53.280044Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=111) 2025-04-17T17:54:53.280372Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:53.280681Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:53.280735Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-17T17:54:53.294597Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=222) 2025-04-17T17:54:53.294679Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=333) 2025-04-17T17:54:53.295326Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:149:2173], cookie=12820341082410347034, name="Lock1") 2025-04-17T17:54:53.295418Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:149:2173], cookie=12820341082410347034) 2025-04-17T17:54:53.295874Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:152:2176], cookie=12928447819875981315, 
name="Lock1") 2025-04-17T17:54:53.295931Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:152:2176], cookie=12928447819875981315) 2025-04-17T17:54:53.310780Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.310883Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.311440Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.312004Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.355045Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.355202Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:53.355580Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:192:2206], cookie=12912750262456187228, name="Lock1") 2025-04-17T17:54:53.355666Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:192:2206], cookie=12912750262456187228) 2025-04-17T17:54:53.356185Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:200:2213], cookie=4590821957071349318, name="Lock1") 2025-04-17T17:54:53.356251Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:200:2213], cookie=4590821957071349318) 2025-04-17T17:54:53.844203Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.844306Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.861509Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.861621Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.878906Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.879441Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=7718891691980440857, session=0, seqNo=0) 2025-04-17T17:54:53.879604Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:53.901955Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=7718891691980440857, session=1) 2025-04-17T17:54:53.902259Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=9142366088921224995, session=0, seqNo=0) 2025-04-17T17:54:53.902393Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:53.914882Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=9142366088921224995, session=2) 2025-04-17T17:54:53.915198Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:53.915351Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:53.915457Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:53.928774Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2025-04-17T17:54:53.929086Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Lock1" 
count=1) 2025-04-17T17:54:53.929448Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Lock1") 2025-04-17T17:54:53.929532Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-17T17:54:53.941382Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2025-04-17T17:54:53.941455Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2025-04-17T17:54:54.272118Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:54.272203Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:54.286844Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:54.288539Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:54.315656Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:54.324722Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=17661161388351729324, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-17T17:54:54.324994Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:54.337823Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=17661161388351729324) 2025-04-17T17:54:54.338480Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=5400762148333392686, path="/Root/Res", config={ }) 2025-04-17T17:54:54.338718Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:54:54.353439Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=5400762148333392686) 2025-04-17T17:54:54.354887Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 5988611942852360982. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:54.354955Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=5988611942852360982) 2025-04-17T17:54:54.355414Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 1899542856702197533. Data: { } 2025-04-17T17:54:54.355447Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=1899542856702197533) 2025-04-17T17:54:54.397322Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:54.438896Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:54.469934Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:54.500943Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:54.542496Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2025-04-17T17:54:52.043385Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:52.043492Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.065040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.065351Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.094750Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:52.597673Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:52.597772Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.617369Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.617484Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.645792Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.101549Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.101644Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.117140Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.117259Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.132508Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.546607Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.546691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.568530Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.568863Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.598573Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.600301Z node 4 :PIPE_SERVER ERROR: [72057594037927937] 
NodeDisconnected NodeId# 5 2025-04-17T17:54:53.600883Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:187:2159]) 2025-04-17T17:54:54.202153Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:54.202292Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:54.223394Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:54.223737Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute ... waiting for register request 2025-04-17T17:54:54.240595Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR ... waiting for register request (done) ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2025-04-17T17:54:54.241418Z node 6 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 7 2025-04-17T17:54:54.241508Z node 6 :KESUS_TABLET TRACE: Got TEvServerDisconnected([6:187:2159]) >> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges >> TKesusTest::TestAcquireLocks >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> KqpSysColV1::UpdateAndDelete [GOOD] >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 16635, MsgBus: 64481 2025-04-17T17:54:48.778090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342071410800327:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:48.778131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c4b/r3tmp/tmpStfpFF/pdisk_1.dat 2025-04-17T17:54:49.268065Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16635, node 1 2025-04-17T17:54:49.297590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:49.297658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:49.319520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:49.339864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:49.339902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:49.339917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:49.340049Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:64481 TClient is connected to server localhost:64481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:49.903433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.932932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:54:49.951616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.107052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.299929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.390073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:52.087355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342088590671308:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.087500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.553697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.594832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.627163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.662330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.691973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.734430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.770051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.922461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342088590671977:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.922529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.922666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342088590671982:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.925578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:52.935885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342088590671984:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:52.998471Z node 1 :TX_PROXY ERROR: Actor# [1:7494342088590672037:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:53.778338Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342071410800327:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:53.778406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:54.395606Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912494384, txId: 281474976710672] shutting down >> TKesusTest::TestSessionTimeoutAfterDetach >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestAttachOutOfSequence |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestQuoterResourceDescribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-04-17T17:54:52.030194Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:52.030325Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.052393Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.052690Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.077287Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:52.077657Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:132:2158], cookie=13367881000887555476, path="/foo/bar/baz") 2025-04-17T17:54:52.093135Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:132:2158], cookie=13367881000887555476, status=SUCCESS) 2025-04-17T17:54:52.093736Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:141:2165], cookie=6264399880189201287) 2025-04-17T17:54:52.110845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:141:2165], cookie=6264399880189201287) 2025-04-17T17:54:52.111451Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:146:2170], cookie=7267949934555691727, path="/foo/bar/baz") 2025-04-17T17:54:52.124557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:146:2170], cookie=7267949934555691727, status=SUCCESS) 2025-04-17T17:54:52.125061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:151:2175], cookie=5005255804123268856) 2025-04-17T17:54:52.137068Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:151:2175], cookie=5005255804123268856) 2025-04-17T17:54:52.148124Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 
72057594037927937 2025-04-17T17:54:52.148217Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.148836Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.149395Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.187624Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:52.187910Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:193:2207], cookie=2488990021044209515) 2025-04-17T17:54:52.214845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:193:2207], cookie=2488990021044209515) 2025-04-17T17:54:52.215589Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:201:2214], cookie=10798323166527114957, path="/foo/bar/baz") 2025-04-17T17:54:52.227907Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:201:2214], cookie=10798323166527114957, status=SUCCESS) 2025-04-17T17:54:52.228428Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:206:2219], cookie=10281225972232002886, path="/foo/bar/baz") 2025-04-17T17:54:52.228491Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:206:2219], cookie=10281225972232002886, status=PRECONDITION_FAILED) 2025-04-17T17:54:52.766013Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:52.766107Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.791084Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.791194Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.905988Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:52.906301Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:132:2158], cookie=4160786373752111837, name="Lock1") 2025-04-17T17:54:52.906374Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:132:2158], cookie=4160786373752111837) 2025-04-17T17:54:53.381835Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.381940Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.397670Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.397782Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.413731Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.414449Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=2729346532327095044, session=0, seqNo=0) 2025-04-17T17:54:53.414652Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:53.437399Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=2729346532327095044, session=1) 2025-04-17T17:54:53.437738Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:53.437896Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:53.437986Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 
1 2025-04-17T17:54:53.449906Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:130:2156], cookie=111) 2025-04-17T17:54:53.450436Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:141:2165], cookie=17133736746069684939, name="Lock1", force=0) 2025-04-17T17:54:53.462287Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:141:2165], cookie=17133736746069684939) 2025-04-17T17:54:53.462909Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:146:2170], cookie=6990746318690946301, name="Sem1", force=0) 2025-04-17T17:54:53.475207Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:146:2170], cookie=6990746318690946301) 2025-04-17T17:54:53.475756Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:151:2175], cookie=2747524786109514859, name="Sem1", limit=42) 2025-04-17T17:54:53.475899Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-17T17:54:53.488863Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:151:2175], cookie=2747524786109514859) 2025-04-17T17:54:53.489398Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:156:2180], cookie=3483360912364147446, name="Sem1", force=0) 2025-04-17T17:54:53.489499Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-04-17T17:54:53.501795Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:156:2180], cookie=3483360912364147446) 2025-04-17T17:54:53.502373Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:161:2185], cookie=16615483607218136699, name="Sem1", force=0) 2025-04-17T17:54:53.514623Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:161:2185], cookie=16615483607218136699) 2025-04-17T17:54:53.816525Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.816637Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.835639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.835769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.850487Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.851063Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=9107097036984128977, session=0, seqNo=0) 2025-04-17T17:54:53.851236Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:53.874547Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=9107097036984128977, session=1) 2025-04-17T17:54:53.874907Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=8061938834944245472, session=0, seqNo=0) 2025-04-17T17:54:53.875079Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:53.887384Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=8061938834944245472, session=2) 2025-04-17T17:54:53.887675Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:130:2156], cookie=4196874025947037747 2025-04-17T17:54:53.888251Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:142:2166], cookie=12371495801537097568, name="Sem1", limit=3) 2025-04-17T17:54:53.888414Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:54:53.900830Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:142:2166], cookie=12371495801537097568) 2025-04-17T17:54:53.901203Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=112, name="Sem1") 2025-04-17T17:54:53.901290Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=112) 2025-04-17T17:54:53.901531Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=113, name="Sem1") 2025-04-17T17:54:53.901593Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=113) 2025-04-17T17:54:53.901811Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=10785394292897107693, session=2, seqNo=0) 2025-04-17T17:54:53.913957Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=10785394292897107693, session=2) 2025-04-17T17:54:53.914251Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=114, name="Sem1") 2025-04-17T17:54:53.914339Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=114) 2025-04-17T17:54:53.914544Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=115, name="Sem1") 2025-04-17T17:54:53.914590Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=115) 2025-04-17T17:54:53.914930Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:149:2173], cookie=15944371349279052475, name="Sem1") 2025-04-17T17:54:53.927924Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:149:2173], cookie=15944371349279052475) 2025-04-17T17:54:53.928315Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=116, session=1, semaphore="Sem1" count=1) 2025-04-17T17:54:53.928491Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:54:53.940602Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=116) 2025-04-17T17:54:53.941017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=117, session=2, semaphore="Sem1" count=2) 2025-04-17T17:54:53.941147Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-17T17:54:53.953388Z node 4 :KESUS_TABLET DEBUG: [7205759403 ... 
.870309Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-17T17:54:54.882355Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:200:2218], cookie=11713863082668740828) 2025-04-17T17:54:54.882737Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=126, session=1, semaphore="Sem2" count=3) 2025-04-17T17:54:54.882873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-04-17T17:54:54.895528Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=126) 2025-04-17T17:54:54.895974Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=127, name="Sem2") 2025-04-17T17:54:54.896120Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=127) 2025-04-17T17:54:54.896424Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=128, session=1, semaphore="Sem2" count=3) 2025-04-17T17:54:54.908893Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=128) 2025-04-17T17:54:55.253613Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:55.268062Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:55.278849Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=129, session=1, semaphore="Sem2" count=2) 2025-04-17T17:54:55.299221Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=129) 2025-04-17T17:54:55.299755Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=130, name="Sem2") 2025-04-17T17:54:55.299853Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=130) 2025-04-17T17:54:55.300246Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=131, session=1, semaphore="Sem2" count=1) 2025-04-17T17:54:55.316036Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=131) 2025-04-17T17:54:55.316500Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem2") 2025-04-17T17:54:55.316594Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) 2025-04-17T17:54:55.316847Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=133, name="Sem2") 2025-04-17T17:54:55.316923Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=133) 2025-04-17T17:54:55.761483Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:55.761601Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:55.782099Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:55.782631Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:55.807313Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:55.813514Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=5894825954688138150, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-04-17T17:54:55.813792Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-04-17T17:54:55.826014Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=5894825954688138150) 2025-04-17T17:54:55.826748Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=6819644178767344690, path="/Root1/Res", config={ }) 2025-04-17T17:54:55.827029Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-04-17T17:54:55.846033Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=6819644178767344690) 2025-04-17T17:54:55.846801Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2170], cookie=6313947021051623993, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-04-17T17:54:55.847008Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-04-17T17:54:55.859456Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2170], cookie=6313947021051623993) 2025-04-17T17:54:55.860136Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2175], cookie=8954586219552723874, path="/Root2/Res", config={ }) 2025-04-17T17:54:55.860346Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-04-17T17:54:55.872648Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2175], cookie=8954586219552723874) 2025-04-17T17:54:55.873332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2180], cookie=6888267715044498711, path="/Root2/Res/Subres", config={ }) 2025-04-17T17:54:55.873573Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-04-17T17:54:55.885931Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2180], cookie=6888267715044498711) 2025-04-17T17:54:55.887136Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 10723930977775816399. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:55.887196Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=10723930977775816399) 2025-04-17T17:54:55.931891Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:55.973647Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:56.005407Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:56.006113Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:168:2189]. Cookie: 1273010612858232390. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-17T17:54:56.006942Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 16742910551387689424. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:56.006996Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=16742910551387689424) 2025-04-17T17:54:56.038804Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:56.080711Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:56.081493Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:176:2196]. Cookie: 4127779317163120402. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-17T17:54:56.082335Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 5642165462224722688. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:56.082389Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=5642165462224722688) 2025-04-17T17:54:56.083058Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 9560743808086636441. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:56.083108Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=9560743808086636441) 2025-04-17T17:54:56.114408Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:56.114527Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-04-17T17:54:56.115262Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2203]. Cookie: 12221943888769565644. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> TKesusTest::TestSessionStealingAnyKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 20821, MsgBus: 27515 2025-04-17T17:54:48.365767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342071665749001:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:48.366507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c4d/r3tmp/tmp5CWo97/pdisk_1.dat 2025-04-17T17:54:48.994765Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:49.035420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:49.035501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:49.038075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20821, node 1 2025-04-17T17:54:49.162610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:49.162641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:49.162651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:49.162774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:27515 TClient is connected to server localhost:27515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:49.647704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.671072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:49.824038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.021752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.127162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:51.814335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342084550652661:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.814483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.148224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.186627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.224447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.303519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.376131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.620081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.693822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:52.770569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342088845620640:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.770666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.770959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342088845620645:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:52.775985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:52.802091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342088845620647:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:52.888310Z node 1 :TX_PROXY ERROR: Actor# [1:7494342088845620704:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:53.365897Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342071665749001:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:53.365969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> KqpSystemView::PartitionStatsRange1 [GOOD] >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |80.0%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |80.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-04-17T17:54:55.079674Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:55.079822Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:55.099721Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:55.100059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:55.124815Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:55.125255Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=1110018342270892665, session=0, seqNo=0) 2025-04-17T17:54:55.125391Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:55.137638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=1110018342270892665, session=1) 2025-04-17T17:54:55.139507Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:132:2158], cookie=1021233566392377348, session=2) 2025-04-17T17:54:55.139603Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:132:2158], cookie=1021233566392377348) 2025-04-17T17:54:55.140261Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:132:2158], cookie=824495230367779240 2025-04-17T17:54:55.141080Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=3409639404488108056, session=1, seqNo=0) 2025-04-17T17:54:55.153661Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete
(sender=[1:132:2158], cookie=3409639404488108056, session=1) 2025-04-17T17:54:55.154101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:55.154315Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:55.154412Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:55.154615Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:132:2158], cookie=15718812451789486628, session=1) 2025-04-17T17:54:55.164988Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:54:55.165066Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:54:55.165120Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-17T17:54:55.177595Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:54:55.177680Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:132:2158], cookie=15718812451789486628) 2025-04-17T17:54:55.177779Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:54:55.590136Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:55.590252Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:55.611112Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:55.611231Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:55.639080Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:55.639480Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:132:2158], cookie=8917572922720326541, path="") 2025-04-17T17:54:55.654766Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:132:2158], cookie=8917572922720326541, status=SUCCESS) 2025-04-17T17:54:55.655555Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:141:2165], cookie=111, session=0, seqNo=0) 2025-04-17T17:54:55.655695Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:55.655883Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:141:2165], cookie=4190359991513531124, session=1) 2025-04-17T17:54:55.666577Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:54:55.666653Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:54:55.682059Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:141:2165], cookie=111, session=1) 2025-04-17T17:54:55.682158Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:141:2165], cookie=4190359991513531124) 2025-04-17T17:54:55.682230Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:54:55.994700Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:55.994808Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.016299Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Complete 2025-04-17T17:54:56.016429Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.031008Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.031424Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=4908019178991979174, session=0, seqNo=0) 2025-04-17T17:54:56.031542Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:56.054230Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=4908019178991979174, session=1) 2025-04-17T17:54:56.054772Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:130:2156], cookie=4267550835258135989, session=1) 2025-04-17T17:54:56.054864Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:54:56.066872Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:130:2156], cookie=4267550835258135989) 2025-04-17T17:54:56.067794Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:147:2171], cookie=10067765807509560726) 2025-04-17T17:54:56.067868Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:147:2171], cookie=10067765807509560726) 2025-04-17T17:54:56.068435Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:150:2174], cookie=1702325444315562863, session=0, seqNo=0) 2025-04-17T17:54:56.068555Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:56.080735Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:150:2174], cookie=1702325444315562863, session=2) 2025-04-17T17:54:56.081978Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:130:2156], cookie=3788948143121129934, session=2) 2025-04-17T17:54:56.082082Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-04-17T17:54:56.096099Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:130:2156], cookie=3788948143121129934) 2025-04-17T17:54:56.453890Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.454020Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.478235Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.478388Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.501444Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.502493Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-17T17:54:56.502704Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:56.525366Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-17T17:54:56.525986Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=23456, session=1, seqNo=0) 2025-04-17T17:54:56.537780Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:136:2161], cookie=23456, session=1) 2025-04-17T17:54:56.903137Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.903217Z node 5 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.917846Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.918187Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.942336Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.943220Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=12345, session=0, seqNo=0) 2025-04-17T17:54:56.943397Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:56.955666Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=12345, session=1) 2025-04-17T17:54:56.956282Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:139:2163], cookie=23456, session=1, seqNo=0) 2025-04-17T17:54:56.968238Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:139:2163], cookie=23456, session=1) >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> TKesusTest::TestReleaseLockFailure >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> KqpSysColV1::SelectRange [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-04-17T17:54:55.603504Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:55.603651Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:55.632250Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:55.632634Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:55.663999Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:55.664564Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=1290664735034087362, session=0, seqNo=0) 2025-04-17T17:54:55.664738Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:55.677973Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=1290664735034087362, session=1) 2025-04-17T17:54:55.678336Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=9708746633026167167, session=0, seqNo=0) 2025-04-17T17:54:55.678444Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:55.693513Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=9708746633026167167, session=2) 2025-04-17T17:54:56.031486Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.031627Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.055565Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.055694Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.085618Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.086160Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], 
cookie=2354969837955836355, session=1, seqNo=0) 2025-04-17T17:54:56.100102Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=2354969837955836355, session=1) 2025-04-17T17:54:56.468266Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.468406Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.488562Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.488674Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.505950Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.506918Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=16709811906186943431, session=0, seqNo=0) 2025-04-17T17:54:56.507093Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:56.531214Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=16709811906186943431, session=1) 2025-04-17T17:54:56.876703Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.876799Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.893221Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.893338Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.907484Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.907801Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:130:2156], cookie=11971506306340204423, path="") 2025-04-17T17:54:56.930633Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:130:2156], cookie=11971506306340204423, status=SUCCESS) 2025-04-17T17:54:56.931588Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=7216594880400190067, session=0, seqNo=0) 2025-04-17T17:54:56.931697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:56.943515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=7216594880400190067, session=1) 2025-04-17T17:54:56.944204Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=111, session=0, seqNo=0) 2025-04-17T17:54:56.944295Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:56.944412Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:140:2164], cookie=222, seqNo=0 2025-04-17T17:54:56.956220Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=111, session=2) 2025-04-17T17:54:57.276788Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.276888Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.290988Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.291443Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:57.315133Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:57.315397Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:132:2158], cookie=14565805285847647578, path="") 
2025-04-17T17:54:57.327272Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:132:2158], cookie=14565805285847647578, status=SUCCESS) 2025-04-17T17:54:57.328112Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2165], cookie=12700594191842931235, session=0, seqNo=0) 2025-04-17T17:54:57.328224Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:57.340084Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2165], cookie=12700594191842931235, session=1) 2025-04-17T17:54:57.340833Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:141:2165], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:57.340981Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:57.341055Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:57.341360Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=111, session=0, seqNo=0) 2025-04-17T17:54:57.341419Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:57.341521Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=222, session=1, seqNo=0) 2025-04-17T17:54:57.353313Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:141:2165], cookie=123) 2025-04-17T17:54:57.353385Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=111, session=2) 2025-04-17T17:54:57.353438Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=222, session=1) >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogPagingAfter >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> TKesusTest::TestAcquireUpgrade >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 19819, MsgBus: 13717 2025-04-17T17:54:51.844214Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342086257299163:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:51.844358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c38/r3tmp/tmpaAOFs3/pdisk_1.dat 2025-04-17T17:54:52.252437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:52.252560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:52.255971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:52.302627Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 19819, node 1 2025-04-17T17:54:52.364427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:52.364460Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:52.364473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:52.364608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13717 TClient is connected to server localhost:13717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:53.138034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:53.175093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:54:53.189441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:53.333235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:53.469966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:53.538127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:55.108144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342103437170158:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:55.108274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:55.432815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:55.469920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:55.501685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:55.542865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:55.583200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:55.645013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:55.677007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:55.735576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342103437170814:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:55.735658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:55.735740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342103437170819:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:55.739796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:55.749789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342103437170821:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:55.829738Z node 1 :TX_PROXY ERROR: Actor# [1:7494342103437170876:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:56.843764Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342086257299163:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:56.843835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:56.961937Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912496955, txId: 281474976710672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2025-04-17T17:51:19.386593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341172774133210:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:19.386799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001440/r3tmp/tmpNPO6Jx/pdisk_1.dat 2025-04-17T17:51:19.982263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:19.982370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:19.995875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11820, node 1 2025-04-17T17:51:20.048634Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:51:20.048660Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:51:20.058678Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:20.222271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:20.222303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:20.222310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:20.222433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:51:20.299124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:51:20.346151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:51:20.391570Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7494341177069101132:2309] 2025-04-17T17:51:20.391870Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:51:20.410551Z node 1 :TX_DATASHARD DEBUG:
TxInitSchema.Complete 2025-04-17T17:51:20.410628Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:51:20.412459Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:51:20.412545Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:51:20.412616Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:51:20.413016Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:51:20.413079Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:51:20.413111Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7494341177069101146:2309] in generation 1 2025-04-17T17:51:20.418671Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:51:20.470008Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:51:20.470170Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:51:20.474302Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7494341177069101150:2310] 2025-04-17T17:51:20.474334Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:20.474347Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:51:20.474357Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:20.474540Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:51:20.474618Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:51:20.474665Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7494341177069101117:2297], serverId# [1:7494341177069101149:2310], sessionId# [0:0:0] 2025-04-17T17:51:20.474684Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:20.474696Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:20.474719Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:51:20.474745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:20.475248Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:51:20.475468Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:51:20.475533Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-17T17:51:20.482277Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:20.482840Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:51:20.482906Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:51:20.487576Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# 
[1:7494341177069101164:2318], serverId# [1:7494341177069101165:2319], sessionId# [0:0:0] 2025-04-17T17:51:20.492485Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744912280531 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912280531 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:51:20.492523Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:20.492663Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:20.492725Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:20.492766Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:51:20.492786Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744912280531:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-17T17:51:20.493032Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744912280531:281474976710657 keys extracted: 0 2025-04-17T17:51:20.493156Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:51:20.493282Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:51:20.493321Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:51:20.495847Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:51:20.496272Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:51:20.499056Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744912280530 2025-04-17T17:51:20.499081Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:20.499123Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744912280531} 2025-04-17T17:51:20.499168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:20.499195Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:51:20.499209Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:51:20.499234Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:51:20.499287Z node 1 :TX_DATASHARD DEBUG: Complete [1744912280531 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7494341172774133666:2207], exec latency: 2 ms, propose latency: 6 ms 2025-04-17T17:51:20.499310Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-17T17:51:20.499360Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:51:20.499419Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 
72057594046316545 last step 0 next step 1744912280538 2025-04-17T17:51:20.500802Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7494341177069101150:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-17T17:51:20.506062Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-17T17:51:20.506118Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:51:20.526575Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:51:20.526822Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:51:20.526925Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-17T17:51:20.526952Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-17T17:51:20.526961Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-17T17:51:20.532223Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:51:20.602596Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:51:20.603560Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-17T17:51:20.603795Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:51:20.603978Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-17T17:51:20.603993Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:51:20.604010Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-17T17:51:20.604025Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:51:20.604047Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:51:20.604079Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-17T17:51:20.605035Z node 1 :PERSQUEU ... 
G: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:55.065754Z node 27 :TX_DATASHARD DEBUG: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 7500, txId# 281474976715662, at tablet# 72075186224037888 2025-04-17T17:54:55.066373Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:55.085215Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 7500} 2025-04-17T17:54:55.085424Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:55.085508Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:55.085564Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:55.085800Z node 27 :TX_DATASHARD DEBUG: Complete [7500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [27:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:54:55.085912Z node 27 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-17T17:54:55.086133Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:55.086469Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-17T17:54:55.086762Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T17:54:55.088059Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-17T17:54:55.090337Z node 27 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-17T17:54:55.090499Z node 27 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:54:55.102875Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-04-17T17:54:55.103096Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-04-17T17:54:55.103253Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:55.103426Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 9000 from mediator time cast 2025-04-17T17:54:55.103535Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 0 change record(s): at tablet# 72075186224037888 2025-04-17T17:54:55.103624Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:55.103825Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:684:2580] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-17T17:54:55.104137Z node 27 
:CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-17T17:54:55.104486Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-04-17T17:54:55.104700Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:981:2779], at tablet# 72075186224037888 2025-04-17T17:54:55.104756Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-04-17T17:54:55.104907Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-17T17:54:55.105147Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1062:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-17T17:54:55.105743Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:55.105798Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:55.105903Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 2 requestId: cookie: 2 2025-04-17T17:54:55.106048Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:55.106084Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:55.106142Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2025-04-17T17:54:55.106386Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2025-04-17T17:54:55.106573Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-17T17:54:55.106864Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-17T17:54:55.107844Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 count 1 nextOffset 4 batches 1 2025-04-17T17:54:55.108476Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 0 endOffset 3 
curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000| size 93 WTime 7451 2025-04-17T17:54:55.108610Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:54:55.108643Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:54:55.108676Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:54:55.108707Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:54:55.108738Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-04-17T17:54:55.108787Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000003_00000_0000000001_00000| 2025-04-17T17:54:55.108817Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:54:55.108843Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:54:55.108872Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:54:55.108940Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:54:55.109014Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 93 2025-04-17T17:54:55.109845Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 3 count 1 size 93 actorID [27:927:2736] 2025-04-17T17:54:55.109961Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 size 93 2025-04-17T17:54:55.120438Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:54:55.120609Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:54:55.120711Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-17T17:54:55.121010Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-17T17:54:55.121314Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1062:2779] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2025-04-17T17:54:55.121446Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-17T17:54:55.121681Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-17T17:54:55.121720Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-04-17T17:54:55.132794Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-17T17:54:55.281008Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-17T17:54:55.281092Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-17T17:54:55.281243Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-04-17T17:54:55.281418Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-17T17:54:55.281913Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2025-04-17T17:54:55.282031Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T17:54:55.283048Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpSysColV0::SelectRange >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 9092, MsgBus: 27717 2025-04-17T17:54:52.111252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342089012879633:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:52.115565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c34/r3tmp/tmpER9ymD/pdisk_1.dat 2025-04-17T17:54:52.738431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:52.757034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:52.757132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:52.760086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9092, node 1 2025-04-17T17:54:53.046886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:53.046910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:53.046935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:53.047059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27717 TClient is connected to server localhost:27717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:53.586583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:54:53.611229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:54:53.765197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:54:53.887205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:54:53.945011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:54:55.642653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342101897783293:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:55.642785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:56.012107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T17:54:56.039942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T17:54:56.064201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T17:54:56.097265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T17:54:56.128982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T17:54:56.184948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T17:54:56.251626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T17:54:56.305801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342106192751255:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:56.305863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:56.305904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342106192751260:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:54:56.309356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T17:54:56.318155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342106192751262:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:56.410446Z node 1 :TX_PROXY ERROR: Actor# [1:7494342106192751317:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:57.110944Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342089012879633:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:57.111018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV0::SelectRowAsterisk >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestCreateSemaphore [GOOD] >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |80.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-04-17T17:54:57.597091Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.597210Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.612777Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.613079Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:57.637040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:57.637756Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=6362713100592039562, session=0, seqNo=0) 2025-04-17T17:54:57.637921Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:57.650245Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=6362713100592039562, session=1) 2025-04-17T17:54:57.650511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=10112970140224957450, session=0, seqNo=0) 2025-04-17T17:54:57.650613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:57.662401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=10112970140224957450, session=2) 2025-04-17T17:54:57.663420Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:57.663563Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:57.663661Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:57.675826Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:54:57.676183Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-17T17:54:57.676304Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:54:57.676393Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:54:57.689306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=112) 2025-04-17T17:54:57.689647Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=333, name="Lock1") 2025-04-17T17:54:57.689789Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-17T17:54:57.689989Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:57.690095Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-04-17T17:54:57.690202Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-17T17:54:57.690350Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:54:57.702292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:54:57.702360Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=222) 2025-04-17T17:54:57.702427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=223) 2025-04-17T17:54:57.702693Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=334, name="Lock2") 2025-04-17T17:54:57.702769Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-17T17:54:57.702831Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:54:57.714342Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=334) 2025-04-17T17:54:57.714844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:161:2185], cookie=16088869946684081866, name="Lock1") 2025-04-17T17:54:57.714932Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:161:2185], cookie=16088869946684081866) 2025-04-17T17:54:57.715244Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:164:2188], cookie=11233457772264406859, name="Lock2") 2025-04-17T17:54:57.715289Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:164:2188], cookie=11233457772264406859) 2025-04-17T17:54:57.725086Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.725151Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.725554Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.725987Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:57.773837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInit::Complete 2025-04-17T17:54:57.773994Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:54:57.774041Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-17T17:54:57.774372Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:204:2218], cookie=11114533421386616536, name="Lock1") 2025-04-17T17:54:57.774457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:204:2218], cookie=11114533421386616536) 2025-04-17T17:54:57.775039Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:212:2225], cookie=13198731280912352466, name="Lock2") 2025-04-17T17:54:57.775097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:212:2225], cookie=13198731280912352466) 2025-04-17T17:54:58.242153Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.242267Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.254685Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.254814Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.279570Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.280411Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=15117144736267355667, session=0, seqNo=0) 2025-04-17T17:54:58.280570Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:58.294910Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=15117144736267355667, session=1) 2025-04-17T17:54:58.295263Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=2888993870559622250, session=0, seqNo=0) 2025-04-17T17:54:58.295416Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:58.307225Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=2888993870559622250, session=2) 2025-04-17T17:54:58.308075Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:58.308194Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:58.308269Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:58.326993Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-04-17T17:54:58.327339Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-17T17:54:58.327463Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:54:58.327528Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:54:58.341662Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=112) 2025-04-17T17:54:58.342047Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=333, session=1, semaphore="Lock1" count=1) 2025-04-17T17:54:58.342280Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:58.342371Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-17T17:54:58.342443Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:54:58.356095Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=333) 2025-04-17T17:54:58.356176Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-04-17T17:54:58.356205Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=223) 2025-04-17T17:54:58.356738Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2183], cookie=1470946787152583240, name="Lock1") 2025-04-17T17:54:58.356810Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2183], cookie=1470946787152583240) 2025-04-17T17:54:58.357196Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2186], cookie=18229172640950645908, name="Lock2") 2025-04-17T17:54:58.357247Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2186], cookie=18229172640950645908) 2025-04-17T17:54:58.357618Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2189], cookie=690088311458606229, name="Lock1") 2025-04-17T17:54:58.357681Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2189], cookie=690088311458606229) 2025-04-17T17:54:58.358076Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2192], cookie=16526641702439188738, name="Lock2") 2025-04-17T17:54:58.358128Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2192], cookie=16526641702439188738) 2025-04-17T17:54:58.358368Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=444, session=2, semaphore="Lock2" count=1) 2025-04-17T17:54:58.358479Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:54:58.378715Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=444) 2025-04-17T17:54:58.379426Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2197], cookie=17404827137556763844, name="Lock2") 2025-04-17T17:54:58.379511Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2197], cookie=17404827137556763844) 2025-04-17T17:54:58.379927Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:176:2200], cookie=13134681370503578660, name="Lock2") 2025-04-17T17:54:58.379993Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:176:2200], cookie=13134681370503578660) 2025-04-17T17:54:58.392627Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 
2025-04-17T17:54:58.392719Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.393251Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.393519Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.453278Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.453429Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:58.453480Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-17T17:54:58.453506Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:54:58.453531Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:54:58.453893Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:216:2230], cookie=4729042134673848725, name="Lock1") 2025-04-17T17:54:58.453973Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:216:2230], cookie=4729042134673848725) 2025-04-17T17:54:58.454617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:224:2237], cookie=7497379376761447253, name="Lock2") 2025-04-17T17:54:58.454693Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:224:2237], cookie=7497379376761447253) >> KqpSysColV0::SelectRowById >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-04-17T17:54:57.081441Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.081574Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.099849Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.100133Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:57.124649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:57.125278Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=9725782991834006886, session=0, seqNo=222) 2025-04-17T17:54:57.125402Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:57.137766Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=9725782991834006886, session=1) 2025-04-17T17:54:57.138036Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=18092455681607948755, session=1, seqNo=111) 2025-04-17T17:54:57.149847Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=18092455681607948755, session=1) 2025-04-17T17:54:57.466100Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.466209Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.480118Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.480216Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2025-04-17T17:54:57.506789Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:57.507254Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=111, session=0, seqNo=42) 2025-04-17T17:54:57.507412Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:57.507587Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=222, session=1, seqNo=41) 2025-04-17T17:54:57.519595Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=111, session=1) 2025-04-17T17:54:57.519681Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=222, session=1) 2025-04-17T17:54:57.885695Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.885806Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.903404Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.903525Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:57.917754Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:57.918253Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=11925624394979971726, session=0, seqNo=0) 2025-04-17T17:54:57.918426Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:57.941236Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=11925624394979971726, session=1) 2025-04-17T17:54:57.942811Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:147:2171], cookie=6994001099205993128) 2025-04-17T17:54:57.942909Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:147:2171], cookie=6994001099205993128) 2025-04-17T17:54:58.302857Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.302948Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.316049Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.316183Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.330788Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.771233Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.771342Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.789965Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.790391Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.814644Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.815219Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=11762173018379272804, session=0, seqNo=0) 2025-04-17T17:54:58.815406Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:58.827363Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=11762173018379272804, session=1) 2025-04-17T17:54:58.827683Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], 
cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:58.827863Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:58.827978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:58.840207Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-17T17:54:58.840962Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:146:2170], cookie=7444386485968812408, name="Sem1", limit=42) 2025-04-17T17:54:58.841076Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-17T17:54:58.852998Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:146:2170], cookie=7444386485968812408) 2025-04-17T17:54:58.853520Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2175], cookie=8876840941259519365, name="Sem1", limit=42) 2025-04-17T17:54:58.865655Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2175], cookie=8876840941259519365) 2025-04-17T17:54:58.866157Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2180], cookie=3905434672261653718, name="Sem1", limit=51) 2025-04-17T17:54:58.878414Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2180], cookie=3905434672261653718) 2025-04-17T17:54:58.879003Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2185], cookie=9946495373949298584, name="Lock1", limit=42) 2025-04-17T17:54:58.891343Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2185], cookie=9946495373949298584) 2025-04-17T17:54:58.891942Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2190], cookie=2760452281873160412, name="Lock1", limit=18446744073709551615) 2025-04-17T17:54:58.904085Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2190], cookie=2760452281873160412) 2025-04-17T17:54:58.904701Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:171:2195], cookie=4683601092582875345, name="Sem1") 2025-04-17T17:54:58.904805Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:171:2195], cookie=4683601092582875345) 2025-04-17T17:54:58.905225Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:174:2198], cookie=17561878918849892226, name="Sem2") 2025-04-17T17:54:58.905281Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:174:2198], cookie=17561878918849892226) 2025-04-17T17:54:58.917655Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.917780Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.918438Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.919218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.970908Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.971022Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:58.971394Z 
node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:214:2228], cookie=2312277450378913643, name="Sem1") 2025-04-17T17:54:58.971464Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:214:2228], cookie=2312277450378913643) 2025-04-17T17:54:58.971943Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:221:2234], cookie=3686176536949099742, name="Sem2") 2025-04-17T17:54:58.971997Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:221:2234], cookie=3686176536949099742) >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:45.508688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:45.508781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.508827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:45.508863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:45.508943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:45.508980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:45.509039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:45.509122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:45.509460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:45.590964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:45.591023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:45.607397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:45.607770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:45.607925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:45.616673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:45.616866Z node 
1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:45.617404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.617587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:45.620378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.621817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.621893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.622125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:45.622214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.622280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:45.622436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.629662Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:45.775048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:45.775323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.775575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:45.775828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:45.775905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.778287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.778410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:45.778588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.778652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:45.778686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:45.778722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 
2 -> 3 2025-04-17T17:54:45.781021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.781097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:45.781140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:45.782898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.782953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.783060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.783128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.786747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:45.788753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:45.788948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:45.789953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:45.790084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:45.790146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.790474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:45.790535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:45.790692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:45.790817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:45.793160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:45.793206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:45.793401Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:45.793447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:45.793844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:45.793913Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:45.794034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.794075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.794138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:45.794195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.794232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:45.794275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:45.794313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:45.794343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:45.794411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:45.794448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:45.794481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:45.796323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.796494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:45.796537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
94046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 768 RawX2: 4294970000 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-17T17:54:58.995376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2025-04-17T17:54:58.995480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 768 RawX2: 4294970000 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-17T17:54:58.995520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-17T17:54:58.997000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:54:58.997050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-17T17:54:58.997086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T17:54:58.997113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-17T17:54:58.997172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-17T17:54:58.997259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-17T17:54:58.997361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:54:58.997417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:54:58.998785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:54:58.998902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:54:59.000079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:59.000117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:59.000232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T17:54:59.000342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:59.000389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-17T17:54:59.000427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 4 2025-04-17T17:54:59.000723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 
72057594046678944 2025-04-17T17:54:59.000762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:54:59.000839Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:54:59.000868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:54:59.000898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-17T17:54:59.001632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T17:54:59.001710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T17:54:59.001757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-17T17:54:59.001797Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2025-04-17T17:54:59.001844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:54:59.002286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T17:54:59.002335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T17:54:59.002357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-17T17:54:59.002377Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:54:59.002399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T17:54:59.002453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-17T17:54:59.004073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:54:59.004112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:59.004349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:54:59.004464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-17T17:54:59.004499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:54:59.004528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 
1/1 2025-04-17T17:54:59.004566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:54:59.004604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-17T17:54:59.004637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T17:54:59.004665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-17T17:54:59.004705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-17T17:54:59.004770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:54:59.005204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:59.005231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:54:59.006591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T17:54:59.008633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T17:54:59.008943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:59.008977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-17T17:54:59.009395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-04-17T17:54:59.009785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-17T17:54:59.009813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-17T17:54:59.010212Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-17T17:54:59.010285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-17T17:54:59.010311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:998:2925] TestWaitNotification: OK eventTxId 107 2025-04-17T17:54:59.010783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:54:59.010939Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 142us result status StatusSuccess 2025-04-17T17:54:59.011193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::PartitionStatsRange3 >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> KqpSystemView::NodesSimple |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-04-17T17:54:57.118735Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.118852Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.132704Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.133032Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:57.159202Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:57.167178Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=18138404801214091996, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-17T17:54:57.167473Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:57.181386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=18138404801214091996) 2025-04-17T17:54:57.182132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:141:2165], cookie=675515444618034663, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-17T17:54:57.182367Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-04-17T17:54:57.194666Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:141:2165], cookie=675515444618034663) 2025-04-17T17:54:57.195366Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2170], cookie=316627160117351799, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:54:57.195588Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-04-17T17:54:57.207836Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2170], cookie=316627160117351799) 2025-04-17T17:54:57.208510Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:151:2175], cookie=9570894509286097328, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:54:57.208730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-17T17:54:57.222209Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:151:2175], cookie=9570894509286097328) 2025-04-17T17:54:57.222876Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:156:2180], cookie=13972343152414620746, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:54:57.223150Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-04-17T17:54:57.235056Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:156:2180], cookie=13972343152414620746) 2025-04-17T17:54:57.235659Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:161:2185], cookie=11988572213739447190, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:54:57.235849Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-04-17T17:54:57.247984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:161:2185], cookie=11988572213739447190) 2025-04-17T17:54:57.248551Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2190], cookie=12808986211028863203, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-17T17:54:57.248758Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 2025-04-17T17:54:57.261063Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2190], cookie=12808986211028863203) 2025-04-17T17:54:57.261742Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2195], cookie=10197365979293502749, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:54:57.261978Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-04-17T17:54:57.274339Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2195], cookie=10197365979293502749) 2025-04-17T17:54:57.274994Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:176:2200], cookie=8153581212623796061, ids=[100], paths=[], recursive=0) 2025-04-17T17:54:57.275102Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:176:2200], cookie=8153581212623796061) 2025-04-17T17:54:57.275695Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:179:2203], cookie=6229043508792673669, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-17T17:54:57.275788Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:179:2203], cookie=6229043508792673669) 2025-04-17T17:54:57.276355Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:182:2206], cookie=6423426508139778911, ids=[], paths=[/Root, ], recursive=0) 2025-04-17T17:54:57.276450Z 
node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:182:2206], cookie=6423426508139778911) 2025-04-17T17:54:57.277063Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:185:2209], cookie=898882715369974709, ids=[1, 1], paths=[], recursive=0) 2025-04-17T17:54:57.277118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:185:2209], cookie=898882715369974709) 2025-04-17T17:54:57.277461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:188:2212], cookie=3037123961320159682, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-17T17:54:57.277518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:188:2212], cookie=3037123961320159682) 2025-04-17T17:54:57.277903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:191:2215], cookie=7399703350452946355, ids=[], paths=[], recursive=1) 2025-04-17T17:54:57.277959Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:191:2215], cookie=7399703350452946355) 2025-04-17T17:54:57.278512Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:194:2218], cookie=11750338966243611967, ids=[], paths=[], recursive=0) 2025-04-17T17:54:57.278559Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:194:2218], cookie=11750338966243611967) 2025-04-17T17:54:57.278943Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:197:2221], cookie=7741326998460155569, ids=[3, 2], paths=[], recursive=1) 2025-04-17T17:54:57.278989Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:197:2221], cookie=7741326998460155569) 2025-04-17T17:54:57.279362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:200:2224], cookie=1520390138995423444, ids=[3, 2], paths=[], recursive=0) 2025-04-17T17:54:57.279401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:200:2224], cookie=1520390138995423444) 2025-04-17T17:54:57.279801Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:203:2227], cookie=11340846477238662149, ids=[], paths=[Root2/], recursive=1) 2025-04-17T17:54:57.279851Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:203:2227], cookie=11340846477238662149) 2025-04-17T17:54:57.280228Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:206:2230], cookie=1029240202665225326, ids=[], paths=[Root2/], recursive=0) 2025-04-17T17:54:57.280269Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:206:2230], cookie=1029240202665225326) 2025-04-17T17:54:57.292432Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:57.292520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:57.293105Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:57.293687Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:57.330979Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:57.331349Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:246:2260], cookie=3735963097963941705, ids=[100], paths=[], recursive=0) 2025-04-17T17:54:57.331425Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:246:2260], cookie=3735963097963941705) 2025-04-17T17:54:57.332128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:252:2265], cookie=17129875046220856871, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-17T17:54:57.332216Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:252:2265], cookie=17129875046220856871) 2025-04-17T17:54:57.332938Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:255:2268], cookie=5688055474772578227, ids=[], paths=[/Root, ], recursive=0) 2025-04-17T17:54:57.333026Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:255:2268], cookie=5688055474772578227) 2025-04-17T17:54:57.333769Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:258:2271], cookie=1841088090173411470, ids=[1, 1], paths=[], recursive=0) 2025-04-17T17:54:57.333846Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:258:2271], cookie=1841088090173411470) 2025-04-17T17:54:57.334525Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:261:2274], cookie=13745606788795705949, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-17T17:54:57.334602Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:261:2274], cookie=13745606788795705949) 2025-04-17T17:54:57.335258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:264:2277], cookie=7701850770508325761, ids=[], paths=[], recursive=1) 2025-04-17T17:54:57.335347Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:264:2277], cookie=7701850770508325761) 2025-04-17T17:54:57.336048Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:267:2280], cookie=8415623562256289157, ids=[], paths=[], recursive=0) 2025-04-17T17:54:57.336101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:267:2280], cookie=8415623562256289157) 2025-04-17T17:54:57.336796Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:270:2283], cookie=12130148126324184433, ids=[3, 2], paths=[], recursive=1) 2025-04-17T17:54:57.336872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:270:2283], cookie=12130148126324184433) 2025-04-17T17:54:57.337550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:273:2286], cookie=10567100888820914859, ids=[3, 2], paths=[], recursive=0) 2025-0 ... 
[72057594037927937] TTxInit::Execute 2025-04-17T17:54:59.403470Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:59.403896Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=15723711811801621173, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-04-17T17:54:59.404136Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:59.426692Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=15723711811801621173) 2025-04-17T17:54:59.427317Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:139:2163], cookie=17779896075108538258, path="/Root/Q", config={ }) 2025-04-17T17:54:59.427574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-04-17T17:54:59.439959Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:139:2163], cookie=17779896075108538258) 2025-04-17T17:54:59.440588Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:144:2168], cookie=11838201112719203569, path="/Root/Folder", config={ }) 2025-04-17T17:54:59.440825Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-04-17T17:54:59.453011Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:144:2168], cookie=11838201112719203569) 2025-04-17T17:54:59.453607Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:149:2173], cookie=10396967071580211862, path="/Root/Folder/Q1", config={ }) 2025-04-17T17:54:59.453836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-17T17:54:59.467952Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:149:2173], cookie=10396967071580211862) 2025-04-17T17:54:59.468513Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:154:2178], cookie=12741238300673916180, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.468608Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:154:2178], cookie=12741238300673916180) 2025-04-17T17:54:59.469262Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:160:2184], cookie=6369061295844307437, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.469317Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:160:2184], cookie=6369061295844307437) 2025-04-17T17:54:59.470028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:166:2190], cookie=14591421003973184925, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.470116Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:166:2190], cookie=14591421003973184925) 2025-04-17T17:54:59.470556Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:169:2193], cookie=7940878300204363888, id=0, path="/Root/Folder/NonexistingRes") 2025-04-17T17:54:59.470623Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:169:2193], cookie=7940878300204363888) 2025-04-17T17:54:59.471087Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute 
(sender=[4:172:2196], cookie=15822240979714958437, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.471175Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:172:2196], cookie=15822240979714958437) 2025-04-17T17:54:59.471775Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:175:2199], cookie=18417667991677751547, id=100, path="") 2025-04-17T17:54:59.471847Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:175:2199], cookie=18417667991677751547) 2025-04-17T17:54:59.472347Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:178:2202], cookie=4280806596200380599, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.472425Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:178:2202], cookie=4280806596200380599) 2025-04-17T17:54:59.473006Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:181:2205], cookie=11059841393988710215, id=3, path="") 2025-04-17T17:54:59.473082Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:181:2205], cookie=11059841393988710215) 2025-04-17T17:54:59.473764Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:184:2208], cookie=2634012017539008682, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.473863Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:184:2208], cookie=2634012017539008682) 2025-04-17T17:54:59.474387Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:187:2211], cookie=13749861664899520397, id=0, path="/Root/Folder/Q1") 2025-04-17T17:54:59.474519Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-04-17T17:54:59.486998Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:187:2211], cookie=13749861664899520397) 2025-04-17T17:54:59.487919Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:192:2216], cookie=2007690741270618921, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.488006Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:192:2216], cookie=2007690741270618921) 2025-04-17T17:54:59.500523Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:59.500625Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:59.501244Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:59.501739Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:59.548897Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:59.549219Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:232:2246], cookie=11569390994641463653, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.549299Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:232:2246], cookie=11569390994641463653) 2025-04-17T17:54:59.550045Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:238:2251], cookie=5446205710160084263, id=3, path="") 2025-04-17T17:54:59.550204Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter 
resource 3 "Root/Folder" 2025-04-17T17:54:59.562626Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:238:2251], cookie=5446205710160084263) 2025-04-17T17:54:59.563241Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:243:2256], cookie=16295699352250312783, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.563318Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:243:2256], cookie=16295699352250312783) 2025-04-17T17:54:59.574712Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:59.574809Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:59.575456Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:59.576053Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:59.623691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:59.624085Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:283:2286], cookie=15613108001603933936, ids=[], paths=[], recursive=1) 2025-04-17T17:54:59.624174Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:283:2286], cookie=15613108001603933936) 2025-04-17T17:55:00.000402Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:00.000527Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:00.023194Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:00.023869Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:00.048550Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:00.049080Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=17615586608921512751, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:55:00.049288Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-04-17T17:55:00.061233Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=17615586608921512751) 2025-04-17T17:55:00.061800Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=3794718139452990227, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T17:55:00.061967Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-04-17T17:55:00.074291Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=3794718139452990227) 2025-04-17T17:55:00.076292Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 5203641224892333568. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:55:00.076370Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=5203641224892333568) 2025-04-17T17:55:00.077294Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 10496457517675241737. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2025-04-17T17:55:00.077365Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=10496457517675241737) >> KqpSysColV0::InnerJoinTables >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-04-17T17:54:58.115163Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.115293Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.132364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.132649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.156841Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.157202Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=4108358512798028364, session=0, seqNo=0) 2025-04-17T17:54:58.157328Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:58.169491Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=4108358512798028364, session=1) 2025-04-17T17:54:58.169815Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute 
(sender=[1:132:2158], cookie=4303064010112984541, session=0, seqNo=0) 2025-04-17T17:54:58.169943Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:58.181747Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=4303064010112984541, session=2) 2025-04-17T17:54:58.182045Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=111, name="Lock1") 2025-04-17T17:54:58.193910Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:54:58.194151Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:58.194291Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:58.194374Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:58.206118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:54:58.206355Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=333, name="Lock1") 2025-04-17T17:54:58.218366Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:54:58.531283Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.531411Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.549812Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.549911Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.574357Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.574823Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=1845078559715731620, session=0, seqNo=0) 2025-04-17T17:54:58.575004Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:58.588668Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=1845078559715731620, session=1) 2025-04-17T17:54:58.589049Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=9421441176771955917, session=0, seqNo=0) 2025-04-17T17:54:58.589200Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:58.604860Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=9421441176771955917, session=2) 2025-04-17T17:54:58.605378Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:143:2167], cookie=14285244991645483364, name="Sem1", limit=1) 2025-04-17T17:54:58.605540Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:54:58.619814Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:143:2167], cookie=14285244991645483364) 2025-04-17T17:54:58.620263Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-17T17:54:58.620451Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:54:58.620696Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-17T17:54:58.634943Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-04-17T17:54:58.635054Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=222) 2025-04-17T17:54:58.635772Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:151:2175], cookie=5834251967278537270, name="Sem1") 2025-04-17T17:54:58.635892Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:151:2175], cookie=5834251967278537270) 2025-04-17T17:54:58.636361Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2178], cookie=1765473536477183351, name="Sem1") 2025-04-17T17:54:58.636431Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2178], cookie=1765473536477183351) 2025-04-17T17:54:58.636679Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:132:2158], cookie=333, name="Sem1") 2025-04-17T17:54:58.636807Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-04-17T17:54:58.651771Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:132:2158], cookie=333) 2025-04-17T17:54:58.652388Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2183], cookie=15953798296504001253, name="Sem1") 2025-04-17T17:54:58.652479Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2183], cookie=15953798296504001253) 2025-04-17T17:54:58.652936Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2186], cookie=17438042625930502732, name="Sem1") 2025-04-17T17:54:58.653005Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2186], cookie=17438042625930502732) 2025-04-17T17:54:58.653244Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:132:2158], cookie=444, name="Sem1") 2025-04-17T17:54:58.653342Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-17T17:54:58.670660Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:132:2158], cookie=444) 2025-04-17T17:54:58.671436Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2191], cookie=9091532248781054260, name="Sem1") 2025-04-17T17:54:58.671523Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2191], cookie=9091532248781054260) 2025-04-17T17:54:58.672103Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:170:2194], cookie=10109733894410821497, name="Sem1") 2025-04-17T17:54:58.672171Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:170:2194], cookie=10109733894410821497) 2025-04-17T17:54:59.154559Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:59.154659Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:59.165772Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:59.165859Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:59.181253Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:59.181566Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:130:2156], cookie=2860588685140425099, name="Sem1", limit=1) 2025-04-17T17:54:59.181715Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:54:59.204106Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:130:2156], cookie=2860588685140425099) 2025-04-17T17:54:59.204635Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:139:2163], cookie=11475879209668908998, name="Sem2", limit=1) 2025-04-17T17:54:59.204778Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-17T17:54:59.216798Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:139:2163], cookie=11475879209668908998) 2025-04-17T17:54:59.217320Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:144:2168], cookie=8537442872413796709, name="Sem1") 2025-04-17T17:54:59.217403Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:144:2168], cookie=8537442872413796709) 2025-04-17T17:54:59.217793Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:147:2171], cookie=18406946530681528353, name="Sem2") 2025-04-17T17:54:59.217851Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:147:2171], cookie=18406946530681528353) 2025-04-17T17:54:59.229488Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:59.229599Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:59.230216Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:59.230851Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:59.271977Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:59.272238Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:187:2201], cookie=11660124582790225638, name="Sem1") 2025-04-17T17:54:59.272303Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:187:2201], cookie=11660124582790225638) 2025-04-17T17:54:59.272813Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:193:2206], cookie=4571008449057544473, name="Sem2") 2025-04-17T17:54:59.272868Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:193:2206], cookie=4571008449057544473) 2025-04-17T17:54:59.273374Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:196:2209], cookie=537704705816006296, name="Sem1", limit=1) 2025-04-17T17:54:59.302918Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:196:2209], cookie=537704705816006296) 2025-04-17T17:54:59.303609Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:201:2214], cookie=7805683048079124351, name="Sem2", limit=1) 2025-04-17T17:54:59.316218Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:201:2214], 
cookie=7805683048079124351) 2025-04-17T17:54:59.316911Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:206:2219], cookie=637714168006922351, name="Sem1") 2025-04-17T17:54:59.317000Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:206:2219], cookie=6377141680 ... 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2025-04-17T17:55:00.083157Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:222:2245], cookie=11763104150378031528) 2025-04-17T17:55:00.083828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:227:2250], cookie=17438370687648625057, name="Sem1", force=0) 2025-04-17T17:55:00.083916Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2025-04-17T17:55:00.098479Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:227:2250], cookie=17438370687648625057) 2025-04-17T17:55:00.099156Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:232:2255], cookie=12473186800087226006, name="Sem1", limit=1) 2025-04-17T17:55:00.099278Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2025-04-17T17:55:00.111307Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:232:2255], cookie=12473186800087226006) 2025-04-17T17:55:00.111857Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:237:2260], cookie=2187130682853024158, name="Sem1", force=0) 2025-04-17T17:55:00.111931Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2025-04-17T17:55:00.124092Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:237:2260], cookie=2187130682853024158) 2025-04-17T17:55:00.124924Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:242:2265], cookie=5840954443472046734, name="Sem1", limit=1) 2025-04-17T17:55:00.125066Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2025-04-17T17:55:00.137626Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:242:2265], cookie=5840954443472046734) 2025-04-17T17:55:00.138208Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-17T17:55:00.138385Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-04-17T17:55:00.150535Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2025-04-17T17:55:00.151139Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-17T17:55:00.163505Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2025-04-17T17:55:00.164095Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Sem1") 2025-04-17T17:55:00.164228Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-04-17T17:55:00.186751Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2025-04-17T17:55:00.187360Z node 
4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=444, session=2, semaphore="Sem1" count=1) 2025-04-17T17:55:00.199825Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=444) 2025-04-17T17:55:00.200440Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=555, name="Sem1") 2025-04-17T17:55:00.200556Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-04-17T17:55:00.200631Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-04-17T17:55:00.212978Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=555) 2025-04-17T17:55:00.630500Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:00.630608Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:00.650258Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:00.650821Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:00.679064Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:00.679660Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=829756238187518386, session=0, seqNo=0) 2025-04-17T17:55:00.679824Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:55:00.692319Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=829756238187518386, session=1) 2025-04-17T17:55:00.692722Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=112, name="Sem1", limit=5) 2025-04-17T17:55:00.692897Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:55:00.705136Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=112) 2025-04-17T17:55:00.705492Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=113, name="Sem1") 2025-04-17T17:55:00.717718Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=113) 2025-04-17T17:55:00.718051Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=114, name="Sem1", force=0) 2025-04-17T17:55:00.718150Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-17T17:55:00.730392Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=114) 2025-04-17T17:55:00.730719Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[5:132:2158], cookie=11230161244166372475 2025-04-17T17:55:00.730995Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=115, name="Sem1", limit=5) 2025-04-17T17:55:00.745003Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=115) 2025-04-17T17:55:00.745403Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=116, name="Sem1") 2025-04-17T17:55:00.762125Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=116) 2025-04-17T17:55:00.762477Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=117, name="Sem1", force=0) 2025-04-17T17:55:00.786061Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=117) 2025-04-17T17:55:00.786414Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=118, session=1, semaphore="Sem1" count=1) 2025-04-17T17:55:00.798752Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=118) 2025-04-17T17:55:00.799100Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=119, name="Sem1") 2025-04-17T17:55:00.811384Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=119) 2025-04-17T17:55:00.811758Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=120, name="Sem1") 2025-04-17T17:55:00.811855Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=120) 2025-04-17T17:55:00.812067Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:132:2158], cookie=17806210090955454874, session=1) 2025-04-17T17:55:00.812157Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:55:00.824795Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:132:2158], cookie=17806210090955454874) 2025-04-17T17:55:00.825132Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=121, name="Sem1", limit=5) 2025-04-17T17:55:00.837390Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=121) 2025-04-17T17:55:00.837816Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=122, name="Sem1") 2025-04-17T17:55:00.852024Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=122) 2025-04-17T17:55:00.852358Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=123, name="Sem1", force=0) 2025-04-17T17:55:00.864737Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=123) 2025-04-17T17:55:00.865091Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=124, session=1, semaphore="Sem1" count=1) 2025-04-17T17:55:00.877327Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=124) 2025-04-17T17:55:00.877701Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=125, name="Sem1") 2025-04-17T17:55:00.900480Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=125) 2025-04-17T17:55:00.900836Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=126, name="Sem1") 2025-04-17T17:55:00.900922Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=126) 2025-04-17T17:55:00.901514Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=127, name="Sem1", limit=5) 2025-04-17T17:55:00.901581Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=127) 2025-04-17T17:55:00.901832Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=128, name="Sem1") 2025-04-17T17:55:00.901887Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=128) 2025-04-17T17:55:00.902067Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=129, name="Sem1", force=0) 2025-04-17T17:55:00.902121Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=129) 2025-04-17T17:55:00.902345Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=130, session=1, semaphore="Sem1" count=1) 2025-04-17T17:55:00.902409Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=130) 2025-04-17T17:55:00.902595Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=131, name="Sem1") 2025-04-17T17:55:00.902646Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=131) 2025-04-17T17:55:00.902828Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=132, name="Sem1") 2025-04-17T17:55:00.902899Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=132) >> KqpSystemView::QueryStatsScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:47.357350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:47.357442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:47.357485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:47.357542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:47.359176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:47.359254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:47.359369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:47.359459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:47.359964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:47.459775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:47.459846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:47.476901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:47.477384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:47.477608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:47.487309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:47.487572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:47.488269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:47.488522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:47.498377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:47.500151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:47.500226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:47.500453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:47.500526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:47.500576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:47.500731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.509098Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:47.640695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:47.640954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.641192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:47.641405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:47.641446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.644988Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:47.645146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:47.645371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.645432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:47.645471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:47.645500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:47.648140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.648225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:47.648287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:47.650707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.650766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.650809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:47.650867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.654543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:47.656943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:47.657150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:47.658136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:47.658307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:47.658376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:47.658726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:47.658782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:47.658954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:47.659069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:47.662088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:47.662131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:47.662347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:47.662390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:47.662771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:47.662819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:47.662956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:47.662993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.663064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:47.663097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.663130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:47.663171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:47.663212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:47.663240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:47.663312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:47.663376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:47.663413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:47.665378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:47.665507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:47.665545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:55:00.457448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-17T17:55:00.457534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 4294969699 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-17T17:55:00.457572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-17T17:55:00.457976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:55:00.458025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-17T17:55:00.458058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T17:55:00.458087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-17T17:55:00.458157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-17T17:55:00.458272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-17T17:55:00.458362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:55:00.458422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:55:00.460851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:55:00.461574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:55:00.462199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:00.462247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:55:00.462434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:00.462610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:00.462676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:55:00.462722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-17T17:55:00.463260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:55:00.463320Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-17T17:55:00.463422Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:55:00.463465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T17:55:00.463512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-17T17:55:00.464335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:00.464464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:00.464502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:55:00.464550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-17T17:55:00.464591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:55:00.465264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:00.465341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:00.465368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:55:00.465396Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T17:55:00.465419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T17:55:00.465482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:55:00.468914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:55:00.468975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:00.469306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:55:00.469465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:55:00.469495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:55:00.469537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:55:00.469564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:55:00.469594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:55:00.469656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:409:2376] message: TxId: 103 2025-04-17T17:55:00.469697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:55:00.469725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:55:00.469756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:55:00.469845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:00.470325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:00.470360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:55:00.471102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:55:00.471223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:55:00.472838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:00.472888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-17T17:55:00.473206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:55:00.473250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1347:3273] 2025-04-17T17:55:00.473793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-17T17:55:00.477233Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:00.477430Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 239us result status StatusSuccess 2025-04-17T17:55:00.477781Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD]
>> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
Test command err: 2025-04-17T17:54:52.294983Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:52.295127Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:52.318798Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:52.319138Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:52.344246Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:52.350166Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=9600787842742300495, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:54:52.350630Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:52.363690Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=9600787842742300495) 2025-04-17T17:54:52.364318Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:142:2166], cookie=11361738570495317631, path="/Root/Res", config={ }) 2025-04-17T17:54:52.364556Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:54:52.379782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:142:2166], cookie=11361738570495317631) 2025-04-17T17:54:52.381545Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:147:2171]. Cookie: 15109145605275147351. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:52.381629Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:147:2171], cookie=15109145605275147351) 2025-04-17T17:54:52.382356Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:147:2171]. Cookie: 4103413378136327794. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-17T17:54:52.382431Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:147:2171], cookie=4103413378136327794) 2025-04-17T17:54:54.486301Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:54.486407Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:54.504437Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:54.504540Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:54.529100Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:54.529444Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:132:2158], cookie=3979154205692317378, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:54:54.529698Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:54.541248Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:132:2158], cookie=3979154205692317378) 2025-04-17T17:54:54.541902Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:142:2166]. Cookie: 17574665971794445404. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:54.541945Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:142:2166], cookie=17574665971794445404) 2025-04-17T17:54:54.542479Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:142:2166]. Cookie: 1142656389431182738. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:54.542571Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:142:2166], cookie=1142656389431182738) 2025-04-17T17:54:54.543104Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:142:2166]. Cookie: 8790012961648070099. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-17T17:54:54.543150Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:142:2166], cookie=8790012961648070099) 2025-04-17T17:54:54.543595Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:142:2166]. Cookie: 11972596355940736659. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-17T17:54:54.543646Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:142:2166], cookie=11972596355940736659) 2025-04-17T17:54:56.786539Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.786641Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.801412Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.801525Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.816168Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.816604Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:130:2156], cookie=11923639260640501, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:54:56.816945Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:56.841221Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:130:2156], cookie=11923639260640501) 2025-04-17T17:54:56.841827Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:140:2164], cookie=6437265764443522475, path="/Root/Res1", config={ }) 2025-04-17T17:54:56.842063Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-04-17T17:54:56.854003Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:140:2164], cookie=6437265764443522475) 2025-04-17T17:54:56.854477Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:145:2169], cookie=14518408116279521437, path="/Root/Res2", config={ }) 2025-04-17T17:54:56.854640Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-04-17T17:54:56.866879Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:145:2169], cookie=14518408116279521437) 2025-04-17T17:54:56.867820Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:150:2174]. Cookie: 9030281573808827529. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:56.867876Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:150:2174], cookie=9030281573808827529) 2025-04-17T17:54:56.868546Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:150:2174]. Cookie: 163150635342713660. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:56.868600Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:150:2174], cookie=163150635342713660) 2025-04-17T17:54:56.869122Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:150:2174]. Cookie: 58517260705354557. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1019500 } } 2025-04-17T17:54:56.869166Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:150:2174], cookie=58517260705354557) 2025-04-17T17:54:58.987844Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.987946Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.998825Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.998911Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:59.012551Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:59.012865Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=17723162096170586605, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-17T17:54:59.013138Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:59.035649Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=17723162096170586605) 2025-04-17T17:54:59.036818Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 13180162127337343030. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:59.036930Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=13180162127337343030) 2025-04-17T17:54:59.037420Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 6164104619478482399. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 26500 } } 2025-04-17T17:54:59.037471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=6164104619478482399) 2025-04-17T17:55:01.216965Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:01.217044Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:01.231657Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:01.232128Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:01.259058Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:01.259537Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=6549633484901378119, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-17T17:55:01.259762Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:55:01.271569Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=6549633484901378119) 2025-04-17T17:55:01.272110Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=10894169251051067010, path="/Root/Res", config={ }) 2025-04-17T17:55:01.272306Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:55:01.284580Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=10894169251051067010) 2025-04-17T17:55:01.285409Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 9516169042607573444. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:55:01.285469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=9516169042607573444) 2025-04-17T17:55:01.285957Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:150:2174], cookie=4706613264565646768, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-04-17T17:55:01.286121Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2025-04-17T17:55:01.286323Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-04-17T17:55:01.298902Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:150:2174], cookie=4706613264565646768) 2025-04-17T17:55:01.299562Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 14675235783309975620. 
Data: { } 2025-04-17T17:55:01.299617Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=14675235783309975620)
>> SystemView::QueryStatsRetries [GOOD]
>> KqpSysColV1::InnerJoinTables
>> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD]
>> DataShardVolatile::DistributedWriteThenLateWriteReadCommit
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1
>> YdbIndexTable::MultiShardTableOneIndex
>> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn
>> KqpSystemView::Join [GOOD]
>> YdbIndexTable::MultiShardTableOneUniqIndex
>> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex
>> DataShardReadIteratorBatchMode::MultipleRanges [GOOD]
>> YdbIndexTable::OnlineBuild
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD]
Test command err: Trying to start YDB, gRPC: 23313, MsgBus: 20683 2025-04-17T17:54:47.162469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342067145691999:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:47.166663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c6f/r3tmp/tmpW8yCVV/pdisk_1.dat 2025-04-17T17:54:47.560180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:47.603398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:47.603518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:47.605127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23313, node 1 2025-04-17T17:54:47.681766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:47.681788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:47.681797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:47.681916Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20683 TClient is connected to server localhost:20683 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:54:48.256244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:48.284463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:48.436210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:48.619926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:48.753370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:50.556674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342080030595674:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:50.556810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:50.848293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:50.883815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:50.909816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:50.938510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:50.971271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.004047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.037893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:51.094929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342084325563630:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.095019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.095092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342084325563635:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:51.098442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:51.110047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342084325563637:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:51.165761Z node 1 :TX_PROXY ERROR: Actor# [1:7494342084325563690:3538] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:52.163753Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342067145691999:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:52.164503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:52.661836Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912492622, txId: 281474976710672] shutting down waiting... 2025-04-17T17:54:53.812045Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912493800, txId: 281474976710674] shutting down waiting... 2025-04-17T17:54:54.970428Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912494964, txId: 281474976710676] shutting down waiting... 2025-04-17T17:54:56.132335Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912496126, txId: 281474976710678] shutting down waiting... 2025-04-17T17:54:57.316039Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912497308, txId: 281474976710680] shutting down waiting... 2025-04-17T17:54:58.467529Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912498461, txId: 281474976710682] shutting down waiting... 2025-04-17T17:54:59.637666Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912499626, txId: 281474976710684] shutting down waiting... 2025-04-17T17:55:00.777570Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912500772, txId: 281474976710686] shutting down waiting... 2025-04-17T17:55:01.969530Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912501961, txId: 281474976710688] shutting down 2025-04-17T17:55:02.481387Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912502462, txId: 281474976710690] shutting down 2025-04-17T17:55:02.550586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:55:02.550640Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSysColV0::SelectRange [GOOD] |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |80.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] |80.1%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> KqpSysColV0::SelectRowById [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpScan::ScanRetryRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 61521, MsgBus: 15928 2025-04-17T17:54:58.796740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342114290087558:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:58.796868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c33/r3tmp/tmpSNuLxw/pdisk_1.dat 2025-04-17T17:54:59.079793Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61521, node 1 2025-04-17T17:54:59.149237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:59.149387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:59.152424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:59.170867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:59.170889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:59.170897Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:59.170998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15928 TClient is connected to server localhost:15928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:59.605337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:59.632735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:54:59.776504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:59.912947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:59.993276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:01.692139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342127174991228:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:01.692265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.130137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.164519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.198472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.270786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.346242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.388114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.425531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.496464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342131469959190:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.496554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.496750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342131469959195:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.500180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:02.513214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342131469959197:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:02.612872Z node 1 :TX_PROXY ERROR: Actor# [1:7494342131469959255:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:03.798407Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342114290087558:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:03.798465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:39.360176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:39.360255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:39.360284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:39.360308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:39.360341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:39.360371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:39.360416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:39.360488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:39.360734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:39.425411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:39.425483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:39.437799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:39.438257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:39.438421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:39.446884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:39.447085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-17T17:54:39.447711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.447934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:39.450833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.452199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:39.452267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:39.452435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:39.452494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:39.452538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:39.452699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.458932Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:39.581062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:39.581334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.581562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:39.581789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:39.581881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.584408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.584535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:39.584703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.584753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:39.584785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:39.584811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:39.586575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.586651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:39.586690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:39.588408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.588442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.588475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.588519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.591518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:39.593013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:39.593159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:39.593803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:39.593900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:39.593936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.594147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:39.594212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:39.594427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:39.594504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:39.597456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:39.597525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:39.597655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T17:54:39.597690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:39.598008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:39.598045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:39.598127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:39.598160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.598211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:39.598239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.598268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:39.598298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:39.598334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:39.598359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:39.598407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:39.598437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:39.598460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:39.599834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:39.599927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:39.599954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ly one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:55:04.285216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.285283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.285652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:04.285768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T17:55:04.285852Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T17:55:04.286046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-17T17:55:04.286168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.286319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:04.286371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-17T17:55:04.286410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:55:04.286522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:04.286620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.286804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-17T17:55:04.287077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.287178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.287520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.287615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.287803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.287888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.287977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.288138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.288218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.288351Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.288558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.288704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.288772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.288824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T17:55:04.289035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:55:04.297181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:55:04.297389Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T17:55:04.299513Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:1751:3674], Recipient [1:1751:3674]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-17T17:55:04.299578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-17T17:55:04.301808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:04.301887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:04.302169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1751:3674], Recipient [1:1751:3674]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:55:04.302229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:55:04.302588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:04.302669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:04.302719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:04.302764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:55:04.303041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1789:3674], Recipient [1:1751:3674]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T17:55:04.303083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T17:55:04.303119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1751:3674] sender: [1:1807:2058] recipient: [1:15:2062] 2025-04-17T17:55:04.364616Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1806:3718], Recipient [1:1751:3674]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-17T17:55:04.364698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:55:04.364834Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:55:04.365147Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 303us result status StatusSuccess 2025-04-17T17:55:04.366021Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false 
IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 16161 Memory: 156728 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 29812, MsgBus: 2142 2025-04-17T17:54:59.044085Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342119119700615:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:59.044207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c29/r3tmp/tmp6NZaAU/pdisk_1.dat 2025-04-17T17:54:59.333569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:59.333737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:59.335817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:59.348629Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29812, node 1 2025-04-17T17:54:59.403517Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:59.403538Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:59.403544Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:59.403671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2142 TClient is connected to server localhost:2142 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:59.897312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:59.936445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:00.063779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:00.213856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:00.268530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:01.641883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342127709636985:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:01.642002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.064778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.148125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.181184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.219922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.255586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.290787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.329405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.439968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342132004604952:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.440097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.440286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342132004604957:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.443884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:02.452799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342132004604959:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:02.513870Z node 1 :TX_PROXY ERROR: Actor# [1:7494342132004605016:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 12319, MsgBus: 19482 2025-04-17T17:54:59.481920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342119620446849:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:59.481978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c28/r3tmp/tmpAd7xqc/pdisk_1.dat 2025-04-17T17:54:59.808044Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12319, node 1 2025-04-17T17:54:59.856921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:59.857197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:59.859601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:59.890596Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:59.890646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:59.890662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:59.890797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19482 TClient is connected to server localhost:19482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:00.389056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T17:55:00.422741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:55:00.560238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:00.690035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:00.753601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.548489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342132505350505:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.548643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:02.852352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.898874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.936381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:02.974875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.024081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.066680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.156424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.252124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342136800318475:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:03.252206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:03.252446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342136800318480:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:03.256702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:03.274654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342136800318482:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:03.330795Z node 1 :TX_PROXY ERROR: Actor# [1:7494342136800318537:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:04.481891Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342119620446849:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:04.481952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2025-04-17T17:52:56.164517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:56.164978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:56.165166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001052/r3tmp/tmpmZN8FS/pdisk_1.dat 2025-04-17T17:52:56.583891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.653685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:56.702674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:56.702803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:56.714312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:56.804635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:56.866148Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:56.867477Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:56.868001Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:56.868313Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:56.923851Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:56.924468Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:56.924544Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:56.925833Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:56.925895Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:56.925933Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:56.926231Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:56.926355Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:56.926432Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:56.937404Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:56.971117Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:56.971304Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:56.971399Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:56.971437Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:56.971470Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:56.971502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:56.971745Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.971800Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:56.972092Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:56.972194Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:56.972336Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:56.972388Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:56.972461Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:56.972511Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:56.972548Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:56.972576Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:56.972616Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:56.972759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.972799Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:56.972842Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:56.973211Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:56.973252Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:56.973355Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:56.973633Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:56.973733Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:56.973827Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:56.973871Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:56.973925Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:56.973963Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:52:56.973999Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.974327Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:56.974371Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:56.974410Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:56.974460Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.974508Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:56.974537Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:56.974567Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:56.974598Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:56.974626Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:56.976028Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:56.976077Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:56.986799Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:56.986874Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:56.986907Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:56.986955Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:56.987044Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:57.143837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.143895Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:57.143940Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:57.144400Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:57.144450Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:57.144684Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:57.144738Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:57.144777Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:57.144815Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:57.152778Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:57.152861Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:57.153827Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.153879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:57.153962Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... ssTransaction::Execute at 72075186224037889 2025-04-17T17:55:02.986446Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:55:02.986483Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:55:02.986525Z node 16 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-04-17T17:55:02.986561Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-04-17T17:55:02.986598Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:55:02.986630Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-04-17T17:55:02.986659Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-04-17T17:55:02.986696Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-04-17T17:55:02.986826Z node 16 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-17T17:55:02.986877Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:55:02.986904Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-04-17T17:55:02.986931Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:55:02.986959Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:55:02.987002Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-04-17T17:55:02.987047Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-04-17T17:55:02.987096Z node 16 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-04-17T17:55:02.987143Z node 16 
:TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:55:02.987167Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:55:02.987194Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-17T17:55:02.987222Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-17T17:55:02.987352Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-17T17:55:02.987409Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-17T17:55:02.987455Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-17T17:55:02.987499Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-17T17:55:02.987531Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:55:02.987556Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-17T17:55:02.987584Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-04-17T17:55:02.989247Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:55:02.989478Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-04-17T17:55:02.989518Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-04-17T17:55:02.989567Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:55:02.989620Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:55:02.989659Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-17T17:55:02.989690Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:55:02.989722Z node 16 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-04-17T17:55:02.989766Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:55:02.989811Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T17:55:02.989854Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T17:55:02.989904Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T17:55:03.013079Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-17T17:55:03.013251Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T17:55:03.013349Z 
node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-17T17:55:03.013472Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [16:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:55:03.013572Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:55:03.013988Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-17T17:55:03.014045Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:55:03.014081Z node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-17T17:55:03.014125Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [16:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:55:03.014165Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:55:03.016300Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:593:2518], Recipient [16:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2025-04-17T17:55:03.016536Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:55:03.016681Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-17T17:55:03.016882Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:55:03.016959Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:55:03.017034Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:55:03.017108Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:55:03.017162Z node 16 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-17T17:55:03.017229Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:55:03.017264Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:55:03.017289Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:55:03.017322Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:55:03.017503Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2025-04-17T17:55:03.017584Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-17T17:55:03.017883Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is 
Executed 2025-04-17T17:55:03.017917Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:55:03.017944Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:55:03.017975Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:55:03.018028Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-17T17:55:03.018058Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:55:03.018099Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-17T17:55:03.019419Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:55:03.019681Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:55:03.021171Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [16:1069:2853], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanStarted 2025-04-17T17:55:03.021282Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [16:1069:2853], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanFinished 2025-04-17T17:55:03.021707Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [16:666:2570], Recipient [16:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:55:03.021768Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:55:03.021883Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:55:03.021969Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:55:03.022055Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:55:03.022124Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:55:03.022214Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:55:03.022295Z node 16 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:55:03.022407Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> KqpSysColV0::InnerJoinTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 25161, MsgBus: 22968 2025-04-17T17:55:00.380215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342123385953014:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:00.380350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c19/r3tmp/tmpGzRKnP/pdisk_1.dat 2025-04-17T17:55:00.711186Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25161, node 1 2025-04-17T17:55:00.739637Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:00.739784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:00.744983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:00.769356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:00.769383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:00.769398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:00.769536Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22968 TClient is connected to server localhost:22968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:01.239301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:01.271749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:01.420734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:01.582470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:55:01.662025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.470283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342136270856663:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:03.470380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:03.785879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.822325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.856626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.895112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.939542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.982656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.024536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.116388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342140565824624:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.116473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.116752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342140565824629:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.120503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:04.134943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342140565824631:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:04.191646Z node 1 :TX_PROXY ERROR: Actor# [1:7494342140565824688:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:05.382410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342123385953014:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:05.382470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:05.534546Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912505509, txId: 281474976710672] shutting down >> KqpSysColV0::InnerJoinSelect [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-04-17T17:54:53.269239Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.269389Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.288082Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.288420Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.314834Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.320933Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=15368131784739963561, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-04-17T17:54:53.321146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=15368131784739963561) 2025-04-17T17:54:53.321878Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:139:2163], cookie=14687953426528053890, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-04-17T17:54:53.322028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:139:2163], cookie=14687953426528053890) 2025-04-17T17:54:53.322475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:142:2166], cookie=10904607324312367320, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-04-17T17:54:53.322681Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-04-17T17:54:53.336944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:142:2166], cookie=10904607324312367320) 2025-04-17T17:54:53.337655Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2171], cookie=12748573708117144243, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-04-17T17:54:53.337901Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-04-17T17:54:53.354451Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2171], cookie=12748573708117144243)
2025-04-17T17:54:53.669783Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.669852Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.684821Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.684926Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.717028Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.717367Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:132:2158], cookie=2990690299807558695, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-17T17:54:53.717625Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:53.731269Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:132:2158], cookie=2990690299807558695) 2025-04-17T17:54:53.732025Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:142:2166], cookie=15154271917528322943, path="/Root/Res", config={ }) 2025-04-17T17:54:53.732287Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:54:53.747950Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:142:2166], cookie=15154271917528322943) 2025-04-17T17:54:53.749754Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2171]. Cookie: 10513563358262091609. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:53.749830Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2171], cookie=10513563358262091609) 2025-04-17T17:54:53.750348Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2171]. Cookie: 9559459578906732378.
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-17T17:54:53.750391Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:147:2171], cookie=9559459578906732378) 2025-04-17T17:54:55.899315Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:55.899424Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:55.915912Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:55.916018Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:55.930711Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:55.931138Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:130:2156], cookie=3863890648042158301, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:54:55.931472Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:55.953772Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:130:2156], cookie=3863890648042158301) 2025-04-17T17:54:55.954690Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:140:2164], cookie=7859414987557283804, path="/Root/Res", config={ }) 2025-04-17T17:54:55.954906Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-17T17:54:55.972729Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:140:2164], cookie=7859414987557283804) 2025-04-17T17:54:55.973580Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:145:2169]. Cookie: 17205403680230539303. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:55.973635Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:145:2169], cookie=17205403680230539303) 2025-04-17T17:54:55.974223Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:145:2169]. Cookie: 17476680615433071428. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1018000 } } 2025-04-17T17:54:55.974270Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:145:2169], cookie=17476680615433071428) 2025-04-17T17:54:58.118270Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.118386Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.130250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.130360Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.144063Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.144471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=8888980851595388259, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:54:58.144761Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:54:58.167173Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=8888980851595388259) 2025-04-17T17:54:58.168108Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 17133486498157655675. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:54:58.168166Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=17133486498157655675) 2025-04-17T17:54:58.168551Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 13561919361515447975. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-17T17:54:58.168590Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=13561919361515447975) 2025-04-17T17:54:58.168866Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 3056480758097588618. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-17T17:54:58.168900Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=3056480758097588618) 2025-04-17T17:55:00.173433Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:00.173534Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:00.193131Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:00.193528Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:00.217621Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:00.218021Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=7690014700268061894, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-17T17:55:00.218337Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:55:00.230331Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=7690014700268061894) 2025-04-17T17:55:00.231167Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:142:2166]. Cookie: 13022967413267028744. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:55:00.231226Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:142:2166], cookie=13022967413267028744) 2025-04-17T17:55:00.231751Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:142:2166]. Cookie: 5581451607268284437. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-04-17T17:55:00.231795Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:142:2166], cookie=5581451607268284437) 2025-04-17T17:55:02.669557Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:173:2190]. Cookie: 853816218589473047. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:55:02.669851Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:173:2190], cookie=853816218589473047) 2025-04-17T17:55:02.671133Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:173:2190]. Cookie: 10179197302714130463. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-04-17T17:55:02.671223Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:173:2190], cookie=10179197302714130463) 2025-04-17T17:55:04.867699Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:200:2216]. Cookie: 10830256803468306972. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-17T17:55:04.867766Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:200:2216], cookie=10830256803468306972) 2025-04-17T17:55:04.868273Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:200:2216]. Cookie: 5328913511891707528. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-04-17T17:55:04.868316Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:200:2216], cookie=5328913511891707528) |80.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |80.2%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 17430, MsgBus: 20621 2025-04-17T17:55:01.005894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342128600505063:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:01.006044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c07/r3tmp/tmpmaZWIi/pdisk_1.dat 2025-04-17T17:55:01.339871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17430, node 1 2025-04-17T17:55:01.391555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:01.391729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:01.393902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:01.412711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:01.412754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:01.412764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:01.412897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20621 TClient is connected to server localhost:20621 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:01.934523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:01.959015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.116286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.281549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.357888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:04.059656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342141485408712:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.059813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.420410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.455141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.494102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.588167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.644685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.679921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.716855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.793747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342141485409374:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.793846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.794103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342141485409379:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.797786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:04.809290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342141485409381:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:04.883983Z node 1 :TX_PROXY ERROR: Actor# [1:7494342141485409438:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:06.006245Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342128600505063:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:06.006308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> KqpSystemView::QueryStatsScan [GOOD] >> KqpSystemView::NodesSimple [GOOD] >> KqpScan::ScanDuringSplit10 |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |80.2%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> KqpSysColV1::InnerJoinTables [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> KqpSysColV1::StreamSelectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 31774, MsgBus: 27567 2025-04-17T17:55:01.645059Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342126293427500:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:01.646121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c00/r3tmp/tmpSRCtne/pdisk_1.dat 2025-04-17T17:55:02.038260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:02.049004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:02.049166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:02.050507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31774, node 1
2025-04-17T17:55:02.215988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:02.216046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:02.216059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:02.216189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27567 TClient is connected to server localhost:27567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:02.790397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.811489Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:02.829380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:02.987331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.169520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.245010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:04.991020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342139178331159:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.991147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.345387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.424586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.468913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.498568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.530381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.568682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.639295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.720917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342143473299125:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.720999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.721271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342143473299130:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.724931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:05.735970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342143473299132:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:05.794293Z node 1 :TX_PROXY ERROR: Actor# [1:7494342143473299186:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:06.645432Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342126293427500:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:06.645512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink |80.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 3964, MsgBus: 1063 2025-04-17T17:55:01.831806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342127964657377:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:01.831864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf8/r3tmp/tmptTrimd/pdisk_1.dat 2025-04-17T17:55:02.220925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:02.248758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:02.248828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3964, node 1 2025-04-17T17:55:02.250016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:02.322431Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:02.322454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:02.322460Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:02.322564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1063 TClient is connected to server localhost:1063 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:02.909486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.925583Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:02.933150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.116693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.291401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.357946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.171068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342145144528346:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.171188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.487562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.515198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.543423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.574584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.606705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.675338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.743883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.823272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342145144529014:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.823349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.823462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342145144529019:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.827162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:05.838561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342145144529021:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:05.919477Z node 1 :TX_PROXY ERROR: Actor# [1:7494342145144529081:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:06.832586Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342127964657377:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:06.832645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:07.731376Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912507205, txId: 281474976710672] shutting down 2025-04-17T17:55:07.861227Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912507853, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 20087, MsgBus: 15478 2025-04-17T17:55:00.852415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342125684419114:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:00.856171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:00.883923Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342123347317131:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:00.884123Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:00.915109Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342124312391066:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:00.915966Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c11/r3tmp/tmpYjV5AK/pdisk_1.dat 2025-04-17T17:55:01.268740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:01.271423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:01.271688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:01.271750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:01.281402Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:01.281454Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 
2025-04-17T17:55:01.282538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:01.282811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:01.313594Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:01.324839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:01.324917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:01.336625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20087, node 1 2025-04-17T17:55:01.346607Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:55:01.346632Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:55:01.410523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:01.410550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:01.410564Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:01.410709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15478 TClient is connected to server localhost:15478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:02.098905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.160431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.420068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:02.678800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:02.844948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:04.864590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342142864290352:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:04.864765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.245271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.351705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.456846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.518288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.607924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.689953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.754140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:05.827835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342147159258504:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.827992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.828358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342147159258509:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.832642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:05.852144Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342125684419114:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:05.852223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:05.855366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342147159258511:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:05.883383Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342123347317131:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:05.883471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:05.897756Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494342124312391066:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:05.897844Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:05.917647Z node 1 :TX_PROXY ERROR: Actor# [1:7494342147159258599:4325] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:07.044466Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912507037, txId: 281474976710672] shutting down 2025-04-17T17:55:07.225784Z node 2 :BS_PROXY_PUT ERROR: [d966b0ec3563222e] Result# TEvPutResult {Id# [72075186224037917:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037917:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-17T17:55:07.225775Z node 3 :BS_PROXY_PUT ERROR: [e1377409f3822873] Result# TEvPutResult {Id# [72075186224037913:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037913:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> KqpSystemView::ReadSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 14816, MsgBus: 16355 2025-04-17T17:55:02.463318Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342130615775210:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:02.463401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf3/r3tmp/tmpYHzppf/pdisk_1.dat 2025-04-17T17:55:02.822017Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14816, node 1
2025-04-17T17:55:02.891299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:02.891410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:02.904118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:02.938898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:02.938922Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:02.938941Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:02.939232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16355 TClient is connected to server localhost:16355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:03.474836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.515446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.653138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:03.816720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:55:03.893721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.859901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342143500678795:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.860030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.195488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.225875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.252799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.280065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.313385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.380407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.428651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.492124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342147795646756:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.492212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.492521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342147795646761:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.497119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:06.506285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342147795646763:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:06.597804Z node 1 :TX_PROXY ERROR: Actor# [1:7494342147795646820:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:07.461829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342130615775210:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:07.472667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::NodesRange1 >> KqpScan::RemoteShardScan >> KqpSystemView::PartitionStatsSimple >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |80.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:31.935532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:31.935629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.935669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:31.935757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:31.935804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:31.935878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:31.935973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:31.936038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:31.936330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:32.017927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:32.017978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:32.031618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:32.031897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:32.032003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:32.039178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:32.039305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:32.039824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.040022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:32.042340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.043299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.043371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T17:54:32.043496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:32.043543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.043574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:32.043667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.048945Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:32.159387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:32.159692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.159931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:32.160163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:32.160222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.162695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.162837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:32.163024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.163083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:32.163123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:32.163162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:32.164991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.165048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:32.165088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:32.167622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.167697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.167751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.167812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.171798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:32.174373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:32.174693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:32.176003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:32.176207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:32.176264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.176615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:32.176688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:32.176903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:32.177330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:32.181555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:32.181623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:32.181879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:32.181932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:32.182300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:32.182352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:32.182445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.182474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.182505Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:32.182539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.182595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:32.182656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:32.182685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:32.182709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:32.182769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:32.182827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:32.182877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:32.184591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.184702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:32.184739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... : } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20983 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3B431D0A-B908-4CAD-9C22-8088D8577C06 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-04-17T17:55:09.883678Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3459:5423], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-04-17T17:55:09.883873Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3458:5422] 2025-04-17T17:55:09.884053Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3459:5423], sender# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20983 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 702F5009-B8A4-4AC2-B7CA-8E58D27B0839 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-04-17T17:55:09.887375Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3459:5423], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-04-17T17:55:09.887612Z node 1 
:DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3458:5422] 2025-04-17T17:55:09.887715Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3459:5423], sender# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20983 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B8D3E67F-D944-4751-8757-3CA6715E5CF7 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-04-17T17:55:09.897992Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3459:5423], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-17T17:55:09.898039Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3459:5423], success# 1, error# , multipart# 1, uploadId# 1 2025-04-17T17:55:09.907434Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3459:5423], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae2553
9fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20983 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C1E44BF4-2E09-4905-84B0-173AC5BC7FD9 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-04-17T17:55:09.916362Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3459:5423], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-04-17T17:55:09.916718Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:55:09.937918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:09.937991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:55:09.938155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:09.938274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:09.938329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left 
await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:09.938385Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:09.938424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:55:09.938461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:55:09.938632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:09.955635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:09.956279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:09.956331Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:55:09.956435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:55:09.956471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:09.956510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:55:09.956539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:09.956597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:55:09.956679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T17:55:09.956738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:09.956776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:55:09.956806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:55:09.956933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:55:09.964504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:55:09.964569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3444:5409] TestWaitNotification: OK eventTxId 102 >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> 
TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PDisksFields [GOOD] Test command err: 2025-04-17T17:50:48.494619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341043167609013:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:48.494672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000507/r3tmp/tmpiZNvyu/pdisk_1.dat 2025-04-17T17:50:48.984092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:48.984204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:48.986546Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:48.988616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19539, node 1 2025-04-17T17:50:49.110752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:49.110777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:49.110796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:49.110906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:49.434498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:49.500341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:49.524652Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494341047907707355:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:49.524813Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:49.535571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:49.561779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:49.561886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:49.565190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:49.565242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:49.566114Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T17:50:49.570424Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T17:50:49.587561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:49.595322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:49.833707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:49.885797Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341046032297314:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:49.886607Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:49.898622Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341046139942069:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:49.898699Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:50:49.928582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:50:50.031002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:50.031073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:50.039790Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:50:50.047025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:50.051105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:50.051177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:50.057640Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:50:50.058897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:53.502321Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341043167609013:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:53.502386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:53.642738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:50:53.883463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341064642446870:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:53.883625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:53.883750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341064642446880:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:53.888690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-17T17:50:53.979599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341064642446884:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-17T17:50:54.049775Z node 1 :TX_PROXY ERROR: Actor# [1:7494341068937414264:3018] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:54.530283Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494341047907707355:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:54.619860Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:54.739923Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2e00x9dc0ztdj311rh9jca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY1MGRkYzgtNTQwOGNjYmUtZTBhOWQ0MmEtNTRlNjVmZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:54.798461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:50:54.886540Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494341046032297314:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:54.886598Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:54.899155Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494341046139942069:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:54.899208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:50:55.170266Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2e023n08y1gq0ds7k7r6n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY1MGRkYzgtNTQwOGNjYmUtZTBhOWQ0MmEtNTRlNjVmZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:50:55.193293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:50:55.427510Z node 1 :KQP_EXECUTER ERROR: TxId: 2814749 ... ve access permissions } 2025-04-17T17:53:37.980159Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:37.985231Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:53:38.018426Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [31:7494341767391616010:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:53:38.107963Z node 31 :TX_PROXY ERROR: Actor# [31:7494341771686583375:2717] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:38.211838Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e515s3txbnvaexewydz6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=MTEyZDQ4YzAtMWFmNTIxNzAtODU0ODk4MzctNDMxYzgxZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:38.392780Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e51dnc5hjw1gdw1c9tep9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=MjFkZTk2MjEtMTVmY2E5OTMtYjFiNzY4ZWYtNDVkNWE3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:38.398168Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7494341771686583448:2365], owner: [31:7494341771686583445:2363], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T17:53:38.404413Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7494341771686583448:2365], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:53:38.404972Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7494341771686583448:2365], row count: 1, finished: 1 2025-04-17T17:53:38.405037Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7494341771686583448:2365], owner: [31:7494341771686583445:2363], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T17:53:38.408246Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912418391, txId: 281474976715662] shutting down 2025-04-17T17:53:38.694829Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2e51ks824r6pyxqcavvtd3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=OGY4MDljNTgtY2MzZGIyNTQtM2U1ZTlkMS02N2IzNmVlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:53:38.699558Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7494341771686583485:2375], owner: [31:7494341771686583482:2373], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T17:53:38.703433Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7494341771686583485:2375], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:53:38.704213Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7494341771686583485:2375], row count: 1, finished: 1 2025-04-17T17:53:38.704277Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7494341771686583485:2375], owner: [31:7494341771686583482:2373], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T17:53:38.709117Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912418683, txId: 281474976715664] shutting down greater-or-equal assertion failed at ydb/core/sys_view/ut_kqp.cpp:586, void NKikimr::NSysView::(anonymous namespace)::TYsonFieldChecker::Uint64GreaterOrEquals(ui64): value.AsUint64() >= expected TBackTrace::Capture()+28 (0x1B9C416C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1BE81A70) ??+0 (0x1B42A12C) NKikimr::NSysView::NTestSuiteSystemView::TTestCasePartitionStatsFields::Execute_(NUnitTest::TTestContext&)+8701 (0x1B44012D) std::__y1::__function::__func, void ()>::operator()()+280 (0x1B582D08) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1BEB8C66) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1BE885F9) NKikimr::NSysView::NTestSuiteSystemView::TCurrentTest::Execute()+1204 (0x1B581BB4) NUnitTest::TTestFactory::Execute()+2438 (0x1BE89EC6) NUnitTest::RunMain(int, char**)+5213 (0x1BEB31DD) ??+0 (0x7F0B6BC35D90) __libc_start_main+128 (0x7F0B6BC35E40) _start+41 (0x18B81029) 2025-04-17T17:53:42.340454Z node 36 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[36:7494341787454108518:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:42.340523Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000507/r3tmp/tmpd8s09Q/pdisk_1.dat 2025-04-17T17:53:42.577618Z node 36 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:42.601871Z node 36 :HIVE WARN: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:42.601992Z node 36 :HIVE WARN: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:42.603777Z node 36 :HIVE WARN: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5814, node 36 2025-04-17T17:53:42.696389Z node 36 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:42.696410Z node 36 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:42.696420Z node 36 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:42.696593Z node 36 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:5591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:43.160209Z node 36 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:47.346324Z node 36 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[36:7494341787454108518:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:47.346417Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:53:48.234652Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [36:7494341813223912947:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.234777Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.235568Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [36:7494341813223912971:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:53:48.242769Z node 36 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:53:48.278131Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [36:7494341813223912973:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:53:48.363091Z node 36 :TX_PROXY ERROR: Actor# [36:7494341813223913026:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:53:48.694010Z node 36 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2e568xes8ks37c7pjq9t67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=36&id=ZGM4Njg1YmUtNDJmZTQ3NTctNjJiNGNhOTItZjEyY2EwYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:53:48.696775Z node 36 :SYSTEM_VIEWS INFO: Scan started, actor: [36:7494341813223913063:2346], owner: [36:7494341813223913059:2344], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-17T17:53:48.697291Z node 36 :SYSTEM_VIEWS INFO: Scan prepared, actor: [36:7494341813223913063:2346], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:53:48.710480Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7494341813223913063:2346], row count: 1, finished: 1 2025-04-17T17:53:48.710566Z node 36 :SYSTEM_VIEWS INFO: Scan finished, actor: [36:7494341813223913063:2346], owner: [36:7494341813223913059:2344], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-17T17:53:48.713531Z node 36 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912428691, txId: 281474976710660] shutting down >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:33.640761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:33.640859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:33.640929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:33.640960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:33.640999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:33.641024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:33.641071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:33.641139Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:33.641416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:33.711755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:33.711817Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:33.726278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:33.726717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:33.726880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:33.733845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:33.733976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:33.734490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.734669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:33.737323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.738574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:33.738657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.738840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:33.738889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:33.738925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:33.739082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.748381Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:33.864031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:33.864282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.864504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:33.864759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:33.864825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T17:54:33.870916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.871096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:33.871314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.871380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:33.871416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:33.871448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:33.873533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.873590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:33.873629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:33.875559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.875602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.875635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:33.875674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.878961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:33.880993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:33.881207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:33.882011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.882157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:33.882223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:33.882497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-04-17T17:54:33.882550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:33.882735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:33.882811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:33.889685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:33.889742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:33.890040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.890104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:33.890536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.890587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:33.890681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:33.890713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.890747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:33.890780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.890827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:33.890871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.890901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:33.890924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:33.891004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:33.891049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:33.891107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:33.893059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:33.893204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:33.893242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:12.083936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:55:12.084101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:12.087590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:55:12.087704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T17:55:12.088377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:12.088509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:12.088564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:55:12.088668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T17:55:12.088797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:55:12.104880Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3459:5423], attempt# 0 2025-04-17T17:55:12.127277Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3459:5423], sender# [1:3458:5422] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:21938 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41BE0E08-709D-4A91-975C-3DA98A3134DE amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T17:55:12.139938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:12.139996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:55:12.140269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-17T17:55:12.140317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:55:12.141192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:12.141272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:12.142451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:12.142538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:12.142566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:55:12.142609Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:55:12.142660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:55:12.142739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:55:12.143419Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3459:5423], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:21938 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FAA4D272-6BB3-41C4-AC1E-757F6FF44C4F amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-17T17:55:12.149587Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3459:5423], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-17T17:55:12.149930Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3458:5422] 2025-04-17T17:55:12.150488Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3459:5423], sender# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21938 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2A6B0833-2BF2-45AD-A5BD-AC0AB8E10A6F amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-04-17T17:55:12.156524Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3459:5423], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-04-17T17:55:12.156589Z node 1 
:DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3459:5423], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-17T17:55:12.156997Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:55:12.168684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:55:12.205320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:12.205402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:55:12.205586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:12.205736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:12.205813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:12.205868Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:12.205916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:55:12.205962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:55:12.206129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:12.211350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:12.211816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:12.211862Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:55:12.211963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:55:12.211992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:12.212026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2025-04-17T17:55:12.212052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:12.212083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:55:12.212153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T17:55:12.212197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:12.212230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:55:12.212253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:55:12.212371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:55:12.223323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:55:12.223395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3444:5409] TestWaitNotification: OK eventTxId 102 >> TColumnShardTestSchema::ColdTiers [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TTopicReaderTests::TestRun_ReadOneMessage |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:54:33.458156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:54:33.462522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:33.462595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:54:33.462666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:54:33.462794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:54:33.462824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:54:33.462923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:54:33.463008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:54:33.463347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:33.560583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:54:33.560648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:33.575081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:33.575467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:54:33.575605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:54:33.588220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:54:33.588363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:54:33.588841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.589020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:54:33.595673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.596701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:33.596761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.596900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:54:33.596952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:33.596994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:54:33.597143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.605375Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:54:33.717826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:54:33.718013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.718214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:54:33.718422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:54:33.718483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.724699Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.724824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:54:33.725046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.725095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:54:33.725127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:54:33.725153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:54:33.729883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.729948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:54:33.729983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:54:33.733281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.733359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.733406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:33.733479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.737924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:54:33.742639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:54:33.742900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:54:33.744099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:54:33.744317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:54:33.744394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:33.744738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:54:33.744816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:54:33.745015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:54:33.745131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:54:33.749412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:54:33.749491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:54:33.749774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:54:33.749845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:54:33.750219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:54:33.750270Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:54:33.750370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:33.750404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.750436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:54:33.750469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.750517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:54:33.750565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:54:33.750595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:54:33.750622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:54:33.750679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:54:33.750725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:54:33.750781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:54:33.752700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:33.752821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:54:33.752867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:22150 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7BC55444-33C8-4ECF-837C-2161FBBBC278 amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-04-17T17:55:12.724031Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3459:5423], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-04-17T17:55:12.724436Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3458:5422] 2025-04-17T17:55:12.724657Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3459:5423], sender# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:22150 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 094A4188-0048-4B28-AC9C-2719B95C022E amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-04-17T17:55:12.729704Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3459:5423], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-04-17T17:55:12.729907Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3458:5422] 2025-04-17T17:55:12.729995Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3459:5423], sender# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:22150 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1C594BD4-CAA4-4CEB-88EC-19DD364AC1B2 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-04-17T17:55:12.733380Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3459:5423], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-17T17:55:12.733456Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3459:5423], success# 1, error# , multipart# 1, uploadId# 1 2025-04-17T17:55:12.740196Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3459:5423], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:22150 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
F484AC7B-0212-4CC1-9E83-5125A6EE9450 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-04-17T17:55:12.749152Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3459:5423], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-04-17T17:55:12.749681Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3458:5422], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-17T17:55:12.764892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:12.764956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T17:55:12.765106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:12.765224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-17T17:55:12.765287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:12.765320Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:12.765369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T17:55:12.765415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T17:55:12.765589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:12.776834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:12.777502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:12.777548Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:55:12.777647Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2025-04-17T17:55:12.777675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:12.777709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:55:12.777733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:12.777763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:55:12.777833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T17:55:12.777871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:12.777901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:55:12.777924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:55:12.778035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:55:12.787352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:55:12.787430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3444:5409] TestWaitNotification: OK eventTxId 102 |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913055.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913055.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913055.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913055.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913055.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913055.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124911855.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913055.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913055.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124911855.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124911855.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124911855.000000s;Name=;Codec=}; 2025-04-17T17:54:15.859580Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:54:15.982766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:54:16.021327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:54:16.021625Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:54:16.032904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:54:16.033136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:54:16.033395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:54:16.033549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:54:16.033672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:54:16.033791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:54:16.033918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:54:16.034041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:54:16.034166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:54:16.034507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:54:16.034658Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:54:16.034771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:54:16.082519Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:54:16.082692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:54:16.082784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:54:16.082962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:16.083128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:54:16.083210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:54:16.083255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:54:16.083346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:54:16.083402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:54:16.083447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:54:16.083478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:54:16.083625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:16.083699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:54:16.083738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:54:16.083765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:54:16.083845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:54:16.083895Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:54:16.083936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:54:16.083963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:54:16.084030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:54:16.084063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:54:16.084089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:54:16.084153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:54:16.084189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:54:16.084218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:54:16.084612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-17T17:54:16.084693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-17T17:54:16.084768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-17T17:54:16.084839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-17T17:54:16.085038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:54:16.085108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:54:16.085149Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:54:16.085338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:54:16.085390Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:54:16.085429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:54:16.085577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:54:16.085624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 37184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-17T17:55:14.353715Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912204118;tx_id=18446744073709551615;;current_snapshot_ts=1744912457021; 2025-04-17T17:55:14.353794Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:55:14.353877Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:55:14.353963Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:55:14.354116Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:55:14.354464Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744912504118:max} readable: {1744912504118:max} at tablet 9437184 2025-04-17T17:55:14.354634Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T17:55:14.354840Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912504118:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T17:55:14.354919Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912504118:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T17:55:14.355460Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912504118:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T17:55:14.355624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912504118:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T17:55:14.356279Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912504118:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1386:3391];trace_detailed=; 2025-04-17T17:55:14.356872Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T17:55:14.357163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T17:55:14.357459Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:14.359060Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:14.359685Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:55:14.359827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-17T17:55:14.359978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:14.360032Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1386:3391] finished for tablet 9437184 2025-04-17T17:55:14.360413Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1385:3390];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1744912514356178,"name":"_full_task","f":1744912514356178,"d_finished":0,"c":0,"l":1744912514360090,"d":3912},"events":[{"name":"bootstrap","f":1744912514356489,"d_finished":2645,"c":1,"l":1744912514359134,"d":2645},{"a":1744912514359651,"name":"ack","f":1744912514359651,"d_finished":0,"c":0,"l":1744912514360090,"d":439},{"a":1744912514359620,"name":"processing","f":1744912514359620,"d_finished":0,"c":0,"l":1744912514360090,"d":470},{"name":"ProduceResults","f":1744912514357355,"d_finished":2057,"c":2,"l":1744912514360017,"d":2057},{"a":1744912514360021,"name":"Finish","f":1744912514360021,"d_finished":0,"c":0,"l":1744912514360090,"d":69}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:14.360471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1385:3390];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:55:14.360793Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1385:3390];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":1744912514356178,"name":"_full_task","f":1744912514356178,"d_finished":0,"c":0,"l":1744912514360510,"d":4332},"events":[{"name":"bootstrap","f":1744912514356489,"d_finished":2645,"c":1,"l":1744912514359134,"d":2645},{"a":1744912514359651,"name":"ack","f":1744912514359651,"d_finished":0,"c":0,"l":1744912514360510,"d":859},{"a":1744912514359620,"name":"processing","f":1744912514359620,"d_finished":0,"c":0,"l":1744912514360510,"d":890},{"name":"ProduceResults","f":1744912514357355,"d_finished":2057,"c":2,"l":1744912514360017,"d":2057},{"a":1744912514360021,"name":"Finish","f":1744912514360021,"d_finished":0,"c":0,"l":1744912514360510,"d":489}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1386:3391]->[1:1385:3390] 2025-04-17T17:55:14.360868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:55:14.355582Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T17:55:14.360908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:55:14.360997Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> KqpSysColV1::StreamSelectRange [GOOD] |80.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |80.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |80.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 31371, MsgBus: 13152 2025-04-17T17:55:09.523308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342163240227792:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:09.524102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001be3/r3tmp/tmpEYMhwe/pdisk_1.dat 2025-04-17T17:55:09.924972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31371, node 1 2025-04-17T17:55:09.979047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:09.982138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:09.992555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:10.022657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:10.022671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:10.022676Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:10.022774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13152 TClient is connected to server localhost:13152 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:10.636042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:10.650064Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:10.662971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:55:10.814537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:10.999011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:55:11.069674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:12.949050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342176125131464:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:12.949166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.262459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.335819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.367512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.437207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.530316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.621671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.693503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.768837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342180420099439:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.768948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.769235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342180420099445:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.773485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:13.794853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:55:13.795116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342180420099447:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:13.891599Z node 1 :TX_PROXY ERROR: Actor# [1:7494342180420099504:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:14.526386Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342163240227792:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:14.526443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:15.335229Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912515367, txId: 281474976710672] shutting down >> KqpSystemView::ReadSuccess [GOOD] |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |80.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> ReadSessionImplTest::ReconnectOnTmpError >> TMonitoringTests::ValidActorId >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure >> TMonitoringTests::ValidActorId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 28103, MsgBus: 23513 2025-04-17T17:55:09.186555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342161298498054:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:09.187121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001be4/r3tmp/tmph5ZqGB/pdisk_1.dat 2025-04-17T17:55:09.619830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:09.633484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:09.633603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:09.639235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28103, node 1 2025-04-17T17:55:09.710312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:09.710340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:09.710348Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:09.710508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23513 TClient 
is connected to server localhost:23513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:10.353957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:10.395144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:10.541434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:10.700834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:10.787833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:12.731078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342174183401720:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:12.731195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.088662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.128403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.201715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.244886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.281206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.328848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.365790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.467085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342178478369687:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.467186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.467243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342178478369692:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:13.471182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:13.489058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342178478369694:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:13.598638Z node 1 :TX_PROXY ERROR: Actor# [1:7494342178478369751:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:14.189503Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342161298498054:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:14.189574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:15.607986Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912515626, txId: 281474976710672] shutting down >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: 2025-04-17T17:52:19.741047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341433054748351:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:19.741101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00082b/r3tmp/tmpq2L0JT/pdisk_1.dat 2025-04-17T17:52:20.702682Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:20.728937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:20.729029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:20.756103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:20.758323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 6671, node 1 2025-04-17T17:52:21.175635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:52:21.175663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:52:21.175672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:52:21.175790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:52:21.789691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:52:24.743608Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341433054748351:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:52:24.743701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:52:35.674884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:52:35.674922Z node 1 :IMPORT WARN: Table profiles were not loaded >> Compression::WriteRAW >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> KqpSystemView::PartitionStatsSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 16439, MsgBus: 16266 2025-04-17T17:55:10.554695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342168755870324:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:10.558596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bd9/r3tmp/tmpNbb6N9/pdisk_1.dat 2025-04-17T17:55:11.034295Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:11.044475Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:11.044620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:11.049557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16439, node 1 2025-04-17T17:55:11.148773Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:11.148792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:11.148800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:11.148912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16266 TClient is connected to server localhost:16266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:11.948615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:11.979820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:12.185835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:12.393142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:12.468460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:14.237347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342185935741273:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:14.237448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:14.656735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:14.726046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:14.760279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:14.849290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:14.920991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:14.980640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:15.023120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:15.097995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342190230709235:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:15.098080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:15.098323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342190230709240:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:15.102565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:15.119056Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:55:15.119289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342190230709242:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:15.216307Z node 1 :TX_PROXY ERROR: Actor# [1:7494342190230709298:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:15.559983Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342168755870324:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:15.586923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:16.305487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:55:16.463102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2e817p9tavbbxnk3rasvz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTUxNGQxZjktYzk3YTFmYjUtOTk2NWIxZjQtNTJlODQ3Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:16.476822Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912516458, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-17T17:55:17.668779Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.668806Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.668826Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:17.682554Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-17T17:55:17.682617Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.682654Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.684107Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005066s 2025-04-17T17:55:17.685175Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:17.692003Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T17:55:17.692129Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.693332Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.693354Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.693375Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:17.706514Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-17T17:55:17.706570Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.706597Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.706661Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007222s 2025-04-17T17:55:17.708085Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:17.708962Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T17:55:17.709050Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.710191Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.710212Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.710231Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:17.710590Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-17T17:55:17.710624Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.710652Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.710724Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.281954s 2025-04-17T17:55:17.716619Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:17.723561Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T17:55:17.723668Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.724836Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.724859Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.724883Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:17.725224Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-17T17:55:17.725269Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.725293Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.725358Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.256362s 2025-04-17T17:55:17.731306Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:17.731767Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T17:55:17.731853Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.778346Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.778378Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.778398Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:17.786700Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:17.787129Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:17.806833Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.807499Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-17T17:55:17.807556Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.807577Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.807638Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.236481s 2025-04-17T17:55:17.814938Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-17T17:55:17.857158Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.857184Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.857205Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:17.857685Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:17.863985Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:17.864206Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.864715Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:17.967155Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:17.967410Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:55:17.967479Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:17.967533Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-17T17:55:17.967622Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T17:55:18.070417Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T17:55:18.070572Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-17T17:55:18.071743Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.071765Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.071783Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:18.078474Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:18.090737Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:18.090910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.093172Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:18.199211Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.201280Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:55:18.201355Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:18.201391Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-17T17:55:18.201476Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-17T17:55:18.201586Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T17:55:18.201881Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-17T17:55:18.201992Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T17:55:18.202096Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-17T17:55:18.317328Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.317391Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.317435Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:18.317877Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:18.318572Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:18.338922Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.340406Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:18.342203Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.342226Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.342246Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:18.343368Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:18.345674Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:18.345850Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.346036Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:18.346404Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T17:55:18.348742Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.348767Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.348787Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:18.349122Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:55:18.349793Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:18.350313Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.351489Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:18.352300Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.352633Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:18.352745Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:18.352810Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:55:18.354023Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.354056Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.354091Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:18.354440Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:18.355047Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:18.355158Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.355463Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-04-17T17:55:18.356503Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:18.356730Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:55:18.357113Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:55:18.357338Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:55:18.357432Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:18.357462Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:55:18.357494Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:55:18.357638Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). 
Partition stream id: 1 Getting new event 2025-04-17T17:55:18.357671Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:55:18.357695Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:55:18.357715Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:18.357838Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-17T17:55:18.357961Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-17T17:55:18.357982Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-17T17:55:18.358003Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:55:18.358077Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-17T17:55:18.358104Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-17T17:55:18.358126Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-17T17:55:18.358146Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:18.358257Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). 
Partition stream id: 1 2025-04-17T17:55:18.359700Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.359737Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.359760Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:18.360178Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:18.360675Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:18.360838Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.361086Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-17T17:55:18.362067Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:18.362304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:55:18.362647Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:55:18.362860Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:55:18.362959Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:18.362989Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:55:18.363010Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:55:18.363029Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:55:18.363058Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:18.363290Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). 
Partition stream id: 1 Getting new event 2025-04-17T17:55:18.363382Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-17T17:55:18.363402Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-17T17:55:18.363419Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-17T17:55:18.363459Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-17T17:55:18.363482Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:18.363673Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-04-17T17:55:18.364998Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.365023Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.365050Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:18.366932Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:18.367612Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:18.367786Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:18.370429Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:18.371578Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:18.372331Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:18.373168Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-17T17:55:18.373310Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:55:18.373430Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-17T17:55:18.373471Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:55:18.373494Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-17T17:55:18.373512Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-17T17:55:18.373554Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-17T17:55:18.373581Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-17T17:55:18.373754Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-17T17:55:18.373921Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges |80.4%| [TA] $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::QueryStatsRetries [GOOD] Test command err: 2025-04-17T17:50:30.481716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494340963564952070:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:30.482647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000890/r3tmp/tmptSMdbh/pdisk_1.dat 2025-04-17T17:50:30.837077Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24302, node 1 2025-04-17T17:50:30.850169Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:30.850221Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:50:30.869346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:30.869465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:30.872069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:30.878843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:30.878866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:30.878872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:30.879002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24970 TClient is connected to server localhost:24970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:31.341477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
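[Editorial note] The ReadSessionImplTest output above traces the client-side read loop end to end: confirm partition stream create, decompress, hand data to the application ("The application data is transferred to the client"), commit offset ranges ("Commit offsets [a, b)"), and confirm partition stream destroy. For orientation only, a minimal consumer sketch is shown below. It is not taken from the test source: it assumes the standalone ydb-cpp-sdk NYdb::NTopic API (the test above exercises the older persqueue_public flavor), the endpoint, database, and consumer name are placeholders, and header paths and return types (std::optional vs TMaybe) vary between SDK releases, so every name here should be verified against the SDK version actually in the tree.

    #include <ydb-cpp-sdk/client/topic/client.h>  // assumption: standalone SDK header layout

    #include <iostream>
    #include <variant>

    int main() {
        // Placeholder connection parameters, not taken from the log above.
        auto config = NYdb::TDriverConfig()
            .SetEndpoint("grpc://localhost:2136")
            .SetDatabase("/Root");
        NYdb::TDriver driver(config);
        NYdb::NTopic::TTopicClient client(driver);

        // Topic name mirrors the fixture value in the log; consumer name is hypothetical.
        auto settings = NYdb::NTopic::TReadSessionSettings()
            .ConsumerName("test-consumer")
            .AppendTopics(std::string("TestTopic"));

        auto session = client.CreateReadSession(settings);

        // Event loop over the same states the unit test traces.
        while (auto event = session->GetEvent(/*block=*/true)) {
            if (auto* data = std::get_if<NYdb::NTopic::TReadSessionEvent::TDataReceivedEvent>(&*event)) {
                // "The application data is transferred to the client."
                for (const auto& message : data->GetMessages()) {
                    std::cout << message.GetData() << std::endl;
                }
                data->Commit();  // produces the "Commit offsets [a, b)" lines
            } else if (auto* start = std::get_if<NYdb::NTopic::TReadSessionEvent::TStartPartitionSessionEvent>(&*event)) {
                start->Confirm();  // "Confirm partition stream create"
            } else if (auto* stop = std::get_if<NYdb::NTopic::TReadSessionEvent::TStopPartitionSessionEvent>(&*event)) {
                stop->Confirm();   // "Confirm partition stream destroy"
            } else if (std::get_if<NYdb::NTopic::TSessionClosedEvent>(&*event)) {
                break;             // terminal state after "Abort session to cluster"
            }
        }
        driver.Stop(true);
    }

[End editorial note; the captured test output continues below.]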
2025-04-17T17:50:33.260241Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-17T17:50:33.260270Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-17T17:50:33.298874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340976449855015:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.299206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.299785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494340976449855027:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:50:33.303304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:50:33.331980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494340976449855029:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:50:33.404491Z node 1 :TX_PROXY ERROR: Actor# [1:7494340976449855109:2769] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:50:33.405372Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-17T17:50:33.405457Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000106C58 2025-04-17T17:50:33.405501Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7494340976449854996:2338], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01js2dzcte491cka3d6hkererj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzcxNDZjZTAtY2Y4MjE4NDMtOGE1ZjJiYTEtNTE3YTYxZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-17T17:50:33.405630Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-17T17:50:33.405683Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7494340976449854996:2338], queueSize: 1 2025-04-17T17:50:33.406534Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7494340976449854996:2338], compileActor: [1:7494340976449855128:2349] 
2025-04-17T17:50:33.665387Z node 1 :KQP_YQL INFO: TraceId: 01js2dzcte491cka3d6hkererj, SessionId: CompileActor 2025-04-17 17:50:33.664 INFO ydb-core-sys_view-ut(pid=58337, tid=0x00007FD21E5B8640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-17T17:50:33.666377Z node 1 :KQP_YQL TRACE: TraceId: 01js2dzcte491cka3d6hkererj, SessionId: CompileActor 2025-04-17 17:50:33.665 TRACE ydb-core-sys_view-ut(pid=58337, tid=0x00007FD21E5B8640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-17T17:50:33.666690Z node 1 :KQP_YQL DEBUG: TraceId: 01js2dzcte491cka3d6hkererj, SessionId: CompileActor 2025-04-17 17:50:33.666 DEBUG ydb-core-sys_view-ut(pid=58337, tid=0x00007FD21E5B8640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 253us 2025-04-17T17:50:33.667214Z node 1 :KQP_YQL DEBUG: TraceId: 01js2dzcte491cka3d6hkererj, SessionId: CompileActor 2025-04-17 17:50:33.667 DEBUG ydb-core-sys_view-ut(pid=58337, tid=0x00007FD21E5B8640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-17T17:50:33.668451Z node 1 :KQP_YQL DEBUG: TraceId: 01js2dzcte491cka3d6hkererj, SessionId: CompileActor 2025-04-17 17:50:33.668 DEBUG ydb-core-sys_view-ut(pid=58337, tid=0x00007FD21E5B8640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-17T17:50:33.669195Z node 1 :KQP_YQL TRACE: TraceId: 01js2dzcte491cka3d6hkererj, SessionId: CompileActor 2025-04-17 17:50:33.668 TRACE ydb-core-sys_view-ut(pid=58337, tid=0 ... 
UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:43.425844Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:43.453568Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:47.567256Z node 66 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[66:7494342045313502776:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:47.567409Z node 66 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:49.404766Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7494342075378274882:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:49.404943Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:49.405047Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7494342075378274894:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:49.411711Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:54:49.440195Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [66:7494342075378274896:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:54:49.535914Z node 66 :TX_PROXY ERROR: Actor# [66:7494342075378274970:2708] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:49.791865Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2e76xs360rebgfvzg807d8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=MzYwNDNlNWUtNDkyNGM4NTMtNGY2NmNhYy02MzVlOTEyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:50.010301Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2e77b1fppjjey4c6q54cgz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=OTY4NmIwMjktNmU2MjY3NDItN2I4ZGNhNmMtY2YwNjIxMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:54:50.013540Z node 66 :SYSTEM_VIEWS INFO: Scan started, actor: [66:7494342079673242350:2368], owner: [66:7494342079673242346:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-04-17T17:54:50.014627Z node 66 :SYSTEM_VIEWS INFO: Scan prepared, actor: [66:7494342079673242350:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:54:50.015177Z node 66 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [66:7494342079673242350:2368], row count: 1, finished: 1 2025-04-17T17:54:50.015202Z node 66 :SYSTEM_VIEWS INFO: Scan finished, actor: [66:7494342079673242350:2368], owner: [66:7494342079673242346:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-04-17T17:54:50.018602Z node 66 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912490008, txId: 281474976710662] shutting down 2025-04-17T17:54:53.083793Z node 71 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[71:7494342092026184107:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:53.083926Z node 71 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000890/r3tmp/tmpRNTSMG/pdisk_1.dat 2025-04-17T17:54:53.440161Z node 71 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:53.518307Z node 71 :HIVE WARN: HIVE#72057594037968897 Node(71, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:53.518518Z node 71 :HIVE WARN: HIVE#72057594037968897 Node(71, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:53.523672Z node 71 :HIVE WARN: HIVE#72057594037968897 Node(71, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61369, node 71 2025-04-17T17:54:53.617351Z node 71 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:53.617386Z node 71 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:53.617405Z node 71 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-17T17:54:53.617636Z node 71 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:54.255238Z node 71 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:54.304867Z node 71 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:58.084354Z node 71 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[71:7494342092026184107:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:58.084487Z node 71 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:59.655963Z node 71 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [71:7494342117795988935:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:59.655964Z node 71 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [71:7494342117795988929:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:59.656152Z node 71 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:59.662856Z node 71 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:54:59.693032Z node 71 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [71:7494342117795988944:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:54:59.765621Z node 71 :TX_PROXY ERROR: Actor# [71:7494342117795989024:2733] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:59.995701Z node 71 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2e7gy59nqa62jtxrpf2jkp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=Y2Y5ZDdiYi01ZmViOWFjOC1mMTE0MTNhOC00MWMyNTJlZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:00.186521Z node 71 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e7h9f08gx79h9fm591077, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=71&id=NDI1ZTMwNmUtYzNhNDJiZGQtMTEyMmVlNWUtYmM1MWNmMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:00.189577Z node 71 :SYSTEM_VIEWS INFO: Scan started, actor: [71:7494342122090956403:2368], owner: [71:7494342122090956400:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:55:00.190786Z node 71 :SYSTEM_VIEWS INFO: Scan prepared, actor: [71:7494342122090956403:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T17:55:00.191683Z node 71 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [71:7494342122090956403:2368], row count: 1, finished: 1 2025-04-17T17:55:00.191757Z node 71 :SYSTEM_VIEWS INFO: Scan finished, actor: [71:7494342122090956403:2368], owner: [71:7494342122090956400:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-17T17:55:00.194535Z node 71 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912500185, txId: 281474976715662] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-04-17T17:54:53.500495Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.500650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.517992Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.518372Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.544085Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:53.949586Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:53.949700Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:53.966106Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:53.966230Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:53.990963Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:54.388440Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:54.388547Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:54.402466Z node 3 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:54.402599Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:54.417071Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:54.417656Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=7845725936072934884, session=0, seqNo=0) 2025-04-17T17:54:54.417841Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:54.440553Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=7845725936072934884, session=1) 2025-04-17T17:54:54.441118Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:139:2163], cookie=10481644268669272151) 2025-04-17T17:54:54.441231Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:139:2163], cookie=10481644268669272151) 2025-04-17T17:54:54.850007Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:54.862130Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:55.215455Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:55.228695Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:55.582516Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:55.599830Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:55.949454Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:55.961654Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:56.326091Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:56.338019Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:56.679197Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:56.691516Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:57.020817Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:57.033005Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:57.382143Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:57.394381Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:57.734503Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:57.746613Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:58.139312Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:58.151175Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:58.504081Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:58.516405Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:58.876928Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:58.889009Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:59.244348Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:59.256617Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-17T17:54:59.611080Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:59.630401Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:00.022417Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:00.034624Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:00.377773Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:00.391060Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:00.741882Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:00.754668Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.109341Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.121582Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.477812Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.489622Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.916506Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.929100Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:02.312235Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:02.325672Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:02.694559Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:02.706836Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.086419Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:03.102788Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.473752Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:03.485934Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.875199Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:03.889323Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:04.253512Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:04.270817Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:04.652986Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:04.665133Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.062562Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.079021Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.471632Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.484237Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.879117Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.891375Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.258983Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.271068Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-17T17:55:06.628820Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.641710Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.996650Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:07.008925Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:07.352680Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:07.367060Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:07.739460Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:07.759181Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.146578Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.159170Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.544025Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.559351Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.955948Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.970775Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:09.354050Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:09.366964Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:09.781315Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:09.794475Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:10.186610Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:10.205566Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:10.590751Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:10.607640Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:10.978570Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:10.995233Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:11.372992Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:11.386044Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:11.798980Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:11.811611Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:12.204937Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:12.221260Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:12.585437Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:12.602138Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:12.979306Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:12.994727Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:13.370579Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:13.387055Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-17T17:55:13.841365Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:13.856414Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:14.258550Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:14.271638Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:14.666496Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:14.686873Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:15.060028Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:15.086820Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:15.469899Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:15.490817Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:15.882578Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:15.898821Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:16.318507Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:16.338993Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:16.716203Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:16.737835Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:17.118551Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:17.135479Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:17.537467Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:17.551140Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:18.008931Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:55:18.009023Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:55:18.021818Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:55:18.035067Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:471:2478], cookie=15881573888666444116) 2025-04-17T17:55:18.035170Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:471:2478], cookie=15881573888666444116) 2025-04-17T17:55:18.718049Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:18.718151Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:18.741651Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:18.741764Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:18.762393Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:18.768826Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=9945999883843950291, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-17T17:55:18.769068Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-17T17:55:18.792946Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], 
cookie=9945999883843950291) 2025-04-17T17:55:18.794694Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:139:2163]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:55:18.794760Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:139:2163], cookie=0) 2025-04-17T17:55:18.795018Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T17:55:18.795051Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2025-04-17T17:55:18.842639Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-17T17:55:18.842751Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-17T17:55:18.843028Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:144:2168]) 2025-04-17T17:55:18.843173Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-04-17T17:55:18.886771Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 31413, MsgBus: 6030 2025-04-17T17:55:11.262631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342173053412490:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:11.262691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bcf/r3tmp/tmpTWz82n/pdisk_1.dat 2025-04-17T17:55:11.970945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:12.003122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:12.003200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:12.030982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31413, node 1 2025-04-17T17:55:12.261595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:12.261616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:12.261631Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:12.261770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6030 TClient is connected to server localhost:6030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:55:12.953330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:12.982518Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T17:55:12.991934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:13.152265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:13.356214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:13.473048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:15.591055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342190233283433:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:15.591183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:15.933812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T17:55:15.977801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T17:55:16.019081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T17:55:16.101392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T17:55:16.153520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T17:55:16.237628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T17:55:16.266573Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342173053412490:2068];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:16.266652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:55:16.334513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T17:55:16.459072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342194528251404:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:16.459160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:16.459746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342194528251409:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:16.468842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T17:55:16.486676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342194528251411:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T17:55:16.541437Z node 1 :TX_PROXY ERROR: Actor# [1:7494342194528251467:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:55:18.035075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912518021, txId: 281474976710672] shutting down
|80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest
>> ReadSessionImplTest::DecompressRaw [GOOD]
>> ReadSessionImplTest::DecompressGzip [GOOD]
>> ReadSessionImplTest::DecompressZstd [GOOD]
>> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD]
>> ReadSessionImplTest::DecompressGzipEmptyMessage
>> TMonitoringTests::InvalidActorId
>> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD]
>> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD]
>> ReadSessionImplTest::DataReceivedCallbackReal
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD]
Test command err:
2025-04-17T17:55:19.341192Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.341232Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.341258Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-04-17T17:55:19.355876Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-04-17T17:55:19.356489Z :INFO: [db] [sessionid] [cluster] Server session id: session id
2025-04-17T17:55:19.356547Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.366925Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.366948Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.366965Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-04-17T17:55:19.367329Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-04-17T17:55:19.367664Z :INFO: [db] [sessionid] [cluster] Server session id: session id
2025-04-17T17:55:19.367710Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.368864Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.368887Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.368905Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-04-17T17:55:19.369214Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR.
Description:
2025-04-17T17:55:19.369260Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.369286Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.369420Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " }
2025-04-17T17:55:19.379279Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.379316Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.379334Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-04-17T17:55:19.379781Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1
2025-04-17T17:55:19.379837Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.379860Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-17T17:55:19.379945Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-17T17:55:19.382412Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:55:19.382436Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:55:19.382621Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:19.383018Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:19.383622Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:19.396403Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:55:19.400177Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:19.400566Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-04-17T17:55:19.404633Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-17T17:55:19.404944Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:19.404980Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:55:19.405007Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:55:19.405026Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-17T17:55:19.405052Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-17T17:55:19.405072Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-17T17:55:19.405101Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-17T17:55:19.405121Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-17T17:55:19.405166Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-17T17:55:19.405185Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-17T17:55:19.405202Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-17T17:55:19.405220Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-17T17:55:19.405234Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-17T17:55:19.405248Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-17T17:55:19.405271Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-17T17:55:19.405289Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-17T17:55:19.405337Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-17T17:55:19.405359Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-17T17:55:19.405375Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-17T17:55:19.405391Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-17T17:55:19.405407Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-17T17:55:19.405427Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-17T17:55:19.405456Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-17T17:55:19.405479Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-17T17:55:19.405514Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-17T17:55:19.405530Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-17T17:55:19.405546Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-17T17:55:19.405563Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-17T17:55:19.405579Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-17T17:55:19.405594Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-17T17:55:19.405619Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-17T17:55:19.405640Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-17T17:55:19.405722Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-17T17:55:19.405742Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-17T17:55:19.405760Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-17T17:55:19.405777Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-17T17:55:19.405796Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-17T17:55:19.405816Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-17T17:55:19.405841Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-17T17:55:19.405862Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-17T17:55:19.405878Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-17T17:55:19.405894Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-17T17:55:19.405909Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-17T17:55:19.405930Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-17T17:55:19.405946Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-17T17:55:19.405964Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-17T17:55:19.405980Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-17T17:55:19.405997Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-17T17:55:19.406019Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-17T17:55:19.406043Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-17T17:55:19.406102Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-17T17:55:19.408882Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-17T17:55:19.409280Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-17T17:55:19.409323Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-17T17:55:19.409358Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-17T17:55:19.409397Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-17T17:55:19.409427Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-17T17:55:19.409448Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-17T17:55:19.409479Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-17T17:55:19.409496Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-17T17:55:19.409540Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-17T17:55:19.409558Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-17T17:55:19.409575Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-17T17:55:19.409592Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-17T17:55:19.409610Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-17T17:55:19.409627Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-17T17:55:19.409648Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-17T17:55:19.409669Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-17T17:55:19.409709Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-17T17:55:19.409728Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-17T17:55:19.409745Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-17T17:55:19.409773Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-17T17:55:19.409798Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-17T17:55:19.409815Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-17T17:55:19.409847Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-17T17:55:19.409865Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-17T17:55:19.409883Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-17T17:55:19.409900Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-17T17:55:19.409919Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-17T17:55:19.409936Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-17T17:55:19.409954Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-17T17:55:19.409984Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-17T17:55:19.410012Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-17T17:55:19.410030Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-17T17:55:19.410107Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-17T17:55:19.410129Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-17T17:55:19.410147Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-17T17:55:19.410226Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-17T17:55:19.410253Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-17T17:55:19.410270Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-17T17:55:19.410302Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-17T17:55:19.410320Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-17T17:55:19.410336Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-17T17:55:19.410360Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-17T17:55:19.410379Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-17T17:55:19.410399Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-17T17:55:19.410472Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-17T17:55:19.410493Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-17T17:55:19.410513Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-17T17:55:19.410531Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-17T17:55:19.410547Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-17T17:55:19.410564Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-17T17:55:19.410612Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-17T17:55:19.410742Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:55:19.412114Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.412136Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.412159Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:19.425403Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:55:19.426018Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:19.426227Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.434054Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:19.531623Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.531863Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:55:19.531917Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:19.531957Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-17T17:55:19.532020Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T17:55:19.738274Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-17T17:55:19.839611Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-17T17:55:19.839744Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T17:55:19.840010Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-17T17:55:19.841165Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.841203Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.841223Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:19.849381Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:19.860297Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:19.870579Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.874709Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:19.979104Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:19.979354Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:55:19.979405Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:19.979441Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-17T17:55:19.979511Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-17T17:55:19.979617Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T17:55:19.979796Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-17T17:55:19.979859Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-17T17:55:19.979943Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TMonitoringTests::InvalidActorId [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |80.5%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.5%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TExternalTableTest::DropTableTwice >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TExternalTableTest::CreateExternalTable >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |80.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime >> TExternalTableTest::SchemeErrors >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-17T17:55:20.210624Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.210655Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.210675Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.214577Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:55:20.230849Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.267633Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.270014Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.270888Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:20.271310Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:20.271513Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-17T17:55:20.271652Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.271728Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.271756Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-17T17:55:20.271811Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:55:20.271830Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:55:20.275480Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.275516Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.275541Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.286701Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.287454Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.287661Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.290396Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-17T17:55:20.291522Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:20.291786Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:55:20.305914Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:55:20.306151Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:55:20.306390Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.306423Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:55:20.306458Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:55:20.306592Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-17T17:55:20.306625Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:55:20.306654Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:55:20.306677Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:20.306804Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-17T17:55:20.306880Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-17T17:55:20.306901Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-17T17:55:20.306917Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:55:20.306988Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-17T17:55:20.307011Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-17T17:55:20.307030Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-17T17:55:20.307048Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:20.307136Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-17T17:55:20.308500Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.308521Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.308541Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.325338Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.325814Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.325980Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.326312Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-04-17T17:55:20.327263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:20.327479Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:55:20.330467Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:55:20.330706Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:55:20.330804Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.330849Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:55:20.330958Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-04-17T17:55:20.330992Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:55:20.331012Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:55:20.331069Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-04-17T17:55:20.331098Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:55:20.331114Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:55:20.331189Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-04-17T17:55:20.331212Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:55:20.331227Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:23.047631Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-04-17T17:55:23.227899Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-17T17:55:23.227937Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-17T17:55:23.227973Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:23.247755Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:23.248308Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:23.248503Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-17T17:55:23.248896Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-17T17:55:23.424599Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-17T17:55:23.425752Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:23.427478Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:55:23.429964Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:55:23.430835Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-17T17:55:23.437668Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-17T17:55:23.438527Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-17T17:55:23.439339Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-17T17:55:23.440165Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-17T17:55:23.448820Z :DEBUG: Take Data. Partition 1. 
Read: {1, 2} (9-9) 2025-04-17T17:55:23.449614Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-17T17:55:23.449690Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-04-17T17:55:23.449921Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:55:23.459773Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-04-17T17:55:23.467226Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:23.467260Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:23.467286Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:23.490495Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:23.498439Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:23.498600Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:23.498910Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:23.499303Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-17T17:55:23.500770Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:23.500797Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:23.500819Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:23.510457Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:23.522562Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:23.522727Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:23.526693Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:23.530380Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:23.534269Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:23.534338Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:55:23.539769Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> TExternalTableTest::DropTableTwice [GOOD] >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T17:55:23.919168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:23.919283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:23.919337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:23.919369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:23.919409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:23.919434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:23.919487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:23.919554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:23.919867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:24.023482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:24.023539Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:24.041617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:24.046386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-17T17:55:24.046591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:24.079129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:24.079311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:24.079952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.080825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:24.085533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.086826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.086888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.087079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:24.087135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.087180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:24.087338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.096682Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:55:24.236958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:24.237167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.237374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:24.237624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:24.237682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.247429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.247615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:24.247823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.247887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:24.247925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:24.247961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:24.257009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.257085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:24.257123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:24.267007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.267071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.267113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.267155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.273929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:24.282922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:24.283132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:24.284107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.284238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:24.284282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.284538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:24.284594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.284749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:24.284827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:24.291127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T17:55:24.291177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.291395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.291438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:24.291749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.291791Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:24.291877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.291908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.291943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.291970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.292034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:24.292092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.292126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:24.292155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:24.292226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:24.292257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:24.292295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:24.294142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-17T17:55:24.457326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.457433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:24.457483Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-17T17:55:24.457649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:24.457722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-17T17:55:24.457904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:24.457983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:55:24.458029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:24.459193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:55:24.461481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2025-04-17T17:55:24.462282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.462317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.462465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:24.462585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:55:24.462706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.462754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-17T17:55:24.462801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-17T17:55:24.462825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:55:24.463099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at 
schemeshard: 72057594046678944 2025-04-17T17:55:24.463149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:55:24.463253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:55:24.463282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:55:24.463319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:55:24.463349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:55:24.463402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-17T17:55:24.463442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:55:24.463506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:55:24.463543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:55:24.463628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:24.463665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:55:24.463702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-04-17T17:55:24.463738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-17T17:55:24.463765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:55:24.463784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-17T17:55:24.464254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:24.464349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:24.464388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:55:24.464422Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T17:55:24.464455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T17:55:24.464869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:55:24.464928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T17:55:24.464994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 
3 2025-04-17T17:55:24.465521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:24.465588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:24.465617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:55:24.465643Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T17:55:24.465666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:24.466153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:24.466301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:55:24.466331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:55:24.466356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:55:24.466378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:24.466448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T17:55:24.471050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:55:24.472504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:55:24.472610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:55:24.472663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:55:24.472958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:55:24.473004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:55:24.473411Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:55:24.473543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:55:24.473582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:369:2360] 
TestWaitNotification: OK eventTxId 103 2025-04-17T17:55:24.474077Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:24.474272Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 232us result status StatusPathDoesNotExist 2025-04-17T17:55:24.474448Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalTableTest::SchemeErrors [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> TExternalTableTest::ParallelCreateExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T17:55:24.809195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:24.809289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:24.809343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:24.809398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:24.809459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:24.809486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:24.809541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:24.809603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:24.809887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:24.948011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:24.948071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:24.975158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:24.975283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:24.975422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:24.991427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:24.991763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:24.992451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.992644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:24.994644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.995948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.996012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.996137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:24.996185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.996223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:24.996395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.011586Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:55:25.190932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.191156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T17:55:25.191388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:25.191629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:25.191687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.199630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:25.199779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:25.199981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.200049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:25.200084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:25.200113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:25.202887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.202954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:25.203007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:25.207205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.207261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.207321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:25.207371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.211274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:25.214969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:25.215181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:25.216122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:25.216250Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.216294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:25.216554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:25.216624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:25.216799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:25.216897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:25.220329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:25.220380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:25.220564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:25.220611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:25.220943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.220989Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:25.221086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:25.221120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.221173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:25.221201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.221236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:25.221275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.221308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:25.221350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:25.221439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:25.221477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:25.221507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:25.224102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
d: 2] 2025-04-17T17:55:25.391536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:25.391566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T17:55:25.391600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T17:55:25.391650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T17:55:25.391903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.391957Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:55:25.392052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:55:25.392084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:55:25.392124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:55:25.392162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:55:25.392196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T17:55:25.392250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:55:25.392287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:55:25.392315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:55:25.392388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:55:25.392443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T17:55:25.392476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T17:55:25.392503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:55:25.393445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:25.393537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:25.393566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:55:25.393607Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T17:55:25.393647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:25.398464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:25.398557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:25.398587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:55:25.398617Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:55:25.398648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:25.398727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T17:55:25.406229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:55:25.410864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T17:55:25.411105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:55:25.411143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:55:25.411555Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:55:25.462420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:55:25.462474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:304:2295] TestWaitNotification: OK eventTxId 101 2025-04-17T17:55:25.463049Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:25.463310Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 231us result status StatusSuccess 2025-04-17T17:55:25.463646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-17T17:55:25.470771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.471108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-17T17:55:25.471179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-17T17:55:25.471238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-17T17:55:25.478978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.479154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:55:25.479436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:55:25.479483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:55:25.479891Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:55:25.479989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:55:25.480026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:312:2303] 
TestWaitNotification: OK eventTxId 102 2025-04-17T17:55:25.480493Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:25.480660Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 186us result status StatusPathDoesNotExist 2025-04-17T17:55:25.480807Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T17:55:24.281561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:24.281677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:24.281732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:24.281771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:24.281811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:24.281837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:24.281890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:24.281972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:24.282390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:24.373630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" 
AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:24.373694Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:24.389445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:24.389545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:24.389707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:24.398469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:24.398921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:24.399535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.399740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:24.401575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.402754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.402824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.402967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:24.403013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.403049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:24.403202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.409491Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:55:24.624489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:24.624720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.624958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:24.625179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:24.625237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T17:55:24.632221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.632363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:24.632565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.632637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:24.632686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:24.632733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:24.639077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.639155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:24.639187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:24.647128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.647207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.647260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.647326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.650808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:24.652951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:24.653134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:24.654006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.654128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:24.654172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.654450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:24.654501Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.654688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:24.654781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:24.663631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.663687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.663879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.663921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:24.664267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.664316Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:24.664407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.664455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.664496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.664526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.664558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:24.664599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.664629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:24.664673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:24.664744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:24.664777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:24.664811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:24.670747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T17:55:25.731256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:55:25.731274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T17:55:25.732399Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.732473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.732506Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:55:25.732558Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:55:25.732600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:55:25.735404Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.735485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.735511Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:55:25.735557Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T17:55:25.735588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:25.736580Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.736649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.736679Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:55:25.736716Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:55:25.736746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:25.736808Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T17:55:25.738999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:55:25.739242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:55:25.740359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:55:25.740577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:55:25.740620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:55:25.740981Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:55:25.741081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:55:25.741121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:335:2326] TestWaitNotification: OK eventTxId 102 2025-04-17T17:55:25.741570Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:25.741740Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 205us result status StatusSuccess 2025-04-17T17:55:25.742004Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103
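
The describe above confirms the table exists with its original definition (Version 1, Location "/", single key column). The next propose, txId 103, re-issues the create for the same path with a different definition (two columns, Location "/new_location") and FailOnExist: false: the schemeshard answers StatusAlreadyExists with "path exist, request accepts it", the notification waiter is still satisfied, and the second describe further down shows the original definition untouched. In user-facing terms this behaves like an IF NOT EXISTS create; the YQL below is a sketch under the assumption that FailOnExist: false is what the IF NOT EXISTS clause sets, and is not part of the captured output.

    CREATE EXTERNAL TABLE IF NOT EXISTS ExternalTable (
        key Uint64,
        value Uint64
    ) WITH (
        DATA_SOURCE = "/MyRoot/ExternalDataSource",
        LOCATION = "/new_location"
    );
    -- Reports success, but the existing table keeps its original
    -- one-column schema and LOCATION "/"; nothing is replaced.
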
"/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.745855Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-17T17:55:25.745946Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-04-17T17:55:25.746095Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T17:55:25.748658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-04-17T17:55:25.748820Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:55:25.749128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:55:25.749171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:55:25.749564Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:55:25.749646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:55:25.749680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:343:2334] TestWaitNotification: OK eventTxId 103 2025-04-17T17:55:25.750156Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:25.750337Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 217us result status StatusSuccess 2025-04-17T17:55:25.750597Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T17:55:24.625980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:24.626056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:24.626090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:24.626140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:24.626373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:24.626408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:24.626473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:24.626565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:24.626891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:24.711403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: 
"ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:24.711461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:24.726958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:24.727060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:24.727217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:24.736372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:24.736745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:24.737354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.737526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:24.739623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.740841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.740895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.741031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:24.741077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.741112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:24.741299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.747666Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:55:24.885835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:24.886037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.886262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:24.886423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:24.886466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.888367Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.888489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:24.888623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.888675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:24.888704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:24.888725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:24.890153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.890228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:24.890262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:24.894488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.894558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.894610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.894664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.897582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:24.901452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:24.901672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:24.902638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.902770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:24.902813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.903062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:24.903111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.903269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:24.903366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:24.906639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.906682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.906852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.906893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:24.907213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.907258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:24.907340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.907376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.907439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.907485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.907524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:24.907565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.907595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:24.907645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:24.907710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:24.907743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:24.907771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:24.909678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
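
The framework clips this log at the " ... " as well; it resumes below partway through a describe of /MyRoot/ExternalDataSource and then reaches the substance of SchemeErrors: a series of CreateExternalTable proposes (txId 126 through 131) that each violate one validation rule (unsupported column type, empty column name, explicitly set TypeId, missing type, duplicate column id, and a dangling data-source path), each rejected at IgniteOperation before any state changes. The first rejection corresponds roughly to the YQL below; a sketch, not captured output, with BlaBlaType being the deliberately bogus type from the test.

    CREATE EXTERNAL TABLE Table2 (
        RowId BlaBlaType -- rejected: "Type 'BlaBlaType' specified for column 'RowId' is not supported by storage"
    ) WITH (
        DATA_SOURCE = "/MyRoot/ExternalDataSource",
        LOCATION = "/"
    );
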
0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2025-04-17T17:55:25.343339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.343888Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2025-04-17T17:55:25.344012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-04-17T17:55:25.344320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-04-17T17:55:25.347714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.347914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-04-17T17:55:25.351510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.351896Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-04-17T17:55:25.351986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-04-17T17:55:25.352121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot 
have an empty name, at schemeshard: 72057594046678944 2025-04-17T17:55:25.354428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.354622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-17T17:55:25.357655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.357942Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-04-17T17:55:25.358100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-04-17T17:55:25.358230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-04-17T17:55:25.366044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.366266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-04-17T17:55:25.369116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.369420Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-04-17T17:55:25.369510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-04-17T17:55:25.443185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-04-17T17:55:25.446802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.447005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-04-17T17:55:25.450737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.451124Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-04-17T17:55:25.451217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-04-17T17:55:25.454814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-04-17T17:55:25.460481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.460663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-04-17T17:55:25.463883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
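
This final propose in the series, txId 131, is well-formed but points at /MyRoot/ExternalDataSource1, which was never created, so it fails path resolution (StatusPathDoesNotExist) rather than schema validation. Roughly, in YQL (a sketch, not captured output):

    CREATE EXTERNAL TABLE Table2 (
        RowId Uint64,
        Value Utf8
    ) WITH (
        DATA_SOURCE = "/MyRoot/ExternalDataSource1", -- no such path; only /MyRoot/ExternalDataSource exists
        LOCATION = "/"
    );
    -- Rejected: "path hasn't been resolved, nearest resolved path: '/MyRoot'"
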
2025-04-17T17:55:25.464237Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2025-04-17T17:55:25.464339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-04-17T17:55:25.464531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-17T17:55:25.473434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.473573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T17:55:25.061632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:25.061732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:25.061777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:25.061809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:25.061852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:25.061876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:25.061940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:25.062005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:25.062415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:25.177358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got
new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:25.177427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:25.199182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:25.199752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:25.199913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:25.227078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:25.227255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:25.227851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:25.228435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:25.231585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:25.232838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:25.232915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:25.233067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:25.233106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:25.233144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:25.233301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.239830Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:55:25.384777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.384992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.385229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:25.385428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:25.385475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.395232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:25.395392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:25.395640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.395706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:25.395755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:25.395802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:25.397914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.398010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:25.398051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:25.400038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.400084Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.400126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:25.400189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.403558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:25.405639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:25.405861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:25.406845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:25.406967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:25.407008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:25.407279Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:25.407337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:25.407512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:25.407715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:25.412074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:25.412124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:25.412313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:25.412374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:25.412675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.412726Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:25.412809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:25.412856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.412898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:25.412941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.412996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:25.413033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:25.413062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:25.413111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:25.413180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:25.413225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:25.413253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:25.415036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
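
Another framework clip at the " ... "; the log resumes below with the publication acks for txId 102. Per the rejection details further down, this test first creates a view at /MyRoot/UniqueName (txId 101, LocalPathId 2) and an external data source (txId 102), then txId 103 attempts a replace-if-exists create of an external table at the view's path. The propose is refused with StatusNameConflict because replace-if-exists only replaces entities of the expected type (EPathTypeExternalTable), never a view. A rough YQL equivalent, assuming ReplaceIfExists: true is what the OR REPLACE clause (gated by the EnableReplaceIfExistsForExternalEntities feature flag) sets; not part of the captured output:

    CREATE OR REPLACE EXTERNAL TABLE UniqueName (
        key Uint64
    ) WITH (
        DATA_SOURCE = "/MyRoot/ExternalDataSource",
        LOCATION = "/"
    );
    -- Fails: /MyRoot/UniqueName already exists as a VIEW, and
    -- OR REPLACE will not replace a path of a different type.
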
LocalPathId: 3] was 2 2025-04-17T17:55:25.712947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:55:25.713083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:55:25.719231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:25.719291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:25.719488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:25.719589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:25.719708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:25.719745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T17:55:25.719782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-17T17:55:25.719803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-17T17:55:25.720077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:55:25.720118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:55:25.720217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:55:25.720249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:25.720288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:55:25.720314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:25.720344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T17:55:25.720393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:55:25.720426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:55:25.720454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:55:25.720521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:55:25.720555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T17:55:25.720603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-04-17T17:55:25.720628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T17:55:25.721334Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.721411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.721439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:55:25.721475Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-17T17:55:25.721527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:55:25.730832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.730948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:55:25.802407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:55:25.802473Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T17:55:25.802509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:25.802614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T17:55:25.810980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:55:25.818937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:55:25.819204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:55:25.819297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:55:25.819808Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:55:25.819897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:55:25.819930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:326:2317] TestWaitNotification: OK eventTxId 102 2025-04-17T17:55:25.820400Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:25.820607Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 230us result status StatusSuccess 2025-04-17T17:55:25.820911Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-17T17:55:25.824113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:25.824478Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-17T17:55:25.824568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2025-04-17T17:55:25.824700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2025-04-17T17:55:25.827161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-17T17:55:25.827318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:55:25.827600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:55:25.827655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:55:25.828082Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:55:25.828167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:55:25.828212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:334:2325] TestWaitNotification: OK eventTxId 103 >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort >> TExternalTableTest::DropExternalTable >> TExternalTableTest::ParallelCreateExternalTable [GOOD] |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TExternalTableTest::ParallelCreateSameExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T17:55:27.035622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:27.035760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:27.035814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:27.035848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:27.035898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:27.035922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-04-17T17:55:27.035978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:27.036042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:27.036363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:27.120524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:27.120591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:27.140222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:27.140868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:27.141043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:27.163939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:27.164173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:27.164874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:27.165692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:27.169632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:27.171206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:27.171289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:27.171467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:27.171536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:27.171594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:27.171805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.179691Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:55:27.331247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-04-17T17:55:27.331493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.331771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:27.332047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:27.332112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.338490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:27.338691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:27.338935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.339015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:27.339055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:27.339097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:27.343486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.343560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:27.343617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:27.348216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.348282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.348327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:27.348374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:27.357808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:27.363472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:27.363710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:27.364803Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:27.365008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:27.365064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:27.365366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:27.365436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:27.365650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:27.365817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:27.375689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:27.375759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:27.375981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:27.376032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:27.376363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:27.376415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:27.376515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:27.376551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:27.376587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:27.376620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:27.376720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:27.376783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:27.376824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:27.376856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:27.376962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:27.377004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:27.377034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:27.382635Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... satisfy waiter [1:373:2364] 2025-04-17T17:55:27.637291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-17T17:55:27.637470Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-17T17:55:27.637517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-17T17:55:27.637544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:373:2364] 2025-04-17T17:55:27.637646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-17T17:55:27.637690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:373:2364] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-04-17T17:55:27.638315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:27.638582Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 290us result status StatusSuccess 2025-04-17T17:55:27.638953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:27.639874Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:27.640116Z node 
1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 231us result status StatusSuccess 2025-04-17T17:55:27.640454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:27.648151Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:27.648440Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 259us result status StatusSuccess 2025-04-17T17:55:27.648923Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:27.649625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:27.649829Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 206us result status StatusSuccess 2025-04-17T17:55:27.650131Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:27.656429Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:27.656717Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 324us result status StatusSuccess 2025-04-17T17:55:27.657120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::ReadOnlyMode >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TExternalTableTest::ReplaceExternalTableIfNotExists >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] >> TExternalTableTest::Decimal [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |80.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T17:55:28.177298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:28.177402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:28.177457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:28.177493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2025-04-17T17:55:28.177540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:28.177568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:28.177638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:28.177712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:28.178064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:28.268530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:28.268598Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:28.288115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:28.288238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:28.288414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:28.301622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:28.302245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:28.302937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:28.303174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:28.305775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:28.307234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:28.307319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:28.307463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:28.307522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:28.307563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:28.307807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.316763Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:55:28.445605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:28.445818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.446021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:28.446255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:28.446315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.449103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:28.449257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:28.449499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.449564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:28.449600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:28.449651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:28.454077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.454157Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:28.454215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:28.456187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.456249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.456297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:28.456373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:28.460085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:28.463822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:28.464020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 
4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:28.464922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:28.465045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:28.465081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:28.465339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:28.465402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:28.465579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:28.465642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:28.468021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:28.468075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:28.468347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:28.468388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:28.468705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:28.468748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:28.468840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:28.468893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:28.468944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:28.468978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:28.469013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:28.469048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:28.469079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:28.469106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:28.469176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:28.469239Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:28.469271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:28.471154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... TOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-17T17:55:29.742261Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:29.742404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 8589936748 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:29.742463Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2025-04-17T17:55:29.742604Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T17:55:29.742789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:29.742882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:29.742947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:29.754630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:55:29.755690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T17:55:29.763727Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:29.763775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:29.763925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:29.764043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:29.764108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:55:29.764214Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:29.764250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T17:55:29.764287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-17T17:55:29.764310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], 
at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-17T17:55:29.764331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T17:55:29.764595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.764643Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T17:55:29.764743Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:55:29.764783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:55:29.764827Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T17:55:29.764908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:55:29.764951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T17:55:29.764994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T17:55:29.765033Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T17:55:29.765069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T17:55:29.765145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:55:29.765187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:55:29.765226Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-17T17:55:29.765267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T17:55:29.765296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T17:55:29.765319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T17:55:29.780019Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:29.780144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:29.780187Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:55:29.780236Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:55:29.780304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:55:29.786884Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 
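The records above trace the scheme board publication protocol for txId 101: three path versions are announced in the "Publication details" records, and each TEvUpdateAck that matches a pending path decrements the in-flight count (3, 2, 1) until "Publication complete"; the handling of the last two acks continues below. A minimal sketch of that countdown follows, with the (path, version) pairs taken from the log; `PublicationTracker` and its method names are invented for illustration and are not YDB code.

```python
# Toy model of the publication-ack countdown seen in the log; the class and
# method names are hypothetical, only the numbers come from the records above.
class PublicationTracker:
    def __init__(self, tx_id, expected):
        # expected: {(owner_id, local_path_id): published_version}, as listed
        # in the "Publication details" records for txId 101.
        self.tx_id = tx_id
        self.pending = dict(expected)

    def on_update_ack(self, owner_id, local_path_id, version):
        # The log prints the in-flight count at the moment an ack arrives.
        print(f"Publication in-flight, count: {len(self.pending)}, txId: {self.tx_id}")
        key = (owner_id, local_path_id)
        if key in self.pending and version >= self.pending[key]:
            del self.pending[key]  # this path version is now acknowledged
        if not self.pending:
            print(f"Publication complete, txId: {self.tx_id}")


tracker = PublicationTracker(101, {(72057594046678944, 1): 7,
                                   (72057594046678944, 2): 2,
                                   (72057594046678944, 3): 2})
# Acks arrive in the order the log shows: path 1 (v7), path 3 (v2), path 2 (v2).
for local_path_id, version in [(1, 7), (3, 2), (2, 2)]:
    tracker.on_update_ack(72057594046678944, local_path_id, version)
```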
2025-04-17T17:55:29.787028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:29.787062Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:55:29.787102Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T17:55:29.787139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:29.788483Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:29.788573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T17:55:29.788600Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T17:55:29.788630Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T17:55:29.788661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:29.788742Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T17:55:29.802665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:55:29.803070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T17:55:29.810781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T17:55:29.811117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T17:55:29.811170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T17:55:29.811609Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T17:55:29.811729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T17:55:29.811779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:335:2326] TestWaitNotification: OK eventTxId 101 2025-04-17T17:55:29.812310Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:29.812542Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 
describe path "/MyRoot/ExternalTable" took 270us result status StatusSuccess 2025-04-17T17:55:29.812896Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144913054.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913054.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913054.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913054.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913054.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913054.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913054.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124911854.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913054.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913054.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124911854.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124911854.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124911854.000000s;Name=;Codec=}; 2025-04-17T17:54:14.992343Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:54:15.206574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:54:15.267728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:54:15.268051Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:54:15.289471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:54:15.289744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:54:15.290036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:54:15.290527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:54:15.290715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:54:15.290882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:54:15.291048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:54:15.291203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:54:15.291357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:54:15.291516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:54:15.291666Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:54:15.291839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:54:15.347264Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:54:15.347428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:54:15.347513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:54:15.347700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:15.347881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:54:15.347961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:54:15.348003Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:54:15.348092Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:54:15.348154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:54:15.348198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:54:15.348231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:54:15.348377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:15.348431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:54:15.348465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:54:15.348492Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:54:15.348578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:54:15.348625Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:54:15.348664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:54:15.348689Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:54:15.348760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:54:15.348795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:54:15.348826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:54:15.348871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:54:15.348911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:54:15.348941Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:54:15.349323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-17T17:54:15.349411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T17:54:15.349485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-17T17:54:15.349558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-17T17:54:15.349758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:54:15.349831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:54:15.349870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:54:15.350059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:54:15.350122Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:54:15.350159Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T17:55:28.841943Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912203605;tx_id=18446744073709551615;;current_snapshot_ts=1744912456241; 2025-04-17T17:55:28.841982Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:55:28.842027Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:55:28.842063Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:55:28.842147Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:55:28.842781Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744912503605:max} readable: {1744912503605:max} at tablet 9437184 2025-04-17T17:55:28.842915Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T17:55:28.843053Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503605:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T17:55:28.843113Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503605:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T17:55:28.843581Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503605:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T17:55:28.843689Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503605:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T17:55:28.844130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503605:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1960:3965];trace_detailed=; 2025-04-17T17:55:28.844516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T17:55:28.844709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T17:55:28.844896Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:28.845006Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:28.845382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:55:28.845486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-17T17:55:28.845586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:28.845618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1960:3965] finished for tablet 9437184 2025-04-17T17:55:28.845997Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1959:3964];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744912528844070,"name":"_full_task","f":1744912528844070,"d_finished":0,"c":0,"l":1744912528845669,"d":1599},"events":[{"name":"bootstrap","f":1744912528844238,"d_finished":793,"c":1,"l":1744912528845031,"d":793},{"a":1744912528845356,"name":"ack","f":1744912528845356,"d_finished":0,"c":0,"l":1744912528845669,"d":313},{"a":1744912528845338,"name":"processing","f":1744912528845338,"d_finished":0,"c":0,"l":1744912528845669,"d":331},{"name":"ProduceResults","f":1744912528844801,"d_finished":416,"c":2,"l":1744912528845604,"d":416},{"a":1744912528845606,"name":"Finish","f":1744912528845606,"d_finished":0,"c":0,"l":1744912528845669,"d":63}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:28.846059Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1959:3964];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:55:28.846455Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1959:3964];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744912528844070,"name":"_full_task","f":1744912528844070,"d_finished":0,"c":0,"l":1744912528846095,"d":2025},"events":[{"name":"bootstrap","f":1744912528844238,"d_finished":793,"c":1,"l":1744912528845031,"d":793},{"a":1744912528845356,"name":"ack","f":1744912528845356,"d_finished":0,"c":0,"l":1744912528846095,"d":739},{"a":1744912528845338,"name":"processing","f":1744912528845338,"d_finished":0,"c":0,"l":1744912528846095,"d":757},{"name":"ProduceResults","f":1744912528844801,"d_finished":416,"c":2,"l":1744912528845604,"d":416},{"a":1744912528845606,"name":"Finish","f":1744912528845606,"d_finished":0,"c":0,"l":1744912528846095,"d":489}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1960:3965]->[1:1959:3964] 2025-04-17T17:55:28.846545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:55:28.843664Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T17:55:28.846580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:55:28.846675Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T17:55:29.168006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:29.168121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.168175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:29.168213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:29.168258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:29.168292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:29.168355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.168442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:29.168780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:29.300182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:29.300248Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:29.332043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:29.332693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:29.332887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:29.395709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:29.395908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:29.396683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:29.406571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-04-17T17:55:29.418972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:29.420420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:29.420499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:29.420674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:29.420737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:29.420789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:29.420943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.448479Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:55:29.816232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:29.816460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.816719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:29.816983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:29.817041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.819749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:29.819884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:29.820101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.820166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:29.820212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:29.820245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:29.822443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.822509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:29.822547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 3 -> 128 2025-04-17T17:55:29.830016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.830086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.830150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:29.830217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.833948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:29.847206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:29.847456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:29.848504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:29.848661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:29.848716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:29.849002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:29.849062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:29.849263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:29.849361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:29.854614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:29.854686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:29.854896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:29.854940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:29.855225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T17:55:29.855273Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:29.855377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:29.855415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.855470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:29.855513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.855577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:29.855622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.855671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:29.855705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:29.855793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:29.855849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:29.855883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:29.863648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... t reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:30.083651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 125, subscribers: 0 2025-04-17T17:55:30.086483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-17T17:55:30.086988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-17T17:55:30.088626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2025-04-17T17:55:30.089267Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:30.089504Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 248us result status StatusSuccess 2025-04-17T17:55:30.089853Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:30.090425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:30.090577Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 144us result status StatusSuccess 2025-04-17T17:55:30.090864Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-04-17T17:55:30.091139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-04-17T17:55:30.091183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait 
txId: 126 2025-04-17T17:55:30.091278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-04-17T17:55:30.091303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-04-17T17:55:30.091355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-04-17T17:55:30.091397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-04-17T17:55:30.091948Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-17T17:55:30.092786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-17T17:55:30.092853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:343:2334] 2025-04-17T17:55:30.093156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-17T17:55:30.093222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-17T17:55:30.093246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:343:2334] 2025-04-17T17:55:30.093316Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-17T17:55:30.093409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-17T17:55:30.093433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:343:2334] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-04-17T17:55:30.093987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:30.094266Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 230us result status StatusSuccess 2025-04-17T17:55:30.094619Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-04-17T17:55:30.097870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:30.101311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-17T17:55:30.101440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-04-17T17:55:30.101593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T17:55:30.111334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-04-17T17:55:30.111519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T17:55:29.764810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:29.764902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.764951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:29.764980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:29.765018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:29.765046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:29.765099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.765158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:29.765468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:29.972687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:29.972733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:29.998270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:29.998894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:29.999077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:30.011133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:30.011724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:30.012334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.012923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:30.015930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.017105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:30.017169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.017310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:30.017365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:30.017411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:30.017561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.024926Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:55:30.160242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:30.160468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.160714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:30.160938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:30.161001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.163410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.163538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:30.163732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.163805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:30.163855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:30.163897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:30.165947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.166018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:30.166051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:30.168171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.168221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.168261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:30.168324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.172035Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:30.174197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:30.174368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:30.175345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.175496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:30.175542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:30.175819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:30.175883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:30.176068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:30.176146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:30.178312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:30.178376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:30.178551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.178589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:30.178875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.179084Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:30.179187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:30.179222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.179258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:30.179286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.179343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:30.179394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.179425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:30.179469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:30.179531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:30.179587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:30.179620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:30.181517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 04, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T17:55:30.309964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:55:30.310800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:55:30.310873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:55:30.310911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:55:30.310953Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-17T17:55:30.310982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:55:30.311043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T17:55:30.313308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-17T17:55:30.313446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-17T17:55:30.314071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.314255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:30.314313Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000005 
2025-04-17T17:55:30.314416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T17:55:30.314598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:30.314677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:30.315092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:55:30.316480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:55:30.318156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:30.318207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:30.318339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:30.318472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T17:55:30.318577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.318618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-17T17:55:30.318655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T17:55:30.318675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T17:55:30.318916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.318952Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:55:30.319044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:55:30.319077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:55:30.319111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:55:30.319139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:55:30.319173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T17:55:30.319229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:55:30.319284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:55:30.319327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:55:30.319400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T17:55:30.319433Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:55:30.319467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-17T17:55:30.319495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:55:30.319519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-04-17T17:55:30.320317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:55:30.320411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:55:30.320447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:55:30.320480Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:55:30.320517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T17:55:30.321973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:55:30.322061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:55:30.322094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:55:30.322121Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-17T17:55:30.322155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:55:30.322275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T17:55:30.325106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:55:30.325930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T17:55:30.326195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:55:30.326238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:55:30.326715Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T17:55:30.326809Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:55:30.326843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:395:2386] TestWaitNotification: OK eventTxId 104 2025-04-17T17:55:30.327380Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:55:30.327576Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 208us result status StatusSuccess 2025-04-17T17:55:30.327906Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|80.6%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ...
results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest
>> TExternalTableTest::ReadOnlyMode [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T17:55:29.232787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:29.232916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.232963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:29.233012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:29.233061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:29.233086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:29.233145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.233242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:29.233638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:29.352636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T17:55:29.352688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:29.366370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:29.366512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:29.366671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:29.386618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:29.387203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:29.387835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:29.388032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:29.390152Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:29.391314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:29.391369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:29.391497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:29.391542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:29.391575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:29.391767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.419780Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:55:29.717330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:29.717545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.717784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:29.717978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:29.718036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.723543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:29.723766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:29.723970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.724030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:29.724076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:29.724113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:29.734834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.734904Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:29.734942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:29.737098Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.737156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.737207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:29.737251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.740814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:29.751132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:29.751346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:29.752332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:29.752478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:29.752532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:29.752785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:29.752834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:29.752988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:29.753070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:29.755334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:29.755376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:29.755531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:29.755579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:29.755877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:29.755920Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:29.756019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:29.756056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.756100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:29.756141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.756185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:29.756224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:29.756253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:29.756280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:29.756346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:29.756381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:29.756407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:29.758121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... HEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:30.424788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:55:30.424860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-17T17:55:30.431543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2025-04-17T17:55:30.431755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-17T17:55:30.432032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:30.432072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:30.432344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-17T17:55:30.432463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.432501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-17T17:55:30.432546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-04-17T17:55:30.432747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
129:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.432804Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:30.432855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-04-17T17:55:30.432988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:30.434506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.434615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.434651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-04-17T17:55:30.434685Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T17:55:30.434736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-17T17:55:30.440849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.440955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.440983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-04-17T17:55:30.441021Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-04-17T17:55:30.441054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-17T17:55:30.441138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-04-17T17:55:30.446970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-17T17:55:30.447399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-04-17T17:55:30.448815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T17:55:30.450283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.450409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:30.450458Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-04-17T17:55:30.450597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2025-04-17T17:55:30.451140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T17:55:30.451222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-17T17:55:30.451819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2025-04-17T17:55:30.453772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:30.453832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:30.453991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-17T17:55:30.454115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.454144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-17T17:55:30.454188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-04-17T17:55:30.454405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.454443Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2025-04-17T17:55:30.454528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-17T17:55:30.454559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T17:55:30.454596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-17T17:55:30.454636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T17:55:30.454669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-17T17:55:30.454726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T17:55:30.454764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-17T17:55:30.454795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-17T17:55:30.454869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 
2025-04-17T17:55:30.454908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-17T17:55:30.454942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T17:55:30.455008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-04-17T17:55:30.456043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.456124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.456158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-17T17:55:30.456215Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T17:55:30.456256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-17T17:55:30.461384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.461505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T17:55:30.461543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-17T17:55:30.461574Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-17T17:55:30.461606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-17T17:55:30.461690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-17T17:55:30.470925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T17:55:30.479115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129
>> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913054.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913054.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913054.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913054.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913054.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913054.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124911854.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913054.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913054.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124911854.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124911854.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124911854.000000s;Name=;Codec=}; 2025-04-17T17:54:14.881879Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:54:15.019355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:54:15.057845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:54:15.058223Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:54:15.067086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:54:15.067336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:54:15.067572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:54:15.067711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:54:15.067843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:54:15.067952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:54:15.068065Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:54:15.068181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:54:15.068288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:54:15.068395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:54:15.068502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:54:15.068603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:54:15.103365Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:54:15.103554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:54:15.103623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:54:15.103813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:15.104017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:54:15.104109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:54:15.104155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:54:15.104260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:54:15.104322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:54:15.104371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:54:15.104400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:54:15.104557Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:15.104617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:54:15.104655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:54:15.104683Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:54:15.104763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:54:15.104820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:54:15.104861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:54:15.104894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:54:15.104964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:54:15.104999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:54:15.105025Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:54:15.105075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:54:15.105122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:54:15.105158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:54:15.105552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-17T17:54:15.105636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T17:54:15.105713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T17:54:15.105791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-17T17:54:15.105983Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:54:15.106042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:54:15.106088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:54:15.110504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:54:15.110598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:54:15.110641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T17:55:29.849790Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912203446;tx_id=18446744073709551615;;current_snapshot_ts=1744912456082; 2025-04-17T17:55:29.849852Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:55:29.849909Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:55:29.849956Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:55:29.850039Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:55:29.850275Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744912503446:max} readable: {1744912503446:max} at tablet 9437184 2025-04-17T17:55:29.850401Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T17:55:29.850537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503446:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T17:55:29.850590Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503446:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T17:55:29.850960Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503446:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T17:55:29.851031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503446:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T17:55:29.851458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744912503446:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1974:3979];trace_detailed=; 2025-04-17T17:55:29.851844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T17:55:29.852073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T17:55:29.852226Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:29.852353Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:29.852724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:55:29.852821Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:29.852931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:29.852974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1974:3979] finished for tablet 9437184 2025-04-17T17:55:29.853379Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1973:3978];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744912529851396,"name":"_full_task","f":1744912529851396,"d_finished":0,"c":0,"l":1744912529853031,"d":1635},"events":[{"name":"bootstrap","f":1744912529851565,"d_finished":811,"c":1,"l":1744912529852376,"d":811},{"a":1744912529852703,"name":"ack","f":1744912529852703,"d_finished":0,"c":0,"l":1744912529853031,"d":328},{"a":1744912529852680,"name":"processing","f":1744912529852680,"d_finished":0,"c":0,"l":1744912529853031,"d":351},{"name":"ProduceResults","f":1744912529852157,"d_finished":421,"c":2,"l":1744912529852952,"d":421},{"a":1744912529852956,"name":"Finish","f":1744912529852956,"d_finished":0,"c":0,"l":1744912529853031,"d":75}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:55:29.853453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1973:3978];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:55:29.853890Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1973:3978];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744912529851396,"name":"_full_task","f":1744912529851396,"d_finished":0,"c":0,"l":1744912529853503,"d":2107},"events":[{"name":"bootstrap","f":1744912529851565,"d_finished":811,"c":1,"l":1744912529852376,"d":811},{"a":1744912529852703,"name":"ack","f":1744912529852703,"d_finished":0,"c":0,"l":1744912529853503,"d":800},{"a":1744912529852680,"name":"processing","f":1744912529852680,"d_finished":0,"c":0,"l":1744912529853503,"d":823},{"name":"ProduceResults","f":1744912529852157,"d_finished":421,"c":2,"l":1744912529852952,"d":421},{"a":1744912529852956,"name":"Finish","f":1744912529852956,"d_finished":0,"c":0,"l":1744912529853503,"d":547}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-17T17:55:29.853985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:55:29.851006Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T17:55:29.854045Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:55:29.854167Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 |80.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> TTopicReaderTests::TestRun_ReadOneMessage [GOOD]
>> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1
>> TestYmqHttpProxy::TestGetQueueUrl
>> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed
>> TestKinesisHttpProxy::TestPing
>> KqpScan::ScanRetryReadRanges [GOOD]
>> KqpSystemView::NodesRange1 [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams
>> BSCReadOnlyPDisk::ReadOnlyOneByOne
>> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling
|80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest
|80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
|80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
|80.6%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log}
|80.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log}
|80.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
>> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest
>> KqpScan::ScanRetryReadRanges [GOOD]
Test command err: 2025-04-17T17:55:12.526255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:12.527162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:12.527371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:12.528587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:12.529273Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:12.529337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001882/r3tmp/tmpjKDBQK/pdisk_1.dat 2025-04-17T17:55:12.958015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:13.196020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:55:13.320721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:13.320851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:13.332410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:13.332539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:13.351681Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:13.352250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:13.352546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:13.686051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:14.592968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1399:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:14.593050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1409:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:14.593104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:14.598954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:55:15.205052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1413:2843], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:55:15.334002Z node 1 :TX_PROXY ERROR: Actor# [1:1543:2914] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:16.444316Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e7zgz9kgxc6anp36y34sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUxYmJlMGEtOGU3MWVkOWMtNWFmMjhkMjYtMzExMzU2ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2025-04-17T17:55:17.266297Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e81c886k8v6bnd762jv2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI1ZmVmMGMtNTIzNzhlMjgtMTgxZTAxODAtNGY5Zjc1ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1622:2966] -> [2:1578:2437] -- EvScanData from [2:1626:2444]: pass 2025-04-17T17:55:18.087128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e81c886k8v6bnd762jv2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI1ZmVmMGMtNTIzNzhlMjgtMTgxZTAxODAtNGY5Zjc1ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-04-17T17:55:18.090145Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-17T17:55:27.740594Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:55:27.740924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:27.741119Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-17T17:55:27.743097Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T17:55:27.743612Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:27.743768Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001882/r3tmp/tmp5iAoa0/pdisk_1.dat
2025-04-17T17:55:28.161453Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:55:28.405079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T17:55:28.518989Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:55:28.519164Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:55:28.523481Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:55:28.523627Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:55:28.546288Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4
2025-04-17T17:55:28.546972Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:28.547484Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:28.902291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T17:55:29.708620Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1399:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:29.708721Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1410:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:29.708781Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:29.725734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T17:55:30.351408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1413:2843], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T17:55:30.467852Z node 3 :TX_PROXY ERROR: Actor# [3:1543:2914] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:55:31.171549Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e8e9a0ceqgta6mxn2z12d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDk0OTUzM2MtMmEyNzk3YS0yMzVhZjVjZS1mMjZlZGE5NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
-- nodeId: 4
2025-04-17T17:55:31.905380Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e8fr549wn1ft86rsywvza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTgzMDE4NmEtYjA5YjBkNzEtMTY2ZjUyYzQtYzBlZTJkYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
-- EvScan [3:1623:2967] -> [4:1578:2437]
-- EvScanData from [4:1627:2444]: pass
-- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2}
-- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2}
2025-04-17T17:55:31.918970Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD]
Test command err: Trying to start YDB, gRPC: 8512, MsgBus: 10104
2025-04-17T17:55:11.309922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342171034597918:2076];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:11.321311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:55:11.368737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342171004430538:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:11.369138Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:55:11.406057Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342171453888107:2074];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:11.406102Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:55:11.438555Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342172709227572:2074];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:11.438595Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T17:55:11.590061Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494342171457062402:2072];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:11.607662Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bd1/r3tmp/tmpjiTxlL/pdisk_1.dat
2025-04-17T17:55:12.407459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:12.422335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:12.426367Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:12.487648Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:12.714369Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:12.918837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:55:12.918947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:55:12.920495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:55:12.920546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:55:12.925884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:55:12.925952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:55:12.926127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:55:12.926162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:55:12.926534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:55:12.926583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:55:12.931505Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4
2025-04-17T17:55:12.931553Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-17T17:55:12.931567Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5
2025-04-17T17:55:12.931581Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3
2025-04-17T17:55:12.931810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:12.932330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:12.933458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:12.943318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:12.957802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:13.103183Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 8512, node 1
2025-04-17T17:55:13.327558Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T17:55:13.327670Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T17:55:13.412739Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:55:13.909179Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:55:13.909205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:55:13.909226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:55:13.909360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10104
2025-04-17T17:55:16.332080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342171034597918:2076];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:16.332187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:55:16.373936Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342171004430538:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:16.374001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:55:16.448560Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342172709227572:2074];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:16.448645Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:55:16.470853Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494342171453888107:2074];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:16.470939Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:55:16.548505Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494342171457062402:2072];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:55:16.550863Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
TClient is connected to server localhost:10104
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:55:16.793015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:16.863557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:17.503932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:18.561237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:18.951038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:55:23.506109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342222574207573:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:23.510908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:24.305797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480
2025-04-17T17:55:24.453578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480
2025-04-17T17:55:24.597952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480
2025-04-17T17:55:24.736259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480
2025-04-17T17:55:24.906143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480
2025-04-17T17:55:25.135476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480
2025-04-17T17:55:25.457860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725668:0, at schemeshard: 72057594046644480
2025-04-17T17:55:25.622478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342231164142988:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:25.622541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:25.622876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342231164142993:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:55:25.626370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725669:3, at schemeshard: 72057594046644480
2025-04-17T17:55:25.682489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342231164142995:2420], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725669 completed, doublechecking }
2025-04-17T17:55:25.785768Z node 1 :TX_PROXY ERROR: Actor# [1:7494342231164143070:4293] txid# 281474976725670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:55:27.894989Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912527874, txId: 281474976725672] shutting down
2025-04-17T17:55:28.016342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T17:55:28.016384Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:55:28.144870Z node 4 :BS_PROXY_PUT ERROR: [ca135e541e534833] Result# TEvPutResult {Id# [72075186224037891:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037891:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-04-17T17:55:28.143350Z node 3 :BS_PROXY_PUT ERROR: [41fd90673f9f50d0] Result# TEvPutResult {Id# [72075186224037895:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-04-17T17:55:28.143655Z node 5 :BS_PROXY_PUT ERROR: [0ab2883fd744570f] Result# TEvPutResult {Id# [72075186224037892:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037892:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-04-17T17:55:28.163590Z node 2 :BS_PROXY_PUT ERROR: [d0db66b176c5c920] Result# TEvPutResult {Id# [72075186224037897:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037897:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
|80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
>> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD]
>> TKesusTest::TestAcquireSemaphoreViaDecrease
|80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|80.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
>> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD]
>> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD]
Test command err: RandomSeed# 16017878353888149698
2025-04-17T17:55:34.259231Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.259432Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.259508Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.259573Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.259636Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.259694Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.259777Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.260822Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.260918Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.260966Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.261013Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.261075Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.261134Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.261199Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.261299Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.261371Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.261415Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.261479Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.261518Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.261550Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.261587Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:34.271828Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.271927Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.271978Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.272059Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.272116Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.272169Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:34.272218Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
>> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly
>> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys [GOOD]
>> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning
>> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
>> Compression::WriteRAW [GOOD]
>> Compression::WriteGZIP
>> BasicUsage::MaxByteSizeEqualZero [GOOD]
>> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage
>> KqpScan::ScanDuringSplitThenMerge [GOOD]
>> KqpScan::ScanPg
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
Test command err: 2025-04-17T17:54:52.080176Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:54:52.080299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:54:52.106201Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:54:52.106507Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:54:52.130895Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:54:52.131455Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=3486487813719125220, session=0, seqNo=0)
2025-04-17T17:54:52.131637Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-17T17:54:52.143330Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=3486487813719125220, session=1)
2025-04-17T17:54:52.143554Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=5900234322329403334, session=0, seqNo=0)
2025-04-17T17:54:52.143630Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-17T17:54:52.157829Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=5900234322329403334, session=2)
2025-04-17T17:54:52.158432Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:143:2167], cookie=16203474902141875080, name="Sem1", limit=1)
2025-04-17T17:54:52.158566Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1"
2025-04-17T17:54:52.173924Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:143:2167], cookie=16203474902141875080)
2025-04-17T17:54:52.174253Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Sem1" count=1)
2025-04-17T17:54:52.174427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1
2025-04-17T17:54:52.174617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Sem1" count=1)
2025-04-17T17:54:52.186448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111)
2025-04-17T17:54:52.186551Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222)
2025-04-17T17:54:52.187196Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2175], cookie=2896490424428455750, name="Sem1")
2025-04-17T17:54:52.187291Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2175], cookie=2896490424428455750)
2025-04-17T17:54:52.187721Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2178], cookie=6391877681033611100, name="Sem1")
2025-04-17T17:54:52.187785Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2178], cookie=6391877681033611100)
2025-04-17T17:54:52.607611Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:52.623511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:52.998450Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:53.011816Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:53.369968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:53.381974Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:53.731992Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:53.747143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:54.113409Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:54.126267Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:54.467910Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:54.482016Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:54.822724Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:54.836257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:55.175955Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:55.191930Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:55.552216Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:55.568571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:55.950988Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:55.963378Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:56.333389Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:56.345589Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:56.713192Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:56.726091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:57.079934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:57.091853Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:57.442260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:57.454564Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:57.836607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:57.848804Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:58.210016Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:58.222281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:58.580476Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:58.592576Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:58.963165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:58.975237Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:59.325322Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:59.337401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:54:59.709663Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:54:59.722128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:00.089061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:00.101587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:00.455828Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:00.467888Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:00.836895Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:00.851582Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:01.210502Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:01.227832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:01.618522Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:01.630537Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:01.997366Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:02.009979Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:02.382609Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:02.395434Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:02.758820Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:02.771231Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:03.158478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:03.170906Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:03.569579Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:03.581584Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:03.934751Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:03.951493Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:04.332201Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:04.345139Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:04.702620Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:04.719741Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:05.105686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:05.123781Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:05.500740Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:05.513203Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:05.874695Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:05.886988Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:06.253770Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:06.266244Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:06.617412Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:06.629749Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:06.981375Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:06.993700Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:07.364593Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:07.376970Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:07.738881Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:07.752444Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:08.118593Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:08.132116Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:08.499531Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:08.515175Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:08.942587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:08.956040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:09.379504Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:09.396361 ...
UG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:25.196341Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:25.611218Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:25.630858Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:26.050696Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:26.066919Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:26.506688Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:26.518883Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:26.911385Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:26.926973Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:27.314343Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:27.334773Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:27.735342Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:27.756511Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:28.158508Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:28.172856Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:28.553374Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:28.569922Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:28.945916Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:28.958825Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:29.327470Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:29.346873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:29.760473Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:29.778756Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:30.215213Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:30.236935Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:30.746501Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:30.766929Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:31.172695Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:31.185443Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:31.566518Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:31.587136Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:31.987783Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:32.003160Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:32.410496Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:32.423912Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:32.818560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:32.841501Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:33.231569Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:33.248666Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:33.661616Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:33.674897Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:34.055190Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:34.075007Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:34.462222Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute
2025-04-17T17:55:34.486976Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
2025-04-17T17:55:34.898571Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1)
2025-04-17T17:55:34.898652Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1
2025-04-17T17:55:34.898702Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link
2025-04-17T17:55:34.911145Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1)
2025-04-17T17:55:34.927057Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:410:2409], cookie=15028929942678340843, name="Sem1")
2025-04-17T17:55:34.927163Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:410:2409], cookie=15028929942678340843)
2025-04-17T17:55:35.829002Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:55:35.829122Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:55:35.887293Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:55:35.887905Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:55:35.914265Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:55:35.914790Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=8752545480607151314, session=0, seqNo=0)
2025-04-17T17:55:35.914962Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-17T17:55:35.928227Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=8752545480607151314, session=1)
2025-04-17T17:55:35.928536Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=9083876282485479315, session=0, seqNo=0)
2025-04-17T17:55:35.928659Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-17T17:55:35.946835Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=9083876282485479315, session=2)
2025-04-17T17:55:35.947161Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=9803478509739838556, session=0, seqNo=0)
2025-04-17T17:55:35.947303Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3
2025-04-17T17:55:35.964883Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=9803478509739838556, session=3)
2025-04-17T17:55:35.965483Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=9246596423925728151, name="Sem1", limit=3)
2025-04-17T17:55:35.965631Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1"
2025-04-17T17:55:35.980445Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=9246596423925728151)
2025-04-17T17:55:35.980757Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2)
2025-04-17T17:55:35.980921Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1
2025-04-17T17:55:35.981144Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=1)
2025-04-17T17:55:35.981225Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2
2025-04-17T17:55:35.981346Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1)
2025-04-17T17:55:35.995957Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111)
2025-04-17T17:55:35.996044Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222)
2025-04-17T17:55:35.996077Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333)
2025-04-17T17:55:35.996729Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=8371732308833523401, name="Sem1")
2025-04-17T17:55:35.996823Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=8371732308833523401)
2025-04-17T17:55:35.997296Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=7852641156437501463, name="Sem1")
2025-04-17T17:55:35.997383Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=7852641156437501463)
2025-04-17T17:55:35.997637Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=444, session=1, semaphore="Sem1" count=1)
2025-04-17T17:55:35.997761Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3
2025-04-17T17:55:36.014832Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=444)
2025-04-17T17:55:36.015533Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2185], cookie=2684284419313173729, name="Sem1")
2025-04-17T17:55:36.015631Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2185], cookie=2684284419313173729)
2025-04-17T17:55:36.016138Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2188], cookie=4530725823876980581, name="Sem1")
2025-04-17T17:55:36.016208Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2188], cookie=4530725823876980581)
2025-04-17T17:55:36.041864Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-17T17:55:36.041963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-17T17:55:36.042618Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-17T17:55:36.043196Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-17T17:55:36.095019Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-17T17:55:36.095210Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1
2025-04-17T17:55:36.095291Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2
2025-04-17T17:55:36.095321Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3
2025-04-17T17:55:36.095677Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:204:2218], cookie=11602072805126429399, name="Sem1")
2025-04-17T17:55:36.095781Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:204:2218], cookie=11602072805126429399)
2025-04-17T17:55:36.096395Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:213:2226], cookie=754164539043040374, name="Sem1")
2025-04-17T17:55:36.096470Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:213:2226], cookie=754164539043040374)
>> HttpRequest::Probe [GOOD]
>> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD]
>> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD]
>> PersQueueSdkReadSessionTest::SettingsValidation
|80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest
>> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD]
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD]
>> DataShardVolatile::DistributedWriteAsymmetricExecute
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD]
Test command err: RandomSeed# 4819893977621581939
2025-04-17T17:55:36.914678Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.914868Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.914956Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.915047Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.915137Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.915235Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.915350Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.915429Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.916517Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.916609Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.916675Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.916734Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.916790Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.916845Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.916900Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.916955Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.917047Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.917114Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.917166Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.917212Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.917250Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.917288Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.917322Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.917354Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-17T17:55:36.928075Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.928190Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.928254Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.928315Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.928373Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.928468Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.928528Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:36.928593Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-17T17:55:37.130469Z 1 00h01m30.011024s :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "Some error reason" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> GenericFederatedQuery::YdbManagedSelectAll |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> GenericFederatedQuery::IcebergHiveBasicSelectAll >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> GenericFederatedQuery::YdbFilterPushdown >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> TestKinesisHttpProxy::CreateDeleteStream >> TestKinesisHttpProxy::TestPing [GOOD] >> TKesusTest::TestAcquireSemaphore [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> YdbOlapStore::LogPagingAfter [GOOD] >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> TestKinesisHttpProxy::TestRequestBadJson >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-04-17T17:54:56.520147Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.520277Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.539586Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.539889Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.567874Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.568417Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=6798238130212498303, session=0, seqNo=0) 2025-04-17T17:54:56.568593Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:56.581184Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=6798238130212498303, session=1) 2025-04-17T17:54:56.581536Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=10282905539146389034, session=0, seqNo=0) 2025-04-17T17:54:56.581657Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:56.593801Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], 
cookie=10282905539146389034, session=2) 2025-04-17T17:54:56.594648Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:56.594822Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:56.594947Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:56.595153Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Lock2" count=1) 2025-04-17T17:54:56.595221Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:54:56.595288Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-17T17:54:56.595424Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=333, session=1, semaphore="Lock2" count=1) 2025-04-17T17:54:56.595493Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-17T17:54:56.607748Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:54:56.607844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:54:56.607872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=333) 2025-04-17T17:54:56.608497Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=10309373723227227865, name="Lock1") 2025-04-17T17:54:56.608575Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=10309373723227227865) 2025-04-17T17:54:56.609018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2175], cookie=11536926772354454932, name="Lock2") 2025-04-17T17:54:56.609097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2175], cookie=11536926772354454932) 2025-04-17T17:54:56.621472Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.621553Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.622108Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.622902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.661637Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.661782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:56.661821Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-17T17:54:56.661849Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-17T17:54:56.662136Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:191:2205], cookie=5976980730278588205, name="Lock1") 2025-04-17T17:54:56.662236Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:191:2205], cookie=5976980730278588205) 
2025-04-17T17:54:56.662698Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:199:2212], cookie=5104286880841111085, name="Lock2") 2025-04-17T17:54:56.662762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:199:2212], cookie=5104286880841111085) 2025-04-17T17:54:57.087082Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:57.099357Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:57.448517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:57.460329Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:57.800013Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:57.812072Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:58.161912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:58.173852Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:58.527817Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:58.540261Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:58.891373Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:58.903557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:59.232538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:59.244661Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:59.585900Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:59.597906Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:59.953841Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:59.966286Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:00.349800Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:00.361798Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:00.727218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:00.743016Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.105518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.117667Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.475369Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.487883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.843978Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.863101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:02.270838Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:02.287056Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:02.667290Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:02.683340Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.070493Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-17T17:55:03.083543Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.451545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:03.464594Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.822374Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:03.838139Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:04.258596Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:04.270809Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:04.656826Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:04.670672Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.065259Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.083856Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.479105Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.491473Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.846607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.860388Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.240172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.252478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.603711Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.622475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.978090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.990398Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:07.346023Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:07.363372Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:07.723619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:07.738986Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.142602Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:365:2367], cookie=7351154852187760145, name="Lock1") 2025-04-17T17:55:08.142734Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:365:2367], cookie=7351154852187760145) 2025-04-17T17:55:08.143311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:368:2370], cookie=6878829443234689044, name="Lock2") 2025-04-17T17:55:08.143383Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:368:2370], cookie=6878829443234689044) 2025-04-17T17:55:08.185620Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.198375Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.589664Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.606517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-17T17:55:08.985012Z node 1 :KESUS_TABLET DEBUG: [72057594 ... 25-04-17T17:55:31.055299Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:31.455179Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:31.471960Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:31.879578Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:31.893843Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:32.314854Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:32.334931Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:32.733539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:32.751020Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:33.197243Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:33.215244Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:33.616996Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:33.634862Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:34.050471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:34.070835Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:34.490685Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:34.507077Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:34.912901Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:34.936775Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:35.344946Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:35.357204Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:35.774880Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:35.794108Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:36.192258Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:36.211017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:36.590563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:36.608309Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:37.011463Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:37.026580Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:37.421057Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:37.436613Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:37.842602Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:37.860053Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:38.298745Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:38.322263Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:38.729208Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:38.748018Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:39.142602Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:39.171975Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:39.574633Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:55:39.574714Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:55:39.574769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-17T17:55:39.574870Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-17T17:55:39.574932Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-17T17:55:39.574967Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:55:39.588101Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:55:39.588809Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:329:2342], cookie=4627506183125602475, name="Lock1") 2025-04-17T17:55:39.588900Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:329:2342], cookie=4627506183125602475) 2025-04-17T17:55:39.589437Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:332:2345], cookie=1962615497778547114, name="Lock2") 2025-04-17T17:55:39.589515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:332:2345], cookie=1962615497778547114) 2025-04-17T17:55:39.590019Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:335:2348], cookie=14738565871716528506) 2025-04-17T17:55:39.590083Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:335:2348], cookie=14738565871716528506) 2025-04-17T17:55:39.623143Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:39.623244Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:39.623889Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:39.624574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:39.701294Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:39.701453Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-17T17:55:39.701505Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-17T17:55:39.701868Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:375:2378], cookie=953624689528127379) 2025-04-17T17:55:39.701945Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:375:2378], cookie=953624689528127379) 2025-04-17T17:55:39.702608Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:382:2384], cookie=1820419237643017606, name="Lock1") 2025-04-17T17:55:39.702686Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:382:2384], 
cookie=1820419237643017606) 2025-04-17T17:55:39.703278Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:385:2387], cookie=17846941353339771253, name="Lock2") 2025-04-17T17:55:39.703352Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:385:2387], cookie=17846941353339771253) 2025-04-17T17:55:40.524838Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:40.524950Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:40.572748Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:40.578541Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:40.608207Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:40.608757Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=10873263014641314742, session=0, seqNo=0) 2025-04-17T17:55:40.608943Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:55:40.624546Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=10873263014641314742, session=1) 2025-04-17T17:55:40.624905Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=576319523104673682, session=0, seqNo=0) 2025-04-17T17:55:40.625041Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:55:40.640274Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=576319523104673682, session=2) 2025-04-17T17:55:40.640632Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-17T17:55:40.656279Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-17T17:55:40.656947Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=4872334122175578022, name="Sem1", limit=1) 2025-04-17T17:55:40.657121Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:55:40.671513Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=4872334122175578022) 2025-04-17T17:55:40.672024Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-04-17T17:55:40.684258Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-17T17:55:40.684651Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=1, semaphore="Sem1" count=1) 2025-04-17T17:55:40.684838Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:55:40.685036Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=2, semaphore="Sem1" count=1) 2025-04-17T17:55:40.700210Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-17T17:55:40.700301Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 
2025-04-17T17:55:40.700972Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=6000836491200022623, name="Sem1") 2025-04-17T17:55:40.701069Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], cookie=6000836491200022623) 2025-04-17T17:55:40.701612Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2182], cookie=14643674173866283707, name="Sem1") 2025-04-17T17:55:40.701697Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2182], cookie=14643674173866283707) 2025-04-17T17:55:40.702193Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:161:2185], cookie=14573451079146963090, name="Sem1", force=0) 2025-04-17T17:55:40.716295Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:161:2185], cookie=14573451079146963090) 2025-04-17T17:55:40.716983Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2190], cookie=10626103797107685169, name="Sem1", force=1) 2025-04-17T17:55:40.717094Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-17T17:55:40.731738Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2190], cookie=10626103797107685169) >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: 2025-04-17T17:48:09.432498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:454:2414], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:48:09.432822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:48:09.432999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001547/r3tmp/tmp6wucz9/pdisk_1.dat 2025-04-17T17:48:10.713132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27394, node 1 2025-04-17T17:48:11.218418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:48:11.218470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:48:11.218500Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:48:11.218982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:48:11.221325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:48:11.320487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:11.320593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:11.351492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14546 2025-04-17T17:48:14.287470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:48:33.822665Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T17:48:33.948639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:33.948758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:34.018062Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:48:34.036334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:34.419995Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.420583Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.421069Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.421232Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.421506Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.421591Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.421693Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.421808Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.421876Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T17:48:34.711362Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:48:34.711463Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:48:34.731766Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:48:35.743729Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:36.278203Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T17:48:36.278847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T17:48:36.398284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T17:48:36.414322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T17:48:36.421309Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T17:48:36.422008Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T17:48:36.425474Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T17:48:36.426142Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T17:48:36.428526Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T17:48:36.429105Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T17:48:36.434515Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T17:48:36.660681Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:48:36.661881Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T17:48:36.759719Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1884:2606] 2025-04-17T17:48:36.823034Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1899:2615] 2025-04-17T17:48:36.845459Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1899:2615], schemeshard id = 72075186224037897 2025-04-17T17:48:36.949978Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T17:48:37.124096Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T17:48:37.124441Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T17:48:37.125123Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T17:48:37.302436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T17:48:37.405570Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T17:48:37.414378Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T17:48:38.960591Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T17:48:40.042687Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T17:48:40.138376Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T17:48:44.684003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:44.684775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:48:44.857431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T17:48:46.554839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:48:46.555169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:48:46.555620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:48:46.555802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:48:46.555927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:48:46.556040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:48:46.556155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:48:46.556343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:48:46.556492Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:48:46.556653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:48:46.556767Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:48:46.556908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2380:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:48:46.666005Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037900;self_id=[2:2393:2886];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:48:46.666109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2393:2886];tablet_id=72075186224037900;process= ... 2707Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:55:25.222734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:55:25.222760Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:55:25.222788Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T17:55:26.545461Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T17:55:26.545548Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=D F 1{7/ 2025-04-17T17:55:26.545610Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T17:55:27.874426Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T17:55:27.874596Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T17:55:27.874663Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T17:55:27.884011Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T17:55:27.900167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T17:55:27.900549Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T17:55:27.904509Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T17:55:27.947260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T17:55:27.973221Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T17:55:27.973445Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T17:55:27.982353Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15895:9993], server id = [2:15900:9998], tablet id = 72075186224037899, status = OK 2025-04-17T17:55:27.982567Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15895:9993], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.128341Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15896:9994], server id = [2:15901:9999], tablet id = 72075186224037900, status = OK 2025-04-17T17:55:28.128482Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15896:9994], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.128853Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15897:9995], server id = [2:15902:10000], tablet id = 72075186224037901, status = OK 2025-04-17T17:55:28.128915Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15897:9995], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.130313Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15898:9996], server id = [2:15903:10001], tablet id = 72075186224037902, status = OK 2025-04-17T17:55:28.130409Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15898:9996], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.131529Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15899:9997], server id = [2:15905:10003], tablet id = 72075186224037903, status = OK 2025-04-17T17:55:28.131598Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15899:9997], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.142283Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T17:55:28.144031Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15895:9993], server id = [2:15900:9998], tablet id = 72075186224037899 2025-04-17T17:55:28.144092Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.144903Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T17:55:28.145460Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T17:55:28.146093Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15896:9994], server id = [2:15901:9999], tablet id = 72075186224037900 2025-04-17T17:55:28.146126Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.146412Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T17:55:28.146692Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15897:9995], server id = [2:15902:10000], tablet id = 72075186224037901 2025-04-17T17:55:28.146720Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.146998Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15909:10007], server id = [2:15911:10009], tablet id = 72075186224037904, status = OK 2025-04-17T17:55:28.147089Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15909:10007], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.147193Z node 2 
:STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T17:55:28.148047Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15898:9996], server id = [2:15903:10001], tablet id = 72075186224037902 2025-04-17T17:55:28.153495Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.154269Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15912:10010], server id = [2:15914:10012], tablet id = 72075186224037905, status = OK 2025-04-17T17:55:28.154390Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15912:10010], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.154542Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15913:10011], server id = [2:15915:10013], tablet id = 72075186224037906, status = OK 2025-04-17T17:55:28.154599Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15913:10011], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.155805Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15899:9997], server id = [2:15905:10003], tablet id = 72075186224037903 2025-04-17T17:55:28.155846Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.156400Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15916:10014], server id = [2:15917:10015], tablet id = 72075186224037907, status = OK 2025-04-17T17:55:28.156479Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15916:10014], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.157190Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15918:10016], server id = [2:15920:10018], tablet id = 72075186224037908, status = OK 2025-04-17T17:55:28.157293Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15918:10016], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T17:55:28.157501Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T17:55:28.158097Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15909:10007], server id = [2:15911:10009], tablet id = 72075186224037904 2025-04-17T17:55:28.158131Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.163516Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T17:55:28.164286Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T17:55:28.164873Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15912:10010], server id = [2:15914:10012], tablet id = 72075186224037905 2025-04-17T17:55:28.164913Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.165017Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T17:55:28.165242Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15913:10011], server id = [2:15915:10013], tablet id = 72075186224037906 2025-04-17T17:55:28.165275Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.165334Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T17:55:28.165426Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T17:55:28.165674Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T17:55:28.176760Z node 2 :STATISTICS DEBUG: 
[72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T17:55:28.177371Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15916:10014], server id = [2:15917:10015], tablet id = 72075186224037907 2025-04-17T17:55:28.177416Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.208470Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T17:55:28.703660Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15918:10016], server id = [2:15920:10018], tablet id = 72075186224037908 2025-04-17T17:55:28.703723Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T17:55:28.938428Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T17:55:30.694293Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjI3ZWJkZDQtNmY5MmNhMTYtOTkwOGNkMzEtMmM1MmJlZjU=, TxId: 2025-04-17T17:55:30.694437Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjI3ZWJkZDQtNmY5MmNhMTYtOTkwOGNkMzEtMmM1MmJlZjU=, TxId: 2025-04-17T17:55:30.703159Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T17:55:30.738149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T17:55:30.738250Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete.
Send TEvAnalyzeResponse, OperationId=D F 1{7/, ActorId=[1:4079:3316] 2025-04-17T17:55:30.750554Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:15961:9305]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T17:55:30.751122Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:55:30.751185Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T17:55:30.755993Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T17:55:30.756068Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T17:55:30.759781Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-17T17:55:31.040000Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> GenericFederatedQuery::ClickHouseManagedSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-04-17T17:53:08.428783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341644908472164:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:08.428851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a41/r3tmp/tmpAmVtFK/pdisk_1.dat 2025-04-17T17:53:09.096339Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:09.131927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:09.132001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:09.137954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21709, node 1 2025-04-17T17:53:09.440626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:09.440659Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:09.440667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:09.440834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12236 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:53:09.721819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12236 2025-04-17T17:53:09.967464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:53:10.100999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:53:10.101198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:53:10.101470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:53:10.101631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:53:10.101735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:53:10.101861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:53:10.101974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:53:10.102112Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:53:10.102254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:53:10.102393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:53:10.102522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:53:10.102651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494341653498407820:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:53:10.139004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:53:10.139073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:53:10.139295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:53:10.139395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:53:10.139509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:53:10.139639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:53:10.139742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:53:10.139853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:53:10.139970Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:53:10.140067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:53:10.140190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:53:10.140289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494341653498407823:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:53:10.179217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:53:10.179345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:53:10.179635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:53:10.179810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:53:10.179963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:53:10.180116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:53:10.180249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:53:10.180392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:53:10.180547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494341653498407825:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=S ... 
4342283232728360:3190], CA [28:7494342283232728401:3224], CA [28:7494342283232728396:3219], CA [28:7494342283232728336:3172], CA [28:7494342283232728364:3193], CA [28:7494342283232728391:3214], CA [28:7494342283232728404:3227], CA [28:7494342283232728399:3222], CA [28:7494342283232728394:3217], CA [28:7494342283232728340:3175], CA [28:7494342283232728389:3212], CA [28:7494342283232728344:3178], CA [28:7494342283232728392:3215], CA [28:7494342283232728375:3202], CA [28:7494342283232728369:3197], CA [28:7494342283232728409:3231], CA [28:7494342283232728373:3200], CA [28:7494342283232728345:3179], CA [28:7494342283232728379:3205], CA [28:7494342283232728383:3208], CA [28:7494342283232728377:3203], CA [28:7494342283232728349:3182], CA [28:7494342283232728330:3169], CA [28:7494342283232728346:3180], CA [28:7494342283232728353:3185], CA [28:7494342283232728387:3211], CA [28:7494342283232728381:3206], CA [28:7494342283232728328:3167], CA [28:7494342283232728385:3209], CA [28:7494342283232728357:3188], CA [28:7494342283232728361:3191], CA [28:7494342283232728333:3170], CA [28:7494342283232728402:3225], CA [28:7494342283232728397:3220], CA [28:7494342283232728365:3194], CA [28:7494342283232728337:3173], CA [28:7494342283232728405:3228], CA [28:7494342283232728400:3223], CA [28:7494342283232728334:3171], CA [28:7494342283232728341:3176], CA [28:7494342283232728395:3218], CA [28:7494342283232728390:3213], CA [28:7494342283232728398:3221], CA [28:7494342283232728338:3174], CA [28:7494342283232728393:3216], CA [28:7494342283232728406:3229], CA [28:7494342283232728342:3177], CA [28:7494342283232728370:3198], CA [28:7494342283232728374:3201], CA [28:7494342283232728368:3196], CA [28:7494342283232728408:3230], CA [28:7494342283232728371:3199], CA [28:7494342283232728378:3204], CA [28:7494342283232728350:3183], CA [28:7494342283232728348:3181], CA [28:7494342283232728382:3207], CA [28:7494342283232728354:3186], CA [28:7494342283232728329:3168], CA [28:7494342283232728358:3189], CA [28:7494342283232728352:3184], CA [28:7494342283232728386:3210], 2025-04-17T17:55:37.949768Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494342283232728322:3151] TxId: 281474976710670. Ctx: { TraceId: 01js2e8mmr3g0s35h47hf6j4cm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZTFhODJjODUtNjZjNWJjYmQtYmYxY2FmYTMtNDQ1M2YzNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7494342283232728352:3184], task: 19, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 817 Tasks { TaskId: 19 CpuTimeUs: 578 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 27 BuildCpuTimeUs: 551 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-gkdwnlcfnq" NodeId: 28 CreateTimeMs: 1744912537865 UpdateTimeMs: 1744912537896 } MaxMemoryUsage: 1048576 } 2025-04-17T17:55:37.949910Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494342283232728322:3151] TxId: 281474976710670. Ctx: { TraceId: 01js2e8mmr3g0s35h47hf6j4cm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZTFhODJjODUtNjZjNWJjYmQtYmYxY2FmYTMtNDQ1M2YzNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7494342283232728327:3166], CA [28:7494342283232728356:3187], CA [28:7494342283232728362:3192], CA [28:7494342283232728403:3226], CA [28:7494342283232728366:3195], CA [28:7494342283232728360:3190], CA [28:7494342283232728401:3224], CA [28:7494342283232728396:3219], CA [28:7494342283232728336:3172], CA [28:7494342283232728364:3193], CA [28:7494342283232728391:3214], CA [28:7494342283232728404:3227], CA [28:7494342283232728399:3222], CA [28:7494342283232728394:3217], CA [28:7494342283232728340:3175], CA [28:7494342283232728389:3212], CA [28:7494342283232728344:3178], CA [28:7494342283232728392:3215], CA [28:7494342283232728375:3202], CA [28:7494342283232728369:3197], CA [28:7494342283232728409:3231], CA [28:7494342283232728373:3200], CA [28:7494342283232728345:3179], CA [28:7494342283232728379:3205], CA [28:7494342283232728383:3208], CA [28:7494342283232728377:3203], CA [28:7494342283232728349:3182], CA [28:7494342283232728330:3169], CA [28:7494342283232728346:3180], CA [28:7494342283232728353:3185], CA [28:7494342283232728387:3211], CA [28:7494342283232728381:3206], CA [28:7494342283232728328:3167], CA [28:7494342283232728385:3209], CA [28:7494342283232728357:3188], CA [28:7494342283232728361:3191], CA [28:7494342283232728333:3170], CA [28:7494342283232728402:3225], CA [28:7494342283232728397:3220], CA [28:7494342283232728365:3194], CA [28:7494342283232728337:3173], CA [28:7494342283232728405:3228], CA [28:7494342283232728400:3223], CA [28:7494342283232728334:3171], CA [28:7494342283232728341:3176], CA [28:7494342283232728395:3218], CA [28:7494342283232728390:3213], CA [28:7494342283232728398:3221], CA [28:7494342283232728338:3174], CA [28:7494342283232728393:3216], CA [28:7494342283232728406:3229], CA [28:7494342283232728342:3177], CA [28:7494342283232728370:3198], CA [28:7494342283232728374:3201], CA [28:7494342283232728368:3196], CA [28:7494342283232728408:3230], CA [28:7494342283232728371:3199], CA [28:7494342283232728378:3204], CA [28:7494342283232728350:3183], CA [28:7494342283232728348:3181], CA [28:7494342283232728382:3207], CA [28:7494342283232728354:3186], CA [28:7494342283232728329:3168], CA [28:7494342283232728358:3189], CA [28:7494342283232728352:3184], CA [28:7494342283232728386:3210], 2025-04-17T17:55:37.950252Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494342283232728322:3151] TxId: 281474976710670. Ctx: { TraceId: 01js2e8mmr3g0s35h47hf6j4cm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZTFhODJjODUtNjZjNWJjYmQtYmYxY2FmYTMtNDQ1M2YzNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7494342283232728353:3185], task: 20, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 794 Tasks { TaskId: 20 CpuTimeUs: 602 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 31 BuildCpuTimeUs: 571 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-gkdwnlcfnq" NodeId: 28 CreateTimeMs: 1744912537865 UpdateTimeMs: 1744912537897 } MaxMemoryUsage: 1048576 } 2025-04-17T17:55:37.950457Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494342283232728322:3151] TxId: 281474976710670. Ctx: { TraceId: 01js2e8mmr3g0s35h47hf6j4cm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZTFhODJjODUtNjZjNWJjYmQtYmYxY2FmYTMtNDQ1M2YzNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7494342283232728327:3166], CA [28:7494342283232728356:3187], CA [28:7494342283232728362:3192], CA [28:7494342283232728403:3226], CA [28:7494342283232728366:3195], CA [28:7494342283232728360:3190], CA [28:7494342283232728401:3224], CA [28:7494342283232728396:3219], CA [28:7494342283232728336:3172], CA [28:7494342283232728364:3193], CA [28:7494342283232728391:3214], CA [28:7494342283232728404:3227], CA [28:7494342283232728399:3222], CA [28:7494342283232728394:3217], CA [28:7494342283232728340:3175], CA [28:7494342283232728389:3212], CA [28:7494342283232728344:3178], CA [28:7494342283232728392:3215], CA [28:7494342283232728375:3202], CA [28:7494342283232728369:3197], CA [28:7494342283232728409:3231], CA [28:7494342283232728373:3200], CA [28:7494342283232728345:3179], CA [28:7494342283232728379:3205], CA [28:7494342283232728383:3208], CA [28:7494342283232728377:3203], CA [28:7494342283232728349:3182], CA [28:7494342283232728330:3169], CA [28:7494342283232728346:3180], CA [28:7494342283232728353:3185], CA [28:7494342283232728387:3211], CA [28:7494342283232728381:3206], CA [28:7494342283232728328:3167], CA [28:7494342283232728385:3209], CA [28:7494342283232728357:3188], CA [28:7494342283232728361:3191], CA [28:7494342283232728333:3170], CA [28:7494342283232728402:3225], CA [28:7494342283232728397:3220], CA [28:7494342283232728365:3194], CA [28:7494342283232728337:3173], CA [28:7494342283232728405:3228], CA [28:7494342283232728400:3223], CA [28:7494342283232728334:3171], CA [28:7494342283232728341:3176], CA [28:7494342283232728395:3218], CA [28:7494342283232728390:3213], CA [28:7494342283232728398:3221], CA [28:7494342283232728338:3174], CA [28:7494342283232728393:3216], CA [28:7494342283232728406:3229], CA [28:7494342283232728342:3177], CA [28:7494342283232728370:3198], CA [28:7494342283232728374:3201], CA [28:7494342283232728368:3196], CA [28:7494342283232728408:3230], CA [28:7494342283232728371:3199], CA [28:7494342283232728378:3204], CA [28:7494342283232728350:3183], CA [28:7494342283232728348:3181], CA [28:7494342283232728382:3207], CA [28:7494342283232728354:3186], CA [28:7494342283232728329:3168], CA [28:7494342283232728358:3189], CA [28:7494342283232728352:3184], CA [28:7494342283232728386:3210], 2025-04-17T17:55:37.950529Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494342283232728322:3151] TxId: 281474976710670. Ctx: { TraceId: 01js2e8mmr3g0s35h47hf6j4cm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZTFhODJjODUtNjZjNWJjYmQtYmYxY2FmYTMtNDQ1M2YzNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7494342283232728396:3219], task: 54, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T17:55:37.950647Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494342283232728322:3151] TxId: 281474976710670. Ctx: { TraceId: 01js2e8mmr3g0s35h47hf6j4cm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZTFhODJjODUtNjZjNWJjYmQtYmYxY2FmYTMtNDQ1M2YzNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7494342283232728327:3166], CA [28:7494342283232728356:3187], CA [28:7494342283232728362:3192], CA [28:7494342283232728403:3226], CA [28:7494342283232728366:3195], CA [28:7494342283232728360:3190], CA [28:7494342283232728401:3224], CA [28:7494342283232728396:3219], CA [28:7494342283232728336:3172], CA [28:7494342283232728364:3193], CA [28:7494342283232728391:3214], CA [28:7494342283232728404:3227], CA [28:7494342283232728399:3222], CA [28:7494342283232728394:3217], CA [28:7494342283232728340:3175], CA [28:7494342283232728389:3212], CA [28:7494342283232728344:3178], CA [28:7494342283232728392:3215], CA [28:7494342283232728375:3202], CA [28:7494342283232728369:3197], CA [28:7494342283232728409:3231], CA [28:7494342283232728373:3200], CA [28:7494342283232728345:3179], CA [28:7494342283232728379:3205], CA [28:7494342283232728383:3208], CA [28:7494342283232728377:3203], CA [28:7494342283232728349:3182], CA [28:7494342283232728330:3169], CA [28:7494342283232728346:3180], CA [28:7494342283232728353:3185], CA [28:7494342283232728387:3211], CA [28:7494342283232728381:3206], CA [28:7494342283232728328:3167], CA [28:7494342283232728385:3209], CA [28:7494342283232728357:3188], CA [28:7494342283232728361:3191], CA [28:7494342283232728333:3170], CA [28:7494342283232728402:3225], CA [28:7494342283232728397:3220], CA [28:7494342283232728365:3194], CA [28:7494342283232728337:3173], CA [28:7494342283232728405:3228], CA [28:7494342283232728400:3223], CA [28:7494342283232728334:3171], CA [28:7494342283232728341:3176], CA [28:7494342283232728395:3218], CA [28:7494342283232728390:3213], CA [28:7494342283232728398:3221], CA [28:7494342283232728338:3174], CA [28:7494342283232728393:3216], CA [28:7494342283232728406:3229], CA [28:7494342283232728342:3177], CA [28:7494342283232728370:3198], CA [28:7494342283232728374:3201], CA [28:7494342283232728368:3196], CA [28:7494342283232728408:3230], CA [28:7494342283232728371:3199], CA [28:7494342283232728378:3204], CA [28:7494342283232728350:3183], CA [28:7494342283232728348:3181], CA [28:7494342283232728382:3207], CA [28:7494342283232728354:3186], CA [28:7494342283232728329:3168], CA [28:7494342283232728358:3189], CA [28:7494342283232728352:3184], CA [28:7494342283232728386:3210], >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink >> GenericFederatedQuery::IcebergHiveSaSelectAll >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> KqpScan::ScanPg [GOOD] 
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-04-17T17:54:58.516690Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:58.516840Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:58.549105Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:58.549524Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:58.575618Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:58.576082Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=4486510390164960939, session=0, seqNo=0) 2025-04-17T17:54:58.576223Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:58.588315Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=4486510390164960939, session=1) 2025-04-17T17:54:58.589088Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-17T17:54:58.589232Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:58.589357Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:58.601719Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-17T17:54:58.602028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:58.617506Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-17T17:54:58.618247Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=10963724513390652886, name="Lock1") 2025-04-17T17:54:58.618351Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=10963724513390652886) 2025-04-17T17:54:59.103048Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:59.103170Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:59.118478Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:59.118614Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:59.145287Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:59.145888Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=694910691434352487, session=0, seqNo=0) 2025-04-17T17:54:59.145999Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] Created new session 1 2025-04-17T17:54:59.157949Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=694910691434352487, session=1) 2025-04-17T17:54:59.158240Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=10112149412398744596, session=0, seqNo=0) 2025-04-17T17:54:59.158345Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:54:59.170139Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=10112149412398744596, session=2) 2025-04-17T17:54:59.171043Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-17T17:54:59.171165Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-17T17:54:59.171238Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-17T17:54:59.183137Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-04-17T17:54:59.183401Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-17T17:54:59.183510Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-17T17:54:59.183587Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-17T17:54:59.195730Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=112) 2025-04-17T17:54:59.196102Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:59.196303Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:54:59.208124Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-04-17T17:54:59.208225Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=223) 2025-04-17T17:54:59.208630Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-17T17:54:59.209022Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-17T17:54:59.221224Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=333) 2025-04-17T17:54:59.221304Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=334) 2025-04-17T17:54:59.619713Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:59.635041Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:54:59.975263Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:59.987186Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:00.337943Z node 2 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:00.349791Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:00.702651Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:00.716842Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.079424Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.095126Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.426410Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.438487Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:01.787256Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:01.806261Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:02.172897Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:02.195027Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:02.552803Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:02.565396Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:02.966936Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:02.984240Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.372496Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:03.384956Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:03.750080Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:03.767122Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:04.120724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:04.133321Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:04.494561Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:04.512242Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:04.954713Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:04.977482Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.416512Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.438800Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:05.797404Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:05.810978Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.163724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.175912Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.526447Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.539021Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:06.925832Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:06.938029Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:07.318211Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:07.330554Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:07.698285Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:07.710977Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.094375Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.110413Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.480096Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.499417Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:08.886976Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:08.902975Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:09.291865Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:09.311136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:09.684857Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:09.705233Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:10.114671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:10.134823Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:10.529670Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:10.547593Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:10.899200Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2025-04-17T17:55:10.899454Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2025-04-17T17:55:10.917968Z node 2 :KESUS_TABLET DEBUG: [72 ... 
te 2025-04-17T17:55:39.654691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:40.063426Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:40.076892Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:40.455680Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:40.476087Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:40.869962Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:40.902566Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:41.305435Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:41.320951Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:41.702562Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:41.715724Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:42.153126Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:42.171226Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:42.594636Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:42.614935Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:42.982650Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:42.998230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:43.377719Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:43.392365Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:43.772752Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:43.787644Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:44.234120Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:44.248593Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:44.634595Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:44.648871Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:45.038543Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:45.058649Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:45.462550Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:45.479218Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:45.876286Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:45.893955Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:46.334518Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:46.356012Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:46.731941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:46.746989Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:47.140201Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-17T17:55:47.155237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:47.590519Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:47.605997Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:48.013234Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:48.042644Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:48.436363Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:48.451064Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:48.858526Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:48.878752Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:49.279814Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:49.293018Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:49.670106Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:49.683904Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:50.075342Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:55:50.092503Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-17T17:55:50.625874Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-04-17T17:55:50.625974Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-17T17:55:50.646895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-04-17T17:55:50.671110Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:534:2530], cookie=16691511973800441327) 2025-04-17T17:55:50.671236Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:534:2530], cookie=16691511973800441327) 2025-04-17T17:55:50.671812Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:537:2533], cookie=2594773610019767153) 2025-04-17T17:55:50.671914Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:537:2533], cookie=2594773610019767153) 2025-04-17T17:55:50.672403Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:540:2536], cookie=4551000586484753640, name="Lock1") 2025-04-17T17:55:50.672504Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:540:2536], cookie=4551000586484753640) 2025-04-17T17:55:50.673046Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:543:2539], cookie=801200071232664782, name="Lock1") 2025-04-17T17:55:50.673116Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:543:2539], cookie=801200071232664782) 2025-04-17T17:55:51.315514Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:51.315628Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:51.337961Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:51.340099Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2025-04-17T17:55:51.374861Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:51.375397Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=8969479419146054459, session=0, seqNo=0) 2025-04-17T17:55:51.375558Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:55:51.398861Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=8969479419146054459, session=1) 2025-04-17T17:55:51.399225Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=10457861808607190273, session=0, seqNo=0) 2025-04-17T17:55:51.399369Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-17T17:55:51.418867Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=10457861808607190273, session=2) 2025-04-17T17:55:51.419197Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=2875109999113169104, session=0, seqNo=0) 2025-04-17T17:55:51.419330Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-04-17T17:55:51.442833Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=2875109999113169104, session=3) 2025-04-17T17:55:51.443469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=2136394836234272625, name="Sem1", limit=3) 2025-04-17T17:55:51.443618Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-17T17:55:51.462602Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=2136394836234272625) 2025-04-17T17:55:51.462936Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-17T17:55:51.463098Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-17T17:55:51.463285Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=2) 2025-04-17T17:55:51.463475Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-17T17:55:51.482809Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-17T17:55:51.482890Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-17T17:55:51.482920Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-17T17:55:51.483487Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=12337818903955971539, name="Sem1") 2025-04-17T17:55:51.483571Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=12337818903955971539) 2025-04-17T17:55:51.484048Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2181], cookie=2420254011168687448, name="Sem1") 2025-04-17T17:55:51.484120Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2181], 
cookie=2420254011168687448) 2025-04-17T17:55:51.484336Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=444, name="Sem1") 2025-04-17T17:55:51.484431Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-17T17:55:51.484491Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-17T17:55:51.484546Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-17T17:55:51.506835Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=444) 2025-04-17T17:55:51.507542Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=12375229902967731648, name="Sem1") 2025-04-17T17:55:51.507647Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=12375229902967731648) 2025-04-17T17:55:51.508182Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:165:2189], cookie=1041258336672736468, name="Sem1") 2025-04-17T17:55:51.508255Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:165:2189], cookie=1041258336672736468) >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-04-17T17:54:56.784032Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:54:56.784165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:54:56.802791Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:54:56.803093Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:54:56.828213Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:54:56.828817Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=12075819053400434940, session=0, seqNo=0) 2025-04-17T17:54:56.828991Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:54:56.841164Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=12075819053400434940, session=1) 2025-04-17T17:54:56.841941Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:132:2158], cookie=9096996312612046809 
2025-04-17T17:54:56.842467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:145:2169], cookie=15422371247151674029) 2025-04-17T17:54:56.842576Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:145:2169], cookie=15422371247151674029) 2025-04-17T17:54:57.245640Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-17T17:54:57.257531Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete ... (repeated periodic TTxSelfCheck::Execute/Complete DEBUG cycles elided: node 1 continues them roughly every 400 ms until 17:55:18.287172Z, and node 2 runs the same cycles from about 17:55:31Z until 17:55:51.044304Z; part of this region was already truncated by the test framework) ... 2025-04-17T17:55:51.423535Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-17T17:55:51.423608Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-17T17:55:51.439200Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-17T17:55:51.451345Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:572:2566], cookie=4709088314110615205) 2025-04-17T17:55:51.451471Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:572:2566], cookie=4709088314110615205) 2025-04-17T17:55:52.306290Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:52.306392Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:52.324828Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:52.324935Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:52.358224Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:52.359233Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-17T17:55:52.359402Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
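The Execute/Complete pairs above are the two-phase local-transaction pattern used by these tablets: Execute runs against tablet-local state and stages the change, and Complete fires only after the change is durably committed, publishing side effects such as replies. A minimal sketch of that shape, with invented names (TSelfCheckTx and TTabletState are illustrative, not the actual YDB classes):

    #include <cstdint>
    #include <iostream>

    // Illustrative sketch of a tablet-local two-phase transaction:
    // Execute() stages changes against in-memory state, Complete()
    // runs only after the staged changes are considered committed.
    // Names are hypothetical; this is not the YDB tablet API.
    struct TTabletState {
        uint64_t LastSelfCheckSeqNo = 0;
    };

    class TSelfCheckTx {
    public:
        explicit TSelfCheckTx(TTabletState& state) : State(state) {}

        // Phase 1: validate and stage the change (a real tablet would
        // also build a write-set for its local DB here).
        bool Execute() {
            StagedSeqNo = State.LastSelfCheckSeqNo + 1;
            std::cout << "TTxSelfCheck::Execute\n";
            return true; // ready to commit
        }

        // Phase 2: invoked after commit; publish side effects.
        void Complete() {
            State.LastSelfCheckSeqNo = StagedSeqNo;
            std::cout << "TTxSelfCheck::Complete (seqNo="
                      << State.LastSelfCheckSeqNo << ")\n";
        }

    private:
        TTabletState& State;
        uint64_t StagedSeqNo = 0;
    };

    int main() {
        TTabletState state;
        // The log shows this pair firing on a timer (roughly every
        // 400 ms); here we simply run three cycles back to back.
        for (int i = 0; i < 3; ++i) {
            TSelfCheckTx tx(state);
            if (tx.Execute()) {
                tx.Complete();
            }
        }
    }

In the log this pair repeats on a timer, which is why the self-check cycles above recur at a near-constant interval until the session times out.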
2025-04-17T17:55:52.372100Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:130:2156], cookie=12345, session=1) 2025-04-17T17:55:52.372853Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=23456, session=1, seqNo=0) 2025-04-17T17:55:52.388059Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=23456, session=1) 2025-04-17T17:55:53.070321Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-17T17:55:53.070530Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-17T17:55:53.093870Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-17T17:55:53.093992Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-17T17:55:53.129004Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-17T17:55:53.129942Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-17T17:55:53.130097Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-17T17:55:53.153987Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-17T17:55:53.154876Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=23456, session=1, seqNo=0) 2025-04-17T17:55:53.173538Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:136:2161], cookie=23456, session=1) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2025-04-17T17:55:15.260783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:15.261685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:15.261864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:15.262962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:15.263630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:15.263692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001870/r3tmp/tmpzFaLS1/pdisk_1.dat 2025-04-17T17:55:15.719892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:15.964342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:55:16.095345Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:55:16.097600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:16.097729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:16.099762Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:55:16.100473Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:55:16.107272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:16.107400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:16.108393Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-17T17:55:16.123714Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:16.124366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:16.124851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:16.451799Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-04-17T17:55:16.451878Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:55:16.452084Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1234:2747] 2025-04-17T17:55:16.605682Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:55:16.605800Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:55:16.606572Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1234:2747] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:55:16.606679Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:55:16.607050Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:55:16.607268Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:55:16.607377Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:55:16.607713Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:55:16.609317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:16.617501Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:55:16.617599Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:55:16.722851Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1285:2390] 2025-04-17T17:55:16.723105Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:16.773795Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:16.774041Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:55:16.778838Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:55:16.778998Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:55:16.779065Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:55:16.779574Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:55:16.779989Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:55:16.780129Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1309:2390] in generation 1 2025-04-17T17:55:16.808748Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:55:16.875941Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:55:16.876139Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:55:16.876264Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1312:2407] 2025-04-17T17:55:16.876318Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:55:16.876360Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:55:16.876410Z node 
2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:55:16.876770Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:55:16.876872Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:55:16.877008Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:55:16.877048Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:55:16.877094Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:55:16.877134Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:55:16.923445Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1268:2777], serverId# [2:1316:2408], sessionId# [0:0:0] 2025-04-17T17:55:16.923954Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:55:16.924220Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:55:16.924333Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:55:16.926724Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:55:16.939600Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:55:16.939733Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:55:17.159903Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-17T17:55:17.160319Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-17T17:55:17.160475Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T17:55:17.209514Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-17T17:55:17.209809Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-17T17:55:17.209975Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T17:55:17.210369Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T17:55:17.242296Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1347:2799], serverId# [2:1349:2418], sessionId# [0:0:0] 2025-04-17T17:55:17.248742Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:55:17.248855Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:55:17.249244Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:55:17.249300Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 
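The datashard records above show the propose/plan split: a transaction is first proposed and prepared on the shard (txId 281474976715657), a coordinator then assigns it a plan step (step 1000), and the shard executes planned operations from its PlanQueue in (step, txId) order. A toy model of that ordering, with invented names and none of the real prepare/readset machinery:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>
    #include <utility>

    // Toy model of a datashard plan queue: planned transactions
    // execute in (plan step, txId) order. Names are invented for
    // illustration; the real pipeline has many more stages.
    int main() {
        using TPlanKey = std::pair<uint64_t, uint64_t>; // (step, txId)
        std::map<TPlanKey, std::string> planQueue;

        // Mirrors the log: txId 281474976715657 planned at step 1000.
        planQueue[{1000, 281474976715657ULL}] = "create table-1";
        planQueue[{2000, 281474976715661ULL}] = "snapshot read";

        // std::map iterates in key order, so execution follows
        // ascending (step, txId), exactly the ordering the shard needs.
        for (const auto& [key, body] : planQueue) {
            std::cout << "execute step " << key.first
                      << " txId " << key.second
                      << " (" << body << ")\n";
        }
    }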
2025-04-17T17:55:17.249390Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:55:17.249694Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:55:17.249877Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025 ... 271646922 2025-04-17T17:55:35.622117Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1650:2979], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01js2e8j9h9dzr3gd93xzcm713. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll inputs 2025-04-17T17:55:35.622140Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1650:2979], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01js2e8j9h9dzr3gd93xzcm713. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2025-04-17T17:55:35.622210Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1650:2979], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01js2e8j9h9dzr3gd93xzcm713. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2025-04-17T17:55:35.622239Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1650:2979], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01js2e8j9h9dzr3gd93xzcm713. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. ProcessOutputsState.Inflight: 0 2025-04-17T17:55:35.622268Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1650:2979], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01js2e8j9h9dzr3gd93xzcm713. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2025-04-17T17:55:35.622297Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. Tasks execution finished 2025-04-17T17:55:35.622329Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1650:2979], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01js2e8j9h9dzr3gd93xzcm713. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-17T17:55:35.622410Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2025-04-17T17:55:35.622500Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T17:55:35.622636Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T17:55:35.622799Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1647:2940] TxId: 281474976715664. Ctx: { TraceId: 01js2e8j9h9dzr3gd93xzcm713, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1650:2979], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2161 Tasks { TaskId: 1 CpuTimeUs: 595 FinishTimeMs: 1744912535622 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 71 BuildCpuTimeUs: 524 HostName: "ghrun-gkdwnlcfnq" NodeId: 3 CreateTimeMs: 1744912535619 UpdateTimeMs: 1744912535622 } MaxMemoryUsage: 1048576 } 2025-04-17T17:55:35.622852Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01js2e8j9h9dzr3gd93xzcm713, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1650:2979] 2025-04-17T17:55:35.622955Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1647:2940] TxId: 281474976715664. Ctx: { TraceId: 01js2e8j9h9dzr3gd93xzcm713, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:55:35.623014Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1647:2940] TxId: 281474976715664. Ctx: { TraceId: 01js2e8j9h9dzr3gd93xzcm713, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-17T17:55:35.623063Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1647:2940] TxId: 281474976715664. Ctx: { TraceId: 01js2e8j9h9dzr3gd93xzcm713, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWQxYzg1NzItZTQ0NDY1MjYtZDZkYWNhZWEtNTYzY2U4Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.002161s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T17:55:35.624074Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-17T17:55:35.624168Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Handle TEvProposeTransaction 2025-04-17T17:55:35.624245Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] TxId# 0 ProcessProposeTransaction 2025-04-17T17:55:35.624420Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1652:2980] SnapshotReq marker# P0 2025-04-17T17:55:35.624946Z node 3 :TX_PROXY DEBUG: Actor# [3:1654:2980] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-17T17:55:35.625673Z node 3 :TX_PROXY DEBUG: Actor# [3:1654:2980] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-17T17:55:35.625778Z node 3 :TX_PROXY DEBUG: Actor# [3:1652:2980] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3
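The "Scheduled retry for error" warnings that follow come from service actors probing metadata tables (.metadata/workload_manager/...) before those tables have been created; rather than fail, the actor schedules another attempt. A minimal retry-with-backoff sketch of that behavior, with invented names and made-up timing (the real retry policy is not shown in this log):

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <thread>

    // Invented stand-in for a metadata lookup that fails with
    // LookupError while the table has not been created yet.
    enum class EStatus { Success, LookupError };

    EStatus CheckTableExists(const std::string& path, int attempt) {
        // Pretend the table appears on the third attempt.
        return attempt >= 3 ? EStatus::Success : EStatus::LookupError;
    }

    int main() {
        const std::string path =
            ".metadata/workload_manager/delayed_requests";
        auto delay = std::chrono::milliseconds(100);
        for (int attempt = 1; attempt <= 5; ++attempt) {
            if (CheckTableExists(path, attempt) == EStatus::Success) {
                std::cout << "table " << path << " is ready\n";
                return 0;
            }
            std::cout << "Scheduled retry for error: Retry LookupError"
                         " for table " << path << "\n";
            std::this_thread::sleep_for(delay);
            delay *= 2; // exponential backoff (illustrative choice)
        }
        std::cout << "giving up\n";
        return 1;
    }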
2025-04-17T17:55:45.198865Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:491:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:45.199465Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:45.199872Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:45.200841Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:486:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:45.201222Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:45.201674Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001870/r3tmp/tmpmMofPD/pdisk_1.dat 2025-04-17T17:55:45.773316Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:46.048713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:55:46.197407Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:46.197599Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:46.222922Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:46.223105Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:46.244182Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-17T17:55:46.245224Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:46.245824Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:46.553401Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.439955Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1400:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:47.440072Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1410:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:47.440164Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:47.453868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:55:47.977787Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1414:2843], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:55:48.051474Z node 5 :TX_PROXY ERROR: Actor# [5:1544:2914] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:48.880231Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2e8zkdar4nbxynwmqc6vye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZGRjZjRiMTctYmJkZDVjM2UtNzY2N2VmNWEtYzVkZmIxYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:49.923201Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2e916214rtr7bqgdcnyfp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTVkYTdlNzctMjM2NjllNTAtYjdkODhhZjctNDgxMThkMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:50.883868Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2e916214rtr7bqgdcnyfp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTVkYTdlNzctMjM2NjllNTAtYjdkODhhZjctNDgxMThkMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:50.886718Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> BsControllerConfig::Basic |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |80.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |80.7%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant >> BsControllerConfig::OverlayMapCrossReferences |80.7%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TConsoleConfigTests::TestAddConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-04-17T17:55:20.553808Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.553841Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.553862Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.554543Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.575811Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.576131Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.576627Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.577131Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.577276Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.577455Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.577514Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:55:20.578953Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.578979Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.579005Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.579450Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.580040Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.580209Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.582323Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.582848Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.582982Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.583099Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-17T17:55:20.583156Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:55:20.584366Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.584390Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.584432Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.584747Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.590325Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.590513Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.592096Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.592981Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.593219Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.594132Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.594203Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:55:20.598538Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.598616Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.598661Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.598959Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.614798Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.614985Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.615313Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.617096Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.617650Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.617785Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.617842Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:55:20.618876Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.618900Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.618936Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.619247Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:55:20.619846Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.619977Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.620241Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.620652Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.620806Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.620884Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.620922Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:55:20.621605Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.621631Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.621650Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.622309Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.624175Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.624304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.626906Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.627346Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.630656Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.630767Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.630818Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:55:20.631864Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.631885Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.631908Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.642573Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.648746Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.648961Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.650414Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.651240Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.654346Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.658524Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-17T17:55:20.658594Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:55:20.659791Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.659817Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.659841Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:20.670611Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:20.671652Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:20.671788Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.676656Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:55:20.678285Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:20.678729Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:20.678821Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:20.678862Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:55:20.705232Z :ReadSession INFO: Random seed for debugging is 1744912520705200 2025-04-17T17:55:21.435567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342216365237160:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:21.435682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:21.561353Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342212587822095:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:21.561402Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... 
de 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2025-04-17T17:55:41.422740Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-17T17:55:41.422770Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-17T17:55:41.422814Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2025-04-17T17:55:41.426690Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:55:41.426742Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:55:41.426847Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_11017199750540696144_v1 2025-04-17T17:55:41.426970Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:55:41.426984Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:55:41.426999Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:55:41.427014Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:55:41.427024Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:55:41.427034Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:55:41.427048Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:55:41.427063Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:55:41.427090Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:55:41.431246Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:55:41.431304Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:55:41.431370Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2025-04-17T17:55:41.431958Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-04-17T17:55:41.432009Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2025-04-17T17:55:41.432052Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2025-04-17T17:55:41.432963Z :DEBUG: [/Root] [/Root] [956e8f24-280e52ce-5eb38d1e-4ec0c08d] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2025-04-17T17:55:41.456029Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0] Write session will now close 2025-04-17T17:55:41.456114Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0] Write session: aborting 2025-04-17T17:55:41.456723Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:55:41.456770Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0] Write session: destroy 2025-04-17T17:55:41.459797Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0 grpc read done: success: 0 data: 2025-04-17T17:55:41.459828Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0 grpc read failed 2025-04-17T17:55:41.459860Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0 grpc closed 2025-04-17T17:55:41.459874Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|e584789a-a22f04c6-68f70c78-79bb4ce5_0 is DEAD 2025-04-17T17:55:41.460484Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:55:41.464759Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7494342302264585959:2616] destroyed 2025-04-17T17:55:41.464812Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
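The commit exchange above ends with the partition persisting "user user offset is set to 3": the consumer acknowledged cookie 3, and the partition stores a committed offset per consumer so that a reconnecting session resumes from it. A small sketch of that bookkeeping, with invented types (TPartitionOffsets is illustrative, not the YDB class):

    #include <algorithm>
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>

    // Invented sketch of per-consumer committed-offset bookkeeping
    // on a topic partition: commits only move forward, and a
    // reconnecting reader resumes from the stored offset.
    class TPartitionOffsets {
    public:
        void Commit(const std::string& consumer, uint64_t offset) {
            auto& current = Committed[consumer];
            current = std::max(current, offset); // never move backwards
            std::cout << "user " << consumer
                      << " offset is set to " << current << "\n";
        }

        uint64_t ResumeFrom(const std::string& consumer) const {
            auto it = Committed.find(consumer);
            return it == Committed.end() ? 0 : it->second;
        }

    private:
        std::map<std::string, uint64_t> Committed;
    };

    int main() {
        TPartitionOffsets partition;
        partition.Commit("user", 3); // as in the log above
        partition.Commit("user", 2); // a stale commit is ignored
        std::cout << "resume from "
                  << partition.ResumeFrom("user") << "\n";
    }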
2025-04-17T17:55:43.938344Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-17T17:55:51.405700Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-17T17:55:51.465020Z :INFO: [/Root] [/Root] [956e8f24-280e52ce-5eb38d1e-4ec0c08d] Closing read session. Close timeout: 0.000000s 2025-04-17T17:55:51.465097Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-17T17:55:51.465141Z :INFO: [/Root] [/Root] [956e8f24-280e52ce-5eb38d1e-4ec0c08d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16638 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:55:51.465262Z :NOTICE: [/Root] [/Root] [956e8f24-280e52ce-5eb38d1e-4ec0c08d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T17:55:51.465308Z :DEBUG: [/Root] [/Root] [956e8f24-280e52ce-5eb38d1e-4ec0c08d] [dc1] Abort session to cluster 2025-04-17T17:55:51.465883Z :NOTICE: [/Root] [/Root] [956e8f24-280e52ce-5eb38d1e-4ec0c08d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:55:51.467312Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 grpc read done: success# 0, data# { } 2025-04-17T17:55:51.467354Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 grpc read failed 2025-04-17T17:55:51.467386Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 closed 2025-04-17T17:55:51.491930Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_11017199750540696144_v1 2025-04-17T17:55:51.491983Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7494342272199814367:2533] destroyed 2025-04-17T17:55:51.492029Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_11017199750540696144_v1 2025-04-17T17:55:51.490707Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11017199750540696144_v1 is DEAD 2025-04-17T17:55:51.491319Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7494342272199814364:2530] disconnected; active server actors: 1 2025-04-17T17:55:51.491346Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7494342272199814364:2530] client user disconnected session shared/user_1_1_11017199750540696144_v1 2025-04-17T17:55:52.270256Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710720, task: 1, CA Id [1:7494342349509226855:2733]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T17:55:52.307111Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710720, task: 1, CA Id [1:7494342349509226855:2733]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:55:52.356543Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710720, task: 1, CA Id [1:7494342349509226855:2733]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:55:52.435124Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710720, task: 1, CA Id [1:7494342349509226855:2733]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:55:52.518896Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710720, task: 1, CA Id [1:7494342349509226855:2733]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:55:53.613130Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:53.613181Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:53.613211Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:53.613706Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:55:53.614252Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:55:53.614442Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:53.614743Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-17T17:55:53.615412Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:53.615829Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:55:53.620482Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-17T17:55:53.620583Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:55:53.620653Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:55:53.620696Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-17T17:55:53.620877Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:55:53.620930Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant >> TestYmqHttpProxy::TestGetQueueAttributes >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumers >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAutoKind >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2025-04-17T17:55:18.012469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:18.013393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:18.013554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:18.014619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:18.015256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:18.015328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00185c/r3tmp/tmpBHJ1UP/pdisk_1.dat 2025-04-17T17:55:18.461998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:18.669876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.776008Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:55:18.777636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:18.777757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:18.782636Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:55:18.783507Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:55:18.787235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:18.787328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:18.788171Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-17T17:55:18.815630Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:18.816402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:18.816818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:19.122587Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-04-17T17:55:19.122659Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:55:19.122807Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1231:2744] 2025-04-17T17:55:19.283882Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:55:19.284002Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:55:19.284757Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:55:19.284860Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:55:19.285237Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:55:19.285431Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:55:19.285545Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:55:19.285921Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:55:19.290451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:19.295973Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:55:19.296076Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:55:19.414279Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1307:2800] 2025-04-17T17:55:19.414568Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:19.467326Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1312:2804] 2025-04-17T17:55:19.467575Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:19.481672Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1314:2805] 2025-04-17T17:55:19.481858Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:19.488877Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:19.489938Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:55:19.491324Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T17:55:19.491397Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T17:55:19.491448Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T17:55:19.491756Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:55:19.492579Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:55:19.492648Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:1405:2800] in generation 1 2025-04-17T17:55:19.497688Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:19.498317Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:55:19.499483Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-17T17:55:19.499549Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-04-17T17:55:19.499604Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-17T17:55:19.499839Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:55:19.500762Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:55:19.500826Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [1:1420:2804] in generation 1 2025-04-17T17:55:19.508561Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:19.508696Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:55:19.509763Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-04-17T17:55:19.509817Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037892 2025-04-17T17:55:19.509860Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037892 2025-04-17T17:55:19.510145Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:55:19.510288Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:55:19.510346Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037892 persisting started state actor id [1:1435:2805] in generation 1 2025-04-17T17:55:19.523368Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1412:2399] 2025-04-17T17:55:19.523614Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:19.600949Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [2:1418:2400] 2025-04-17T17:55:19.601120Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:19.611773Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [2:1424:2402] 2025-04-17T17:55:19.611962Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:19.619448Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:19.620327Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:55:19.621325Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:55:19.621373Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:55:19.621421Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:55:19.621747Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:55:19.621938Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:55:19.621981Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1492:2399] in generation 1 2025-04-17T17:55:19.623648Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [2:1427:2403] 2025-04-17T17:55:19.623813Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:19.631226Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:19.632449Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:55:19.633537Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-17T17:55:19.633589Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-17T17:55:19.633624Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-17T17:55:19.633844Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:55:19.634424Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:55:19.634481Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [2:1502:2400] in generation 1 2025-04-17T17:55:19.640774Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:19.641500Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:55:19.642782Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: Q ... e ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T17:55:58.180854Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Send stats to executor actor [5:1975:2972] TaskId: 1 Stats: CpuTimeUs: 510 Tasks { TaskId: 1 CpuTimeUs: 266 FinishTimeMs: 1744912558180 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 123 BuildCpuTimeUs: 143 HostName: "ghrun-gkdwnlcfnq" NodeId: 5 CreateTimeMs: 1744912558179 CurrentWaitOutputTimeUs: 53 UpdateTimeMs: 1744912558180 } MaxMemoryUsage: 1048576 2025-04-17T17:55:58.181163Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1975:2972] TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [5:1646:2972], seqNo: 1, nRows: 1 2025-04-17T17:55:58.181272Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T17:55:58.181444Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 1978 RawX2: 21474839639 } } DstEndpoint { ActorId { RawX1: 1975 RawX2: 21474839452 } } InMemory: true } 2025-04-17T17:55:58.181504Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-04-17T17:55:58.181550Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-17T17:55:58.181599Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-17T17:55:58.181639Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-17T17:55:58.181678Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-17T17:55:58.181747Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-17T17:55:58.181979Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1975:2972] TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1978:3159], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 510 Tasks { TaskId: 1 CpuTimeUs: 266 FinishTimeMs: 1744912558180 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 123 BuildCpuTimeUs: 143 HostName: "ghrun-gkdwnlcfnq" NodeId: 5 CreateTimeMs: 1744912558179 CurrentWaitOutputTimeUs: 53 UpdateTimeMs: 1744912558180 } MaxMemoryUsage: 1048576 } 2025-04-17T17:55:58.182088Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1975:2972] TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1978:3159], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2025-04-17T17:55:58.182456Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1979:3159] 2025-04-17T17:55:58.182529Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-04-17T17:55:58.182575Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-04-17T17:55:58.182600Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2025-04-17T17:55:58.182721Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T17:55:58.182763Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-04-17T17:55:58.182788Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-17T17:55:58.182838Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-17T17:55:58.182867Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-17T17:55:58.182889Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-17T17:55:58.182911Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished 2025-04-17T17:55:58.182941Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1978:3159], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01js2e96nk6bphbh21fpe5nnkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T17:55:58.183021Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2025-04-17T17:55:58.183108Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T17:55:58.183248Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T17:55:58.183453Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1975:2972] TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1978:3159], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2090 Tasks { TaskId: 1 CpuTimeUs: 273 FinishTimeMs: 1744912558182 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 143 HostName: "ghrun-gkdwnlcfnq" NodeId: 5 CreateTimeMs: 1744912558179 UpdateTimeMs: 1744912558182 } MaxMemoryUsage: 1048576 } 2025-04-17T17:55:58.183516Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1978:3159] 2025-04-17T17:55:58.183621Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1975:2972] TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T17:55:58.183687Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1975:2972] TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-17T17:55:58.183730Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1975:2972] TxId: 281474976715667. Ctx: { TraceId: 01js2e96nk6bphbh21fpe5nnkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQwYzMzNGUtYWFlZTZhMy0xOGJlY2E5Mi0yMzAxMDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002090s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 1030 |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |80.8%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] |80.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 29031, MsgBus: 25442 2025-04-17T17:55:40.099158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342295797559512:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:40.099229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001879/r3tmp/tmpxYgBLJ/pdisk_1.dat 2025-04-17T17:55:40.724656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:40.724748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:40.734831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29031, node 1 2025-04-17T17:55:40.791838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:40.810962Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:55:40.960370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:40.960395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:40.960402Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:40.960560Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25442 TClient is connected to server localhost:25442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:41.716386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:41.738612Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:43.987751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342308682462069:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:43.987887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.360200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:44.515679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342312977429488:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.515763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.515972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342312977429493:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.518985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:44.528409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342312977429495:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:44.600212Z node 1 :TX_PROXY ERROR: Actor# [1:7494342312977429535:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:45.100409Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342295797559512:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:45.100487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:45.478107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:46.037217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:55:46.649718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.210344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.934833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:55:48.560422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:48.612202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.287972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710710:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 15713, MsgBus: 7443 2025-04-17T17:55:52.668593Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342348652890218:2226];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001879/r3tmp/tmpLk9Nbr/pdisk_1.dat 2025-04-17T17:55:52.755562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:52.842498Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:52.870535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:52.870616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:52.875792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15713, node 2 2025-04-17T17:55:52.975761Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:52.975778Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:52.975784Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:52.975887Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7443 TClient is connected to server localhost:7443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:53.516438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:53.524254Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20255, MsgBus: 12334 2025-04-17T17:55:56.646086Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342362801341014:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:56.647087Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001879/r3tmp/tmp5rGAJO/pdisk_1.dat 2025-04-17T17:55:56.875657Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:56.902889Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:56.902975Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:56.911420Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20255, node 3 2025-04-17T17:55:56.994829Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:56.994851Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:56.994860Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:56.994985Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12334 TClient is connected to server localhost:12334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:57.563661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:57.574607Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> IncrementalRestoreScan::Empty >> IncrementalRestoreScan::ChangeSenderEmpty >> IncrementalRestoreScan::ChangeSenderSimple >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestAffectedConfigs >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> BsControllerConfig::ReassignGroupDisk >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] |80.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |80.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> BsControllerConfig::ManyPDisksRestarts >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> TestYmqHttpProxy::TestDeleteQueue >> TestYmqHttpProxy::TestCreateQueueWithTags >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> IncrementalRestoreScan::Empty [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] Test command err: 2025-04-17T17:49:14.591787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3122:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.593418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.594055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.594498Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3128:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.595382Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1530:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.637561Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.638660Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.639508Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.639557Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.639735Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1533:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.640776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3119:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.641116Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.641785Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.641901Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.642542Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.642745Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3131:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.643458Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3134:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.644229Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.644913Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.644959Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.645890Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.645994Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3137:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.652146Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3125:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:14.653031Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.653087Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:14.653752Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:14.653885Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:49:15.329724Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:15.717324Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:49:15.747007Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:49:16.344586Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 22164, node 1 TClient is connected to server localhost:24475 2025-04-17T17:49:16.713215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:16.713283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:16.713325Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:16.714003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:50:44.352507Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:2878:2430], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:44.354452Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:50:44.355177Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:44.358257Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:2881:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:44.358957Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3139:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:44.360165Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:44.360414Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:2884:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:44.360963Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:44.361064Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:50:44.361418Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:2899:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:44.361713Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:50:44.361794Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:44.362051Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:2893:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:44.362211Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:2896:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:50:44.362964Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:44.363445Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:50:44.363725Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:44.363792Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:50:44.363852Z node 18 :METADATA_PROVIDER ERROR: fline=accessor ... ta/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:57.712500Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:52:57.712674Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:57.713526Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [27:1538:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:57.714738Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:57.714797Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:52:57.715464Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:57.716445Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:52:57.717353Z node 24 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [24:3111:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:57.718563Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:57.718996Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:52:57.720702Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [20:3151:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:57.721337Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:57.722096Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:52:58.636525Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:58.985246Z node 19 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:52:59.042765Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:53:00.100409Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 11542, node 19 TClient is connected to server localhost:12179 2025-04-17T17:53:01.212615Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:01.212718Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:01.212797Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:01.221883Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:23.676636Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:3129:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.679034Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.679944Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.681914Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:1963:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.683997Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.684287Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:1966:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.694281Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.695203Z node 30 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [30:3132:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.695427Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.696134Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.697152Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.697647Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.705248Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:3125:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.706332Z node 33 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [33:1960:2373], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.707798Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.708126Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [31:3135:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.708762Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.708972Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.709712Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.709891Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.710077Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:1969:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.710792Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.711000Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [32:3138:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:55:23.712438Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.713116Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.713249Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:55:23.714729Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:55:24.659279Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:25.051240Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:55:25.150667Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:55:26.860002Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 5836, node 28 TClient is connected to server localhost:22818 2025-04-17T17:55:27.852693Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:27.852811Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:27.852986Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:27.854001Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration
>> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD]
>> GenericFederatedQuery::IcebergHiveBasicSelectCount
>> BasicUsage::BrokenCredentialsProvider [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD]
Test command err:
2025-04-17T17:48:46.288856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:46.288928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:46.542012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:48.450946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:48.451015Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:48.493307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:49.893076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:49.893145Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:49.933918Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:51.381882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:51.381949Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:51.424029Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:53.110371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:53.110442Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:53.159472Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:55.073761Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:55.073849Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:55.231381Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:56.865653Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:56.865724Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:56.929217Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:48:58.346540Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:48:58.346617Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:48:58.395696Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:00.819978Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:00.820057Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:00.883183Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:03.204288Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:03.204369Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:03.259689Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:06.042779Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:06.042859Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:06.090262Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:08.482527Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:08.482624Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:08.563539Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:10.838256Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:10.838335Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:10.905521Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:13.304573Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:13.304659Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:13.364923Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:15.664056Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:15.664152Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:15.715688Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:17.862157Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:17.862270Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:17.943590Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:20.475763Z node 17 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:20.475852Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:20.519837Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:21.139355Z node 17 :CMS_CONFIGS ERROR: Unexpected config sender died for subscription id=1 2025-04-17T17:49:21.860710Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:21.860811Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:21.913337Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:22.857303Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:49:22.857431Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:22.935001Z node 18 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-04-17T17:49:23.790228Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:23.790314Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:23.838655Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:24.691432Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:49:24.691527Z node 19 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T17:49:24.770569Z node 19 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[19:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-04-17T17:49:25.356738Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:25.356841Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:25.405640Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:28.954851Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:28.954949Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:29.006548Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:32.311416Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:32.311486Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:32.356497Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:33.459667Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:33.459761Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:33.499264Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:34.578558Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:49:34.578634Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:34.619658Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:49:40.742079Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:49:40.742199Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:47.043841Z node 24 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2025-04-17T17:53:47.960490Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:53:47.960564Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:48.010067Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:53:54.942981Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:53:54.943073Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:57.982471Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:55:57.982568Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:58.064428Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:56:01.076116Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:56:01.076231Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:01.136094Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:56:02.900434Z node 28 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:56:02.900542Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:02.975958Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T17:56:04.922666Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:56:04.922779Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:04.987816Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD]
Test command err:
2025-04-17T17:56:06.983893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:06.988502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:06.988615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018e6/r3tmp/tmpUOZNWw/pdisk_1.dat 2025-04-17T17:56:07.571816Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Exhausted 2025-04-17T17:56:07.571988Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-04-17T17:56:07.572037Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Finish 0
>> BsControllerConfig::ReassignGroupDisk [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD]
Test command err:
=== Starting PQ server === Server->StartServer(false); 2025-04-17T17:55:15.916180Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342188897625537:2182];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:15.936963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:16.404189Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:55:16.394616Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d96/r3tmp/tmpcuhbIA/pdisk_1.dat 2025-04-17T17:55:16.550253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:17.062649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:17.074150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:17.074980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:17.075577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:17.075617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:17.082298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:17.090013Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:17.090141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T17:55:17.090934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13892, node 1 2025-04-17T17:55:17.346886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/000d96/r3tmp/yandexsNCRSl.tmp 2025-04-17T17:55:17.346906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/000d96/r3tmp/yandexsNCRSl.tmp 2025-04-17T17:55:17.347088Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/000d96/r3tmp/yandexsNCRSl.tmp 2025-04-17T17:55:17.347205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:17.503487Z INFO: TTestServer started on Port 17027 GrpcPort 13892 TClient is connected to server localhost:17027 PQClient connected to localhost:13892 === TenantModeEnabled() = 0 === Init PQ - start server on port 13892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:55:18.178559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:55:18.178765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.178937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:55:18.179129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:55:18.179156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.188224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:55:18.188375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:55:18.188563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.188600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:55:18.188612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-04-17T17:55:18.188640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2025-04-17T17:55:18.202851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-17T17:55:18.202881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-04-17T17:55:18.202900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-17T17:55:18.203609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.203643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:55:18.203674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-04-17T17:55:18.207347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.207400Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.207419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-17T17:55:18.207464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-04-17T17:55:18.280752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:18.287222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-04-17T17:55:18.287385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:55:18.290263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912518335, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:55:18.290440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912518335 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:55:18.290464Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-17T17:55:18.290724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-04-17T17:55:18.290753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-17T17:55:18.290911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:55:18.290988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:55:18.293109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:55:18.293140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:55:18.293282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:55:18.293298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494342197487560668:2395], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-04-17T17:55:18.293337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.293357Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-04-17T17:55:18.293453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-17T17:55:18.293467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-17T17:55:18.293484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-17T17:55:18.293493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-17T17:55:18.293505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-04-17T17:55:18.293525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-17T17:55:18.293565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-04-17T17:55:18.293580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-04-17T17:55:18.293628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate ... --topic1] consumer cli register reading session ReadingSession "shared/cli_5_1_14422431832836660159_v1" (Sender=[5:7494342402099126066:2656], Pipe=[5:7494342402099126073:2656], Partitions=[], ActiveFamilyCount=0) 2025-04-17T17:56:05.275827Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli rebalancing was scheduled 2025-04-17T17:56:05.275889Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing. 
Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-17T17:56:05.275949Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_14422431832836660159_v1" (Sender=[5:7494342402099126066:2656], Pipe=[5:7494342402099126073:2656], Partitions=[], ActiveFamilyCount=0) 2025-04-17T17:56:05.276033Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_14422431832836660159_v1" sender [5:7494342402099126066:2656] lock partition 0 for ReadingSession "shared/cli_5_1_14422431832836660159_v1" (Sender=[5:7494342402099126066:2656], Pipe=[5:7494342402099126073:2656], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-17T17:56:05.276108Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-17T17:56:05.276140Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000225s 2025-04-17T17:56:05.278759Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7494342402099126077:2659] 2025-04-17T17:56:05.279053Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_14422431832836660159_v1:1 with generation 1 2025-04-17T17:56:05.284355Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1744912565237 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-17T17:56:05.284413Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-17T17:56:05.284478Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 sending to client partition status 2025-04-17T17:56:05.289728Z :INFO: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-04-17T17:56:05.292885Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-17T17:56:05.293058Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-17T17:56:05.293116Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-17T17:56:05.293153Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-17T17:56:05.293214Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-17T17:56:05.293229Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1TEvPartitionReady. Aval parts: 1 2025-04-17T17:56:05.293282Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 performing read request: guid# 8725746b-9052e301-44d7d2bb-fe93ab83, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T17:56:05.293406Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 8725746b-9052e301-44d7d2bb-fe93ab83 2025-04-17T17:56:05.295044Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1744912565027 CreateTimestampMS: 1744912565025 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1744912565117 CreateTimestampMS: 1744912565026 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1744912565117 CreateTimestampMS: 1744912565026 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-04-17T17:56:05.295235Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-17T17:56:05.295274Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 8725746b-9052e301-44d7d2bb-fe93ab83 has messages 1 2025-04-17T17:56:05.295365Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 read done: guid# 8725746b-9052e301-44d7d2bb-fe93ab83, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 366 2025-04-17T17:56:05.295395Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 response to read: guid# 8725746b-9052e301-44d7d2bb-fe93ab83 2025-04-17T17:56:05.295625Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 Process answer. Aval parts: 0 2025-04-17T17:56:05.297817Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] Got ReadResponse, serverBytesSize = 366, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428434 2025-04-17T17:56:05.297959Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428434 2025-04-17T17:56:05.302325Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-17T17:56:05.302386Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] Returning serverBytesSize = 366 to budget 2025-04-17T17:56:05.302429Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] In ContinueReadingDataImpl, ReadSizeBudget = 366, ReadSizeServerDelta = 52428434 2025-04-17T17:56:05.302768Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-17T17:56:05.304274Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-17T17:56:05.304337Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-17T17:56:05.304383Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-04-17T17:56:05.304633Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-17T17:56:05.304704Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] Returning serverBytesSize = 0 to budget 2025-04-17T17:56:05.304894Z :INFO: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] Closing read session. 
Close timeout: 0.000000s 2025-04-17T17:56:05.304933Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-17T17:56:05.305004Z :INFO: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 70 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:56:05.305126Z :NOTICE: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T17:56:05.305829Z :DEBUG: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] [] Abort session to cluster 2025-04-17T17:56:05.306372Z :NOTICE: [] [] [1ab59d85-80022db9-fdfdbea9-3d1b544d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:56:05.305299Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 grpc read done: success# 1, data# { read_request { bytes_size: 366 } } 2025-04-17T17:56:05.305494Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 got read request: guid# e52157bb-271d85fc-9a3ff5bf-8156fd01 2025-04-17T17:56:05.309465Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 grpc read done: success# 0, data# { } 2025-04-17T17:56:05.309500Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 grpc read failed 2025-04-17T17:56:05.309525Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 grpc closed 2025-04-17T17:56:05.309560Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14422431832836660159_v1 is DEAD 2025-04-17T17:56:05.311760Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_14422431832836660159_v1 2025-04-17T17:56:05.311571Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7494342402099126073:2656] disconnected; active server actors: 1 2025-04-17T17:56:05.311608Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7494342402099126073:2656] client cli disconnected session shared/cli_5_1_14422431832836660159_v1
>> IncrementalRestoreScan::ChangeSenderEmpty [GOOD]
>> IncrementalRestoreScan::ChangeSenderSimple [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD]
Test command err:
Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:263:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:263:2078] Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:281:2068] recipient: [1:263:2078] 2025-04-17T17:56:04.829818Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:04.835993Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:04.836327Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T17:56:04.879655Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:56:04.880175Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T17:56:04.880726Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:04.880757Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:04.880949Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T17:56:04.897586Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T17:56:04.897706Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T17:56:04.897840Z node 1 :BS_CONTROLLER DEBUG:
{BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T17:56:04.897927Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:04.898044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:04.898109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:306:2068] recipient: [1:22:2069] 2025-04-17T17:56:04.914766Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T17:56:04.914916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:04.926736Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:04.926878Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:04.926951Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:04.927016Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:04.927110Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:04.927167Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:04.927223Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:04.927289Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:04.938032Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:04.938149Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:04.948845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:04.948984Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T17:56:04.950089Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T17:56:04.950136Z 
node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T17:56:04.950339Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T17:56:04.950388Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T17:56:04.964778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T17:56:04.965295Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk 2025-04-17T17:56:04.965334Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-04-17T17:56:04.965371Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-04-17T17:56:04.965393Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2025-04-17T17:56:04.965420Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-04-17T17:56:04.965441Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-04-17T17:56:04.965462Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-04-17T17:56:04.965479Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-04-17T17:56:04.965502Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-04-17T17:56:04.965522Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-04-17T17:56:04.965563Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2025-04-17T17:56:04.965598Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-17T17:56:04.993148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { 
Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:263:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:263:2078] Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:281:2068] recipient: [13:263:2078] 2025-04-17T17:56:06.810169Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:06.811031Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:06.811203Z node 13 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T17:56:06.812325Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:56:06.812782Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T17:56:06.813326Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:06.813350Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:06.813553Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T17:56:06.821131Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T17:56:06.821233Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T17:56:06.821314Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T17:56:06.821381Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:06.821457Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:06.821510Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:306:2068] recipient: [13:22:2069] 2025-04-17T17:56:06.833103Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T17:56:06.833308Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:06.844124Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:06.844258Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:06.844342Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:06.844422Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue 
Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:06.844540Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:06.844608Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:06.844645Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:06.844744Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:06.856775Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:06.856916Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:06.869147Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:06.869297Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T17:56:06.870571Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T17:56:06.870623Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T17:56:06.870814Z node 13 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T17:56:06.870871Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T17:56:06.871679Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T17:56:06.872278Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk 2025-04-17T17:56:06.872321Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-04-17T17:56:06.872348Z node 13 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-04-17T17:56:06.872371Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-04-17T17:56:06.872395Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2025-04-17T17:56:06.872427Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-04-17T17:56:06.872452Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-04-17T17:56:06.872496Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-04-17T17:56:06.872543Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-04-17T17:56:06.872578Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-04-17T17:56:06.872620Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-04-17T17:56:06.872646Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 24:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-17T17:56:06.898409Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> BsControllerConfig::ExtendByCreatingSeparateBox >> BsControllerConfig::OverlayMap ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-17T17:55:18.877028Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1744912518876996 2025-04-17T17:55:19.392473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342205977795873:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:19.392518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:19.554847Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342206515357661:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:19.554891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:20.052043Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:55:20.071491Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017f4/r3tmp/tmp5iXjBm/pdisk_1.dat 2025-04-17T17:55:20.509733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:20.583778Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:20.922619Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:20.935700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:20.935779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:20.946292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:20.946361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:20.961164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:20.967784Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:20.971936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15906, node 1 2025-04-17T17:55:21.281793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0017f4/r3tmp/yandex0r8FJf.tmp 2025-04-17T17:55:21.281821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0017f4/r3tmp/yandex0r8FJf.tmp 2025-04-17T17:55:21.281973Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0017f4/r3tmp/yandex0r8FJf.tmp 2025-04-17T17:55:21.282083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:21.474467Z INFO: TTestServer started on Port 10120 GrpcPort 15906 TClient is connected to server localhost:10120 PQClient connected to localhost:15906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:22.200099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
waiting... 2025-04-17T17:55:24.397417Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342205977795873:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:24.397517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:24.558303Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342206515357661:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:24.558380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:25.085007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342231747600567:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:25.085089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342231747600556:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:25.085444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:25.094515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:55:25.151791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342231747600570:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:55:25.582641Z node 1 :TX_PROXY ERROR: Actor# [1:7494342231747600653:2690] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:25.625503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:25.648334Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342231747600673:2352], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:25.650421Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTYxNDY4YzgtYTQ1OWRlZTItOTM3YTE0MDQtZGJlZTQwNGI=, ActorId: [1:7494342231747600544:2338], ActorState: ExecuteState, TraceId: 01js2e89rt3b5fh38g8md3w3s2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:25.652954Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:25.652077Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342232285161770:2316], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:25.653323Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTg0MzlhZTYtNWJiMTFkNTktN2U2NGE2M2ItNTM3YzRjZjU=, ActorId: [2:7494342232285161730:2310], ActorState: ExecuteState, TraceId: 01js2e89t54ekaxqbgr3jnkay3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:25.653678Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:25.834554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:26.024630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:15906", true, true, 1000); 2025-04-17T17:55:26.372502Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2e8awq5htaad4xcat6crbs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI4M2QxMWQtYjM0MGYzNzUtYTMwNGZiZTMtMWZiNTc5YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342236042568381:2990] === CheckClustersList. Ok 2025-04-17T17:55:32.657622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:15906 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ... 
1 server" ip=ipv6:[::1]:58744 proto=v1 topic=test-topic durationSec=0 2025-04-17T17:56:06.062729Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T17:56:06.065553Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-17T17:56:06.065686Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-17T17:56:06.065698Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T17:56:06.065709Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T17:56:06.065726Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T17:56:06.076822Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-17T17:56:06.353843Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-17T17:56:06.354544Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7494342408520811006:2499] connected; active server actors: 1 2025-04-17T17:56:06.354638Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-17T17:56:06.354657Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-17T17:56:06.358267Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7494342408520811006:2499] disconnected; active server actors: 1 2025-04-17T17:56:06.358313Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7494342408520811006:2499] disconnected no session 2025-04-17T17:56:06.502621Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7494342408520811031:2499], now have 1 active actors on pipe 2025-04-17T17:56:06.503288Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:56:06.503327Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:56:06.503435Z node 6 :PERSQUEUE INFO: new Cookie src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T17:56:06.503557Z node 6 :PERSQUEUE DEBUG: [PQ: 
72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T17:56:06.503615Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:56:06.504425Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:56:06.504456Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:56:06.504534Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:56:06.496232Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T17:56:06.496280Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T17:56:06.496299Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342408520810949:2499] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T17:56:06.496335Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T17:56:06.503001Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-17T17:56:06.504936Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0 2025-04-17T17:56:06.506733Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912566506 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:56:06.506869Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T17:56:06.510359Z :INFO: [] MessageGroupId [src] SessionId [src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0] Write session: close. Timeout = 0 ms 2025-04-17T17:56:06.510430Z :INFO: [] MessageGroupId [src] SessionId [src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0] Write session will now close 2025-04-17T17:56:06.510486Z :DEBUG: [] MessageGroupId [src] SessionId [src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0] Write session: aborting 2025-04-17T17:56:06.511000Z :INFO: [] MessageGroupId [src] SessionId [src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:56:06.511053Z :DEBUG: [] MessageGroupId [src] SessionId [src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0] Write session: destroy 2025-04-17T17:56:06.524529Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7494342408520811031:2499] destroyed 2025-04-17T17:56:06.524591Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-17T17:56:06.522094Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0 grpc read done: success: 0 data: 2025-04-17T17:56:06.522122Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0 grpc read failed 2025-04-17T17:56:06.522152Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0 grpc closed 2025-04-17T17:56:06.522203Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|7dbd37ac-7247875d-88b5f257-6ee1f14b_0 is DEAD 2025-04-17T17:56:06.523345Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:56:06.553425Z :INFO: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] Starting read session 2025-04-17T17:56:06.553486Z :DEBUG: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] Starting session to cluster null (localhost:15830) 2025-04-17T17:56:06.556194Z :DEBUG: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:06.556247Z :DEBUG: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:06.556295Z :DEBUG: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] Reconnecting session to cluster null in 0.000000s 2025-04-17T17:56:06.557678Z :ERROR: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
<main>: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-17T17:56:06.557741Z :DEBUG: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:06.557776Z :DEBUG: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:06.557903Z :INFO: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
<main>: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
<main>: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-17T17:56:06.558072Z :NOTICE: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-04-17T17:56:06.558108Z :DEBUG: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
<main>: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
<main>: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-17T17:56:06.558212Z :INFO: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] Closing read session. Close timeout: 0.000000s 2025-04-17T17:56:06.558251Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T17:56:06.558292Z :INFO: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:56:06.558377Z :NOTICE: [/Root] [/Root] [3b194458-8c9de0c0-bc8a4c20-20fe7a49] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-04-17T17:56:06.803157Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715684, task: 1, CA Id [5:7494342408520811059:2516]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T17:56:06.840096Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715684, task: 1, CA Id [5:7494342408520811059:2516]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:56:06.886821Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715684, task: 1, CA Id [5:7494342408520811059:2516]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:56:06.971942Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715684, task: 1, CA Id [5:7494342408520811059:2516]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:56:07.050783Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715684, task: 1, CA Id [5:7494342408520811059:2516]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:56:07.240284Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715684, task: 1, CA Id [5:7494342408520811059:2516]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2025-04-17T17:56:06.990265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:06.990760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:06.990817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018d1/r3tmp/tmpE6xWEt/pdisk_1.dat 2025-04-17T17:56:07.775362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T17:56:07.775676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.775993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:56:07.776348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:56:07.776458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.777489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:07.777680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:56:07.778006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.778103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:56:07.778153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:56:07.778675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:56:07.779548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.779632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:56:07.779682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:56:07.780339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.780394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.780445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:56:07.780515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-17T17:56:07.796408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:56:07.797265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:56:07.797510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:56:07.799080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:07.799151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T17:56:07.799225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:07.870414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T17:56:07.870515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:07.931118Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:56:07.932222Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:56:07.932547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:07.932709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:07.947353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:08.046887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:08.047050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:56:08.047128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:56:08.047439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:56:08.047514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:56:08.052707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:56:08.052845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:56:08.053985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:56:08.054052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:56:08.054263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:56:08.054313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:571:2498], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-17T17:56:08.054896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:08.054970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-17T17:56:08.055088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:08.055123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:08.055174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:08.055208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:08.055246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:56:08.055286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:08.055326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:56:08.055362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:56:08.055439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T17:56:08.055486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-17T17:56:08.055521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-17T17:56:08.057909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-17T17:56:08.058028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-17T17:56:08.058067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-17T17:56:08.058137Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-17T17:56:08.062303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:56:08.062503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-17T17:56:08.062576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-04-17T17:56:08.063552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-17T17:56:08.067119Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-04-17T17:56:08.067207Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:56:08.067488Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:56:08.079408Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:56:08.079506Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:56:08.086894Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:56:08.087013Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-17T17:56:08.941484Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-17T17:56:08.941629Z node 1 :TX_PROXY DEBUG: Actor# [1:829:2682] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-17T17:56:08.942059Z node 1 :TX_PROXY DEBUG: Actor# [1:829:2682] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:56:08.942146Z node 1 :TX_PROXY DEBUG: Actor# [1:829:2682] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-04-17T17:56:08.948553Z node 1 :TX_PROXY DEBUG: Actor# [1:829:2682] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T17:56:08.949478Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 
72057594046644480, LocalPathId: 2]][1:831:2684] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:56:08.949821Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:831:2684] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:56:08.950132Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:831:2684] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T17:56:08.955586Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:831:2684] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2025-04-17T17:56:07.149594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:07.150146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:07.150237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018db/r3tmp/tmp7nlbWp/pdisk_1.dat 2025-04-17T17:56:07.697879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T17:56:07.698155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.706146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:56:07.706514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:56:07.706607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.707548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:07.718507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:56:07.718989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.725661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:56:07.725761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:56:07.736792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:56:07.739233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.739330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:56:07.739375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:56:07.739934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.739989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:07.740032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:56:07.740106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-17T17:56:07.744050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:56:07.744670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:56:07.758329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:56:07.774835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:07.774918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T17:56:07.774981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:07.894265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T17:56:07.894342Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:07.943136Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:56:07.944236Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:56:07.944470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:07.944589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:07.956390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:08.045132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:56:08.046388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:56:08.046497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:56:08.048349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:56:08.048439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-17T17:56:08.048662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:56:08.048738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:56:08.049715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:56:08.049779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:56:08.049957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:56:08.050002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:571:2498], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-17T17:56:08.050562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:08.050629Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-17T17:56:08.050741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:08.050777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:08.052879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:08.052944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:08.052999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:56:08.053042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:08.053081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:56:08.053112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:56:08.053196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T17:56:08.053242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-17T17:56:08.053280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-17T17:56:08.055749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-17T17:56:08.055860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-17T17:56:08.055899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-17T17:56:08.055966Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-17T17:56:08.056059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:56:08.056180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-17T17:56:08.056219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-04-17T17:56:08.063498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-17T17:56:08.068962Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-04-17T17:56:08.069060Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:56:08.069259Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:56:08.079353Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:56:08.079463Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:56:08.082922Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:56:08.084004Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... hemeshard: 72057594046644480 2025-04-17T17:56:08.912915Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-17T17:56:08.913034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-17T17:56:08.913068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-17T17:56:08.913126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-17T17:56:08.913157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-17T17:56:08.913190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2025-04-17T17:56:08.913250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:593:2518] message: TxId: 281474976715658 2025-04-17T17:56:08.913295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-17T17:56:08.913341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-17T17:56:08.913388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2025-04-17T17:56:08.913500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T17:56:08.914402Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-17T17:56:08.914513Z node 1 :TX_PROXY DEBUG: Actor# [1:817:2676] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-17T17:56:08.916268Z node 1 :TX_PROXY DEBUG: Actor# [1:817:2676] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:56:08.916392Z node 1 :TX_PROXY DEBUG: Actor# [1:817:2676] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-04-17T17:56:08.917437Z node 1 :TX_PROXY DEBUG: Actor# [1:817:2676] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T17:56:08.918572Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:827:2680], serverId# [1:828:2681], sessionId# [0:0:0] 2025-04-17T17:56:08.926057Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:56:08.926392Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:56:08.926742Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T17:56:08.933323Z node 1 :CHANGE_EXCHANGE DEBUG: 
[IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-04-17T17:56:08.947646Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-17T17:56:08.948093Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvGetProxyServicesRequest 2025-04-17T17:56:08.948216Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:833:2682] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T17:56:08.959151Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:834:2686], serverId# [1:835:2687], sessionId# [0:0:0] 2025-04-17T17:56:09.010831Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:833:2682] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T17:56:09.010972Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T17:56:09.011107Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:833:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-17T17:56:09.011176Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T17:56:09.011362Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2682] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> BsControllerConfig::OverlayMap [GOOD] |80.8%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-04-17T17:55:15.785479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342188047848433:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:15.785519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:15.862557Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342188575992443:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:15.862611Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:16.244106Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:55:16.278142Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000da4/r3tmp/tmp0hEfF0/pdisk_1.dat 2025-04-17T17:55:16.842316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:16.874008Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:17.004213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:17.004313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:17.023263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:17.023345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:17.031839Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:17.031951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:17.034260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16150, node 1 2025-04-17T17:55:17.109001Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:17.128822Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:55:17.128885Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:55:17.392793Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/000da4/r3tmp/yandextmD14f.tmp 2025-04-17T17:55:17.392819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/000da4/r3tmp/yandextmD14f.tmp 2025-04-17T17:55:17.393002Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/000da4/r3tmp/yandextmD14f.tmp 2025-04-17T17:55:17.393126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:17.566639Z INFO: TTestServer started on Port 62112 GrpcPort 16150 TClient is connected to server localhost:62112 PQClient connected to localhost:16150 === TenantModeEnabled() = 0 === Init PQ - start server on port 16150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:18.339609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:55:18.339801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.339998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:55:18.340184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:55:18.340209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:18.342996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:55:18.343116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:55:18.343244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.343277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:55:18.343290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T17:55:18.343305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-17T17:55:18.347138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.347183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:55:18.347206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T17:55:18.355104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.355147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.355165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:55:18.355201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:55:18.359231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:18.359472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:18.359485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T17:55:18.359526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:18.363046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T17:55:18.363276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:55:18.367719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912518412, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:55:18.367871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912518412 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:55:18.367894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:55:18.370524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T17:55:18.370573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:55:18.370737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:55:18.370781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:55:18.378900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:55:18.378942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:55:18.379117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:55:18.379130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494342196637783553:2445], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T17:55:18.379163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:55:18.379184Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T17:55:18.379262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:55:18.379286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:55:18.379302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:55:18.379313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:55:18. ... Id: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1744912567394 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-17T17:56:07.429758Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-17T17:56:07.429822Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 sending to client partition status 2025-04-17T17:56:07.432157Z :INFO: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-04-17T17:56:07.432713Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-17T17:56:07.432857Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-17T17:56:07.432927Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-17T17:56:07.432964Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-17T17:56:07.433026Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-17T17:56:07.433044Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1TEvPartitionReady. Aval parts: 1 2025-04-17T17:56:07.433093Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 performing read request: guid# 7374b93c-1ce0ce8e-df0be20c-e197c00d, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T17:56:07.433226Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 7374b93c-1ce0ce8e-df0be20c-e197c00d 2025-04-17T17:56:07.434701Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1744912567292 CreateTimestampMS: 1744912567287 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1744912567362 CreateTimestampMS: 1744912567288 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1744912567369 CreateTimestampMS: 1744912567288 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-04-17T17:56:07.434923Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-17T17:56:07.434978Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 7374b93c-1ce0ce8e-df0be20c-e197c00d has messages 1 2025-04-17T17:56:07.435077Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 read done: guid# 7374b93c-1ce0ce8e-df0be20c-e197c00d, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-04-17T17:56:07.435112Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 response to read: guid# 7374b93c-1ce0ce8e-df0be20c-e197c00d 2025-04-17T17:56:07.435319Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 Process answer. Aval parts: 0 2025-04-17T17:56:07.437460Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-17T17:56:07.437614Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-17T17:56:07.442330Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-17T17:56:07.442407Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] Returning serverBytesSize = 490 to budget 2025-04-17T17:56:07.442447Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-04-17T17:56:07.442797Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-17T17:56:07.443397Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-04-17T17:56:07.443533Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 got read request: guid# 8936ac15-15a5ca2f-b86a268c-942f77d5 2025-04-17T17:56:07.443824Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-17T17:56:07.443889Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-17T17:56:07.443922Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-17T17:56:07.443993Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-17T17:56:07.444056Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] Returning serverBytesSize = 0 to budget 2025-04-17T17:56:07.444288Z :INFO: [] [] [687e6bda-8471dd63-590ba6-7c85760c] Closing read session. 
Close timeout: 0.000000s 2025-04-17T17:56:07.444336Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-17T17:56:07.444386Z :INFO: [] [] [687e6bda-8471dd63-590ba6-7c85760c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 53 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:56:07.444490Z :NOTICE: [] [] [687e6bda-8471dd63-590ba6-7c85760c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T17:56:07.444544Z :DEBUG: [] [] [687e6bda-8471dd63-590ba6-7c85760c] [] Abort session to cluster 2025-04-17T17:56:07.445056Z :NOTICE: [] [] [687e6bda-8471dd63-590ba6-7c85760c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:56:07.449542Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 grpc read done: success# 0, data# { } 2025-04-17T17:56:07.449596Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 grpc read failed 2025-04-17T17:56:07.449631Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 grpc closed 2025-04-17T17:56:07.449681Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_9343894614844347266_v1 is DEAD 2025-04-17T17:56:07.455000Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7494342411057628700:2651] disconnected; active server actors: 1 2025-04-17T17:56:07.455039Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7494342411057628700:2651] client cli disconnected session shared/cli_5_1_9343894614844347266_v1 2025-04-17T17:56:07.455414Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_9343894614844347266_v1 2025-04-17T17:56:08.394854Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 10] state 'Ready' dataSize 824 rowCount 2 cpuUsage 0 2025-04-17T17:56:08.494969Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:56:08.495133Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 10 shard idx 72057594046644480:2 data size 824 row count 2 2025-04-17T17:56:08.495210Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037889 maps to shardIdx: 72057594046644480:2 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], pathId map=Cluster, is column=0, is olap=0, RowCount 2, DataSize 824 2025-04-17T17:56:08.495299Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037889 2025-04-17T17:56:08.498276Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T17:56:08.700471Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] state 'Ready' dataSize 656 rowCount 2 cpuUsage 0 2025-04-17T17:56:08.802455Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:56:08.802606Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 12 shard idx 72057594046644480:4 data size 656 row count 2 2025-04-17T17:56:08.802674Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037891 maps to shardIdx: 72057594046644480:4 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], pathId map=Versions, is column=0, is olap=0, RowCount 2, DataSize 656 2025-04-17T17:56:08.802747Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037891 2025-04-17T17:56:08.802878Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T17:56:08.982829Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:56:08.982861Z node 5 :IMPORT WARN: Table 
profiles were not loaded >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> TInterconnectTest::TestBlobEvent >> TestProtocols::TestConnectProtocol >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> TInterconnectTest::OldFormat >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> LocalTableWriter::WriteTable |80.9%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> LocalTableWriter::WaitTxIds |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> TInterconnectTest::OldNbs [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 12553, MsgBus: 14504 2025-04-17T17:51:34.688512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341240455256117:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:34.688796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014b8/r3tmp/tmpkIAarB/pdisk_1.dat 2025-04-17T17:51:35.347013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:51:35.348751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:51:35.348830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:51:35.385508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12553, node 1 2025-04-17T17:51:35.681015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:51:35.681038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:51:35.681045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:51:35.686455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14504 TClient is connected to server 
localhost:14504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:51:36.755729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2025-04-17T17:51:39.530056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:51:39.678321Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341240455256117:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:51:39.678416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:51:39.744996Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-04-17T17:51:39.775498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341261930093226:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:39.775554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341261930093237:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:39.775596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:51:39.779818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T17:51:39.796769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341261930093240:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T17:51:39.858974Z node 1 :TX_PROXY ERROR: Actor# [1:7494341261930093292:2400] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2025-04-17T17:51:40.294427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2025-04-17T17:51:40.973838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.018033Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2025-04-17T17:51:41.439141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:51:41.502207Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2025-04-17T17:51:41.989009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.035882Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2025-04-17T17:51:42.443061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.490159Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, 
ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2025-04-17T17:51:42.912277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-17T17:51:42.962763Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float8, '0.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float8, '1.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float8, '2.5'::float8] ); 25 2025-04-17T17:51:43.341350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '0'::int2, ARRAY ['text 0'::text, 'text 0'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '1'::int2, ARRAY ['text 1'::text, 'text 1'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '2'::int2, ARRAY ['text 2'::text, 'text 2'::text] ); 1042 2025-04-17T17:51:43.726233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T17:51:43.780934Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '0'::int2, ARRAY ['bpchar 0'::bpchar, 'bpchar 0'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '1'::int2, ARRAY ['bpchar 1'::bpchar, 'bpchar 1'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '2'::int2, ARRAY ['bpchar 2'::bpchar, 'bpchar 2'::bpchar] ); 1043 2025-04-17T17:51:44.225546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-17T17:51:44.307791Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1015_b (key, value) VALUES ( '0'::int2, ARRAY ['varchar 0'::varc ... PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:49.150615Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:49.174296Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:52.886324Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7494342326493332895:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:52.886437Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:55.083499Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494342360853071943:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:55.083638Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:55.115300Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.314492Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494342360853072049:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:55.314630Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:55.315165Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494342360853072054:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:55.321617Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:55:55.337249Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7494342360853072056:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:55.407721Z node 9 :TX_PROXY ERROR: Actor# [9:7494342360853072108:2409] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:55.903779Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7494342360853072154:2364], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-04-17T17:55:55.904774Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YjRiNjIzMDktMWYyZmQ4YS1hZTI3MTkzYy1jODlkZGU4ZQ==, ActorId: [9:7494342360853072147:2360], ActorState: ExecuteState, TraceId: 01js2e97sf51rs75k260h3rsr8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:55:56.131001Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 17877, MsgBus: 11071 2025-04-17T17:55:59.587515Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494342377618270865:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:59.587897Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014b8/r3tmp/tmpJZJOpY/pdisk_1.dat 2025-04-17T17:55:59.847469Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:59.883857Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:59.884022Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:59.885329Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17877, node 10 2025-04-17T17:56:00.058227Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:00.058257Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:00.058272Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:00.058654Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11071 TClient is connected to server localhost:11071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:01.207834Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:01.217182Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:04.590331Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494342377618270865:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:04.590432Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:06.702459Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494342407683042615:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:06.702639Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:06.723502Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:56:06.859659Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494342407683042723:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:06.859851Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:06.860371Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494342407683042728:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:06.865874Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:56:06.884269Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494342407683042730:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:56:06.965099Z node 10 :TX_PROXY ERROR: Actor# [10:7494342407683042781:2408] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:07.808745Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7494342411978010155:2374], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-04-17T17:56:07.810457Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ODQ5YjRjOWYtYWE3ODc1YzctZTBhOTA1ZGItOGU1MGQ1ZGY=, ActorId: [10:7494342411978010147:2370], ActorState: ExecuteState, TraceId: 01js2e9ke0cvqktx4k31xthqrk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:56:07.829902Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> TestYmqHttpProxy::TestDeleteMessage |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2025-04-17T17:56:13.994458Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:56:14.542740Z node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:56:15.080383Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T17:56:15.082916Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |80.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> BsControllerConfig::DeleteStoragePool [GOOD] |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |80.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> KqpCost::OlapPointLookup >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc |80.9%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:201:2076] 2025-04-17T17:55:55.415366Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T17:55:55.419105Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T17:55:55.419429Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T17:55:55.421114Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:55:55.421371Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T17:55:55.421917Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T17:55:55.421957Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T17:55:55.422154Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T17:55:55.433987Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T17:55:55.434106Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T17:55:55.434269Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T17:55:55.434366Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:55:55.434481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:55:55.434548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-04-17T17:55:55.445740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T17:55:55.445879Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:55:55.458747Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:55:55.458859Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:55:55.458947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:55:55.459028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:55:55.459121Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:55:55.459165Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:55:55.459196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:55:55.459235Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:55:55.470648Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:55:55.470783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:55:55.481828Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:55:55.481934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T17:55:55.482778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T17:55:55.482827Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T17:55:55.483008Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T17:55:55.483053Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T17:55:55.493016Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:195:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:195:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:195:2076] 2025-04-17T17:55:57.353367Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T17:55:57.354435Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T17:55:57.354642Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T17:55:57.355838Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:55:57.356308Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T17:55:57.356799Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# 
NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T17:55:57.356829Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T17:55:57.357007Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T17:55:57.368153Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T17:55:57.368273Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T17:55:57.368371Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T17:55:57.368474Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:55:57.368590Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:55:57.368654Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-04-17T17:55:57.379853Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T17:55:57.380030Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:55:57.390742Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:55:57.390878Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:55:57.390971Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:55:57.391049Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:55:57.391182Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:55:57.391246Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:55:57.391297Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:55:57.391353Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:55:57.402055Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:55:57.402199Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:55:57.412905Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:55:57.413031Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T17:55:57.414259Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T17:55:57.414326Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T17:55:57.414523Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T17:55:57.414581Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T17:55:57.415042Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2906:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2906:2116] Leader for TabletID 72057594037932033 is [21:3016:2118] sender: [21:3017:2106] recipient: [21:2906:2116] 2025-04-17T17:56:00.224997Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:00.226031Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:00.226350Z n ... 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 96:1002 Path# /dev/disk3 2025-04-17T17:56:08.697708Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1000 Path# /dev/disk1 2025-04-17T17:56:08.697735Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1001 Path# /dev/disk2 2025-04-17T17:56:08.697760Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2025-04-17T17:56:08.697785Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1000 Path# /dev/disk1 2025-04-17T17:56:08.697811Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1001 Path# /dev/disk2 2025-04-17T17:56:08.697838Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1002 Path# /dev/disk3 2025-04-17T17:56:08.697865Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1000 Path# /dev/disk1 2025-04-17T17:56:08.697891Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1001 Path# /dev/disk2 2025-04-17T17:56:08.697918Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1002 Path# /dev/disk3 2025-04-17T17:56:08.697945Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1000 Path# /dev/disk1 2025-04-17T17:56:08.697977Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1001 Path# /dev/disk2 2025-04-17T17:56:08.698004Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1002 Path# /dev/disk3 2025-04-17T17:56:08.698030Z node 71 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1000 Path# /dev/disk1 2025-04-17T17:56:08.698054Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1001 Path# /dev/disk2 2025-04-17T17:56:08.698081Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1002 Path# /dev/disk3 2025-04-17T17:56:08.698108Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1000 Path# /dev/disk1 2025-04-17T17:56:08.698152Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1001 Path# /dev/disk2 2025-04-17T17:56:08.698330Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1002 Path# /dev/disk3 2025-04-17T17:56:08.698375Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1000 Path# /dev/disk1 2025-04-17T17:56:08.698402Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1001 Path# /dev/disk2 2025-04-17T17:56:08.698428Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1002 Path# /dev/disk3 2025-04-17T17:56:08.698454Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1000 Path# /dev/disk1 2025-04-17T17:56:08.698478Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1001 Path# /dev/disk2 2025-04-17T17:56:08.698505Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2025-04-17T17:56:08.698531Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1000 Path# /dev/disk1 2025-04-17T17:56:08.698559Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1001 Path# /dev/disk2 2025-04-17T17:56:08.698591Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1002 Path# /dev/disk3 2025-04-17T17:56:08.698618Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1000 Path# /dev/disk1 2025-04-17T17:56:08.698645Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1001 Path# /dev/disk2 2025-04-17T17:56:08.698672Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1002 Path# /dev/disk3 2025-04-17T17:56:08.698697Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1000 Path# /dev/disk1 2025-04-17T17:56:08.698722Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1001 Path# /dev/disk2 2025-04-17T17:56:08.698749Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2025-04-17T17:56:08.698797Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1000 Path# /dev/disk1 2025-04-17T17:56:08.698826Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1001 Path# /dev/disk2 2025-04-17T17:56:08.698853Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2025-04-17T17:56:08.698876Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1000 Path# /dev/disk1 2025-04-17T17:56:08.698901Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1001 Path# /dev/disk2 2025-04-17T17:56:08.698929Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2025-04-17T17:56:08.698956Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1000 Path# /dev/disk1 2025-04-17T17:56:08.698982Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1001 Path# /dev/disk2 2025-04-17T17:56:08.699009Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2025-04-17T17:56:08.699034Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1000 Path# /dev/disk1 2025-04-17T17:56:08.699060Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1001 Path# /dev/disk2 2025-04-17T17:56:08.699086Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2025-04-17T17:56:08.699113Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1000 Path# /dev/disk1 2025-04-17T17:56:08.699138Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1001 Path# /dev/disk2 2025-04-17T17:56:08.699164Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2025-04-17T17:56:08.699191Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1000 Path# /dev/disk1 2025-04-17T17:56:08.699217Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1001 Path# /dev/disk2 2025-04-17T17:56:08.699248Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1002 Path# /dev/disk3 2025-04-17T17:56:08.699278Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1000 Path# /dev/disk1 2025-04-17T17:56:08.699302Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1001 Path# /dev/disk2 2025-04-17T17:56:08.699330Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3 2025-04-17T17:56:08.699354Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1000 Path# /dev/disk1 2025-04-17T17:56:08.699379Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1001 Path# /dev/disk2 2025-04-17T17:56:08.699406Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2025-04-17T17:56:08.699436Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1000 Path# /dev/disk1 2025-04-17T17:56:08.699462Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1001 Path# /dev/disk2 2025-04-17T17:56:08.699487Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2025-04-17T17:56:08.699513Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1000 Path# /dev/disk1 2025-04-17T17:56:08.699538Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1001 Path# /dev/disk2 2025-04-17T17:56:08.699566Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2025-04-17T17:56:08.699592Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1000 Path# /dev/disk1 2025-04-17T17:56:08.699619Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1001 Path# /dev/disk2 2025-04-17T17:56:08.699644Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2025-04-17T17:56:08.699672Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1000 Path# /dev/disk1 2025-04-17T17:56:08.699699Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1001 Path# /dev/disk2 2025-04-17T17:56:08.699728Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2025-04-17T17:56:08.699754Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1000 Path# /dev/disk1 2025-04-17T17:56:08.699780Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1001 Path# /dev/disk2 2025-04-17T17:56:08.699805Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1002 Path# /dev/disk3 2025-04-17T17:56:08.719904Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T17:56:08.912395Z node 71 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.192609s 2025-04-17T17:56:08.912532Z node 71 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.192767s 2025-04-17T17:56:08.933058Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2025-04-17T17:56:09.026297Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-04-17T17:55:15.782572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342186889883083:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:15.782618Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000902/r3tmp/tmpod85dj/pdisk_1.dat 2025-04-17T17:55:16.586107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:16.594399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:16.594504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:16.607162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5401, node 1 2025-04-17T17:55:16.778493Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:16.778514Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:16.778522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:16.778665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:17.080340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:15172 2025-04-17T17:55:17.406773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:17.416381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:55:17.420280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:17.447026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:17.575656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:17.618566Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-17T17:55:17.625338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:17.687125Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-17T17:55:17.695966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:17.768348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:17.816632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:17.865246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:17.956311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:18.040723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:18.115209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:19.778956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342204069753712:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:19.779054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342204069753704:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:19.779226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:19.784586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T17:55:19.799417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342204069753718:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T17:55:19.865795Z node 1 :TX_PROXY ERROR: Actor# [1:7494342204069753771:2919] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:20.227687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e84jy80atzc5b8pbm32m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmEzZDFiNGQtOGU4MjNhMDAtZGI1NzVhMWUtYWZhYmE5MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root waiting... 2025-04-17T17:55:20.290769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:55:20.330730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:20.374013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:20.429077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:20.513304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:55:20.566046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:20.621058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:20.674540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T17:55:20.748725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T17:55:20.783983Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342186889883083:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:20.786051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:20.860504Z node 1 :HTTP INFO: Listening on http://127.0.0.1:8413 2025-04-17T17:55:21.870414Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T17:55:21.870550Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T17:55:21.872013Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T17:55:21.874270Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T17:55:21.875684Z node 1 :HTTP INFO: Listening on http://[::]:7347 2025-04-17T17:55:21.935694Z node 1 :SQS INFO: Request SQS users list 2025-04-17T17:55:21.935730Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T17:55:21.935779Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T17:55:21.935805Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T17:55:21.940299Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-17T17:55:21.940430Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( ... cutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:14.284799Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 33ms 
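[Editor's note: in the ExecutionEngineEvaluatedResponse dump above, column types are carried as numeric `Scheme:` ids inside the MiniKQL Struct/Optional/List type tree, and the typed Value collapses to the compact form logged just below as `Minikql data response: {"queues": [], "truncated": false}`. Two of the ids are confirmed elsewhere in this log (the table descriptions later show `Type: "Uint32" TypeId: 2` and `Type: "Utf8" TypeId: 4608`); mapping 4 to Uint64 and 6 to Bool is an inference from the column semantics and would need checking against NScheme::NTypeIds. A small sketch of that mapping:]

```cpp
#include <cstdint>
#include <initializer_list>
#include <iostream>

// Type ids as they appear in the "Scheme:" fields of the dumps above.
// 2 (Uint32) and 4608 (Utf8) are confirmed by TypeId fields elsewhere in
// this log; 4 = Uint64 and 6 = Bool are inferred from column semantics.
const char* SchemeIdToName(uint32_t id) {
    switch (id) {
        case 2:    return "Uint32";   // TablesFormat
        case 4:    return "Uint64";   // CreatedTimestamp, MasterTabletId, Shards, Version
        case 6:    return "Bool";     // FifoQueue, truncated
        case 4608: return "Utf8";     // Account, QueueName, DlqName, ...
        default:   return "unknown";
    }
}

int main() {
    for (uint32_t id : {2u, 4u, 6u, 4608u})
        std::cout << id << " -> " << SchemeIdToName(id) << '\n';
}
```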
2025-04-17T17:56:14.285321Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:14.285360Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-17T17:56:14.285509Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 34ms 2025-04-17T17:56:14.286084Z node 7 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:14.287887Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:14.287912Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 36ms 2025-04-17T17:56:14.288184Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:14.288218Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-17T17:56:14.295859Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 43ms 2025-04-17T17:56:14.296326Z node 7 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } 
Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:14.542707Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7494342443071050432:2443]: Pool not found 2025-04-17T17:56:14.542893Z node 7 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-17T17:56:15.014490Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7494342443071050448:2446]: Pool not found 2025-04-17T17:56:15.015444Z node 7 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-17T17:56:15.021104Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494342447366017854:2462], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:15.021223Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7494342447366017855:2463], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-17T17:56:15.021293Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:15.201778Z node 7 :HTTP DEBUG: (#37,[::1]:60484) incoming connection opened Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-04-17T17:56:15.201862Z node 7 :HTTP DEBUG: (#37,[::1]:60484) -> (POST /Root) 2025-04-17T17:56:15.202045Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [784a:8a00:6050:0:604a:8a00:6050:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 78c9d308-af903480-4303d5ad-b230e900 2025-04-17T17:56:15.202363Z node 7 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [78c9d308-af903480-4303d5ad-b230e900] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-04-17T17:56:15.202549Z node 7 :HTTP DEBUG: (#37,[::1]:60484) <- (400 InvalidAction) 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-04-17T17:56:15.202604Z node 7 :HTTP DEBUG: (#37,[::1]:60484) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2025-04-17T17:56:15.202642Z node 7 :HTTP DEBUG: (#37,[::1]:60484) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 78c9d308-af903480-4303d5ad-b230e900 x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-04-17T17:56:15.202736Z node 7 :HTTP DEBUG: (#37,[::1]:60484) connection closed >> KqpCost::OlapRangeFullScan >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> LocalTableWriter::WriteTable [GOOD] >> LocalTableWriter::WaitTxIds [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |81.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> KqpCost::Range |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-04-17T17:56:14.848973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342442693719843:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:14.849030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a28/r3tmp/tmpUnpnJQ/pdisk_1.dat 2025-04-17T17:56:15.435448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:15.435558Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:15.437625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:15.533845Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11645 TServer::EnableGrpc on GrpcPort 8250, node 1 2025-04-17T17:56:15.970980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:15.971010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:15.971017Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:15.971148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:16.664446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:16.759369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912577044 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-17T17:56:17.107878Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handshake: worker# [1:7494342451283655052:2296] 2025-04-17T17:56:17.108157Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:56:17.108392Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T17:56:17.108444Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Send handshake: worker# [1:7494342451283655052:2296] 2025-04-17T17:56:17.108936Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T17:56:17.109150Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-04-17T17:56:17.109305Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342455578622447:2359] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T17:56:17.109345Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T17:56:17.109451Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342455578622447:2359] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-17T17:56:17.130289Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342455578622447:2359] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:56:17.130399Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T17:56:17.130472Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342455578622444:2359] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-04-17T17:56:14.942808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342443428661438:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:14.943320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a03/r3tmp/tmp9omTYC/pdisk_1.dat 2025-04-17T17:56:15.516327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:15.516429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:15.518973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:15.544284Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1928 TServer::EnableGrpc on GrpcPort 22026, node 1 2025-04-17T17:56:15.970905Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:15.970946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:15.970958Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:15.971107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1928 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:16.665534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:16.759332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912577044 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-17T17:56:17.125468Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handshake: worker# [1:7494342456313563908:2361] 2025-04-17T17:56:17.125770Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:56:17.126049Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T17:56:17.126094Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Send handshake: worker# [1:7494342456313563908:2361] 2025-04-17T17:56:17.126597Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T17:56:17.131873Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-17T17:56:17.132009Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-17T17:56:17.132220Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342456313563911:2360] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T17:56:17.132268Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T17:56:17.132331Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342456313563911:2360] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-17T17:56:17.136629Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342456313563911:2360] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:56:17.136708Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T17:56:17.136783Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-17T17:56:18.130244Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-04-17T17:56:18.130417Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-04-17T17:56:18.130517Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342456313563911:2360] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-17T17:56:18.142827Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494342456313563911:2360] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T17:56:18.142905Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T17:56:18.142977Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494342456313563907:2360] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 27485, MsgBus: 14268 2025-04-17T17:55:38.291890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342289157191578:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:38.292848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00188b/r3tmp/tmpjHsDyF/pdisk_1.dat 2025-04-17T17:55:39.332241Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T17:55:39.346451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:39.347900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:39.348029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:39.351788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27485, node 1 2025-04-17T17:55:39.639674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:39.639703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:39.639712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:39.639830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14268 TClient is connected to server localhost:14268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:40.692797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:43.004345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342306337061400:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:43.004484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:43.301078Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342289157191578:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:43.314821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:43.361048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:43.518447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342310632028819:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:43.518528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:43.518931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342310632028824:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:43.529530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:43.541878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342310632028826:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:43.602242Z node 1 :TX_PROXY ERROR: Actor# [1:7494342310632028866:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:44.451102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:44.986553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:55:45.525809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:55:46.140837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:55:46.728294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.351809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:47.460371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:49.994033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-17T17:55:50.043484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T17:55:50.044968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-17T17:55:50.056046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } des ... se_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 24878, MsgBus: 64560 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00188b/r3tmp/tmp9Pq11b/pdisk_1.dat 2025-04-17T17:56:07.867747Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:08.024873Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:08.077553Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:08.077643Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:08.080478Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24878, node 3 2025-04-17T17:56:08.201055Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:08.201077Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:08.201085Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:08.201200Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64560 TClient is connected to server localhost:64560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:08.708834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:08.723268Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:12.054670Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342434471173948:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:12.054763Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:12.075876Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:56:12.143484Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342434471174066:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:12.143627Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:12.144393Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342434471174072:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:12.148675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:56:12.160434Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:56:12.160649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494342434471174074:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:56:12.241569Z node 3 :TX_PROXY ERROR: Actor# [3:7494342434471174114:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:12.830751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:13.463658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:56:14.094820Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T17:56:14.918827Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:56:15.517414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:56:16.045490Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:56:16.101822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:56:18.537308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown >> KqpCost::IndexLookupAndTake+useSink |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> DstCreator::WithSyncIndexAndIntermediateDir >> TestYmqHttpProxy::TestDeleteMessage [GOOD] |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |81.0%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> KqpEffects::InsertAbort_Select_Success >> KqpEffects::InsertAbort_Params_Success >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |81.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TestKinesisHttpProxy::TestCounters >> KqpCost::PointLookup [GOOD] >> KqpCost::OlapRangeFullScan [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> KqpCost::OlapPointLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 6237, MsgBus: 61361 2025-04-17T17:56:18.344424Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342459509883933:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:18.344606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001b12/r3tmp/tmpFa9SW8/pdisk_1.dat 2025-04-17T17:56:18.867385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:18.872991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:18.873112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:18.881837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6237, node 1 2025-04-17T17:56:18.994699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:18.994722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:18.994727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:18.994859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61361 TClient is connected to server localhost:61361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:20.053736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:20.095147Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:20.112862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:20.296726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:20.639196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:20.833047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
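[Editor's note] The KQP_WORKLOAD_SERVICE warnings that follow ("Resource pool default not found or you don't have access permissions") are routine at test startup: the workload manager looks up /Root/.metadata/workload_manager/pools/default, gets NOT_FOUND, schedules a retry, and creates the pool itself, which is why a later TX_PROXY line reports that the path already exists. Pools can also be created explicitly in YQL; a hedged sketch, where the pool name and option names are illustrative and may differ between YDB versions:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed option name
        QUEUE_SIZE = 100              -- assumed option name
    );
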
2025-04-17T17:56:22.696928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342476689754666:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.697068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:23.036732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:23.087017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:23.117496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:23.172274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:23.243870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:23.286233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:23.334531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:23.339286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342459509883933:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:23.339348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:23.436551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342480984722633:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:23.436615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:23.436956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342480984722638:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:23.441158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:23.472657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342480984722640:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:23.538883Z node 1 :TX_PROXY ERROR: Actor# [1:7494342480984722697:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-17T17:55:19.874276Z :ReadSession INFO: Random seed for debugging is 1744912519874240 2025-04-17T17:55:20.626512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342212030683832:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:20.626560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:20.769488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342207959259123:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:20.769543Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:21.190222Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017e3/r3tmp/tmpDeBk31/pdisk_1.dat 2025-04-17T17:55:21.229420Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:55:21.665054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:21.802923Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:21.987150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:21.987240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:22.013836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:22.013904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:22.053023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:22.057211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:22.063916Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:22.065383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22180, node 1 2025-04-17T17:55:22.360470Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0017e3/r3tmp/yandexyF5UFR.tmp 2025-04-17T17:55:22.360491Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: /home/runner/.ya/build/build_root/6873/0017e3/r3tmp/yandexyF5UFR.tmp 2025-04-17T17:55:22.360652Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0017e3/r3tmp/yandexyF5UFR.tmp 2025-04-17T17:55:22.360754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:22.439618Z INFO: TTestServer started on Port 24963 GrpcPort 22180 TClient is connected to server localhost:24963 PQClient connected to localhost:22180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:23.042755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T17:55:25.630270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342212030683832:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:25.630340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:25.772125Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342207959259123:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:25.772175Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:26.190975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342233729063231:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:26.191082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342233729063198:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:26.191316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:26.199215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:55:26.233196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342233729063235:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:55:26.450410Z node 2 :TX_PROXY ERROR: Actor# [2:7494342233729063263:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:26.938087Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342233729063270:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:26.938651Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWU5MTdiN2MtMTk2NzVmZDItZjE3NzY1MmMtZmJhMzE3YmU=, ActorId: [2:7494342233729063195:2309], ActorState: ExecuteState, TraceId: 01js2e8av394tshm0y73bp2c3y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:26.940711Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:26.940878Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342237800488649:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:26.944311Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjI3Zjc2MTctYjAyMTI2NjctNGIzMTAyZTQtNmFlYzljNjU=, ActorId: [1:7494342237800488612:2338], ActorState: ExecuteState, TraceId: 01js2e8b0f89n6y1t5hm80yg0r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:26.944663Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:26.949461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:55:27.140832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:27.318690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:22180", true, true, 1000); 2025-04-17T17:55:27.603138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2e8c2p6ax2tkzw23h8rvj5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdmNDQ4YjEtY2NmYzU2NjEtN2FiNDY1OTQtNTY2ODNkMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342242095456364:2985] === CheckClustersList. Ok 2025-04-17T17:55:32.760577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:22180 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorRea ... 
ser_7_1_16938829498271788247_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2025-04-17T17:56:23.123626Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 8dcbcbab-604bddc6-5eca863b-37f15ba1 has messages 1 2025-04-17T17:56:23.123701Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 read done: guid# 8dcbcbab-604bddc6-5eca863b-37f15ba1, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-04-17T17:56:23.123728Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 response to read: guid# 8dcbcbab-604bddc6-5eca863b-37f15ba1 2025-04-17T17:56:23.123984Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 Process answer. Aval parts: 0 2025-04-17T17:56:23.124820Z :DEBUG: [/Root] [/Root] [421e3215-28362ee7-a1b52ab6-49e1ed0] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:23.125299Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-17T17:56:23.125240Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 grpc read done: success# 1, data# { read { } } 2025-04-17T17:56:23.125357Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 got read request: guid# 50fd4da5-4d57a16b-1cd59b4f-c5bf755c 2025-04-17T17:56:23.125484Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-04-17T17:56:23.125555Z :DEBUG: [/Root] [/Root] [421e3215-28362ee7-a1b52ab6-49e1ed0] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-17T17:56:21.978000Z WriteTime: 2025-04-17T17:56:21.981000Z Ip: "ipv6:[::1]:47524" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:47524" } } } } 2025-04-17T17:56:23.125744Z :INFO: [/Root] [/Root] [421e3215-28362ee7-a1b52ab6-49e1ed0] Closing read session. Close timeout: 3.000000s 2025-04-17T17:56:23.125783Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-17T17:56:23.125845Z :INFO: [/Root] [/Root] [421e3215-28362ee7-a1b52ab6-49e1ed0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1487 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:56:23.126588Z :INFO: [/Root] [/Root] [421e3215-28362ee7-a1b52ab6-49e1ed0] Closing read session. 
Close timeout: 0.000000s 2025-04-17T17:56:23.126659Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-17T17:56:23.126713Z :INFO: [/Root] [/Root] [421e3215-28362ee7-a1b52ab6-49e1ed0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1488 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:56:23.126857Z :NOTICE: [/Root] [/Root] [421e3215-28362ee7-a1b52ab6-49e1ed0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:56:23.131601Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 grpc read done: success# 0, data# { } 2025-04-17T17:56:23.131624Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 grpc read failed 2025-04-17T17:56:23.131661Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 grpc closed 2025-04-17T17:56:23.131702Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_16938829498271788247_v1 is DEAD 2025-04-17T17:56:23.133442Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7494342473064346309:2544] disconnected; active server actors: 1 2025-04-17T17:56:23.133477Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7494342473064346309:2544] client user disconnected session shared/user_7_1_16938829498271788247_v1 2025-04-17T17:56:23.139600Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_16938829498271788247_v1 2025-04-17T17:56:23.139667Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7494342473064346311:2547] destroyed 2025-04-17T17:56:23.139736Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_16938829498271788247_v1 2025-04-17T17:56:24.005267Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:56:24.005298Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:25.659459Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.659500Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.659541Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:25.669066Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:25.678956Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:25.679226Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.680566Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-17T17:56:25.682285Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.682327Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.682361Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:25.692049Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:25.710471Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:25.710657Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.711033Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-17T17:56:25.712258Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:25.718080Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-17T17:56:25.718290Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-17T17:56:25.718510Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:25.718562Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:25.718592Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:56:25.718644Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-17T17:56:25.727963Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.728008Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.728041Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:25.735274Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:25.766437Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:25.766596Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.766979Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:25.767795Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.769838Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:25.771714Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:25.771819Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:56:25.771949Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2025-04-17T17:56:25.775151Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.775202Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.775238Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:25.786497Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:25.804618Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:25.804773Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.805594Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:25.806379Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:25.806467Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:25.806556Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |81.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 18905, MsgBus: 8041 2025-04-17T17:56:18.434376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342459376357790:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:18.436903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b0b/r3tmp/tmpN0AiZo/pdisk_1.dat 2025-04-17T17:56:18.856657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:18.866450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:18.866583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:18.869731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18905, node 1 2025-04-17T17:56:18.950777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:18.950815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:18.950829Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:18.951010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8041 TClient is connected to server localhost:8041 WaitRootIsUp 'Root'... 
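[Editor's note] The KqpCost suites whose output follows assert on per-phase execution statistics; the query_phases { ... } block printed at the end of the OlapRangeFullScan log below is a textual dump of a Ydb.TableStats.QueryStats message. A minimal sketch of aggregating CPU time from such a message, assuming the generated-proto header path from the ydb repo layout (not code from these tests):

    #include <ydb/public/api/protos/ydb_query_stats.pb.h>  // assumed header path
    #include <cstdint>

    // Recomputes total_cpu_time_us the way the dump reports it:
    // per-phase CPU + compilation CPU + process CPU.
    uint64_t TotalCpuUs(const Ydb::TableStats::QueryStats& stats) {
        uint64_t cpu = stats.process_cpu_time_us() + stats.compilation().cpu_time_us();
        for (const auto& phase : stats.query_phases()) {
            cpu += phase.cpu_time_us();
        }
        return cpu;  // for the dump below: 181882 + 574775 + 390 == 757047
    }
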
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:19.701287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:19.731165Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:19.749925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:19.968934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:20.128557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:20.227853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:21.975597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342472261261460:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:21.975757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.310704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.352237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.396996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.453563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.500197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.582130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.645589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.737672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342476556229428:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.737761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.737962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342476556229433:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.742148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:22.754155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342476556229435:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:22.836852Z node 1 :TX_PROXY ERROR: Actor# [1:7494342476556229491:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:23.434138Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342459376357790:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:23.452510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:24.084869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:56:24.179893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:56:24.180105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:56:24.180374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:56:24.180529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:56:24.180648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:56:24.180776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:56:24.180898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:56:24.181029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:56:24.184780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:56:24.185022Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:56:24.185211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:56:24.185326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7494342485146164457:2514];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:56:24.185704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494342485146164459:2515];tablet_id=72075186224037923;process=TTxInitSc ... scription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:56:24.549748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:56:24.549775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:56:24.549827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:56:24.549847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:56:24.549885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:56:24.549909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:56:24.550383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:56:24.550416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:56:24.550555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:56:24.550578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:56:24.550718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:56:24.550744Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:56:24.550877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:56:24.550899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:56:24.551004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:56:24.551030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:56:24.551372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:56:24.551395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:56:24.551464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:56:24.551487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:56:24.551612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:56:24.551632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:56:24.551701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:56:24.551724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:56:24.551776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:56:24.551819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:56:24.551867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:56:24.551891Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:56:24.552297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:56:24.552327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:56:24.552467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:56:24.552512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:56:24.552624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:56:24.552649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:56:24.552778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:56:24.552817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:56:24.552906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:56:24.552929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:56:24.611648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.616020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.621624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.622670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.629262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.635082Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.635082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.640790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.641969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.649638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:24.854654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T17:56:24.855316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;self_id=[1:7494342485146164587:2531];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037926;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037931;receive=72075186224037930; 2025-04-17T17:56:24.855667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T17:56:24.856685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; query_phases { duration_us: 311421 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 72 } } cpu_time_us: 181882 } compilation { duration_us: 582174 cpu_time_us: 574775 } process_cpu_time_us: 390 total_duration_us: 906784 total_cpu_time_us: 757047 >> KqpCost::Range [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15626, MsgBus: 14977 2025-04-17T17:56:17.929620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342455482771714:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:17.930025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1d/r3tmp/tmpMzVQGP/pdisk_1.dat 2025-04-17T17:56:18.624855Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:18.630623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:18.630743Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:18.632593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15626, node 1 2025-04-17T17:56:18.719322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:18.719345Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:18.719355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:18.721927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14977 TClient is connected to server localhost:14977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:19.381541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:19.397691Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:19.413026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:19.653401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:19.861452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:19.975978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:22.080391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342476957609822:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.080481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.468857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.568307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.651130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.723069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.775864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.832032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.908553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:22.912300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342455482771714:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:22.912364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:22.999617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342476957610498:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.999726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:22.999961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342476957610503:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:23.003387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:23.018059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342476957610505:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:23.119061Z node 1 :TX_PROXY ERROR: Actor# [1:7494342481252577858:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:24.644395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:56:24.931367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:56:24.931562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:56:24.931802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:56:24.931900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:56:24.931988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:56:24.932071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:56:24.932169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:56:24.932255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:56:24.932349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:56:24.932434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:56:24.932522Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:56:24.932613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:56:24.970525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[1:7494342485547545616:2517];tablet_id=72075186224037931;process=TTxIni ... ;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:56:25.194706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:56:25.194773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:56:25.194795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:56:25.194842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:56:25.194859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:56:25.194890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:56:25.194909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:56:25.195273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:56:25.195308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:56:25.195441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:56:25.195463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:56:25.195575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:56:25.195617Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:56:25.195754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:56:25.195774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:56:25.195863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:56:25.195879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:56:25.257570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:56:25.257647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:56:25.257726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:56:25.257756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:56:25.257909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:56:25.257932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:56:25.257996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:56:25.258018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:56:25.258064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:56:25.258093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:56:25.258132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:56:25.258156Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:56:25.270346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:56:25.270407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:56:25.270547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:56:25.270571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:56:25.270664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:56:25.270686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:56:25.270830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:56:25.270860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:56:25.270938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:56:25.270957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:56:25.303547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.306469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.311368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.316919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.320380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.325905Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.330772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.340172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.348331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.353740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T17:56:25.621059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T17:56:25.621431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T17:56:25.623476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;self_id=[1:7494342485547545642:2520];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037926;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037931;receive=72075186224037930; 2025-04-17T17:56:25.623787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2 |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-04-17T17:56:17.717048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.719850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.719927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e4f/r3tmp/tmpBTmEHP/pdisk_1.dat 2025-04-17T17:56:18.489477Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.151152s 2025-04-17T17:56:18.489625Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.151384s 2025-04-17T17:56:18.561241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:18.710598Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:18.783025Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:56:18.783973Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:56:18.784246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:18.784378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:18.823055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:18.940282Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:56:18.940361Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:56:18.940550Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T17:56:19.080018Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:56:19.080155Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:56:19.080787Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:56:19.080888Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:56:19.081218Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:56:19.081429Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 
281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:56:19.081535Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:56:19.109764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:56:19.110421Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:56:19.111169Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:56:19.111263Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:56:19.147792Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:19.149240Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:19.149746Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-04-17T17:56:19.150024Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:56:19.163591Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:56:19.196346Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:19.197910Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:19.198438Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-04-17T17:56:19.198632Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:56:19.206264Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:56:19.206719Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:56:19.206840Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:56:19.208349Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:56:19.208424Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:56:19.208479Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:56:19.208883Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:56:19.209018Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:56:19.209101Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-04-17T17:56:19.209453Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:56:19.209510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:56:19.210561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T17:56:19.210612Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T17:56:19.210656Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T17:56:19.210851Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:56:19.210957Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:56:19.211004Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-04-17T17:56:19.223106Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:56:19.257005Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:56:19.257230Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:56:19.257366Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-04-17T17:56:19.257406Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:56:19.257438Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:56:19.257475Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:19.257804Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:19.257874Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:19.257956Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:56:19.257992Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T17:56:19.258051Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:56:19.258145Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-04-17T17:56:19.258197Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T17:56:19.258233Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T17:56:19.258260Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:56:19.258595Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:19.258633Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:19.258886Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:56:19.258998Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:56:19.259115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 
2025-04-17T17:56:19.259164Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:56:19.259214Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:56:19.259255Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:56:19.259289Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:56:19.259343Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:56:19.259392Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:19.259446Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T17:56:19.259524Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T17:56:19.259992Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:690:2584], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:19.260035Z node 1 :TX_DATASHARD TRACE: StateWork, processi ... an for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-04-17T17:56:27.379786Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:56:27.379822Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:56:27.379856Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:56:27.379910Z node 2 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-17T17:56:27.379950Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T17:56:27.379995Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:56:27.380022Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:56:27.380045Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:56:27.380078Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T17:56:27.380148Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:56:27.380298Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-04-17T17:56:27.380347Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:56:27.380412Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:56:27.380440Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:56:27.380474Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T17:56:27.380515Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:56:27.380599Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 
is DelayCompleteNoMoreRestarts 2025-04-17T17:56:27.380645Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T17:56:27.380692Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:56:27.380724Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:56:27.380768Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T17:56:27.380790Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:56:27.380817Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-17T17:56:27.394818Z node 2 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T17:56:27.394900Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T17:56:27.394958Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T17:56:27.395051Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:27.396975Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-17T17:56:27.397037Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715661 ProcessProposeTransaction 2025-04-17T17:56:27.397138Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:861:2691] DataReq marker# P0 2025-04-17T17:56:27.397265Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-04-17T17:56:27.397497Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-04-17T17:56:27.397706Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-17T17:56:27.397795Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-04-17T17:56:27.398151Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 861 RawX2: 8589937283 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t]\003\000\000\000\000\000\000\021\203\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-04-17T17:56:27.398324Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:56:27.398437Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:56:27.398651Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-04-17T17:56:27.398745Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-04-17T17:56:27.398805Z node 2 :TX_DATASHARD TRACE: Execution status 
for [0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:56:27.398863Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-04-17T17:56:27.398906Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:56:27.398941Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:56:27.398990Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-17T17:56:27.399061Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-04-17T17:56:27.399107Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:56:27.399131Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:56:27.399168Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-04-17T17:56:27.399193Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-04-17T17:56:27.399223Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:56:27.399280Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-17T17:56:27.399305Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-17T17:56:27.399326Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T17:56:27.399377Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:861:2691] for [0:281474976715661] at 72075186224037888 2025-04-17T17:56:27.399431Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-17T17:56:27.399490Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:56:27.399577Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-04-17T17:56:27.399646Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2025-04-17T17:56:27.399705Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-04-17T17:56:27.399827Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-04-17T17:56:27.399868Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-17T17:56:27.399982Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-17T17:56:27.400017Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T17:56:27.400086Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, 
Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:27.400136Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:27.400199Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:56:27.400245Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:56:27.400287Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T17:56:27.400326Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T17:56:27.400368Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-04-17T17:56:27.400404Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-17T17:56:27.400445Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-17T17:56:27.400482Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-04-17T17:56:27.400519Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-04-17T17:56:27.400792Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-17T17:56:27.400828Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T17:56:27.400869Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:56:27.400913Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:56:27.400954Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:56:27.401022Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:27.401475Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:867:2696], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T17:56:27.401512Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 6639, MsgBus: 14625 2025-04-17T17:56:20.280711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342465992409367:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:20.280759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b00/r3tmp/tmp8gqTRi/pdisk_1.dat 2025-04-17T17:56:20.772182Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:20.779544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:20.779647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-17T17:56:20.782233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6639, node 1 2025-04-17T17:56:21.011371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:21.011400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:21.011410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:21.011555Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14625 TClient is connected to server localhost:14625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:22.025695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:22.061448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:22.301681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:22.616117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:22.705949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:24.704601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342483172280335:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:24.704703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:25.007409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:25.045794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:25.086095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:25.145112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:25.210756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:25.283384Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342465992409367:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:25.283440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:25.291099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:25.372952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:25.476606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342487467248297:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:25.476724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:25.477114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342487467248302:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:25.482040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:25.500035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342487467248304:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:25.554708Z node 1 :TX_PROXY ERROR: Actor# [1:7494342487467248362:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> Donor::ConsistentWritesWhenSwitchingToDonorMode >> Donor::CheckOnlineReadRequestToDonor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2025-04-17T17:56:23.806481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342482456730619:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:23.814222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001979/r3tmp/tmps8UvlX/pdisk_1.dat 2025-04-17T17:56:24.361157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:24.361253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:24.362933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:24.366303Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6354 TServer::EnableGrpc on GrpcPort 26521, node 1 2025-04-17T17:56:24.626916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:24.626948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:24.626955Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:24.627093Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:25.038874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:25.056826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912585598 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912585094 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912585598 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-04-17T17:56:25.732374Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:56:25.732511Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T17:56:25.732525Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T17:56:25.733453Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T17:56:28.079514Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744912585598, tx_id: 281474976710658 } } } 2025-04-17T17:56:28.079930Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T17:56:28.082503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-17T17:56:28.086687Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T17:56:28.086702Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T17:56:28.155778Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T17:56:28.157104Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744912588188 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns 
{ Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 7 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Gen ... 
maVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2025-04-17T17:56:28.196830Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744912588188 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744912588188 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> KqpCost::IndexLookupAndTake+useSink [GOOD] >> Donor::SlayAfterWiping >> Donor::SkipBadDonor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-04-17T17:56:14.932181Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: 
NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:14.984339Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:14.987398Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T17:56:14.987682Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:56:15.041602Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:56:15.052074Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:56:15.162417Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:56:15.170299Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T17:56:15.177588Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T17:56:15.177765Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T17:56:15.178252Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:56:15.178859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:56:15.178943Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T17:56:15.180882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:56:15.180926Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:15.330352Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:56:15.368218Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T17:56:15.368462Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:56:15.373130Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T17:56:15.373218Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T17:56:15.373262Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T17:56:15.373304Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:56:15.373490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:15.373549Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:15.373856Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T17:56:15.373959Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T17:56:15.374064Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:56:15.374093Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:56:15.374135Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T17:56:15.374167Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T17:56:15.374231Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T17:56:15.374269Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 
2025-04-17T17:56:15.374315Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T17:56:15.374441Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:15.374483Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:15.374559Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T17:56:15.382762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T17:56:15.382855Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:56:15.382968Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T17:56:15.383122Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T17:56:15.383169Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T17:56:15.383276Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T17:56:15.383328Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:56:15.383365Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T17:56:15.383429Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T17:56:15.383467Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:56:15.383822Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T17:56:15.383887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T17:56:15.383925Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T17:56:15.383957Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:56:15.384016Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T17:56:15.384069Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T17:56:15.384117Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T17:56:15.384167Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T17:56:15.384192Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T17:56:15.399088Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T17:56:15.399181Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T17:56:15.399217Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T17:56:15.399256Z node 1 :TX_DATASHARD 
TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T17:56:15.399320Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T17:56:15.399945Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:15.400001Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:15.400065Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T17:56:15.400150Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T17:56:15.400191Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:56:15.400322Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T17:56:15.400362Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:56:15.400431Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T17:56:15.400468Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T17:56:15.408545Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T17:56:15.408649Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:56:15.408934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:15.408986Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:56:15.409054Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T17:56:15.409111Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:56:15.409146Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T17:56:15.409187Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T17:56:15.409224Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T17:56:15.409290Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:56:15.409338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T17:56:15.409385Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T17:56:15.409421Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T17:56:15.409611Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T17:56:15.409650Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T17:56:15.409678Z node 1 :TX_DATASHARD TRACE: Advance execution plan for 
[1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T17:56:15.409702Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T17:56:15.409726Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T17:56:15.409796Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T17:56:15.409821Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T17:56:15.409866Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T17:56:15.409895Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T17:56:15.409942Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T17:56:15.409990Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T17:56:15.410036Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T17:56:15.410091Z node 1 :TX_DATA ... [1000005:154] at 9437184 on unit CompleteOperation 2025-04-17T17:56:29.413069Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.413115Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T17:56:29.413160Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T17:56:29.413442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:56:29.413477Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2025-04-17T17:56:29.413514Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.413560Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T17:56:29.413590Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:56:29.413696Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T17:56:29.413726Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:56:29.413749Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-17T17:56:29.413805Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.413851Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T17:56:29.413876Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:56:29.413970Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T17:56:29.413991Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 
2025-04-17T17:56:29.414013Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:56:29.414035Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-17T17:56:29.414078Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.414123Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T17:56:29.414146Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:56:29.414316Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T17:56:29.414340Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T17:56:29.414362Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:56:29.414387Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-17T17:56:29.414435Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.414483Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T17:56:29.414523Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:56:29.414627Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T17:56:29.414676Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:56:29.414702Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-17T17:56:29.414736Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.414776Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T17:56:29.414798Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:56:29.414908Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:56:29.414939Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T17:56:29.414986Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.415039Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T17:56:29.415068Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:56:29.415150Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T17:56:29.415175Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T17:56:29.415252Z node 1 :TX_DATASHARD DEBUG: Complete 
[1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-04-17T17:56:29.415309Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T17:56:29.415332Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T17:56:29.415556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T17:56:29.415611Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.415654Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T17:56:29.415742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-17T17:56:29.415765Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.415785Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-17T17:56:29.415898Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T17:56:29.415928Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.415949Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T17:56:29.415996Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-17T17:56:29.416017Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.416049Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-17T17:56:29.416198Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T17:56:29.416224Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.416244Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T17:56:29.416380Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T17:56:29.416421Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.416442Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 
2025-04-17T17:56:29.416547Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T17:56:29.416571Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.416594Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-17T17:56:29.416651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T17:56:29.416678Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.416699Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-17T17:56:29.416784Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T17:56:29.416806Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.416826Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T17:56:29.416892Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T17:56:29.416924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.416955Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T17:56:29.417048Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T17:56:29.417075Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:56:29.417098Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 9038, MsgBus: 7323 2025-04-17T17:56:22.281994Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342477988295310:2222];send_to=[0:7307199536658146131:7762515]; 
2025-04-17T17:56:22.282207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001af5/r3tmp/tmpEhZbnT/pdisk_1.dat 2025-04-17T17:56:22.945065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:22.961659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:22.961768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:22.963469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9038, node 1 2025-04-17T17:56:23.283072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:23.283520Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:23.283530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:23.283708Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7323 TClient is connected to server localhost:7323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:24.128183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:24.202167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:24.529467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:24.737348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:24.833321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:27.142744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342499463133403:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:27.142917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:27.270297Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342477988295310:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:27.296578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:27.534203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:27.616072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:27.654258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:27.683943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:27.721395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:27.764242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:27.809859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:27.889368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342499463134064:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:27.889458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:27.889569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342499463134069:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:27.893492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:27.909941Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:56:27.911614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342499463134071:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:27.984780Z node 1 :TX_PROXY ERROR: Actor# [1:7494342499463134128:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:29.047368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] Test command err: RandomSeed# 12193575184159425956 2025-04-17T17:56:31.898743Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T17:56:31.901082Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3299203331931549208] 2025-04-17T17:56:31.923301Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:0:0:0:2097152:1] 2025-04-17T17:56:31.923576Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 1 PartsResurrected# 1 >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> Donor::ContinueWithFaultyDonor >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> TestKinesisHttpProxy::TestCounters [GOOD] >> LabeledDbCounters::OneTabletRestart [GOOD] >> LabeledDbCounters::TwoTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16302, MsgBus: 12629 2025-04-17T17:56:22.741681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342475185261121:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:22.776022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aed/r3tmp/tmpdrZGBg/pdisk_1.dat 2025-04-17T17:56:23.483433Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:23.485707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0))
VolatileState: Unknown -> Disconnected 2025-04-17T17:56:23.485831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:23.494114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16302, node 1 2025-04-17T17:56:23.836670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:23.836695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:23.836703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:23.836818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12629 TClient is connected to server localhost:12629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:24.885132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:24.919863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:25.175930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:25.520722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:25.675837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:27.742281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342475185261121:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:27.742349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:27.927189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342496660099277:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:27.927310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.340779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:28.375129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:28.426424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:28.470546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:28.512965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:28.560083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:28.645415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:28.736668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342500955067254:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.736761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.737045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342500955067259:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.740836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:28.762250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342500955067261:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:28.844419Z node 1 :TX_PROXY ERROR: Actor# [1:7494342500955067318:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:30.336592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> Donor::SlayAfterWiping [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> Donor::SkipBadDonor [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete 2025-04-17 17:56:07,765 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:08,165 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 18656 764M 767M 756M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/6873/00192e/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modul 22029 2.0G 2.0G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/6873/00192e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_del 30566 487M 480M 481M └─ moto_server s3 --port 31934 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File 
"contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 241, in test_ttl_delete self.ydb_client.query(""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, 
spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...nner/.ya/build/build_root/6873/00192e/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/00192e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/00192e', '--source-root', '/home/runner/.ya/build/build_root/6873/00192e/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/00192e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...nner/.ya/build/build_root/6873/00192e/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/00192e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/00192e', '--source-root', '/home/runner/.ya/build/build_root/6873/00192e/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/00192e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', 
'--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> TestKinesisHttpProxy::TestEmptyHttpBody ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 15391901872090640623 2025-04-17T17:56:32.884623Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T17:56:32.886826Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 746236959009695949] 2025-04-17T17:56:32.909334Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SkipBadDonor [GOOD] Test command err: RandomSeed# 11195511051397766698 2025-04-17T17:56:33.442375Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T17:56:33.444812Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12204926587688936619] 2025-04-17T17:56:33.465276Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TestYmqHttpProxy::TestListQueueTags ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:236:2060] recipient: [1:218:2142] 2025-04-17T17:55:29.925735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:29.925871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.925912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:29.926006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:29.926083Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:29.926119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:29.926232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:29.926307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:29.926657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:30.075135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:55:30.075213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:30.092063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:30.092493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:30.092675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:30.110920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:30.111309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:30.112108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.112402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:30.147982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.149624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:30.149714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.149918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:30.149980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:30.150030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:30.150214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.177145Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:348:2060] recipient: [1:17:2064] 2025-04-17T17:55:30.450403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:30.450647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.450886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:30.451115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:30.451170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.454694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.454834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:30.455120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.455181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:30.455217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:30.455247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:30.457467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.457524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:30.457582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:30.459421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.459473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.459532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:30.459579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.463424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:30.467323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:30.467570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:30.468606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:30.468748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:30.468797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:30.469144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:30.469211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:30.469380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:30.469487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:30.479930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:30.480020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:30.480233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:30.480272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:315:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:30.480566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:30.480618Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:30.480874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:30.480907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.480958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:30.480992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.481044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:30.481089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:30.481124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:30.481168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:30.481258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:30.481300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:30.481331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:30.483677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:55:30.483827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:55:30.483865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... hemaChanged> complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:56:33.938123Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:33.938221Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2025-04-17T17:56:33.938349Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:968:2736] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-17T17:56:33.938481Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:235:2153], Recipient [7:968:2736]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-17T17:56:33.938530Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T17:56:33.938579Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2025-04-17T17:56:33.938657Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2025-04-17T17:56:33.939003Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:56:33.939056Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:56:33.939145Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T17:56:33.939196Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T17:56:33.939347Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:33.939391Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 2/3 2025-04-17T17:56:33.939744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-17T17:56:33.939801Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 2/3 2025-04-17T17:56:33.939850Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-17T17:56:33.939902Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-17T17:56:33.940321Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-17T17:56:33.940356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:33.940403Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2025-04-17T17:56:33.940500Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:964:2733] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-17T17:56:33.940639Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:235:2153], Recipient [7:964:2733]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-17T17:56:33.940683Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T17:56:33.940716Z node 7 :TX_DATASHARD DEBUG: 
Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2025-04-17T17:56:33.940775Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2025-04-17T17:56:33.940986Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:56:33.941024Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:56:33.941068Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-17T17:56:33.941129Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2025-04-17T17:56:33.941214Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:33.941254Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-17T17:56:33.941285Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:56:33.941321Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-17T17:56:33.941345Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:56:33.941386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-17T17:56:33.941467Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:571:2397] message: TxId: 104 2025-04-17T17:56:33.941525Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T17:56:33.941578Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:56:33.941614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:56:33.941749Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-17T17:56:33.941797Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-17T17:56:33.941839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-17T17:56:33.941880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-04-17T17:56:33.941904Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-17T17:56:33.941925Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-17T17:56:33.941971Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-17T17:56:33.949880Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:33.950064Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:33.950210Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:571:2397] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-04-17T17:56:33.950393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 
2025-04-17T17:56:33.950444Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1023:2776] 2025-04-17T17:56:33.950732Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1025:2778], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:33.950803Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:33.950836Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-17T17:56:33.951935Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:547:2102], Recipient [7:235:2153] 2025-04-17T17:56:33.951990Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:56:33.963984Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:56:33.964624Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T17:56:33.964686Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T17:56:34.009338Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:34.017479Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:56:34.017742Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-04-17T17:56:34.017845Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T17:56:34.018465Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T17:56:34.018521Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T17:56:34.019134Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received 
event# 269877761, Sender [7:1094:2847], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:34.019203Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:34.019245Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:56:34.019477Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:571:2397], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-04-17T17:56:34.019518Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:56:34.019612Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T17:56:34.019769Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:56:34.019813Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1092:2845] 2025-04-17T17:56:34.020039Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1094:2847], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:34.020084Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:34.020124Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |81.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> Donor::ContinueWithFaultyDonor [GOOD] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |81.2%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ContinueWithFaultyDonor [GOOD] Test command err: RandomSeed# 3434841318005348683 2025-04-17T17:56:35.397435Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T17:56:35.399840Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10066833103279631370] 2025-04-17T17:56:35.421618Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-04-17T17:55:33.756829Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342264609287595:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:33.756863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000816/r3tmp/tmp2d8iNL/pdisk_1.dat 2025-04-17T17:55:34.479871Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:34.482045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:34.482148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:34.496033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3941, node 1 2025-04-17T17:55:34.614781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:34.614817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:34.614850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:34.614998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:34.977679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.003602Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:5724 2025-04-17T17:55:35.241143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.251286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:55:35.261563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:35.312357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.490131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:35.579897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:35.681294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.738713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.817783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.881997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.966040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.052296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.132373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.139069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342286084125468:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:38.139215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:38.139582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342286084125480:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:38.144558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T17:55:38.177892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342286084125482:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T17:55:38.261136Z node 1 :TX_PROXY ERROR: Actor# [1:7494342286084125533:2922] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:38.760698Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342264609287595:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:38.760804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:39.221177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e8pgn0v1vh6k4bz650v3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYxOTJhMGYtMTZjOWYwODEtZWViOGQwMzAtYTM2NjMxMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:39.271684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.326394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.421511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.481587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.576003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.624488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.672056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.747667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.818651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:39.910059Z node 1 :HTTP INFO: Listening on http://127.0.0.1:2895 2025-04-17T17:55:40.925473Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T17:55:40.925614Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T17:55:40.927109Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T17:55:40.930642Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T17:55:40.930960Z node 1 :HTTP INFO: Listening on http://[::]:1041 2025-04-17T17:55:40.982913Z node 1 :SQS INFO: Request SQS users list 2025-04-17T17:55:40.982957Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T17:55:40.983039Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T17:55:40.983083Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T17:55:40.988786Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-17T17:55:40.988913Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromName (Parameter 'FROM_NAME (DataType 'Utf8String))) (let batchS ... ndleSqsResponse DeleteMessageBatch { RequestId: "7b592003-e9c26c08-f99b9887-27356741" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "7b592003-e9c26c08-f99b9887-27356741" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-17T17:56:34.601620Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7494342528508299139:2549]: DeleteMessageBatch { RequestId: "7b592003-e9c26c08-f99b9887-27356741" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "7b592003-e9c26c08-f99b9887-27356741" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-17T17:56:34.601677Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7494342528508299140:3824]. Found: 1 2025-04-17T17:56:34.602322Z node 7 :SQS TRACE: Request [7b592003-e9c26c08-f99b9887-27356741] HandleResponse: { DeleteMessageBatch { RequestId: "7b592003-e9c26c08-f99b9887-27356741" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "7b592003-e9c26c08-f99b9887-27356741" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-04-17T17:56:34.602399Z node 7 :SQS DEBUG: Request [7b592003-e9c26c08-f99b9887-27356741] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "7b592003-e9c26c08-f99b9887-27356741" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "7b592003-e9c26c08-f99b9887-27356741" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-17T17:56:34.602780Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [7b592003-e9c26c08-f99b9887-27356741] Got succesfult GRPC response. 2025-04-17T17:56:34.602913Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [7b592003-e9c26c08-f99b9887-27356741] reply ok 2025-04-17T17:56:34.603029Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [7b592003-e9c26c08-f99b9887-27356741] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 222 SourceAddress: 1803:d01:6050:0:3:d01:6050:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2025-04-17T17:56:34.603116Z node 7 :HTTP DEBUG: (#37,[::1]:59098) <- (200 ) 2025-04-17T17:56:34.603205Z node 7 :HTTP DEBUG: (#37,[::1]:59098) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2025-04-17T17:56:34.606338Z node 7 :HTTP DEBUG: (#37,[::1]:59100) incoming connection opened 2025-04-17T17:56:34.606415Z node 7 :HTTP DEBUG: (#37,[::1]:59100) -> (POST /Root) 2025-04-17T17:56:34.606539Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [3828:dd00:6050:0:2028:dd00:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: ca8f9406-9378c8c5-742fa3c6-63809756 2025-04-17T17:56:34.606921Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [ca8f9406-9378c8c5-742fa3c6-63809756] got new request from [3828:dd00:6050:0:2028:dd00:6050:0] 2025-04-17T17:56:34.607264Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [ca8f9406-9378c8c5-742fa3c6-63809756] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-17T17:56:34.607281Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [ca8f9406-9378c8c5-742fa3c6-63809756] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-17T17:56:34.609675Z node 7 :SQS DEBUG: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: ca8f9406-9378c8c5-742fa3c6-63809756 2025-04-17T17:56:34.609793Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2025-04-17T17:56:34.609802Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Request proxy started 2025-04-17T17:56:34.610546Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2025-04-17T17:56:34.610820Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976715715 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-04-17T17:56:34.610838Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 8ms 2025-04-17T17:56:34.610991Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976715715 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-04-17T17:56:34.611018Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2025-04-17T17:56:34.611082Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 10ms 2025-04-17T17:56:34.611205Z node 7 :SQS DEBUG: Request [] Sending executed reply 2025-04-17T17:56:34.611307Z node 7 :SQS DEBUG: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/1] 2025-04-17T17:56:34.611577Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Get configuration duration: 2ms 2025-04-17T17:56:34.611695Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Send get leader node request to sqs service for cloud4/000000000000000101v0 2025-04-17T17:56:34.611719Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Leader node for queue [cloud4/000000000000000101v0] is 7 2025-04-17T17:56:34.611740Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Got leader node for queue response. Node id: 7. Status: 0 2025-04-17T17:56:34.611820Z node 7 :SQS TRACE: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" 2025-04-17T17:56:34.611890Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" 2025-04-17T17:56:34.611935Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Request started. Actor: [7:7494342528508299172:3851] 2025-04-17T17:56:34.611970Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7494342528508299172:3851] 2025-04-17T17:56:34.611984Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2025-04-17T17:56:34.612017Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Get configuration duration: 0ms 2025-04-17T17:56:34.612032Z node 7 :SQS TRACE: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Got configuration. Root url: http://ghrun-gkdwnlcfnq.auto.internal:8771, Shards: 4, Fail: 0 2025-04-17T17:56:34.612057Z node 7 :SQS TRACE: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2025-04-17T17:56:34.612068Z node 7 :SQS TRACE: Request [ca8f9406-9378c8c5-742fa3c6-63809756] DoRoutine 2025-04-17T17:56:34.612112Z node 7 :SQS TRACE: Increment active message requests for [cloud4/000000000000000101v0/0]. ActiveMessageRequests: 1 2025-04-17T17:56:34.612128Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Received empty result from shard 0 infly. Infly capacity: 0. Messages count: 0 2025-04-17T17:56:34.612155Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] No known messages in this shard. 
Skip attempt to add messages to infly 2025-04-17T17:56:34.612165Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Already tried to add messages to infly 2025-04-17T17:56:34.612199Z node 7 :SQS TRACE: Decrement active message requests for [cloud4/000000000000000101v0/0]. ActiveMessageRequests: 0 2025-04-17T17:56:34.612254Z node 7 :SQS TRACE: Request [ca8f9406-9378c8c5-742fa3c6-63809756] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" } } 2025-04-17T17:56:34.612329Z node 7 :SQS TRACE: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Sending sqs response: { ReceiveMessage { RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" } RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-17T17:56:34.612460Z node 7 :SQS DEBUG: Request ReceiveMessage working duration: 0ms 2025-04-17T17:56:34.612614Z node 7 :SQS TRACE: HandleSqsResponse ReceiveMessage { RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" } RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-17T17:56:34.612747Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7494342528508299171:2553]: ReceiveMessage { RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" } RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-17T17:56:34.612824Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7494342528508299172:3851]. Found: 1 Http output full {} 2025-04-17T17:56:34.615172Z node 7 :SQS TRACE: Request [ca8f9406-9378c8c5-742fa3c6-63809756] HandleResponse: { ReceiveMessage { RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" } RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-04-17T17:56:34.615253Z node 7 :SQS DEBUG: Request [ca8f9406-9378c8c5-742fa3c6-63809756] Sending reply from proxy actor: { ReceiveMessage { RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" } RequestId: "ca8f9406-9378c8c5-742fa3c6-63809756" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-17T17:56:34.615498Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [ca8f9406-9378c8c5-742fa3c6-63809756] Got successful GRPC response. 2025-04-17T17:56:34.615548Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [ca8f9406-9378c8c5-742fa3c6-63809756] reply ok 2025-04-17T17:56:34.615639Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [ca8f9406-9378c8c5-742fa3c6-63809756] Send metering event.
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 179 SourceAddress: 3828:dd00:6050:0:2028:dd00:6050:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2025-04-17T17:56:34.615735Z node 7 :HTTP DEBUG: (#37,[::1]:59100) <- (200 ) 2025-04-17T17:56:34.615817Z node 7 :HTTP DEBUG: (#37,[::1]:59100) connection closed |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |81.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> ObjectStorageListingTest::FilterListing >> ObjectStorageListingTest::ListingNoFilter |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |81.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink >> TestProgram::YqlKernelEndsWith >> TestProgram::YqlKernelEndsWith [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like >> TestProgram::Like [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { 
Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N5(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N3(15):{\"i\":\"7,16\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N6(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N4(23):{\"i\":\"17\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N7(23):{\"i\":\"18\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N8(54):{\"i\":\"19,20\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N9(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N1->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"6":{"p":{"i":"17","p":{"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow11BooleanTypeE; |81.3%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |81.3%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> BackupRestore::RestoreTablePartitioningSettings >> ObjectStorageListingTest::FilterListing [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] 2025-04-17 17:56:42,379 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:42,713 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 154822 46.0M 45.7M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6873/001856/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/tes 155082 1.5G 1.5G 1007M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6873/001856/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unit 167276 1.5G 1.5G 1007M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6873/001856/ydb/core/kqp/ut/federated_query/generic_ut/test-results/u Test command err: Trying to start YDB, gRPC: 1200, MsgBus: 1778 2025-04-17T17:55:44.648182Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342314221768773:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:44.648230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001856/r3tmp/tmplrg3Kg/pdisk_1.dat 2025-04-17T17:55:45.254364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:45.284982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:45.285242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:45.287007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1200, node 1 2025-04-17T17:55:45.558908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:45.558932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:45.558938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:45.559055Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1778 TClient is connected to server localhost:1778 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:46.300132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:46.338681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:49.006281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342331401638426:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:49.006428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:49.311128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:49.451745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342335696605842:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:49.451801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342335696605847:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:49.451865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:49.455061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:49.467509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342335696605849:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:49.535263Z node 1 :TX_PROXY ERROR: Actor# [1:7494342335696605889:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:49.650268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342314221768773:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:49.650324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:50.383443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:50.979891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:55:51.551088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:55:52.254274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:55:52.780578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:55:53.234881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:53.306833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.160385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.220560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.222155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.224085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: " ... 
at schemeshard: 72057594046644480 2025-04-17T17:56:33.174534Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-17T17:56:34.249651Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.061373Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.944969Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:56:36.074276Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.227004Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.236538Z node 4 :KQP_SLOW_LOG WARN: TraceId: "01js2ea9cfdwxb60ams1bqp1jn", SessionId: ydb://session/3?node_id=4&id=ODk3ODFjYzEtYTU5MGE2NjMtNDVlYTQyYmMtNTJjMGU5ZWM=, Slow query, duration: 10.009878s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT external_data_source_p (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"TOKEN\",\n TOKEN_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hive\",\n CATALOG_HIVE_URI=\"hive_uri\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { 
filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001856/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001856/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> BackupRestoreS3::TestAllPrimitiveTypes-PRIMITIVE_TYPE_ID_UNSPECIFIED [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT16 >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2025-04-17T17:56:41.280483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:41.281028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:41.281088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00187b/r3tmp/tmpk1orEr/pdisk_1.dat 2025-04-17T17:56:41.822152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:41.876707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:41.918848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:41.918995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:41.930706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:42.020813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:56:42.089088Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:56:42.089352Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:56:42.139771Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:56:42.139907Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:56:42.141640Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:56:42.141737Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:56:42.141853Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:56:42.142247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:56:42.142399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:56:42.142481Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:56:42.154971Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:56:42.182951Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:56:42.183192Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:56:42.183326Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:56:42.183366Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:56:42.183414Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:56:42.183449Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:42.183906Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:56:42.184034Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:56:42.184168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:56:42.184222Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:56:42.184278Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:56:42.184321Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:42.184424Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:56:42.184900Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:56:42.185182Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:56:42.185277Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:56:42.187013Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:42.197813Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:56:42.197947Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:56:42.362650Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:56:42.371662Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:56:42.371760Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:42.372705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:56:42.372758Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:56:42.372823Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:56:42.373131Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:56:42.373290Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:56:42.374268Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:56:42.374355Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:56:42.376728Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:56:42.380612Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T17:56:42.382399Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T17:56:42.382453Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:42.383023Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T17:56:42.383096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:42.384587Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:42.384647Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:56:42.384705Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:56:42.384770Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:56:42.384821Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T17:56:42.384933Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:42.396941Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:42.399809Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:56:42.400504Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T17:56:42.400597Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:56:42.436034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:42.436205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:42.436306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:42.449539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:56:42.465151Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:42.646911Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:42.650281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:56:42.827316Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:43.222840Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ean9g2r65rrj9vzvjwpht, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg2MzJhMTMtM2M5NDA2MjgtZWI5ODg2NC0xMzg3OWI2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:43.229346Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T17:56:43.229906Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T17:56:43.230128Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-17T17:56:43.241174Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:43.246277Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] 2025-04-17T17:56:43.246508Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T17:56:43.246723Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-04-17T17:56:43.246933Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] 2025-04-17T17:56:43.248939Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:870:2700], serverId# [1:871:2701], sessionId# [0:0:0] 2025-04-17T17:56:43.249146Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T17:56:43.249334Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-04-17T17:56:43.249510Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:870:2700], serverId# [1:871:2701], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28008, MsgBus: 1703 2025-04-17T17:56:24.518737Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342485249767864:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:24.519379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001a6f/r3tmp/tmpaNXlCG/pdisk_1.dat 2025-04-17T17:56:25.060345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:25.060477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:25.063057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:25.092577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28008, node 1 2025-04-17T17:56:25.214730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:25.214752Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:25.214758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:25.214875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1703 TClient is connected to server localhost:1703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:26.176318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:26.241871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:26.423039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:26.682906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:26.794383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:28.757442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342502429638836:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.757569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.176434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.242545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.329980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.372251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.410961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.490764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.521041Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342485249767864:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:29.521125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:29.568838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.647932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342506724606805:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.648026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.648263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342506724606810:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.652442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:29.667877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342506724606812:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:29.762867Z node 1 :TX_PROXY ERROR: Actor# [1:7494342506724606873:3560] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:31.270687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25714, MsgBus: 64772 2025-04-17T17:56:32.935730Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342517719363583:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:32.935777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a6f/r3tmp/tmpnYWRju/pdisk_1.dat 2025-04-17T17:56:33.220210Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:33.246324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:33.246412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:33.251404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25714, node 2 2025-04-17T17:56:33.369796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:33.369819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:33.369830Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:33.369948Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64772 TClient is connected to server localhost:64772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:34.175687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:34.183437Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:56:34.197640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:34.318901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:34.565642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:34.662929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:37.723573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342539194201785:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:37.723674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:37.850391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:37.935952Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342517719363583:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:37.936022Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:37.945370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.049491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.152309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.220434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.297993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.386161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.564142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342543489169759:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:38.564248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:38.564610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342543489169764:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:38.568610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:38.586515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342543489169766:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:56:38.661345Z node 2 :TX_PROXY ERROR: Actor# [2:7494342543489169822:3557] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:40.548035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T17:56:41.359914Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494342556374072286:2539], TxId: 281474976715676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGRkMzgxY2MtNTJkYjYwOGItZGJhMjc2MTMtMjQwZjZiNzY=. CustomerSuppliedId : . TraceId : 01js2eakrf3jx0mf9h5n5nbvxy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-17T17:56:41.360702Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494342556374072287:2540], TxId: 281474976715676, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2eakrf3jx0mf9h5n5nbvxy. SessionId : ydb://session/3?node_id=2&id=OGRkMzgxY2MtNTJkYjYwOGItZGJhMjc2MTMtMjQwZjZiNzY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494342556374072283:2509], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T17:56:41.361155Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGRkMzgxY2MtNTJkYjYwOGItZGJhMjc2MTMtMjQwZjZiNzY=, ActorId: [2:7494342552079104732:2509], ActorState: ExecuteState, TraceId: 01js2eakrf3jx0mf9h5n5nbvxy, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8786, MsgBus: 28451 2025-04-17T17:56:24.745614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342486210065147:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:24.745682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5a/r3tmp/tmpFnAf2h/pdisk_1.dat 2025-04-17T17:56:25.345760Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:25.351277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:25.351381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:25.357930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8786, node 1 2025-04-17T17:56:25.602681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:25.602730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:25.602739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:25.602878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28451 TClient is connected to server localhost:28451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:26.702070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:26.747020Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:26.772678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:26.947903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:27.185341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:27.339629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:29.423100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342507684903180:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.423243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.750307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342486210065147:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:29.750419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:29.983536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.030522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.128816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.223007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.287903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.393426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.505795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.594571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342511979871159:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.594668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.596645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342511979871164:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.600534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:30.625572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342511979871166:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:30.727245Z node 1 :TX_PROXY ERROR: Actor# [1:7494342511979871224:3562] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17236, MsgBus: 26787 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5a/r3tmp/tmp6K2tHb/pdisk_1.dat 2025-04-17T17:56:34.218352Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:34.239051Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:34.254992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:34.255086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:34.258320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17236, node 2 2025-04-17T17:56:34.338750Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:34.338773Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:34.338784Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:34.338905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26787 TClient is connected to server localhost:26787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:56:35.073250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.111655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:35.203937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:35.438535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:35.541437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:39.432078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342550526922486:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:39.432204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:39.483343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:39.546760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:39.636736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:39.701229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:39.789872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:39.892364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:39.971855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.126700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342554821890458:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.126826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.130579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342554821890463:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.135353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:40.175817Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342554821890465:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:40.277008Z node 2 :TX_PROXY ERROR: Actor# [2:7494342554821890530:3577] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:41.704172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:56:42.028140Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710674; 2025-04-17T17:56:42.104170Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494342563411825637:2516], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7494342559116858174:2516]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7494342563411825637:2516].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T17:56:42.104765Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494342559116858330:2516], SessionActorId: [2:7494342559116858174:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7494342559116858174:2516]. isRollback=0 2025-04-17T17:56:42.105034Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzBlZjE3OWUtZDc1ZmVjOTEtMWJiMTZlZGItYzg4NmU0Y2Y=, ActorId: [2:7494342559116858174:2516], ActorState: ExecuteState, TraceId: 01js2eamr5a8cxfng3xqvw5qvm, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7494342559116858331:2516] from: [2:7494342559116858330:2516] 2025-04-17T17:56:42.105124Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494342559116858331:2516] TxId: 281474976710674. Ctx: { TraceId: 01js2eamr5a8cxfng3xqvw5qvm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzBlZjE3OWUtZDc1ZmVjOTEtMWJiMTZlZGItYzg4NmU0Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T17:56:42.105345Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzBlZjE3OWUtZDc1ZmVjOTEtMWJiMTZlZGItYzg4NmU0Y2Y=, ActorId: [2:7494342559116858174:2516], ActorState: ExecuteState, TraceId: 01js2eamr5a8cxfng3xqvw5qvm, Create QueryResponse for error on request, msg: >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |81.4%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-PRIMITIVE_TYPE_ID_UNSPECIFIED [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INT8 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2025-04-17T17:56:35.697216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342530103060041:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:35.697459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d4a/r3tmp/tmp7jdFWR/pdisk_1.dat 2025-04-17T17:56:36.646442Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:36.718367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:36.718481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:36.727140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:36.728183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15029, node 1 2025-04-17T17:56:36.946896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001d4a/r3tmp/yandexbXvFhE.tmp 2025-04-17T17:56:36.946921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001d4a/r3tmp/yandexbXvFhE.tmp 2025-04-17T17:56:36.947091Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001d4a/r3tmp/yandexbXvFhE.tmp 2025-04-17T17:56:36.947220Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:17075 PQClient connected to localhost:15029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:38.059716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:38.094858Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:56:38.128446Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:38.142543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T17:56:40.697188Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342530103060041:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:40.706732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:40.950681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342551577897260:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.950809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342551577897271:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.950893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.955890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:56:41.003663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:56:41.011424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342551577897275:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:56:41.426353Z node 1 :TX_PROXY ERROR: Actor# [1:7494342555872864636:2397] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:41.463504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:41.586934Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342555872864645:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:41.587799Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTU0OWJlNjItMWYxZmJhN2UtY2FlYzIwYzYtYTFiZmRhNmI=, ActorId: [1:7494342551577897257:2334], ActorState: ExecuteState, TraceId: 01js2eaktw3j5erqvc3yccwkr0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:41.613853Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:41.617604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:41.797675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:56:42.420069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2eamskdmzec5bewf8dpjaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTk1ZGI1ZS05MjYyMDQ0ZC1lMDVkMGY1NS03YjBjYzQzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25588, MsgBus: 3670 2025-04-17T17:56:24.866321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342485103394928:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:24.866389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a54/r3tmp/tmppvprQc/pdisk_1.dat 2025-04-17T17:56:25.560655Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:25.579830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:25.579930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:25.591439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25588, node 1 2025-04-17T17:56:25.834770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:25.834789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:25.834796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:25.834903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3670 TClient is connected to server localhost:3670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:26.636894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:26.661904Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:26.683799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:26.916797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:27.126290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:27.249907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:29.278247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342506578233098:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.278365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:29.703034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.791207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.866920Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342485103394928:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:29.867000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:29.884170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.933341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.995934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.081771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.141161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.316570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342510873201079:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.316768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.317189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342510873201086:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.321847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:30.336752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:56:30.339644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342510873201088:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:30.411731Z node 1 :TX_PROXY ERROR: Actor# [1:7494342510873201146:3562] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:31.518028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64949, MsgBus: 10091 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a54/r3tmp/tmp174AaM/pdisk_1.dat 2025-04-17T17:56:34.001606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:34.119746Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:34.123170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:34.123270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:34.126364Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64949, node 2 2025-04-17T17:56:34.338744Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:34.338774Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:34.338781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:34.338902Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10091 TClient is connected to server localhost:10091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:35.417685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:35.460821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:56:35.607927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:35.890430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:36.015202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:40.240707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342552239361813:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.240782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.293181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.360729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.404216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.451628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.486134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.530650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.573820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.719411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342552239362477:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.719534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.727083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342552239362482:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.731985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:40.744972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342552239362484:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:56:40.808927Z node 2 :TX_PROXY ERROR: Actor# [2:7494342552239362539:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:42.521437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-17T17:56:45.784758Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.784790Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.784812Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:45.797550Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:45.802475Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:45.820552Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.822524Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:45.824016Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.824041Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.824061Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:45.830457Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:45.841013Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:45.841222Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.842380Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:45.842820Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T17:56:45.843987Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.844017Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.844044Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:45.861133Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:56:45.874485Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:45.874703Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.875106Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:45.875912Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.878718Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:45.879709Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:45.879785Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:56:45.881150Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.881177Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.881207Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:45.898395Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:45.906639Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:45.906888Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.907288Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-04-17T17:56:45.908392Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:45.908617Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:56:45.910072Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:56:45.910344Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:56:45.914430Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:45.914481Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:45.914525Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:56:45.914707Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). 
Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-17T17:56:45.914837Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:56:45.914859Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:56:45.914880Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:45.915015Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-17T17:56:45.915069Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-17T17:56:45.915092Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-17T17:56:45.915110Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:56:45.915208Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-17T17:56:45.915268Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-17T17:56:45.915289Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-17T17:56:45.915312Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:45.915431Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). 
Partition stream id: 1 GOT RANGE 7 9 2025-04-17T17:56:45.923939Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.923978Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.924002Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:45.938506Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:45.951013Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:45.951248Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.954446Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-17T17:56:45.955568Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:45.955797Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:56:45.956105Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:56:45.956335Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:56:45.956472Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:45.956505Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:45.956528Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:56:45.956545Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:56:45.956589Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:45.956819Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). 
Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-04-17T17:56:45.956922Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-17T17:56:45.956959Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-17T17:56:45.956984Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-17T17:56:45.957007Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-17T17:56:45.957035Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:45.957209Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2025-04-17T17:56:45.963628Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.963659Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.963687Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:45.978905Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:45.994658Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:45.994935Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:45.998444Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:45.999648Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:46.000483Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:46.002459Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-17T17:56:46.002578Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:56:46.006306Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-17T17:56:46.006359Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:46.006387Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-17T17:56:46.006405Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-17T17:56:46.006459Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-17T17:56:46.006507Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-17T17:56:46.006682Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-04-17T17:56:46.006862Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2025-04-17T17:56:42.560111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:42.562302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:42.562384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018a6/r3tmp/tmphyYoSY/pdisk_1.dat 2025-04-17T17:56:43.048721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:56:43.108793Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:43.157726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:43.157867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:43.169259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:43.271911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:56:43.326089Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:56:43.326346Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:56:43.400487Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:56:43.400610Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:56:43.402316Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:56:43.402423Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:56:43.402487Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:56:43.402849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:56:43.402990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:56:43.403080Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:56:43.413885Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:56:43.471875Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:56:43.472114Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:56:43.472258Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:56:43.472297Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:56:43.472330Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:56:43.472385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:43.472869Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:56:43.472996Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:56:43.473131Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:56:43.473172Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:56:43.473245Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:56:43.473293Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:43.473394Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:56:43.473865Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:56:43.474111Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:56:43.478334Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:56:43.480325Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:43.494802Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:56:43.494929Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:56:43.660271Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:56:43.664835Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:56:43.664914Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:43.665697Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:56:43.665754Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:56:43.665812Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T17:56:43.666051Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T17:56:43.670413Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:56:43.671410Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:56:43.671485Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T17:56:43.673539Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T17:56:43.673941Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T17:56:43.679763Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T17:56:43.679825Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:43.680423Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T17:56:43.680501Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:43.681821Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:56:43.681879Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:56:43.681934Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T17:56:43.681996Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T17:56:43.682044Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T17:56:43.682153Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:43.694314Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:43.696580Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T17:56:43.697147Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T17:56:43.697235Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T17:56:43.723325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:43.723467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:43.723554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:43.737091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:56:43.759865Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:43.942020Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:56:43.954360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:56:44.102067Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:44.761059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eapj0ebbp031kwve6wwbh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGM5YmRmMjQtM2RjOTIyZTMtNGNkMzc0ODctOThiYTFlOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:44.767475Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T17:56:44.767977Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T17:56:44.768250Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-17T17:56:44.780284Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:56:44.785106Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] 2025-04-17T17:56:44.785328Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T17:56:44.785610Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-04-17T17:56:44.785798Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |81.4%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:226:2060] recipient: [1:221:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:226:2060] recipient: [1:221:2144] Leader for TabletID 72057594046678944 is [1:243:2158] sender: [1:246:2060] recipient: [1:221:2144] 2025-04-17T17:55:23.845481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:23.845574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-17T17:55:23.845616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:23.845665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:23.845710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:23.845735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:23.845807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:23.845870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:23.846292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:23.947906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:55:23.947989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:23.970750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:23.971082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:23.971230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:23.976560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:23.976716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:23.977348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:23.977544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:23.979539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:23.980976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:23.981041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:23.981145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:23.981197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:23.981232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:23.981378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.994035Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:243:2158] sender: [1:352:2060] recipient: [1:17:2064] 2025-04-17T17:55:24.183674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:24.183933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.184176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:24.184408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:24.184472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.191231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.191397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:24.191632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.191710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:24.191758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:24.191789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:24.194420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.194512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:24.194560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:24.202515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.202594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.202689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.202738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.206492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:24.209162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:24.209562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-04-17T17:55:24.210681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:24.210832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 241 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:24.210916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.211256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:55:24.211317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:24.211503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:24.211633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:24.214959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:24.215007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:24.215213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:24.215257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:319:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:24.215649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:24.215712Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:24.215822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.215858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.215894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:24.215923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.215956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:24.215996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:24.216026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:24.216054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:24.216146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:24.216220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:24.216252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-04-17T17:55:24.218355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:55:24.218491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:55:24.218525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T17:56:46.076846Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-17T17:56:46.076876Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-17T17:56:46.076902Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-17T17:56:46.076982Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-17T17:56:46.077022Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:46.079925Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:46.080764Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:56:46.080805Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:46.081197Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T17:56:46.081229Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T17:56:46.081469Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T17:56:46.081517Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T17:56:46.082570Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:676:2502], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:46.082648Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:46.082696Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:56:46.082898Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:575:2401], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-17T17:56:46.082932Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:56:46.083010Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T17:56:46.083111Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 
2025-04-17T17:56:46.083154Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:674:2500] 2025-04-17T17:56:46.083371Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:676:2502], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:46.083409Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:46.083452Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2025-04-17T17:56:46.083907Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:551:2102], Recipient [7:235:2153] 2025-04-17T17:56:46.083958Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:56:46.087934Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 551 RawX2: 34359740470 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:56:46.088304Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T17:56:46.088466Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T17:56:46.088676Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:46.091692Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:56:46.091959Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-17T17:56:46.092035Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-17T17:56:46.092510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-17T17:56:46.092558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-17T17:56:46.093005Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:682:2508], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:46.093065Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:46.093104Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:56:46.093243Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:575:2401], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2025-04-17T17:56:46.093282Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:56:46.093366Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-17T17:56:46.093483Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-17T17:56:46.093532Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:680:2506] 2025-04-17T17:56:46.093747Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:682:2508], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:46.093782Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:46.093819Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-17T17:56:46.094480Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:551:2102], Recipient [7:235:2153] 2025-04-17T17:56:46.094536Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:56:46.097201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 551 RawX2: 34359740470 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:56:46.097542Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T17:56:46.097616Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2025-04-17T17:56:46.097831Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:46.100437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:56:46.100705Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-17T17:56:46.100775Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-17T17:56:46.101200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-17T17:56:46.101251Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-17T17:56:46.101741Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:688:2514], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:46.101807Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:46.101858Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:56:46.101978Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:575:2401], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2025-04-17T17:56:46.102021Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T17:56:46.102105Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-17T17:56:46.102254Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-17T17:56:46.102322Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:686:2512] 2025-04-17T17:56:46.102564Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:688:2514], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:46.102609Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:46.102649Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2025-04-17T17:55:33.643298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342267433243968:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:33.643339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000838/r3tmp/tmpGi5gb7/pdisk_1.dat 2025-04-17T17:55:34.266602Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:34.269269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:34.269390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:34.277753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1477, node 1 2025-04-17T17:55:34.433594Z node 1 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:34.433614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:34.433630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:34.433746Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:34.814090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:34.855083Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:20349 2025-04-17T17:55:35.063510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.076746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:55:35.078701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.112324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.275201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:35.386564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-17T17:55:35.398944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T17:55:35.506352Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-17T17:55:35.521454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.584771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.664507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.727620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.785840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.838080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.886415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:37.779228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342284613114369:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:37.779305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342284613114358:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:37.779425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:37.783923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T17:55:37.798524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342284613114372:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T17:55:37.864414Z node 1 :TX_PROXY ERROR: Actor# [1:7494342284613114423:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:38.255357Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e8p5d7hkm1jrbbys8fazy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJhMGE0OTMtYjIzZDA5YjMtZTEzNGJkMDQtMjdlYzg4NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:38.348485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.407148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.470914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.525382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.578003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.648433Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342267433243968:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:38.648512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:38.664995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.714435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.753842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.801355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:38.894577Z node 1 :HTTP INFO: Listening on http://127.0.0.1:8468 2025-04-17T17:55:39.896381Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T17:55:39.896647Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T17:55:39.897692Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T17:55:39.897898Z node 1 :HTTP INFO: Listening on http://[::]:29685 2025-04-17T17:55:39.900782Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T17:55:39.948409Z node 1 :SQS INFO: Request SQS users list 2025-04-17T17:55:39.948450Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T17:55:39.956633Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-17T17:55:39.956764Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( (let fromUser ( ... se { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:43.448024Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 40ms 2025-04-17T17:56:43.448335Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:43.448370Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-17T17:56:43.448471Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 42ms 2025-04-17T17:56:43.448804Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:43.499328Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:43.499374Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 56ms 2025-04-17T17:56:43.499913Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } 
} Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:43.499959Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-17T17:56:43.500122Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 56ms 2025-04-17T17:56:43.500824Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T17:56:43.663562Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7494342567782851800:2448]: Pool not found 2025-04-17T17:56:43.664455Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-04-17T17:56:44.359059Z node 8 :HTTP DEBUG: (#37,[::1]:60958) incoming connection opened 2025-04-17T17:56:44.359150Z node 8 :HTTP DEBUG: (#37,[::1]:60958) -> (POST /Root) 2025-04-17T17:56:44.359283Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [78b3:8400:6050:0:60b3:8400:6050:0] request [CreateStream] url [/Root] database [/Root] requestId: 
ebf3772e-cf4232f3-65fe3339-6743215c 2025-04-17T17:56:44.359854Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [ebf3772e-cf4232f3-65fe3339-6743215c] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2025-04-17T17:56:44.360125Z node 8 :HTTP DEBUG: (#37,[::1]:60958) <- (400 MissingParameter) 2025-04-17T17:56:44.360175Z node 8 :HTTP DEBUG: (#37,[::1]:60958) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2025-04-17T17:56:44.360225Z node 8 :HTTP DEBUG: (#37,[::1]:60958) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: ebf3772e-cf4232f3-65fe3339-6743215c x-amz-crc32: 851558042 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-04-17T17:56:44.360319Z node 8 :HTTP DEBUG: (#37,[::1]:60958) connection closed 2025-04-17T17:56:44.388447Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7494342567782851783:2447]: Pool not found 2025-04-17T17:56:44.388835Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-17T17:56:44.406959Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494342572077819217:2467], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:44.407085Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7494342572077819218:2468], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-17T17:56:44.407147Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2025-04-17T17:55:33.526581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342266688805566:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:33.527344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000895/r3tmp/tmpn9TFEh/pdisk_1.dat 2025-04-17T17:55:34.356363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:34.356459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:34.366537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:34.370119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2534, node 1 2025-04-17T17:55:34.667502Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:34.667528Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:34.667535Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:34.667659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:35.178360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21315 2025-04-17T17:55:35.615644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
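The TestEmptyHttpBody failure path above (HTTP 400 MissingParameter, "Top level of json value is not a map") comes down to a type check on the parsed request body before proto conversion: the test sends a chunked body of "null", which parses as JSON but is not a map. A minimal sketch of such a guard, using the library/cpp/json reader this build already links; the actual helper in ydb/core/http_proxy/json_proto_conversion.h may be structured differently:

    #include <library/cpp/json/json_reader.h>
    #include <util/generic/strbuf.h>

    // Hypothetical guard reproducing the rejection seen above: a request
    // body whose top-level JSON value is not a map must be answered with
    // 400 MissingParameter before any proto conversion is attempted.
    bool IsTopLevelJsonMap(TStringBuf body) {
        NJson::TJsonValue parsed;
        if (!NJson::ReadJsonTree(body, &parsed)) {
            return false; // unparsable bodies are rejected the same way
        }
        return parsed.IsMap(); // "null", "4", "[]" all fail this check
    }
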
2025-04-17T17:55:35.623779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:55:35.626441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.642649Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T17:55:35.657064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:35.933806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:35.997120Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-17T17:55:36.001585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:36.056095Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-17T17:55:36.060038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.109469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.176846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.226656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.283874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.374813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:36.461612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:55:38.352527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342288163643487:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:38.352685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:38.353244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342288163643499:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:38.361097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T17:55:38.380124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342288163643501:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T17:55:38.481467Z node 1 :TX_PROXY ERROR: Actor# [1:7494342288163643552:2922] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:38.530289Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342266688805566:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:38.530361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:38.924526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e8pqaf8xced39ftm157xr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWUxODQ2ZGItZjcyZDRhYzEtMzBkY2E0MWMtMzk1YWVmYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:38.989162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.071194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.118949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.197894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.245734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.292517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.365836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.407313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:39.448921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:39.536371Z node 1 :HTTP INFO: Listening on http://127.0.0.1:6101 2025-04-17T17:55:40.540750Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T17:55:40.540876Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T17:55:40.541608Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T17:55:40.542206Z node 1 :HTTP INFO: Listening on http://[::]:61380 2025-04-17T17:55:40.543349Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T17:55:40.593043Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T17:55:40.593051Z node 1 :SQS INFO: Request SQS users list 2025-04-17T17:55:40.593081Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T17:55:40.593110Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T17:55:40.597421Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(id ... : "createdTimestamp" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } Member { Name: "infly" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "DelayDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "RandomId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "VisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "inflyCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "inflyVersion" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "messageCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "readOffset" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } Value { Struct { Optional { Optional { Uint64: 1744912604641 } } } Struct { Optional { } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Uint64: 0 } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Optional { Uint64: 0 } } } } } } 2025-04-17T17:56:45.473469Z node 7 :SQS DEBUG: Infly version for shard [cloud4/000000000000000101v0/1]: 0 2025-04-17T17:56:45.676073Z node 7 :SQS TRACE: Request [b639b9af-50a6cc87-2f5c68b9-35898c62] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976710714 Step: 1744912605702 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-04-17T17:56:45.676112Z node 7 :SQS DEBUG: Request [b639b9af-50a6cc87-2f5c68b9-35898c62] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 203ms 2025-04-17T17:56:45.676676Z node 7 :SQS TRACE: Request [b639b9af-50a6cc87-2f5c68b9-35898c62] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976710714 Step: 1744912605702 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } 
} } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-04-17T17:56:45.676774Z node 7 :SQS TRACE: Request [b639b9af-50a6cc87-2f5c68b9-35898c62] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2025-04-17T17:56:45.676925Z node 7 :SQS DEBUG: Request [b639b9af-50a6cc87-2f5c68b9-35898c62] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 205ms 2025-04-17T17:56:45.677411Z node 7 :SQS DEBUG: Created new Deduplication cleanup actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7494342572962507362:3804] 2025-04-17T17:56:45.677442Z node 7 :SQS DEBUG: Created new Reads cleanup actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7494342572962507363:3805] 2025-04-17T17:56:45.677472Z node 7 :SQS DEBUG: Created new retention actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7494342572962507364:3806] 2025-04-17T17:56:45.677505Z node 7 :SQS DEBUG: Created new purge actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7494342572962507365:3807] 2025-04-17T17:56:45.677517Z node 7 :SQS DEBUG: Request [b639b9af-50a6cc87-2f5c68b9-35898c62] Sending executed reply 2025-04-17T17:56:45.677905Z node 7 :SQS INFO: Request [95307d3d-7c9fc69b-d37d8a6a-c20e408d] Bootstrap cleanup actor for queue [cloud4/000000000000000301v0] 2025-04-17T17:56:45.677932Z node 7 :SQS INFO: Request [63366d25-9f2dbeca-37bbd986-ff283218] Bootstrap retention actor for queue [cloud4/000000000000000301v0] 2025-04-17T17:56:45.677954Z node 7 :SQS INFO: Request [6c47fe8a-cb0380a6-687f5e2f-e5d7262e] Create purge actor for queue /Root/SQS/cloud4/000000000000000301v0 2025-04-17T17:56:45.678041Z node 7 :SQS DEBUG: Request [5b749377-e3164486-b713a827-9ef813c4] Get configuration duration: 286ms 2025-04-17T17:56:45.678058Z node 7 :SQS TRACE: Request [5b749377-e3164486-b713a827-9ef813c4] Got configuration. 
Root url: http://ghrun-gkdwnlcfnq.auto.internal:8771, Shards: 1, Fail: 0 2025-04-17T17:56:45.678076Z node 7 :SQS TRACE: Request [5b749377-e3164486-b713a827-9ef813c4] DoRoutine 2025-04-17T17:56:45.678145Z node 7 :SQS TRACE: Request [5b749377-e3164486-b713a827-9ef813c4] SendReplyAndDie from action actor { ListQueueTags { RequestId: "5b749377-e3164486-b713a827-9ef813c4" } } 2025-04-17T17:56:45.678255Z node 7 :SQS INFO: Request [e8be6f0a-ebdd5e0f-e1d00e8e-3843f8e] Bootstrap cleanup actor for queue [cloud4/000000000000000301v0] 2025-04-17T17:56:45.682339Z node 7 :SQS TRACE: Request [5b749377-e3164486-b713a827-9ef813c4] Sending sqs response: { ListQueueTags { RequestId: "5b749377-e3164486-b713a827-9ef813c4" } RequestId: "5b749377-e3164486-b713a827-9ef813c4" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-04-17T17:56:45.682527Z node 7 :SQS TRACE: HandleSqsResponse ListQueueTags { RequestId: "5b749377-e3164486-b713a827-9ef813c4" } RequestId: "5b749377-e3164486-b713a827-9ef813c4" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-04-17T17:56:45.682591Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7494342572962507332:2555]: ListQueueTags { RequestId: "5b749377-e3164486-b713a827-9ef813c4" } RequestId: "5b749377-e3164486-b713a827-9ef813c4" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-04-17T17:56:45.682641Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7494342572962507337:3783]. Found: 1 2025-04-17T17:56:45.686376Z node 7 :SQS TRACE: Request [5b749377-e3164486-b713a827-9ef813c4] HandleResponse: { ListQueueTags { RequestId: "5b749377-e3164486-b713a827-9ef813c4" } RequestId: "5b749377-e3164486-b713a827-9ef813c4" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-04-17T17:56:45.686463Z node 7 :SQS DEBUG: Request [5b749377-e3164486-b713a827-9ef813c4] Sending reply from proxy actor: { ListQueueTags { RequestId: "5b749377-e3164486-b713a827-9ef813c4" } RequestId: "5b749377-e3164486-b713a827-9ef813c4" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-04-17T17:56:45.689996Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [5b749377-e3164486-b713a827-9ef813c4] Got successful GRPC response. 2025-04-17T17:56:45.690063Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [5b749377-e3164486-b713a827-9ef813c4] reply ok 2025-04-17T17:56:45.690159Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [5b749377-e3164486-b713a827-9ef813c4] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 179 SourceAddress: 1840:1600:6050:0:40:1600:6050:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-04-17T17:56:45.690314Z node 7 :HTTP DEBUG: (#37,[::1]:37402) <- (200 ) Http output full {} 2025-04-17T17:56:45.690414Z node 7 :HTTP DEBUG: (#37,[::1]:37402) connection closed >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TSequence::CreateSequenceParallel >> TSequence::CreateSequence >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> CompressExecutor::TestReorderedExecutor >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |81.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-17T17:56:46.789542Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.789581Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.789627Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:46.790020Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:46.790689Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:46.805542Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.806326Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-17T17:56:46.807127Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:46.807562Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:46.807784Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-17T17:56:46.807914Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:46.807991Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:46.808025Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-17T17:56:46.808059Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:56:46.808085Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:56:46.864744Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.864783Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.864839Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:46.865180Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:46.865672Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:46.865836Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.866827Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-17T17:56:46.867781Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:46.868022Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:56:46.868346Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:56:46.868559Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:56:46.868695Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:46.868743Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:46.868776Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:56:46.868939Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-17T17:56:46.869059Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:56:46.869081Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:56:46.869106Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:46.869248Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-17T17:56:46.869308Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-17T17:56:46.869329Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-17T17:56:46.869347Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:56:46.869425Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-17T17:56:46.869506Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-17T17:56:46.869534Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-17T17:56:46.869560Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:46.869671Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-04-17T17:56:46.871631Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.871667Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.871689Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:46.872087Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:46.872661Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:46.872839Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:46.873081Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-04-17T17:56:46.874188Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T17:56:46.874421Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-17T17:56:46.874791Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-17T17:56:46.875067Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-17T17:56:46.875183Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:46.875225Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:56:46.875361Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-04-17T17:56:46.875460Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:46.875484Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:56:46.875557Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-04-17T17:56:46.875623Z :DEBUG: Take Data. Partition 1. 
Read: {0, 2} (3-3) 2025-04-17T17:56:46.875644Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-17T17:56:46.875731Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-04-17T17:56:46.875789Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-17T17:56:46.875811Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:49.287919Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2025-04-17T17:56:49.408252Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-17T17:56:49.408314Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-17T17:56:49.408359Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:49.408738Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:49.409414Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:49.409596Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-17T17:56:49.410006Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-17T17:56:49.530959Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-17T17:56:49.532232Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:49.534957Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:49.538425Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:56:49.539418Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-17T17:56:49.676269Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-17T17:56:49.677245Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-17T17:56:49.678167Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-17T17:56:49.679233Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-17T17:56:49.688786Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-04-17T17:56:49.689756Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-17T17:56:49.689823Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 10, size 10000000 bytes 2025-04-17T17:56:49.690002Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-17T17:56:49.700841Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-04-17T17:56:49.707554Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:49.707615Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:49.707647Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:49.713072Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:49.722439Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:49.722644Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:49.726407Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:49.726867Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-17T17:56:49.731255Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:49.731284Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:49.738304Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:49.738733Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:49.739153Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:49.739316Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:49.742781Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:49.742953Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:49.746278Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:49.746353Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T17:56:49.746567Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> OperationMapping::IndexBuildCanceled >> OperationMapping::IndexBuildCanceled [GOOD] >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2920:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2920:2116] Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2920:2116] 2025-04-17T17:56:10.986074Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:10.990581Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:10.990879Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T17:56:10.992736Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:56:10.993040Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T17:56:10.993420Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:10.993437Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:10.993626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T17:56:11.002615Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T17:56:11.002732Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T17:56:11.002856Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T17:56:11.002946Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:11.003032Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:11.003122Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-04-17T17:56:11.015476Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 
2025-04-17T17:56:11.015625Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:11.026456Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:11.026594Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:11.026683Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:11.026793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:11.027207Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:11.027320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:11.027377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:11.027439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:11.038272Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:11.038413Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:11.049245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:11.049383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T17:56:11.050611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T17:56:11.050659Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T17:56:11.050838Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T17:56:11.050883Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T17:56:11.062594Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-04-17T17:56:11.063380Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-17T17:56:11.063428Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-17T17:56:11.063463Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-17T17:56:11.063492Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-17T17:56:11.063515Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-17T17:56:11.063555Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-17T17:56:11.063579Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-17T17:56:11.063628Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-17T17:56:11.063652Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-17T17:56:11.063675Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-17T17:56:11.063697Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-17T17:56:11.063718Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-17T17:56:11.063743Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-17T17:56:11.063766Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-17T17:56:11.063792Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-17T17:56:11.063827Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-17T17:56:11.063852Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new 
pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-17T17:56:11.063876Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-17T17:56:11.063898Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-17T17:56:11.063921Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-17T17:56:11.063946Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-04-17T17:56:11.063984Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-04-17T17:56:11.064025Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-17T17:56:11.064068Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-04-17T17:56:11.064092Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-04-17T17:56:11.064119Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-17T17:56:11.064140Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-04-17T17:56:11.064162Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-04-17T17:56:11.064196Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-04-17T17:56:11.064223Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-04-17T17:56:11.064245Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-04-17T17:56:11.064269Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-04-17T17:56:11.064292Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Cr ... 
ER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-04-17T17:56:40.980342Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-04-17T17:56:40.980388Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-04-17T17:56:40.980412Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-04-17T17:56:40.980436Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-04-17T17:56:40.980459Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-04-17T17:56:40.980482Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-04-17T17:56:40.980503Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-04-17T17:56:40.980527Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-04-17T17:56:40.980545Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-04-17T17:56:40.980561Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-04-17T17:56:40.980579Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-04-17T17:56:40.980594Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-04-17T17:56:40.980610Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-04-17T17:56:40.980627Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-04-17T17:56:40.980643Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-04-17T17:56:40.980661Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-04-17T17:56:40.980676Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2025-04-17T17:56:40.980699Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-04-17T17:56:40.980726Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-04-17T17:56:40.980743Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-04-17T17:56:41.304628Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.329844s 2025-04-17T17:56:41.304815Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.330048s 2025-04-17T17:56:41.367546Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-04-17T17:56:41.369579Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-04-17T17:56:41.369641Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-04-17T17:56:41.369673Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-04-17T17:56:41.369700Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-04-17T17:56:41.369731Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-04-17T17:56:41.369757Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-04-17T17:56:41.369782Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-04-17T17:56:41.369808Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-04-17T17:56:41.369832Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-04-17T17:56:41.369858Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-04-17T17:56:41.369883Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2025-04-17T17:56:41.369907Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-04-17T17:56:41.369931Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2025-04-17T17:56:41.369957Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-04-17T17:56:41.369983Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-04-17T17:56:41.370007Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-04-17T17:56:41.370031Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-04-17T17:56:41.370057Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-04-17T17:56:41.370082Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-04-17T17:56:41.370111Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-04-17T17:56:41.370137Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 
217:1002 Path# /dev/disk3 2025-04-17T17:56:41.370163Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2025-04-17T17:56:41.370231Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2025-04-17T17:56:41.370258Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-04-17T17:56:41.370284Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2025-04-17T17:56:41.370310Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2025-04-17T17:56:41.370354Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-04-17T17:56:41.370391Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2025-04-17T17:56:41.370421Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2025-04-17T17:56:41.370448Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-17T17:56:50.251286Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.251322Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.251351Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:50.283933Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:50.284735Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T17:56:50.284821Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.287495Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.287518Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.287538Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:50.288053Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:50.288543Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T17:56:50.288595Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.289550Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.289571Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.289591Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:50.289901Z :ERROR: [db] [sessionid] [cluster] Got error. 
Status: INTERNAL_ERROR. Description: 2025-04-17T17:56:50.289941Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.289965Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.290082Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-17T17:56:50.290951Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.290968Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.290985Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:50.306527Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-17T17:56:50.306596Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.306623Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.306701Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-17T17:56:50.308264Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:56:50.308292Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:56:50.308316Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:50.314643Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:50.326440Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:50.345942Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:56:50.350585Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:50.350976Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-04-17T17:56:50.355060Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-17T17:56:50.356342Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:50.356398Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T17:56:50.356424Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T17:56:50.356445Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-17T17:56:50.356477Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-17T17:56:50.356500Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-17T17:56:50.356519Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-17T17:56:50.356539Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-17T17:56:50.356586Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-17T17:56:50.356609Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-17T17:56:50.356637Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-17T17:56:50.356660Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-17T17:56:50.356679Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-17T17:56:50.356697Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-17T17:56:50.356715Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-17T17:56:50.356744Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-17T17:56:50.356794Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-17T17:56:50.356812Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-17T17:56:50.356829Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-17T17:56:50.356848Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-17T17:56:50.356868Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-17T17:56:50.356886Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-17T17:56:50.356905Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-17T17:56:50.356923Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-17T17:56:50.356953Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-17T17:56:50.356976Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-17T17:56:50.356994Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-17T17:56:50.357013Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-17T17:56:50.357030Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-17T17:56:50.357048Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-17T17:56:50.357070Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-17T17:56:50.357106Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-17T17:56:50.357189Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-17T17:56:50.357210Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-17T17:56:50.357227Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-17T17:56:50.357244Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-17T17:56:50.357265Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-17T17:56:50.357284Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-17T17:56:50.357302Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-17T17:56:50.357324Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-17T17:56:50.357342Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-17T17:56:50.357361Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-17T17:56:50.357385Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-17T17:56:50.357403Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-17T17:56:50.357422Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-17T17:56:50.357441Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-17T17:56:50.357459Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-17T17:56:50.357489Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-17T17:56:50.357512Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-17T17:56:50.357548Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-17T17:56:50.357606Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-17T17:56:50.360037Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-17T17:56:50.360240Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-17T17:56:50.360276Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-17T17:56:50.360308Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-17T17:56:50.360329Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-17T17:56:50.360352Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-17T17:56:50.360366Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-17T17:56:50.360390Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-17T17:56:50.360416Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-17T17:56:50.360457Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-17T17:56:50.360475Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-17T17:56:50.360490Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-17T17:56:50.360505Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-17T17:56:50.360535Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-17T17:56:50.360553Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-17T17:56:50.360569Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-17T17:56:50.360583Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-17T17:56:50.360619Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-17T17:56:50.360633Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-17T17:56:50.360650Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-17T17:56:50.360665Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-17T17:56:50.360682Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-17T17:56:50.360696Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-17T17:56:50.360716Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-17T17:56:50.360746Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-17T17:56:50.360763Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-17T17:56:50.360778Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-17T17:56:50.360794Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-17T17:56:50.360809Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-17T17:56:50.360836Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-17T17:56:50.360854Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-17T17:56:50.360869Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-17T17:56:50.360884Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-17T17:56:50.360942Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-17T17:56:50.360959Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-17T17:56:50.360975Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-17T17:56:50.360990Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-17T17:56:50.361006Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-17T17:56:50.361026Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-17T17:56:50.361048Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-17T17:56:50.361072Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-17T17:56:50.361087Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-17T17:56:50.361103Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-17T17:56:50.361117Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-17T17:56:50.361135Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-17T17:56:50.361217Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-17T17:56:50.361237Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-17T17:56:50.361251Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-17T17:56:50.361265Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-17T17:56:50.361281Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-17T17:56:50.361296Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-17T17:56:50.361347Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-17T17:56:50.361482Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-17T17:56:50.367508Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.367534Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.367556Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:50.398459Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:56:50.399742Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:50.399951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.402666Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:50.501606Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.501845Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:56:50.501906Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:50.501946Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-17T17:56:50.502009Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T17:56:50.706280Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-17T17:56:50.810395Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-17T17:56:50.810538Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T17:56:50.810683Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-17T17:56:50.811791Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.811811Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.811829Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:50.812271Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:50.812622Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:50.812757Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.822452Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:50.923148Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:50.926101Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T17:56:50.926285Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:50.926329Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-17T17:56:50.926425Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-17T17:56:50.926556Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T17:56:50.926783Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-17T17:56:50.926865Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-17T17:56:50.926967Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 6830, MsgBus: 4515 2025-04-17T17:56:29.847283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342505572800569:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:29.847326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4f/r3tmp/tmp9AqzcX/pdisk_1.dat 2025-04-17T17:56:30.632304Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:30.634130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:30.634360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:30.638326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6830, node 1 2025-04-17T17:56:30.961711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:30.961730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:30.961736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:30.961851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4515 TClient is connected to server localhost:4515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:56:31.946470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:31.970726Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:31.988743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:32.203629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:32.486818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:32.686761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:34.854371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342505572800569:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:34.854437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:34.991238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342527047638855:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:34.991376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:35.514116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.584357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.659801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.699424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.759430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.878246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.945104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:36.067050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342535637574122:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:36.067164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:36.074377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342535637574127:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:36.079719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:36.104747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342535637574129:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:36.172589Z node 1 :TX_PROXY ERROR: Actor# [1:7494342535637574188:3569] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:37.862828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.851200Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGZmMmMxMDItODg0YjlmZWMtNGVhZjZmOTYtNzM4NzEzMWU=, ActorId: [1:7494342544227509450:2550], ActorState: ExecuteState, TraceId: 01js2eahpg5he7v7ztye65s2j5, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-17T17:56:38.865144Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGZmMmMxMDItODg0YjlmZWMtNGVhZjZmOTYtNzM4NzEzMWU=, ActorId: [1:7494342544227509450:2550], ActorState: ReadyState, TraceId: 01js2eahtgbgrzyt16ynm9kv9q, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21262, MsgBus: 28220 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4f/r3tmp/tmp0WVBzL/pdisk_1.dat 2025-04-17T17:56:40.631382Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:40.844024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:40.844116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:40.847247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21262, node 2 2025-04-17T17:56:40.898493Z node 2 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:40.898690Z node 2 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:40.898749Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:41.114824Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:41.114843Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:41.114855Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:41.114977Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28220 TClient is connected to server localhost:28220 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:41.784478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:41.808706Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:41.828224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:41.938154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:42.289718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:42.394783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:46.280499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342580732570356:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:46.280586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:46.311385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.359256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.442360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.502645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.574115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.645366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.722700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.818800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342580732571028:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:46.818893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:46.819365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342580732571033:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:46.823528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:56:46.838719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342580732571035:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:56:46.915812Z node 2 :TX_PROXY ERROR: Actor# [2:7494342580732571093:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:48.808860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> BsControllerTest::SelfHealBlock4Plus2 >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> BsControllerTest::SelfHealMirror3dc >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> BackupRestoreS3::RestoreTablePartitioningSettings >> BackupRestoreS3::TestAllPrimitiveTypes-UINT16 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT32 >> BsControllerTest::TestLocalBrokenRelocation |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 >> TKeyValueTracingTest::WriteSmall >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter 2025-04-17 17:56:40,974 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:41,312 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
 pid    rss    ref    pdirt
 24148  46.0M  46.0M  23.0M  test_tool run_ut @/home/runner/.ya/build/build_root/6873/000c42/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args
 25871  3.6G   3.5G   3.5G   └─ ydb-core-viewer-ut --trace-path-append /home/runner/.ya/build/build_root/6873/000c42/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/ytest.report.trace
Test command err: 2025-04-17T17:47:35.639827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:114:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:47:35.640258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:47:35.640309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 7103, node 1 TClient is connected to server localhost:2204 2025-04-17T17:49:40.038690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3123:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.038890Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.038973Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.040966Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1740:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.041828Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3132:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.042038Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3135:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.042705Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.042854Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.043278Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.043357Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.043783Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.044183Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.044813Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:1743:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.045851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3119:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.046040Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.046496Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.046816Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.047837Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.048711Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1737:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.049800Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.050339Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3126:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.050490Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3129:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:49:40.051537Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.051608Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:49:40.051708Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.051981Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:49:40.052023Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:49:40.415140Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:49:40.650064Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:49:40.669607Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:49:41.076328Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 65050, node 2 TClient is connected to server localhost:28489 2025-04-17T17:49:41.423746Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:49:41.423815Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:49:41.423863Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:49:41.424940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:51:19.012099Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3153:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:19.015845Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:19.015989Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:51:19.018752Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3157:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:19.019588Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3146:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:19.019911Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1606:2245], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:19.020686Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:19.020871Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:19.021695Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:51:19.021883Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:51:19.022670Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:51:19.023233Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:51:19.023444Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1609:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:19.023977Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:3163:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:51:19.024811Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path e ... tect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:53:36.466152Z node 24 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [24:3136:2377], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:53:36.468106Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:36.468209Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:36.468921Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:53:36.468978Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:53:36.469696Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:36.470013Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:53:37.131238Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:37.416487Z node 20 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:53:37.450694Z node 20 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:53:38.457880Z node 20 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 21277, node 20 TClient is connected to server localhost:8560 2025-04-17T17:53:39.219926Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:39.220049Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:39.220128Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:39.221381Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:17.443720Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:3171:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.445641Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.446980Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.448563Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:1998:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.448743Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:2001:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.450020Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [32:3125:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.450127Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.450243Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.450782Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.450844Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.451471Z node 30 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [30:3167:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.451708Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [31:3174:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.451896Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.453041Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.453181Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.454087Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.454149Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.454273Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.458935Z node 33 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [33:1216:2178], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.459457Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.459754Z node 37 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [37:2007:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.460385Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:2004:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:56:17.461472Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.461677Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.462014Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:17.462094Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T17:56:17.462595Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T17:56:18.235310Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:18.585206Z node 29 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T17:56:18.641893Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T17:56:20.172172Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 29369, node 29 TClient is connected to server localhost:31516 2025-04-17T17:56:21.351132Z node 29 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:21.351266Z node 29 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:21.351372Z node 29 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:21.352488Z node 29 :NET_CLASSIFIER ERROR: got bad distributable configuration
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/000c42/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/000c42/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
|81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TKeyValueTracingTest::ReadHuge >> TKeyValueTracingTest::ReadSmall ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-04-17T17:47:34.598238Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:34.708001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:34.734385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:34.734659Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:34.742707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:34.742940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:34.743183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:34.743389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:34.743514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:34.743621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:34.743738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:34.743869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:34.743994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:34.744134Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.744206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:34.744270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:34.771592Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:34.771772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:34.771832Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:34.771970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.772114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:34.772194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:34.772240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:34.772322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:34.772576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:34.772623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:34.772652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:34.772887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.772964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:34.773005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:34.773040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:34.773129Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:34.773179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:34.773221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:34.773246Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:34.773359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:34.773408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:34.773438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:34.773485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:34.773527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:34.773554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:34.773968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-17T17:47:34.774052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-17T17:47:34.774128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-17T17:47:34.775965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=1775; 2025-04-17T17:47:34.776218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:34.776288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:34.776323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:34.776522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T17:47:34.776569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.776600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.776759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:34.776819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:34.776853Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:34.777025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:34.777068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:34.777102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:34.777237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:34.777283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:34.777328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish ... 
_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=653514; 2025-04-17T17:56:38.616535Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:14556:16507];tablet_id=9437184;parent=[1:14518:16476];fline=manager.cpp:82;event=ask_data;request=request_id=336;1={portions_count=120};; 2025-04-17T17:56:38.619725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14518:16476];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=120;path_id=1; 2025-04-17T17:56:38.633440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14518:16476];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T17:56:39.493609Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T17:56:39.493722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-04-17T17:56:39.494146Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=336; 2025-04-17T17:56:39.494215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=431; 2025-04-17T17:56:39.508600Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T17:56:39.508724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-04-17T17:56:39.529059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=20215; 2025-04-17T17:56:39.578452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=47922; 2025-04-17T17:56:39.578797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=49620; 2025-04-17T17:56:39.579029Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=143; 2025-04-17T17:56:39.579240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=149; 2025-04-17T17:56:39.581528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=2215; 2025-04-17T17:56:39.582064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=418; 2025-04-17T17:56:39.699088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=116930; 2025-04-17T17:56:39.829547Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=130302; 2025-04-17T17:56:39.829718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-04-17T17:56:39.829821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=34; 2025-04-17T17:56:39.829880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-17T17:56:39.829951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-17T17:56:39.830006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-04-17T17:56:39.830098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=53; 2025-04-17T17:56:39.830152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-17T17:56:39.830300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=85; 2025-04-17T17:56:39.830360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-17T17:56:39.830434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-04-17T17:56:39.830534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2025-04-17T17:56:39.830632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=60; 2025-04-17T17:56:39.830674Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=321886; 2025-04-17T17:56:39.830898Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113977136;raw_bytes=176366848;count=46;records=1845000} inactive {blob_bytes=159801008;raw_bytes=247705372;count=74;records=2587618} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T17:56:39.831634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14518:16476];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T17:56:39.831728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14518:16476];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T17:56:39.831829Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:14518:16476];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T17:56:39.831898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T17:56:39.832233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:56:39.832328Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:56:39.832564Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=19; 2025-04-17T17:56:39.832661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744911759929;tx_id=18446744073709551615;;current_snapshot_ts=1744912055769; 2025-04-17T17:56:39.832723Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=19;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:56:39.832808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:56:39.832865Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:56:39.832991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:56:39.836096Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:56:39.839933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T17:56:39.841839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T17:56:39.841896Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T17:56:39.841938Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-04-17T17:56:39.842025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-04-17T17:56:39.842130Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0;
2025-04-17T17:56:39.842265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=19;
2025-04-17T17:56:39.842376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744911759929;tx_id=18446744073709551615;;current_snapshot_ts=1744912055769;
2025-04-17T17:56:39.842453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=19;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-17T17:56:39.842528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes;
2025-04-17T17:56:39.842598Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes;
2025-04-17T17:56:39.842728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s;
2025-04-17T17:56:39.842812Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14518:16476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes;
>> TSequence::CopyTableWithSequence [GOOD]
>> TSequence::AlterSequence
>> TSequence::CreateSequencesWithIndexedTable [GOOD]
>> TSequence::CreateTableWithDefaultFromSequence
>> BackupRestore::TestAllPrimitiveTypes-INT8 [GOOD]
>> BackupRestore::TestAllPrimitiveTypes-UINT16
>> KqpStats::JoinNoStatsYql
>> TKeyValueTracingTest::WriteSmall [FAIL]
>> TKeyValueTracingTest::ReadSmall [FAIL]
>> TSequence::CreateTableWithDefaultFromSequence [GOOD]
>> TSequence::CreateTableWithDefaultFromSequenceAndIndex
>> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap
>> TSequence::AlterSequence [GOOD]
>> TSequence::AlterTableSetDefaultFromSequence
>> TKeyValueTracingTest::ReadHuge [FAIL]
>> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD]
>> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD]
>> BackupRestore::RestoreTableSplitBoundaries
|81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL]
Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1026092C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFEA342D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFEAEF58)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074)
NUnitTest::TTestFactory::Execute()+2438 (0x10724C16)
NUnitTest::RunMain(int, char**)+5213 (0x10744E6D)
??+0 (0x7F5F8650AD90)
__libc_start_main+128 (0x7F5F8650AE40)
_start+41 (0xD80D029)
|81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut
|81.5%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut
>> BsControllerTest::TestLocalBrokenRelocation [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T17:56:49.803114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T17:56:49.803189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:56:49.803226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T17:56:49.803255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T17:56:49.803307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T17:56:49.803343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T17:56:49.803410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T17:56:49.803485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T17:56:49.803810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T17:56:50.064930Z node 1
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:56:50.064992Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:50.099133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:56:50.099303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:56:50.099462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:56:50.124250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:56:50.124669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:56:50.125363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:56:50.125794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:56:50.134500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:56:50.135875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:56:50.135943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:56:50.136077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:56:50.136122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:56:50.136164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:56:50.136422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.164574Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:56:50.494513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:56:50.494764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.495020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:56:50.495323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:56:50.495378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.511091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:56:50.511233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:56:50.511427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.511487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:56:50.511536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:56:50.511569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:56:50.519105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.519176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:56:50.519213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:56:50.527075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.527135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.527191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:56:50.527241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.541782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:56:50.555176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:56:50.555393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:56:50.556370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:56:50.556520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:56:50.556586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:56:50.556865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:56:50.556918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:56:50.557076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:56:50.557165Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:56:50.563212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:56:50.563267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:56:50.563470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:56:50.563510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:56:50.563821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.563869Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:56:50.563963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:50.564004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.564056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:50.564103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.564135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:56:50.564181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.564241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:56:50.564268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:56:50.564335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:56:50.564368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:56:50.564396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:56:50.566483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:56:50.566608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:56:50.566642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
peration in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:56:59.272898Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T17:56:59.272938Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T17:56:59.273056Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-17T17:56:59.273092Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:59.274154Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.274221Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2025-04-17T17:56:59.274334Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:335:2315] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-17T17:56:59.274982Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:56:59.275027Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:56:59.275086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-17T17:56:59.275152Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:56:59.275535Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T17:56:59.275715Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:59.275759Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-17T17:56:59.275814Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-17T17:56:59.275864Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-17T17:56:59.275914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-17T17:56:59.275968Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-17T17:56:59.276676Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.276720Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-04-17T17:56:59.276797Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:342:2319] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-17T17:56:59.277118Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:56:59.277157Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.277962Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:56:59.278003Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:56:59.278058Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:56:59.278105Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:56:59.278456Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:56:59.278576Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:56:59.278614Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-17T17:56:59.278648Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-17T17:56:59.278689Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-17T17:56:59.278721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-17T17:56:59.278766Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-17T17:56:59.278844Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:411:2369] message: TxId: 102 2025-04-17T17:56:59.278900Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-17T17:56:59.278968Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:56:59.279022Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:56:59.279170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:56:59.279225Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-17T17:56:59.279250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-17T17:56:59.279286Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T17:56:59.279314Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-17T17:56:59.279337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-17T17:56:59.279385Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T17:56:59.279416Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-17T17:56:59.279438Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-17T17:56:59.279488Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-17T17:56:59.279956Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435084, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-17T17:56:59.280004Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-17T17:56:59.280080Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate 
queue, at schemeshard: 72057594046678944 2025-04-17T17:56:59.280138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-17T17:56:59.280232Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:56:59.281201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:56:59.281242Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.281326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:56:59.281355Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.281397Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:56:59.281462Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.281519Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:56:59.281544Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.283550Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:56:59.283593Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.283673Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.285633Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:56:59.285743Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:411:2369] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-04-17T17:56:59.285931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:56:59.286013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:517:2468] 2025-04-17T17:56:59.286264Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:56:59.286445Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:519:2470], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:59.286486Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:56:59.286517Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-17T17:56:59.287061Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:597:2547], Recipient [7:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T17:56:59.287128Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvSchemeShard::TEvDescribeScheme
2025-04-17T17:56:59.287263Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T17:56:59.287543Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 274us result status StatusPathDoesNotExist
2025-04-17T17:56:59.287737Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
|81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
|81.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
>> TSequence::AlterTableSetDefaultFromSequence [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD]
Test command err: 2025-04-17T17:56:54.423054Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap
2025-04-17T17:56:54.423105Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect
2025-04-17T17:56:54.423192Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap
2025-04-17T17:56:54.423213Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect
2025-04-17T17:56:54.423267Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap
2025-04-17T17:56:54.423289Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect
2025-04-17T17:56:54.423332Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap
2025-04-17T17:56:54.423367Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect
2025-04-17T17:56:54.423402Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap
2025-04-17T17:56:54.423424Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect
2025-04-17T17:56:54.423454Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap
2025-04-17T17:56:54.423474Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect
2025-04-17T17:56:54.423510Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap
2025-04-17T17:56:54.423540Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect
2025-04-17T17:56:54.423577Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap
2025-04-17T17:56:54.423597Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect
2025-04-17T17:56:54.423628Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap
2025-04-17T17:56:54.423659Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect
2025-04-17T17:56:54.423702Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap
2025-04-17T17:56:54.423722Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect
2025-04-17T17:56:54.423776Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap
2025-04-17T17:56:54.423798Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect
2025-04-17T17:56:54.423831Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-17T17:56:54.423850Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-17T17:56:54.423883Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-17T17:56:54.423912Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-17T17:56:54.423956Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-17T17:56:54.423976Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-17T17:56:54.424024Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-17T17:56:54.424043Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-17T17:56:54.424083Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-17T17:56:54.424101Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-17T17:56:54.424135Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-17T17:56:54.424154Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-17T17:56:54.424185Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-17T17:56:54.424205Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-17T17:56:54.424265Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-17T17:56:54.424286Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-17T17:56:54.424332Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-17T17:56:54.424350Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-17T17:56:54.424380Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-17T17:56:54.424402Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-17T17:56:54.424434Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-17T17:56:54.424452Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-17T17:56:54.424486Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-17T17:56:54.424505Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-17T17:56:54.424535Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-17T17:56:54.424554Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-17T17:56:54.424587Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-17T17:56:54.424626Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-17T17:56:54.424666Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-17T17:56:54.424686Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-17T17:56:54.424745Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-17T17:56:54.424768Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-17T17:56:54.424801Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-17T17:56:54.424827Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-17T17:56:54.424861Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-17T17:56:54.424891Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-17T17:56:54.424933Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-17T17:56:54.424952Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-17T17:56:54.424983Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-17T17:56:54.425002Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-17T17:56:54.425034Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-17T17:56:54.425052Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-17T17:56:54.425102Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-17T17:56:54.425123Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-17T17:56:54.425157Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-17T17:56:54.425175Z 34 
00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-17T17:56:54.425220Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-17T17:56:54.425241Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-17T17:56:54.425279Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-17T17:56:54.425310Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-04-17T17:56:54.440892Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-17T17:56:54.442310Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-17T17:56:54.442374Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-17T17:56:54.442433Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-17T17:56:54.442472Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-17T17:56:54.442521Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-17T17:56:54.442559Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-17T17:56:54.442595Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-17T17:56:54.442628Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-17T17:56:54.442666Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-17T17:56:54.442701Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-17T17:56:54.442738Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-17T17:56:54.442773Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-17T17:56:54.442807Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-17T17:56:54.442843Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-17T17:56:54.442881Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-17T17:56:54.442916Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-17T17:56:54.442949Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] 
ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-17T17:56:54.442984Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-17T17:56:54.443039Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-17T17:56:54.443078Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-17T17:56:54.443115Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-17T17:56:54.443179Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-17T17:56:54.443227Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-17T17:56:54.443290Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-17T17:56:54.443327Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-17T17:56:54.443360Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-17T17:56:54.443393Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-17T17:56:54.443447Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-17T17:56:54.443482Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-17T17:56:54.443517Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-17T17:56:54.443552Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-17T17:56:54.443589Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-17T17:56:54.443623Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-17T17:56:54.443659Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2025-04-17T17:56:59.027670Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2025-04-17T17:56:59.027712Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2025-04-17T17:56:59.027753Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2025-04-17T17:56:59.027794Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2025-04-17T17:56:59.028379Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-17T17:56:59.028487Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2025-04-17T17:56:59.028532Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2025-04-17T17:56:59.028573Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2025-04-17T17:56:59.028625Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2025-04-17T17:56:59.028684Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2025-04-17T17:56:59.028743Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2025-04-17T17:56:59.028790Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2025-04-17T17:56:59.028835Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2025-04-17T17:56:59.028880Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2025-04-17T17:56:59.028922Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2025-04-17T17:56:59.028965Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2025-04-17T17:56:59.029005Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2025-04-17T17:56:59.029071Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2025-04-17T17:56:59.029113Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2025-04-17T17:56:59.029155Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2025-04-17T17:56:59.029194Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2025-04-17T17:56:59.032043Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-17T17:56:59.032138Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2025-04-17T17:56:59.032190Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2025-04-17T17:56:59.032237Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2025-04-17T17:56:59.032292Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2025-04-17T17:56:59.032354Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2025-04-17T17:56:59.032405Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2025-04-17T17:56:59.032443Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2025-04-17T17:56:59.032488Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2025-04-17T17:56:59.032526Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2025-04-17T17:56:59.032566Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2025-04-17T17:56:59.032621Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2025-04-17T17:56:59.033044Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T17:56:59.033098Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2025-04-17T17:56:59.033164Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2025-04-17T17:56:59.033223Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2025-04-17T17:56:59.033269Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2025-04-17T17:56:59.033309Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2025-04-17T17:56:59.033347Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2025-04-17T17:56:59.033425Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2025-04-17T17:56:59.033463Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2025-04-17T17:56:59.037026Z 4 01h25m01.189560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.037672Z 7 01h25m01.346560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2025-04-17T17:56:59.038222Z 10 01h25m01.466560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2025-04-17T17:56:59.038733Z 2 01h25m01.613560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.039218Z 2 01h25m02.091560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.039721Z 4 01h25m02.105560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.040193Z 4 01h25m02.797560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.045517Z 5 01h25m03.062560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.046149Z 8 01h25m03.342560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2025-04-17T17:56:59.046659Z 7 01h25m03.378560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2025-04-17T17:56:59.047088Z 5 01h25m03.548560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.047500Z 10 01h25m03.874560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2025-04-17T17:56:59.047902Z 4 01h25m04.297560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2025-04-17T17:56:59.048357Z 10 01h25m04.795560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2025-04-17T17:56:59.050391Z 7 01h25m05.013560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2025-04-17T17:56:59.050925Z 7 01h25m05.895560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2025-04-17T17:56:59.051391Z 2 01h25m08.150560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] 
status changed to READY 2025-04-17T17:56:59.052417Z 1 01h25m08.151072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.052494Z 1 01h25m08.151072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2025-04-17T17:56:59.053096Z 2 01h25m13.329560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2025-04-17T17:56:59.053923Z 1 01h25m13.330072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.053975Z 1 01h25m13.330072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2025-04-17T17:56:59.054076Z 4 01h25m13.734560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2025-04-17T17:56:59.061404Z 1 01h25m13.735072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.061492Z 1 01h25m13.735072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2025-04-17T17:56:59.061681Z 5 01h25m14.310560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2025-04-17T17:56:59.062830Z 1 01h25m14.311072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.062905Z 1 01h25m14.311072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2025-04-17T17:56:59.064149Z 10 01h25m15.825560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2025-04-17T17:56:59.065270Z 1 01h25m15.826072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.065333Z 1 01h25m15.826072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2025-04-17T17:56:59.065497Z 5 01h25m16.579560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2025-04-17T17:56:59.070658Z 1 01h25m16.580072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.070742Z 1 01h25m16.580072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2025-04-17T17:56:59.070929Z 7 01h25m17.832560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2025-04-17T17:56:59.072251Z 1 01h25m17.833072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.072357Z 1 01h25m17.833072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2025-04-17T17:56:59.072520Z 10 01h25m19.108560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2025-04-17T17:56:59.075941Z 1 01h25m19.109072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.076012Z 1 01h25m19.109072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2025-04-17T17:56:59.076770Z 10 01h25m22.766560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2025-04-17T17:56:59.077918Z 1 01h25m22.767072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.077976Z 1 01h25m22.767072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2025-04-17T17:56:59.078825Z 4 01h25m25.044560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2025-04-17T17:56:59.079722Z 1 01h25m25.045072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.079774Z 1 01h25m25.045072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2025-04-17T17:56:59.080631Z 4 01h25m29.654560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2025-04-17T17:56:59.081346Z 1 01h25m29.655072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.081398Z 1 01h25m29.655072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2025-04-17T17:56:59.082932Z 7 01h25m32.809560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2025-04-17T17:56:59.083859Z 1 01h25m32.810072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2025-04-17T17:56:59.083921Z 1 01h25m32.810072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2025-04-17T17:56:59.096008Z 7 01h25m34.241560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2025-04-17T17:56:59.097389Z 1 01h25m34.242072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.097479Z 1 01h25m34.242072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2025-04-17T17:56:59.097651Z 4 01h25m34.617560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2025-04-17T17:56:59.098799Z 1 01h25m34.618072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.098865Z 1 01h25m34.618072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2025-04-17T17:56:59.099516Z 8 01h25m35.748560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2025-04-17T17:56:59.100504Z 1 01h25m35.749072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.100560Z 1 01h25m35.749072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2025-04-17T17:56:59.100693Z 7 01h25m35.985560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2025-04-17T17:56:59.101596Z 1 01h25m35.986072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-17T17:56:59.101662Z 1 01h25m35.986072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1026092C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFEA8DFC) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFEAF5DE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074) NUnitTest::TTestFactory::Execute()+2438 (0x10724C16) NUnitTest::RunMain(int, char**)+5213 (0x10744E6D) ??+0 (0x7FABF6426D90) __libc_start_main+128 (0x7FABF6426E40) _start+41 (0xD80D029) >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1026092C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFEA8DFC) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFEAF9CE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8) 
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074) NUnitTest::TTestFactory::Execute()+2438 (0x10724C16) NUnitTest::RunMain(int, char**)+5213 (0x10744E6D) ??+0 (0x7F851C90CD90) __libc_start_main+128 (0x7F851C90CE40) _start+41 (0xD80D029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:56:49.757680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:56:49.757765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:56:49.757799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:56:49.757828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:56:49.757889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:56:49.757914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:56:49.757979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:56:49.758048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:56:49.758644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:56:49.893271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:56:49.893327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:49.919502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:56:49.920008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:56:49.920189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:56:49.948576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:56:49.948811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:56:49.949518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:56:49.949786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:56:49.957378Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:56:49.958788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:56:49.958857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:56:49.959078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:56:49.959129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:56:49.959167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:56:49.959275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:56:49.975748Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:56:50.295750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:56:50.296092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.296437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:56:50.296786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:56:50.296866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.315420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:56:50.315620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:56:50.315878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.315966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:56:50.316007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:56:50.316045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:56:50.323354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.323448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:56:50.323523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:56:50.331646Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.331732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.331794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:56:50.331867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.336128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:56:50.343156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:56:50.343430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:56:50.344647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:56:50.344827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:56:50.344883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:56:50.345319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:56:50.345391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:56:50.345594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:56:50.345713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:56:50.359570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:56:50.359635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:56:50.359859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:56:50.359907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:56:50.360391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:56:50.360466Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:56:50.360575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:50.360635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.360698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:56:50.360736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.360783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:56:50.360836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:56:50.360876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:56:50.360909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:56:50.360996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:56:50.361040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:56:50.361079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:56:50.363434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:56:50.363583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:56:50.363631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-17T17:57:00.889528Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2025-04-17T17:57:00.889577Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:57:00.892230Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:989:2935], Recipient [7:124:2150]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1762 } } 2025-04-17T17:57:00.892296Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-17T17:57:00.892384Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1762 } } 2025-04-17T17:57:00.892419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-04-17T17:57:00.892601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1762 } } 2025-04-17T17:57:00.892723Z node 7 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1762 } } 2025-04-17T17:57:00.892792Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 114 2025-04-17T17:57:00.893926Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1053:2991], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:00.893993Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:00.894043Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T17:57:00.894268Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:989:2935], Recipient [7:124:2150]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 989 RawX2: 30064774007 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-17T17:57:00.894311Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-17T17:57:00.894437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 989 RawX2: 30064774007 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-17T17:57:00.894492Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-04-17T17:57:00.894679Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 989 RawX2: 30064774007 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-17T17:57:00.894758Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T17:57:00.894899Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 989 RawX2: 30064774007 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-17T17:57:00.894986Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T17:57:00.895042Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-17T17:57:00.895102Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T17:57:00.895164Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2025-04-17T17:57:00.895394Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:57:00.895772Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:57:00.899381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-17T17:57:00.899475Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:57:00.900259Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-17T17:57:00.900334Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:57:00.900580Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-17T17:57:00.900623Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:57:00.901099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-17T17:57:00.901160Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:57:00.901214Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2025-04-17T17:57:00.901349Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:989:2935] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-04-17T17:57:00.901808Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:124:2150], Recipient [7:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T17:57:00.901850Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T17:57:00.901909Z node 
7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-17T17:57:00.901963Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2025-04-17T17:57:00.902112Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:57:00.902155Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-04-17T17:57:00.902225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-17T17:57:00.902276Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-04-17T17:57:00.902323Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-17T17:57:00.902411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-04-17T17:57:00.902513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:393:2361] message: TxId: 114 2025-04-17T17:57:00.902582Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-17T17:57:00.902634Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2025-04-17T17:57:00.902683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2025-04-17T17:57:00.902843Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T17:57:00.905131Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T17:57:00.905279Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:393:2361] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-04-17T17:57:00.905485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-04-17T17:57:00.905540Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1019:2957] 2025-04-17T17:57:00.905846Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1021:2959], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:57:00.905904Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:57:00.905937Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-04-17T17:57:00.907113Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1062:3000], Recipient [7:124:2150]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-04-17T17:57:00.907182Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T17:57:00.910679Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:57:00.910981Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at 
schemeshard: 72057594046678944 2025-04-17T17:57:00.911497Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-04-17T17:57:00.911786Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T17:57:00.914958Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:57:00.915168Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-04-17T17:57:00.915242Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 >> KqpStats::StreamLookupStats+StreamLookupJoin >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT32 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT64 |81.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::Explain >> KqpExplain::SsaProgramInJsonPlan >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> KqpStats::JoinNoStatsScan >> BackupRestore::TestAllPrimitiveTypes-UINT16 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INT64 >> KqpQuery::Now >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2025-04-17T17:56:53.460993Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-17T17:56:53.461058Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-17T17:56:53.461165Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-17T17:56:53.461185Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-17T17:56:53.461240Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-17T17:56:53.461268Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-17T17:56:53.461302Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-17T17:56:53.461321Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-17T17:56:53.461378Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-17T17:56:53.461402Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-17T17:56:53.461454Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-17T17:56:53.461476Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-17T17:56:53.461513Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-17T17:56:53.461544Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-17T17:56:53.461581Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-17T17:56:53.461601Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 
2025-04-17T17:56:53.461654Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-17T17:56:53.461674Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-17T17:56:53.461709Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-17T17:56:53.461729Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-17T17:56:53.461789Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-17T17:56:53.461812Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-17T17:56:53.461846Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-17T17:56:53.461865Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-17T17:56:53.461898Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-17T17:56:53.461914Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-17T17:56:53.461945Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-17T17:56:53.461963Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-17T17:56:53.461998Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-17T17:56:53.462029Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-17T17:56:53.462082Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-17T17:56:53.462101Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-17T17:56:53.462230Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-17T17:56:53.462256Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-17T17:56:53.462297Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-17T17:56:53.462320Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-17T17:56:53.462399Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-17T17:56:53.462425Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-17T17:56:53.462477Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-17T17:56:53.462498Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-17T17:56:53.462534Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-17T17:56:53.462555Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-17T17:56:53.462590Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-17T17:56:53.462614Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-17T17:56:53.462654Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-17T17:56:53.462692Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-17T17:56:53.462748Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-17T17:56:53.462773Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-17T17:56:53.462813Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-17T17:56:53.462833Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-17T17:56:53.462873Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-17T17:56:53.462909Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-17T17:56:53.462967Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-17T17:56:53.462993Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-17T17:56:53.463034Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-17T17:56:53.463055Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-17T17:56:53.463090Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-17T17:56:53.463111Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-17T17:56:53.463147Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-17T17:56:53.463166Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-17T17:56:53.463204Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-17T17:56:53.463226Z 31 00h00m00.000000s 
:BS_NODE DEBUG: [31] Connect 2025-04-17T17:56:53.463284Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-17T17:56:53.463309Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-17T17:56:53.478815Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2157:49] Status# ERROR ClientId# [1:2157:49] ServerId# [0:0:0] PipeClient# [1:2157:49] 2025-04-17T17:56:53.480012Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2158:37] Status# ERROR ClientId# [2:2158:37] ServerId# [0:0:0] PipeClient# [2:2158:37] 2025-04-17T17:56:53.480086Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2159:37] Status# ERROR ClientId# [3:2159:37] ServerId# [0:0:0] PipeClient# [3:2159:37] 2025-04-17T17:56:53.480146Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2160:37] Status# ERROR ClientId# [4:2160:37] ServerId# [0:0:0] PipeClient# [4:2160:37] 2025-04-17T17:56:53.480189Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2161:37] Status# ERROR ClientId# [5:2161:37] ServerId# [0:0:0] PipeClient# [5:2161:37] 2025-04-17T17:56:53.480223Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2162:37] Status# ERROR ClientId# [6:2162:37] ServerId# [0:0:0] PipeClient# [6:2162:37] 2025-04-17T17:56:53.480280Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2163:37] Status# ERROR ClientId# [7:2163:37] ServerId# [0:0:0] PipeClient# [7:2163:37] 2025-04-17T17:56:53.480328Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2164:37] Status# ERROR ClientId# [8:2164:37] ServerId# [0:0:0] PipeClient# [8:2164:37] 2025-04-17T17:56:53.480374Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2165:37] Status# ERROR ClientId# [9:2165:37] ServerId# [0:0:0] PipeClient# [9:2165:37] 2025-04-17T17:56:53.480413Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2166:37] Status# ERROR ClientId# [10:2166:37] ServerId# [0:0:0] PipeClient# [10:2166:37] 2025-04-17T17:56:53.480449Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2167:37] Status# ERROR ClientId# [11:2167:37] ServerId# [0:0:0] PipeClient# [11:2167:37] 2025-04-17T17:56:53.480483Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2168:37] Status# ERROR ClientId# [12:2168:37] ServerId# [0:0:0] PipeClient# [12:2168:37] 2025-04-17T17:56:53.480522Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2169:37] Status# ERROR ClientId# [13:2169:37] ServerId# [0:0:0] PipeClient# [13:2169:37] 2025-04-17T17:56:53.480561Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2170:37] Status# ERROR ClientId# [14:2170:37] ServerId# [0:0:0] PipeClient# [14:2170:37] 2025-04-17T17:56:53.480597Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2171:37] Status# ERROR ClientId# [15:2171:37] ServerId# [0:0:0] PipeClient# [15:2171:37] 2025-04-17T17:56:53.480633Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2172:37] Status# ERROR ClientId# [16:2172:37] ServerId# [0:0:0] PipeClient# [16:2172:37] 2025-04-17T17:56:53.480668Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2173:37] Status# ERROR ClientId# [17:2173:37] ServerId# [0:0:0] PipeClient# [17:2173:37] 2025-04-17T17:56:53.480707Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2174:37] Status# ERROR ClientId# [18:2174:37] ServerId# [0:0:0] PipeClient# [18:2174:37] 2025-04-17T17:56:53.480768Z 19 00h00m00.000000s :BS_NODE DEBUG: 
[19] ClientConnected Sender# [19:2175:37] Status# ERROR ClientId# [19:2175:37] ServerId# [0:0:0] PipeClient# [19:2175:37] 2025-04-17T17:56:53.480833Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2176:37] Status# ERROR ClientId# [20:2176:37] ServerId# [0:0:0] PipeClient# [20:2176:37] 2025-04-17T17:56:53.480870Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2177:37] Status# ERROR ClientId# [21:2177:37] ServerId# [0:0:0] PipeClient# [21:2177:37] 2025-04-17T17:56:53.480930Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2178:37] Status# ERROR ClientId# [22:2178:37] ServerId# [0:0:0] PipeClient# [22:2178:37] 2025-04-17T17:56:53.480971Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2179:37] Status# ERROR ClientId# [23:2179:37] ServerId# [0:0:0] PipeClient# [23:2179:37] 2025-04-17T17:56:53.481008Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2180:37] Status# ERROR ClientId# [24:2180:37] ServerId# [0:0:0] PipeClient# [24:2180:37] 2025-04-17T17:56:53.481072Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2181:37] Status# ERROR ClientId# [25:2181:37] ServerId# [0:0:0] PipeClient# [25:2181:37] 2025-04-17T17:56:53.481116Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2182:37] Status# ERROR ClientId# [26:2182:37] ServerId# [0:0:0] PipeClient# [26:2182:37] 2025-04-17T17:56:53.481161Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2183:37] Status# ERROR ClientId# [27:2183:37] ServerId# [0:0:0] PipeClient# [27:2183:37] 2025-04-17T17:56:53.481197Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2184:37] Status# ERROR ClientId# [28:2184:37] ServerId# [0:0:0] PipeClient# [28:2184:37] 2025-04-17T17:56:53.481241Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2185:37] Status# ERROR ClientId# [29:2185:37] ServerId# [0:0:0] PipeClient# [29:2185:37] 2025-04-17T17:56:53.481278Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2186:37] Status# ERROR ClientId# [30:2186:37] ServerId# [0:0:0] PipeClient# [30:2186:37] 2025-04-17T17:56:53.481333Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2187:37] Status# ERROR ClientId# [31:2187:37] ServerId# [0:0:0] PipeClient# [31:2187:37] 2025-04-17T17:56:53.481371Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2188:37] Status# ERROR ClientId# [32:2188:37] ServerId# [0:0:0] PipeClient# [32:2188:37] 2025-04-17T17:56:53.653232Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.136533s 2025-04-17T17:56:53.653382Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.136705s 2025-04-17T17:56:53.670570Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2257:73] expected 1 current 0 2025-04-17T17:56:53.670655Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2258:38] expected 1 current 0 2025-04-17T17:56:53.670685Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2259:38] expected 1 current 0 2025-04-17T17:56:53.670728Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2260:38] expected 1 current 0 2025-04-17T17:56:53.670786Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2261:38] expected 1 current 0 2025-04-17T17:56:53.670813Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState 
from [6:2262:38] expected 1 current 0 2025-04-17T17:56:53.670842Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [7 ... 25-04-17T17:57:04.022755Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.022824Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-17T17:57:04.022873Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] VDiskId# [80000036:5:0:1:0] -> [80000036:6:0:1:0] 2025-04-17T17:57:04.022948Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-17T17:57:04.022992Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] VDiskId# [80000036:5:0:5:0] -> [80000036:6:0:5:0] 2025-04-17T17:57:04.023067Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-17T17:57:04.023111Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000036:5:0:4:0] -> [80000036:6:0:4:0] 2025-04-17T17:57:04.023189Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-17T17:57:04.023225Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000036:6:0:0:0] PDiskId# 1001 VSlotId# 1015 created 2025-04-17T17:57:04.023284Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000036:6:0:0:0] status changed to INIT_PENDING 2025-04-17T17:57:04.023365Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-17T17:57:04.023415Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000036:5:0:6:0] -> [80000036:6:0:6:0] 2025-04-17T17:57:04.023495Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-17T17:57:04.023539Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000036:5:0:7:0] -> [80000036:6:0:7:0] 2025-04-17T17:57:04.023623Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-04-17T17:57:04.023671Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000036:5:0:2:0] -> [80000036:6:0:2:0] 2025-04-17T17:57:04.023746Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-17T17:57:04.023792Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] VDiskId# [80000036:5:0:3:0] -> [80000036:6:0:3:0] 2025-04-17T17:57:04.023883Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.023948Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-17T17:57:04.023987Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] VDiskId# [80000026:5:0:1:0] -> [80000026:6:0:1:0] 2025-04-17T17:57:04.024056Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-17T17:57:04.024100Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] VDiskId# [80000026:5:0:3:0] -> [80000026:6:0:3:0] 2025-04-17T17:57:04.024169Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-17T17:57:04.026362Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000026:5:0:4:0] -> [80000026:6:0:4:0] 2025-04-17T17:57:04.026529Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-17T17:57:04.026576Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000026:6:0:0:0] PDiskId# 1001 VSlotId# 1016 created 2025-04-17T17:57:04.026652Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000026:6:0:0:0] status changed to INIT_PENDING 2025-04-17T17:57:04.026736Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-17T17:57:04.026786Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000026:5:0:2:0] -> [80000026:6:0:2:0] 2025-04-17T17:57:04.026881Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-17T17:57:04.026926Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000026:5:0:6:0] -> [80000026:6:0:6:0] 2025-04-17T17:57:04.026998Z 24 05h15m00.117920s 
:BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-17T17:57:04.027040Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000026:5:0:7:0] -> [80000026:6:0:7:0] 2025-04-17T17:57:04.027117Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-17T17:57:04.027165Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] VDiskId# [80000026:5:0:5:0] -> [80000026:6:0:5:0] 2025-04-17T17:57:04.027265Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.027330Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-17T17:57:04.027371Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] VDiskId# [80000016:4:0:1:0] -> [80000016:5:0:1:0] 2025-04-17T17:57:04.027438Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-17T17:57:04.027482Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000016:4:0:4:0] -> [80000016:5:0:4:0] 2025-04-17T17:57:04.027553Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-17T17:57:04.027591Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000016:5:0:0:0] PDiskId# 1001 VSlotId# 1017 created 2025-04-17T17:57:04.030044Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000016:5:0:0:0] status changed to INIT_PENDING 2025-04-17T17:57:04.030244Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-17T17:57:04.030309Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000016:4:0:5:0] -> [80000016:5:0:5:0] 2025-04-17T17:57:04.030396Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-17T17:57:04.030442Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000016:4:0:6:0] -> [80000016:5:0:6:0] 2025-04-17T17:57:04.030509Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-17T17:57:04.030553Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000016:4:0:7:0] -> [80000016:5:0:7:0] 2025-04-17T17:57:04.030630Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-04-17T17:57:04.030674Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000016:4:0:2:0] -> [80000016:5:0:2:0] 2025-04-17T17:57:04.030748Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T17:57:04.030790Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] VDiskId# [80000016:4:0:3:0] -> [80000016:5:0:3:0] 2025-04-17T17:57:04.030899Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.030966Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-17T17:57:04.031008Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] VDiskId# [80000006:4:0:1:0] -> [80000006:5:0:1:0] 2025-04-17T17:57:04.031074Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-17T17:57:04.031115Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000006:4:0:4:0] -> [80000006:5:0:4:0] 2025-04-17T17:57:04.031185Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-17T17:57:04.031222Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000006:5:0:0:0] PDiskId# 1001 VSlotId# 1018 created 2025-04-17T17:57:04.031290Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000006:5:0:0:0] status changed to INIT_PENDING 2025-04-17T17:57:04.031368Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-17T17:57:04.031415Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000006:4:0:6:0] -> [80000006:5:0:6:0] 2025-04-17T17:57:04.031482Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-17T17:57:04.031523Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000006:4:0:7:0] -> [80000006:5:0:7:0] 
2025-04-17T17:57:04.031594Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-17T17:57:04.031640Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] VDiskId# [80000006:4:0:3:0] -> [80000006:5:0:3:0] 2025-04-17T17:57:04.031713Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-04-17T17:57:04.031759Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000006:4:0:2:0] -> [80000006:5:0:2:0] 2025-04-17T17:57:04.031836Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T17:57:04.031883Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] VDiskId# [80000006:4:0:5:0] -> [80000006:5:0:5:0] 2025-04-17T17:57:04.037360Z 5 05h15m01.296920s :BS_NODE DEBUG: [5] VDiskId# [80000006:5:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.037816Z 5 05h15m01.767920s :BS_NODE DEBUG: [5] VDiskId# [80000016:5:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.039265Z 5 05h15m01.904920s :BS_NODE DEBUG: [5] VDiskId# [8000001e:5:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.041623Z 24 05h15m02.389920s :BS_NODE DEBUG: [24] VDiskId# [80000030:4:0:2:0] status changed to REPLICATING 2025-04-17T17:57:04.042348Z 5 05h15m02.687920s :BS_NODE DEBUG: [5] VDiskId# [8000003e:5:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.043159Z 5 05h15m02.808920s :BS_NODE DEBUG: [5] VDiskId# [80000011:4:0:4:0] status changed to REPLICATING 2025-04-17T17:57:04.043216Z 5 05h15m02.808920s :BS_NODE DEBUG: [5] VDiskId# [80000036:6:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.044111Z 5 05h15m03.625920s :BS_NODE DEBUG: [5] VDiskId# [8000002e:5:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.053559Z 5 05h15m03.995920s :BS_NODE DEBUG: [5] VDiskId# [8000000e:5:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.055943Z 5 05h15m05.948920s :BS_NODE DEBUG: [5] VDiskId# [80000026:6:0:0:0] status changed to REPLICATING 2025-04-17T17:57:04.056971Z 5 05h15m06.723920s :BS_NODE DEBUG: [5] VDiskId# [80000006:5:0:0:0] status changed to READY 2025-04-17T17:57:04.058400Z 17 05h15m06.724432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.058464Z 17 05h15m06.724432s :BS_NODE DEBUG: [17] VDiskId# [80000006:4:0:0:0] destroyed 2025-04-17T17:57:04.058899Z 5 05h15m10.340920s :BS_NODE DEBUG: [5] VDiskId# [80000016:5:0:0:0] status changed to READY 2025-04-17T17:57:04.060154Z 17 05h15m10.341432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.060208Z 17 05h15m10.341432s :BS_NODE DEBUG: [17] VDiskId# [80000016:4:0:0:0] destroyed 2025-04-17T17:57:04.060364Z 5 05h15m11.867920s :BS_NODE DEBUG: [5] VDiskId# [80000036:6:0:0:0] status changed to READY 2025-04-17T17:57:04.061606Z 17 05h15m11.868432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.061652Z 17 05h15m11.868432s :BS_NODE DEBUG: [17] VDiskId# [80000036:5:0:0:0] destroyed 2025-04-17T17:57:04.061790Z 5 05h15m13.895920s :BS_NODE DEBUG: [5] VDiskId# [8000002e:5:0:0:0] status changed to READY 2025-04-17T17:57:04.062865Z 17 05h15m13.896432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.062911Z 17 05h15m13.896432s :BS_NODE DEBUG: [17] VDiskId# [8000002e:4:0:0:0] destroyed 2025-04-17T17:57:04.063718Z 5 05h15m20.942920s :BS_NODE DEBUG: [5] VDiskId# [8000003e:5:0:0:0] status changed to READY 2025-04-17T17:57:04.064881Z 17 05h15m20.943432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.064933Z 17 05h15m20.943432s :BS_NODE DEBUG: [17] VDiskId# [8000003e:4:0:0:0] destroyed 2025-04-17T17:57:04.065075Z 5 05h15m21.570920s :BS_NODE DEBUG: [5] VDiskId# [8000000e:5:0:0:0] 
status changed to READY 2025-04-17T17:57:04.066128Z 17 05h15m21.571432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.067122Z 17 05h15m21.571432s :BS_NODE DEBUG: [17] VDiskId# [8000000e:4:0:0:0] destroyed 2025-04-17T17:57:04.067628Z 24 05h15m24.070920s :BS_NODE DEBUG: [24] VDiskId# [80000030:4:0:2:0] status changed to READY 2025-04-17T17:57:04.068668Z 17 05h15m24.071432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.068719Z 17 05h15m24.071432s :BS_NODE DEBUG: [17] VDiskId# [80000030:3:0:2:0] destroyed 2025-04-17T17:57:04.070302Z 5 05h15m32.989920s :BS_NODE DEBUG: [5] VDiskId# [80000011:4:0:4:0] status changed to READY 2025-04-17T17:57:04.071417Z 17 05h15m32.990432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.071466Z 17 05h15m32.990432s :BS_NODE DEBUG: [17] VDiskId# [80000011:3:0:4:0] destroyed 2025-04-17T17:57:04.071601Z 5 05h15m33.613920s :BS_NODE DEBUG: [5] VDiskId# [8000001e:5:0:0:0] status changed to READY 2025-04-17T17:57:04.072663Z 17 05h15m33.614432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.072707Z 17 05h15m33.614432s :BS_NODE DEBUG: [17] VDiskId# [8000001e:4:0:0:0] destroyed 2025-04-17T17:57:04.073760Z 5 05h15m39.225920s :BS_NODE DEBUG: [5] VDiskId# [80000026:6:0:0:0] status changed to READY 2025-04-17T17:57:04.124040Z 17 05h15m39.226432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:04.124121Z 17 05h15m39.226432s :BS_NODE DEBUG: [17] VDiskId# [80000026:5:0:0:0] destroyed >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> KqpStats::JoinNoStatsYql [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 21039, MsgBus: 2612 2025-04-17T17:55:03.183582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342138113676934:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:03.185190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000da0/r3tmp/tmpySqmpL/pdisk_1.dat 2025-04-17T17:55:03.549898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:03.584949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:03.585032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21039, node 1 2025-04-17T17:55:03.592250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:03.630406Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:03.630429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:03.630434Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:03.630569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2612 TClient is connected to server localhost:2612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:04.221119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:04.265541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:04.431126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.673111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:04.757326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:06.331840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342150998580523:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.331944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.641216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.671447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.700374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.734400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.769595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.801187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.871107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.940283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342150998581183:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.940388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342150998581188:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.940403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.944041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:06.954120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342150998581190:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:07.027623Z node 1 :TX_PROXY ERROR: Actor# [1:7494342155293548540:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:08.000284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:55:08.183283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342138113676934:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:08.183345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:08.901602Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2e7sx19qm0ev8ap621c1s8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdhMjA0ODMtNDMxZGY4ZjYtNmRmNzYyNjktMTRhMjUwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.935955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2e7sxc0k2p3f8bbchdr5d6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE3ZThjY2UtOGI1NWM3Y2QtNjdhYWEwNTYtMzU0NGIwNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.952441Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e7sx19qm0ev8ap621c1s8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdhMjA0ODMtNDMxZGY4ZjYtNmRmNzYyNjktMTRhMjUwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.960614Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2e7sy1c8cb1vsymsmhdmxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIzOWQ2ODMtZWM0NzUyNTEtOTg0ZmI3YzUtZmFhZGVjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.963071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2e7sxc0k2p3f8bbchdr5d6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE3ZThjY2UtOGI1NWM3Y2QtNjdhYWEwNTYtMzU0NGIwNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.963980Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01js2e7sy1bz8km31dynk5p2by, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTIzNzJmODMtMzc1MTgxYjYtZGM3NzFmY2EtNGNkZjQ2NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.971434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2e7sysc386913mz9snfdq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRhM2M0OWMtMTc3NWQxNTMtNGQzZDJlZGItNWU3YWM1NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.972371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. 
Ctx: { TraceId: 01js2e7sz75p7dfka2513d657x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI3MjFmNi1jZDVmZDhjZS03ZDg0NjE2Zi0yZTQ5NTliMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.972592Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01js2e7szj52hcvymmn195pvc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVjYzU2MzQtOTc1MjQwYjAtMTYzY2I0YTUtODYzMTQyMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.973882Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01js2e7sz511ax80zy5vyegc6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2IyYTVhM2MtYzNjNmFmOGQtNjQxMmM4NzgtMmUyNjY4ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.975849Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01js2e7sz55k9weqy1z831apy9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA4NDVjMzItM2E2NTQzMy ... sion/3?node_id=2&id=ZDE2NTMzMDItMmIyZTVlMTAtZjIyZTMyNjAtYWY2ZDNjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.525172Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714507. Ctx: { TraceId: 01js2eb506fpjasx791zyqcp75, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU1ZDU3MzAtY2RkY2I3ZTQtMWQzODc4MTQtMWQ5MjY3MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.535853Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714510. Ctx: { TraceId: 01js2eb506d3srvc34s2yqd6pj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDE2NTMzMDItMmIyZTVlMTAtZjIyZTMyNjAtYWY2ZDNjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.539423Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714511. Ctx: { TraceId: 01js2eb506fpjasx791zyqcp75, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU1ZDU3MzAtY2RkY2I3ZTQtMWQzODc4MTQtMWQ5MjY3MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.541799Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714513. Ctx: { TraceId: 01js2eb50tczdfjn5csq4psg5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM5MzJkMTItYTliMTIxOWMtYTI4M2M1NTctZTg1YTQzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.551317Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714512. Ctx: { TraceId: 01js2eb50t65fkttqkndpmv1q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJiYTJmMTgtYjRlZDJmODEtYmRjZDhlNWQtNGYxZmI3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.585972Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714514. Ctx: { TraceId: 01js2eb50tczdfjn5csq4psg5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM5MzJkMTItYTliMTIxOWMtYTI4M2M1NTctZTg1YTQzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.606083Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714515. 
Ctx: { TraceId: 01js2eb50t65fkttqkndpmv1q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJiYTJmMTgtYjRlZDJmODEtYmRjZDhlNWQtNGYxZmI3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.610701Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714516. Ctx: { TraceId: 01js2eb5223tqwsef9et9h42zp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjI0YTEyODAtN2M1MmMwZDMtYWMxZDI3OTQtODljMTQxNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.629415Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714517. Ctx: { TraceId: 01js2eb53pcz2kxbeh1z50sh5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGEzZTFmM2YtZmM1ODllYTctYjYzYmJkYTktNjkwYzBmZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.630647Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714518. Ctx: { TraceId: 01js2eb5223tqwsef9et9h42zp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjI0YTEyODAtN2M1MmMwZDMtYWMxZDI3OTQtODljMTQxNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.647457Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714519. Ctx: { TraceId: 01js2eb549bzjs5v8x974extmw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzViNzY0NmMtZmIxZDM5YWItMzRmYzA1ZGItZTVlMDMzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.650371Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714520. Ctx: { TraceId: 01js2eb5494y1s1a0qnq92jx10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDE2NTMzMDItMmIyZTVlMTAtZjIyZTMyNjAtYWY2ZDNjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.660797Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714521. Ctx: { TraceId: 01js2eb5494y1s1a0qnq92jx10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDE2NTMzMDItMmIyZTVlMTAtZjIyZTMyNjAtYWY2ZDNjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.665956Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714522. Ctx: { TraceId: 01js2eb54yf4wm9kz3s1sgwfnc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM5MzJkMTItYTliMTIxOWMtYTI4M2M1NTctZTg1YTQzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.667135Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714523. Ctx: { TraceId: 01js2eb5494y1s1a0qnq92jx10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDE2NTMzMDItMmIyZTVlMTAtZjIyZTMyNjAtYWY2ZDNjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.677676Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714524. Ctx: { TraceId: 01js2eb5571jfcqkv41ywbcxm3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJiYTJmMTgtYjRlZDJmODEtYmRjZDhlNWQtNGYxZmI3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.690716Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714525. 
Ctx: { TraceId: 01js2eb55j7b7ckvc7fw271m08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGEzZTFmM2YtZmM1ODllYTctYjYzYmJkYTktNjkwYzBmZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.704846Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714526. Ctx: { TraceId: 01js2eb55j7b7ckvc7fw271m08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGEzZTFmM2YtZmM1ODllYTctYjYzYmJkYTktNjkwYzBmZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.714166Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714528. Ctx: { TraceId: 01js2eb569d56bm6n5yvpmsegw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjI0YTEyODAtN2M1MmMwZDMtYWMxZDI3OTQtODljMTQxNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.716353Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714529. Ctx: { TraceId: 01js2eb56m9jfqcrkxeb65mh0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzViNzY0NmMtZmIxZDM5YWItMzRmYzA1ZGItZTVlMDMzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.717062Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714527. Ctx: { TraceId: 01js2eb56md23sx0j127jkf8gk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM5MzJkMTItYTliMTIxOWMtYTI4M2M1NTctZTg1YTQzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.728134Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714531. Ctx: { TraceId: 01js2eb56m9jfqcrkxeb65mh0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzViNzY0NmMtZmIxZDM5YWItMzRmYzA1ZGItZTVlMDMzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.728196Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714530. Ctx: { TraceId: 01js2eb569d56bm6n5yvpmsegw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjI0YTEyODAtN2M1MmMwZDMtYWMxZDI3OTQtODljMTQxNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.735759Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714532. Ctx: { TraceId: 01js2eb56m9jfqcrkxeb65mh0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzViNzY0NmMtZmIxZDM5YWItMzRmYzA1ZGItZTVlMDMzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.743778Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714533. Ctx: { TraceId: 01js2eb572f13a2jen2xvj86y4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJiYTJmMTgtYjRlZDJmODEtYmRjZDhlNWQtNGYxZmI3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.758010Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714534. Ctx: { TraceId: 01js2eb572f13a2jen2xvj86y4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJiYTJmMTgtYjRlZDJmODEtYmRjZDhlNWQtNGYxZmI3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.759000Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714535. 
Ctx: { TraceId: 01js2eb57q2xdrtn9rd3e5gfr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDE2NTMzMDItMmIyZTVlMTAtZjIyZTMyNjAtYWY2ZDNjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.771057Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714536. Ctx: { TraceId: 01js2eb57z2jc91sdhmw5y44ty, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU1ZDU3MzAtY2RkY2I3ZTQtMWQzODc4MTQtMWQ5MjY3MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.777778Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714537. Ctx: { TraceId: 01js2eb57q2xdrtn9rd3e5gfr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDE2NTMzMDItMmIyZTVlMTAtZjIyZTMyNjAtYWY2ZDNjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.778955Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714538. Ctx: { TraceId: 01js2eb583c9mp1qvesr8csnxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM5MzJkMTItYTliMTIxOWMtYTI4M2M1NTctZTg1YTQzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:56:58.798378Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714539. Ctx: { TraceId: 01js2eb583c9mp1qvesr8csnxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM5MzJkMTItYTliMTIxOWMtYTI4M2M1NTctZTg1YTQzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:56:58.810266Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714540. Ctx: { TraceId: 01js2eb58yffddmn1e6hf97hkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjI0YTEyODAtN2M1MmMwZDMtYWMxZDI3OTQtODljMTQxNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.820604Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714541. Ctx: { TraceId: 01js2eb59q4tjgh072p91672vy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzViNzY0NmMtZmIxZDM5YWItMzRmYzA1ZGItZTVlMDMzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.824707Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714542. Ctx: { TraceId: 01js2eb58yffddmn1e6hf97hkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjI0YTEyODAtN2M1MmMwZDMtYWMxZDI3OTQtODljMTQxNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:58.832921Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714543. Ctx: { TraceId: 01js2eb59q4tjgh072p91672vy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzViNzY0NmMtZmIxZDM5YWItMzRmYzA1ZGItZTVlMDMzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS >> KqpStats::JoinStatsBasicYql+StreamLookupJoin |81.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 17494535920096824821 |81.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> BsControllerTest::SelfHealMirror3dc [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown 2025-04-17 17:56:37,718 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:38,324 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 153764 46.0M 45.7M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6873/001883/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/tes 154041 1.6G 1.6G 1.1G └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6873/001883/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unit Test command err: Trying to start YDB, gRPC: 28640, MsgBus: 29530 2025-04-17T17:55:39.785066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342290183314439:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:39.785558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001883/r3tmp/tmpgU2zMJ/pdisk_1.dat 2025-04-17T17:55:40.397684Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:40.429575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:40.429654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:40.435946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28640, node 1 2025-04-17T17:55:40.566476Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:40.566501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:40.566512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T17:55:40.566664Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29530 TClient is connected to server localhost:29530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:41.445841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:43.829040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342307363184155:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:43.829167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.333142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:44.516828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342311658151574:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.516885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.517033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342311658151579:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.519972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:44.529687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342311658151581:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:44.617636Z node 1 :TX_PROXY ERROR: Actor# [1:7494342311658151622:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:44.784099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342290183314439:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:44.784183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:45.440017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:46.048433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:1, at schemeshard: 72057594046644480 2025-04-17T17:55:46.721720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.455421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.921457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:55:48.455645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:48.545632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.132500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.164449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.168379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.169924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" ... protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 Trying to start YDB, gRPC: 7288, MsgBus: 23178 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001883/r3tmp/tmpQsmham/pdisk_1.dat 2025-04-17T17:56:22.990436Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:23.012654Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:23.014326Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:23.014446Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:23.046159Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7288, node 4 2025-04-17T17:56:23.211137Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:23.211173Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:23.211185Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:23.211373Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23178 TClient is connected to server localhost:23178 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:24.084581Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:28.051290Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342500362554785:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.051396Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.088020Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T17:56:28.161887Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342500362554903:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.162052Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.166354Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342500362554909:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.170988Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-17T17:56:28.182770Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-17T17:56:28.183171Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494342500362554911:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:56:28.257479Z node 4 :TX_PROXY ERROR: Actor# [4:7494342500362554951:2405] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:29.052610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:29.908577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T17:56:30.766461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T17:56:31.650815Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T17:56:32.483404Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-17T17:56:33.516279Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:56:33.589747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:56:37.990353Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:56:37.990381Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:38.236035Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715705:0, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001883/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: 
Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001883/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) 2025-04-17 17:57:09,062 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-17 17:57:09,062 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown 2025-04-17 17:56:38,402 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:38,598 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 153951 46.1M 46.0M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/6873/001868/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/tes 154162 1.6G 1.6G 1.1G └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6873/001868/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unit Test command err: Trying to start YDB, gRPC: 11647, MsgBus: 16037 2025-04-17T17:55:40.460904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342296817046188:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:40.466827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001868/r3tmp/tmpyJDCo6/pdisk_1.dat 2025-04-17T17:55:41.122256Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:41.128080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:41.128159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:41.132610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11647, node 1 2025-04-17T17:55:41.306770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:41.306794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:41.306803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:41.306908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16037 TClient is connected to server localhost:16037 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:42.020252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:42.037425Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:44.227293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342313996915896:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.227386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.553620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:44.722909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342313996916020:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.723025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.724082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342313996916025:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:44.728229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:44.737173Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:55:44.737469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342313996916027:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:44.825516Z node 1 :TX_PROXY ERROR: Actor# [1:7494342313996916068:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:45.466450Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342296817046188:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:45.466541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:45.862954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:46.332735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:55:46.928358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.716044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:55:48.204244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:55:48.660216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:48.712367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.595284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.632846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.636348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-17T17:55:51.638592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: PO ... 
Bus: 25596 2025-04-17T17:56:22.204120Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342477807737777:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:22.204182Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001868/r3tmp/tmpzmkJVl/pdisk_1.dat 2025-04-17T17:56:22.596360Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:22.605889Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:22.606004Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:22.608302Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18600, node 4 2025-04-17T17:56:22.790423Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:22.790446Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:22.790456Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:22.790634Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25596 TClient is connected to server localhost:25596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:23.690433Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:27.206600Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342477807737777:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:27.206684Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:28.351624Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342503577542160:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.351734Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.379468Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:56:28.472772Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342503577542280:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.472867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.473098Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342503577542285:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:28.477466Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:56:28.490012Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494342503577542287:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:56:28.563029Z node 4 :TX_PROXY ERROR: Actor# [4:7494342503577542328:2408] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:29.289671Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:30.058337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:56:30.817243Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:56:31.577506Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:56:32.373712Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T17:56:34.006681Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:56:34.090519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:56:37.556891Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:56:37.556926Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:37.698594Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710706:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001868/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001868/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) 2025-04-17 17:57:09,304 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-17 17:57:09,304 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap >> CompressExecutor::TestReorderedExecutor [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2025-04-17T17:56:53.731151Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-17T17:56:53.731209Z 1 00h00m00.000000s 
:BS_NODE DEBUG: [1] Connect 2025-04-17T17:56:53.731301Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-17T17:56:53.731326Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-17T17:56:53.731380Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-17T17:56:53.731402Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-17T17:56:53.731438Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-17T17:56:53.731461Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-17T17:56:53.731519Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-17T17:56:53.731540Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-17T17:56:53.731572Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-17T17:56:53.731591Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-17T17:56:53.731625Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-17T17:56:53.731655Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-17T17:56:53.731700Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-17T17:56:53.731724Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-17T17:56:53.731770Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-17T17:56:53.731805Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-17T17:56:53.731849Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-17T17:56:53.731869Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-17T17:56:53.731917Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-17T17:56:53.731944Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-17T17:56:53.731982Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-17T17:56:53.732004Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-17T17:56:53.732037Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-17T17:56:53.732057Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-17T17:56:53.732100Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-17T17:56:53.732124Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-17T17:56:53.732169Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-17T17:56:53.732204Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-17T17:56:53.732267Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-17T17:56:53.732288Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-17T17:56:53.732322Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-17T17:56:53.732342Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-17T17:56:53.732380Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-17T17:56:53.732400Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-17T17:56:53.732449Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-17T17:56:53.732471Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-17T17:56:53.732505Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-17T17:56:53.732524Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-17T17:56:53.732565Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-17T17:56:53.732586Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-17T17:56:53.732615Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-17T17:56:53.732634Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-17T17:56:53.732664Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-17T17:56:53.732682Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-17T17:56:53.732712Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-17T17:56:53.732730Z 24 00h00m00.000000s 
:BS_NODE DEBUG: [24] Connect 2025-04-17T17:56:53.732778Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-17T17:56:53.732812Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-17T17:56:53.732859Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-17T17:56:53.732881Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-17T17:56:53.732931Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-17T17:56:53.732957Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-17T17:56:53.732994Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-17T17:56:53.733015Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-17T17:56:53.733054Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-17T17:56:53.733072Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-17T17:56:53.733111Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-17T17:56:53.733136Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-17T17:56:53.733178Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-17T17:56:53.733197Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-17T17:56:53.733228Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-17T17:56:53.733247Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-17T17:56:53.733282Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-17T17:56:53.733300Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-17T17:56:53.733358Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-17T17:56:53.733380Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-17T17:56:53.733426Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-17T17:56:53.733448Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-17T17:56:53.733480Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-17T17:56:53.733498Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-04-17T17:56:53.749981Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-17T17:56:53.751219Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-17T17:56:53.751296Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-17T17:56:53.751350Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-17T17:56:53.751389Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-17T17:56:53.751443Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-17T17:56:53.751478Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-17T17:56:53.751501Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-17T17:56:53.751525Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 
2025-04-17T17:56:53.751548Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-17T17:56:53.751572Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-17T17:56:53.751600Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-17T17:56:53.751623Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-17T17:56:53.751645Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-17T17:56:53.751672Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-17T17:56:53.751709Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-17T17:56:53.751734Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-17T17:56:53.751757Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-17T17:56:53.751781Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-17T17:56:53.751823Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-17T17:56:53.751847Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-17T17:56:53.751873Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-17T17:56:53.751896Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-17T17:56:53.751949Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-17T17:56:53.751991Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-17T17:56:53.752032Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-17T17:56:53.752067Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-17T17:56:53.752103Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 
2025-04-17T17:56:53.752161Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-17T17:56:53.752196Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-17T17:56:53.752232Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-17T17:56:53.752281Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-17T17:56:53.752327Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-17T17:56:53.752351Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-17T17:56:53.752373Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... NodeServiceSetUpdate 2025-04-17T17:57:06.456412Z 29 05h25m00.118432s :BS_NODE DEBUG: [29] VDiskId# [80000015:1:2:2:0] -> [80000015:2:2:2:0] 2025-04-17T17:57:06.456504Z 14 05h25m00.118432s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-17T17:57:06.456548Z 14 05h25m00.118432s :BS_NODE DEBUG: [14] VDiskId# [80000015:1:1:1:0] -> [80000015:2:1:1:0] 2025-04-17T17:57:06.456677Z 34 05h25m00.118432s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-04-17T17:57:06.456728Z 34 05h25m00.118432s :BS_NODE DEBUG: [34] VDiskId# [80000005:1:2:0:0] -> [80000005:2:2:0:0] 2025-04-17T17:57:06.456817Z 17 05h25m00.118432s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-17T17:57:06.456869Z 17 05h25m00.118432s :BS_NODE DEBUG: [17] VDiskId# [80000005:1:1:2:0] -> [80000005:2:1:2:0] 2025-04-17T17:57:06.456951Z 2 05h25m00.118432s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-04-17T17:57:06.456992Z 2 05h25m00.118432s :BS_NODE DEBUG: [2] VDiskId# [80000005:1:0:1:0] -> [80000005:2:0:1:0] 2025-04-17T17:57:06.457076Z 5 05h25m00.118432s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-17T17:57:06.457122Z 5 05h25m00.118432s :BS_NODE DEBUG: [5] VDiskId# [80000005:1:0:2:0] -> [80000005:2:0:2:0] 2025-04-17T17:57:06.457212Z 22 05h25m00.118432s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-17T17:57:06.457256Z 22 05h25m00.118432s :BS_NODE DEBUG: [22] VDiskId# [80000005:1:1:0:0] -> [80000005:2:1:0:0] 2025-04-17T17:57:06.457340Z 26 05h25m00.118432s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-04-17T17:57:06.457386Z 26 05h25m00.118432s :BS_NODE DEBUG: [26] VDiskId# [80000005:1:2:1:0] -> [80000005:2:2:1:0] 2025-04-17T17:57:06.457474Z 9 05h25m00.118432s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-17T17:57:06.457511Z 9 05h25m00.118432s :BS_NODE DEBUG: [9] VDiskId# [80000005:2:0:0:0] PDiskId# 1000 VSlotId# 1016 created 2025-04-17T17:57:06.457568Z 9 05h25m00.118432s :BS_NODE DEBUG: [9] VDiskId# [80000005:2:0:0:0] status changed to INIT_PENDING 2025-04-17T17:57:06.457640Z 10 05h25m00.118432s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.457714Z 29 05h25m00.118432s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-17T17:57:06.457762Z 29 05h25m00.118432s :BS_NODE DEBUG: [29] VDiskId# [80000005:1:2:2:0] -> [80000005:2:2:2:0] 2025-04-17T17:57:06.457853Z 14 05h25m00.118432s 
:BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-17T17:57:06.457901Z 14 05h25m00.118432s :BS_NODE DEBUG: [14] VDiskId# [80000005:1:1:1:0] -> [80000005:2:1:1:0] 2025-04-17T17:57:06.462868Z 9 05h25m01.874432s :BS_NODE DEBUG: [9] VDiskId# [80000075:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.463762Z 9 05h25m03.652432s :BS_NODE DEBUG: [9] VDiskId# [80000005:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.464532Z 9 05h25m03.957432s :BS_NODE DEBUG: [9] VDiskId# [80000015:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.480562Z 9 05h25m04.366432s :BS_NODE DEBUG: [9] VDiskId# [80000035:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.481644Z 9 05h25m04.694432s :BS_NODE DEBUG: [9] VDiskId# [80000055:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.483424Z 9 05h25m05.304432s :BS_NODE DEBUG: [9] VDiskId# [80000065:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.484244Z 9 05h25m05.359432s :BS_NODE DEBUG: [9] VDiskId# [80000025:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.485024Z 9 05h25m05.838432s :BS_NODE DEBUG: [9] VDiskId# [80000045:2:0:0:0] status changed to REPLICATING 2025-04-17T17:57:06.486161Z 9 05h25m13.089432s :BS_NODE DEBUG: [9] VDiskId# [80000045:2:0:0:0] status changed to READY 2025-04-17T17:57:06.487470Z 10 05h25m13.089944s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.487533Z 10 05h25m13.089944s :BS_NODE DEBUG: [10] VDiskId# [80000045:1:0:0:0] destroyed 2025-04-17T17:57:06.487709Z 9 05h25m14.069432s :BS_NODE DEBUG: [9] VDiskId# [80000035:2:0:0:0] status changed to READY 2025-04-17T17:57:06.488928Z 10 05h25m14.069944s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.488987Z 10 05h25m14.069944s :BS_NODE DEBUG: [10] VDiskId# [80000035:1:0:0:0] destroyed 2025-04-17T17:57:06.489962Z 9 05h25m17.110432s :BS_NODE DEBUG: [9] VDiskId# [80000015:2:0:0:0] status changed to READY 2025-04-17T17:57:06.491172Z 10 05h25m17.110944s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.491233Z 10 05h25m17.110944s :BS_NODE DEBUG: [10] VDiskId# [80000015:1:0:0:0] destroyed 2025-04-17T17:57:06.491733Z 9 05h25m22.438432s :BS_NODE DEBUG: [9] VDiskId# [80000075:2:0:0:0] status changed to READY 2025-04-17T17:57:06.492984Z 10 05h25m22.438944s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.493043Z 10 05h25m22.438944s :BS_NODE DEBUG: [10] VDiskId# [80000075:1:0:0:0] destroyed 2025-04-17T17:57:06.493206Z 9 05h25m23.116432s :BS_NODE DEBUG: [9] VDiskId# [80000005:2:0:0:0] status changed to READY 2025-04-17T17:57:06.494399Z 10 05h25m23.116944s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.494456Z 10 05h25m23.116944s :BS_NODE DEBUG: [10] VDiskId# [80000005:1:0:0:0] destroyed 2025-04-17T17:57:06.494611Z 9 05h25m24.469432s :BS_NODE DEBUG: [9] VDiskId# [80000055:2:0:0:0] status changed to READY 2025-04-17T17:57:06.495764Z 10 05h25m24.469944s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.495818Z 10 05h25m24.469944s :BS_NODE DEBUG: [10] VDiskId# [80000055:1:0:0:0] destroyed 2025-04-17T17:57:06.497451Z 9 05h25m31.939432s :BS_NODE DEBUG: [9] VDiskId# [80000065:2:0:0:0] status changed to READY 2025-04-17T17:57:06.498677Z 10 05h25m31.939944s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.498732Z 10 05h25m31.939944s :BS_NODE DEBUG: [10] VDiskId# [80000065:1:0:0:0] destroyed 2025-04-17T17:57:06.499455Z 9 05h25m37.251432s :BS_NODE DEBUG: [9] VDiskId# [80000025:2:0:0:0] status changed to READY 2025-04-17T17:57:06.500616Z 10 05h25m37.251944s :BS_NODE 
DEBUG: [10] NodeServiceSetUpdate 2025-04-17T17:57:06.500669Z 10 05h25m37.251944s :BS_NODE DEBUG: [10] VDiskId# [80000025:1:0:0:0] destroyed 2025-04-17T17:57:07.376047Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483706 2025-04-17T17:57:07.376775Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.376843Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:0:0:0] DiskIsOk# true 2025-04-17T17:57:07.376891Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.376924Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:0:1:0] DiskIsOk# true 2025-04-17T17:57:07.376959Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.376991Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:0:2:0] DiskIsOk# true 2025-04-17T17:57:07.377027Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.377059Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:1:1:0] DiskIsOk# true 2025-04-17T17:57:07.377092Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.377125Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:1:2:0] DiskIsOk# true 2025-04-17T17:57:07.377159Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.377191Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:2:0:0] DiskIsOk# true 2025-04-17T17:57:07.377224Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.377256Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:2:1:0] DiskIsOk# true 2025-04-17T17:57:07.377288Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-04-17T17:57:07.377320Z 1 05h45m00.118944s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:4:2:2:0] DiskIsOk# true 2025-04-17T17:57:07.392414Z 1 05h45m00.119456s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483706 Items# [8000003a:4:1:0:0]: 16:1000:1010 -> 20:1001:1015 ConfigTxSeqNo# 542 2025-04-17T17:57:07.392480Z 1 05h45m00.119456s :BS_SELFHEAL DEBUG: 
{BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483706 Success# true 2025-04-17T17:57:07.392680Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-04-17T17:57:07.392777Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] VDiskId# [8000003a:4:2:1:0] -> [8000003a:5:2:1:0] 2025-04-17T17:57:07.392920Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-17T17:57:07.392974Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [8000003a:5:1:0:0] PDiskId# 1001 VSlotId# 1015 created 2025-04-17T17:57:07.393067Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [8000003a:5:1:0:0] status changed to INIT_PENDING 2025-04-17T17:57:07.393181Z 3 05h45m00.119456s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-17T17:57:07.393243Z 3 05h45m00.119456s :BS_NODE DEBUG: [3] VDiskId# [8000003a:4:0:2:0] -> [8000003a:5:0:2:0] 2025-04-17T17:57:07.393344Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-17T17:57:07.393401Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [8000003a:4:1:1:0] -> [8000003a:5:1:1:0] 2025-04-17T17:57:07.393506Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-17T17:57:07.393566Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [8000003a:4:0:0:0] -> [8000003a:5:0:0:0] 2025-04-17T17:57:07.393672Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-17T17:57:07.393723Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] VDiskId# [8000003a:4:2:2:0] -> [8000003a:5:2:2:0] 2025-04-17T17:57:07.393814Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-04-17T17:57:07.393871Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] VDiskId# [8000003a:4:0:1:0] -> [8000003a:5:0:1:0] 2025-04-17T17:57:07.393969Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-17T17:57:07.394023Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] VDiskId# [8000003a:4:1:2:0] -> [8000003a:5:1:2:0] 2025-04-17T17:57:07.394124Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-17T17:57:07.394580Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] VDiskId# [8000003a:4:2:0:0] -> [8000003a:5:2:0:0] 2025-04-17T17:57:07.394714Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T17:57:07.397004Z 20 05h45m06.100456s :BS_NODE DEBUG: [20] VDiskId# [8000003a:5:1:0:0] status changed to REPLICATING 2025-04-17T17:57:07.399524Z 20 05h45m31.821456s :BS_NODE DEBUG: [20] VDiskId# [8000003a:5:1:0:0] status changed to READY 2025-04-17T17:57:07.400740Z 16 05h45m31.821968s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T17:57:07.400811Z 16 05h45m31.821968s :BS_NODE DEBUG: [16] VDiskId# [8000003a:4:1:0:0] destroyed ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown 2025-04-17 17:56:38,986 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:39,214 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 154037 46.0M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6873/001866/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/tes 154290 1.6G 1.6G 1.1G └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6873/001866/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unit Test command err: Trying to start YDB, gRPC: 19663, MsgBus: 2735 2025-04-17T17:55:41.470449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342302245766036:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:41.470512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001866/r3tmp/tmplCxm7J/pdisk_1.dat 2025-04-17T17:55:42.407898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:42.416159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:42.416243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:42.421079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19663, node 1 2025-04-17T17:55:42.682896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:42.682918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:42.682924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:42.683053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2735 TClient is connected to server localhost:2735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:43.459296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:43.475400Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:45.902995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342319425635652:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:45.903119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:46.212033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:46.400962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342323720603070:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:46.401059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:46.401623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342323720603075:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:46.405401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:46.420018Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:55:46.420229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342323720603078:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:46.470296Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342302245766036:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:46.470367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:46.502140Z node 1 :TX_PROXY ERROR: Actor# [1:7494342323720603119:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:47.280603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:47.750313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:55:48.367506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:55:49.030629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:55:49.557239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:55:50.167634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:50.261987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:53.032485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-17T17:55:53.063917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-17T17:55:53.067964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-17T17:55:53.069874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { ... s { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 Trying to start YDB, gRPC: 8565, MsgBus: 11241 2025-04-17T17:56:24.791528Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342485492989802:2176];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001866/r3tmp/tmpFparQ8/pdisk_1.dat 2025-04-17T17:56:24.966780Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:25.087504Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:25.123122Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:25.123230Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:25.131712Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8565, node 4 2025-04-17T17:56:25.350699Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:25.350725Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:25.350735Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:25.350876Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11241 TClient is connected to server localhost:11241 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:26.385170Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:29.752321Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342485492989802:2176];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:29.752403Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:30.511415Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342511262794103:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.511524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.558033Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:56:30.654233Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342511262794222:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.654343Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.654819Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342511262794228:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:30.663195Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:56:30.685719Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:56:30.686066Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494342511262794230:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:56:30.745733Z node 4 :TX_PROXY ERROR: Actor# [4:7494342511262794270:2405] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:31.618779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:56:32.544396Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:56:33.920056Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:56:34.832402Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T17:56:35.914222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:56:36.683511Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:56:36.748627Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001866/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001866/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) 2025-04-17 17:57:09,994 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-17 17:57:09,995 WARNING 
library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::ImportDataShouldHandleErrors >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> TDataShardTrace::TestTraceDistributedUpsert-UseSink >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-04-17T17:47:35.074227Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:35.292862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:35.369845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:35.370084Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:35.423943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:35.424845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:35.431473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:35.432181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:35.433315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:35.433412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:35.436538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:35.437022Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:35.437851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:35.437933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.438874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:35.438968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:35.495170Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:35.495314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:35.495374Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:35.495572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:35.495725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:35.495792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:35.495832Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:35.495920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:35.495971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:35.496008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:35.496035Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:35.496228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:35.496293Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:35.496335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:35.496366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:35.496447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:35.496501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:35.496563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:35.496603Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:35.496676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:35.496711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:35.496755Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:35.496808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:35.496850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:35.496876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:35.497299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-17T17:47:35.497401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-17T17:47:35.497472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T17:47:35.497542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-17T17:47:35.497714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:35.497773Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:35.497808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:35.497981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:35.498019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.498046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:35.498964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:35.499048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:35.499088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:35.499304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:35.499347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:35.499381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:35.499489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:35.499523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:35.499570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
O: tablet_id=9437184;self_id=[1:10456:12417];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T17:57:05.468250Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T17:57:05.468382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-04-17T17:57:05.469065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=588; 2025-04-17T17:57:05.469126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=679; 2025-04-17T17:57:05.476599Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T17:57:05.476730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-04-17T17:57:05.508620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=31738; 2025-04-17T17:57:05.541739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=31522; 2025-04-17T17:57:05.541876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=33130; 2025-04-17T17:57:05.542101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=145; 2025-04-17T17:57:05.542280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=119; 2025-04-17T17:57:05.542496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=161; 2025-04-17T17:57:05.542678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=132; 2025-04-17T17:57:05.542926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=190; 2025-04-17T17:57:05.542994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=66177; 2025-04-17T17:57:05.550610Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T17:57:05.550728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-04-17T17:57:05.556471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5627; 2025-04-17T17:57:05.595854Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=39240; 2025-04-17T17:57:05.596059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=52; 2025-04-17T17:57:05.596156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-04-17T17:57:05.596383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-04-17T17:57:05.596459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-17T17:57:05.596569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2025-04-17T17:57:05.596681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=63; 2025-04-17T17:57:05.596754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-04-17T17:57:05.596876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=76; 2025-04-17T17:57:05.596936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-17T17:57:05.597022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2025-04-17T17:57:05.597174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=81; 2025-04-17T17:57:05.597298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=74; 2025-04-17T17:57:05.597358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=46562; 2025-04-17T17:57:05.597646Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961132;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T17:57:05.599052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T17:57:05.599165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T17:57:05.599286Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T17:57:05.599374Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T17:57:05.599682Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:57:05.599787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:57:05.600071Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-17T17:57:05.600165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744911760637;tx_id=18446744073709551615;;current_snapshot_ts=1744912056279; 2025-04-17T17:57:05.600246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:57:05.600333Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:57:05.600388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:57:05.600542Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T17:57:05.607105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:57:05.613292Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T17:57:05.618691Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T17:57:05.618773Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T17:57:05.618838Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T17:57:05.618918Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T17:57:05.619025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T17:57:05.619376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-17T17:57:05.619504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744911760637;tx_id=18446744073709551615;;current_snapshot_ts=1744912056279; 2025-04-17T17:57:05.619591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T17:57:05.619679Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T17:57:05.619762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T17:57:05.619882Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-17T17:57:05.619978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; >> BackupRestore::TestAllPrimitiveTypes-INT64 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue >> KqpExplain::Explain [GOOD] >> KqpExplain::CompoundKeyRange >> DemoTx::Scenario_1 >> BackupRestoreS3::TestAllPrimitiveTypes-UINT64 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP >> KqpQuery::Now [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> TPersQueueTest::UpdatePartitionLocation >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown 2025-04-17 17:56:44,326 WARNING 
devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:44,621 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 155111 46.1M 45.9M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/6873/001854/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/tes 155195 1.6G 1.6G 1.1G └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6873/001854/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unit Test command err: Trying to start YDB, gRPC: 19854, MsgBus: 14226 2025-04-17T17:55:46.171665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342322358470647:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:46.172422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001854/r3tmp/tmpOH8Ir7/pdisk_1.dat 2025-04-17T17:55:46.751698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:46.804909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:46.805003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:46.812248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19854, node 1 2025-04-17T17:55:46.968995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:46.969020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:46.969027Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:46.969135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14226 TClient is connected to server localhost:14226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:47.853112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:47.888873Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:50.691068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342339538340335:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:50.691194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:51.104890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:51.158359Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342322358470647:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:51.158419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:51.272430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342343833307754:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:51.272507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:51.272800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342343833307760:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:51.276585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:51.290684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342343833307762:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:51.350134Z node 1 :TX_PROXY ERROR: Actor# [1:7494342343833307802:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:52.224118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:52.760549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-17T17:55:53.756258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:55:54.394106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.172914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.693674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:55.732060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.821075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.861762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.863495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.865163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { ... { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 27194, MsgBus: 4468 2025-04-17T17:56:33.577067Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342522899953238:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:33.577140Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001854/r3tmp/tmpROlbKj/pdisk_1.dat 2025-04-17T17:56:33.899158Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:33.942686Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:33.942797Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:33.946972Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27194, node 4 2025-04-17T17:56:34.138975Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:34.139002Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:34.139011Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:34.139149Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4468 TClient is connected to server localhost:4468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:56:35.104950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:56:38.563551Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342522899953238:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:38.563643Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:40.959762Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342552964724984:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.959869Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:41.011882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:56:41.081674Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342557259692399:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:41.081863Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:41.082576Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342557259692405:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:41.087401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:56:41.102997Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494342557259692407:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:56:41.165247Z node 4 :TX_PROXY ERROR: Actor# [4:7494342557259692447:2410] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:41.939694Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:42.771331Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:56:43.504895Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001854/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001854/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) 2025-04-17 17:57:15,364 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-17 17:57:15,365 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> KqpBatchDelete::ManyPartitions_2 >> KqpBatchUpdate::Large_1 >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpLimits::BigParameter >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 >> 
TPartitionWriterCacheActorTests::WriteReplyOrder >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> TPersQueueTest::SchemeshardRestart ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown 2025-04-17 17:56:46,715 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:56:46,997 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 155262 46.1M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/6873/001845/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/tes 155332 1.6G 1.6G 1.1G └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/6873/001845/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unit Test command err: Trying to start YDB, gRPC: 12920, MsgBus: 30566 2025-04-17T17:55:48.498395Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342332188244445:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:48.498458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001845/r3tmp/tmpPv43EU/pdisk_1.dat 2025-04-17T17:55:49.034058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:49.051346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:49.051433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12920, node 1 2025-04-17T17:55:49.056452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:49.134790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:49.134815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:49.134821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:49.134937Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30566 TClient is connected to server localhost:30566 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:49.793348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:49.835419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:52.737812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342349368114289:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:52.737962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:53.111166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T17:55:53.340037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342353663081707:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:53.340140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:53.341178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342353663081713:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:53.344885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T17:55:53.365511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342353663081715:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T17:55:53.442705Z node 1 :TX_PROXY ERROR: Actor# [1:7494342353663081756:2404] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:53.494628Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342332188244445:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:53.494742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:54.236941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:54.693510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T17:55:55.231382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:55:55.743201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T17:55:56.291365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T17:55:56.811431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T17:55:56.871327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.808571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.852847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.854483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-17T17:55:58.857182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } databa ... ource_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 Trying to start YDB, gRPC: 1065, MsgBus: 20912 2025-04-17T17:56:30.779246Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342512201081745:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:30.779289Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001845/r3tmp/tmpqq77Ng/pdisk_1.dat 2025-04-17T17:56:31.081427Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:31.096712Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:31.096812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:31.099815Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1065, node 4 2025-04-17T17:56:31.286990Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:31.287019Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:31.287028Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:31.287526Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20912 TClient is connected to server localhost:20912 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:32.033855Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:32.053538Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:56:35.782346Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342512201081745:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:35.782452Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:37.879508Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342542265853478:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:37.879620Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:37.917881Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T17:56:38.045648Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342546560820893:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:38.045770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:38.046234Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342546560820899:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:38.059343Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-17T17:56:38.123210Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494342546560820901:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:56:38.189945Z node 4 :TX_PROXY ERROR: Actor# [4:7494342546560820942:2411] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:39.179116Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.299791Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T17:56:41.313561Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T17:56:42.061310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T17:56:42.984005Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-17T17:56:43.628165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:56:43.712272Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T17:56:46.058299Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:56:46.058335Z node 4 :IMPORT WARN: Table profiles were not loaded Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001845/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', 
'@/home/runner/.ya/build/build_root/6873/001845/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) 2025-04-17 17:57:17,766 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-17 17:57:17,766 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-04-17T17:57:14.077844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:14.078449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:14.078522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d0a/r3tmp/tmpMygYav/pdisk_1.dat 2025-04-17T17:57:14.469992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:14.522449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:14.569538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:14.569712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:14.581399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:14.694009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:16.835819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:16.835933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:16.836358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:16.841751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:57:16.866778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:57:17.068195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:57:17.161027Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:17.694515Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2ebpx11xm704qk96wng4vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI2ZWE1ZTctYjIxYWVlMmQtOGIzNmIwOTctODUyYmE0NTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:57:17.915273Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2ebqstf6f6eqj4s0t0khp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRjMDU5M2QtYzA2ZTAzZjItM2ZiODgyMGQtMjc2ZTgxMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:57:18.174113Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2ebqzs3r3y6kyzntt873bs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmUxNTlkYzgtYWE5ZDFhM2QtMzMxZGJkMzgtNTYyNDNjOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INTERVAL >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService-UseSink >> TPersQueueTest::WriteExisting >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> BackupRestore::ImportDataShouldHandleErrors [GOOD] >> BackupRestore::BackupUuid >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SysViewCancelled >> KqpExplain::CompoundKeyRange [GOOD] >> KqpExplain::CreateTableAs+Stats >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: 2025-04-17T17:57:16.153827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:16.154526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:16.154611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c68/r3tmp/tmpJ9y6Lf/pdisk_1.dat 2025-04-17T17:57:16.532626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:16.596711Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:16.650310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:16.650460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:16.665505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:16.768473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:18.962604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:18.962718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:18.963110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:18.967965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:57:18.994249Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:57:19.185774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:57:19.286444Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:19.671379Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2ebrzg8ftcp6p544zk9r65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzllMjlmZWQtNzVhYzRjZDctNGQ1M2E4NWUtYzU1MDFhYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) >> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableSplitBoundaries >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> DataShardWrite::UpsertImmediate >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> DataShardWrite::WriteImmediateBadRequest >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> WithSDK::DescribeConsumer >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2025-04-17T17:56:47.086418Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.086452Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.086476Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.088522Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:56:47.103202Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.103395Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.106500Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.107118Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.108818Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.114342Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:47.114420Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:56:47.115408Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.115443Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.115471Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.116686Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:47.126674Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.126875Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.130537Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.131138Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.131285Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.131390Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:47.131448Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:56:47.132718Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.138264Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.138320Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.150482Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:47.158531Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.158715Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.162431Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.163434Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.163703Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.164146Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-17T17:56:47.164225Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:56:47.165403Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.165425Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.166907Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.167389Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:47.182460Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.182654Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.183005Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.184646Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.186558Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.190310Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:47.190383Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:56:47.195205Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.195238Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.195262Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.206458Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:47.209486Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.209702Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.210388Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.210855Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.210980Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.211064Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:47.211111Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:56:47.211873Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.211897Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.211921Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.222456Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T17:56:47.234479Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.234681Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.238472Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.239011Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.239969Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.240160Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:47.240230Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:56:47.241513Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.241543Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.241570Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.254371Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:47.255231Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.255369Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.258433Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.286964Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.290372Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.294661Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T17:56:47.294742Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-17T17:56:47.299538Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.299572Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.299596Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:47.310625Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T17:56:47.323156Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T17:56:47.323395Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.326445Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T17:56:47.328167Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:47.328628Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T17:56:47.328742Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-17T17:56:47.328792Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-17T17:56:47.370545Z :ReadSession INFO: Random seed for debugging is 1744912607370502 2025-04-17T17:56:47.816419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342581835616241:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:47.821829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:47.892891Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342584451285795:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:47.892962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... 00 2025-04-17T17:57:11.509326Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 response to read: guid# 5434985-187c7fc5-476f2017-18290179 2025-04-17T17:57:11.509506Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 Process answer. Aval parts: 0 2025-04-17T17:57:11.510448Z :DEBUG: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:57:11.510592Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-17T17:57:11.510658Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-17T17:57:11.494000Z WriteTime: 2025-04-17T17:57:11.494000Z Ip: "ipv6:[::1]:58942" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:58942" } } } 2025-04-17T17:57:11.510827Z :DEBUG: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-04-17T17:57:11.511033Z :DEBUG: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-17T17:57:11.511623Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 grpc read done: success# 1, data# { read { } } 2025-04-17T17:57:11.511718Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 got read request: guid# d5e005-fb16c389-3eaac169-da61ec60 2025-04-17T17:57:11.511909Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2025-04-17T17:57:11.511997Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2025-04-17T17:57:11.514631Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:57:11.514663Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:57:11.514762Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_3017476688732539145_v1 2025-04-17T17:57:11.514875Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:57:11.514890Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:57:11.514905Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:57:11.514920Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:57:11.514931Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:57:11.514942Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:57:11.514955Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:57:11.514970Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:57:11.514996Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:57:11.523051Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:57:11.523112Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:57:11.523161Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-04-17T17:57:11.523885Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2025-04-17T17:57:11.523933Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2025-04-17T17:57:11.523979Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2025-04-17T17:57:11.524969Z :DEBUG: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2025-04-17T17:57:11.599885Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0] Write session will now close 2025-04-17T17:57:11.599953Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0] Write session: aborting 2025-04-17T17:57:11.600481Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:57:11.600535Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0] Write session: destroy 2025-04-17T17:57:11.605320Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0 grpc read done: success: 0 data: 2025-04-17T17:57:11.605346Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0 grpc read failed 2025-04-17T17:57:11.605386Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0 grpc closed 2025-04-17T17:57:11.605403Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|330a25b9-a1ad332f-e0011fb9-b2a5086f_0 is DEAD 2025-04-17T17:57:11.606005Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:57:11.606311Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7494342684914834459:2653] destroyed 2025-04-17T17:57:11.606357Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-17T17:57:13.934307Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-17T17:57:21.507557Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-17T17:57:21.614274Z :INFO: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] Closing read session. Close timeout: 0.000000s 2025-04-17T17:57:21.614386Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-17T17:57:21.614444Z :INFO: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16802 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:57:21.614565Z :NOTICE: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T17:57:21.614632Z :DEBUG: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] [dc1] Abort session to cluster 2025-04-17T17:57:21.615243Z :NOTICE: [/Root] [/Root] [689c9e69-f2a45da9-a2c415f8-968d0f43] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:57:21.623742Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 grpc read done: success# 0, data# { } 2025-04-17T17:57:21.623776Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 grpc read failed 2025-04-17T17:57:21.623818Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 grpc closed 2025-04-17T17:57:21.623852Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3017476688732539145_v1 is DEAD 2025-04-17T17:57:21.630919Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7494342654850062871:2558] disconnected; active server actors: 1 2025-04-17T17:57:21.630953Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7494342654850062871:2558] client user disconnected session shared/user_1_1_3017476688732539145_v1 2025-04-17T17:57:21.634665Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_3017476688732539145_v1 2025-04-17T17:57:21.634733Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7494342654850062876:2562] destroyed 2025-04-17T17:57:21.634797Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_3017476688732539145_v1 2025-04-17T17:57:22.150381Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494342732159475273:2755] TxId: 281474976710725. Ctx: { TraceId: 01js2ebve0cdwdeqsn3y7kbtft, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODlmM2JlYTctYzFjYjMzOGEtZmY2NzMwZWUtYzk0MWRmNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-17T17:57:22.151128Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494342732159475284:2764], TxId: 281474976710725, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODlmM2JlYTctYzFjYjMzOGEtZmY2NzMwZWUtYzk0MWRmNDA=. TraceId : 01js2ebve0cdwdeqsn3y7kbtft. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494342732159475273:2755], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T17:57:22.151493Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494342732159475285:2765], TxId: 281474976710725, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODlmM2JlYTctYzFjYjMzOGEtZmY2NzMwZWUtYzk0MWRmNDA=. TraceId : 01js2ebve0cdwdeqsn3y7kbtft. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494342732159475273:2755], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication >> KqpLimits::BigParameter [GOOD] >> KqpLimits::AffectedShardsLimit >> KqpStats::JoinStatsBasicScan [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INTERVAL [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TZ_DATE [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TZ_DATETIME [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TZ_TIMESTAMP [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 19922, MsgBus: 12984 2025-04-17T17:56:57.272475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342626647661693:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:57.272938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b52/r3tmp/tmpCCcGtn/pdisk_1.dat 2025-04-17T17:56:57.945511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:57.961274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:57.961356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:57.965742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19922, node 1 2025-04-17T17:56:58.197428Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:58.197461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:58.197470Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:58.197604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12984 TClient is connected to server localhost:12984 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:59.249651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:59.266333Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:56:59.275629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:59.451949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:59.836998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:56:59.965839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:57:02.271163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342626647661693:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:02.271221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:02.327140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342648122499809:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:02.327251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:02.864267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:02.923195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:03.005059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:03.056704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:03.109316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:03.174931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:03.262331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:03.342118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342652417467784:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:03.342203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:03.342526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342652417467789:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:03.346561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:03.357548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342652417467791:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:03.457515Z node 1 :TX_PROXY ERROR: Actor# [1:7494342652417467849:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23582, MsgBus: 29200 2025-04-17T17:57:06.900671Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342664523374596:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:06.935095Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b52/r3tmp/tmpnfCukp/pdisk_1.dat 2025-04-17T17:57:07.208980Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:07.265938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:07.266029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:07.270848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23582, node 2 2025-04-17T17:57:07.518326Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:07.518348Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:07.518358Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:07.518477Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29200 TClient is connected to server localhost:29200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:08.695267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.716420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:57:08.818238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:09.144751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.902323Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342664523374596:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:11.902385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:11.961241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.004199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.083014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.170485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.249495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.351776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342690293180791:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:12.351877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:12.352145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342690293180796:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:12.356238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:12.374380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342690293180798:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:12.464990Z node 2 :TX_PROXY ERROR: Actor# [2:7494342690293180853:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27296, MsgBus: 14601 2025-04-17T17:57:15.807052Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342704322215722:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:15.807092Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b52/r3tmp/tmp76LtXS/pdisk_1.dat 2025-04-17T17:57:16.101888Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:16.128253Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:16.128350Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:16.135237Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27296, node 3 2025-04-17T17:57:16.290733Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:16.290754Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:16.290761Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:16.290880Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14601 TClient is connected to server localhost:14601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:16.993707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:57:17.002980Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:17.012275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:17.125658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:17.309481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:17.403165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:20.057941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342725797053966:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.058050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.118548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.161159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.234865Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.322950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.368447Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.448501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.509294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.597757Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342725797054632:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.597865Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.598154Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342725797054637:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.603495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:20.626822Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494342725797054639:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:20.685230Z node 3 :TX_PROXY ERROR: Actor# [3:7494342725797054696:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:20.807648Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494342704322215722:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:20.807751Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:25.888674Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912643096, txId: 281474976710672] shutting down >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> DemoTx::Scenario_1 [GOOD] >> BsControllerConfig::MergeBoxes [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 15628, MsgBus: 7667 2025-04-17T17:56:58.232384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342629598020570:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:58.248155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b35/r3tmp/tmpA26tpD/pdisk_1.dat 2025-04-17T17:56:59.043823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:59.085337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:59.085441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:59.097152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15628, node 1 2025-04-17T17:56:59.400997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:59.401021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:59.401033Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:59.401157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7667 TClient is connected to server localhost:7667 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:00.375429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:00.390761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:03.235657Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342629598020570:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:03.235741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:03.490007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342651072857573:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:03.490231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342651072857600:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:03.490327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:03.495845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:57:03.515040Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:57:03.521647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342651072857602:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:57:03.630439Z node 1 :TX_PROXY ERROR: Actor# [1:7494342651072857654:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:04.150252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:57:04.925939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:05.476087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:57:05.476314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:57:05.476572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:57:05.476679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:57:05.476796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:57:05.477266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:57:05.477373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:57:05.477468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:57:05.482393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:57:05.482572Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:57:05.482682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:57:05.482803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494342659662792577:2371];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:57:05.694872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:57:05.694929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:57:05.695028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:57:05.695056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:57:05.695230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:57:05.695260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:57:05.695339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:57:05.695363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:57:05.695418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:57:05.695438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:57:05.695473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:57:05.695495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:57:05.696149Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:57:05.696205Z node 1 :TX ... pp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342702663421734:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:15.579693Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b35/r3tmp/tmpC4EzrE/pdisk_1.dat 2025-04-17T17:57:15.753475Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:15.765753Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:15.765831Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:15.767168Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19475, node 3 2025-04-17T17:57:15.818682Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:15.818705Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:15.818713Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:15.818835Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20784 TClient is connected to server localhost:20784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:16.332584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:57:16.346641Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:57:16.363433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:57:16.426557Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-17T17:57:16.448981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T17:57:19.353675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342719843291623:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:19.353777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:19.354048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342719843291635:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:19.358320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T17:57:19.371574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494342719843291637:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T17:57:19.449054Z node 3 :TX_PROXY ERROR: Actor# [3:7494342719843291688:2365] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:19.499293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:19.840853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11765, MsgBus: 7614 2025-04-17T17:57:21.447209Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342730682543836:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:21.534288Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b35/r3tmp/tmpXgHmGy/pdisk_1.dat 2025-04-17T17:57:21.693116Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:21.744586Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:21.744696Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:21.751205Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11765, node 4 2025-04-17T17:57:21.914762Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:21.914790Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:21.914798Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:21.914918Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7614 TClient is connected to server localhost:7614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:57:22.693026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:22.708553Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:57:22.741019Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T17:57:22.758982Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.774978Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342747862413577:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:25.775041Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342747862413585:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:25.775096Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:25.779358Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:57:25.795783Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494342747862413591:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:57:25.882669Z node 4 :TX_PROXY ERROR: Actor# [4:7494342747862413642:2369] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:25.907591Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:26.148805Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:2, at schemeshard: 72057594046644480 2025-04-17T17:57:26.421693Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342730682543836:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:26.421763Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-17T17:57:08.502518Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342672451573078:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:08.506066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ef/r3tmp/tmpiZUjnx/pdisk_1.dat 2025-04-17T17:57:09.136371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:09.176655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:09.176737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:09.186549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14016 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T17:57:09.537366Z node 1 :TX_PROXY DEBUG: actor# [1:7494342672451573309:2116] Handle TEvNavigate describe path dc-1 2025-04-17T17:57:09.537417Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342676746541078:2438] HANDLE EvNavigateScheme dc-1 2025-04-17T17:57:09.537526Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494342672451573334:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:09.537578Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494342672451573334:2130], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T17:57:09.537787Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:57:09.540025Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572986:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494342676746541083:2439] 2025-04-17T17:57:09.540100Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494342672451572986:2050] Subscribe: subscriber# [1:7494342676746541083:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:57:09.540178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572989:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494342676746541084:2439] 2025-04-17T17:57:09.540195Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494342672451572989:2053] Subscribe: subscriber# [1:7494342676746541084:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:57:09.540266Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494342672451573334:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:09.540322Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572992:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494342676746541085:2439] 2025-04-17T17:57:09.540341Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494342672451572992:2056] Subscribe: subscriber# [1:7494342676746541085:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:57:09.540501Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541083:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494342672451572986:2050] 2025-04-17T17:57:09.540525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541084:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494342672451572989:2053] 2025-04-17T17:57:09.540546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541085:2439][/dc-1] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494342672451572992:2056] 2025-04-17T17:57:09.540597Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494342676746541080:2439] 2025-04-17T17:57:09.540626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494342676746541081:2439] 2025-04-17T17:57:09.540676Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494342676746541079:2439][/dc-1] Set up state: owner# [1:7494342672451573334:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:57:09.540875Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494342676746541082:2439] 2025-04-17T17:57:09.540924Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494342676746541079:2439][/dc-1] Path was already updated: owner# [1:7494342672451573334:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:57:09.540963Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541083:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494342676746541080:2439], cookie# 1 2025-04-17T17:57:09.540981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541084:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494342676746541081:2439], cookie# 1 2025-04-17T17:57:09.540996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541085:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494342676746541082:2439], cookie# 1 2025-04-17T17:57:09.541068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572986:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494342676746541083:2439] 2025-04-17T17:57:09.541096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572986:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494342676746541083:2439], cookie# 1 2025-04-17T17:57:09.541119Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572989:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494342676746541084:2439] 2025-04-17T17:57:09.541135Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572989:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494342676746541084:2439], cookie# 1 2025-04-17T17:57:09.541168Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572992:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494342676746541085:2439] 
2025-04-17T17:57:09.541182Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494342672451572992:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494342676746541085:2439], cookie# 1 2025-04-17T17:57:09.541956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541083:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494342672451572986:2050], cookie# 1 2025-04-17T17:57:09.541975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541084:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494342672451572989:2053], cookie# 1 2025-04-17T17:57:09.541990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494342676746541085:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494342672451572992:2056], cookie# 1 2025-04-17T17:57:09.542040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494342676746541080:2439], cookie# 1 2025-04-17T17:57:09.542081Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:57:09.542099Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494342676746541081:2439], cookie# 1 2025-04-17T17:57:09.542141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:57:09.542168Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494342676746541082:2439], cookie# 1 2025-04-17T17:57:09.542201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494342676746541079:2439][/dc-1] Unexpected sync response: sender# [1:7494342676746541082:2439], cookie# 1 2025-04-17T17:57:09.655177Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494342672451573334:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { 
Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T17:57:09.655567Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494342672451573334:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPath ... ts, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:57:26.495138Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7494342735166923495:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [5:7494342752346793905:2759] 2025-04-17T17:57:26.495145Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7494342735166923495:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-17T17:57:26.495164Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7494342735166923495:2056] Subscribe: subscriber# [5:7494342752346793905:2759], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:57:26.495205Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7494342752346793903:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7494342735166923489:2050] 2025-04-17T17:57:26.495223Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7494342752346793904:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7494342735166923492:2053] 2025-04-17T17:57:26.495239Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7494342752346793905:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7494342735166923495:2056] 2025-04-17T17:57:26.495280Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7494342752346793893:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7494342752346793898:2759] 2025-04-17T17:57:26.495316Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7494342752346793893:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7494342752346793901:2759] 2025-04-17T17:57:26.495348Z node 5 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][5:7494342752346793893:2759][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [5:7494342735166923834:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:57:26.495418Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7494342752346793893:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7494342752346793902:2759] 2025-04-17T17:57:26.495444Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: [main][5:7494342752346793893:2759][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [5:7494342735166923834:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:57:26.495473Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7494342735166923489:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7494342752346793903:2759] 2025-04-17T17:57:26.495485Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7494342735166923492:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7494342752346793904:2759] 2025-04-17T17:57:26.495497Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7494342735166923495:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7494342752346793905:2759] 2025-04-17T17:57:26.495556Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7494342735166923834:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-17T17:57:26.495622Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7494342735166923834:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [5:7494342752346793893:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:57:26.495705Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7494342735166923834:2127], cacheItem# { Subscriber: { Subscriber: [5:7494342752346793893:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:57:26.495792Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7494342752346793906:2760], recipient# [5:7494342752346793889:2315], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:26.882583Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7494342735166923834:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:26.882720Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7494342735166923834:2127], cacheItem# { Subscriber: { Subscriber: 
[5:7494342739461891765:2579] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:57:26.882819Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7494342752346793908:2761], recipient# [5:7494342752346793907:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:27.480590Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7494342735166923834:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:27.480776Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7494342735166923834:2127], cacheItem# { Subscriber: { Subscriber: [5:7494342752346793873:2756] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:57:27.480884Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7494342756641761215:2765], recipient# [5:7494342756641761214:2318], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:27.826560Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494342735166923657:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:27.826652Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:27.890368Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7494342735166923834:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:27.890530Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7494342735166923834:2127], cacheItem# { Subscriber: { Subscriber: [5:7494342739461891765:2579] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:57:27.890638Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7494342756641761219:2768], recipient# [5:7494342756641761218:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> DataShardWrite::UpsertPrepared+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10813:2166] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10813:2166] Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11017:2156] recipient: [1:10813:2166] 2025-04-17T17:56:07.696200Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T17:56:07.705278Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T17:56:07.705853Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T17:56:07.708698Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:56:07.709291Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T17:56:07.710938Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:07.710994Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T17:56:07.711592Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T17:56:07.723398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T17:56:07.723556Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T17:56:07.723749Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T17:56:07.723874Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue 
Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:07.724007Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T17:56:07.724116Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11042:2156] recipient: [1:110:2157] 2025-04-17T17:56:07.738506Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T17:56:07.738676Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:07.753470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T17:56:07.753631Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:07.753752Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T17:56:07.753842Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:07.754028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T17:56:07.754097Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:07.754132Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T17:56:07.754225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:07.771321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T17:56:07.771479Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:07.782469Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T17:56:07.782662Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T17:56:07.784138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T17:56:07.784207Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T17:56:07.784449Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console 
connection service started 2025-04-17T17:56:07.784520Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T17:56:07.817205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: 
"::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... 
9} Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2025-04-17T17:57:21.482971Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2025-04-17T17:57:21.483006Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2025-04-17T17:57:21.483037Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2025-04-17T17:57:21.483065Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2025-04-17T17:57:21.483096Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-04-17T17:57:21.483130Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2025-04-17T17:57:21.483159Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2025-04-17T17:57:21.483191Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-04-17T17:57:21.483220Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2025-04-17T17:57:21.483251Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2025-04-17T17:57:21.483285Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-04-17T17:57:21.483338Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2025-04-17T17:57:21.485635Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2025-04-17T17:57:21.485686Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-04-17T17:57:21.485718Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2025-04-17T17:57:21.485750Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2025-04-17T17:57:21.485781Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-04-17T17:57:21.485817Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2025-04-17T17:57:21.485849Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2025-04-17T17:57:21.485883Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-04-17T17:57:21.485918Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2025-04-17T17:57:21.485950Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2025-04-17T17:57:21.485981Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-04-17T17:57:21.486014Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2025-04-17T17:57:21.486045Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2025-04-17T17:57:21.486079Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-04-17T17:57:21.486113Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2025-04-17T17:57:21.486146Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2025-04-17T17:57:21.486195Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-04-17T17:57:21.486257Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2025-04-17T17:57:21.486292Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2025-04-17T17:57:21.486323Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2025-04-17T17:57:21.486353Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2025-04-17T17:57:21.486385Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2025-04-17T17:57:21.486419Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-04-17T17:57:21.486450Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2025-04-17T17:57:21.486481Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2025-04-17T17:57:21.486511Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-04-17T17:57:21.486553Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2025-04-17T17:57:21.486588Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2025-04-17T17:57:21.486619Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2025-04-17T17:57:21.486651Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2025-04-17T17:57:21.486679Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2025-04-17T17:57:21.486709Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-04-17T17:57:21.486743Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2025-04-17T17:57:21.486774Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2025-04-17T17:57:21.486806Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-04-17T17:57:21.486836Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2025-04-17T17:57:21.486867Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2025-04-17T17:57:21.486900Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2025-04-17T17:57:21.486932Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-04-17T17:57:21.486967Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2025-04-17T17:57:21.487002Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2025-04-17T17:57:21.487036Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2025-04-17T17:57:21.487068Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2025-04-17T17:57:21.487099Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-04-17T17:57:21.487132Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-04-17T17:57:21.487163Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2025-04-17T17:57:21.487195Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2025-04-17T17:57:21.487225Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-04-17T17:57:21.487256Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2025-04-17T17:57:21.487289Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-04-17T17:57:21.487320Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-04-17T17:57:21.487351Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-04-17T17:57:21.487385Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-04-17T17:57:21.487417Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-04-17T17:57:21.487449Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-04-17T17:57:21.487480Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-04-17T17:57:21.487511Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-04-17T17:57:21.487541Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-04-17T17:57:21.487571Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-04-17T17:57:21.487601Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-04-17T17:57:21.487631Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-04-17T17:57:21.487661Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-04-17T17:57:21.487693Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-04-17T17:57:21.808564Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.330501s 2025-04-17T17:57:21.808761Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.330719s 2025-04-17T17:57:21.854109Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-04-17T17:57:21.879626Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> DemoTx::Scenario_2 |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |81.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset >> BackupRestore::BackupUuid [GOOD] >> BackupRestore::RestoreViewQueryText ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:234:2152] sender: [1:235:2060] recipient: [1:218:2142] 2025-04-17T17:55:23.500832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:55:23.500921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:23.500955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:55:23.500991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:55:23.501055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:55:23.501083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:55:23.501136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:55:23.501218Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:55:23.501568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:55:23.622214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:55:23.622276Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:23.630260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:55:23.630390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:55:23.630568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:55:23.654358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:55:23.654730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:55:23.655374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:23.655939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:55:23.661076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:23.663142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:23.663216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:23.664973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:55:23.665049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:23.665102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:55:23.669063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.677176Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:234:2152] sender: [1:349:2060] recipient: [1:17:2064] 2025-04-17T17:55:23.859448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:55:23.859719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.859948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:55:23.860166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:55:23.860225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T17:55:23.865416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:23.865677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:55:23.865878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.865947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:55:23.865984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:55:23.866012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:55:23.875252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.875365Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:55:23.875409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:55:23.877743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.877800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.877836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:23.877886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:55:23.881599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:55:23.883790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:55:23.884024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:55:23.885159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:55:23.885295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:55:23.885357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:23.885687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-04-17T17:55:23.885749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:55:23.885923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:55:23.885998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:55:23.888263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:55:23.888304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:55:23.888514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:55:23.888558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:312:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:55:23.888971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:55:23.889020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:55:23.889102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:23.889175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:23.889213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:55:23.889241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:23.889278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:55:23.889316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:55:23.889347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:55:23.889381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:55:23.889453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:55:23.889491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:55:23.889522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:55:23.891339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:55:23.891445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:55:23.891482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:25.910603Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:25.910680Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:25.910707Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:26.294459Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:26.294538Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:26.294613Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:26.294639Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:26.691517Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:26.691597Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:26.691664Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:26.691690Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:27.082477Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:27.082551Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:27.082619Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:27.082645Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:27.514474Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:27.514555Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:27.514633Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:27.514660Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:27.931621Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:27.931701Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:27.931777Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:27.931802Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:28.355677Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:28.355758Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:28.355842Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:28.355890Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:28.767273Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:28.767362Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:28.767443Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:28.767470Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:29.146192Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:29.146290Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:29.146379Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:29.146407Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:29.558635Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:29.558711Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:57:29.558783Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:29.558812Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:57:29.611011Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1079:2837], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath 
Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-17T17:57:29.611092Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:57:29.611225Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:57:29.611463Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 219us result status StatusPathDoesNotExist 2025-04-17T17:57:29.611617Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:57:29.612107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1080:2838], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-17T17:57:29.612160Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:57:29.612259Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:57:29.612451Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 193us result status StatusPathDoesNotExist 2025-04-17T17:57:29.612575Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:57:29.613017Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1081:2839], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 
2025-04-17T17:57:29.613062Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T17:57:29.613150Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T17:57:29.613301Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 150us result status StatusPathDoesNotExist 2025-04-17T17:57:29.613427Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TZ_DATE [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TZ_DATETIME [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TZ_TIMESTAMP [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP64 >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> TPersQueueTest::DirectReadPreCached >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> TopicService::OneConsumer_TheRangesDoNotOverlap >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TPersQueueTest::SetupLockSession2 >> KqpStats::DeferredEffects-UseSink [GOOD] |81.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 2025-04-17 17:57:25,785 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 17:57:26,000 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 30591 46.4M 46.5M 23.4M test_tool run_ut @/home/runner/.ya/build/build_root/6873/00131e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk30/testing_out_stuff/test_tool.args 31054 1.7G 1.7G 1.6G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/6873/00131e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chun Test command err: 2025-04-17T17:47:34.314534Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:34.411343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:34.435478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:34.435761Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:34.443930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:34.444168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:34.444415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:34.444558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:34.444733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:34.444845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:34.445016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:34.445147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:34.445280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:34.445415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.445524Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:34.445633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:34.474448Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:34.474596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:34.474672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:34.474829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.474980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:34.475059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:34.475106Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:34.475209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:34.475281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:34.475321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:34.475352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:34.475521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:34.475580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:34.475616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:34.475646Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:34.475736Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:34.475790Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:34.475837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:34.475868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:34.475953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:34.475989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:34.476016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:34.476057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:34.476110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:34.476142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:34.476518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-17T17:47:34.476595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-17T17:47:34.476683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-17T17:47:34.476767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-17T17:47:34.476960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:34.477030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:34.477070Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:34.477254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:34.477302Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.477350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:34.477511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:34.477562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:34.477592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:34.477749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:34.477795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:34.477827Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:34.477952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:34.477997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:34.478041Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
r=;fline=scanner.cpp:67;event=wait_interval;remained=50;interval_idx=2; 2025-04-17T17:57:25.491931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:2;records_count:56668;schema=timestamp: int64 message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-17T17:57:25.491968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T17:57:25.492013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:57:25.493380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T17:57:25.493428Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=48; 2025-04-17T17:57:25.493464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=48;intervalId=1330; 2025-04-17T17:57:25.493509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:43;event=interval_result_absent;interval_idx=2;merger=0;interval_id=1284; 2025-04-17T17:57:25.493551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:67;event=wait_interval;remained=50;interval_idx=2; 2025-04-17T17:57:25.493682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:2;records_count:56668;schema=timestamp: int64 message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-17T17:57:25.493718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T17:57:25.493771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:57:25.495379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T17:57:25.495425Z node 
1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=32; 2025-04-17T17:57:25.495467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=32;intervalId=1314; 2025-04-17T17:57:25.495519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:43;event=interval_result_absent;interval_idx=2;merger=0;interval_id=1284; 2025-04-17T17:57:25.495551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:67;event=wait_interval;remained=50;interval_idx=2; 2025-04-17T17:57:25.495687Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:2;records_count:56668;schema=timestamp: int64 message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-17T17:57:25.495741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T17:57:25.495783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:57:25.496930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T17:57:25.496985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=36; 2025-04-17T17:57:25.497020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=36;intervalId=1318; 2025-04-17T17:57:25.497061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:43;event=interval_result_absent;interval_idx=2;merger=0;interval_id=1284; 2025-04-17T17:57:25.497091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:67;event=wait_interval;remained=50;interval_idx=2; 2025-04-17T17:57:25.497213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:2;records_count:56668;schema=timestamp: int64 message: 
string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-17T17:57:25.497248Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T17:57:25.497284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:57:25.531428Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-17T17:57:25.554468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=ASSEMBLER::SPEC;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2;source_id=129; 2025-04-17T17:57:25.563513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=ASSEMBLER::LAST_PK;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3;source_id=129; 2025-04-17T17:57:25.642662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=SNAPSHOT;details={};;scan_step_idx=4;source_id=129; 2025-04-17T17:57:25.650226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=ALLOCATE_MEMORY::Fetching;details={stage=Fetching;column_ids=[RawSequential:6];};;scan_step_idx=5;source_id=129; 2025-04-17T17:57:25.650621Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=OPTIONAL_ASSEMBLER::LAST;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=6;source_id=129; 2025-04-17T17:57:25.651237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=7;source_id=129; 2025-04-17T17:57:25.655409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T17:57:25.655473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-04-17T17:57:25.655517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=42; 2025-04-17T17:57:25.655569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=42;interval_id=1324;memory=13866349;count=3; 
2025-04-17T17:57:25.656782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15340:17314];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=42;
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/00131e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk30/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/00131e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk30/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
>> TPersQueueTest::WriteExisting [GOOD]
>> TPersQueueTest::WriteExistingBigValue
>> KqpLimits::AffectedShardsLimit [GOOD]
>> KqpLimits::CancelAfterRoTx
>> DataShardWrite::UpsertPrepared+Volatile [GOOD]
>> DataShardWrite::UpsertPrepared-Volatile
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DeferredEffects-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 3803, MsgBus: 17106
2025-04-17T17:57:05.436719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342662505213805:2208];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:57:05.447966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b0f/r3tmp/tmptEPoQm/pdisk_1.dat
2025-04-17T17:57:06.135003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:57:06.135138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:57:06.136658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3803, node 1
2025-04-17T17:57:06.280838Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T17:57:06.280859Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T17:57:06.314735Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:57:06.446783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:57:06.446803Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:57:06.446814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from
file: (empty maybe) 2025-04-17T17:57:06.446927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17106 TClient is connected to server localhost:17106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:07.657156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:07.691628Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:07.714366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.031905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.412926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:57:08.571722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.438599Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342662505213805:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:10.438662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:11.232829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342688275019204:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.232950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.774764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.806240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.840203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.875707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.961543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.035114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.087470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:12.164256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342692569987169:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:12.164353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342692569987174:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:12.164373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:12.169146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:12.181325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342692569987176:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:12.275393Z node 1 :TX_PROXY ERROR: Actor# [1:7494342692569987235:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:16.559492Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912634381, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 6705, MsgBus: 17709 2025-04-17T17:57:17.315430Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342714301670135:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:17.315483Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b0f/r3tmp/tmpeiHCGe/pdisk_1.dat 2025-04-17T17:57:17.468263Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:17.491421Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:17.491502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:17.495782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6705, node 2 2025-04-17T17:57:17.542426Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:17.542449Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:17.542456Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:17.542574Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17709 TClient is connected to server localhost:17709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T17:57:18.185769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:18.216852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:18.412915Z node 2 :FLAT_TX_SCHEMESHARD W ... :57:21.762410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.830015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.896402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:22.034630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342735776509047:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:22.034709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:22.035096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342735776509052:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:22.039162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:22.052012Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342735776509054:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:57:22.156758Z node 2 :TX_PROXY ERROR: Actor# [2:7494342735776509110:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:22.318341Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342714301670135:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:22.318410Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
<main>: Warning: Type annotation, code: 1030
<main>:3:46: Warning: At function: Coalesce
<main>:3:58: Warning: At function: SqlIn
<main>:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
Trying to start YDB, gRPC: 25767, MsgBus: 18857
2025-04-17T17:57:26.014451Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342751571636088:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:57:26.014494Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b0f/r3tmp/tmpBobhyg/pdisk_1.dat
2025-04-17T17:57:26.194531Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:57:26.210391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:57:26.210493Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:57:26.214008Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25767, node 3
2025-04-17T17:57:26.275973Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:57:26.275996Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:57:26.276006Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:57:26.276126Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18857
TClient is connected to server localhost:18857
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:57:26.852669Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:57:26.874261Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T17:57:26.897586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T17:57:27.041642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:27.268978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:27.378809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:30.595711Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342768751507034:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:30.595819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:30.677935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:30.739846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:30.796166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:30.846149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:30.898682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:30.981057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:31.014148Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494342751571636088:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:31.014234Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:31.026487Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:31.146240Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342773046475003:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:31.146352Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:31.146755Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342773046475008:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:31.151851Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:31.171178Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:57:31.171512Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494342773046475010:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:31.244994Z node 3 :TX_PROXY ERROR: Actor# [3:7494342773046475066:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
<main>: Warning: Type annotation, code: 1030
<main>:3:46: Warning: At function: Coalesce
<main>:3:58: Warning: At function: SqlIn
<main>:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
|81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
|81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
>> KqpExplain::CreateTableAs-Stats
>> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD]
>> DataShardWrite::ExecSQLUpsertImmediate-EvWrite
>> DataShardWrite::ReplaceImmediate [GOOD]
>> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD]
>> TPersQueueTest::BadTopic
>> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile
>> CompressExecutor::TestReorderedExecutor [GOOD]
>> DataShardWrite::ReplaceImmediate_DefaultValue
>> CompressExecutor::TestExecutorMemUsage
>> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD]
>> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK
>> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD]
>> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD]
>> DataShardWrite::UpsertPreparedManyTables-Volatile
>> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink
|81.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log}
|81.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8
2025-04-17 17:57:34,308 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-17 17:57:34,593 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination: pid rss ref pdirt 31386 46.3M 46.4M 23.4M test_tool run_ut @/home/runner/.ya/build/build_root/6873/0012e5/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk27/testing_out_stuff/test_tool.args 31712 1.5G 1.5G 1.5G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/6873/0012e5/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chun Test command err: 2025-04-17T17:47:38.119945Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:47:40.882676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:47:41.214368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:47:41.226621Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:47:41.805553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:47:41.805789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:47:41.806016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:47:41.806154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:47:41.819023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:47:41.819354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:47:41.820279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:47:41.830542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:47:41.830673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:47:41.830781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:47:41.830889Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:47:41.830987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:47:42.390762Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:47:42.391699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:47:42.392842Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:47:42.410968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:42.412032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:47:42.412516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:47:42.413019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:47:42.414159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:47:42.426475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:47:42.426547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:47:42.426592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:47:42.429155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:47:42.429786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:47:42.430119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:47:42.448921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:47:42.449659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:47:42.462607Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:47:42.462974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:47:42.463276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:47:42.464216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:47:42.464250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:47:42.464276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:47:42.464313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:47:42.464347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:47:42.464391Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:47:42.467944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=509; 2025-04-17T17:47:42.469836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=1040; 2025-04-17T17:47:42.483759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=13569; 2025-04-17T17:47:42.483880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-04-17T17:47:42.484067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:47:42.484138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:47:42.484172Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:47:42.484323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:47:42.484355Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:47:42.484377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:47:42.484501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:47:42.484552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:47:42.484579Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:47:42.484716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:47:42.484747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:47:42.484772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T17:47:42.484895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:47:42.484932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:47:42.484969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Fi ... 
;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:79:2872:0];range=bytes=0-2871;object_exists=1; 2025-04-17T17:57:32.103186Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:74:2840:0];range=bytes=0-2839;object_exists=1; 2025-04-17T17:57:32.103291Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:75:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-17T17:57:32.103391Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:108:2728:0];range=bytes=0-2727;object_exists=1; 2025-04-17T17:57:32.103485Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:127:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.103572Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:168:2888:0];range=bytes=0-2887;object_exists=1; 2025-04-17T17:57:32.103679Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:233:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.103777Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:59:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-17T17:57:32.103870Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:37:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-17T17:57:32.103956Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:159:2832:0];range=bytes=0-2831;object_exists=1; 2025-04-17T17:57:32.104063Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:245:2728:0];range=bytes=0-2727;object_exists=1; 2025-04-17T17:57:32.104158Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:88:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-17T17:57:32.104244Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:79:255:5:9096:0];range=bytes=0-9095;object_exists=1; 2025-04-17T17:57:32.104342Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:85:2832:0];range=bytes=0-2831;object_exists=1; 2025-04-17T17:57:32.104437Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:152:2840:0];range=bytes=0-2839;object_exists=1; 2025-04-17T17:57:32.104544Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:146:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.104638Z node 1 :S3_WRAPPER DEBUG: 
external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:139:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.104730Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:123:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-17T17:57:32.104815Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:151:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-17T17:57:32.104921Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:79:255:1:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-17T17:57:32.105019Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:34:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-17T17:57:32.105115Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:25:2856:0];range=bytes=0-2855;object_exists=1; 2025-04-17T17:57:32.105206Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:67:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.105312Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:175:2808:0];range=bytes=0-2807;object_exists=1; 2025-04-17T17:57:32.105412Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:33:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-17T17:57:32.105505Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:44:2880:0];range=bytes=0-2879;object_exists=1; 2025-04-17T17:57:32.105595Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:60:2832:0];range=bytes=0-2831;object_exists=1; 2025-04-17T17:57:32.105702Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:185:2888:0];range=bytes=0-2887;object_exists=1; 2025-04-17T17:57:32.105802Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:137:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.105897Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:181:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.105981Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:107:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.106086Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:104:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-17T17:57:32.106198Z node 1 :S3_WRAPPER DEBUG: 
external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:241:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.106292Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:246:8760:0];range=bytes=0-8759;object_exists=1; 2025-04-17T17:57:32.106405Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:198:2880:0];range=bytes=0-2879;object_exists=1; 2025-04-17T17:57:32.106491Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:212:8872:0];range=bytes=0-8871;object_exists=1; 2025-04-17T17:57:32.106594Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:106:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-17T17:57:32.106718Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:122:2848:0];range=bytes=0-2847;object_exists=1; 2025-04-17T17:57:32.106827Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:19:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.106969Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:138:2832:0];range=bytes=0-2831;object_exists=1; 2025-04-17T17:57:32.107059Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:70:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-17T17:57:32.107164Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:10:255:30:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.107263Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:77:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.107359Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:129:2848:0];range=bytes=0-2847;object_exists=1; 2025-04-17T17:57:32.107444Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:100:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-17T17:57:32.107559Z node 1 :S3_WRAPPER DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:78:255:116:2880:0];range=bytes=0-2879;object_exists=1; 2025-04-17T17:57:32.112016Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303;fline=actor.cpp:48;task=agents_waiting=2;additional_info=();; 2025-04-17T17:57:32.119780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-17T17:57:33.596177Z node 1 :TX_COLUMNSHARD DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=6ccd9478-1bb511f0-9f70c8db-25e08303; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/0012e5/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk27/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/0012e5/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk27/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> BackupRestore::RestoreViewQueryText [GOOD] >> BackupRestore::RestoreViewReferenceTable >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP64 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL64 >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] Test command err: Trying to start YDB, gRPC: 62051, MsgBus: 5845 2025-04-17T17:57:05.553905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342662686842576:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:05.554846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b11/r3tmp/tmpCcaDLN/pdisk_1.dat 2025-04-17T17:57:06.261437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:06.295997Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:06.296101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:06.302771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62051, node 1 2025-04-17T17:57:06.603779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:06.603800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:06.603810Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:06.603901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5845 TClient is connected to server localhost:5845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:07.764088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:07.798930Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:07.810284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:07.972688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.204216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.329503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
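The Python traceback above is yatest's two-stage timeout report: wait_for() polls until the 600-second deadline and raises TimeoutError, and process.wait() rewraps that as ExecutionTimeoutError so the runner can attribute the failure to the run_ut command. A minimal sketch of the polling pattern, assuming a hypothetical check callable (the real helper also truncates the message to MAX_MESSAGE_LEN):

    import time

    def wait_for(check, timeout, message, sleep_time=0.1):
        # Poll `check` until it returns truthy or the deadline passes.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if check():
                return
            time.sleep(sleep_time)
        # The caller (process.wait) rewraps this as ExecutionTimeoutError.
        raise TimeoutError(message)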
2025-04-17T17:57:10.558281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342662686842576:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:10.558351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:11.026967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342688456648110:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.027062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.421689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.469685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.512628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.598894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.672874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.759825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.815200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.901700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342688456648784:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.901780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.902020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342688456648789:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.905400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:11.921549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342688456648791:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:12.005609Z node 1 :TX_PROXY ERROR: Actor# [1:7494342692751616146:3564] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27944, MsgBus: 28289 2025-04-17T17:57:14.873795Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342700971796310:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:14.873833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b11/r3tmp/tmpRrmljC/pdisk_1.dat 2025-04-17T17:57:15.138299Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:15.163202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:15.163274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:15.167406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27944, node 2 2025-04-17T17:57:15.350574Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:15.350592Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:15.350598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:15.350696Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28289 TClient is connected to server localhost:28289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:16.055325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:18.634490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342718151666151:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:18.634594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:18.635006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... _COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:57:36.401476Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:57:36.401520Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:57:36.401549Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:57:36.403691Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:57:36.403757Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:57:36.404010Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:57:36.404046Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:57:36.404232Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:57:36.404268Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:57:36.404570Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:57:36.404606Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:57:36.404789Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:57:36.404825Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:57:36.454835Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:57:36.454915Z node 4 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:57:36.455329Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:57:36.455528Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:57:36.455664Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:57:36.455790Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:57:36.455894Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:57:36.456002Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:57:36.456114Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:57:36.456228Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:57:36.456345Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:57:36.456473Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[4:7494342793066448897:2515];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:57:36.461145Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:57:36.461211Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:57:36.461321Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-17T17:57:36.461352Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:57:36.461543Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:57:36.461574Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:57:36.461691Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:57:36.461730Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:57:36.461797Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:57:36.461823Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:57:36.461861Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:57:36.461902Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:57:36.462608Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:57:36.462661Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:57:36.462858Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:57:36.462896Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:57:36.463068Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:57:36.463098Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:57:36.463288Z node 4 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:57:36.463316Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:57:36.463445Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T17:57:36.463476Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T17:57:36.490451Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710666; 2025-04-17T17:57:36.495080Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710666; 2025-04-17T17:57:36.505961Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710666; 2025-04-17T17:57:36.513590Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710666; >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> WithSDK::DescribeConsumer [GOOD] >> WithSDK::CommitWithWrongSessionId >> TPersQueueTest::ReadFromSeveralPartitions >> TDataShardTrace::TestTraceDistributedUpsert+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11739, MsgBus: 61945 2025-04-17T17:57:04.038798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342650546194208:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:04.039113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b23/r3tmp/tmpOVJljj/pdisk_1.dat 2025-04-17T17:57:04.806994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:04.829856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:04.829972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:04.835986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11739, node 1 2025-04-17T17:57:05.086601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:05.086631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:05.086640Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
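The TX_COLUMNSHARD blocks in this log show the column-shard schema normalizers running strictly in sequence on tablet start: each normalizer logs normalizer_switched, then normalizer_init with its seq_id, then normalizer_finished, and the chain ends with normalization_finished. A minimal sketch of that dispatch loop, with hypothetical names rather than the actual abstract.cpp implementation:

    def run_normalizers(normalizers, log):
        # normalizers: ordered list of (seq_id, name, apply) tuples,
        # e.g. (1, "Granules"), (2, "Chunks"), ..., (16, "RestoreV2Chunks")
        for seq_id, name, apply in normalizers:
            log(f"event=normalizer_switched;description=CLASS_NAME={name};id={name};")
            log(f"event=normalizer_init;last=0;seq_id={seq_id};type={name};")
            apply()  # tablet-specific migration/cleanup work
            log(f"event=normalizer_finished;description=CLASS_NAME={name};id={seq_id};")
        log("event=normalization_finished;")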
2025-04-17T17:57:05.086734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61945 TClient is connected to server localhost:61945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:05.859597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:05.917491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:06.159526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:06.441688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:06.608100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.999059Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342650546194208:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:09.013362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:10.283767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342680610966920:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:10.283866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:10.612369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.699166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.754862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.807835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.841869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.892631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.943825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.037762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342684905934882:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.037844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.038017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342684905934887:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.041813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:11.062669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:57:11.066469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342684905934889:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:11.126398Z node 1 :TX_PROXY ERROR: Actor# [1:7494342684905934947:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4325, MsgBus: 62714 2025-04-17T17:57:13.777272Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342696545482164:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:13.851766Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b23/r3tmp/tmpoaddV7/pdisk_1.dat 2025-04-17T17:57:13.943858Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4325, node 2 2025-04-17T17:57:13.962585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:13.962657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:13.967162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:14.104878Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:14.104898Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:14.104904Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:14.105006Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62714 TClient is connected to server localhost:62714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T17:57:14.663873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:14.680874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:57:14.821490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:15.010034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... 17:57:30.727655Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342766580777913:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b23/r3tmp/tmpUpXjaB/pdisk_1.dat 2025-04-17T17:57:30.781622Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:30.880661Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:30.894811Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:30.894897Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:30.899897Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27980, node 4 2025-04-17T17:57:30.990395Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:30.990418Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:30.990429Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:30.990593Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28041 TClient is connected to server localhost:28041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:31.675004Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:31.688888Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:57:31.701954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
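A recurring and apparently benign pattern in these startup sections: the workload service creates the default resource pool, schedules a retry with "Transaction ... completed, doublechecking", and a concurrent creator then hits the TX_PROXY "path exist, request accepts it" error, which is accepted as success. A sketch of that idempotent create-then-verify flow, with hypothetical helper and exception names:

    POOL = "/Root/.metadata/workload_manager/pools/default"

    def ensure_default_pool(create, describe):
        try:
            create(POOL)
        except PathExistsError:  # "path exist, request accepts it": another session won the race
            pass
        # Doublecheck: the pool must be visible before queries are admitted.
        assert describe(POOL) is not None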
2025-04-17T17:57:31.834310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:32.112964Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:32.243477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:35.347274Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342788055615993:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:35.347351Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:35.411255Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.498198Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.590836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.641167Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.666287Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342766580777913:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:35.666356Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:35.710221Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.809483Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.900120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.998723Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342788055616667:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:35.998828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:35.999206Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342788055616672:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:36.003385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:36.029091Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494342788055616674:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:57:36.100126Z node 4 :TX_PROXY ERROR: Actor# [4:7494342792350584026:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:37.731217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T17:57:39.304429Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494342805235486613:2562], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-04-17T17:57:39.306144Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTJhZDczNDctOTliNDcyYjEtY2U5ZWJlYWUtZGY1NTcwZWU=, ActorId: [4:7494342796645551680:2516], ActorState: ExecuteState, TraceId: 01js2ecctm863p6y6j76swe77d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:57:39.435376Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTJhZDczNDctOTliNDcyYjEtY2U5ZWJlYWUtZGY1NTcwZWU=, ActorId: [4:7494342796645551680:2516], ActorState: ExecuteState, TraceId: 01js2eccvwef5jdg09enqgc0sv, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1320: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-04-17T17:57:39.482826Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494342805235486631:2569], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T17:57:39.484700Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTJhZDczNDctOTliNDcyYjEtY2U5ZWJlYWUtZGY1NTcwZWU=, ActorId: [4:7494342796645551680:2516], ActorState: ExecuteState, TraceId: 01js2ecd03bnnpxfndb6x1q2dh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:57:39.512327Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494342805235486641:2573], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T17:57:39.514067Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTJhZDczNDctOTliNDcyYjEtY2U5ZWJlYWUtZGY1NTcwZWU=, ActorId: [4:7494342796645551680:2516], ActorState: ExecuteState, TraceId: 01js2ecd19fk04xfrd23rg3y57, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-17T17:56:48.578951Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1744912608578919 2025-04-17T17:56:50.179278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342595168140012:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:50.192303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b7/r3tmp/tmp7fhaiN/pdisk_1.dat 2025-04-17T17:56:50.747224Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:56:50.778807Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:56:50.976863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:51.331710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:51.520064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:51.590992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:51.591102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:51.606612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:51.606683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:51.627118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:51.627382Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:56:51.667087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8075, node 1 2025-04-17T17:56:52.074243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015b7/r3tmp/yandex2qfAJw.tmp 2025-04-17T17:56:52.074266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0015b7/r3tmp/yandex2qfAJw.tmp 2025-04-17T17:56:52.120396Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015b7/r3tmp/yandex2qfAJw.tmp 2025-04-17T17:56:52.120727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:52.175939Z INFO: TTestServer started on Port 29502 
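The GENERIC_ERROR issues above exercise KQP's decimal type checks: a declared precision of 99 is rejected outright, and binding a Decimal(35,10) value where a Decimal(22,9) column is expected fails because the implicit narrowing cast would lose precision. The same narrowing can be illustrated with Python's decimal module, trapping Inexact so a lossy rounding raises instead of silently truncating (an analogy, not the KQP code path):

    from decimal import Decimal, Context, Inexact

    ctx = Context(traps=[Inexact])
    ok = Decimal("1.012345678").quantize(Decimal("1E-9"), context=ctx)   # 9 fractional digits: exact
    Decimal("1.0123456789").quantize(Decimal("1E-9"), context=ctx)      # 10 digits into scale 9: raises Inexact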
GrpcPort 8075 TClient is connected to server localhost:29502 PQClient connected to localhost:8075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:52.915859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T17:56:55.142616Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342595168140012:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:55.142709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:56.511350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342624141083661:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:56.511485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:56.511834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342624141083688:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:56.521799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-17T17:56:56.574463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342624141083690:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-17T17:56:56.651565Z node 2 :TX_PROXY ERROR: Actor# [2:7494342624141083718:2133] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:57.423884Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342620937944791:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:57.424615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:56:57.425468Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWJhNGE4YzUtODllM2U4MjItNGQzMjIxMWYtYWJhZWRlMjg=, ActorId: [1:7494342620937944742:2340], ActorState: ExecuteState, TraceId: 01js2eb39y42087bw79z679rkb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:57.427880Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:57.442247Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342624141083725:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:57.443653Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODY5ZjA2NGUtNTYzM2VlZGUtNWYyZTcyYzMtYjYyMTU5YmU=, ActorId: [2:7494342624141083659:2310], ActorState: ExecuteState, TraceId: 01js2eb31f76zt22x6498mbp3b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:57.443993Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:57.605938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:57.852215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:8075", true, true, 1000); 2025-04-17T17:56:58.161538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2eb4ga0fasw8xhx4c23n7w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q3ZTg2MS04YzczYjc0NC1jYzUxMDI5Ny03YzA1YzE1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342629527879797:2976] === CheckClustersList. Ok 2025-04-17T17:57:03.662852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:8075 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T17:57:04.094857Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:8075 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzo ... 
1Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-17T17:57:38.013200Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:36358 2025-04-17T17:57:38.013220Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:36358 proto=v1 topic=test-topic durationSec=0 2025-04-17T17:57:38.013235Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T17:57:38.015028Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-17T17:57:38.015175Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-17T17:57:38.015192Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T17:57:38.015208Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T17:57:38.015236Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T17:57:38.018425Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-17T17:57:38.241787Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-17T17:57:38.242883Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7494342803844944706:2502] connected; active server actors: 1 2025-04-17T17:57:38.243070Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-17T17:57:38.243092Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-17T17:57:38.272733Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7494342803844944706:2502] disconnected; active server actors: 1 2025-04-17T17:57:38.277075Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7494342803844944706:2502] disconnected no session 2025-04-17T17:57:38.452063Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T17:57:38.452119Z node 5 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T17:57:38.452138Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494342803844944667:2502] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T17:57:38.452172Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T17:57:38.455555Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-17T17:57:38.456509Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7494342803844944738:2502], now have 1 active actors on pipe 2025-04-17T17:57:38.456582Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:57:38.456606Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:57:38.456724Z node 6 :PERSQUEUE INFO: new Cookie src|2607bef-eeec1a6-4fcfaba4-2f74b09_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T17:57:38.456817Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T17:57:38.456862Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:57:38.462026Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:57:38.462066Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:57:38.462156Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:57:38.462908Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|2607bef-eeec1a6-4fcfaba4-2f74b09_0 2025-04-17T17:57:38.463797Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912658463 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:57:38.463911Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|2607bef-eeec1a6-4fcfaba4-2f74b09_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T17:57:38.464917Z :INFO: [] MessageGroupId [src] SessionId [src|2607bef-eeec1a6-4fcfaba4-2f74b09_0] Write session: close. 
Timeout = 0 ms 2025-04-17T17:57:38.465019Z :INFO: [] MessageGroupId [src] SessionId [src|2607bef-eeec1a6-4fcfaba4-2f74b09_0] Write session will now close 2025-04-17T17:57:38.465062Z :DEBUG: [] MessageGroupId [src] SessionId [src|2607bef-eeec1a6-4fcfaba4-2f74b09_0] Write session: aborting 2025-04-17T17:57:38.470765Z :INFO: [] MessageGroupId [src] SessionId [src|2607bef-eeec1a6-4fcfaba4-2f74b09_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:57:38.470825Z :DEBUG: [] MessageGroupId [src] SessionId [src|2607bef-eeec1a6-4fcfaba4-2f74b09_0] Write session: destroy 2025-04-17T17:57:38.515148Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7494342803844944738:2502] destroyed 2025-04-17T17:57:38.515220Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:57:38.510865Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|2607bef-eeec1a6-4fcfaba4-2f74b09_0 grpc read done: success: 0 data: 2025-04-17T17:57:38.510897Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2607bef-eeec1a6-4fcfaba4-2f74b09_0 grpc read failed 2025-04-17T17:57:38.510928Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2607bef-eeec1a6-4fcfaba4-2f74b09_0 grpc closed 2025-04-17T17:57:38.510947Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2607bef-eeec1a6-4fcfaba4-2f74b09_0 is DEAD 2025-04-17T17:57:38.511744Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:57:38.524575Z :INFO: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] Starting read session 2025-04-17T17:57:38.524619Z :DEBUG: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] Starting session to cluster null (localhost:18065) 2025-04-17T17:57:38.527278Z :DEBUG: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:57:38.527314Z :DEBUG: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:57:38.527339Z :DEBUG: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] Reconnecting session to cluster null in 0.000000s 2025-04-17T17:57:38.531974Z :ERROR: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-17T17:57:38.532067Z :DEBUG: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:57:38.532109Z :DEBUG: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:57:38.532241Z :INFO: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-17T17:57:38.532424Z :NOTICE: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:57:38.532446Z :DEBUG: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-17T17:57:38.532534Z :INFO: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] Closing read session. Close timeout: 0.000000s 2025-04-17T17:57:38.532576Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T17:57:38.532635Z :INFO: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] Counters: { Errors: 1 CurrentSessionLifetimeMs: 8 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:57:38.532721Z :NOTICE: [/Root] [/Root] [e0102254-d80030bb-3c2df551-be15fcf8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:57:39.749103Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [5:7494342808139912120:2529]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T17:57:39.786283Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [5:7494342808139912120:2529]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:57:39.858271Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [5:7494342808139912120:2529]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T17:57:39.926844Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [5:7494342808139912120:2529]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::DeleteImmediate >> KqpExplain::CreateTableAs-Stats [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreViewQueryText >> DemoTx::Scenario_2 [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] Test command err: Trying to start YDB, gRPC: 24819, MsgBus: 8394 2025-04-17T17:57:04.921263Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342657481580900:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:04.921702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b18/r3tmp/tmpKbuTpW/pdisk_1.dat 2025-04-17T17:57:05.736698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:05.782595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:05.782705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:05.791066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24819, node 1 2025-04-17T17:57:06.098904Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:06.098930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:06.098937Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:06.099037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8394 TClient is connected to server localhost:8394 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:07.380927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:07.452252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:07.864087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.113167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.211537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:09.906652Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342657481580900:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:09.906728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:10.255010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342683251386311:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:10.255134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:10.592037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.646629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.714336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.797426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.831265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.880217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.004037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.146842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342687546354286:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.146935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.147361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342687546354291:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.151032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:11.165840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342687546354293:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:11.250465Z node 1 :TX_PROXY ERROR: Actor# [1:7494342687546354353:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No 
estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type" ... ptr.h:259: __on_zero_shared @ 0x40AFFE99 13. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:171: __release_shared @ 0x436B3745 14. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:217: __release_shared @ 0x436B3745 15. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:700: ~shared_ptr @ 0x436B3745 16. /-S/ydb/core/kqp/session_actor/kqp_query_state.h:36: ~TKqpQueryState @ 0x436B3745 17. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:171: __release_shared @ 0x43632EEE 18. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:217: __release_shared @ 0x43632EEE 19. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:700: ~shared_ptr @ 0x43632EEE 20. /tmp//-S/ydb/core/kqp/session_actor/kqp_session_actor.cpp:150: ~TKqpSessionActor @ 0x43632EEE 21. /tmp//-S/ydb/core/kqp/session_actor/kqp_session_actor.cpp:150: ?? @ 0x436339BD 22. /-S/util/generic/ptr.h:36: CheckedDelete @ 0x1C6717F2 23. /-S/util/generic/ptr.h:57: Destroy @ 0x1C6717F2 24. /-S/util/generic/ptr.h:360: DoDestroy @ 0x1C6717F2 25. /-S/util/generic/ptr.h:290: ~THolder @ 0x1C6717F2 26. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:168: destroy @ 0x1C6717F2 27. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:335: destroy, 0> @ 0x1C6717F2 28. /-S/contrib/libs/cxxsupp/libcxx/include/vector:962: __base_destruct_at_end @ 0x1C6717F2 29. /-S/contrib/libs/cxxsupp/libcxx/include/vector:956: __clear @ 0x1C6717F2 30. /-S/contrib/libs/cxxsupp/libcxx/include/vector:747: clear @ 0x1C6717F2 31. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:113: DropUnregistered @ 0x1C6717F2 32. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:280: Execute @ 0x1C67461D 33. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:460: operator() @ 0x1C67D12E 34. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:512: ProcessExecutorPool @ 0x1C67C689 35. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:538: ThreadProc @ 0x1C67E61E 36. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x19882584 37. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x1952A698 38. ??:0: ?? @ 0x7F0F42491AC2 39. ??:0: ?? 
@ 0x7F0F4252384F Trying to start YDB, gRPC: 6904, MsgBus: 24760 2025-04-17T17:57:37.498767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342797621896165:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:37.501379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b18/r3tmp/tmpYzgLMi/pdisk_1.dat 2025-04-17T17:57:38.169800Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:38.184182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:38.184290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:38.186362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6904, node 1 2025-04-17T17:57:38.516411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:38.516434Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:38.516441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:38.516536Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24760 TClient is connected to server localhost:24760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:39.335685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:41.779757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342814801765876:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:41.782350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342814801765872:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:41.782487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:41.783937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:57:41.797380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342814801765885:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:57:41.853747Z node 1 :TX_PROXY ERROR: Actor# [1:7494342814801765937:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:42.161014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:57:42.478310Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342797621896165:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:42.478381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; PLAN::{"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"Tables":["Destination"],"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Col1 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Source","ReadColumns":["Col1","Col2"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Sink"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Col1 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Source","ReadColumns":["Col1","Col2"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, 
+∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Sink"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Col1 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Source","ReadColumns":["Col1","Col2"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Sink"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> DemoTx::Scenario_3 >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> TDataShardTrace::TestTraceDistributedSelect >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD] |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes >> TPQTest::TestDirectReadHappyWay >> TopicService::OneConsumer_TheRangesOverlap >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TPersQueueTest::TopicServiceCommitOffset [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-17T17:55:18.257896Z :WriteRAW INFO: Random seed for debugging is 1744912518257863 2025-04-17T17:55:18.680483Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342201866209253:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:18.684089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:19.041225Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001800/r3tmp/tmp0s0CHH/pdisk_1.dat 2025-04-17T17:55:19.078807Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:55:19.188626Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:19.588577Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:19.604848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:19.604918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:19.608942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:19.609007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:19.619609Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:19.619779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:19.620467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2177, node 1 2025-04-17T17:55:20.022959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001800/r3tmp/yandex7pVAFE.tmp 2025-04-17T17:55:20.022989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001800/r3tmp/yandex7pVAFE.tmp 2025-04-17T17:55:20.023147Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001800/r3tmp/yandex7pVAFE.tmp 2025-04-17T17:55:20.023318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:20.141950Z INFO: TTestServer started on Port 1193 GrpcPort 2177 TClient is connected to server localhost:1193 PQClient connected to localhost:2177 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:20.486278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T17:55:23.690318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342201866209253:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:23.690411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:24.016709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342225495598455:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:24.016798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342225495598435:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:24.017247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:24.029675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:55:24.062229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342225495598462:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:55:24.139417Z node 2 :TX_PROXY ERROR: Actor# [2:7494342225495598493:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:24.638121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:55:24.640109Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342225495598508:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:24.641304Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjM1ODdkN2EtYWQ0ZGQyZTctMTBkYzYyNDgtZTU4YWY0NjU=, ActorId: [2:7494342221200631137:2309], ActorState: ExecuteState, TraceId: 01js2e88q36yqx4wxa0x8wk7bg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:24.639242Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342227636014019:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:24.641037Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzgwMGRhMzgtNTY2ZWM5OGUtNDliOTdlYzgtYmFjMTE2NWE=, ActorId: [1:7494342227636013968:2336], ActorState: ExecuteState, TraceId: 01js2e88y73qha2b3nj7ab8xn7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:24.643322Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:24.643322Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:24.983749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:25.224576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2177", true, true, 1000); 2025-04-17T17:55:25.570802Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2e8a3b0mv6bxrtze0htezp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFkMmI5YjItODUxMGI0MzAtYTA5MzFjNTMtODYwZjJjZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342231930981726:2996] === CheckClustersList. Ok 2025-04-17T17:55:31.957545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 
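The "failed to list clusters" errors earlier in this block come from the cluster tracker polling `/Root/PQ/Config/V2/Cluster` before the test has created it; once the === Init DC UPSERT above seeds the table, CheckClustersList passes. A minimal readback of the seeded row, assuming the tracker selects the same columns the UPSERT writes:

    --!syntax_v1
    -- Projection mirrors the seeding UPSERT above; the exact query the
    -- cluster tracker issues is an assumption.
    SELECT name, balancer, local, enabled, weight
    FROM `/Root/PQ/Config/V2/Cluster`;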
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:2177 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: 2025-04-17T17:55:32.183876Z node 1 :PERSQUEUE INFO: proxy answer Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:2177 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710679 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic ... 7:42.560295Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-17T17:57:42.560475Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:49412 2025-04-17T17:57:42.560496Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49412 proto=v1 topic=test-topic durationSec=0 2025-04-17T17:57:42.560509Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T17:57:42.562891Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-17T17:57:42.563059Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-17T17:57:42.563074Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T17:57:42.563090Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T17:57:42.563112Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T17:57:42.573408Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-17T17:57:43.040634Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 
2025-04-17T17:57:43.042993Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7494342824616047328:2529] connected; active server actors: 1 2025-04-17T17:57:43.050815Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-17T17:57:43.050862Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-17T17:57:43.054692Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7494342824616047328:2529] disconnected; active server actors: 1 2025-04-17T17:57:43.054745Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7494342824616047328:2529] disconnected no session 2025-04-17T17:57:43.305772Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T17:57:43.305822Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T17:57:43.305851Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342820321079964:2529] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T17:57:43.305908Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T17:57:43.314131Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 15, Generation: 1 2025-04-17T17:57:43.314223Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7494342824616047384:2529], now have 1 active actors on pipe 2025-04-17T17:57:43.314419Z node 15 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:57:43.314453Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:57:43.314565Z node 15 :PERSQUEUE INFO: new Cookie src|aee02c7a-1a25c998-93966546-58e4f32a_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T17:57:43.314697Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-17T17:57:43.314756Z node 15 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:57:43.315101Z node 15 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:57:43.315123Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:57:43.315202Z node 15 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:57:43.315353Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|aee02c7a-1a25c998-93966546-58e4f32a_0 2025-04-17T17:57:43.319047Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912663318 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:57:43.319218Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|aee02c7a-1a25c998-93966546-58e4f32a_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T17:57:43.321664Z :INFO: [] MessageGroupId [src] SessionId [src|aee02c7a-1a25c998-93966546-58e4f32a_0] Write session: close. Timeout = 0 ms 2025-04-17T17:57:43.321740Z :INFO: [] MessageGroupId [src] SessionId [src|aee02c7a-1a25c998-93966546-58e4f32a_0] Write session will now close 2025-04-17T17:57:43.321803Z :DEBUG: [] MessageGroupId [src] SessionId [src|aee02c7a-1a25c998-93966546-58e4f32a_0] Write session: aborting 2025-04-17T17:57:43.324060Z :INFO: [] MessageGroupId [src] SessionId [src|aee02c7a-1a25c998-93966546-58e4f32a_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:57:43.324120Z :DEBUG: [] MessageGroupId [src] SessionId [src|aee02c7a-1a25c998-93966546-58e4f32a_0] Write session: destroy 2025-04-17T17:57:43.334283Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|aee02c7a-1a25c998-93966546-58e4f32a_0 grpc read done: success: 0 data: 2025-04-17T17:57:43.334319Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|aee02c7a-1a25c998-93966546-58e4f32a_0 grpc read failed 2025-04-17T17:57:43.334362Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|aee02c7a-1a25c998-93966546-58e4f32a_0 grpc closed 2025-04-17T17:57:43.334389Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|aee02c7a-1a25c998-93966546-58e4f32a_0 is DEAD 2025-04-17T17:57:43.335466Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:57:43.338856Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7494342824616047384:2529] destroyed 2025-04-17T17:57:43.338930Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:57:43.457440Z :INFO: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Starting read session 2025-04-17T17:57:43.457491Z :DEBUG: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Starting cluster discovery 2025-04-17T17:57:43.457738Z :INFO: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30852: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30852
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30852. " 2025-04-17T17:57:43.457788Z :DEBUG: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Restart cluster discovery in 0.009536s 2025-04-17T17:57:43.470293Z :DEBUG: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Starting cluster discovery 2025-04-17T17:57:43.470657Z :INFO: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30852: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30852
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30852. " 2025-04-17T17:57:43.470715Z :DEBUG: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Restart cluster discovery in 0.012045s 2025-04-17T17:57:43.485341Z :DEBUG: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Starting cluster discovery 2025-04-17T17:57:43.485563Z :INFO: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30852: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30852
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30852. " 2025-04-17T17:57:43.485610Z :DEBUG: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Restart cluster discovery in 0.023747s 2025-04-17T17:57:43.510352Z :DEBUG: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Starting cluster discovery 2025-04-17T17:57:43.510687Z :NOTICE: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30852: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30852
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30852. " } 2025-04-17T17:57:43.510890Z :NOTICE: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30852: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30852
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:30852. " } 2025-04-17T17:57:43.511030Z :INFO: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Closing read session. Close timeout: 0.000000s 2025-04-17T17:57:43.511139Z :NOTICE: [/Root] [/Root] [a1e71885-5b28b2b-e3285a23-eb2e6f37] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestDescribeBalancer |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.8%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations >> TPersQueueTest::SameOffset [GOOD] >> TPersQueueTest::SchemeOperationsTest >> BackupRestore::TestAllPrimitiveTypes-INTERVAL64 >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: 2025-04-17T17:57:46.725639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:46.726346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:46.726428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c4a/r3tmp/tmp1s6GiF/pdisk_1.dat 2025-04-17T17:57:47.226832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:47.283687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:47.323724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:47.323859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:47.337689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:47.428344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:49.626767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:49.626881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:49.627305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:49.633127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:57:49.660448Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:57:49.860781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:57:49.935791Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:50.396175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2ecpxq2mn9ft8e4phzbbqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU2MTQ3NTctM2Y4ZmFlNjQtM2IyYzQzMjQtMTExNzBmY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TPersQueueTest::SetupLockSession2 [GOOD] >> TPersQueueTest::SetupLockSession >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> BackupRestore::RestoreViewReferenceTable [GOOD] >> BackupRestore::RestoreViewToDifferentDatabase >> TPQTest::TestDescribeBalancer [GOOD] >> TPQTest::TestLowWatermark >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> 
TPQTabletTests::ProposeTx_Unknown_Partition_2 >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL64 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-STRING >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-04-17T17:57:49.538121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:49.538711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:49.538805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c09/r3tmp/tmpwCNUCv/pdisk_1.dat 2025-04-17T17:57:49.935773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:50.017742Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:50.060055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:50.060163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:50.071426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:50.161031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> BackupRestoreS3::RestoreViewQueryText [FAIL] >> BackupRestoreS3::RestoreViewReferenceTable >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |81.8%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> TPersQueueTest::BadTopic [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |81.9%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |81.9%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |81.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2025-04-17T17:57:52.234706Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:57:52.243031Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:57:52.243319Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:57:52.243360Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:57:52.243387Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:57:52.243430Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:57:52.243464Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:52.243556Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:57:52.281116Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-17T17:57:52.281246Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:57:52.298843Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:57:52.302655Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" 
YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:57:52.302790Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:52.303722Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:57:52.303869Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:57:52.304307Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:57:52.304717Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-17T17:57:52.318035Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:57:52.318159Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-17T17:57:52.318244Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:57:52.318871Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:57:52.318984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-17T17:57:52.319028Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:57:52.319173Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:57:52.319215Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:57:52.319250Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:57:52.319299Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:57:52.319341Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:57:52.319373Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:57:52.319417Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:57:52.319458Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:57:52.319552Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:57:52.319728Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:57:52.330152Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:57:52.330647Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-04-17T17:57:52.345168Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-04-17T17:57:52.346063Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 2 Data { Immediate: false } 2025-04-17T17:57:52.346127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 empty list of operations 2025-04-17T17:57:52.354339Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-04-17T17:57:52.952813Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:57:52.964306Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:57:52.964642Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:57:52.964679Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:57:52.964707Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:57:52.964737Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:57:52.964773Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:52.964818Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:57:52.990108Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:176:2191], now have 1 active actors on pipe 2025-04-17T17:57:52.990226Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:57:52.990475Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:57:52.996137Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:57:52.996261Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:52.996966Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:57:52.997080Z 
node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:57:52.997514Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:57:52.997766Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2025-04-17T17:57:52.998673Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:57:52.998736Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:184:2197] 2025-04-17T17:57:52.998785Z node 2 :PERSQUEUE DEBUG: [PQ: 7205759403792793 ... TInitInternalFieldsStep 2025-04-17T17:57:55.923999Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:304:2290] 2025-04-17T17:57:55.924755Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T17:57:55.925855Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-17T17:57:55.926168Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T17:57:55.935395Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T17:57:55.935754Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:57:55.935796Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:57:55.935833Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:57:55.935888Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:57:55.935937Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:304:2290] 2025-04-17T17:57:55.936046Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:57:55.936101Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:57:55.936181Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-04-17T17:57:55.936493Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-04-17T17:57:55.936533Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2025-04-17T17:57:55.936570Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State PLANNED FrontTxId 67890 2025-04-17T17:57:55.936605Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-17T17:57:55.936646Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-17T17:57:55.936714Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-17T17:57:55.936752Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-04-17T17:57:55.936822Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-17T17:57:55.937244Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-04-17T17:57:55.937306Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-17T17:57:55.937360Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-17T17:57:55.937394Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-17T17:57:55.937430Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-17T17:57:55.937468Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-04-17T17:57:55.937507Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-17T17:57:55.937544Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-17T17:57:55.937583Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-04-17T17:57:55.937747Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-04-17T17:57:55.937857Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:57:55.945920Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:57:55.945993Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-17T17:57:55.946030Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-17T17:57:55.946093Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-17T17:57:55.946131Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-17T17:57:55.946168Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-17T17:57:55.946237Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. 
Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-17T17:57:55.946272Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-17T17:57:55.946395Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-04-17T17:57:55.956529Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-17T17:57:55.956604Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-17T17:57:55.962928Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:329:2308], now have 1 active actors on pipe 2025-04-17T17:57:55.963050Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-04-17T17:57:55.963162Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-17T17:57:55.963199Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-17T17:57:55.963237Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-04-17T17:57:55.963272Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-04-17T17:57:55.963308Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-04-17T17:57:55.963344Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-04-17T17:57:55.963380Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-17T17:57:55.963432Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-17T17:57:55.963518Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-17T17:57:55.963560Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-04-17T17:57:55.963639Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-17T17:57:55.963696Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-04-17T17:57:55.963852Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:57:55.963888Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:57:55.963957Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:57:55.963997Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:57:55.964026Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:57:55.964045Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:57:55.964064Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:57:55.964092Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:57:55.964126Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:57:55.964209Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:57:55.971694Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:57:55.971857Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-17T17:57:55.971899Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-17T17:57:55.971947Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-17T17:57:55.971987Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-17T17:57:55.972033Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-17T17:57:55.972081Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-17T17:57:55.972122Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-17T17:57:55.972163Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-17T17:57:55.972202Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-17T17:57:55.972246Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-17T17:57:55.972443Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-04-17T17:57:55.972537Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:57:55.987277Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:57:55.987373Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-17T17:57:55.987409Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-17T17:57:55.987446Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-17T17:57:55.987502Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:57:55.987561Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-17T17:57:55.987609Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-17T17:57:55.987641Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:57:55.987679Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-17T17:57:55.987706Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:57:55.987738Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 |81.9%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2025-04-17T17:57:50.619827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:50.620382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:50.620469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b32/r3tmp/tmpE5ookG/pdisk_1.dat 2025-04-17T17:57:51.235431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:51.281565Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:51.328439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:51.328593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:51.340239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:51.440407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:53.986452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:53.986575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:53.986990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:54.002642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T17:57:54.051594Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T17:57:54.286013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T17:57:54.504902Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:55.264091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2ecv601xrfbz4brtszcsx7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1NDc3ZmUtNDNjNzU3NzgtODE5NjFjM2YtZjI3YmE5YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:57:55.394125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2ecwf7cvhpdqmq54fxja49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVmNjNlN2QtZWQ0MzNiMjYtNzc1ODQ1NzMtM2RjZmI5OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:57:57.059046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2ecwss4geb2kbvaphd3cv1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk1Y2U3NzYtMzY4YTgwZDgtZGFhYmE1MDItNDkyYWI5NDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData |81.9%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> KqpBatchUpdate::Large_1 [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> TPartitionTests::SetOffset |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TopicService::OneConsumer_TheRangesOverlap [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INTERVAL64 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-STRING >> TPartitionTests::SetOffset [GOOD]
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:17.391992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:17.415171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:17.608796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:17.842123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:17.925589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:19.710464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342720241885509:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:19.710543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.120292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.171253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.218300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.310840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.361937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.472925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.521627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.626680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342724536853479:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.626747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.627194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342724536853484:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.631812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:20.651409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342724536853487:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:20.714591Z node 1 :TX_PROXY ERROR: Actor# [1:7494342724536853545:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:20.934436Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342703062014557:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:20.934506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:22.370871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:31.584900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:57:31.584926Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 18027, MsgBus: 62317 2025-04-17T17:57:36.242408Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342796128398439:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:36.245219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c77/r3tmp/tmpjbVPN1/pdisk_1.dat 2025-04-17T17:57:36.527526Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:36.553745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:36.553844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:36.556272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18027, node 2 2025-04-17T17:57:36.725934Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:36.725958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:36.725967Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:36.726095Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62317 TClient is connected to server localhost:62317 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:37.329188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:37.336916Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:37.346256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting.. ... tion type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:40.112262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:40.145704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:40.200783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:40.282196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:40.385795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342813308270042:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:40.385899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:40.386172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342813308270047:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:40.389492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:40.401691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342813308270049:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:40.489873Z node 2 :TX_PROXY ERROR: Actor# [2:7494342813308270105:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:41.246302Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342796128398439:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:41.246398Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-17T17:57:42.281030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25831, MsgBus: 18249 2025-04-17T17:57:48.132299Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342846290216370:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:48.181881Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c77/r3tmp/tmpFIpw5q/pdisk_1.dat 2025-04-17T17:57:48.382688Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:48.384329Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:48.384441Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:48.391251Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25831, node 3 2025-04-17T17:57:48.518701Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:48.518724Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:48.518733Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:48.518856Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18249 TClient is connected to server localhost:18249 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:49.124055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:49.149185Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:49.165269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:49.263427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:49.458825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:49.539097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:52.126026Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342863470087156:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:52.126126Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:52.185992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:52.220325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:52.258028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:52.315942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:52.399964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:52.478647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:52.541771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:52.647534Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342863470087835:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:52.647609Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:52.647806Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342863470087840:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:52.651089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:52.665732Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494342863470087842:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:52.754557Z node 3 :TX_PROXY ERROR: Actor# [3:7494342863470087895:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:53.114703Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494342846290216370:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:53.114761Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:54.624336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... >> TPartitionTests::OldPlanStep ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err: 2025-04-17T17:57:26.995138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:26.995694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:26.995769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001998/r3tmp/tmp61CK3g/pdisk_1.dat 2025-04-17T17:57:27.483705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:27.536175Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:27.582502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:27.582610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:27.594003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:27.685992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:27.762218Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:57:27.763162Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:57:27.770281Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:57:27.770576Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:57:27.888403Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:57:27.889161Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:57:27.889264Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:57:27.890895Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:57:27.890997Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:57:27.891066Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:57:27.891424Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:57:27.891565Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:57:27.891647Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:57:27.902762Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:57:27.944073Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:57:27.944274Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:57:27.944426Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:57:27.944479Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:57:27.944530Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:57:27.944584Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:27.945131Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:27.945175Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:27.945377Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:57:27.945439Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:57:27.945514Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:27.945544Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:57:27.945572Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:57:27.945597Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:57:27.945624Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:57:27.945643Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:57:27.945669Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:57:27.945744Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:27.945766Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:27.945826Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:57:27.946263Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:57:27.946316Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:57:27.946420Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:57:27.946641Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:57:27.946696Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:57:27.946792Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:57:27.946871Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:57:27.946910Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:57:27.946957Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:57:27.947008Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:27.947273Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:57:27.947314Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:57:27.947349Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:57:27.947381Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:27.947421Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:57:27.947450Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:57:27.947507Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:57:27.947540Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:27.947575Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:57:27.950497Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:57:27.950547Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:57:27.961295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:57:27.961369Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:27.961402Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:27.961492Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:57:27.961565Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:57:28.132199Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:28.132280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:28.132332Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:57:28.132713Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:57:28.132749Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:57:28.132865Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:28.132911Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:57:28.132958Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:57:28.133000Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:57:28.137960Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:57:28.138053Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:28.138917Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:28.138959Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:28.139021Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:2 ... 3689Z node 6 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-17T17:57:59.053716Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-17T17:57:59.053766Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2025-04-17T17:57:59.053811Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:57:59.053836Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-17T17:57:59.053877Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:57:59.053904Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:57:59.053962Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2025-04-17T17:57:59.054000Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2025-04-17T17:57:59.054039Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2025-04-17T17:57:59.054077Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:57:59.054100Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:57:59.054120Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-17T17:57:59.054140Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2025-04-17T17:57:59.054201Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:57:59.054239Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-17T17:57:59.054261Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-17T17:57:59.054379Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit 
StoreAndSendWriteOutRS 2025-04-17T17:57:59.054410Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:57:59.054431Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-17T17:57:59.054450Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-17T17:57:59.054471Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-17T17:57:59.054492Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:57:59.054511Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-17T17:57:59.054539Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-17T17:57:59.054559Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-17T17:57:59.054578Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:57:59.054599Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-17T17:57:59.054618Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:57:59.054648Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:57:59.054681Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2025-04-17T17:57:59.054820Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2025-04-17T17:57:59.054868Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:57:59.054922Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:57:59.054949Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:57:59.054993Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2025-04-17T17:57:59.055031Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:57:59.055208Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2025-04-17T17:57:59.055255Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2025-04-17T17:57:59.055301Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:57:59.055334Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:57:59.056722Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-17T17:57:59.056763Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:57:59.056798Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2025-04-17T17:57:59.056839Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T17:57:59.056893Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:57:59.056934Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:57:59.056977Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:57:59.071153Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T17:57:59.071274Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:57:59.071327Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:57:59.071395Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:593:2518] 2025-04-17T17:57:59.071453Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:59.077453Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:767:2637], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:59.077535Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:59.077583Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:766:2636], serverId# [6:767:2637], sessionId# [0:0:0] 2025-04-17T17:57:59.077733Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:765:2635], Recipient [6:666:2570]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:57:59.086580Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:770:2640], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:59.086690Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:59.086747Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:769:2639], serverId# [6:770:2640], sessionId# [0:0:0] 2025-04-17T17:57:59.087052Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:768:2638], Recipient [6:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:57:59.087186Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:57:59.087235Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:57:59.087280Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-04-17T17:57:59.087364Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-17T17:57:59.087482Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:57:59.087527Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:57:59.087565Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:57:59.087604Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-04-17T17:57:59.087654Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-17T17:57:59.087688Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:57:59.087715Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:57:59.087734Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:57:59.087755Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:57:59.087849Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:57:59.088076Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:768:2638], 1000} after executionsCount# 1 2025-04-17T17:57:59.088136Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} sends rowCount# 3, bytes# 96, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:57:59.088219Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} finished in read 2025-04-17T17:57:59.088291Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:57:59.088313Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:57:59.088335Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:57:59.088357Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:57:59.088397Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T17:57:59.088415Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:57:59.088452Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-17T17:57:59.088491Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:57:59.088586Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TListAllTopicsTests::PlainList >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |82.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.0%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |82.0%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> TPartitionTests::OldPlanStep [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace >> TopicService::DifferentConsumers_TheRangesOverlap >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> DemoTx::Scenario_3 [GOOD] |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |82.0%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::ReadFromSeveralPartitionsMigrated >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPartitionTests::ShadowPartitionCounters >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> TPQTest::TestSeveralOwners >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink >> DemoTx::Scenario_4 >> BackupRestoreS3::TestAllPrimitiveTypes-STRING [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-JSON >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TColumnShardTestSchema::ForgetAfterFail [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> WithSDK::CommitWithWrongSessionId [GOOD] >> WithSDK::CommitToPastWithWrongSessionId >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass >> BackupRestore::RestoreViewToDifferentDatabase [GOOD] >> BackupRestore::RestoreViewDependentOnAnotherView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-04-17T17:57:25.884161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:25.884663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:25.884759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00199c/r3tmp/tmpLDBxtw/pdisk_1.dat 2025-04-17T17:57:26.261680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:26.305548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:26.347776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:26.347880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:26.359261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:26.454148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:26.517732Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:57:26.518780Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:57:26.519213Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:57:26.526000Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:57:26.580331Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:57:26.581210Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:57:26.581327Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:57:26.587278Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:57:26.587407Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:57:26.587458Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:57:26.587821Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:57:26.587981Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:57:26.588061Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:57:26.602787Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:57:26.649635Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:57:26.649820Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:57:26.649931Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:57:26.649982Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:57:26.650014Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:57:26.650056Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:26.654615Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:26.654686Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:26.654999Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:57:26.655089Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:57:26.655219Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:26.655260Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:57:26.655297Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:57:26.655330Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:57:26.655364Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:57:26.655391Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:57:26.655429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:57:26.655602Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:26.655639Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:26.655702Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:57:26.656080Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:57:26.656122Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:57:26.656223Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:57:26.656461Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:57:26.656514Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:57:26.656595Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:57:26.656642Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:57:26.656675Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:57:26.656721Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:57:26.656754Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:26.657025Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:57:26.657060Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:57:26.657093Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:57:26.657133Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:26.657207Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:57:26.657236Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:57:26.657283Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:57:26.657312Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:26.657336Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:57:26.666781Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:57:26.666847Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:57:26.680631Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:57:26.680702Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:26.680731Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:26.680784Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:57:26.680853Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:57:26.835201Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:26.835254Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:26.835290Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:57:26.835601Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:57:26.835630Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:57:26.835737Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:26.835781Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:57:26.835827Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:57:26.835877Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:57:26.844936Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:57:26.845021Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:26.845921Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:26.845963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:26.846011Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:2 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:58:07.173693Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:934:2775], 1001} after executionsCount# 1 2025-04-17T17:58:07.173732Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:07.173781Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} finished in read 2025-04-17T17:58:07.173825Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:58:07.173850Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:58:07.173872Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:58:07.173894Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:58:07.173939Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-17T17:58:07.173968Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:58:07.173991Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-17T17:58:07.174020Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:58:07.174089Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:58:07.183115Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:939:2780], Recipient [7:718:2598]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.183175Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.183212Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:938:2779], serverId# [7:939:2780], sessionId# [0:0:0] 2025-04-17T17:58:07.183289Z node 7 :TX_DATASHARD TRACE: StateWork, received 
event# 269553169, Sender [7:937:2778], Recipient [7:718:2598]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:58:07.184021Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:942:2783], Recipient [7:718:2598]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.184058Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.184089Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:941:2782], serverId# [7:942:2783], sessionId# [0:0:0] 2025-04-17T17:58:07.184250Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:940:2781], Recipient [7:718:2598]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:58:07.184360Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-17T17:58:07.184391Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:58:07.184437Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-17T17:58:07.184505Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-04-17T17:58:07.184565Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:58:07.184590Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-04-17T17:58:07.184615Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:07.184637Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-17T17:58:07.184679Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-04-17T17:58:07.184705Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:58:07.184729Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:07.184749Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-04-17T17:58:07.184767Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-04-17T17:58:07.184834Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:58:07.184971Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:940:2781], 1002} after executionsCount# 1 2025-04-17T17:58:07.185009Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:07.185054Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} finished in read 2025-04-17T17:58:07.185093Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:58:07.185114Z 
node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-04-17T17:58:07.185134Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:58:07.185154Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:58:07.185190Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-17T17:58:07.185210Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:58:07.185230Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-04-17T17:58:07.185253Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-17T17:58:07.185318Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-17T17:58:07.185905Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:945:2786], Recipient [7:716:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.185941Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.185970Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:944:2785], serverId# [7:945:2786], sessionId# [0:0:0] 2025-04-17T17:58:07.186026Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:943:2784], Recipient [7:716:2596]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:58:07.199048Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:948:2789], Recipient [7:716:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.199106Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:07.199141Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:947:2788], serverId# [7:948:2789], sessionId# [0:0:0] 2025-04-17T17:58:07.199368Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:946:2787], Recipient [7:716:2596]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:58:07.199483Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-17T17:58:07.199519Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:58:07.199553Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-17T17:58:07.199596Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-04-17T17:58:07.199658Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:58:07.199684Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-04-17T17:58:07.199711Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:07.199739Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-17T17:58:07.199779Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-04-17T17:58:07.199813Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:58:07.199833Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:07.199853Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-04-17T17:58:07.199877Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-04-17T17:58:07.199950Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:58:07.200088Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:946:2787], 1003} after executionsCount# 1 2025-04-17T17:58:07.200125Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:07.200174Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} finished in read 2025-04-17T17:58:07.200216Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:58:07.200236Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-04-17T17:58:07.200254Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-04-17T17:58:07.200275Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-04-17T17:58:07.200310Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-17T17:58:07.200336Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-04-17T17:58:07.200355Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-04-17T17:58:07.200378Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-17T17:58:07.200458Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD] >> TPersQueueTest::TopicServiceReadBudget >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-17T17:55:20.414047Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1744912520414014 2025-04-17T17:55:21.131604Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342213003953097:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:21.131656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2025-04-17T17:55:21.685992Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017d3/r3tmp/tmp4KVAnY/pdisk_1.dat 2025-04-17T17:55:21.752623Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:21.752728Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:55:22.237313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:22.339439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:22.339515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:22.340556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:22.340605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:22.344696Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:22.344803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:22.345288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:22.374001Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28975, node 1 2025-04-17T17:55:22.434086Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:55:22.434110Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:55:22.622816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0017d3/r3tmp/yandexfTLp8X.tmp 2025-04-17T17:55:22.622840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0017d3/r3tmp/yandexfTLp8X.tmp 2025-04-17T17:55:22.622994Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0017d3/r3tmp/yandexfTLp8X.tmp 2025-04-17T17:55:22.623100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:22.725898Z INFO: TTestServer started on Port 11812 GrpcPort 28975 TClient is connected to server localhost:11812 PQClient connected to localhost:28975 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:23.306357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T17:55:26.132162Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342213003953097:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:26.132265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:26.542971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342234478790618:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:26.543123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:26.546282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342234478790653:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:26.551279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-04-17T17:55:26.593767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342234478790655:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-04-17T17:55:26.888610Z node 1 :TX_PROXY ERROR: Actor# [1:7494342234478790747:2704] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:26.919895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:26.932083Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342234478790760:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:26.932807Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2JmNDcxNTEtYjQxOWU1YjgtMTRiZDdjZjAtNjkyMTk1MzU=, ActorId: [1:7494342234478790614:2337], ActorState: ExecuteState, TraceId: 01js2e8b5fae766nmwbvvhvf1n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:26.932233Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342235981582696:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:26.934003Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjlkNjI2MWEtNDNmMGJkYmItN2NjYmQ1ZGEtYWM1ZDQyZQ==, ActorId: [2:7494342235981582664:2309], ActorState: ExecuteState, TraceId: 01js2e8b9381nn0yzbw84qa3b2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:26.935132Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:26.936724Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:27.112136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:27.345263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:28975", true, true, 1000); 2025-04-17T17:55:27.734400Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01js2e8c5v2dw8j8tb9tvtg4qj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA3NjgwYTUtODhlZjQ1MDctNGZiZWMyNzYtZGJhYjY3ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342238773758478:3016] === CheckClustersList. Ok 2025-04-17T17:55:34.117394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720680:0, at schemeshard: 72057594046644480 waiting... 
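[editor's annotation] The SCHEME_ERROR and "failed to list clusters" noise above is the PersQueue cluster tracker polling `/Root/PQ/Config/V2/Cluster` before the test's "Init DC" step has created and populated that table; once the UPSERT lands, CheckClustersList reports Ok. A hedged sketch of that poll-until-ready pattern follows -- the tracker's real interface is not visible in the log, so all names here are assumptions.

#include <chrono>
#include <functional>
#include <optional>
#include <string>
#include <thread>
#include <vector>

struct TClusterInfo {
    std::string Name;  // e.g. "dc1"
    bool Enabled;
};

// Hypothetical polling loop: query the config table, and if the schema is
// not there yet (SCHEME_ERROR -> std::nullopt), retry after a delay. The
// repeated "failed to list clusters" records are expected until Init DC runs.
std::vector<TClusterInfo> WaitForClusters(
        const std::function<std::optional<std::vector<TClusterInfo>>()>& listClusters,
        std::chrono::milliseconds delay = std::chrono::milliseconds(500)) {
    for (;;) {
        if (auto clusters = listClusters()) {
            return *clusters;  // "=== CheckClustersList. Ok"
        }
        std::this_thread::sleep_for(delay);
    }
}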
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:28975 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T17:55:34.316245Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:28975 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFrom ... { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-17T17:58:03.048670Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:56394 2025-04-17T17:58:03.048698Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:56394 proto=v1 topic=test-topic durationSec=0 2025-04-17T17:58:03.048711Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T17:58:03.050638Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-17T17:58:03.050809Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-17T17:58:03.050834Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T17:58:03.050850Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T17:58:03.050875Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342911539866799:2591] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T17:58:03.061056Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7494342911539866799:2591] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-17T17:58:03.259290Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710698. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-17T17:58:03.259422Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7494342911539866807:2593] TxId: 281474976710698. Ctx: { TraceId: 01js2ed41na7ga9gwnkgvs0bae, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=OTFlM2Y3MTQtNGFhMjMwYzYtNmY1MDhmYjktZWUwNDZhNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-17T17:58:03.259654Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=OTFlM2Y3MTQtNGFhMjMwYzYtNmY1MDhmYjktZWUwNDZhNzc=, ActorId: [15:7494342911539866800:2593], ActorState: ExecuteState, TraceId: 01js2ed41na7ga9gwnkgvs0bae, Create QueryResponse for error on request, msg: 2025-04-17T17:58:03.261464Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7494342911539866799:2591] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=OTFlM2Y3MTQtNGFhMjMwYzYtNmY1MDhmYjktZWUwNDZhNzc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01js2ed41pccgzpfs0g0me6baq" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-17T17:58:03.261616Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=OTFlM2Y3MTQtNGFhMjMwYzYtNmY1MDhmYjktZWUwNDZhNzc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01js2ed41pccgzpfs0g0me6baq" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-17T17:58:03.261974Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-04-17T17:58:03.264054Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=OTFlM2Y3MTQtNGFhMjMwYzYtNmY1MDhmYjktZWUwNDZhNzc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01js2ed41pccgzpfs0g0me6baq" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-17T17:58:03.264082Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Write session will restart in 2.000000s 2025-04-17T17:58:03.264200Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Write session: Do CDS request 2025-04-17T17:58:03.264236Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Do schedule cds request after 2000 ms 2025-04-17T17:58:03.456567Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720683. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:03.456701Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7494342910330263245:2497] TxId: 281474976720683. Ctx: { TraceId: 01js2ed2p29r3ewztsjx67pa6b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MjhhYzFiODEtNWExMTY4YzYtOGI3ZGFiNi1jZjNhZTUwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:03.456975Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=MjhhYzFiODEtNWExMTY4YzYtOGI3ZGFiNi1jZjNhZTUwMw==, ActorId: [16:7494342901740328590:2497], ActorState: ExecuteState, TraceId: 01js2ed2p29r3ewztsjx67pa6b, Create QueryResponse for error on request, msg: 2025-04-17T17:58:03.458956Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2ed46k3n1sdjpyjb15jwk4" } } YdbStatus: UNAVAILABLE ConsumedRu: 1022 } 2025-04-17T17:58:04.046301Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Write session: close. Timeout = 0 ms 2025-04-17T17:58:04.046363Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Write session will now close 2025-04-17T17:58:04.046435Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Write session: aborting 2025-04-17T17:58:04.047265Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-17T17:58:04.047327Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b91b0df1-a1112553-7544dd69-7afca019_0] Write session: destroy 2025-04-17T17:58:04.223977Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720685. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:04.224142Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7494342910330263329:2508] TxId: 281474976720685. Ctx: { TraceId: 01js2ed4y39yqy6f1sndv7j9wc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MjQwMjFmZTktN2ExNWJmMTItNzc4ZWMwYzYtM2QyMGE0ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:04.224377Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=MjQwMjFmZTktN2ExNWJmMTItNzc4ZWMwYzYtM2QyMGE0ZTQ=, ActorId: [16:7494342910330263326:2508], ActorState: ExecuteState, TraceId: 01js2ed4y39yqy6f1sndv7j9wc, Create QueryResponse for error on request, msg: 2025-04-17T17:58:04.231072Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2ed4yj7hw76emsxnzsky9w" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-17T17:58:04.233904Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710700. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:04.234033Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7494342915834834168:2582] TxId: 281474976710700. Ctx: { TraceId: 01js2ed3cj5rnhqw0w3vvb4zwf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NGJjOWJjMjQtOGIzYzdkZGEtMmJmZjU3ZWEtYWM4NDNjN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:04.234324Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NGJjOWJjMjQtOGIzYzdkZGEtMmJmZjU3ZWEtYWM4NDNjN2Q=, ActorId: [15:7494342907244899477:2582], ActorState: ExecuteState, TraceId: 01js2ed3cj5rnhqw0w3vvb4zwf, Create QueryResponse for error on request, msg: 2025-04-17T17:58:04.236646Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2ed4zp3fksrqwpcrhmr579" } } YdbStatus: UNAVAILABLE ConsumedRu: 1081 } 2025-04-17T17:58:04.275466Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710701. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:04.275596Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7494342915834834197:2597] TxId: 281474976710701. Ctx: { TraceId: 01js2ed51tbz8vhs9f5g2zfxby, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=M2RmNzE1NjQtYzYxYWI5NTItMWNlZGFhZDQtNDM0MDYxZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:58:04.275916Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=M2RmNzE1NjQtYzYxYWI5NTItMWNlZGFhZDQtNDM0MDYxZWU=, ActorId: [15:7494342915834834192:2597], ActorState: ExecuteState, TraceId: 01js2ed51tbz8vhs9f5g2zfxby, Create QueryResponse for error on request, msg: 2025-04-17T17:58:04.281178Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2ed51w6vw95chwzc6zg0xd" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> BackupRestore::TestAllPrimitiveTypes-STRING [GOOD] >> BackupRestore::TestAllPrimitiveTypes-JSON |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |82.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913052.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913052.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124911852.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-17T17:54:14.324610Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:54:14.558832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:54:14.584069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:54:14.584405Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:54:14.593733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:54:14.593986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:54:14.597412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:54:14.597652Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:54:14.597780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:54:14.597902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:54:14.598022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:54:14.598135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:54:14.598285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:54:14.598427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:54:14.598571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:54:14.598681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:54:14.633691Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:54:14.633873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:54:14.633940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:54:14.634118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:14.634323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:54:14.634413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:54:14.634463Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:54:14.634557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
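[editor's annotation] The WARN/INFO pairs here (continuing below) show the columnshard's init-time normalizer chain: each normalizer (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on) is registered, runs once, reports "N chunks found", and the shard then switches to the next one. The following is a hypothetical rendering of that sequential loop, not the actual TTxUpdateSchema code.

#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// One entry per normalizer in the chain the log enumerates.
struct TNormalizer {
    std::string Name;             // e.g. "Granules", "Chunks", "TablesCleaner"
    std::function<size_t()> Run;  // returns the number of chunks it touched
};

// Sequential execution: "normalizer_register" -> run -> "normalizer_finished",
// then switch to the next normalizer, exactly in registration order.
void RunNormalizers(const std::vector<TNormalizer>& chain) {
    for (const auto& n : chain) {
        std::cout << "normalizer_register: " << n.Name << "\n";
        size_t chunks = n.Run();  // "message=N chunks found"
        std::cout << "normalizer_finished: " << n.Name
                  << " (" << chunks << " chunks)\n";
    }
}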
2025-04-17T17:54:14.634620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:54:14.634669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:54:14.634704Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:54:14.634874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:14.634934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:54:14.634975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:54:14.635004Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:54:14.635093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:54:14.635152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:54:14.635195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:54:14.635229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:54:14.635299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:54:14.635340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:54:14.635374Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:54:14.635424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:54:14.635482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:54:14.635516Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:54:14.635950Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-17T17:54:14.636045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T17:54:14.636122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-17T17:54:14.636204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T17:54:14.636396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:54:14.636463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:54:14.636497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T17:54:14.636713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:54:14.636768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:54:14.636809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:54:14.636959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:54:14.637002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:54:14.637033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:54:14.637227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
imestamp;);;;); 2025-04-17T17:58:06.860598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.860637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:58:06.860673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:58:06.860839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:58:06.860952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.860987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:58:06.861071Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-04-17T17:58:06.861117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-04-17T17:58:06.861260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1898:3872];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1899:3873]->[1:1898:3872] 
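[editor's annotation] A small consistency check on the scan output above: the result batch carries a single timestamp[us] column, stored as one 64-bit value per row, so batch_size should equal num_rows times eight bytes -- which matches the logged 118936 bytes for 14867 rows, assuming no per-batch overhead is counted in that figure.

#include <cstdint>
#include <iostream>

int main() {
    // timestamp[us] is a 64-bit value per row; 14867 * 8 == 118936,
    // exactly the batch_size reported in the trace.
    std::uint64_t numRows = 14867;
    std::cout << numRows * sizeof(std::int64_t) << "\n";  // prints 118936
}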
2025-04-17T17:58:06.861384Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.861491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.861586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.861722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:58:06.861808Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.861883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.861918Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1899:3873] finished for tablet 9437184 2025-04-17T17:58:06.862606Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1898:3872];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.009},{"events":["f_processing","f_task_result"],"t":0.011},{"events":["f_ack","l_task_result"],"t":1.554},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.562}],"full":{"a":1744912685299668,"name":"_full_task","f":1744912685299668,"d_finished":0,"c":0,"l":1744912686861975,"d":1562307},"events":[{"name":"bootstrap","f":1744912685299826,"d_finished":9397,"c":1,"l":1744912685309223,"d":9397},{"a":1744912686861704,"name":"ack","f":1744912686854466,"d_finished":6435,"c":7,"l":1744912686861609,"d":6706},{"a":1744912686861689,"name":"processing","f":1744912685310868,"d_finished":785154,"c":56,"l":1744912686861611,"d":785440},{"name":"ProduceResults","f":1744912685303278,"d_finished":26835,"c":65,"l":1744912686861901,"d":26835},{"a":1744912686861904,"name":"Finish","f":1744912686861904,"d_finished":0,"c":0,"l":1744912686861975,"d":71},{"name":"task_result","f":1744912685310887,"d_finished":770104,"c":49,"l":1744912686854025,"d":770104}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T17:58:06.862715Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1898:3872];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:58:06.863262Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1898:3872];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.009},{"events":["f_processing","f_task_result"],"t":0.011},{"events":["f_ack","l_task_result"],"t":1.554},{"events":["l_ProduceResults","f_Finish"],"t":1.562},{"events":["l_ack","l_processing","l_Finish"],"t":1.563}],"full":{"a":1744912685299668,"name":"_full_task","f":1744912685299668,"d_finished":0,"c":0,"l":1744912686862765,"d":1563097},"events":[{"name":"bootstrap","f":1744912685299826,"d_finished":9397,"c":1,"l":1744912685309223,"d":9397},{"a":1744912686861704,"name":"ack","f":1744912686854466,"d_finished":6435,"c":7,"l":1744912686861609,"d":7496},{"a":1744912686861689,"name":"processing","f":1744912685310868,"d_finished":785154,"c":56,"l":1744912686861611,"d":786230},{"name":"ProduceResults","f":1744912685303278,"d_finished":26835,"c":65,"l":1744912686861901,"d":26835},{"a":1744912686861904,"name":"Finish","f":1744912686861904,"d_finished":0,"c":0,"l":1744912686862765,"d":861},{"name":"task_result","f":1744912685310887,"d_finished":770104,"c":49,"l":1744912686854025,"d":770104}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1899:3873]->[1:1898:3872] 2025-04-17T17:58:06.863349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:58:05.299215Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-04-17T17:58:06.863393Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:58:06.863638Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1899:3873];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource >> BackupRestoreS3::RestoreViewReferenceTable [GOOD] >> 
BackupRestoreS3::RestoreViewDependentOnAnotherView >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-04-17T17:57:34.728432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:34.729012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:34.729070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197e/r3tmp/tmp9SIWdh/pdisk_1.dat 2025-04-17T17:57:35.267813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.306155Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:35.352043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:35.352183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:35.368457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:35.460247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.550748Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:57:35.551851Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:57:35.552279Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:57:35.552566Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:57:35.629247Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:57:35.630042Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:57:35.630130Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:57:35.631835Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:57:35.631926Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:57:35.631983Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:57:35.632344Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:57:35.632506Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:57:35.632681Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:57:35.647531Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:57:35.715535Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:57:35.715759Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:57:35.715891Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:57:35.715928Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:57:35.715965Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:57:35.716012Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:35.716233Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:35.716286Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:35.716546Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:57:35.716636Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:57:35.716735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:35.716781Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:57:35.716831Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:57:35.716875Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:57:35.716918Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:57:35.716948Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:57:35.716991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:57:35.717153Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.717188Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.717238Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:57:35.717592Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:57:35.717640Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:57:35.717743Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:57:35.717946Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:57:35.718023Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:57:35.718100Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:57:35.718146Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:57:35.718435Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:57:35.718486Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:57:35.718520Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:35.718815Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:57:35.718848Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:57:35.718894Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:57:35.718926Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:35.718968Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:57:35.718998Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:57:35.719039Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:57:35.719071Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:35.719094Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:57:35.720367Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:57:35.720425Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:57:35.731205Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:57:35.731287Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:35.731324Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:35.731374Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:57:35.731486Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:57:35.903314Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.903377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.903418Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:57:35.903739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:57:35.903781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:57:35.903892Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:35.903958Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:57:35.904012Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:57:35.904057Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:57:35.911487Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:57:35.911576Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:35.912575Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:35.912618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:35.912676Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:3 ... chemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:58:11.219413Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:1003:2819], 1001} after executionsCount# 1 2025-04-17T17:58:11.219459Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:11.219552Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} finished in read 2025-04-17T17:58:11.219592Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:58:11.219610Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:58:11.219629Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:58:11.219647Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:58:11.219682Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:58:11.219702Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:58:11.219724Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-17T17:58:11.219776Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:58:11.219877Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:58:11.220770Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1009:2825], Recipient [6:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.220816Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.220852Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1008:2824], serverId# [6:1009:2825], sessionId# [0:0:0] 2025-04-17T17:58:11.220965Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender 
[6:1007:2823], Recipient [6:719:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:58:11.221674Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1012:2828], Recipient [6:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.221783Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.221822Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1011:2827], serverId# [6:1012:2828], sessionId# [0:0:0] 2025-04-17T17:58:11.221969Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1010:2826], Recipient [6:719:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:58:11.222071Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-17T17:58:11.222106Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:58:11.222140Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-17T17:58:11.222204Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-04-17T17:58:11.222270Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:58:11.222295Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-04-17T17:58:11.222329Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:11.222359Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-17T17:58:11.222411Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-04-17T17:58:11.222444Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:58:11.222467Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:11.222488Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-04-17T17:58:11.222509Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-04-17T17:58:11.222585Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:58:11.222746Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:1010:2826], 1002} after executionsCount# 1 2025-04-17T17:58:11.222787Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:11.222882Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} finished in read 2025-04-17T17:58:11.222924Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:58:11.222947Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-04-17T17:58:11.222983Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:58:11.223010Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:58:11.223052Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-17T17:58:11.223073Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:58:11.223108Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-04-17T17:58:11.223136Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-17T17:58:11.223213Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-17T17:58:11.223991Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1016:2832], Recipient [6:716:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.224029Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.224075Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1015:2831], serverId# [6:1016:2832], sessionId# [0:0:0] 2025-04-17T17:58:11.224154Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:1014:2830], Recipient [6:716:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-17T17:58:11.224929Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1019:2835], Recipient [6:716:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.224965Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:58:11.224998Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1018:2834], serverId# [6:1019:2835], sessionId# [0:0:0] 2025-04-17T17:58:11.225172Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1017:2833], Recipient [6:716:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-17T17:58:11.225273Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-17T17:58:11.225313Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:58:11.225344Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-17T17:58:11.225398Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-04-17T17:58:11.225461Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:58:11.225486Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-04-17T17:58:11.225509Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:11.225533Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-17T17:58:11.225574Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-04-17T17:58:11.225603Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:58:11.225623Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:11.225643Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-04-17T17:58:11.225665Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-04-17T17:58:11.225738Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-17T17:58:11.225870Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:1017:2833], 1003} after executionsCount# 1 2025-04-17T17:58:11.225925Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:11.225979Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} finished in read 2025-04-17T17:58:11.226023Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:58:11.226045Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-04-17T17:58:11.226066Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-04-17T17:58:11.226089Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-04-17T17:58:11.226128Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-17T17:58:11.226149Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-04-17T17:58:11.231978Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-04-17T17:58:11.232077Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-17T17:58:11.232178Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |82.1%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-04-17T17:57:30.161250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:30.161811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:30.161918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001987/r3tmp/tmphzRzMw/pdisk_1.dat 2025-04-17T17:57:30.558106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:30.628687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:30.673379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:30.673499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:30.685277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:30.788352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:30.845414Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:57:30.846710Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:57:30.847231Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:57:30.847524Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:57:30.901141Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:57:30.901999Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:57:30.902127Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:57:30.904023Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:57:30.904140Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:57:30.904218Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:57:30.904805Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:57:30.904980Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:57:30.905088Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:57:30.918963Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:57:30.962471Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:57:30.962710Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:57:30.962867Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:57:30.962907Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:57:30.962943Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:57:30.962990Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:30.963299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:30.963352Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:30.963674Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:57:30.963776Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:57:30.963939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:30.964001Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:57:30.964052Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:57:30.964090Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:57:30.964127Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:57:30.964163Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:57:30.964214Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:57:30.964396Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:30.964449Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:30.964495Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:57:30.964938Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:57:30.964995Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:57:30.965100Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:57:30.965389Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:57:30.965463Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:57:30.965566Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:57:30.965618Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:57:30.965664Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:57:30.965701Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:57:30.965737Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:30.966073Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:57:30.966123Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:57:30.966161Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:57:30.966332Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:30.966388Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:57:30.966424Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:57:30.966460Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:57:30.966493Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:30.966533Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:57:30.968107Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:57:30.968160Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:57:30.978991Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:57:30.979076Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:30.979145Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:30.979193Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:57:30.979294Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:57:31.155487Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:31.155576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:31.155621Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:57:31.155994Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:57:31.156032Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:57:31.156141Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:31.156187Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:57:31.156235Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:57:31.156272Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:57:31.164628Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:57:31.164730Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:31.165728Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:31.165782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:31.165838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:3 ... 75186224037888 is Executed 2025-04-17T17:58:12.569208Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:58:12.569266Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:58:12.569498Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:58:12.569560Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:58:12.569920Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:810:2667], Recipient [7:810:2667]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:58:12.569964Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:58:12.570046Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:58:12.570104Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T17:58:12.570157Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:58:12.570574Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-04-17T17:58:12.570638Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-04-17T17:58:12.570694Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.570752Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-04-17T17:58:12.570807Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-04-17T17:58:12.570858Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-04-17T17:58:12.571204Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 
72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-17T17:58:12.571317Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-17T17:58:12.571395Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-17T17:58:12.571487Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-04-17T17:58:12.571532Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.571583Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-17T17:58:12.571612Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:12.571644Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:58:12.571708Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-04-17T17:58:12.571763Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-04-17T17:58:12.571818Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-04-17T17:58:12.571870Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.571897Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:12.571920Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-17T17:58:12.571944Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-04-17T17:58:12.571985Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.572007Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-17T17:58:12.572029Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-17T17:58:12.572055Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-04-17T17:58:12.572079Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.572102Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-17T17:58:12.572127Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-17T17:58:12.572150Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-17T17:58:12.572176Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.572199Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-17T17:58:12.572221Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-17T17:58:12.572248Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-17T17:58:12.572275Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.572296Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-17T17:58:12.572319Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T17:58:12.572342Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:58:12.572376Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-17T17:58:12.572764Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-04-17T17:58:12.572863Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-04-17T17:58:12.572924Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-04-17T17:58:12.572999Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T17:58:12.573048Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:58:12.573119Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:58:12.573175Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:58:12.573606Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:58:12.573671Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-17T17:58:12.573731Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-17T17:58:12.574071Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-17T17:58:12.578246Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-17T17:58:12.578404Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-17T17:58:12.578547Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-04-17T17:58:12.578767Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-04-17T17:58:12.578843Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-04-17T17:58:12.578953Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T17:58:12.579040Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:58:12.579097Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T17:58:12.579151Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-04-17T17:58:12.579206Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:58:12.579488Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-04-17T17:58:12.579532Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-04-17T17:58:12.579590Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:58:12.579638Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:58:12.579673Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-17T17:58:12.579708Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:58:12.579770Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-04-17T17:58:12.579832Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:58:12.579890Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T17:58:12.579967Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:58:12.580016Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:58:12.580852Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-17T17:58:12.581518Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:58:12.581588Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-17T17:58:12.581664Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:803:2661] 2025-04-17T17:58:12.581728Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] Test command err: 2025-04-17T17:58:01.248882Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:01.248963Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:01.307961Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:179:2194] 2025-04-17T17:58:01.326783Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-17T17:58:01.000000Z 2025-04-17T17:58:01.326886Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\250\220\232\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\250\220\232\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\250\220\232\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-17T17:58:01.816209Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\250\220\232\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-17T17:58:02.865205Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:02.865266Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:02.879807Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] 2025-04-17T17:58:02.881355Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:02.000000Z 2025-04-17T17:58:02.881410Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] 2025-04-17T17:58:04.283120Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:04.283188Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:04.328917Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:179:2194] 2025-04-17T17:58:04.329781Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:179:2194] 2025-04-17T17:58:04.334324Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. 
Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-04-17T17:58:04.334458Z node 3 :PERSQUEUE INFO: new Cookie owner1|aa544213-27cc8ee9-b7cd3e9c-b6f5a756_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-04-17T17:58:04.718635Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-04-17T17:58:05.332023Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:05.332088Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:05.365282Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:179:2194] 2025-04-17T17:58:05.369039Z node 4 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:58:05.369117Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [4:179:2194] 2025-04-17T17:58:05.730746Z node 4 :PERSQUEUE INFO: new Cookie owner1|f1f2ea0-ad71dd34-fdb466a-20f53dfe_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. Body keys: 0, head: 10, src id info: 1 2025-04-17T17:58:09.927333Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:09.927406Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:09.951558Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [5:179:2194] 2025-04-17T17:58:09.952955Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:58:09.953013Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [5:179:2194] 2025-04-17T17:58:10.294639Z node 5 :PERSQUEUE INFO: new Cookie owner1|7608a5c3-3efb040e-32b70156-24087ab8_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. 
Body keys: 0, head: 10, src id info: 1 >> KqpBatchUpdate::NotIdempotent >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD] >> TPersQueueTest::Cache |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TPersQueueTest::SchemeOperationsTest [GOOD] >> TPersQueueTest::SchemeOperationFirstClassCitizen |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.1%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> BackupRestoreS3::TestAllPrimitiveTypes-JSON [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-JSON_DOCUMENT |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TPQRBDescribes::PartitionLocations [GOOD] >> VDiskBalancing::TestRandom_Block42 >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList >> KqpBatchDelete::Large_2 |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.1%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> KqpBatchUpdate::ManyPartitions_3 >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSetClientOffset >> DataShardVolatile::DistributedUpsertRestartAfterPlan [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] >> KqpBatchUpdate::Large_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-04-17T17:57:56.052357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342878760694693:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:56.052425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cb/r3tmp/tmpP63yfb/pdisk_1.dat 2025-04-17T17:57:56.596649Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:56.698116Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:56.718560Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:57.149377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:57.503293Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:57.520980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:57.521064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:57.527350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:57.527415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:57.540949Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:57.541098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:57.544528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22179, node 1 2025-04-17T17:57:58.055882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015cb/r3tmp/yandexy4KQQh.tmp 2025-04-17T17:57:58.055906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0015cb/r3tmp/yandexy4KQQh.tmp 2025-04-17T17:57:58.056027Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015cb/r3tmp/yandexy4KQQh.tmp 2025-04-17T17:57:58.056125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:58.151215Z INFO: TTestServer started on Port 24986 GrpcPort 22179 TClient is connected to server localhost:24986 PQClient connected to localhost:22179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:58.925344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:57:59.115348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-17T17:58:01.071958Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342878760694693:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:01.086935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:04.592122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342912920549574:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:04.592311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:04.592863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342912920549586:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:04.604821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:58:04.672238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342912920549588:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:58:04.759481Z node 2 :TX_PROXY ERROR: Actor# [2:7494342912920549615:2184] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:05.532941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:05.541222Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342913120434095:2356], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:58:05.542437Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjdmYjk0OTUtNWVmMTM4OS1iN2M0OTkzMS03OGIzNzY5, ActorId: [1:7494342913120434064:2347], ActorState: ExecuteState, TraceId: 01js2ed5qb91hffsn65dk51ex8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:58:05.544276Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:58:05.545950Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342912920549622:2325], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:58:05.547065Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2M1MWI1ZmUtYjg0MTc3MDItMzU5MDM2ODItMmJiMDYxYTE=, ActorId: [2:7494342912920549572:2316], ActorState: ExecuteState, TraceId: 01js2ed5h98sjse1egj7xsn120, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:58:05.547358Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:58:05.665028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:05.869786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:58:06.318904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2ed6yq11vmx21ytgnadc7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc1Yjc2NmItNWY2MDcyYjMtNmE1MTBkZWMtZWI0NDMxODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342921710369148:3117] === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic with 5 partitions CallPersQueueGRPC request to localhost:22179 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:22179 2025-04-17T17:58:12.131543Z node 1 :PERSQUEUE INFO: proxy answer MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { ... 
200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" name rt3.dc1--topic version0 2025-04-17T17:58:12.736095Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:12.736126Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-17T17:58:12.736139Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTED 2025-04-17T17:58:12.736152Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-04-17T17:58:12.736168Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:58:12.736198Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-04-17T17:58:12.736219Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:58:12.736257Z node 2 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-17T17:58:12.736265Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:58:12.736294Z node 2 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-17T17:58:12.736315Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710676 to the list for deletion 2025-04-17T17:58:12.736344Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState DELETING 2025-04-17T17:58:12.736378Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710676 2025-04-17T17:58:12.736446Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:12.740488Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:12.740517Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-17T17:58:12.740531Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State DELETING 2025-04-17T17:58:12.740548Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710676 TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1744912692551 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... 
(TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1744912692551 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 3 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 4 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 5 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic" name rt3.dc1--topic version1 CallPersQueueGRPC request to localhost:22179 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-17T17:58:13.310290Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:22179 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-17T17:58:13.845665Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" 
PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1744912692551 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... (TRUNCATED) 2025-04-17T17:58:13.855183Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7494342951775140992:3582] connected; active server actors: 1 2025-04-17T17:58:13.855230Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-04-17T17:58:13.855243Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 2, Generation 1 2025-04-17T17:58:13.855253Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 2, Generation 1 2025-04-17T17:58:13.855264Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-17T17:58:13.855274Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 2, Generation 1 2025-04-17T17:58:13.855835Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7494342951775140994:3584] connected; active server actors: 1 2025-04-17T17:58:13.855860Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-04-17T17:58:13.855870Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 2, Generation 1 2025-04-17T17:58:13.855878Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 2, Generation 1 2025-04-17T17:58:13.855889Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-17T17:58:13.855898Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 2, Generation 1 response: Status: true Locations { PartitionId: 0 NodeId: 2 Generation: 1 } Locations { PartitionId: 1 NodeId: 2 Generation: 1 } Locations { PartitionId: 2 NodeId: 2 Generation: 1 } Locations { PartitionId: 3 NodeId: 2 Generation: 1 } Locations { PartitionId: 4 NodeId: 2 Generation: 1 } 2025-04-17T17:58:13.856611Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7494342951775140996:3586] connected; active server actors: 1 2025-04-17T17:58:13.856646Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 response: Status: true Locations { PartitionId: 3 NodeId: 2 Generation: 1 } 2025-04-17T17:58:13.862592Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7494342951775140998:3588] connected; active server actors: 1 response: Status: false >> TopicService::UnknownConsumer >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition >> BackupRestore::RestoreViewDependentOnAnotherView [FAIL] >> BackupRestore::RestoreKesusResources >> BackupRestore::TestAllPrimitiveTypes-JSON [GOOD] >> BackupRestore::TestAllPrimitiveTypes-JSON_DOCUMENT >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 7168, MsgBus: 2922 2025-04-17T17:55:02.586609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342131312928454:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:02.592398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000da9/r3tmp/tmphZfnzN/pdisk_1.dat 2025-04-17T17:55:02.987545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:03.039274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:03.039420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:03.041261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7168, node 1 2025-04-17T17:55:03.090922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:03.090945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:03.090954Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:03.091084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2922 TClient is connected to server localhost:2922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:03.649415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.677990Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:03.688800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:03.869176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:04.073911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:55:04.160016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.873340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342144197832104:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:05.873454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.187310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.216693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.243110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.270977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.305949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.339497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.407122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:06.498913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342148492800070:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.499006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342148492800075:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.499006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.502932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:06.512802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342148492800077:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:06.602080Z node 1 :TX_PROXY ERROR: Actor# [1:7494342148492800134:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:07.587519Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342131312928454:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:07.587679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:07.661906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:55:08.575197Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2e7skad3ke25ah6g8j7s45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVjMTQ5NTMtNjJlZjZjNS1lOWM1Y2ZiNC01NmY2Zjc4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.581178Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2e7ska9tk6h8abbsa67tjk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYxZDI1NjItOWQxZjFhYmMtMTYwMzQ2NzItZTE5YzFjNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.626391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2e7smsc39m6wvs75rr6eyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTEzZjYwYTEtZDljMTBjNTItZjdlYzJlYi03MWM0MDhhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.627087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2e7sms6r65haf4a7d869xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI5ZjM0YzItNjkyOWNhMjQtMzdiYTVmN2ItOTc2YmZmYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.627595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01js2e7smt545gdqg41wy6q48x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JhOGIyODMtZTUxMTkxZDEtMzQ4ZGRjZTAtYmMxOTg5M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.628086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01js2e7smvc8y4thce03a67wef, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNmZTlhMzAtOTJkYWNkNzAtODQ3ODc3YWUtMjY5ZTU3ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.628583Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01js2e7smvb4n3p4bbqfgp0mgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc2NDMyN2ItNDU3ODU4NGMtODYzZDg1M2EtOThiMGEzOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.629220Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. 
Ctx: { TraceId: 01js2e7skad3ke25ah6g8j7s45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVjMTQ5NTMtNjJlZjZjNS1lOWM1Y2ZiNC01NmY2Zjc4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.629986Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2e7sm470w6tvrp8xe27t4h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE2ZjMzNGEtYTg5ZWQ2MWQtYmJjZDBlNzQtY2VhYWNlN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.643336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01js2e7skt9mt64wv0e4jddfaa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIzNTAxNzQtYjFlMjZkZTAtZWRmMjFmMzMtYjEwNzZkOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:08.651842Z node 1 :KQP_EXECUTER ERROR: Tx ... /Root, SessionId: ydb://session/3?node_id=3&id=ZDljNzgxOTMtNjY1ZGViMDEtOWJhNDMxZjktYzQwNWM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:05.964055Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719477. Ctx: { TraceId: 01js2ed6vs4h6zdb62163z2wsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM1ZTA2ZS0yY2M3MzBhYi0yNWM4YzE0LWZlYzU1ZmNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:05.973121Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719478. Ctx: { TraceId: 01js2ed6vs4h6zdb62163z2wsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM1ZTA2ZS0yY2M3MzBhYi0yNWM4YzE0LWZlYzU1ZmNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:05.978764Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719479. Ctx: { TraceId: 01js2ed6we9fymw6f3trgv0frf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBhMjQ4OWMtNWU4YTFlNzQtZWRkOTI1YzMtN2NiZTI1NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:05.989250Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719480. Ctx: { TraceId: 01js2ed6we9fymw6f3trgv0frf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBhMjQ4OWMtNWU4YTFlNzQtZWRkOTI1YzMtN2NiZTI1NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.029456Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719481. Ctx: { TraceId: 01js2ed6xaee08q959b2xqt6cp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDljNzgxOTMtNjY1ZGViMDEtOWJhNDMxZjktYzQwNWM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.035173Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719482. Ctx: { TraceId: 01js2ed6xj6htw6yeeg7e2xb8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzYzODg0MWUtMmFhM2NiMjEtNjg2ZmE1MGQtYzU4MDZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.039721Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719483. Ctx: { TraceId: 01js2ed6xj0gtvkgezp5rfsfmh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTYxNjZjZTYtZjQ1NzU5MzQtYWI5Zjc5YzgtMzIyNGE5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:58:06.050296Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719484. Ctx: { TraceId: 01js2ed6xndbvbr0fgc6bcvsvk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDIwN2MxMjItYTg3OTg4MjktZTQzNWViYjktMzA0MTgyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.052576Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719485. Ctx: { TraceId: 01js2ed6xaee08q959b2xqt6cp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDljNzgxOTMtNjY1ZGViMDEtOWJhNDMxZjktYzQwNWM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.053611Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719486. Ctx: { TraceId: 01js2ed6xj6htw6yeeg7e2xb8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzYzODg0MWUtMmFhM2NiMjEtNjg2ZmE1MGQtYzU4MDZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.068959Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719487. Ctx: { TraceId: 01js2ed6xj0gtvkgezp5rfsfmh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTYxNjZjZTYtZjQ1NzU5MzQtYWI5Zjc5YzgtMzIyNGE5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.074049Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719488. Ctx: { TraceId: 01js2ed6xndbvbr0fgc6bcvsvk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDIwN2MxMjItYTg3OTg4MjktZTQzNWViYjktMzA0MTgyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.076623Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719489. Ctx: { TraceId: 01js2ed6ye75ytvp081ck9ygnh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM1ZTA2ZS0yY2M3MzBhYi0yNWM4YzE0LWZlYzU1ZmNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.087551Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719490. Ctx: { TraceId: 01js2ed6xj0gtvkgezp5rfsfmh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTYxNjZjZTYtZjQ1NzU5MzQtYWI5Zjc5YzgtMzIyNGE5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.100397Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719491. Ctx: { TraceId: 01js2ed6ye75ytvp081ck9ygnh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM1ZTA2ZS0yY2M3MzBhYi0yNWM4YzE0LWZlYzU1ZmNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.112044Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719492. Ctx: { TraceId: 01js2ed70x7z55h4wxjpe6w3yd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=M2EyMzQ5MzYtNWRlMGQ2Zi1kMjcwMjU4ZS1lOWVkYjcwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.132704Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719493. Ctx: { TraceId: 01js2ed71f2x12g62r5kdqy6sv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzYzODg0MWUtMmFhM2NiMjEtNjg2ZmE1MGQtYzU4MDZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.144954Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719494. 
Ctx: { TraceId: 01js2ed71nb48jhvys5e8qrvtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDljNzgxOTMtNjY1ZGViMDEtOWJhNDMxZjktYzQwNWM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.146614Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719495. Ctx: { TraceId: 01js2ed71f2x12g62r5kdqy6sv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzYzODg0MWUtMmFhM2NiMjEtNjg2ZmE1MGQtYzU4MDZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.147972Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719496. Ctx: { TraceId: 01js2ed71j9pge399eqbywxjdg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDIwN2MxMjItYTg3OTg4MjktZTQzNWViYjktMzA0MTgyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.156429Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719497. Ctx: { TraceId: 01js2ed71j9pge399eqbywxjdg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDIwN2MxMjItYTg3OTg4MjktZTQzNWViYjktMzA0MTgyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.166549Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719498. Ctx: { TraceId: 01js2ed7288s8sfzd52maxe0p8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBhMjQ4OWMtNWU4YTFlNzQtZWRkOTI1YzMtN2NiZTI1NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.186333Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719499. Ctx: { TraceId: 01js2ed72q99htwev9dtpcngez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM1ZTA2ZS0yY2M3MzBhYi0yNWM4YzE0LWZlYzU1ZmNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.186539Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719500. Ctx: { TraceId: 01js2ed7288s8sfzd52maxe0p8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBhMjQ4OWMtNWU4YTFlNzQtZWRkOTI1YzMtN2NiZTI1NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.197695Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719501. Ctx: { TraceId: 01js2ed735cb1hy4hb2admqb63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDljNzgxOTMtNjY1ZGViMDEtOWJhNDMxZjktYzQwNWM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.209440Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719502. Ctx: { TraceId: 01js2ed73953qj1e7ebgbttygv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=M2EyMzQ5MzYtNWRlMGQ2Zi1kMjcwMjU4ZS1lOWVkYjcwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.214853Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719503. Ctx: { TraceId: 01js2ed73991pfkd4q059t0hy7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzYzODg0MWUtMmFhM2NiMjEtNjg2ZmE1MGQtYzU4MDZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.218733Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719504. 
Ctx: { TraceId: 01js2ed735cb1hy4hb2admqb63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDljNzgxOTMtNjY1ZGViMDEtOWJhNDMxZjktYzQwNWM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.225544Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719505. Ctx: { TraceId: 01js2ed73991pfkd4q059t0hy7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzYzODg0MWUtMmFhM2NiMjEtNjg2ZmE1MGQtYzU4MDZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:58:06.235847Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719506. Ctx: { TraceId: 01js2ed74adxskke5ren0zz5j1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDIwN2MxMjItYTg3OTg4MjktZTQzNWViYjktMzA0MTgyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.243016Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719507. Ctx: { TraceId: 01js2ed74q1sejk2v1b1yn0wh5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM1ZTA2ZS0yY2M3MzBhYi0yNWM4YzE0LWZlYzU1ZmNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.244228Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719508. Ctx: { TraceId: 01js2ed74adxskke5ren0zz5j1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDIwN2MxMjItYTg3OTg4MjktZTQzNWViYjktMzA0MTgyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:58:06.248259Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719509. Ctx: { TraceId: 01js2ed74adxskke5ren0zz5j1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDIwN2MxMjItYTg3OTg4MjktZTQzNWViYjktMzA0MTgyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:06.252176Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719510. Ctx: { TraceId: 01js2ed74q1sejk2v1b1yn0wh5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM1ZTA2ZS0yY2M3MzBhYi0yNWM4YzE0LWZlYzU1ZmNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-17T17:58:06.267136Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976719511. Ctx: { TraceId: 01js2ed75b3bxcx3b7625e4nbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBhMjQ4OWMtNWU4YTFlNzQtZWRkOTI1YzMtN2NiZTI1NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |82.1%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> KqpBatchUpdate::NotIdempotent [GOOD] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |82.2%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> KqpStats::SysViewCancelled [GOOD] >> TPersQueueTest::SetupLockSession [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] Test command err: 2025-04-17T17:56:43.766472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342566636473148:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:43.774311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpFtOefy/pdisk_1.dat 2025-04-17T17:56:44.713087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:44.808483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:44.808591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:44.818828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:44.828032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29391, node 1 2025-04-17T17:56:45.127045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:45.127074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:45.127081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:45.134514Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27721 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:45.920301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:48.770382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342566636473148:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:48.770452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:49.131389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342592406278010:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:49.131497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:49.740379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:56:50.110209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342596701245497:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:50.110310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:50.110689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342596701245502:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:50.118887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T17:56:50.162523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342596701245504:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-17T17:56:50.239474Z node 1 :TX_PROXY ERROR: Actor# [1:7494342596701245601:2821] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:56:50.427603Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2eawsx9890yy95swvpcavh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU5ZDljOS1kZWRkNWRjNi00ZDMyZDMwZS1lNTRkMDI4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T17:56:50.653053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2eax4xa6sw1512nxrmqr5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU5ZDljOS1kZWRkNWRjNi00ZDMyZDMwZS1lNTRkMDI4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Backup "/Root" to "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/"
Create temporary directory "/Root/~backup_20250417T175650" in database
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table"
Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250417T175650/table" }
Backup table "/Root/~backup_20250417T175650/table" to "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table"
Describe table "/Root/~backup_20250417T175650/table"
Write scheme into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table/scheme.pb"
Describe table "/Root/table"
Write ACL into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table/permissions.pb"
Read table "/Root/~backup_20250417T175650/table"
Write data into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table/data_00.csv"
Drop table "/Root/~backup_20250417T175650/table"
Remove temporary directory "/Root/~backup_20250417T175650" in database
2025-04-17T17:56:51.300259Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found
2025-04-17T17:56:51.321758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480
Backup completed successfully
2025-04-17T17:56:51.411367Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
Restore "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/" to "/Root"
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/" to "/Root"
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table"
Read scheme from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table/scheme.pb"
Restore table "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table" to "/Root/table"
2025-04-17T17:56:51.491407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
Created "/Root/table"
Read data from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table/data_00.csv"
2025-04-17T17:56:51.809712Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2eay95bk8k3nqey58bzqdf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM0ZWM2NmQtYzIxODRlZS02MzhkMTdiYy0xMTU2YWUxZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Restore ACL "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table" to "/Root/table"
Read ACL from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpvyjNSy/table/permissions.pb"
2025-04-17T17:56:51.860444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480
Restore completed successfully
2025-04-17T17:56:52.061815Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2eayhpax4z4brh1e51mjv8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU5ZDljOS1kZWRkNWRjNi00ZDMyZDMwZS1lNTRkMDI4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T17:56:53.999855Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342611484875822:2074];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:56:53.999917Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpjZByyZ/pdisk_1.dat
2025-04-17T17:56:54.465746Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:56:54.489796Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:56:54.494591Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:56:54.511977Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23455, node 4
2025-04-17T17:56:54.814230Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:56:54.814251Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:56:54.814259Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:56:54.814400Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: ...
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:58:08.843311Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-04-17T17:58:08.876760Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
Backup "/Root" to "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/"
Create temporary directory "/Root/~backup_20250417T175809" in database
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable"
Backup external table "/Root/externalTable" to "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable"
Write external table into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable/create_external_table.sql"
Write ACL into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable/permissions.pb"
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource"
Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource"
Write external data source into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource/create_external_data_source.sql"
Write ACL into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource/permissions.pb"
Remove temporary directory "/Root/~backup_20250417T175809" in database
2025-04-17T17:58:09.240497Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715663:0, at schemeshard: 72057594046644480
Backup completed successfully
Restore "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/" to "/Root"
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/" to "/Root"
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource"
Restore external data source "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource" to "/Root/externalDataSource"
Read external data source from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource/create_external_data_source.sql"
2025-04-17T17:58:09.536304Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715666:0, at schemeshard: 72057594046644480
Created "/Root/externalDataSource"
Restore ACL "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource" to "/Root/externalDataSource"
Read ACL from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalDataSource/permissions.pb"
2025-04-17T17:58:09.585707Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable"
Restore external table "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable" to "/Root/externalTable"
Read external table from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable/create_external_table.sql"
2025-04-17T17:58:09.711315Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
Created "/Root/externalTable"
Restore ACL "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable" to "/Root/externalTable"
Read ACL from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpklREL0/externalTable/permissions.pb"
2025-04-17T17:58:09.787372Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480
Restore completed successfully
2025-04-17T17:58:12.664486Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7494342947472521002:2074];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:58:12.669295Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpgXUDOm/pdisk_1.dat
2025-04-17T17:58:13.267062Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:58:13.273436Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:58:13.273570Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:58:13.330720Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7751, node 22
2025-04-17T17:58:13.553308Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:58:13.553345Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:58:13.553361Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:58:13.553576Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T17:58:14.025254Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:17.662328Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7494342947472521002:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:17.662424Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:19.639095Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7494342977537293235:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:19.639256Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:19.650458Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7494342977537293247:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:19.657379Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:58:19.727904Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [22:7494342977537293249:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T17:58:19.804174Z node 22 :TX_PROXY ERROR: Actor# [22:7494342977537293322:2698] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:58:19.880343Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715660:0, at schemeshard: 72057594046644480
Backup "/Root" to "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/"
Create temporary directory "/Root/~backup_20250417T175819" in database
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource"
Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource"
Write external data source into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource/create_external_data_source.sql"
Write ACL into "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource/permissions.pb"
Remove temporary directory "/Root/~backup_20250417T175819" in database
2025-04-17T17:58:20.102248Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715662:0, at schemeshard: 72057594046644480
Backup completed successfully
Restore "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/" to "/Root"
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/" to "/Root"
Process "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource"
Restore external data source "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource" to "/Root/externalDataSource"
Read external data source from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource/create_external_data_source.sql"
2025-04-17T17:58:20.344315Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715664:0, at schemeshard: 72057594046644480
Created "/Root/externalDataSource"
Restore ACL "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource" to "/Root/externalDataSource"
Read ACL from "/home/runner/.ya/build/build_root/6873/0008c0/r3tmp/tmpcxH5dx/externalDataSource/permissions.pb"
2025-04-17T17:58:20.451432Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480
Restore completed successfully
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: Trying to start YDB, gRPC: 26332, MsgBus: 28175
2025-04-17T17:58:15.568755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342962878179440:2079];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:58:15.571933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c63/r3tmp/tmpHQsBQI/pdisk_1.dat 2025-04-17T17:58:15.974071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:16.005348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:16.005448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26332, node 1 2025-04-17T17:58:16.007841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:58:16.146752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:16.146773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:16.146779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:16.146879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28175 TClient is connected to server localhost:28175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:16.886160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:16.923241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:58:16.940603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:17.190636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:17.453460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:17.573321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
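
The compile errors reported further down in this test's output ("Batch update is only supported for idempotent updates", code 1040) are the expected result of KqpBatchUpdate::NotIdempotent: BATCH UPDATE executes shard by shard and must be safe to re-apply, so the compiler rejects SET expressions that read the column being written. A minimal sketch of the distinction, assuming a hypothetical kv table with integer key and value columns:

    -- idempotent: the new value does not depend on the current one; accepted
    BATCH UPDATE kv SET value = 0 WHERE key > 10;

    -- not idempotent: re-applying the update would add 1 again; rejected at compile time
    BATCH UPDATE kv SET value = value + 1 WHERE key > 10;
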
2025-04-17T17:58:20.293016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342984353017681:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:20.293141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:20.569434Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342962878179440:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:20.569494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:20.653893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:20.685049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:20.723919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:20.766249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:20.813240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:20.858385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:20.901603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:21.007517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342988647985644:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:21.007579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:21.007899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342988647985649:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:21.012377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:21.029580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342988647985651:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:21.087820Z node 1 :TX_PROXY ERROR: Actor# [1:7494342988647985709:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:22.305322Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342992942953353:2522], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2025-04-17T17:58:22.306862Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2FhMTRhNTctZWY1OWU3NWQtZmNlY2JlZGQtZTNkODhkZmI=, ActorId: [1:7494342992942953344:2516], ActorState: ExecuteState, TraceId: 01js2edpsb9175ekv9npvxpn4g, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:58:22.334430Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342992942953381:2525], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2025-04-17T17:58:22.335683Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2FhMTRhNTctZWY1OWU3NWQtZmNlY2JlZGQtZTNkODhkZmI=, ActorId: [1:7494342992942953344:2516], ActorState: ExecuteState, TraceId: 01js2edpv8bey0e59q0m2pm3fh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T17:58:22.361239Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342992942953385:2527], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:51: Error: Batch update is only supported for idempotent updates. 2025-04-17T17:58:22.362367Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2FhMTRhNTctZWY1OWU3NWQtZmNlY2JlZGQtZTNkODhkZmI=, ActorId: [1:7494342992942953344:2516], ActorState: ExecuteState, TraceId: 01js2edpw7etrt0za6s2gjp9we, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> DemoTx::Scenario_4 [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] Test command err: 2025-04-17T17:57:35.072282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:57:35.072872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:35.072940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001928/r3tmp/tmpf4mP1P/pdisk_1.dat 2025-04-17T17:57:35.472677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.518031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:35.562057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:35.562162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:35.573775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:35.664943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:35.713592Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:57:35.714777Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:57:35.715259Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:57:35.715539Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:57:35.761405Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:57:35.762261Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:57:35.762399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:57:35.764195Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:57:35.764292Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:57:35.764359Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:57:35.764801Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:57:35.764949Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:57:35.765058Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:57:35.776330Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:57:35.815243Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:57:35.815495Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:57:35.815647Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:57:35.815683Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:57:35.815725Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:57:35.815775Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:35.816035Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:35.816090Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:35.816426Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:57:35.816546Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:57:35.816692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:35.816747Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:57:35.816799Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:57:35.816837Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:57:35.816869Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:57:35.816901Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:57:35.816943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:57:35.817082Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.817123Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.817167Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:57:35.817611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:57:35.817653Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:57:35.817766Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:57:35.818075Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:57:35.818144Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:57:35.822384Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:57:35.822483Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:57:35.822529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:57:35.822569Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
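
The TRACE lines above show the schema transaction moving through the datashard's execution-unit pipeline (CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan, then PlanQueue once the plan step arrives); the test then drives a volatile distributed write spanning two shards, which is what the later TEvReadSet exchange between tablets 72075186224037888 and 72075186224037889 resolves. A minimal YQL sketch of that kind of setup; the table name, columns, and partition count here are hypothetical, not taken from the log:

    CREATE TABLE `/Root/table-1` (
        key Uint64,
        value Int32,
        PRIMARY KEY (key)
    ) WITH (
        UNIFORM_PARTITIONS = 2   -- two datashards, so one transaction can span both
    );

    -- one UPSERT hitting both shards forces a distributed (volatile) commit
    UPSERT INTO `/Root/table-1` (key, value) VALUES (1, 2), (11, 12);
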
2025-04-17T17:57:35.822607Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:35.822970Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:57:35.823015Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:57:35.823058Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:57:35.823091Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:35.823146Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:57:35.823176Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:57:35.823216Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:57:35.823250Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:35.823275Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:57:35.824890Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:57:35.824947Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:57:35.835713Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:57:35.835810Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:57:35.835843Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:57:35.835910Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:57:35.836011Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:57:35.995744Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.995813Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:57:35.995879Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:57:35.996262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:57:35.996294Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:57:35.996449Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:57:35.996497Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:57:35.996536Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:57:35.996571Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:57:36.000436Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:57:36.000522Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:57:36.001459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:36.001504Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:57:36.001555Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:57:3 ... p# 1501 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:58:21.865936Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T17:58:21.865981Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715660 2025-04-17T17:58:21.866031Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1501 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T17:58:21.866132Z node 7 :TX_DATASHARD DEBUG: Complete volatile write [1501 : 281474976715660] from 72075186224037889 at tablet 72075186224037889 send result to client [7:917:2664] 2025-04-17T17:58:21.866898Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:58:21.867108Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T17:58:21.867634Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:58:21.867952Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:58:21.869368Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T17:58:21.869508Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:698:2586], Recipient [7:700:2588]: {TEvReadSet step# 1501 txid# 281474976715660 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-17T17:58:21.869550Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:58:21.869611Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 2025-04-17T17:58:21.870032Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T17:58:21.870304Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[7:700:2588], Recipient [7:698:2586]: {TEvReadSet step# 1501 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-17T17:58:21.870332Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T17:58:21.870357Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715660 ... validating table 2025-04-17T17:58:23.015172Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2edpeba53swytgejp6y831, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjA3MzEwNi0zNmJkYjhhNS00MjE0OWRmNS1mYTI2ZGQyOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:23.059487Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:970:2783], Recipient [7:698:2586]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1501 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T17:58:23.059722Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:58:23.059824Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-17T17:58:23.059963Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:58:23.060019Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:58:23.060083Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:23.060142Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:58:23.060206Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-17T17:58:23.060268Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:58:23.060299Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:23.060326Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:58:23.060353Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:58:23.060522Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1501 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T17:58:23.060831Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1501/18446744073709551615 2025-04-17T17:58:23.060912Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:970:2783], 0} after executionsCount# 1 2025-04-17T17:58:23.060982Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:970:2783], 0} sends rowCount# 1, bytes# 64, quota rows left# 1000, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:23.061083Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:970:2783], 0} finished in read 2025-04-17T17:58:23.061177Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:58:23.061210Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:58:23.061237Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:58:23.061265Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-17T17:58:23.061319Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:58:23.061342Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:58:23.061371Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-17T17:58:23.061421Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:58:23.061563Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T17:58:23.076170Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:970:2783], Recipient [7:698:2586]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T17:58:23.076280Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T17:58:23.079818Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:970:2783], Recipient [7:700:2588]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1501 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-04-17T17:58:23.080081Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T17:58:23.080144Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-04-17T17:58:23.080219Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:58:23.080248Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-04-17T17:58:23.080270Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:23.080296Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:58:23.080339Z node 7 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-04-17T17:58:23.080367Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:58:23.080408Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:23.080440Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T17:58:23.080479Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-04-17T17:58:23.080598Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { 
OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1501 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-04-17T17:58:23.080813Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1501/18446744073709551615 2025-04-17T17:58:23.080855Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:970:2783], 1} after executionsCount# 1 2025-04-17T17:58:23.080890Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:970:2783], 1} sends rowCount# 1, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:58:23.080943Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:970:2783], 1} finished in read 2025-04-17T17:58:23.080981Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:58:23.081006Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:58:23.081025Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:58:23.081043Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:58:23.081076Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-17T17:58:23.081092Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:58:23.081110Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-17T17:58:23.081138Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:58:23.081220Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:58:23.081883Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:970:2783], Recipient [7:700:2588]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-17T17:58:23.081927Z node 7 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 2 } items { int32_value: 3 } items { int32_value: 4 } }, { items { int32_value: 11 } items { int32_value: 12 } items { int32_value: 12 } items { int32_value: 12 } } >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewCancelled [GOOD] Test command err: Trying to start YDB, gRPC: 16671, MsgBus: 11956 2025-04-17T17:57:03.161474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342652173540253:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:03.161998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b2c/r3tmp/tmpBqSIkg/pdisk_1.dat 2025-04-17T17:57:03.868134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:03.868223Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:03.882546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:03.884564Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16671, node 1 2025-04-17T17:57:04.122889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:04.122909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:04.122918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:04.123047Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11956 TClient is connected to server localhost:11956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:04.833533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:04.923647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:05.159087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:05.444241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:05.534992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:08.073832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342673648378503:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:08.073939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:08.162458Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342652173540253:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:08.162526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:08.594570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:08.684899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:08.735662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:08.819513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:08.877178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:08.984639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:09.056340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:09.215577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342677943346480:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:09.215719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:09.216133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342677943346487:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:09.220604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:09.245033Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:57:09.245972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342677943346489:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:09.351436Z node 1 :TX_PROXY ERROR: Actor# [1:7494342677943346544:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"E-Size":"No estimate","PlanNodeId":5,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/TwoShard","Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":4264,"Max":4264,"Min":4264},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":5,"Min":1}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/KeyValue","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":16,"Max":16,"Min":16}}],"BaseTimeMs":1744912631890,"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":814,"Max":814,"Min":814},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32},"WaitTimeUs":{"Count":1,"Sum":4318,"Max":4318,"Min":4318},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":6}}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node 
Type":"Aggregate","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"DurationUs":{"Count":1,"Sum":7000,"Max":7000,"Min":7000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"BaseTimeMs":1744912631890,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT" ... ","QueuedTimeUs":446},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-SelfCpu":1.426,"A-Cpu":1.426,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.357,"A-Cpu":1.783,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26607, MsgBus: 26786 2025-04-17T17:57:21.143137Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342730330873343:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:21.173963Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b2c/r3tmp/tmpbTcmU1/pdisk_1.dat 2025-04-17T17:57:21.415355Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:21.439919Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:21.440013Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:21.441473Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26607, node 3 2025-04-17T17:57:21.533803Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:21.533826Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:21.533834Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:21.533950Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26786 TClient is connected to server localhost:26786 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:22.110033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:22.119412Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:57:22.123980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:22.219176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:22.407535Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:22.483954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:24.980222Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342743215776875:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:24.980317Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:25.035502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.070238Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.116142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.201512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.278980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.334930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.393962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:25.510973Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342747510744845:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:25.511067Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:25.511325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494342747510744850:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:25.514234Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:25.523453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494342747510744852:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:57:25.597832Z node 3 :TX_PROXY ERROR: Actor# [3:7494342747510744907:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:26.143089Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494342730330873343:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:26.143168Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:26.892082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:36.414308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:57:36.414341Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:21.103954Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912701057, txId: 281474976715673] shutting down 2025-04-17T17:58:21.878237Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494342988028915573:2843] TxId: 281474976715675. Ctx: { TraceId: 01js2edpae5bqpc7xvya9wtzn6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTUzMDEwZmEtYzUxZDAwOTktNTY3OWQzZWUtOWQwZWRmYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 102ms during execution } ] 2025-04-17T17:58:21.878813Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494342988028915595:2865], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YTUzMDEwZmEtYzUxZDAwOTktNTY3OWQzZWUtOWQwZWRmYWI=. TraceId : 01js2edpae5bqpc7xvya9wtzn6. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494342988028915573:2843], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:58:21.879245Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494342988028915604:2873], TxId: 281474976715675, task: 9. Ctx: { SessionId : ydb://session/3?node_id=3&id=YTUzMDEwZmEtYzUxZDAwOTktNTY3OWQzZWUtOWQwZWRmYWI=. TraceId : 01js2edpae5bqpc7xvya9wtzn6. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494342988028915573:2843], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:58:21.879793Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTUzMDEwZmEtYzUxZDAwOTktNTY3OWQzZWUtOWQwZWRmYWI=, ActorId: [3:7494342983733948218:2843], ActorState: ExecuteState, TraceId: 01js2edpae5bqpc7xvya9wtzn6, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 102ms during execution 2025-04-17T17:58:22.223893Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912702199, txId: 281474976715677] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 7194543883963062867 2025-04-17T17:58:18.696141Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.231911s 2025-04-17T17:58:18.696272Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.242815s SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2025-04-17T17:58:22.676836Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |82.2%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> BackupRestoreS3::RestoreViewDependentOnAnotherView [FAIL] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal >> DemoTx::Scenario_5 >> BackupRestoreS3::TestAllPrimitiveTypes-JSON_DOCUMENT [GOOD] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 16971992622747176944 2025-04-17T17:58:18.695218Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.240795s 2025-04-17T17:58:18.695321Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.240933s SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-17T17:58:23.248242Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6332:830] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> KqpBatchDelete::Returning >> 
TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError |82.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD] >> TPersQueueTest::Init |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> KqpBatchDelete::SimpleOnePartition |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> BackupRestore::TestAllPrimitiveTypes-JSON_DOCUMENT [GOOD] >> KqpBatchDelete::ManyPartitions_1 >> TPersQueueTest::TopicServiceReadBudget [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites >> KqpBatchDelete::ManyPartitions_3 >> TopicService::UnknownConsumer [GOOD] >> TPQTabletTests::UpdateConfig_1 >> TPartitionTests::CorrectRange_Commit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllPrimitiveTypes-JSON_DOCUMENT [GOOD] Test command err: 2025-04-17T17:56:46.708470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342577521499495:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:46.708600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp0j3zhL/pdisk_1.dat 2025-04-17T17:56:47.723465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:47.892582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:47.934825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:47.934908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:47.961474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20970, node 1 2025-04-17T17:56:48.523462Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-04-17T17:56:48.523481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:48.523504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:48.523630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:49.221628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:51.714288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342577521499495:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:51.714351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:52.516616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342603291304333:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:52.516780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:52.517240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342603291304345:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:52.521037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:56:52.560906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342603291304347:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:56:52.645479Z node 1 :TX_PROXY ERROR: Actor# [1:7494342603291304422:2701] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:53.026457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:56:53.327681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2eazv72ktznqqdmj5g8s9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkMzdmOTMtOWQ1ZTlhMTUtZDA5YWY0OTgtYmIzNWMwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:53.537657Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2eazz712kapancha3zbe60, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkMzdmOTMtOWQ1ZTlhMTUtZDA5YWY0OTgtYmIzNWMwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/"Create temporary directory "/Root/~backup_20250417T175653" in databaseProcess "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table"Copy tables: { src: "/Root/Int8Table", dst: "/Root/~backup_20250417T175653/Int8Table" }Backup table "/Root/~backup_20250417T175653/Int8Table" to "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table"Describe table "/Root/~backup_20250417T175653/Int8Table"Write scheme into "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table/scheme.pb"Describe table "/Root/Int8Table"Write ACL into "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table/permissions.pb"Read table "/Root/~backup_20250417T175653/Int8Table"Write data into "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table/data_00.csv"Drop table "/Root/~backup_20250417T175653/Int8Table"Remove temporary directory "/Root/~backup_20250417T175653" in database2025-04-17T17:56:54.159298Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T17:56:54.175687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/" to "/Root"2025-04-17T17:56:54.298530Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table"Read scheme from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table" to "/Root/Int8Table"2025-04-17T17:56:54.328626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710670:0, at schemeshard: 72057594046644480 Created "/Root/Int8Table"Read data from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table/data_00.csv"2025-04-17T17:56:54.699053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2eb14t3hz56afeep8bbew4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQyMDlhOTctNzBiOTg4ZTItMjA1Njc4Mi03ZGU0NDM2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table" to "/Root/Int8Table"Read ACL from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp89snAv/Int8Table/permissions.pb"2025-04-17T17:56:54.742645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T17:56:54.906748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2eb1ba40xrpjkmvf1q1wdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkMzdmOTMtOWQ1ZTlhMTUtZDA5YWY0OTgtYmIzNWMwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:56:57.777935Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342625199313225:2222];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmpm3xRL6/pdisk_1.dat 2025-04-17T17:56:57.907238Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:58.079230Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:58.120972Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:58.121067Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:58.143596Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21652, node 4 2025-04-17T17:56:58.430891Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:58.430909Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:58.430918Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:58.431033Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: ... 
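The backup narration above shows the fixed per-table layout the dump produces: a scheme.pb with the table description, a permissions.pb with the ACL, and a data_00.csv with the rows, all under a directory named after the table. A minimal sketch, using only std::filesystem, that checks a dump directory for those three artifacts; the directory path is a placeholder, not taken from the log.

#include <filesystem>
#include <iostream>

namespace fs = std::filesystem;

int main() {
    const fs::path table_dir = "/tmp/backup/Int8Table";  // hypothetical location
    // The three file names below are exactly the ones the backup log writes.
    for (const char* name : {"scheme.pb", "permissions.pb", "data_00.csv"}) {
        const bool ok = fs::exists(table_dir / name);
        std::cout << (table_dir / name).string()
                  << (ok ? " present" : " MISSING") << '\n';
    }
    return 0;
}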
sonTable"2025-04-17T17:58:19.728405Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Created "/Root/JsonTable"Read data from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp2xps1a/JsonTable/data_00.csv"2025-04-17T17:58:20.103193Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2edmhz8r0sssgnr1m8jmgv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDI3ZjkzYWItOTZkZjI2NmYtNWZmMDRhMGUtMjQwY2E1OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp2xps1a/JsonTable" to "/Root/JsonTable"Read ACL from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp2xps1a/JsonTable/permissions.pb"2025-04-17T17:58:20.169536Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T17:58:20.649194Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2edms950h44zxdkq382gpp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NWRkZjRjYzAtNWM2NzUwZjktNDlkMjhmNi0zYTQwMzdiNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:23.560209Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494342997937849562:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:23.560353Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmpbklb23/pdisk_1.dat 2025-04-17T17:58:24.119226Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:24.179980Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:24.180083Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:24.184775Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19970, node 10 2025-04-17T17:58:24.520834Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:24.520858Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:24.520866Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:24.521017Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31407 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:25.155269Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:28.559803Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494342997937849562:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:28.559888Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:29.200193Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494343023707654350:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:29.200313Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:29.204604Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494343023707654362:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:29.214955Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:58:29.282502Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494343023707654364:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:58:29.378658Z node 10 :TX_PROXY ERROR: Actor# [10:7494343023707654434:2695] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:29.445431Z node 10 :TX_PROXY ERROR: Actor# [10:7494343023707654467:2708] txid# 281474976710660, issues: { message: "Column Key has wrong key type JsonDocument" severity: 1 } 2025-04-17T17:58:29.446933Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YWQ0MTUyNjAtOGM0ZWJjZjctN2EyZjQxMzItYmUwYjZiZjg=, ActorId: [10:7494343023707654329:2337], ActorState: ExecuteState, TraceId: 01js2edxjb5q2m4y3nba9vz5a7, Create QueryResponse for error on request, msg: 2025-04-17T17:58:29.450854Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2edxjb5q2m4y3nba9vz5a7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWQ0MTUyNjAtOGM0ZWJjZjctN2EyZjQxMzItYmUwYjZiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:29.490604Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:29.799503Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2edy1yfypdm92mwje5crjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWQ0MTUyNjAtOGM0ZWJjZjctN2EyZjQxMzItYmUwYjZiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:58:30.198574Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2edy69d4sf61ndcvr2fvmn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWQ0MTUyNjAtOGM0ZWJjZjctN2EyZjQxMzItYmUwYjZiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/"Create temporary directory "/Root/~backup_20250417T175830" in databaseProcess "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable"Copy tables: { src: "/Root/JsonDocumentTable", dst: "/Root/~backup_20250417T175830/JsonDocumentTable" }Backup table "/Root/~backup_20250417T175830/JsonDocumentTable" to "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable"Describe table "/Root/~backup_20250417T175830/JsonDocumentTable"Write scheme into "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable/scheme.pb"Describe table "/Root/JsonDocumentTable"Write ACL into "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable/permissions.pb"Read table "/Root/~backup_20250417T175830/JsonDocumentTable"Write data into "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable/data_00.csv"Drop table "/Root/~backup_20250417T175830/JsonDocumentTable"Remove temporary directory "/Root/~backup_20250417T175830" in database2025-04-17T17:58:30.989747Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-04-17T17:58:31.007239Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable"Read scheme from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable/scheme.pb"2025-04-17T17:58:31.202485Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found Restore table "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable" to "/Root/JsonDocumentTable"2025-04-17T17:58:31.233569Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Created "/Root/JsonDocumentTable"Read data from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable/data_00.csv"2025-04-17T17:58:31.542405Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2edzrj0ayqk1z5bnjhtk27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YzBjYWFjOGUtMWJhZDE0ZDYtMzk1ZjVjZDgtOGJlMmQ2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable" to "/Root/JsonDocumentTable"Read ACL from "/home/runner/.ya/build/build_root/6873/0005ea/r3tmp/tmp82KF1Q/JsonDocumentTable/permissions.pb"2025-04-17T17:58:31.574009Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T17:58:31.787792Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710675. 
Ctx: { TraceId: 01js2edzx54cyhcftefcfr49mr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWQ0MTUyNjAtOGM0ZWJjZjctN2EyZjQxMzItYmUwYjZiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> WithSDK::CommitToPastWithWrongSessionId [GOOD] >> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues >> BackupRestore::RestoreKesusResources [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPQTest::TestPartitionTotalQuota >> TPQTabletTests::UpdateConfig_2 |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> TPartitionTests::CorrectRange_Multiple_Consumers >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> TopicService::UnknownTopic >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPQTest::DirectReadBadSessionOrPipe >> TPartitionTests::ConflictingTxIsAborted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> WithSDK::CommitToPastWithWrongSessionId [GOOD] Test command err: 2025-04-17T17:56:35.683430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342530602505681:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:35.683873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:36.086825Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018af/r3tmp/tmpSesgFY/pdisk_1.dat 2025-04-17T17:56:36.496458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:36.496611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:36.507372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:36.514657Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1724, node 1 2025-04-17T17:56:36.834845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0018af/r3tmp/yandexdDjuYe.tmp 2025-04-17T17:56:36.834870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0018af/r3tmp/yandexdDjuYe.tmp 2025-04-17T17:56:36.835033Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/6873/0018af/r3tmp/yandexdDjuYe.tmp 2025-04-17T17:56:36.835170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:36.926160Z INFO: TTestServer started on Port 19647 GrpcPort 1724 TClient is connected to server localhost:19647 PQClient connected to localhost:1724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:37.550000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:37.626800Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:37.650077Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:56:37.655853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:56:37.939717Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:56:40.643049Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342530602505681:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:40.643124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:41.440919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342556372310163:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:41.441060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342556372310153:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:41.441356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:41.445601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:56:41.459315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342556372310167:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:56:41.546854Z node 1 :TX_PROXY ERROR: Actor# [1:7494342556372310232:2457] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:41.860378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:41.896148Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342556372310240:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:41.902494Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2NkNTM4ZDMtMjM2MmVmMmItYjAzYmRiNDYtODRiNzk2ZjU=, ActorId: [1:7494342556372310136:2339], ActorState: ExecuteState, TraceId: 01js2eamadc1dach5jwj4y2rh7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:41.904680Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:41.923325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:42.076328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494342560667277832:2642] === CheckClustersList. 
Ok 2025-04-17T17:56:47.754021Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:56:47.769757Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494342582142114544:2774], Recipient [1:7494342534897473248:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:47.769803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:47.769829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:56:47.770521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7494342539192440813:2318], Recipient [1:7494342534897473248:2183]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 3 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 1744912597669 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T17:56:47.770565Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T17:56:47.770647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T17:56:47.770780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T17:56:47.770842Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099993s, queue# 1 2025-04-17T17:56:47.800681Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:56:47.801965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494342582142114556:2779], Recipient [1:7494342534897473248:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:47.801996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:47.802007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:56:47.802037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494342582142114552:2776], Recipient [1:7494342534897473248:2183]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72 ... 
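The PERSQUEUE TRACE lines that follow repeat one idle housekeeping tick per partition: a TEvPQ::TEvUpdateAvailableSize event triggers a check for expired data ("Have 0 items to delete old stuff" / "delete all stuff"), then ProcessReserveRequests and AnswerCurrentWrites. A toy sketch of that cycle; every name here is illustrative, not YDB source.

#include <cstddef>
#include <cstdio>
#include <vector>

struct Partition {
    std::vector<int> pending_responses;       // stands in for queued write acks

    // Mirrors the handler order visible in the StateIdle traces below.
    void OnUpdateAvailableSize() {
        std::printf("Have %zu items to delete old stuff\n", DeleteOldStuff());
        std::printf("Have %zu items to delete all stuff\n", DeleteAllStuff());
        ProcessReserveRequests();
        AnswerCurrentWrites();
    }
    std::size_t DeleteOldStuff() { return 0; } // nothing expired in the traces
    std::size_t DeleteAllStuff() { return 0; }
    void ProcessReserveRequests() {}           // no reserve requests queued
    void AnswerCurrentWrites() {
        std::printf("Responses.size()=%zu\n", pending_responses.size());
    }
};

int main() {
    Partition p;
    p.OnUpdateAvailableSize();                 // one idle tick, as in the log
    return 0;
}

With nothing expired and no queued writes every tick is a no-op, which is why the same four lines recur in the trace.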
node 6 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [6:7494343037658767781:2833]: Got sessions 2025-04-17T17:58:33.546273Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7494343037658767784:2835] disconnected; active server actors: 1 2025-04-17T17:58:33.546301Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7494343037658767784:2835] disconnected no session 2025-04-17T17:58:33.546399Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494343037658767785:3416], Recipient [6:7494342999004060812:2474]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:58:33.546425Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:58:33.546445Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T17:58:33.546472Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7494343037658767783:2834] destroyed 2025-04-17T17:58:33.578929Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494342999004060812:2474], Partition 0, Sender [0:0:0], Recipient [6:7494342999004060873:2478], Cookie: 0 2025-04-17T17:58:33.579017Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494342999004060873:2478]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.579049Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.579103Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:33.579170Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:33.579197Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:33.579254Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:33.682171Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494342999004060812:2474], Partition 0, Sender [0:0:0], Recipient [6:7494342999004060873:2478], Cookie: 0 2025-04-17T17:58:33.682280Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494342999004060873:2478]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.682310Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.682361Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:33.682444Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:33.682471Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:33.682503Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:58:33.790407Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494342999004060812:2474], Partition 0, Sender [0:0:0], Recipient [6:7494342999004060873:2478], Cookie: 0 2025-04-17T17:58:33.790495Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494342999004060873:2478]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.790530Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.790578Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:33.790645Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:33.790672Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:33.790720Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:33.802600Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7494342934579550227:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:58:33.802641Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:58:33.802697Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7494342934579550227:2147], Recipient [6:7494342934579550227:2147]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:58:33.802719Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:58:33.890766Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494342999004060812:2474], Partition 0, Sender [0:0:0], Recipient [6:7494342999004060873:2478], Cookie: 0 2025-04-17T17:58:33.890851Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494342999004060873:2478]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.890888Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:33.890955Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:33.891027Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:33.891067Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:33.891102Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:58:33.958771Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-17T17:58:33.958827Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 9 2025-04-17T17:58:33.960979Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271187975, Sender [6:7494342999004060814:2475], Recipient [6:7494342999004060812:2474]: NKikimrPQ.TStatus GetStatForAllConsumers: true 2025-04-17T17:58:33.961023Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2025-04-17T17:58:33.961047Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2025-04-17T17:58:33.961135Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188536, Sender [6:7494342999004060814:2475], Recipient [6:7494342999004060812:2474]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-04-17T17:58:33.961157Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvSubDomainStatus 2025-04-17T17:58:33.961253Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [6:7494342999004060812:2474], Partition 0, Sender [6:7494342999004060812:2474], Recipient [6:7494342999004060873:2478], Cookie: 0 2025-04-17T17:58:33.961309Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [6:7494342999004060812:2474], Recipient [6:7494342999004060873:2478]: NKikimr::TEvPQ::TEvPartitionStatus 2025-04-17T17:58:33.961343Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-04-17T17:58:33.961572Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-17T17:58:33.961721Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [6:7494342999004060812:2474], Partition 0, Sender [6:7494342999004060812:2474], Recipient [6:7494342999004060873:2478], Cookie: 0 2025-04-17T17:58:33.961767Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [6:7494342999004060812:2474], Recipient [6:7494342999004060873:2478]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-04-17T17:58:33.961787Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-04-17T17:58:33.961836Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7494342999004060873:2478], Recipient [6:7494342999004060812:2474]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:58:33.961860Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-17T17:58:33.962279Z node 6 :PERSQUEUE_READ_BALANCER 
DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 9 DataSize: 0 UsedReserveSize: 0 2025-04-17T17:58:33.962411Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. PendingUpdates size 1 2025-04-17T17:58:33.963000Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [6:7494342999004060814:2475], Recipient [6:7494342934579550227:2147]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 9 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-04-17T17:58:33.963036Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-17T17:58:33.963059Z node 6 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-04-17T17:58:33.963080Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099996s, queue# 1 2025-04-17T17:58:33.964315Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [6:7494342999004060812:2474], Partition 0, Sender [6:7494342999004060876:2480], Recipient [6:7494342999004060873:2478], Cookie: 0 2025-04-17T17:58:33.964373Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [6:7494342999004060876:2480], Recipient [6:7494342999004060873:2478]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-17T17:58:33.964398Z node 6 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-17T17:58:33.985135Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:7494342999004060814:2475], Recipient [6:7494342934579550227:2147]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2025-04-17T17:58:33.985184Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme >> KqpBatchDelete::Returning [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 16605, MsgBus: 11071 2025-04-17T17:58:29.008216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343017139825298:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:29.008618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c2c/r3tmp/tmpwy6DLi/pdisk_1.dat 2025-04-17T17:58:30.058456Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:30.059453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:58:30.100169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:30.100281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T17:58:30.112006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16605, node 1 2025-04-17T17:58:30.314677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:30.314698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:30.314705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:30.314796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11071 TClient is connected to server localhost:11071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:31.403616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:31.451167Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:58:31.472744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:31.674460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:31.993240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:32.120200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:58:34.002399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343017139825298:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:34.002665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:34.688421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343042909630711:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:34.688569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:35.184390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:35.248320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:35.308649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:35.390406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:35.419842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:35.492386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:35.534313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:35.635524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343047204598682:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:35.635637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:35.635978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343047204598688:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:35.639939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:35.659196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343047204598690:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:35.759894Z node 1 :TX_PROXY ERROR: Actor# [1:7494343047204598749:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:37.308083Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343055794533703:2527], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with RETURNING 2025-04-17T17:58:37.308393Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWZiMDAyNjItZGFmY2Y3OTItYzljNTQ0ZDctNzNjZmE0ZWM=, ActorId: [1:7494343055794533694:2521], ActorState: ExecuteState, TraceId: 01js2ee5f3fbm0rpx0ysm0jsk4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete >> TPersQueueTest::WriteNonExistingPartition [GOOD] >> TPersQueueTest::WriteNonExistingTopic |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TPersQueueTest::Cache [GOOD] >> TPersQueueTest::CacheHead >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches >> KqpBatchUpdate::HasTxControl >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD] >> TPersQueueTest::StreamReadCommitAndStatusMsgs >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] >> TListAllTopicsTests::ListLimitAndPaging [GOOD] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> DemoTx::Scenario_5 [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TPQTabletTests::DropTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 2288, msgbus: 15046 2025-04-17T17:53:57.014609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341852551832204:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:57.014656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001377/r3tmp/tmpeCqd5T/pdisk_1.dat 2025-04-17T17:53:58.128897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:58.128981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:58.130583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:53:58.141480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-17T17:53:58.155477Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2288, node 1 2025-04-17T17:53:58.450822Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:58.450842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:58.450849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:58.450953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15046 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T17:53:58.790417Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] Handle TEvNavigate describe path dc-1 2025-04-17T17:53:58.790463Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800261:2451] HANDLE EvNavigateScheme dc-1 2025-04-17T17:53:58.790767Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800261:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:53:58.842295Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800261:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } TClient::Ls response: 2025-04-17T17:53:58.852733Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800261:2451] Handle TEvDescribeSchemeResult Forward to# [1:7494341856846800260:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:53:58.910338Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] Handle TEvProposeTransaction 2025-04-17T17:53:58.910363Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T17:53:58.910480Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494341856846800269:2458] 2025-04-17T17:53:59.062111Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T17:53:59.062412Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:53:59.062434Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T17:53:59.062498Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:53:59.062723Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:53:59.062898Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T17:53:59.062946Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T17:53:59.063087Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T17:53:59.063755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:53:59.067541Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T17:53:59.067588Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341856846800269:2458] txid# 281474976710657 SEND to# [1:7494341856846800268:2457] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T17:53:59.078920Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] Handle TEvProposeTransaction 2025-04-17T17:53:59.078944Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T17:53:59.078972Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494341861141767606:2495] 2025-04-17T17:53:59.080541Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T17:53:59.080617Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:53:59.080630Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T17:53:59.080693Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:53:59.080938Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:53:59.081044Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:53:59.081078Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T17:53:59.081209Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T17:53:59.081617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:53:59.083604Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T17:53:59.083657Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767606:2495] txid# 281474976710658 SEND to# [1:7494341861141767605:2494] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T17:53:59.120378Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] Handle TEvProposeTransaction 2025-04-17T17:53:59.120409Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T17:53:59.120446Z node 1 :TX_PROXY DEBUG: actor# [1:7494341852551832441:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494341861141767624:2505] 2025-04-17T17:53:59.122119Z node 1 :TX_PROXY DEBUG: Actor# [1:7494341861141767624:2505] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin* ... e 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546434:2586] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:57:23.305405Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546434:2586] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:57:23.305572Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546434:2586] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:57:23.305633Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546434:2586] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-17T17:57:23.305794Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546434:2586] txid# 281474976710660 HANDLE EvClientConnected 2025-04-17T17:57:23.310430Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546434:2586] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T17:57:23.310605Z node 59 :TX_PROXY ERROR: Actor# [59:7494342739876546434:2586] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:23.310644Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546434:2586] txid# 281474976710660 SEND to# [59:7494342739876546362:2342] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-17T17:57:23.336502Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] Handle TEvProposeTransaction 2025-04-17T17:57:23.336538Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] TxId# 281474976710661 ProcessProposeTransaction 2025-04-17T17:57:23.336588Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7494342739876546458:2598] 2025-04-17T17:57:23.339376Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46560" 2025-04-17T17:57:23.339452Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7494342739876546458:2598] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T17:57:23.339479Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T17:57:23.339536Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:57:23.339872Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:57:23.339987Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:57:23.340048Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T17:57:23.340208Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T17:57:23.349931Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-17T17:57:23.350003Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546458:2598] txid# 281474976710661 SEND to# [59:7494342739876546457:2334] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T17:57:23.459406Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] Handle TEvProposeTransaction 2025-04-17T17:57:23.459453Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T17:57:23.459527Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7494342739876546478:2612] 2025-04-17T17:57:23.462610Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46576" 2025-04-17T17:57:23.462699Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T17:57:23.462726Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T17:57:23.462790Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:57:23.463153Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 
ErrorCount# 0 2025-04-17T17:57:23.463283Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:57:23.463350Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T17:57:23.463521Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T17:57:23.464150Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:23.471254Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-17T17:57:23.471332Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546478:2612] txid# 281474976710662 SEND to# [59:7494342739876546477:2348] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T17:57:23.568579Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] Handle TEvProposeTransaction 2025-04-17T17:57:23.568614Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T17:57:23.568661Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7494342739876546520:2636] 2025-04-17T17:57:23.571062Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NTg0MywiaWF0IjoxNzQ0OTEyNjQzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.meu_zm4sz0EO9GndSQ-Yt8qzi-2lQ-4SP2aL22uzzW6bQ_c61dYmPLsWyXJemhdyDG7cKe_tcVtSMl-CP5HJO6Rn5fL3Qjua0tPzwlr_10mNlhFlw32uqfknKR5z2GHVRnfrycLRlueVFPciFuuuq_8D2S3gVHfOuz5uF9nwQX5nHWX5hDOeJ0lU-3FGSD-YQbwgmHLwwaKOs9JwQ0BuqcNTEz0MxF_KgUb5AV45aosCWuPq3sWYHFJdYOu5wgwlHqmi3rBevJCo71PiX7EqyuHx1HF-rvOaQtZ4Jfpbwwz9izzifX4FRxzyUd5teY_FOSVIbQ-5H_Z0QOgo9hzmBg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NTg0MywiaWF0IjoxNzQ0OTEyNjQzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46596" 2025-04-17T17:57:23.571128Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T17:57:23.571150Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-17T17:57:23.571414Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 
2025-04-17T17:57:23.571459Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T17:57:23.571505Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:57:23.571746Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:57:23.571774Z node 59 :TX_PROXY ERROR: Actor# [59:7494342739876546520:2636] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-04-17T17:57:23.571862Z node 59 :TX_PROXY ERROR: Actor# [59:7494342739876546520:2636] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-17T17:57:23.571892Z node 59 :TX_PROXY DEBUG: Actor# [59:7494342739876546520:2636] txid# 281474976710663 SEND to# [59:7494342739876546519:2354] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T17:57:23.572224Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=M2VmOTliNTUtZGZkYjgzMy04OWJjMmM3Yi0zZWRjZTZiYg==, ActorId: [59:7494342739876546505:2354], ActorState: ExecuteState, TraceId: 01js2ebxenbgm34vaespff6vc6, Create QueryResponse for error on request, msg: 2025-04-17T17:57:23.572600Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] Handle TEvExecuteKqpTransaction 2025-04-17T17:57:23.572616Z node 59 :TX_PROXY DEBUG: actor# [59:7494342714106741696:2113] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] Test command err: 2025-04-17T17:58:35.465062Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:35.478479Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:35.478799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:58:35.478852Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:58:35.478885Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:58:35.478931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:58:35.478979Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:35.479075Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:35.527254Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-17T17:58:35.527399Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:58:35.616354Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:35.667424Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:35.667576Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:35.669245Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:35.669411Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:35.669488Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:35.669991Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:35.698701Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-17T17:58:35.699678Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:58:35.699748Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-17T17:58:35.699811Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:35.700424Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:35.700565Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-17T17:58:35.700610Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:58:35.700765Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:35.700809Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:35.700850Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:35.700883Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:35.700924Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:58:35.700955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:58:35.700995Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:35.701029Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:35.701114Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:35.701294Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:58:35.701735Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:35.701997Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-04-17T17:58:35.718894Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-04-17T17:58:35.718981Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-04-17T17:58:35.719036Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:35.722050Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:35.722240Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-17T17:58:35.722280Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-17T17:58:35.722398Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:35.722441Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:35.722472Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:58:35.722523Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:58:35.722552Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-17T17:58:35.722579Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-17T17:58:35.722603Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:35.722649Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:58:35.722716Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:35.722851Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:58:35.747424Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:35.747622Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:35.748046Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2208], now have 1 active actors on pipe 2025-04-17T17:58:35.748874Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:202:2210], now have 1 active actors on pipe 2025-04-17T17:58:35.749851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2025-04-17T17:58:35.749992Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-17T17:58:35.750043Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-17T17:58:35.750082Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-17T17:58:35.750121Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-17T17:58:35.750159Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-17T17:58:35.766647Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 230 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" Ydb ... 
mAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.451014Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.452054Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.453078Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.454103Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.455156Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.456228Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.457273Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.465750Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.466236Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 1 size 172682 from pos 0 cbcount 1 2025-04-17T17:58:43.467803Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.468789Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.469722Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.474748Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.475764Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.476761Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.477738Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.478763Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.479758Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.480886Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.482052Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.483105Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.484110Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.485117Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-17T17:58:43.485561Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 
count 1 size 172682 from pos 0 cbcount 1 2025-04-17T17:58:43.488174Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2025-04-17T17:58:43.524700Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:444:2420], now have 1 active actors on pipe 2025-04-17T17:58:43.524856Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:58:43.524915Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T17:58:43.524977Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 1 size 102400 offset: 14 2025-04-17T17:58:43.525092Z node 10 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-04-17T17:58:43.525241Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:43.525292Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:43.525339Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000002_00000_0000000001_00014, d0000000000_00000000000000000002_00000_0000000001_00014] 2025-04-17T17:58:43.525382Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000003_00000_0000000001_00014, d0000000000_00000000000000000003_00000_0000000001_00014] 2025-04-17T17:58:43.525420Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:43.525464Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:43.525511Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:43.525556Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:43.525637Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 45, Error new GetOwnership request needed for owner 2025-04-17T17:58:43.525679Z node 10 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-04-17T17:58:43.525738Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:58:43.525786Z node 10 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000002_00000_0000000001_00014(+) to d0000000000_00000000000000000002_00000_0000000001_00014(+) 2025-04-17T17:58:43.525819Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Delete blobs from d0000000000_00000000000000000003_00000_0000000001_00014(+) to d0000000000_00000000000000000003_00000_0000000001_00014(+) 2025-04-17T17:58:43.532706Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:43.537765Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:456:2431], now have 1 active actors on pipe 2025-04-17T17:58:43.537896Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:58:43.537968Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T17:58:43.538104Z node 10 :PERSQUEUE INFO: new Cookie default|1e6f8889-e9f6c880-10792ec1-c76d6f21_14 generated for partition 0 topic 'topic' owner default 2025-04-17T17:58:43.538243Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T17:58:43.538327Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:58:43.538718Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:458:2433], now have 1 active actors on pipe 2025-04-17T17:58:43.538799Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:58:43.538835Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T17:58:43.538881Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 0 size 102400 offset: 14 2025-04-17T17:58:43.538978Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 102409. Cookie: 15 2025-04-17T17:58:43.802921Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 15 2025-04-17T17:58:43.803208Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 15 partNo 0 2025-04-17T17:58:43.804167Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 15 partNo 0 FormedBlobsCount 0 NewHead: Offset 14 PartNo 0 PackedSize 102472 count 1 nextOffset 15 batches 1 2025-04-17T17:58:43.805084Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 14,1 HeadOffset 14 endOffset 14 curOffset 15 d0000000000_00000000000000000014_00000_0000000001_00000| size 102462 WTime 2103 2025-04-17T17:58:43.805399Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:43.805452Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:43.805499Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:58:43.805546Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:43.805588Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-04-17T17:58:43.805619Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000014_00000_0000000001_00000| 2025-04-17T17:58:43.805637Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:43.805667Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:43.805709Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:43.805810Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:58:43.805951Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 14 partNo 0 count 1 size 102462 2025-04-17T17:58:43.810460Z node 10 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 14 count 1 size 102462 actorID [10:134:2160] 2025-04-17T17:58:43.810572Z node 10 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 14 partno 0 count 1 parts 0 size 102462 2025-04-17T17:58:43.810659Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102409 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:43.810711Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:58:43.810770Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 15, partNo: 0, Offset: 14 is stored on disk 2025-04-17T17:58:43.811065Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:58:43.811521Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:469:2442], now have 1 active actors on pipe >> TPQTabletTests::DropTablet [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMeteringSink::FlushStorageV1 [GOOD] Test command err: 2025-04-17T17:58:03.002762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342906585513597:2164];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:03.003492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:58:03.535974Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015be/r3tmp/tmp2XfVY9/pdisk_1.dat 2025-04-17T17:58:03.972127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:03.992786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:03.992899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:03.997823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20985, node 1 2025-04-17T17:58:04.195449Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015be/r3tmp/yandexK1eDdU.tmp 2025-04-17T17:58:04.195475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0015be/r3tmp/yandexK1eDdU.tmp 2025-04-17T17:58:04.195611Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015be/r3tmp/yandexK1eDdU.tmp 2025-04-17T17:58:04.195724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:58:04.362705Z INFO: TTestServer started on Port 20645 GrpcPort 20985 TClient is connected to server localhost:20645 PQClient connected to localhost:20985 WaitRootIsUp 'Root'... 
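The write path traced above follows a clear ownership protocol: a write for SourceId 'sourceid1' SeqNo 15 is first rejected with "new GetOwnership request needed for owner", the client then acquires a fresh owner cookie ("new Cookie default|1e6f8889-..._14 generated"), and the retried write (messageNo restarting from 0) is quota-checked, packed into a blob at offset 14, cached in L1/L2, and acknowledged as "stored on disk". A small sketch of that client-side retry loop, under the assumption that ownership is a per-session cookie and messageNo is a per-session counter; all names here are invented for illustration, not YDB's actual API:

#include <optional>
#include <string>
#include <iostream>

struct WriteResult { bool ok; std::string error; };

struct PartitionWriter {
    std::optional<std::string> OwnerCookie;
    int MessageNo = 0;

    // The tablet rejects writes whose owner session is unknown or stale.
    WriteResult Write(long seqNo) {
        (void)seqNo;
        if (!OwnerCookie)
            return {false, "new GetOwnership request needed for owner"};
        ++MessageNo;  // counts messages within one owner session
        return {true, {}};
    }

    // Re-acquire ownership: the tablet generates a fresh cookie
    // (cf. "new Cookie default|..._14 generated") and the per-session
    // messageNo restarts from 0.
    void GetOwnership() {
        OwnerCookie = "default|<uuid>_14";
        MessageNo = 0;
    }
};

int main() {
    PartitionWriter w;
    WriteResult r = w.Write(15);   // mirrors "Answer error ... messageNo: 1"
    if (!r.ok) {
        w.GetOwnership();          // mirrors "TPartition::ReplyOwnerOk"
        r = w.Write(15);           // retried write succeeds at offset 14
    }
    std::cout << (r.ok ? std::string("stored on disk") : r.error) << '\n';
}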
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:04.964483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:58:05.017107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:58:05.351004Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:05.365710Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:58:07.982327Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342906585513597:2164];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:07.999096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:09.297716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342936650285374:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:09.297855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:09.310348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342936650285402:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:09.346475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:58:09.387311Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-17T17:58:09.388301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342936650285404:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:58:09.440308Z node 1 :TX_PROXY ERROR: Actor# [1:7494342936650285468:2461] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:10.108391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:10.116806Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342936650285476:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:58:10.134470Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjgyMGVjN2UtODA4YjUzODAtNDk0NjZkNmEtYWQ1OGY4MzA=, ActorId: [1:7494342936650285362:2338], ActorState: ExecuteState, TraceId: 01js2eda1eaamy88kr37g5mgpw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:58:10.136763Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:58:10.178855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:10.309040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:58:10.663387Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2edb76bem16wkevrfg2efh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFkM2NlMGQtNTAwMzQ1ZWUtMmQ5ZTEzMTYtYzUwYWM2ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342940945253073:2646] === CheckClustersList. Ok 2025-04-17T17:58:16.402623Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-17T17:58:16.411867Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-17T17:58:16.445231Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7494342966715057143:2820] connected; active server actors: 1 2025-04-17T17:58:16.445410Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-17T17:58:16.446277Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] BALANCER INIT DONE for topic2: (0, 72075186224037892) 2025-04-17T17:58:16.446416Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-17T17:58:16.461739Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-17T17:58:16.478130Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:16.478606Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][] pipe [1:7494342966715057162:2832] connected; active server actors: 1 2025-04-17T17:58:16.478740Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-17T17:58:16.486313Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-17T17:58:16.486581Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:16.486821Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-17T17:58:16.486839Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:58:16.486859Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-17T17:58:16.486883Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:58:16.486919Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:16.486973Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-17T17:58:16.487015Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:16.487321Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037895 2025-04-17T17:58:16.487377Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] BALANCER INIT DONE for topic1: (0, 72075186224037894) 2025-04-17T17:58:16.489547Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7494342966715057167:2461], now have 1 active actors on pipe 2025-04-17T17:58:16.489578Z node 1 :P ... 
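The read balancer above applies topic configuration as a set diff ("updating configuration. Deleted partitions []. Added partitions [0]"): a freshly created topic starts with an empty partition set, and the new config adds partition 0. A sketch of that diff step, inferred from the message format alone and not from YDB's actual balancer code:

#include <set>
#include <vector>
#include <algorithm>
#include <iterator>
#include <iostream>

static void PrintSet(const std::vector<int>& v) {
    std::cout << '[';
    for (size_t i = 0; i < v.size(); ++i)
        std::cout << (i ? " " : "") << v[i];
    std::cout << ']';
}

// Compare the current and target partition sets and report both diffs,
// then adopt the target set as current.
void UpdateConfiguration(std::set<int>& current, const std::set<int>& target) {
    std::vector<int> deleted, added;
    std::set_difference(current.begin(), current.end(),
                        target.begin(), target.end(),
                        std::back_inserter(deleted));
    std::set_difference(target.begin(), target.end(),
                        current.begin(), current.end(),
                        std::back_inserter(added));
    std::cout << "updating configuration. Deleted partitions ";
    PrintSet(deleted);
    std::cout << ". Added partitions ";
    PrintSet(added);
    std::cout << '\n';
    current = target;
}

int main() {
    std::set<int> current;             // empty: topic just created
    UpdateConfiguration(current, {0}); // Deleted []. Added [0]
}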
Root/dir1/topic2" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7494343028140626591 RawX2: 12884904028 } Partitions { Partition { PartitionId: 0 } } 2025-04-17T17:58:42.848321Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:42.851305Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:42.851352Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state CALCULATED 2025-04-17T17:58:42.851366Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, State CALCULATED 2025-04-17T17:58:42.851381Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676 State CALCULATED FrontTxId 281474976715676 2025-04-17T17:58:42.851392Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, NewState WAIT_RS 2025-04-17T17:58:42.851423Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676 moved from CALCULATED to WAIT_RS 2025-04-17T17:58:42.851459Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-17T17:58:42.851478Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] HaveParticipantsDecision 1 2025-04-17T17:58:42.851515Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, NewState EXECUTING 2025-04-17T17:58:42.851532Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676 moved from WAIT_RS to EXECUTING 2025-04-17T17:58:42.851542Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Received 0, Expected 1 2025-04-17T17:58:42.851577Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1744912722889, TxId 281474976715676 2025-04-17T17:58:42.851779Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:42.851802Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:42.851818Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:42.851833Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:42.851842Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:58:42.851851Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] _config_0 2025-04-17T17:58:42.851871Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:42.851889Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:42.851907Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:58:42.853956Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:42.853979Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state EXECUTED 2025-04-17T17:58:42.853993Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715675, State EXECUTED 2025-04-17T17:58:42.854006Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715675 State EXECUTED FrontTxId 281474976715675 2025-04-17T17:58:42.854020Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:58:42.854035Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715675, NewState WAIT_RS_ACKS 2025-04-17T17:58:42.854046Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715675 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:58:42.854065Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2025-04-17T17:58:42.854073Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:58:42.854082Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2025-04-17T17:58:42.854096Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] add an TxId 281474976715675 to the list for deletion 2025-04-17T17:58:42.854119Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715675, NewState DELETING 2025-04-17T17:58:42.854145Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete key for TxId 281474976715675 2025-04-17T17:58:42.854209Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:42.854981Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:42.855005Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state DELETING 2025-04-17T17:58:42.855016Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715675, State DELETING 2025-04-17T17:58:42.855032Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete TxId 281474976715675 2025-04-17T17:58:42.856134Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:42.856171Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvTxCommitDone Step 1744912722889, TxId 281474976715676, Partition 0 2025-04-17T17:58:42.856190Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state EXECUTING 2025-04-17T17:58:42.856203Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, State EXECUTING 2025-04-17T17:58:42.856219Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676 State EXECUTING FrontTxId 281474976715676 2025-04-17T17:58:42.856233Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Received 1, Expected 1 2025-04-17T17:58:42.856293Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId: 281474976715676 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-17T17:58:42.856333Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] complete TxId 281474976715676 2025-04-17T17:58:42.856641Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 
2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-04-17T17:58:42.856710Z node 3 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:42.856793Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for TxId 281474976715676 2025-04-17T17:58:42.856811Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, NewState EXECUTED 2025-04-17T17:58:42.856828Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676 moved from EXECUTING to EXECUTED 2025-04-17T17:58:42.857113Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715676] save tx TxId: 281474976715676 State: EXECUTED MinStep: 1744912722875 MaxStep: 18446744073709551615 Step: 1744912722889 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7494343028140626591 RawX2: 12884904028 } Partitions { Partition { PartitionId: 0 } } 2025-04-17T17:58:42.857276Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:42.857929Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:42.857945Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state EXECUTED 2025-04-17T17:58:42.857958Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, State EXECUTED 2025-04-17T17:58:42.857970Z node 3 :PERSQUEUE 
DEBUG: [PQ: 72075186224037894] TxId 281474976715676 State EXECUTED FrontTxId 281474976715676 2025-04-17T17:58:42.857982Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:58:42.858006Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, NewState WAIT_RS_ACKS 2025-04-17T17:58:42.858032Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:58:42.858049Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-17T17:58:42.858061Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:58:42.858072Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-17T17:58:42.858818Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] add an TxId 281474976715676 to the list for deletion 2025-04-17T17:58:42.858837Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, NewState DELETING 2025-04-17T17:58:42.858851Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete key for TxId 281474976715676 2025-04-17T17:58:42.858879Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:42.859472Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:42.859481Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state DELETING 2025-04-17T17:58:42.859490Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715676, State DELETING 2025-04-17T17:58:42.859500Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete TxId 281474976715676 >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TPQTabletTests::Cancel_Tx >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPersQueueTest::Init [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks >> TFstClassSrcIdPQTest::TestTableCreated >> TPQTest::TestMessageNo >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> KqpBatchDelete::Large_2 [GOOD] >> TPartitionTests::UserActCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] Test command err: 2025-04-17T17:58:35.692184Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:35.692270Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:35.711346Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-17T17:58:35.713007Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-17T17:58:35.000000Z 2025-04-17T17:58:35.713086Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\370\231\234\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\370\231\234\247\3442" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-17T17:58:37.019185Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:37.019251Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:37.044992Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] 2025-04-17T17:58:37.046659Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:37.000000Z 2025-04-17T17:58:37.046715Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\310\251\234\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\310\251\234\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\310\251\234\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\310\251\234\247\3442" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: 
"\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2025-04-17T17:58:38.217563Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:38.217624Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:38.888975Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:38.889043Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:38.899770Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:58:38.899917Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:38.900081Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-04-17T17:58:38.900685Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:58:38.900791Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:58:38.900897Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:58:38.901009Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:58:38.901296Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:58:38.901355Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:58:38.901384Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:58:38.901417Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:38.000000Z 2025-04-17T17:58:38.901444Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:58:38.901474Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-04-17T17:58:38.901514Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:58:38.901544Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 2025-04-17T17:58:40.302857Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:58:40.302965Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-17T17:58:41.774566Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:58:41.774655Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:58:41.774686Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion Got batch complete: 2 Wait batch completion Wait kv request 2025-04-17T17:58:42.043854Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-04-17T17:58:42.043924Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-04-17T17:58:42.043999Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:58:42.044043Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:58:42.044118Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:58:42.044169Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:58:42.044213Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:58:42.079821Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:58:42.080083Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:42.080124Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:42.080166Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:42.080203Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-04-17T17:58:42.080234Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:58:42.080254Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:42.080273Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:42.080308Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:58:42.080361Z node 4 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:42.080395Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 2 2025-04-17T17:58:42.114280Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 2025-04-17T17:58:42.825653Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:42.825716Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:42.841116Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:58:42.841300Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:42.841544Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-04-17T17:58:42.842470Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:58:42.842630Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:58:42.842779Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:58:42.842984Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:58:42.843369Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-17T17:58:42.843456Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:58:42.843496Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:58:42.843540Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:42.000000Z 2025-04-17T17:58:42.843573Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:58:42.843615Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-04-17T17:58:42.843666Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-17T17:58:42.843711Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:58:43.188594Z node 5 :PERSQUEUE INFO: new Cookie src1|a306e955-ef29459e-dcf93f05-cab930ea_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-17T17:58:43.188748Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 2025-04-17T17:58:44.270726Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:58:44.270841Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-17T17:58:44.270925Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-04-17T17:58:45.654750Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:58:45.654901Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:58:45.654965Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion 2025-04-17T17:58:45.655124Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-17T17:58:45.655182Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-17T17:58:45.655273Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:58:45.655332Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 Got batch complete: 2 Wait batch completion Wait for no tx committed 2025-04-17T17:58:45.931346Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-04-17T17:58:45.931438Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-04-17T17:58:45.931534Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:58:45.931596Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-04-17T17:58:45.931863Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:45.931915Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:45.931962Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:45.932011Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:58:45.932050Z node 5 :PERSQUEUE DEBUG: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:45.932076Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:45.932108Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:58:45.932147Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:45.932195Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait kv request Wait tx committed for tx 0 2025-04-17T17:58:46.208953Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 2025-04-17T17:58:46.209102Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 1. Committed seqNo: 6. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. Offset: 60 2025-04-17T17:58:46.209233Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 7 partNo 0 2025-04-17T17:58:46.210113Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 7 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 84 count 1 nextOffset 71 batches 1 2025-04-17T17:58:46.210234Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 7. Committed seqNo: 6. Writing seqNo: 7. EndOffset: 1. CurOffset: 71. 
Offset: 80 2025-04-17T17:58:46.210909Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,1 HeadOffset 1 endOffset 1 curOffset 71 d0000000000_00000000000000000070_00000_0000000001_00000| size 70 WTime 12141 2025-04-17T17:58:46.211095Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:46.211159Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:46.211208Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:58:46.211252Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:46.211300Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:58:46.211337Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000070_00000_0000000001_00000| 2025-04-17T17:58:46.211370Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:46.211402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:46.211494Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 3 Got KV request Got KV request Wait tx committed for tx 4 Wait batch completion Wait kv request 2025-04-17T17:58:46.246825Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:46.246925Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:58:46.247030Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 1 is already written 2025-04-17T17:58:46.247104Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:58:46.247144Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 70 is stored on disk 2025-04-17T17:58:46.247175Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:58:46.247216Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 71 is already written >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State >> TopicService::UnknownTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 4523, MsgBus: 9954 2025-04-17T17:58:18.046278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342970225962116:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:18.046489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c5a/r3tmp/tmpXy4mMJ/pdisk_1.dat 2025-04-17T17:58:18.887690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:18.887844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:18.901603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:58:18.918609Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4523, node 1 2025-04-17T17:58:19.273603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:19.273635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:19.273645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:19.273749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9954 TClient is connected to server localhost:9954 WaitRootIsUp 'Root'... 
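The "Already written message" lines in the ConflictingSrcIdTxAndWritesDifferentBatches output above show per-SourceId deduplication: SeqNo 1 from 'src1' is skipped because the committed SeqNo is already 6, SeqNo 7 is stored once (offset 70), and its second occurrence is answered "is already written". A minimal sketch of the assumed semantics — each partition tracks the highest committed SeqNo per SourceId and acknowledges duplicates without re-storing them; the types are illustrative, not YDB's:

#include <map>
#include <string>
#include <iostream>

struct Partition {
    std::map<std::string, long> CommittedSeqNo;  // per-sourceid high-water mark

    // Returns true if the message is stored, false if deduplicated.
    bool Write(const std::string& sourceId, long seqNo) {
        auto it = CommittedSeqNo.find(sourceId);
        if (it != CommittedSeqNo.end() && seqNo <= it->second)
            return false;                 // "... is already written"
        CommittedSeqNo[sourceId] = seqNo; // "... is stored on disk"
        return true;
    }
};

int main() {
    Partition p;
    p.CommittedSeqNo["src1"] = 6;             // "Committed seqNo: 6"
    std::cout << p.Write("src1", 1) << '\n';  // 0: stale seqNo, already written
    std::cout << p.Write("src1", 7) << '\n';  // 1: stored on disk (offset 70)
    std::cout << p.Write("src1", 7) << '\n';  // 0: duplicate retry, already written
}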
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:20.349875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:20.378793Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:20.393967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:58:20.572065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:20.740503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:20.829563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:22.982721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342970225962116:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:22.982840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:23.129309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342995995767525:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:23.129406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:23.592479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:23.633332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:23.716625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:23.785479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:23.840532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:23.908871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:23.977792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:24.146274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343000290735510:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:24.146345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:24.146739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343000290735515:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:24.150976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:24.175060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343000290735517:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:24.270523Z node 1 :TX_PROXY ERROR: Actor# [1:7494343000290735572:3567] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:25.448778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 7174, MsgBus: 22850 2025-04-17T17:58:34.262617Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343041786538675:2147];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c5a/r3tmp/tmpa5oLiP/pdisk_1.dat 2025-04-17T17:58:34.378110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:58:34.426415Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7174, node 2 2025-04-17T17:58:34.445119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:34.445198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:34.503196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:58:34.690858Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:34.690890Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:34.690899Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:34.691068Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22850 TClient is connected to server localhost:22850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:35.691585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:58:35.712837Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:58:35.732892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:35.870424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.165569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.294943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:38.627375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343058966409514:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:38.627454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:38.689372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:38.723693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:38.801887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:38.844559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:38.879380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:38.931602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.024069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.161093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343063261377477:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:39.161225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:39.162293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343063261377482:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:39.166756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:39.181279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343063261377484:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T17:58:39.269677Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343041786538675:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:39.269765Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:39.282049Z node 2 :TX_PROXY ERROR: Actor# [2:7494343063261377542:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:40.684227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] >> KqpJoin::HashJoinWithAsTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: 2025-04-17T17:54:27.752615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:27.753155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:27.753212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00142f/r3tmp/tmpdXhwbB/pdisk_1.dat 2025-04-17T17:54:28.144406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:28.199147Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:28.238331Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:54:28.239159Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:54:28.239329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:28.239473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:28.250693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:28.430654Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:54:28.430731Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:54:28.430911Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553] 2025-04-17T17:54:28.555688Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:54:28.555790Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:54:28.556400Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:54:28.556517Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:54:28.556889Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:54:28.557110Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:54:28.557207Z node 1 :TX_PROXY DEBUG: Actor# 
[1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:54:28.557513Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:54:28.559041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:28.560116Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:54:28.560193Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:54:28.593780Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:54:28.596831Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:54:28.597309Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:668:2573] 2025-04-17T17:54:28.597567Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:28.649692Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:54:28.650552Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:28.650685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:28.652394Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:54:28.652489Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:54:28.652545Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:54:28.652905Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:28.653047Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:28.653142Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:685:2573] in generation 1 2025-04-17T17:54:28.664104Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:28.701877Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:54:28.702084Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:28.702215Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2584] 2025-04-17T17:54:28.702246Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:28.702287Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:54:28.702316Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:28.702519Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:668:2573], Recipient [1:668:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T17:54:28.702561Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:28.702861Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:54:28.702939Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:54:28.702998Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:28.703037Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:28.703088Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:54:28.703124Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:28.703153Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:28.703175Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:28.703231Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:28.703588Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:676:2578], Recipient [1:668:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:28.703629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:28.703676Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:676:2578], sessionId# [0:0:0] 2025-04-17T17:54:28.703729Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:676:2578] 2025-04-17T17:54:28.703769Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:54:28.703853Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:54:28.704007Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:54:28.704060Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:54:28.704133Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:54:28.704177Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:54:28.704204Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:54:28.704230Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:54:28.704266Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:54:28.704464Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:54:28.704502Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:54:28.704528Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:54:28.704552Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T17:54:28.704600Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:54:28.704622Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:54:28.704651Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:54:28.704682Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:54:28.704710Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:54:28.705768Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:688:2585], Recipient [1:668:2573]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:54:28.705802Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54:28.716464Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Compl ... nected at leader tablet# 72075186224037888, clientId# [26:981:2787], serverId# [26:982:2788], sessionId# [0:0:0] 2025-04-17T17:58:42.424466Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [26:981:2787] 2025-04-17T17:58:42.424599Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [26:981:2787] 2025-04-17T17:58:42.424693Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:981:2787] 2025-04-17T17:58:42.424818Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [26:981:2787] 2025-04-17T17:58:42.424923Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [26:981:2787] 2025-04-17T17:58:42.425260Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [26:980:2786], Recipient [26:696:2585]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-17T17:58:42.425453Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2025-04-17T17:58:42.425624Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T17:58:42.425795Z node 26 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2025-04-17T17:58:42.426281Z node 26 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [26:980:2786], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2025-04-17T17:58:42.426486Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T17:58:42.426684Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T17:58:42.427061Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, 
edge 9223372036854775807/0, generation 0 2025-04-17T17:58:42.427172Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:21} starting compaction 2025-04-17T17:58:42.427662Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} starting Scan{1 on 1001, Compact{72075186224037888.1.21, eph 1}} 2025-04-17T17:58:42.427857Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} started compaction 1 2025-04-17T17:58:42.427966Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR 2025-04-17T17:58:42.539089Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 21, product {tx status + 1 parts epoch 2} done 2025-04-17T17:58:42.539524Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2025-04-17T17:58:42.539707Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2025-04-17T17:58:42.539801Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2025-04-17T17:58:42.540416Z node 26 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.514048Z 2025-04-17T17:58:42.540713Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2025-04-17T17:58:42.540886Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T17:58:42.541043Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-04-17T17:58:42.541183Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [26:980:2786]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T17:58:42.542153Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2025-04-17T17:58:42.542357Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ========= Starting an immediate read ========= 2025-04-17T17:58:42.760603Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2eeakr4z862ds4e3r01vd4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=Yjk1ODEzMDgtZDY4MGRkMjQtODI5NTZjZTMtMzg2YWI1ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:58:42.762502Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send [26:914:2733] 2025-04-17T17:58:42.762629Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:914:2733] 2025-04-17T17:58:42.763090Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [26:1006:2794], Recipient [26:696:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-17T17:58:42.763411Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-17T17:58:42.763578Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T17:58:42.763772Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T17:58:42.763898Z node 26 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1515/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T17:58:42.764012Z node 26 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1515/18446744073709551615 2025-04-17T17:58:42.764168Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-17T17:58:42.764380Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:58:42.764497Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-17T17:58:42.764601Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T17:58:42.764688Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T17:58:42.764762Z node 26 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-17T17:58:42.764857Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:58:42.764892Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T17:58:42.764920Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T17:58:42.764944Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-17T17:58:42.765135Z node 26 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-17T17:58:42.765444Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-04-17T17:58:42.765504Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T17:58:42.765600Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T17:58:42.765675Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 
72075186224037888 on unit CompletedOperations 2025-04-17T17:58:42.765768Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T17:58:42.765798Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T17:58:42.765847Z node 26 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-17T17:58:42.765943Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T17:58:42.766117Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T17:58:42.766314Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T17:58:42.821591Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} commited cookie 8 for step 20 2025-04-17T17:58:42.854823Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:22} commited cookie 8 for step 21 2025-04-17T17:58:42.878346Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-17T17:58:42.878597Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T17:58:42.879045Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{12, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T17:58:42.879237Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T17:58:42.880117Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} commited cookie 1 for step 18 2025-04-17T17:58:42.880415Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [26:548:2484] 2025-04-17T17:58:42.880550Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [26:548:2484] 2025-04-17T17:58:42.906139Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:23} commited cookie 8 for step 22 2025-04-17T17:58:42.937915Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:24} commited cookie 8 for step 23 2025-04-17T17:58:42.974801Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:25} commited cookie 8 for step 24 >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestOwnership ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] Test command err: 2025-04-17T17:58:44.671801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:44.675637Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:44.675904Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:58:44.675952Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:58:44.675984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:58:44.676051Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:58:44.676097Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:44.676192Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:44.735325Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-17T17:58:44.735458Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:58:44.774815Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:44.783283Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:44.783443Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:44.784326Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:44.784491Z 
node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:44.784888Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:44.785242Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-17T17:58:44.786096Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:58:44.786160Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-17T17:58:44.786275Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:44.786740Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:44.786842Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-17T17:58:44.786883Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:58:44.787015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:44.787063Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:44.787114Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:44.787163Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:44.787189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:58:44.787212Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:58:44.787270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:44.787310Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:44.787393Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:44.787541Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:58:44.799976Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:44.800429Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-04-17T17:58:44.801091Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-04-17T17:58:44.801189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet 2025-04-17T17:58:45.373446Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:45.376754Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:45.376989Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:58:45.377024Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:58:45.377052Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:58:45.377081Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:58:45.377112Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:45.377152Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:45.401581Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:176:2191], now have 1 active actors on pipe 2025-04-17T17:58:45.401660Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:58:45.401932Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:58:45.408587Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:58:45.408704Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:45.409880Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:58:45.409983Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:45.410046Z node 2 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:45.411115Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:45.411421Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2025-04-17T17:58:45.412254Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:58:45.412303Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:184:2197] 2025-04-17T17:58:45.412371Z node 2 :PERSQUEUE DEBUG ... DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-04-17T17:58:48.933290Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-04-17T17:58:48.933443Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-04-17T17:58:48.933479Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-04-17T17:58:48.933515Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-04-17T17:58:48.933549Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-17T17:58:48.933581Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-04-17T17:58:48.933614Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-04-17T17:58:48.933649Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-17T17:58:48.933691Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-04-17T17:58:48.933723Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-04-17T17:58:48.933887Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 153 MaxStep: 30153 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-04-17T17:58:48.933970Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:48.940294Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:48.940394Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-17T17:58:48.940432Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-04-17T17:58:48.940488Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-04-17T17:58:48.940526Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-04-17T17:58:48.940561Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 
2025-04-17T17:58:48.940604Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-17T17:58:48.940661Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-17T17:58:48.940767Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-04-17T17:58:48.945549Z node 6 :PERSQUEUE DEBUG: Client pipe to tablet 72057594037927937 from 22222 is reset 2025-04-17T17:58:48.973735Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:48.980792Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:48.982087Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2025-04-17T17:58:48.982144Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 110, PlanTxId 67891, ExecStep 110, ExecTxId 67891 2025-04-17T17:58:48.982289Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2025-04-17T17:58:48.982376Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67890, Step: 100, State: EXECUTED, WriteId: 2025-04-17T17:58:48.982441Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067891, Status 0 2025-04-17T17:58:48.982479Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67891, Step: 110, State: CALCULATED, WriteId: 2025-04-17T17:58:48.982502Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Fix tx state 2025-04-17T17:58:48.982551Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=2, PlannedTxs.size=2 2025-04-17T17:58:48.982592Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2025-04-17T17:58:48.982631Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67890 EXECUTED 0 2025-04-17T17:58:48.982663Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67891 PLANNED 0 2025-04-17T17:58:48.983146Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:48.983192Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info 2025-04-17T17:58:48.983328Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:48.983642Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:48.983884Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:362:2340] 2025-04-17T17:58:48.984694Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T17:58:48.985703Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-17T17:58:48.985960Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T17:58:48.997731Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T17:58:48.998068Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:58:48.998113Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:58:48.998153Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-17T17:58:48.998206Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:58:48.998275Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:362:2340] 2025-04-17T17:58:48.998334Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:48.998387Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:48.998467Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-04-17T17:58:48.998636Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-17T17:58:48.998685Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-17T17:58:48.998816Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-17T17:58:48.998859Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-17T17:58:48.998897Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-17T17:58:48.998933Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:58:48.998971Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-17T17:58:48.999006Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:58:48.999045Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-17T17:58:48.999073Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:58:48.999105Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-17T17:58:48.999147Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-04-17T17:58:48.999172Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PLANNED 2025-04-17T17:58:48.999196Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State PLANNED FrontTxId 67891 2025-04-17T17:58:48.999220Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-17T17:58:48.999255Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 110, ExecTxId 67891 2025-04-17T17:58:48.999319Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATING 2025-04-17T17:58:48.999360Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-04-17T17:58:48.999564Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-04-17T17:58:49.001375Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-04-17T17:58:49.001420Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-04-17T17:58:49.001452Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-04-17T17:58:49.001485Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATING 2025-04-17T17:58:49.001522Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-04-17T17:58:49.001557Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-04-17T17:58:49.001591Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-17T17:58:49.001628Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-04-17T17:58:49.001664Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-04-17T17:58:49.001824Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 153 MaxStep: 30153 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-04-17T17:58:49.001930Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:49.001995Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-17T17:58:49.002037Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-17T17:58:49.012974Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:49.013039Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-17T17:58:49.013080Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-04-17T17:58:49.013123Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-04-17T17:58:49.013165Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-04-17T17:58:49.013201Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-04-17T17:58:49.013265Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
2025-04-17T17:58:49.013303Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-17T17:58:49.013414Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T17:54:13.963284Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T17:54:14.056303Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:54:14.063111Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:54:14.063525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T17:54:14.080359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T17:54:14.080645Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T17:54:14.088517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:54:14.088711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:54:14.088937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:54:14.089076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:54:14.089192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:54:14.089305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:54:14.089418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:54:14.089514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:54:14.089653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:54:14.089750Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:54:14.089847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:54:14.089939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:54:14.112512Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:54:14.118934Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T17:54:14.119096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T17:54:14.119145Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T17:54:14.119341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:14.119503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T17:54:14.119583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T17:54:14.119644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T17:54:14.119762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T17:54:14.119831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T17:54:14.119875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T17:54:14.119915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T17:54:14.120097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T17:54:14.120175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T17:54:14.120215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T17:54:14.120245Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T17:54:14.120327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T17:54:14.120394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T17:54:14.120463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T17:54:14.120495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T17:54:14.120575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T17:54:14.120622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T17:54:14.120649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T17:54:14.120702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T17:54:14.120741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T17:54:14.120769Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T17:54:14.121177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-17T17:54:14.121266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-04-17T17:54:14.121386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-17T17:54:14.121466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T17:54:14.121619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T17:54:14.121684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T17:54:14.121722Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-04-17T17:54:14.121966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T17:54:14.122016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T17:54:14.122052Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T17:54:14.122388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T17:54:14.122450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T17:54:14.122488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T17:54:14.122666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T17:54:14.122701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T17:54:14.122750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... 
=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.671163Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.671200Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:58:40.671236Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T17:58:40.671359Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:58:40.671450Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.671492Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T17:58:40.671582Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-17T17:58:40.671648Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-17T17:58:40.671786Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-04-17T17:58:40.671883Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.671973Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.672146Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.672275Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T17:58:40.672373Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.672493Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.672564Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:592:2608] finished for tablet 9437184 2025-04-17T17:58:40.673277Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:585:2601];stats={"p":[{"events":["f_bootstrap"],"t":0.272},{"events":["f_ProduceResults"],"t":1.84},{"events":["l_bootstrap"],"t":3.462},{"events":["f_processing","f_task_result"],"t":3.578},{"events":["l_task_result"],"t":24.235},{"events":["f_ack"],"t":24.28},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":29.727}],"full":{"a":1744912690945297,"name":"_full_task","f":1744912690945297,"d_finished":0,"c":0,"l":1744912720672656,"d":29727359},"events":[{"name":"bootstrap","f":1744912691217948,"d_finished":3189874,"c":1,"l":1744912694407822,"d":3189874},{"a":1744912720672258,"name":"ack","f":1744912715225471,"d_finished":5318742,"c":903,"l":1744912720672183,"d":5319140},{"a":1744912720672244,"name":"processing","f":1744912694523562,"d_finished":14545923,"c":4515,"l":1744912720672185,"d":14546335},{"name":"ProduceResults","f":1744912692785321,"d_finished":6734575,"c":5420,"l":1744912720672536,"d":6734575},{"a":1744912720672540,"name":"Finish","f":1744912720672540,"d_finished":0,"c":0,"l":1744912720672656,"d":116},{"name":"task_result","f":1744912694523606,"d_finished":9034930,"c":3612,"l":1744912715180982,"d":9034930}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.673385Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T17:58:40.674039Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:585:2601];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.272},{"events":["f_ProduceResults"],"t":1.84},{"events":["l_bootstrap"],"t":3.462},{"events":["f_processing","f_task_result"],"t":3.578},{"events":["l_task_result"],"t":24.235},{"events":["f_ack"],"t":24.28},{"events":["l_ProduceResults","f_Finish"],"t":29.727},{"events":["l_ack","l_processing","l_Finish"],"t":29.728}],"full":{"a":1744912690945297,"name":"_full_task","f":1744912690945297,"d_finished":0,"c":0,"l":1744912720673443,"d":29728146},"events":[{"name":"bootstrap","f":1744912691217948,"d_finished":3189874,"c":1,"l":1744912694407822,"d":3189874},{"a":1744912720672258,"name":"ack","f":1744912715225471,"d_finished":5318742,"c":903,"l":1744912720672183,"d":5319927},{"a":1744912720672244,"name":"processing","f":1744912694523562,"d_finished":14545923,"c":4515,"l":1744912720672185,"d":14547122},{"name":"ProduceResults","f":1744912692785321,"d_finished":6734575,"c":5420,"l":1744912720672536,"d":6734575},{"a":1744912720672540,"name":"Finish","f":1744912720672540,"d_finished":0,"c":0,"l":1744912720673443,"d":903},{"name":"task_result","f":1744912694523606,"d_finished":9034930,"c":3612,"l":1744912715180982,"d":9034930}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T17:58:40.674132Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T17:58:10.841251Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-17T17:58:40.675125Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T17:58:40.675499Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpBatchUpdate::HasTxControl [GOOD] >> 
TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> CacheEviction::DeleteKeys [GOOD] >> PQCountersLabeled::Partition >> TopicService::UseDoubleSlashInTopicPath >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 19089, MsgBus: 10456 2025-04-17T17:58:42.953118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343077829944132:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:42.953239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b46/r3tmp/tmpsC7m8u/pdisk_1.dat 2025-04-17T17:58:43.391372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:43.439255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:43.439397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:43.447386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19089, node 1 2025-04-17T17:58:43.514907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:43.514933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:43.514940Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:43.515067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10456 TClient is connected to server localhost:10456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:44.126443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:58:44.154699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:44.303138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:44.456985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:44.524164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:46.263879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343095009815105:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:46.263987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:46.788305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:46.821737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:46.850159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:46.925594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:47.004944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:47.054554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:47.098557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:47.201101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343099304783075:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:47.201209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:47.206313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343099304783080:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:47.211052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:47.224041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343099304783082:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:47.298031Z node 1 :TX_PROXY ERROR: Actor# [1:7494343099304783140:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:47.962374Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343077829944132:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:47.962471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:49.617084Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWU0NDU3YTgtMTRkMTY3ZjEtYjAyMTQ4MGQtN2ExOTEwMzQ=, ActorId: [1:7494343103599750772:2514], ActorState: ExecuteState, TraceId: 01js2eegpwan2abmdgj3dczrmm, Create QueryResponse for error on request, msg: BATCH operation can be executed only in NoTx mode. |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPQTabletTests::Parallel_Transactions_1 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-JSON_DOCUMENT [GOOD] Test command err: 2025-04-17T17:56:44.836287Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342569947120412:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:44.843078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00084e/r3tmp/tmpyWRZ8A/pdisk_1.dat 2025-04-17T17:56:45.619349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:45.619412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:45.628262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:45.692805Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62383, node 1 2025-04-17T17:56:45.746026Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:45.746540Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:45.959208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:45.959224Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: (empty maybe) 2025-04-17T17:56:45.959229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:45.959325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:46.273314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:49.189845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342591421957985:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:49.189960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:49.190404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342591421957997:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:49.190779Z node 1 :TX_PROXY DEBUG: actor# [1:7494342569947120636:2117] Handle TEvProposeTransaction 2025-04-17T17:56:49.190800Z node 1 :TX_PROXY DEBUG: actor# [1:7494342569947120636:2117] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T17:56:49.190839Z node 1 :TX_PROXY DEBUG: actor# [1:7494342569947120636:2117] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494342591421958000:2647] 2025-04-17T17:56:49.273472Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T17:56:49.273542Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:56:49.273559Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T17:56:49.275498Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:56:49.275584Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:56:49.275747Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:56:49.275891Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:56:49.275933Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T17:56:49.276042Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T17:56:49.277381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:56:49.286376Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T17:56:49.286481Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958000:2647] txid# 281474976710658 SEND to# [1:7494342591421957999:2345] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T17:56:49.321170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342591421957999:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:56:49.382726Z node 1 :TX_PROXY DEBUG: actor# [1:7494342569947120636:2117] Handle TEvProposeTransaction 2025-04-17T17:56:49.382765Z node 1 :TX_PROXY DEBUG: actor# [1:7494342569947120636:2117] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T17:56:49.382808Z node 1 :TX_PROXY DEBUG: actor# [1:7494342569947120636:2117] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494342591421958071:2695] 2025-04-17T17:56:49.385810Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T17:56:49.385886Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:56:49.385903Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T17:56:49.386814Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:56:49.386940Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:56:49.387137Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:56:49.387306Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T17:56:49.387369Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T17:56:49.387532Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T17:56:49.395492Z node 1 :TX_PROXY DEBUG: Actor# [1:7494342591421958071:2695] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T17:56:49.395713Z node 1 :TX_PROXY ERROR: Actor# [1:7494342591421958071:2695] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-0 ... nux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T175825Z 2025-04-17T17:58:25.635034Z node 28 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [28:7494343004634228806:2205], result# No response body. 2025-04-17T17:58:25.654502Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343004634228816:2408] [0] Resolve database: name# /Root 2025-04-17T17:58:25.660352Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343004634228816:2408] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:58:25.660398Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343004634228816:2408] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T17:58:25.687033Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343004634228816:2408] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710667 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:13575" scheme: HTTP bucket: "test_bucket" items { source_prefix: "JsonDocumentTable" destination_path: "/Root/JsonDocumentTable" } } StartTime { seconds: 1744912705 } } REQUEST: GET /test_bucket?prefix=JsonDocumentTable HTTP/1.1 HEADERS: Host: localhost:13575 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7CC400F4-1C02-4E08-927A-B4DE78E5A04C amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=0bd06a069a9483913da4945b44b7dbcb8031f868152b268621fe85cb7f6f46c6 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T175825Z S3_MOCK::HttpServeList: JsonDocumentTable 2025-04-17T17:58:25.708933Z node 28 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [28:7494343004634228806:2205], result# ListObjectsResult { } 2025-04-17T17:58:25.708998Z node 28 :IMPORT INFO: Reply: self# [28:7494343004634228806:2205], success# 1, error# 2025-04-17T17:58:25.709119Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 
2025-04-17T17:58:25.709134Z node 28 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976710667, itemIdx# 0, success# 1 2025-04-17T17:58:25.709480Z node 28 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonDocumentTable' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T17:58:25.735001Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T17:58:25.735174Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T17:58:25.735188Z node 28 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715762, id# 281474976710667 2025-04-17T17:58:25.735243Z node 28 :IMPORT INFO: TImport::TTxProgress: CreateTable propose: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonDocumentTable' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715762 2025-04-17T17:58:25.735404Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T17:58:25.737125Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715762:0, at schemeshard: 72057594046644480 2025-04-17T17:58:25.747558Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T17:58:25.747595Z node 28 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715762, status# StatusAccepted 2025-04-17T17:58:25.747744Z node 28 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonDocumentTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976715762 Issue: '' } 2025-04-17T17:58:25.761683Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T17:58:25.838148Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T17:58:25.838193Z node 28 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715762 2025-04-17T17:58:25.838291Z node 28 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonDocumentTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T17:58:25.839731Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T17:58:25.839814Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T17:58:25.839825Z node 28 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715763, id# 281474976710667 2025-04-17T17:58:25.839870Z node 28 :IMPORT INFO: TImport::TTxProgress: Restore propose: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonDocumentTable' DstPathId: 
[OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715763 2025-04-17T17:58:25.840911Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T17:58:25.841430Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRestore, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-17T17:58:25.843395Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T17:58:25.843418Z node 28 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715763, status# StatusAccepted 2025-04-17T17:58:25.843503Z node 28 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonDocumentTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Subscribed WaitTxId: 281474976715763 Issue: '' } 2025-04-17T17:58:25.845953Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoComplete REQUEST: HEAD /test_bucket/JsonDocumentTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:13575 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 259A67AB-8BB8-4D08-9089-85DA3E4800B1 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=29d42890906ea52719897c5563e2c38ce7eaa8628de775520d4860fddd36fccf content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T175825Z S3_MOCK::HttpServeRead: /test_bucket/JsonDocumentTable/data_00.csv / 32 REQUEST: GET /test_bucket/JsonDocumentTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:13575 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6BB9CFEC-45D6-4875-8324-C9F093E65D50 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=3c8f9258f3ab7dc62b3960ebe57f58d6db0dfa80bda840cdd39ba502fc351db7 content-type: application/xml range: bytes=0-31 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T175825Z S3_MOCK::HttpServeRead: /test_bucket/JsonDocumentTable/data_00.csv / 32 2025-04-17T17:58:26.003072Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T17:58:26.003098Z node 28 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715763 2025-04-17T17:58:26.005733Z node 28 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T17:58:26.100569Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343008929196297:2414] [0] Resolve database: name# /Root 2025-04-17T17:58:26.102811Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343008929196297:2414] [0] Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:58:26.102852Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343008929196297:2414] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T17:58:26.104515Z node 28 :TX_PROXY DEBUG: [GetImport] [28:7494343008929196297:2414] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710667 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:13575" scheme: HTTP bucket: "test_bucket" items { source_prefix: "JsonDocumentTable" destination_path: "/Root/JsonDocumentTable" } } StartTime { seconds: 1744912705 } EndTime { seconds: 1744912706 } } 2025-04-17T17:58:26.256535Z node 28 :TX_PROXY DEBUG: actor# [28:7494342970274488445:2136] Handle TEvExecuteKqpTransaction 2025-04-17T17:58:26.256566Z node 28 :TX_PROXY DEBUG: actor# [28:7494342970274488445:2136] TxId# 281474976710668 ProcessProposeKqpTransaction 2025-04-17T17:58:26.257652Z node 28 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01js2edtj3f615qy0n790jz7af, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MzI3MWM5YTktZTMxOTdiMTEtYTY5MmE4YzEtOTliY2IyNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TPQTest::TestWritePQCompact >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TPQTabletTests::Parallel_Transactions_2 >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 8002 Create chunk: 0.000268s Read by index: 0.000026s Iterate: 0.000145s Size: 8256 Create chunk: 0.000203s Read by index: 0.000042s Iterate: 0.000080s Size: 8532 Create chunk: 0.000091s Read by index: 0.000043s Iterate: 0.000036s Size: 7769 Create chunk: 0.000098s Read by index: 0.000047s Iterate: 0.000048s Size: 2853 Create chunk: 0.000097s Read by index: 0.000085s Iterate: 0.000052s Size: 2419 Create chunk: 0.000107s Read by index: 0.000086s Iterate: 0.000049s Size: 2929 Create chunk: 0.000073s Read by index: 0.000080s Iterate: 0.000043s Size: 2472 Create chunk: 0.000131s Read by index: 0.000111s Iterate: 0.000050s Size: 2407 Create chunk: 0.000083s Read by index: 0.000076s Iterate: 0.000041s Size: 2061 Create chunk: 0.000094s Read by index: 0.000294s Iterate: 0.000048s >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TPQTabletTests::Multiple_PQTablets_1 >> TPQTabletTests::Partition_Send_Predicate_With_False >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::PrefixedVectorIndex >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] Test command err: 2025-04-17T17:58:50.185629Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:50.200646Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:50.200967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:58:50.201021Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:58:50.201057Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:58:50.201104Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:58:50.201150Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:50.201219Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:50.240621Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-17T17:58:50.240745Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:58:50.292826Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:50.308273Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:50.308465Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:50.310171Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:50.318837Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:50.318942Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:50.319490Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:50.319942Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-17T17:58:50.321024Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:58:50.321107Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-17T17:58:50.321178Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:50.321750Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:50.321894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-17T17:58:50.321941Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:58:50.322106Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:50.322152Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:50.329107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:50.329194Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:50.329229Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:58:50.329252Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:58:50.329290Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:50.329339Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:50.329449Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:50.329681Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:58:50.330214Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:50.330508Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-04-17T17:58:50.331231Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-04-17T17:58:50.331282Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-04-17T17:58:50.331324Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:50.331679Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:50.331751Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-17T17:58:50.331784Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-17T17:58:50.331894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:50.331952Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:50.331985Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:58:50.332020Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:58:50.332045Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-17T17:58:50.332067Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-17T17:58:50.332092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:50.332118Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:58:50.332179Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:50.332296Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:58:50.346969Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:50.347159Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:50.347643Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2208], now have 1 active actors on pipe 2025-04-17T17:58:50.348415Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:202:2210], now have 1 active actors on pipe 2025-04-17T17:58:50.349370Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-04-17T17:58:50.349457Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-17T17:58:50.349540Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-17T17:58:50.349590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-17T17:58:50.349625Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-17T17:58:50.349667Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-17T17:58:50.349730Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-17T17:58:50.349926Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 230 MaxStep: 30230 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-17T17:58:50.350066Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:50.360073Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17 ... 
ve 1 active actors on pipe 2025-04-17T17:58:54.031413Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:224:2225], now have 1 active actors on pipe 2025-04-17T17:58:54.031620Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 25769805968 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-17T17:58:54.031669Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-17T17:58:54.031749Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-17T17:58:54.031793Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-17T17:58:54.031831Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-17T17:58:54.031873Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-17T17:58:54.031918Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-17T17:58:54.032062Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-04-17T17:58:54.032154Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:54.035637Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:54.035707Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-17T17:58:54.035751Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-17T17:58:54.035798Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-17T17:58:54.036130Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-17T17:58:54.036180Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-17T17:58:54.036222Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-04-17T17:58:54.036349Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 177 RawX2: 25769805968 } } Step: 100 2025-04-17T17:58:54.036406Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-04-17T17:58:54.036445Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-04-17T17:58:54.036517Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-04-17T17:58:54.036566Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-04-17T17:58:54.036738Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 
25769805968 } Partitions { } 2025-04-17T17:58:54.036835Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:54.040092Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:54.040166Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNING 2025-04-17T17:58:54.040206Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-04-17T17:58:54.040252Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-04-17T17:58:54.040298Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2025-04-17T17:58:54.040336Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-17T17:58:54.040374Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-17T17:58:54.040443Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-17T17:58:54.040523Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-04-17T17:58:54.040617Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-17T17:58:54.040702Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'user' Bad request (behind the last offset) EndOffset 0 End 1 2025-04-17T17:58:54.040887Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 0 2025-04-17T17:58:54.040926Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-17T17:58:54.040963Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-17T17:58:54.040998Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-17T17:58:54.041127Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-17T17:58:54.041169Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-04-17T17:58:54.041205Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-17T17:58:54.041254Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-17T17:58:54.041307Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-04-17T17:58:54.041487Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-04-17T17:58:54.041575Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:54.055217Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:54.055291Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-17T17:58:54.055345Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-17T17:58:54.055381Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 
67890 2025-04-17T17:58:54.055433Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-17T17:58:54.055475Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-17T17:58:54.055525Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-17T17:58:54.055569Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-17T17:58:54.055715Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-17T17:58:54.055766Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-17T17:58:54.055808Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-17T17:58:54.055843Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 0 2025-04-17T17:58:54.055884Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-04-17T17:58:54.055931Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-17T17:58:54.055976Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-17T17:58:54.056013Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-17T17:58:54.056054Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-17T17:58:54.056248Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-04-17T17:58:54.056344Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:54.056551Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:54.056595Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:54.056636Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:54.056674Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:54.056708Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:58:54.056743Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:54.056786Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:54.056963Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:58:54.063377Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-17T17:58:54.063458Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-17T17:58:54.072312Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:54.074752Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:54.074820Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-17T17:58:54.074854Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-17T17:58:54.074890Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-17T17:58:54.074924Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:58:54.074993Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-17T17:58:54.075040Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-17T17:58:54.075078Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:58:54.075124Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-17T17:58:54.075156Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:58:54.075206Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 >> TPQTabletTests::One_Tablet_For_All_Partitions >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] >> TPartitionTests::Batching >> TPQTabletTests::Multiple_PQTablets_2 >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TPersQueueTest::DirectReadCleanCache [GOOD] >> TPersQueueTest::DirectReadRestartPQRB >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] >> TPQTabletTests::DropTablet_And_Tx >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TPartitionTests::Batching [GOOD] >> TPartitionTests::CommitOffsetRanges >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPQTabletTests::DropTablet_Before_Write >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> TPartitionTests::AfterRestart_2 >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TPartitionTests::ConflictingSrcIdForTxWithHead >> TPQTabletTests::Huge_ProposeTransacton >> TPartitionTests::AfterRestart_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] Test command err: 2025-04-17T17:58:52.913863Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:52.918625Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:52.918957Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:58:52.919037Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:58:52.919080Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:58:52.919129Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:58:52.919204Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:52.919302Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:53.002992Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:206:2212], now have 1 active actors on pipe 2025-04-17T17:58:53.003127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:58:53.026540Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-17T17:58:53.031996Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-17T17:58:53.032168Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:53.033349Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { 
PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-17T17:58:53.033559Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:53.034024Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:53.034680Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:214:2218] 2025-04-17T17:58:53.035687Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:58:53.035757Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:214:2218] 2025-04-17T17:58:53.035842Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:53.036797Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:53.036924Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-17T17:58:53.036975Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:58:53.037045Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-04-17T17:58:53.037082Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-04-17T17:58:53.037285Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:53.037330Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:53.037385Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:53.037421Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:53.037452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:58:53.037475Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:58:53.037519Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-04-17T17:58:53.037551Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-04-17T17:58:53.037583Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:53.037623Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:53.037749Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:53.037803Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:53.038058Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:58:53.043147Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:53.043576Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:221:2223], now have 1 active actors on pipe 2025-04-17T17:58:53.044257Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:224:2225], now have 1 active actors on pipe 2025-04-17T17:58:53.045185Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-17T17:58:53.045241Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-17T17:58:53.045340Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-17T17:58:53.045400Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-17T17:58:53.045444Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-17T17:58:53.045485Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-17T17:58:53.045532Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-17T17:58:53.045738Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-17T17:58:53.045833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:53.052828Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:53.052916Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-17T17:58:53.052970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-17T17:58:53.053023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-17T17:58:53.053398Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67891 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-17T17:58:53.053446Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-17T17:58:53.053510Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-04-17T17:58:53.053551Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 
2025-04-17T17:58:53.053589Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-17T17:58:53.053629Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-17T17:58:53.053687Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-17T17:58:53.053845Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-17T17:58:53.053936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:53.059738Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:53.059834Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPA ... 67890, State CALCULATED 2025-04-17T17:58:56.533413Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-17T17:58:56.533451Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-17T17:58:56.533494Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-17T17:58:56.533580Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-17T17:58:56.533624Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-17T17:58:56.533768Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-17T17:58:56.533886Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-17T17:58:56.533931Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-17T17:58:56.533965Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 2 2025-04-17T17:58:56.534120Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-17T17:58:56.534301Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2025-04-17T17:58:56.534345Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-04-17T17:58:56.534383Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-04-17T17:58:56.534665Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:56.534711Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:56.534749Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cuser, m0000000000cuser] 2025-04-17T17:58:56.534782Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uuser, m0000000000uuser] 2025-04-17T17:58:56.534813Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 
2025-04-17T17:58:56.534854Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:56.534890Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:58:56.534916Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-17T17:58:56.534943Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-17T17:58:56.534967Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-3 2025-04-17T17:58:56.534991Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-3 2025-04-17T17:58:56.535016Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-04-17T17:58:56.535045Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:56.535090Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:56.535160Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-17T17:58:56.535369Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:56.535402Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:56.535437Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:58:56.535464Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:58:56.535491Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] I0000000001 2025-04-17T17:58:56.535516Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-04-17T17:58:56.535544Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-04-17T17:58:56.535569Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-3 2025-04-17T17:58:56.535592Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-3 2025-04-17T17:58:56.535617Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-04-17T17:58:56.535645Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:56.535672Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:58:56.535820Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:58:56.536102Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:58:56.541628Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-17T17:58:56.541707Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-17T17:58:56.556159Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:56.556465Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-17T17:58:56.556533Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-17T17:58:56.556572Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-17T17:58:56.556613Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-17T17:58:56.556648Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2025-04-17T17:58:56.556685Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-04-17T17:58:56.558357Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:56.558512Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2025-04-17T17:58:56.558550Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-17T17:58:56.558580Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-17T17:58:56.558610Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-17T17:58:56.558637Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2025-04-17T17:58:56.558672Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-17T17:58:56.558730Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-17T17:58:56.558971Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-04-17T17:58:56.559049Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:56.559122Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-17T17:58:56.559162Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-17T17:58:56.559210Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-17T17:58:56.559489Z node 6 
:PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 18446744073709551615 PredicateRecipients: 22222 Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-17T17:58:56.559735Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:56.572405Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:56.572481Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-17T17:58:56.572514Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-17T17:58:56.572546Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-17T17:58:56.572588Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T17:58:56.572631Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-17T17:58:56.572668Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T17:58:56.572712Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-17T17:58:56.572759Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T17:58:56.572795Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-17T17:58:56.577248Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:365:2337], now have 1 active actors on pipe 2025-04-17T17:58:56.577440Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-17T17:58:56.582298Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvReadSetAck to 22222 2025-04-17T17:58:56.582437Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 >> TPartitionTests::ChangeConfig >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> TPartitionTests::ChangeConfig [GOOD] >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> TPersQueueTest::WriteAfterAlter >> TPartitionTests::ConflictingActsInSeveralBatches >> 
PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] Test command err: 2025-04-17T17:56:35.224675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342534073285220:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:35.224916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:35.597673Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018aa/r3tmp/tmpQ0EYBB/pdisk_1.dat 2025-04-17T17:56:36.077588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:36.077714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:36.080004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22691, node 1 2025-04-17T17:56:36.362911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0018aa/r3tmp/yandexyV2Ppb.tmp 2025-04-17T17:56:36.362952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0018aa/r3tmp/yandexyV2Ppb.tmp 2025-04-17T17:56:36.363193Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0018aa/r3tmp/yandexyV2Ppb.tmp 2025-04-17T17:56:36.363342Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:36.383166Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:36.395101Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:36.412403Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:36.419971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:36.483495Z INFO: TTestServer started on Port 29612 GrpcPort 22691 TClient is connected to server localhost:29612 PQClient connected to localhost:22691 WaitRootIsUp 'Root'... 
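(Aside on the TxId 67890 trace above: the PERSQUEUE tablet walks each transaction through EXECUTING -> EXECUTED -> WAIT_RS_ACKS, counting per-partition TEvTxCommitDone events against an expected total ("Received 1, Expected 2"), persisting the tx via WRITE_TX_COOKIE, and then holding it until every predicate recipient has acknowledged its read set ("PredicateAcks: 0/1"). A minimal sketch of that bookkeeping, with hypothetical names — this is not the actual NKikimr::NPQ code:)

    #include <cstddef>
    #include <cstdint>
    #include <set>

    // Hypothetical sketch of the commit bookkeeping visible in the trace above;
    // states and counters mirror the log, nothing here is real YDB code.
    enum class ETxState { Executing, Executed, WaitRsAcks, Done };

    struct TTxTracker {
        uint64_t TxId = 0;
        ETxState State = ETxState::Executing;
        size_t Expected = 0;                    // partitions that must commit
        size_t Received = 0;                    // TEvTxCommitDone events seen so far
        std::set<uint64_t> PredicateRecipients; // tablets that must ack our read set

        // "Handle TEvPQ::TEvTxCommitDone ... Received 2, Expected 2"
        void OnTxCommitDone() {
            if (++Received == Expected && State == ETxState::Executing)
                State = ETxState::Executed;     // "moved from EXECUTING to EXECUTED"
        }

        // "Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE)": the tx is durable,
        // acks go out, and we wait ("moved from EXECUTED to WAIT_RS_ACKS").
        void OnTxPersisted() {
            if (State == ETxState::Executed)
                State = ETxState::WaitRsAcks;
        }

        // Each incoming TEvReadSetAck shrinks the set; when empty, the tx is done.
        void OnReadSetAck(uint64_t tablet) {
            PredicateRecipients.erase(tablet);
            if (State == ETxState::WaitRsAcks && PredicateRecipients.empty())
                State = ETxState::Done;         // all "PredicateAcks" collected
        }
    };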
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:37.069707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:56:37.131506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:56:37.136903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:56:37.434735Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:37.445721Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:56:40.205490Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342534073285220:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:40.213762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:40.607500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342555548122396:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.607500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342555548122376:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.607633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:40.613234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:56:40.639171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342555548122398:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:56:40.875828Z node 1 :TX_PROXY ERROR: Actor# [1:7494342555548122463:2459] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:40.906108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:40.943663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:56:41.057889Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342555548122471:2351], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:41.059061Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM3ODhhODYtOWEzMjE4Zi1iMGUxNzE2Yy1mOTdhMzA3Mg==, ActorId: [1:7494342555548122372:2338], ActorState: ExecuteState, TraceId: 01js2eakgk6hq46j3gzkxv7dat, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:41.061185Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:41.090092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7494342559843090062:2642] === CheckClustersList. Ok 2025-04-17T17:56:47.368403Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T17:56:47.394494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7494342534073285525:2204]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T17:56:47.394530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T17:56:47.394547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:56:47.394615Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:56:47.394628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000197s, queue# 1 2025-04-17T17:56:47.394691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 0 row count 0 2025-04-17T17:56:47.394781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=SourceIdMeta2, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T17:56:47.394796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-04-17T17:56:47.394865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-17T17:56:47.394906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-04-17T17:56:47.394968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at
tablet# 72057594046644480 2025-04-17T17:56:47.395443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7494342534073285525:2204]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T17:56:47.395481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T17:56:47.395509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T17:56:47.398056Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T17:56:47.402929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494342585612894102:2792], Recipient [1:7494342534073285525:2204]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:47.402964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:56:47.402985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T17:56:47.403022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494342585612894098:2789], Recipient [1: ... ERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.153173Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.153203Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.153221Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.153240Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:57.156166Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7494343034468128930:2166]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:58:57.156217Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T17:58:57.156308Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7494343034468128930:2166], Recipient [6:7494343034468128930:2166]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:58:57.156340Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T17:58:57.162282Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343120367477218:2770], Partition 2, Sender [0:0:0], Recipient [6:7494343120367477306:2780], Cookie: 0 2025-04-17T17:58:57.162374Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343120367477306:2780]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.162402Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.162458Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.162545Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.162573Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.162609Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:57.162681Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343090302704902:2470], Partition 0, Sender [0:0:0], Recipient [6:7494343090302704962:2473], Cookie: 0 2025-04-17T17:58:57.162740Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343090302704962:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.162756Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.162780Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.162813Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.162831Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.162852Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:57.162894Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343120367477220:2771], Partition 1, Sender [0:0:0], Recipient [6:7494343120367477309:2782], Cookie: 0 2025-04-17T17:58:57.162926Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343120367477309:2782]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.162940Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.162965Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.162997Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.163015Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.163035Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:58:57.208520Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494343034468128930:2166]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:58:57.208568Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:58:57.208584Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T17:58:57.208596Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T17:58:57.208663Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-17T17:58:57.209076Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494343034468128930:2166]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T17:58:57.209107Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T17:58:57.209118Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T17:58:57.253098Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343128957412254:2851], Partition 4, Sender [0:0:0], Recipient [6:7494343128957412340:2858], Cookie: 0 2025-04-17T17:58:57.253197Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343128957412340:2858]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.253229Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.253299Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.253397Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.253445Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.253488Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:57.253564Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343128957412250:2850], Partition 3, Sender [0:0:0], Recipient [6:7494343128957412345:2861], Cookie: 0 2025-04-17T17:58:57.253602Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343128957412345:2861]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.253616Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.253644Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.253679Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.253694Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.253713Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T17:58:57.262952Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343120367477218:2770], Partition 2, Sender [0:0:0], Recipient [6:7494343120367477306:2780], Cookie: 0 2025-04-17T17:58:57.263059Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343120367477306:2780]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.263093Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.263162Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.263253Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.263283Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.263319Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:57.263394Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343090302704902:2470], Partition 0, Sender [0:0:0], Recipient [6:7494343090302704962:2473], Cookie: 0 2025-04-17T17:58:57.263432Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343090302704962:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.263448Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.263473Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.263508Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.263527Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.263546Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T17:58:57.263592Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494343120367477220:2771], Partition 1, Sender [0:0:0], Recipient [6:7494343120367477309:2782], Cookie: 0 2025-04-17T17:58:57.263629Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494343120367477309:2782]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.263641Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T17:58:57.263663Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T17:58:57.263695Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T17:58:57.263711Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T17:58:57.263727Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> KqpJoin::HashJoinWithAsTable [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |82.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] Test command err: Size: 128 Create chunk: 0.000029s Read by index: 0.000011s Iterate: 0.000016s Size: 252 Create chunk: 0.000046s Read by index: 0.000020s Iterate: 0.000020s Size: 1887 Create chunk: 0.000091s Read by index: 0.000139s Iterate: 0.000089s Size: 1658 Create chunk: 0.000084s Read by index: 0.000116s Iterate: 0.000079s Size: 1889 Create chunk: 0.000074s Read by index: 0.000093s Iterate: 0.000041s Size: 1660 Create chunk: 0.000098s Read by index: 0.000088s Iterate: 0.000050s >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 8047, MsgBus: 12302 2025-04-17T17:58:49.309175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343105723105615:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:49.309251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e66/r3tmp/tmpG66S2u/pdisk_1.dat 2025-04-17T17:58:49.958790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:49.962960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:50.001019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:58:50.058549Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8047, node 1 2025-04-17T17:58:50.126592Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:58:50.126613Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:58:50.337462Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:50.337484Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:50.337490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:50.337595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12302 TClient is connected to server localhost:12302 WaitRootIsUp 'Root'... 
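(Aside on the TTypeCodecsTest timings earlier in this block: cases like TestDeltaVarIntCodecAndRev exercise chunk codecs that store values as zigzag-encoded deltas in base-128 varints, which is why chunk sizes vary with the data. A self-contained sketch of that general technique — illustrative only, not the actual YDB codec classes:)

    #include <cstdint>
    #include <string>
    #include <vector>

    // Illustrative delta + zigzag varint coding; hypothetical names, not YDB code.
    static uint64_t ZigZag(int64_t v)   { return (uint64_t(v) << 1) ^ uint64_t(v >> 63); }
    static int64_t  UnZigZag(uint64_t u){ return int64_t(u >> 1) ^ -int64_t(u & 1); }

    static void PutVarint(std::string& out, uint64_t u) {
        while (u >= 0x80) { out.push_back(char(u | 0x80)); u >>= 7; }
        out.push_back(char(u));
    }

    std::string EncodeDeltaVarint(const std::vector<int64_t>& xs) {
        std::string out;
        int64_t prev = 0;
        for (int64_t x : xs) {            // store zigzagged deltas, not raw values
            PutVarint(out, ZigZag(x - prev));
            prev = x;
        }
        return out;
    }

    std::vector<int64_t> DecodeDeltaVarint(const std::string& in) {
        std::vector<int64_t> xs;          // assumes well-formed input (a sketch)
        int64_t prev = 0;
        for (size_t i = 0; i < in.size();) {
            uint64_t u = 0;
            int shift = 0;
            for (;;) {                    // read one base-128 varint
                uint8_t b = uint8_t(in[i++]);
                u |= uint64_t(b & 0x7f) << shift;
                if (!(b & 0x80)) break;
                shift += 7;
            }
            prev += UnZigZag(u);          // undo delta coding
            xs.push_back(prev);
        }
        return xs;
    }

With mostly-small deltas each value costs a single byte, which is the effect the per-codec chunk sizes and create/read/iterate timings above are measuring.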
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:51.297299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:51.316329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:58:51.332630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:51.517593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:51.669770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:58:51.746406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:58:52.700029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343118608009271:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:52.700126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:54.312124Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343105723105615:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:54.312224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:54.504062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.594480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.681938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.732318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.794773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.886033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.976833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:55.119023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343131492911854:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:55.119127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:55.120870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343131492911859:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:55.155385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:55.174870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343131492911861:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:55.253880Z node 1 :TX_PROXY ERROR: Actor# [1:7494343131492911922:3566] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:59.026877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:58:59.074137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T17:58:59.124701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T17:58:59.171829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T17:58:59.248222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T17:58:59.330449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::HashJoinWithAsTable [GOOD] Test command err: Trying to start YDB, gRPC: 16446, MsgBus: 21774 2025-04-17T17:58:49.367834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343109105792893:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:49.367876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e65/r3tmp/tmpZTefqn/pdisk_1.dat 2025-04-17T17:58:50.093980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:50.095576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:50.095668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:50.112407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16446, node 1 2025-04-17T17:58:50.337130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:50.337154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:50.337160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:50.337245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21774 TClient is connected to server localhost:21774 WaitRootIsUp
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:51.322146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:51.346431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:51.515545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:51.698107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:51.761733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:53.370073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343126285663841:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:53.370216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:54.370641Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343109105792893:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:54.370740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:54.503010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.568398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.631416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.690377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.752389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.843432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.912029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:55.119280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343134875599109:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:55.119343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:55.119589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343134875599114:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:55.151465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:55.179256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343134875599116:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:55.253806Z node 1 :TX_PROXY ERROR: Actor# [1:7494343134875599176:3565] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:59.004605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> IcbAsActorTests::TestHttpGetResponse >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionWriteQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] Test command err: 2025-04-17T17:58:55.896512Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:55.896597Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:55.914725Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-04-17T17:58:55.915560Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } 2025-04-17T17:58:56.520795Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:56.520859Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:56.538801Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:177:2192]
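(Aside on the "Got cmd write" dumps above: the partition persists its state under fixed-width KV keys — 'i'/'m' prefixes for partition meta and per-consumer offsets, and data-blob keys such as d0000000000_00000000000000000070_00000_0000000002_00000 later in this output, which the surrounding trace itself decodes as partition 0, offset 70, partNo 0, count 2. A hypothetical parser for that layout, with the field meanings read off this log rather than from YDB headers:)

    #include <cstdio>
    #include <string>

    // Inferred from keys in this log: <type><partition:10>_<offset:20>_<part:5>_<count:10>_<suffix>
    // where type 'd' marks committed data and 'x' not-yet-committed blobs
    // (per the "old key x... new key d..." lines below) -- an inference, not YDB's definition.
    struct TBlobKey {
        char Type = 0;
        unsigned Partition = 0;
        unsigned long long Offset = 0;
        unsigned PartNo = 0;
        unsigned Count = 0;
    };

    bool ParseBlobKey(const std::string& key, TBlobKey& out) {
        // 1 + 10 + 1 + 20 + 1 + 5 + 1 + 10 = 49 chars before any suffix
        if (key.size() < 49 || (key[0] != 'd' && key[0] != 'x'))
            return false;
        out.Type = key[0];
        return std::sscanf(key.c_str() + 1, "%10u_%20llu_%5u_%10u",
                           &out.Partition, &out.Offset, &out.PartNo, &out.Count) == 4;
    }

The zero-padded fixed widths keep lexicographic KV order equal to (partition, offset) order, which is consistent with the range reads and CmdDeleteRange bounds visible elsewhere in this output.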
2025-04-17T17:58:56.540441Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:56.000000Z 2025-04-17T17:58:56.540522Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:177:2192] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\200\276\235\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\200\276\235\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\200\276\235\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-17T17:58:57.459285Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:57.459346Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:57.475386Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:179:2194] 2025-04-17T17:58:57.477677Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:57.000000Z 2025-04-17T17:58:57.477745Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:179:2194] 2025-04-17T17:58:58.323311Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:58.323385Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:58.347342Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [4:179:2194] 2025-04-17T17:58:58.356149Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-17T17:58:58.000000Z 2025-04-17T17:58:58.356228Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [4:179:2194] Send change config Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\315\235\247\3442" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\315\235\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed 2025-04-17T17:58:59.283391Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:59.283461Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:59.300450Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:58:59.300642Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:59.300818Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-04-17T17:58:59.301461Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:58:59.301576Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:58:59.301683Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:58:59.301812Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:58:59.302214Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 
eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-17T17:58:59.302321Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:58:59.302361Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:58:59.302427Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:59.000000Z 2025-04-17T17:58:59.302466Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:58:59.302512Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-04-17T17:58:59.302570Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-17T17:58:59.302616Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:59.636810Z node 5 :PERSQUEUE INFO: new Cookie src1|1c69aa1-2f64dd1a-7613a0f3-2e2de471_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-17T17:58:59.636963Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 2025-04-17T17:58:59.637084Z node 5 :PERSQUEUE INFO: new Cookie src4|9796c36f-65b70d01-88669732-30cd9c7e_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 2025-04-17T17:58:59.637127Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 11 and act no: 12 2025-04-17T17:59:00.705757Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:00.705854Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-17T17:59:00.705902Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-04-17T17:59:00.706011Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, ... disk 2025-04-17T17:59:02.651238Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:02.651268Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 70 is stored on disk 2025-04-17T17:59:02.651291Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:59:02.651323Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 71 is stored on disk Got batch complete: 1 Wait tx committed for tx 4 Wait batch completion Wait batch completion 2025-04-17T17:59:02.651871Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 8 2025-04-17T17:59:02.651935Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 8 2025-04-17T17:59:02.651995Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:02.652043Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 121 count 2 nextOffset 72 batches 1, NewHead=Offset 72 PartNo 0 PackedSize 0 count 0 nextOffset 72 batches 0 2025-04-17T17:59:02.652203Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 7 partNo 0 2025-04-17T17:59:02.652626Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src4' seqNo 7 partNo 0 result is x0000000000_00000000000000000070_00000_0000000002_00000 size 121 2025-04-17T17:59:02.652708Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000070_00000_0000000002_00000 new key d0000000000_00000000000000000070_00000_0000000002_00000 size 121 WTime 12141 2025-04-17T17:59:02.653610Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 7 partNo 0 FormedBlobsCount 1 NewHead: Offset 100 PartNo 0 PackedSize 84 count 1 nextOffset 101 batches 1 2025-04-17T17:59:02.653698Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-04-17T17:59:02.653755Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 136 count 2 nextOffset 102 batches 1 2025-04-17T17:59:02.653790Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-04-17T17:59:02.653831Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 188 count 3 nextOffset 103 batches 1 2025-04-17T17:59:02.653870Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-04-17T17:59:02.653919Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, 
Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 240 count 4 nextOffset 104 batches 1 2025-04-17T17:59:02.653962Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-04-17T17:59:02.654021Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 292 count 5 nextOffset 105 batches 1 2025-04-17T17:59:02.654056Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 12 partNo 0 2025-04-17T17:59:02.654093Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 344 count 6 nextOffset 106 batches 1 2025-04-17T17:59:02.654131Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:02.654197Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:02.654248Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 121 count 2 nextOffset 72 batches 1, NewHead=Offset 100 PartNo 0 PackedSize 344 count 6 nextOffset 106 batches 1 2025-04-17T17:59:02.705549Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:02.706077Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 100,6 HeadOffset 70 endOffset 72 curOffset 106 d0000000000_00000000000000000100_00000_0000000006_00000| size 211 WTime 12141 2025-04-17T17:59:02.706269Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:02.706313Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:02.706355Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:59:02.706398Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:02.706444Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000070_00000_0000000002_00000 2025-04-17T17:59:02.706480Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc4 2025-04-17T17:59:02.706505Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000100_00000_0000000006_00000| 2025-04-17T17:59:02.706530Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:02.706557Z node 5 :PERSQUEUE DEBUG: 
[PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:02.706582Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:02.706623Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:02.706666Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got batch complete: 7 Got KV request Got KV request Wait kv request Wait tx committed for tx 8 2025-04-17T17:59:02.741616Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102 WriteNewSizeFromSupportivePartitions# 2 2025-04-17T17:59:02.741702Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:02.741786Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 72 is already written 2025-04-17T17:59:02.741829Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:02.741889Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 72 is already written 2025-04-17T17:59:02.741923Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:02.741960Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 72 is already written 2025-04-17T17:59:02.741988Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:02.742030Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 72 is already written 2025-04-17T17:59:02.742068Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:02.742121Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 72 is already written 2025-04-17T17:59:02.742148Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:59:02.742195Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 12, partNo: 0, Offset: 72 is already written 2025-04-17T17:59:02.742446Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:02.742486Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:02.742529Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000070_00000_0000000002_00000|, d0000000000_00000000000000000070_00000_0000000002_00000|] 2025-04-17T17:59:02.742566Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:02.742605Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:02.742639Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:02.742676Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait immediate tx complete 11 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 11 >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> KqpBatchUpdate::Large_2 [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TopicService::UseDoubleSlashInTopicPath [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TFetchRequestTests::HappyWay >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 19132, MsgBus: 16314 2025-04-17T17:58:20.652003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342981735718867:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:20.652035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c49/r3tmp/tmpJT0oPK/pdisk_1.dat 2025-04-17T17:58:21.359045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:21.361019Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:21.361098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:21.373212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19132, node 1 2025-04-17T17:58:21.566814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:21.566835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:21.566857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:21.566971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16314 TClient is connected to server localhost:16314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:22.569108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:22.581958Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:58:22.632994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:23.025318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:23.725707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:23.878851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
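Note: the rename step in the PERSQUEUE write above ("old key x0000000000_... new key d0000000000_...") exposes the partition blob key layout clearly enough to reconstruct: a one-character type prefix, then zero-padded decimal fields for partition, start offset, part number, message count, and an internal-part suffix, with a trailing '|' on freshly written head keys. A minimal C++ formatting sketch follows; the field meanings are inferred from these log lines, not taken from the YDB sources.

    #include <cstdio>
    #include <string>

    // Sketch only: field meanings (type prefix, partition, start offset,
    // part number, message count, internal part) are inferred from the
    // log lines above, not from the actual YDB key-building code.
    std::string MakeBlobKey(char type, unsigned partition,
                            unsigned long long offset, unsigned partNo,
                            unsigned count, unsigned internalPart,
                            bool newHead) {
        char buf[64];
        std::snprintf(buf, sizeof(buf), "%c%010u_%020llu_%05u_%010u_%05u%s",
                      type, partition, offset, partNo, count, internalPart,
                      newHead ? "|" : "");
        return std::string(buf);
    }

    // MakeBlobKey('d', 0, 70, 0, 2, 0, false) reproduces the renamed key
    // "d0000000000_00000000000000000070_00000_0000000002_00000" seen above.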
2025-04-17T17:58:25.654438Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342981735718867:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:25.654510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:26.875813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343007505524363:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:26.875899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:27.240823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:27.277409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:27.332027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:27.410134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:27.456339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:27.500186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:27.578394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:27.638434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343011800492325:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:27.638532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:27.642441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343011800492330:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:27.649774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:27.663333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343011800492332:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:27.729858Z node 1 :TX_PROXY ERROR: Actor# [1:7494343011800492388:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:29.139215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.352688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:58:36.352714Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 25240, MsgBus: 7677 2025-04-17T17:58:45.043012Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343089118729166:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:45.043060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c49/r3tmp/tmpHJLQXl/pdisk_1.dat 2025-04-17T17:58:45.330931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:45.331017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:45.335118Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:45.335738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25240, node 2 2025-04-17T17:58:45.495500Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:45.495521Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:45.495528Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:45.495670Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7677 TClient is connected to server localhost:7677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
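Note: the "Scheduled retry for error" lines above, and the growing "Restart cluster discovery in ..." delays further down in this log, both follow the usual retry-with-backoff shape. A minimal sketch of that pattern, assuming a generic retryable operation; this is not the actual YDB retry policy.

    #include <chrono>
    #include <random>
    #include <thread>

    // Generic retry with jittered exponential backoff. tryOnce is a
    // hypothetical stand-in for the failing call (e.g. pool creation or
    // cluster discovery); real policies usually also cap the delay.
    template <typename Op>
    bool RetryWithBackoff(Op tryOnce, int maxAttempts,
                          std::chrono::milliseconds base = std::chrono::milliseconds(10)) {
        std::mt19937 rng(std::random_device{}());
        auto delay = base;
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (tryOnce()) {
                return true;  // success, stop retrying
            }
            // +/-50% jitter keeps concurrent retries from synchronizing
            std::uniform_int_distribution<long long> jitter(
                delay.count() / 2, delay.count() * 3 / 2);
            std::this_thread::sleep_for(std::chrono::milliseconds(jitter(rng)));
            delay *= 2;  // exponential growth between attempts
        }
        return false;  // attempts exhausted, caller reports the error
    }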
2025-04-17T17:58:46.431374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:46.439237Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:58:46.457257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:46.574900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:58:46.909878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:58:46.996840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:49.902037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343106298600127:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:49.902139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:49.965453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.011473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.047137Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343089118729166:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:50.047206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:50.068137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.131160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.197018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.278267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.334560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.450316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343110593568090:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.450427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.454514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343110593568095:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.458752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:50.474169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343110593568097:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:50.579420Z node 2 :TX_PROXY ERROR: Actor# [2:7494343110593568153:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:52.194652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:00.309203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:59:00.309227Z node 2 :IMPORT WARN: Table profiles were not loaded >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> TPartitionTests::ConflictingCommitFails >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |82.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-17T17:56:50.167716Z :TestReorderedExecutor INFO: Random seed for debugging is 1744912610167671 2025-04-17T17:56:51.113360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342597359829365:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:51.172782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:51.195469Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342600961756045:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:51.195570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:51.596595Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a0/r3tmp/tmpN6hnKO/pdisk_1.dat 2025-04-17T17:56:51.669872Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:56:52.214807Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:52.226583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:52.330961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:52.331084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:52.339478Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:56:52.340857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:52.413422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:52.413489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16151, node 1 2025-04-17T17:56:52.495141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:52.503297Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:52.538469Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:52.562874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:52.903250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015a0/r3tmp/yandexX1fFeL.tmp 2025-04-17T17:56:52.903281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0015a0/r3tmp/yandexX1fFeL.tmp 2025-04-17T17:56:52.903420Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015a0/r3tmp/yandexX1fFeL.tmp 2025-04-17T17:56:52.903544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:53.010942Z INFO: TTestServer started on Port 3354 GrpcPort 16151 TClient is connected to server localhost:3354 PQClient connected to localhost:16151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:53.578386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
waiting... waiting... 2025-04-17T17:56:55.999178Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342597359829365:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:55.999222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:56.202299Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342600961756045:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:56.202367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:56.844706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342622436592836:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:56.847565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342622436592811:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:56.847695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:56.862805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:56:56.980476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342622436592840:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:56:57.115628Z node 2 :TX_PROXY ERROR: Actor# [2:7494342626731560164:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:57.618654Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.205850s 2025-04-17T17:56:57.618689Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.205922s 2025-04-17T17:56:57.708990Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342626731560171:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:57.714422Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjViOWVmYjQtMzg3YTVhYWItMzI5MzIxMTUtNjU2Yzg1ZGE=, ActorId: [2:7494342622436592809:2310], ActorState: ExecuteState, TraceId: 01js2eb3c7ccntqepeeqzsm812, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:57.741240Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342627424601545:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:57.742933Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJmMjIyZTctOTFkMWY3M2UtNjdjY2YzMDgtODZmNjcxNWE=, ActorId: [1:7494342623129634196:2336], ActorState: ExecuteState, TraceId: 01js2eb3jd46hj0ata9c1bsgqt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:57.756087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:56:57.758947Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:57.764237Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:57.983666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:56:58.170776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16151", true, true, 1000); 2025-04-17T17:56:58.511973Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2eb4t8c9xa3asaqym9hhyr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWMwYzdmZGUtNjRmNTdiMjgtMjM0NDVhMTctOWY1YTg1NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:74943426 ... 55732630:2520] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T17:59:01.009689Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7494343153955732630:2520] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T17:59:01.009725Z node 13 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T17:59:01.014248Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [13:7494343158250700032:2520], now have 1 active actors on pipe 2025-04-17T17:59:01.014767Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 14, Generation: 1 2025-04-17T17:59:01.018075Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:59:01.018129Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:59:01.018273Z node 14 :PERSQUEUE INFO: new Cookie src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T17:59:01.018423Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T17:59:01.018491Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:59:01.022541Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T17:59:01.022585Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T17:59:01.022694Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:59:01.026828Z node 13 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0 2025-04-17T17:59:01.028138Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912741028 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T17:59:01.028301Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T17:59:01.028527Z :INFO: [] MessageGroupId [src] SessionId [src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0] Write session: close. Timeout = 0 ms 2025-04-17T17:59:01.028575Z :INFO: [] MessageGroupId [src] SessionId [src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0] Write session will now close 2025-04-17T17:59:01.028630Z :DEBUG: [] MessageGroupId [src] SessionId [src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0] Write session: aborting 2025-04-17T17:59:01.029245Z :INFO: [] MessageGroupId [src] SessionId [src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0] Write session: gracefully shut down, all writes complete 2025-04-17T17:59:01.029293Z :DEBUG: [] MessageGroupId [src] SessionId [src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0] Write session: destroy 2025-04-17T17:59:01.034674Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7494343158250700032:2520] destroyed 2025-04-17T17:59:01.034760Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
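Note: the session init above returns MaxSeqNo for the source id, and earlier in this log the partition answers duplicate writes with "... is already written" instead of appending them again. That is per-source sequence-number deduplication; a minimal sketch under assumptions, with illustrative names rather than the YDB types.

    #include <cstdint>
    #include <string>
    #include <unordered_map>

    // Per-source high-water-mark dedup: a write is appended only if its
    // SeqNo is above the last one persisted for that source id; otherwise
    // it is acknowledged as already written. Illustrative names only.
    class TSourceIdDedup {
    public:
        // true  -> append the message
        // false -> duplicate; reply with the stored offset instead
        bool Accept(const std::string& sourceId, uint64_t seqNo) {
            auto [it, inserted] = MaxSeqNo_.try_emplace(sourceId, seqNo);
            if (!inserted && seqNo <= it->second) {
                return false;
            }
            it->second = seqNo;  // advance the high-water mark
            return true;
        }

    private:
        std::unordered_map<std::string, uint64_t> MaxSeqNo_;
    };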
2025-04-17T17:59:01.033449Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0 grpc read done: success: 0 data: 2025-04-17T17:59:01.033480Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0 grpc read failed 2025-04-17T17:59:01.033593Z node 13 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0 2025-04-17T17:59:01.033612Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|d4a94d5b-6b29910-cc5019ed-32ecf6e4_0 is DEAD 2025-04-17T17:59:01.033898Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T17:59:01.069244Z :INFO: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Starting read session 2025-04-17T17:59:01.069293Z :DEBUG: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Starting cluster discovery 2025-04-17T17:59:01.069581Z :INFO: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25792: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25792
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25792. " 2025-04-17T17:59:01.069632Z :DEBUG: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Restart cluster discovery in 0.007599s 2025-04-17T17:59:01.080929Z :DEBUG: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Starting cluster discovery 2025-04-17T17:59:01.081289Z :INFO: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25792: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25792
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25792. " 2025-04-17T17:59:01.081348Z :DEBUG: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Restart cluster discovery in 0.019188s 2025-04-17T17:59:01.102884Z :DEBUG: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Starting cluster discovery 2025-04-17T17:59:01.103100Z :INFO: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25792: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25792
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25792. " 2025-04-17T17:59:01.103136Z :DEBUG: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Restart cluster discovery in 0.027116s 2025-04-17T17:59:01.131364Z :DEBUG: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Starting cluster discovery 2025-04-17T17:59:01.131696Z :NOTICE: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25792: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25792
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25792. " } 2025-04-17T17:59:01.131884Z :NOTICE: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25792: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25792
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25792. " } 2025-04-17T17:59:01.132042Z :INFO: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Closing read session. Close timeout: 0.000000s 2025-04-17T17:59:01.132150Z :NOTICE: [/Root] [/Root] [5580f16a-60f3319-1a75721-1463e87b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T17:59:01.472947Z node 13 :KQP_EXECUTER ERROR: ActorId: [13:7494343158250700064:2531] TxId: 281474976720687. Ctx: { TraceId: 01js2eewgt4n0tyscqzmfrvr9w, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzE3ZWY2NTAtMWE5ZWYyMzktZjdmMzQ4ZTMtM2JhMjFlMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 14 2025-04-17T17:59:01.473495Z node 13 :KQP_COMPUTE ERROR: SelfId: [13:7494343158250700073:2541], TxId: 281474976720687, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2eewgt4n0tyscqzmfrvr9w. SessionId : ydb://session/3?node_id=13&id=YzE3ZWY2NTAtMWE5ZWYyMzktZjdmMzQ4ZTMtM2JhMjFlMmY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [13:7494343158250700064:2531], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T17:59:01.474236Z node 13 :KQP_COMPUTE ERROR: SelfId: [13:7494343158250700074:2542], TxId: 281474976720687, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=13&id=YzE3ZWY2NTAtMWE5ZWYyMzktZjdmMzQ4ZTMtM2JhMjFlMmY=. TraceId : 01js2eewgt4n0tyscqzmfrvr9w. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [13:7494343158250700064:2531], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T17:59:02.291147Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720688. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:02.291297Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7494343162545667399:2547] TxId: 281474976720688. Ctx: { TraceId: 01js2eexpj7m8naeava1etj8xs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDEyYzE3MzctMjU2YTQ4ODktY2ZhMDdjYzQtZWU0NjFhY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:02.291715Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZDEyYzE3MzctMjU2YTQ4ODktY2ZhMDdjYzQtZWU0NjFhY2E=, ActorId: [13:7494343162545667396:2547], ActorState: ExecuteState, TraceId: 01js2eexpj7m8naeava1etj8xs, Create QueryResponse for error on request, msg: 2025-04-17T17:59:02.292841Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2eexpj7m8naeava3z6dq8k" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-17T17:59:02.473821Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YzE3ZWY2NTAtMWE5ZWYyMzktZjdmMzQ4ZTMtM2JhMjFlMmY=, ActorId: [13:7494343153955732702:2531], ActorState: ExecuteState, TraceId: 01js2eewgt4n0tyscqzmfrvr9w, Create QueryResponse for error on request, msg: 2025-04-17T17:59:02.480312Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 14" severity: 1 } } TxMeta { id: "01js2eex2t98hqxdgcd71cscp5" } } YdbStatus: UNAVAILABLE ConsumedRu: 379 } >> TPQTest::TestPartitionPerConsumerQuota >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TopicService::RelativePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: 2025-04-17T17:54:27.652509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:54:27.653010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:54:27.653122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001423/r3tmp/tmphK9PW8/pdisk_1.dat 2025-04-17T17:54:28.005373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:54:28.050980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:28.090892Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T17:54:28.091704Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T17:54:28.091873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:28.091979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:28.107105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:54:28.286222Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T17:54:28.286287Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T17:54:28.286487Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553] 2025-04-17T17:54:28.405061Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T17:54:28.405169Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:54:28.405810Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T17:54:28.405909Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:54:28.406315Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:54:28.406518Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-04-17T17:54:28.406616Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T17:54:28.406975Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T17:54:28.408641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:54:28.409807Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T17:54:28.409873Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T17:54:28.440608Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:54:28.441768Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:54:28.442236Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:668:2573] 2025-04-17T17:54:28.442484Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:54:28.488464Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:54:28.489317Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:54:28.489444Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:54:28.491469Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:54:28.491613Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:54:28.491703Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:54:28.492110Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:54:28.492271Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:54:28.492429Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:685:2573] in generation 1 2025-04-17T17:54:28.503279Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:54:28.526840Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:54:28.527084Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:54:28.527245Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2584] 2025-04-17T17:54:28.527287Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:54:28.527328Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:54:28.527364Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:54:28.527631Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:668:2573], Recipient [1:668:2573]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:28.527687Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:54:28.528107Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:54:28.528221Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:54:28.528318Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:54:28.528382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:54:28.528453Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:54:28.528499Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:54:28.528535Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:54:28.528569Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:54:28.528629Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:54:28.529073Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:676:2578], Recipient [1:668:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:28.529114Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:54:28.529159Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:676:2578], sessionId# [0:0:0] 2025-04-17T17:54:28.529220Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:676:2578] 2025-04-17T17:54:28.529280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:54:28.529408Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:54:28.529626Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:54:28.529692Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:54:28.529787Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:54:28.529858Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:54:28.529902Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:54:28.529940Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T17:54:28.529987Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:54:28.530289Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:54:28.530333Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:54:28.530373Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:54:28.530406Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:54:28.530452Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:54:28.530483Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:54:28.530520Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:54:28.530570Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:54:28.530605Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:54:28.532180Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:688:2585], Recipient [1:668:2573]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:54:28.532233Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:54 ... p: 1529 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-04-17T17:59:02.906791Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-17T17:59:02.906837Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T17:59:02.906961Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T17:59:02.907017Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit CheckRead 2025-04-17T17:59:02.907083Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-17T17:59:02.907112Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit CheckRead 2025-04-17T17:59:02.907139Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T17:59:02.907166Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T17:59:02.907207Z node 28 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037889 2025-04-17T17:59:02.907242Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-17T17:59:02.907267Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T17:59:02.907293Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T17:59:02.907319Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2025-04-17T17:59:02.907425Z node 28 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1529 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-04-17T17:59:02.907628Z node 28 :TX_DATASHARD 
TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1529/18446744073709551615 2025-04-17T17:59:02.907670Z node 28 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[28:1109:2878], 1} after executionsCount# 1 2025-04-17T17:59:02.907708Z node 28 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[28:1109:2878], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:59:02.907769Z node 28 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[28:1109:2878], 1} finished in read 2025-04-17T17:59:02.907817Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-17T17:59:02.907843Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T17:59:02.907869Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T17:59:02.907897Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2025-04-17T17:59:02.907941Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-17T17:59:02.907970Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T17:59:02.907997Z node 28 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037889 has finished 2025-04-17T17:59:02.908025Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T17:59:02.908102Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T17:59:02.908154Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T17:59:02.908190Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T17:59:03.015297Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037889] send [28:959:2766] 2025-04-17T17:59:03.015358Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037889] push event to server [28:959:2766] 2025-04-17T17:59:03.015557Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] ::Bootstrap [28:1112:2881] 2025-04-17T17:59:03.015590Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] lookup [28:1112:2881] 2025-04-17T17:59:03.015705Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] queue send [28:1112:2881] 2025-04-17T17:59:03.015815Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [28:1109:2878], Recipient [28:706:2590]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-17T17:59:03.015866Z node 28 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } 2025-04-17T17:59:03.015947Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] forward result local node, try to connect [28:1112:2881] 2025-04-17T17:59:03.015989Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890]::SendEvent [28:1112:2881] 2025-04-17T17:59:03.016174Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [28:1113:2882], Recipient [28:1065:2850]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:59:03.016211Z node 28 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2025-04-17T17:59:03.016252Z node 28 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [28:1112:2881], serverId# [28:1113:2882], sessionId# [0:0:0] 2025-04-17T17:59:03.016294Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] connected with status OK role: Leader [28:1112:2881] 2025-04-17T17:59:03.016333Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] send queued [28:1112:2881] 2025-04-17T17:59:03.016363Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] push event to server [28:1112:2881] 2025-04-17T17:59:03.016598Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [28:1109:2878], Recipient [28:1065:2850]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1529 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-04-17T17:59:03.016717Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-17T17:59:03.016772Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T17:59:03.016865Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-17T17:59:03.016933Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-04-17T17:59:03.017003Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T17:59:03.017032Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-04-17T17:59:03.017064Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-17T17:59:03.017097Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-17T17:59:03.017155Z node 28 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-04-17T17:59:03.017199Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T17:59:03.017224Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-17T17:59:03.017250Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-04-17T17:59:03.017276Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-17T17:59:03.017405Z node 28 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1529 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-04-17T17:59:03.017626Z node 28 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v1529/18446744073709551615 2025-04-17T17:59:03.017675Z node 28 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[28:1109:2878], 2} after executionsCount# 1 2025-04-17T17:59:03.017715Z node 28 :TX_DATASHARD TRACE: 72075186224037890 read 
iterator# {[28:1109:2878], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T17:59:03.017783Z node 28 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[28:1109:2878], 2} finished in read 2025-04-17T17:59:03.017840Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T17:59:03.017870Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-04-17T17:59:03.017898Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T17:59:03.017928Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-04-17T17:59:03.017976Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T17:59:03.018003Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T17:59:03.018028Z node 28 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-04-17T17:59:03.018059Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-17T17:59:03.018154Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T17:59:03.018301Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T17:59:03.018344Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-17T17:59:03.018955Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] send [28:1112:2881] 2025-04-17T17:59:03.018994Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] push event to server [28:1112:2881] 2025-04-17T17:59:03.019101Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [28:1109:2878], Recipient [28:1065:2850]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2025-04-17T17:59:03.019141Z node 28 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 2 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } } >> BackupRestore::RestoreReplicationWithoutSecret [GOOD] >> BackupRestore::RestoreExternalDataSourceWithoutSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2025-04-17T17:59:04.273072Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:04.273154Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:04.331914Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:59:04.332278Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:04.332694Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-04-17T17:59:04.333497Z node 1 :PERSQUEUE DEBUG: 
[Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 2025-04-17T17:59:04.333572Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] 2025-04-17T17:59:04.333631Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:59:04.334105Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:04.334412Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:04.334452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:04.334510Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:59:04.334547Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:59:04.334572Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-04-17T17:59:04.334592Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-04-17T17:59:04.334609Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-04-17T17:59:04.334637Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:04.334671Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:59:04.334748Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:04.334907Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-04-17T17:59:04.334980Z node 1 :PERSQUEUE INFO: new Cookie owner1|bed1195e-2c2ad28c-d9a7d5f3-9a3386b0_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-04-17T17:59:04.335308Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:04.335406Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2025-04-17T17:59:04.335646Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2025-04-17T17:59:04.335720Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2025-04-17T17:59:04.335841Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-04-17T17:59:04.336777Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-17T17:59:04.337422Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-04-17T17:59:04.337617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:04.337656Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:04.344692Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-04-17T17:59:04.344757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:59:04.344813Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-04-17T17:59:04.344854Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000| 2025-04-17T17:59:04.344877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:59:04.344908Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:04.344956Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:59:04.392018Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:04.392111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-04-17T17:59:04.392192Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-04-17T17:59:04.392363Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:04.749533Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-04-17T17:59:04.785424Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2025-04-17T17:59:04.785533Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. 
Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2025-04-17T17:59:04.785685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2025-04-17T17:59:04.786148Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2025-04-17T17:59:04.786237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1329 2025-04-17T17:59:04.787177Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2025-04-17T17:59:04.787721Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1329 2025-04-17T17:59:04.787864Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:04.787899Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:04.787948Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-04-17T17:59:04.787983Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:59:04.788019Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000 2025-04-17T17:59:04.788044Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-04-17T17:59:04.788063Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000200_00000_0000000001_00000| 2025-04-17T17:59:04.788081Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:59:04.788124Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:04.788169Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:59:04.809003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:04.809113Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-04-17T17:59:04.809195Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2025-04-17T17:59:04.809505Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:04.809569Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:04.809622Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [d0000000001_00000000000000000100_00000_0000000001_00000|, d0000000001_00000000000000000100_00000_0000000001_00000|] 2025-04-17T17:59:04.809655Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:59:04.809691Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:59:04.809722Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:5 ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 
311 Iteration 312 Iteration 313 ... Iteration 816 Iteration 817 Iteration
818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage >> TPartitionTests::IncorrectRange >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> 
TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> TPartitionTests::IncorrectRange [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess >> BackupRestoreS3::PrefixedVectorIndex [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TPartitionTests::GetPartitionWriteInfoError >> TPQTest::TestSourceIdDropByUserWrites >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear |82.6%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TPartitionTests::NonConflictingCommitsBatch >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] >> TFstClassSrcIdPQTest::NoMapping |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TPartitionTests::ConflictingCommitFails [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |82.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TPersQueueTest::CacheHead [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback >> TPersQueueTest::CheckACLForGrpcWrite >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false |82.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> TPQTest::TestReadSubscription [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TPartitionTests::GetUsedStorage >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:58:05.934479Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:05.934556Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:58:05.994869Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:06.034508Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:06.035597Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:58:06.045242Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, 
State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:58:06.047297Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-17T17:58:06.061780Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:58:06.077566Z node 1 :PERSQUEUE INFO: new Cookie owner1|15720004-8bbdd8ae-fa44495f-ec599ddc_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-17T17:58:06.078090Z node 1 :PERSQUEUE INFO: new Cookie owner2|3ca462e-8e05f78-3272015e-b9f1e6ce_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-04-17T17:58:06.132151Z node 1 :PERSQUEUE INFO: new Cookie owner1|4ffb5e1e-ca818d01-8ba61bc-7300dc_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:58:06.920470Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:06.920545Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-04-17T17:58:06.953936Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:06.954939Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:58:06.955525Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-04-17T17:58:06.957792Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-04-17T17:58:06.967764Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-04-17T17:58:06.969531Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-04-17T17:58:06.979834Z node 2 :PERSQUEUE INFO: new 
Cookie owner1|d7a87324-a71d34ff-7e053ee9-f0d723b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-17T17:58:06.980285Z node 2 :PERSQUEUE INFO: new Cookie owner2|3f444091-11b81584-bdb87eaf-308f456b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-04-17T17:58:07.020343Z node 2 :PERSQUEUE INFO: new Cookie owner1|24d3d88d-8190afc6-f8e3657c-ac081568_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:58:07.910578Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:07.910643Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-17T17:58:07.931102Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:07.931860Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-17T17:58:07.932393Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-17T17:58:07.938692Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-17T17:58:07.940299Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-04-17T17:58:07.942006Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-04-17T17:58:07.967965Z node 3 :PERSQUEUE INFO: new Cookie owner1|d6369924-7fb42906-4b98253b-3988cee9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-17T17:58:07.968355Z node 3 :PERSQUEUE INFO: new Cookie owner2|4bdd954c-d7148193-f0cc7396-70db4ec1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-04-17T17:58:08.000754Z node 3 :PERSQUEUE INFO: new Cookie owner1|b58fd199-fc1ff83d-763fe11-69e625d1_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for 
TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-17T17:58:09.175852Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:09.175952Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-17T17:58:09.197174Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:09.198001Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-04-17T17:58:09.198677Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-04-17T17:58:09.201143Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-04-17T17:58:09.202958Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-04-17T17:58:09.204755Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-04-17T17:58:09.211094Z node 4 :PERSQUEUE INFO: new Cookie default|22ea6797-e85e004a-4eca4ce4-1618eaf8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner defau ... 
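The capture above repeats a fixed two-step pattern per partition: a "bootstrapping N" record followed by "init complete for topic '<topic>' partition N generation G". A minimal Python sketch for pairing the two events and reporting per-partition init latency; it assumes the capture is available as one plain-text string (log_text) and that every such record carries the ":PERSQUEUE INFO:" prefix seen here.

    import re
    from datetime import datetime

    # Matches records such as:
    #   2025-04-17T17:58:06.047297Z node 1 :PERSQUEUE INFO:
    #   [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 ...
    EVENT = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)Z node (?P<node>\d+) "
        r":PERSQUEUE INFO: \[PQ: \d+, Partition: (?P<part>\d+), "
        r"State: StateInit\] (?P<what>bootstrapping|init complete)"
    )

    def init_latencies(log_text):
        """Yield (node, partition, seconds) from 'bootstrapping' to 'init complete'."""
        started = {}
        for m in EVENT.finditer(log_text):
            key = (m["node"], m["part"])
            ts = datetime.fromisoformat(m["ts"])
            if m["what"] == "bootstrapping":
                started[key] = ts
            elif key in started:
                yield key[0], key[1], (ts - started.pop(key)).total_seconds()

Because re.finditer scans the whole string, the sketch works even on a flattened capture like this one, where several records share a physical line.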
Ids: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 52 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 52 ReadRuleGenerations: 52 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 52 Important: false } Consumers { Name: "user1" Generation: 52 Important: true } 2025-04-17T17:59:10.459396Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [52:183:2196] 2025-04-17T17:59:10.463196Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [52:183:2196] 2025-04-17T17:59:10.466419Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [52:184:2197] 2025-04-17T17:59:10.468378Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [52:184:2197] 2025-04-17T17:59:10.472929Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [52:185:2198] 2025-04-17T17:59:10.475781Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [52:185:2198] 2025-04-17T17:59:10.479004Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [52:186:2199] 2025-04-17T17:59:10.481268Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [52:186:2199] 2025-04-17T17:59:10.484115Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [52:187:2200] 2025-04-17T17:59:10.486258Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [52:187:2200] 2025-04-17T17:59:10.513778Z node 52 :PERSQUEUE INFO: new Cookie default|64e53842-62fc4dff-c670cde0-f09a484d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:11.588709Z node 52 :PERSQUEUE INFO: new Cookie default|ea8a793d-b1a8c090-16f01b3-d974611b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:11.615217Z node 52 :PERSQUEUE INFO: new Cookie default|b27ba7dc-25448e75-dbd4b6a3-93a222e4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:108:2057] recipient: [53:101:2135] 2025-04-17T17:59:12.605505Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:12.605617Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is 
[0:0:0] sender: [53:149:2057] recipient: [53:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [53:149:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927938 is [53:153:2174] sender: [53:154:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:177:2057] recipient: [53:14:2061] 2025-04-17T17:59:12.633846Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:12.635982Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 53 actor [53:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 53 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 53 ReadRuleGenerations: 53 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 53 Important: false } Consumers { Name: "user1" Generation: 53 Important: true } 2025-04-17T17:59:12.637089Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:183:2196] 2025-04-17T17:59:12.640984Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [53:183:2196] 2025-04-17T17:59:12.644360Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:184:2197] 2025-04-17T17:59:12.646696Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [53:184:2197] 2025-04-17T17:59:12.649260Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [53:185:2198] 2025-04-17T17:59:12.651966Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [53:185:2198] 2025-04-17T17:59:12.654871Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [53:186:2199] 2025-04-17T17:59:12.656843Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [53:186:2199] 2025-04-17T17:59:12.660949Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [53:187:2200] 2025-04-17T17:59:12.668689Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [53:187:2200] 2025-04-17T17:59:12.682778Z node 53 :PERSQUEUE INFO: new Cookie default|dad0c9e4-a545451e-1391d0f9-90f3f2c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:13.803410Z node 53 :PERSQUEUE INFO: new 
Cookie default|3bb58a83-68abc232-43057897-4a42386e_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:13.838298Z node 53 :PERSQUEUE INFO: new Cookie default|f9042fde-63719620-7e661506-81409c46_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-04-17T17:59:14.660158Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:14.660233Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-04-17T17:59:14.679527Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:14.681430Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 54 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 54 ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } Consumers { Name: "user1" Generation: 54 Important: true } 2025-04-17T17:59:14.682424Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-04-17T17:59:14.685296Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-04-17T17:59:14.688475Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [54:187:2200] 2025-04-17T17:59:14.691647Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [54:187:2200] 2025-04-17T17:59:14.694242Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [54:188:2201] 2025-04-17T17:59:14.698354Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [54:188:2201] 
2025-04-17T17:59:14.701437Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [54:189:2202] 2025-04-17T17:59:14.703673Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [54:189:2202] 2025-04-17T17:59:14.706668Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-04-17T17:59:14.708799Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-04-17T17:59:14.720761Z node 54 :PERSQUEUE INFO: new Cookie default|a8e7a34e-a83814a-dcad8226-dbca1978_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:15.827909Z node 54 :PERSQUEUE INFO: new Cookie default|b22f5c58-1b863486-ae66a9e3-fb805b75_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:15.853606Z node 54 :PERSQUEUE INFO: new Cookie default|acfb7a2d-e078b5a1-b43fbfd5-29935265_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TPartitionTests::GetUsedStorage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:59:07.414555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:07.414770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:07.414868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:07.414911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:07.421217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:07.421313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:07.421421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:07.421521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:07.427578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:07.566758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:07.566802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:07.602936Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:07.604802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:07.604968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:07.628256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:07.628491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:07.644876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:07.645290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:07.697266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:07.758481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:07.758597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:07.758821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:07.758881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:07.758941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:07.759090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:07.778549Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:59:07.964691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:07.972411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:07.982878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:07.987701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:07.987806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:07.999751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:07.999948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:08.000191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T17:59:08.000346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:08.000394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:08.000478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:08.002844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.002919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:08.002966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:08.011028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.011085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.011145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:08.011227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.022108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:08.031229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:08.040825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:08.042121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:08.042314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:08.042371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:08.054519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:08.054610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:08.054796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:08.054897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-17T17:59:08.062044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:08.062118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:08.062365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:08.062415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:08.062849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.062914Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:08.063034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:08.063089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.063140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:08.063173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.063211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:08.063255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.063285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:08.063315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:08.063389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:08.063429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:08.063494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:08.065379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:08.065525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:08.065577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
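The schemeshard trace tracks each operation part through numeric states ("Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240" above). A sketch, under the same flattened-text assumption, that reconstructs the per-operation transition chain:

    import re
    from collections import defaultdict

    TRANSITION = re.compile(
        r"Change state for txid (?P<tx>\d+):(?P<part>\d+) (?P<src>\d+) -> (?P<dst>\d+)"
    )

    def transition_chains(log_text):
        """Map 'txid:part' to its ordered list of (from_state, to_state) transitions."""
        chains = defaultdict(list)
        for m in TRANSITION.finditer(log_text):
            chains[m["tx"] + ":" + m["part"]].append((int(m["src"]), int(m["dst"])))
        return dict(chains)

    # Applied to the trace above this yields {'1:0': [(2, 3), (3, 128), (128, 240)], ...}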
shard: 72057594046678944, txId: 103, path id: 1 2025-04-17T17:59:16.196759Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:59:16.196825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:16.196872Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-17T17:59:16.196930Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-17T17:59:16.198048Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:16.198135Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:16.198165Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:59:16.198219Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:59:16.198250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:16.199152Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:16.199268Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:16.199304Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:59:16.199334Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:59:16.199365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T17:59:16.199436Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:59:16.201786Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:59:16.201852Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:59:16.201887Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:59:16.201923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-04-17T17:59:16.202113Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:16.202169Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 
2025-04-17T17:59:16.202376Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:16.202438Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:16.202499Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:16.202543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:16.202600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:59:16.202651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:16.202714Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:59:16.202756Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:59:16.202985Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:16.203680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:59:16.205091Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T17:59:16.205319Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-17T17:59:16.205562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:16.205910Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186234409547 2025-04-17T17:59:16.207519Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T17:59:16.207789Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:59:16.208446Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-17T17:59:16.208696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:59:16.209217Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-04-17T17:59:16.210546Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2025-04-17T17:59:16.215224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:59:16.215520Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186234409548 
2025-04-17T17:59:16.217180Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T17:59:16.217442Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:59:16.218532Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:16.218598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:16.218772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:59:16.219470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:16.219538Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:16.219623Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:16.221760Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:59:16.221829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T17:59:16.221955Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:59:16.221984Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-17T17:59:16.231475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:59:16.231545Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-17T17:59:16.231642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T17:59:16.231697Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-17T17:59:16.232351Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:59:16.239134Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:59:16.239555Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:59:16.239615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:59:16.240154Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:59:16.240291Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:59:16.240346Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 103: satisfy waiter [7:584:2525] TestWaitNotification: OK eventTxId 103 2025-04-17T17:59:16.241007Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:16.241233Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 276us result status StatusPathDoesNotExist 2025-04-17T17:59:16.241417Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2025-04-17T17:58:53.875822Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:53.875923Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:53.895933Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:58:53.896189Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:53.896742Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:178:2193] 2025-04-17T17:58:53.897707Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:58:53.897929Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:58:53.898090Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:58:53.898390Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:58:53.898820Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:58:53.898929Z node 1 :PERSQUEUE DEBUG: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:58:53.898972Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:58:53.899035Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:53.000000Z 2025-04-17T17:58:53.899068Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:58:53.899137Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:178:2193] 2025-04-17T17:58:53.899208Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:58:53.899280Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:54.220763Z node 1 :PERSQUEUE INFO: new Cookie src1|a9ff6e00-7e4817f1-ebf6609f-236d59b7_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-17T17:58:54.220944Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 6 and act no: 7 2025-04-17T17:58:55.258847Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:58:55.258981Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-17T17:58:55.259042Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-04-17T17:58:55.259103Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 6 2025-04-17T17:58:56.606627Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:58:56.606793Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:58:56.606874Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:58:56.606910Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch of 1 completion Wait batch completion Expect no KV request 2025-04-17T17:58:56.607126Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-17T17:58:56.607185Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-17T17:58:56.607267Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 
2025-04-17T17:58:56.607327Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 Got batch complete: 1 Wait for tx 3 predicate failure 2025-04-17T17:58:56.853281Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-04-17T17:58:56.853346Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-04-17T17:58:56.853396Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:58:56.853456Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 Got batch complete: 2 Wait for tx 4 predicate failure Wait batch of 3 completion Wait batch completion Expect no KV request Wait batch completion Wait for no tx committed 2025-04-17T17:58:57.096271Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:57.096336Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:57.096437Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:57.096496Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:58:57.096526Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:57.096550Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:57.096570Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:58:57.096598Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:57.096658Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait kv request Wait for commits Wait tx committed for tx 0 2025-04-17T17:58:57.329088Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 2025-04-17T17:58:57.921678Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:57.921754Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:57.937745Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request Got KV request 2025-04-17T17:58:57.938020Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:57.938322Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:176:2191] 2025-04-17T17:58:57.939221Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request
2025-04-17T17:58:57.939423Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:58:57.939628Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:58:57.939808Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:58:57.940222Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-17T17:58:57.940318Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:58:57.940359Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:58:57.940407Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:58:57.000000Z 2025-04-17T17:58:57.940441Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:58:57.940505Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:176:2191] 2025-04-17T17:58:57.940560Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-17T17:58:57.940603Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:58:58.252868Z node 2 :PERSQUEUE INFO: new Cookie src1|67956c21-96ca392c-ce91dfa3-e2f71e8b_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-17T17:58:58.253005Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 2025-04-17T17:58:59.313782Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:00.700293Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion Wait 1st KV request Wait kv request 2025-04-17T17:59:00.700565Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-17T17:59:00.700611Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-17T17:59:00.700675Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1 2025-04-17T17:59:00.700729Z node 2 :PERSQUEUE DEBUG: [PQ: 720575940379 ... 
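Every "new Cookie" record in these captures follows the shape "<owner>|<hex id>_<counter> generated for partition <P> topic '<T>'". A sketch that tallies cookie generations per (owner, partition, topic); the field names are mine, not YDB's:

    import re
    from collections import Counter

    COOKIE = re.compile(
        r"new Cookie (?P<owner>[\w-]+)\|(?P<hexid>[0-9a-f-]+)_(?P<seq>\d+) "
        r"generated for partition (?P<part>\d+) topic '(?P<topic>[^']+)'"
    )

    def cookie_counts(log_text):
        """Count generated write cookies keyed by (owner, partition, topic)."""
        return Counter(
            (m["owner"], int(m["part"]), m["topic"]) for m in COOKIE.finditer(log_text)
        )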
c' partition 0 user client-1 offset is set to 3 (startOffset 0) session session-client-1 2025-04-17T17:59:12.699535Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:12.699590Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:12.699633Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:12.699680Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:12.699715Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:12.699736Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-17T17:59:12.699757Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-17T17:59:12.699784Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:12.699823Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait batch completion Wait kv request 2025-04-17T17:59:12.971965Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:12.972055Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'client-1' Bad request (gap) Offset 3 Begin 0 Got batch complete: 1 Wait batch completion Wait kv request 2025-04-17T17:59:12.972409Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:12.972456Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:12.972517Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:12.972560Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:12.972615Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:12.972650Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:12.972687Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 2025-04-17T17:59:12.972952Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-04-17T17:59:12.998587Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:14.130802Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 3 Wait kv request 2025-04-17T17:59:14.131074Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:14.131142Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:14.131229Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-04-17T17:59:14.131462Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:14.131519Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:14.131567Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:14.131610Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:14.131643Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:14.131669Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-17T17:59:14.131693Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-17T17:59:14.131727Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:14.131769Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait immediate tx complete 10 2025-04-17T17:59:14.142485Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 11 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-04-17T17:59:15.005294Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:15.005392Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:15.024311Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:59:15.024684Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:15.024995Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-04-17T17:59:15.025978Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:59:15.026172Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:15.026378Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:15.027098Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:15.027583Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:59:15.027687Z node 5 :PERSQUEUE DEBUG:
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:15.027730Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:15.027782Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:15.000000Z 2025-04-17T17:59:15.027829Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:15.027880Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-04-17T17:59:15.027943Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:59:15.028006Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:15.028088Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:15.028141Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T17:59:15.028186Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T17:59:15.028545Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-04-17T17:59:15.028732Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-04-17T17:59:15.028784Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 
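The DumpKeyValueRequest dumps and the TInitDataRangeStep record above show the KV key families the partition actor reads and writes: 'i<partition>' and 'I<partition>' for per-partition metadata, 'm<partition>c<consumer>' and 'm<partition>u<consumer>' for per-consumer state, and 'd<partition>_<startOffset>_<partNo>_<count>_<internalParts>' for data blobs. Below is a minimal C++ sketch of how such keys could be composed; it is reconstructed from the dumped keys alone, not taken from the YDB sources, so every helper name and field interpretation in it is an assumption.

    #include <cstdint>
    #include <cstdio>
    #include <string>

    // Zero-pad an unsigned value to a fixed width, matching the dumped keys.
    static std::string Pad(uint64_t value, int width) {
        char buf[32];
        std::snprintf(buf, sizeof(buf), "%0*llu", width,
                      static_cast<unsigned long long>(value));
        return buf;
    }

    // 'i0000000000' / 'I0000000000': per-partition meta records; the prefix
    // character is taken verbatim from the dump, its exact meaning is assumed.
    std::string MetaKey(char prefix, uint32_t partition) {
        return std::string(1, prefix) + Pad(partition, 10);
    }

    // 'm0000000000cclient-1' / 'm0000000000uclient-1': per-consumer records,
    // with a 'c' or 'u' variant per consumer (interpretation assumed).
    std::string ConsumerKey(uint32_t partition, char kind, const std::string& consumer) {
        return "m" + Pad(partition, 10) + kind + consumer;
    }

    // 'd0000000000_00000000000000000000_00000_0000000050_00000': data blob key.
    // The field reading (partition, start offset, part number, message count,
    // internal parts) is inferred from the 'Got data offset 0 count 50' line
    // that accompanies the key in the log.
    std::string DataKey(uint32_t partition, uint64_t startOffset, uint32_t partNo,
                        uint32_t count, uint32_t internalParts) {
        return "d" + Pad(partition, 10) + "_" + Pad(startOffset, 20) + "_" +
               Pad(partNo, 5) + "_" + Pad(count, 10) + "_" + Pad(internalParts, 5);
    }

Under these assumptions, DataKey(0, 0, 0, 50, 0) reproduces the key logged above, d0000000000_00000000000000000000_00000_0000000050_00000.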
Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 2025-04-17T17:59:16.449860Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:16.449998Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Wait batch completion Wait kv request 2025-04-17T17:59:16.694004Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 1 2025-04-17T17:59:16.694084Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 1 2025-04-17T17:59:16.694298Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:16.694350Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:16.694399Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:16.694449Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:16.694484Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:16.694512Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-17T17:59:16.694540Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-17T17:59:16.694572Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:16.694614Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 1 2025-04-17T17:59:16.726791Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait for no tx committed ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::PrefixedVectorIndex [GOOD] Test command err: 2025-04-17T17:56:45.641121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342574230530552:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:45.641180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpxjYkYs/pdisk_1.dat 2025-04-17T17:56:46.402733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:46.450715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:46.450803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:46.459505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7129, node 1 2025-04-17T17:56:46.782890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-17T17:56:46.782919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:46.782926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:46.783049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:47.226002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:50.642434Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342574230530552:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:50.642499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:51.005286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342600000335403:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:51.005376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:51.009492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342600000335415:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:51.016368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:56:51.069511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342600000335417:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:56:51.137908Z node 1 :TX_PROXY ERROR: Actor# [1:7494342600000335484:2690] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:51.706479Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.219535s 2025-04-17T17:56:51.706521Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.219604s 2025-04-17T17:56:51.963237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2eayf04s4nns7axyv51kdq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVkMjIyNjAtNGRlYjZmZTEtZjhhODZkNDUtNTIxNGQ2MGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/"Create temporary directory "/Root/~backup_20250417T175651" in databaseProcess "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view"Write view into "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view/permissions.pb"Remove temporary directory "/Root/~backup_20250417T175651" in database2025-04-17T17:56:52.149983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view"Restore view "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmpF5r8Ek/view/permissions.pb"2025-04-17T17:56:52.346231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T17:56:52.555127Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2eaz24922zsd876jvbw7tg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVkMjIyNjAtNGRlYjZmZTEtZjhhODZkNDUtNTIxNGQ2MGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:56:54.662541Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342614170224839:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:54.662885Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00081b/r3tmp/tmphHposQ/pdisk_1.dat 2025-04-17T17:56:54.979179Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:55.027094Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:55.027181Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:55.033344Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14435, node 4 2025-04-17T17:56:55.221741Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:55.221767Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:55.221775Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:55.221939Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:55.567870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:55.587155Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:56:58.790349Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342631350094935:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:58.790444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:58.825849Z node 4 :TX_PROXY DEBUG: actor# [4:7494342614170224929:2138] Handle TEvProposeTransaction 2025-04-17T17:56:58.825882Z node 4 :TX_PROXY DEBUG: actor# [4:7494342614170224929:2138] TxId# 281474976715658 ProcessProposeTransaction 2025-04-17T17:56:58.825930Z node 4 :TX_PROXY DEBUG: actor# [4:7494342614170224929:2138] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [4:7494342631350094956:2627] 2025-04-17T17:56:58.895168Z node 4 :TX_PROXY DEBUG: Actor# [4:7494342631350094956:2627] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction ... vateTable: false } 2025-04-17T17:59:07.975025Z node 34 :TX_PROXY DEBUG: Actor# [34:7494343184344787533:4869] Handle TEvDescribeSchemeResult Forward to# [34:7494343184344787531:2464] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715762 CreateStep: 1744912745184 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 
UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 10 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Group" KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 
4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 >> TPartitionTests::UserActCount [GOOD] >> TPartitionTests::TooManyImmediateTxs >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TPQTabletTests::Huge_ProposeTransacton [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-04-17T17:59:08.864083Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:08.864185Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:08.886662Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-17T17:59:08.888403Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-17T17:59:08.000000Z 2025-04-17T17:59:08.888489Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\233\236\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\233\236\247\3442" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\233\236\247\3442" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } 2025-04-17T17:59:09.961590Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:09.961661Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:09.976701Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:59:09.976955Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:09.977194Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [2:176:2191] 2025-04-17T17:59:09.977992Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2025-04-17T17:59:09.978053Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [2:176:2191] 2025-04-17T17:59:09.978107Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:59:09.978148Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:09.978341Z node 2 :PERSQUEUE INFO: new Cookie owner1|390f9e4d-16e350f4-b5faab9c-8d551f23_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-04-17T17:59:09.978460Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2025-04-17T17:59:09.978771Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-04-17T17:59:09.979716Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-17T17:59:09.980300Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 127 2025-04-17T17:59:09.980458Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:09.980542Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:09.980583Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-04-17T17:59:09.980624Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-04-17T17:59:09.980660Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-04-17T17:59:09.980693Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000100_00000_0000000001_00000| 2025-04-17T17:59:09.980714Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-04-17T17:59:09.980739Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:09.980777Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-04-17T17:59:10.030911Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:10.031038Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-04-17T17:59:10.031142Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2025-04-17T17:59:10.318634Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-04-17T17:59:10.319505Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2025-04-17T17:59:10.319943Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1128 2025-04-17T17:59:10.320096Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:10.320144Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:10.320177Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-04-17T17:59:10.320208Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-04-17T17:59:10.320257Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-04-17T17:59:10.320285Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000101_00000_0000000001_00000| 2025-04-17T17:59:10.320308Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-04-17T17:59:10.320332Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:10.320359Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-04-17T17:59:10.367293Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:10.367404Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-04-17T17:59:10.367477Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk 2025-04-17T17:59:10.572017Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2025-04-17T17:59:10.575335Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2025-04-17T17:59:10.575822Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2129 2025-04-17T17:59:10.575959Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:10.575996Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:10.576024Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-04-17T17:59:10.576054Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-04-17T17:59:10.576081Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-04-17T17:59:10.576106Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000102_00000_0000000001_00000| 2025-04-17T17:59:10.576130Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-04-17T17:59:10.576155Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ... 
, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [3:179:2194] 2025-04-17T17:59:12.214838Z node 3 :PERSQUEUE INFO: new Cookie owner1|630f9676-dfc562a2-9771fd3b-31a39101_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Wait write info error(2) 2025-04-17T17:59:13.339767Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:13.339851Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:13.357809Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:59:13.358212Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:13.358549Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-04-17T17:59:13.359669Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:59:13.359890Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:13.360095Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:13.361300Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:13.361879Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:59:13.361999Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:13.362045Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:13.362103Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:13.000000Z 2025-04-17T17:59:13.362151Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:13.362277Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-04-17T17:59:13.362346Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:59:13.362413Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:59:13.362498Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:13.362555Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T17:59:13.362609Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T17:59:13.362650Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T17:59:13.363016Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-04-17T17:59:13.363213Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-04-17T17:59:13.363273Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 2025-04-17T17:59:13.718795Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 session is set to 0 (startOffset 0) session session-client-0 2025-04-17T17:59:13.718976Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:13.719032Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:13.719082Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:13.719134Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:13.719171Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-17T17:59:13.719196Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-17T17:59:13.719227Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:13.719269Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\350\302\236\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } 2025-04-17T17:59:13.742561Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, 
State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 2025-04-17T17:59:14.831050Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:14.831190Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 3 2025-04-17T17:59:14.831226Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-04-17T17:59:16.346695Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 6 Wait batch completion Wait kv request 2025-04-17T17:59:16.347127Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2025-04-17T17:59:16.347195Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-04-17T17:59:16.347251Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 3 2025-04-17T17:59:16.347301Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-04-17T17:59:16.347351Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-04-17T17:59:16.347410Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2025-04-17T17:59:16.405181Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-04-17T17:59:16.405548Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:16.405608Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:16.405655Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:16.405703Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:16.405740Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:16.405764Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-2 2025-04-17T17:59:16.405787Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-2 2025-04-17T17:59:16.405808Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-17T17:59:16.405849Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-17T17:59:16.405878Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-17T17:59:16.405900Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, 
Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-17T17:59:16.405933Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:16.405974Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 3 2025-04-17T17:59:16.431303Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-04-17T17:59:17.215747Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:17.215845Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:17.251718Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:179:2194] 2025-04-17T17:59:17.253611Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:59:17.253698Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [5:179:2194] >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPQPartialRead >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 2118, MsgBus: 19492 2025-04-17T17:55:03.875147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342138387609545:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:03.900871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d94/r3tmp/tmpxOCnvz/pdisk_1.dat 2025-04-17T17:55:04.365250Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:04.389634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:04.389719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:04.392047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2118, node 1 2025-04-17T17:55:04.487025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:04.487044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:04.487051Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:04.487162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:19492 TClient is connected to server localhost:19492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:05.266791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.305862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.466820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.614075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.693090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:07.210748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342155567480382:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.210854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.516258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.549348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.589230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.629022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.658788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.701196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.775875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.852746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342155567481048:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.852877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.853262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342155567481053:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.857368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:07.870308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342155567481055:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:07.951195Z node 1 :TX_PROXY ERROR: Actor# [1:7494342155567481114:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:08.866371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342138387609545:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:08.876221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:09.018008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:55:10.551387Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2e7vhe076ajj4rbdamnx6n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFhMjA5ZTYtNmI2ZjAxOWQtNDdjYTkxYzgtNWNmZTk5MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.560415Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2e7vhe255f44rqbyk07npw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYzOTZlNTQtM2M5OThhZGMtODIzMjE5OTktNTNmMDU3N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.586785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e7vje2mk3gbsya53nysmx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDU0MDA0My02NjBmZjRhOC04OWFhM2E5My1lOGU5MjI1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.586785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2e7vhe255f44rqbyk07npw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYzOTZlNTQtM2M5OThhZGMtODIzMjE5OTktNTNmMDU3N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.589350Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01js2e7vhe076ajj4rbdamnx6n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFhMjA5ZTYtNmI2ZjAxOWQtNDdjYTkxYzgtNWNmZTk5MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.603907Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01js2e7vjkaw6k8z5mnctp598a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMwYzg0ZjctMzYyZGI1YjUtYWRkNWUzNC0xZjU2ZmY0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.614083Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01js2e7vjk2r3ammbzjrjwvrdj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4MDU0MDQtNDU1ZjExYWEtMTY4MmY4ZmUtODU4Njk1NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.615837Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. 
Ctx: { TraceId: 01js2e7vjk2e848mb1ggm31x08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzcyZTY2ZGEtOWE4Y2Q4NzUtNjMyNGRhM2ItNzgwNjI3YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.618591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2e7vjk50x8y8j0xbd0adqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVmNjg1NTgtZDYyOGU5ODctZDAwMmM5YzItOWRlYWQ3MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.619878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2e7vjk04cgww5391kgsgtq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNjOTk1NWUtYjU2ZTRjNzYtYmI1ZGYwZmMtMzY5NmFkNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.634320Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01js2e7vjx27cz9kmdxrpp7vpt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVkYWM0YWMtNDVjY2ViN ... /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYWEyMTEtZmVhZGZkMjMtZjhjY2NkLTgwZDY5YzBm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.862606Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719467. Ctx: { TraceId: 01js2ef58ffjvy6tdbte818bj2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYWEyMTEtZmVhZGZkMjMtZjhjY2NkLTgwZDY5YzBm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.868105Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719468. Ctx: { TraceId: 01js2ef58f24r9h1eskfds82p5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNTQ1YTktNjQ2MDA2OWYtMjY2ZjJmODUtYjU2MDBkNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.871742Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719469. Ctx: { TraceId: 01js2ef58zc7qzb3td3zzdwkkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.882257Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719470. Ctx: { TraceId: 01js2ef592czy9wadp5rfg9td4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMTMxNzItZGJiMDY3MWMtYmE3ZmI0MzAtODljNjgwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.882358Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719471. Ctx: { TraceId: 01js2ef58zc7qzb3td3zzdwkkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.913161Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719472. Ctx: { TraceId: 01js2ef592czy9wadp5rfg9td4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMTMxNzItZGJiMDY3MWMtYmE3ZmI0MzAtODljNjgwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.918091Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719473. 
Ctx: { TraceId: 01js2ef5a3d7b6cdnptxj7nrn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODIwOTU0N2QtZjdiNjg3Y2QtYmE1NjU4ZTctY2MyYWYxNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.919640Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719474. Ctx: { TraceId: 01js2ef592czy9wadp5rfg9td4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMTMxNzItZGJiMDY3MWMtYmE3ZmI0MzAtODljNjgwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.930035Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719475. Ctx: { TraceId: 01js2ef5a3d7b6cdnptxj7nrn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODIwOTU0N2QtZjdiNjg3Y2QtYmE1NjU4ZTctY2MyYWYxNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.940199Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719476. Ctx: { TraceId: 01js2ef5b835rdv11t9ds497y7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYWEyMTEtZmVhZGZkMjMtZjhjY2NkLTgwZDY5YzBm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.951802Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719478. Ctx: { TraceId: 01js2ef5b8fv328sm7mbz2gzdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQzMzhkZTYtMjExM2ZjMjEtNmQyYTlkMzQtNDBlMzY4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.952868Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719477. Ctx: { TraceId: 01js2ef5azbm73gt2gqk999g30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNTQ1YTktNjQ2MDA2OWYtMjY2ZjJmODUtYjU2MDBkNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.978171Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719479. Ctx: { TraceId: 01js2ef5b8fv328sm7mbz2gzdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQzMzhkZTYtMjExM2ZjMjEtNmQyYTlkMzQtNDBlMzY4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:09.981382Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719480. Ctx: { TraceId: 01js2ef5azbm73gt2gqk999g30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNTQ1YTktNjQ2MDA2OWYtMjY2ZjJmODUtYjU2MDBkNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.006716Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719482. Ctx: { TraceId: 01js2ef5d613pyndzxth6etkwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.011101Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719481. Ctx: { TraceId: 01js2ef5ddfj2n6xcqszt4pg33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMTMxNzItZGJiMDY3MWMtYmE3ZmI0MzAtODljNjgwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.020353Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719483. 
Ctx: { TraceId: 01js2ef5d613pyndzxth6etkwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.026048Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719484. Ctx: { TraceId: 01js2ef5dkfh9zs039r535ytmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYWEyMTEtZmVhZGZkMjMtZjhjY2NkLTgwZDY5YzBm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.029087Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719485. Ctx: { TraceId: 01js2ef5e2br51azcat36qwppt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODIwOTU0N2QtZjdiNjg3Y2QtYmE1NjU4ZTctY2MyYWYxNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.036975Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719486. Ctx: { TraceId: 01js2ef5dkfh9zs039r535ytmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYWEyMTEtZmVhZGZkMjMtZjhjY2NkLTgwZDY5YzBm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.047634Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719487. Ctx: { TraceId: 01js2ef5es0jjx2aansyxrbv3m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQzMzhkZTYtMjExM2ZjMjEtNmQyYTlkMzQtNDBlMzY4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.058358Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719488. Ctx: { TraceId: 01js2ef5ezddx0tfff1jms15f8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNTQ1YTktNjQ2MDA2OWYtMjY2ZjJmODUtYjU2MDBkNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.062262Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719489. Ctx: { TraceId: 01js2ef5f63fnn6bf94azk87mx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMTMxNzItZGJiMDY3MWMtYmE3ZmI0MzAtODljNjgwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.070571Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719490. Ctx: { TraceId: 01js2ef5ezddx0tfff1jms15f8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNTQ1YTktNjQ2MDA2OWYtMjY2ZjJmODUtYjU2MDBkNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.074582Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719491. Ctx: { TraceId: 01js2ef5f63fnn6bf94azk87mx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMTMxNzItZGJiMDY3MWMtYmE3ZmI0MzAtODljNjgwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.080350Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719492. Ctx: { TraceId: 01js2ef5fh7nb5p1eh0b0v23mn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.084279Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719493. 
Ctx: { TraceId: 01js2ef5f63fnn6bf94azk87mx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODgxMTMxNzItZGJiMDY3MWMtYmE3ZmI0MzAtODljNjgwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.089181Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719494. Ctx: { TraceId: 01js2ef5fh7nb5p1eh0b0v23mn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.099711Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719495. Ctx: { TraceId: 01js2ef5g6772b9p3hy2zvr95g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQzMzhkZTYtMjExM2ZjMjEtNmQyYTlkMzQtNDBlMzY4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.107161Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719496. Ctx: { TraceId: 01js2ef5g6846hry34tw00zfgw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODVjY2UwMDEtZmM1NGY5M2ItMzJmNWIwMTktMzljMGUxZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:59:10.111503Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719497. Ctx: { TraceId: 01js2ef5g6846hry34tw00zfgw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODVjY2UwMDEtZmM1NGY5M2ItMzJmNWIwMTktMzljMGUxZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-17T17:59:10.125880Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719498. Ctx: { TraceId: 01js2ef5h4as2m16x27wm3gz01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYWEyMTEtZmVhZGZkMjMtZjhjY2NkLTgwZDY5YzBm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.127394Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719499. Ctx: { TraceId: 01js2ef5h6dbfzkqrv6kwkt2ac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:10.143063Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719500. Ctx: { TraceId: 01js2ef5h4as2m16x27wm3gz01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYWEyMTEtZmVhZGZkMjMtZjhjY2NkLTgwZDY5YzBm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:59:10.168978Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719501. Ctx: { TraceId: 01js2ef5h6dbfzkqrv6kwkt2ac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZjYTNhZjEtYzdkYjQxNTctZjgyOTQ2ODUtNWI3NTU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> TPQTest::TestOffsetEstimation [GOOD] >> TPQTest::TestMaxTimeLagRewind >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> TopicService::RelativePath [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPQTest::TestUserInfoCompatibility ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-17T17:56:46.834277Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1744912606834243 2025-04-17T17:56:47.746955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342584618185230:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:47.747024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:47.892770Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342583928476976:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:47.893137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d9/r3tmp/tmpKZicyy/pdisk_1.dat 2025-04-17T17:56:48.431980Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:56:48.526961Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:56:49.086171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:49.110381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:50.003766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:50.086640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:50.092936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:50.093024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:50.099460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:50.099563Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:50.116924Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:56:50.117051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:50.123076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5534, node 1 2025-04-17T17:56:50.543625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015d9/r3tmp/yandexBfOuNU.tmp 2025-04-17T17:56:50.543653Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0015d9/r3tmp/yandexBfOuNU.tmp 2025-04-17T17:56:50.543827Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015d9/r3tmp/yandexBfOuNU.tmp 2025-04-17T17:56:50.543960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:50.650004Z INFO: TTestServer started on Port 16582 GrpcPort 5534 TClient is connected to server localhost:16582 PQClient connected to localhost:5534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:51.326498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T17:56:52.747357Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342584618185230:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:52.747432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:52.846285Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342583928476976:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:52.846352Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:54.812991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342613993248245:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:54.813311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342613993248234:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:54.813401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:54.832537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:56:54.841686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342614682957338:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:54.841769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:54.849068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342614682957362:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:54.904576Z node 1 :TX_PROXY ERROR: Actor# [1:7494342614682957381:2665] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:56:54.936368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342613993248248:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:56:54.937253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342614682957378:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:56:55.003933Z node 2 :TX_PROXY ERROR: Actor# [2:7494342613993248278:2135] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:55.023280Z node 1 :TX_PROXY ERROR: Actor# [1:7494342618977924752:2709] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:55.506293Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.327544s 2025-04-17T17:56:55.506335Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.327619s 2025-04-17T17:56:55.592437Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342618977924762:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:55.620106Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDIxOGIyNjItZTM1ODFiYzYtZjk4MzkxMWYtZWI2NjdkNTM=, ActorId: [1:7494342614682957319:2340], ActorState: ExecuteState, TraceId: 01js2eb1crf1ckf6wfk733d9vk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:55.616527Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342618288215581:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:55.617666Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjRkMzNmNGQtMzgxMzRhYTctYWM5OTg2YWYtY2E0YTQ3NDE=, ActorId: [2:7494342613993248225:2312], ActorState: ExecuteState, TraceId: 01js2eb1cb2maaer8ag3txy5q4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:55.635720Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 s ... nit_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-17T17:59:15.440630Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:44984 2025-04-17T17:59:15.440648Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:44984 proto=v1 topic=test-topic durationSec=0 2025-04-17T17:59:15.440658Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T17:59:15.442168Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-17T17:59:15.442354Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-17T17:59:15.442372Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T17:59:15.442385Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T17:59:15.442409Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7494343219129477646:2600] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T17:59:15.446961Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7494343219129477646:2600] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-17T17:59:15.646653Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715699. 
Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-17T17:59:15.646805Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7494343219129477659:2602] TxId: 281474976715699. Ctx: { TraceId: 01js2efaqqb57j7kxc5kck2qkb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTU3NjMyMjAtZTUzODExZGMtZDcyMzQ1MDUtZTlhMjIwODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-17T17:59:15.647033Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YTU3NjMyMjAtZTUzODExZGMtZDcyMzQ1MDUtZTlhMjIwODQ=, ActorId: [13:7494343219129477647:2602], ActorState: ExecuteState, TraceId: 01js2efaqqb57j7kxc5kck2qkb, Create QueryResponse for error on request, msg: Test retry state: get retry delay 2025-04-17T17:59:15.656740Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YTU3NjMyMjAtZTUzODExZGMtZDcyMzQ1MDUtZTlhMjIwODQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01js2efaqqb57j7kxc5nbcek51" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-17T17:59:15.656788Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Write session will restart in 2.000000s 2025-04-17T17:59:15.656912Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Write session: Do CDS request 2025-04-17T17:59:15.656953Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Do schedule cds request after 2000 ms 2025-04-17T17:59:15.655030Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7494343219129477646:2600] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YTU3NjMyMjAtZTUzODExZGMtZDcyMzQ1MDUtZTlhMjIwODQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01js2efaqqb57j7kxc5nbcek51" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-17T17:59:15.655244Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YTU3NjMyMjAtZTUzODExZGMtZDcyMzQ1MDUtZTlhMjIwODQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01js2efaqqb57j7kxc5nbcek51" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-17T17:59:15.655632Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD 2025-04-17T17:59:15.729537Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715700. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:15.729690Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7494343219129477672:2590] TxId: 281474976715700. Ctx: { TraceId: 01js2ef9m34nq1b2140wss7eer, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YmQ2MTI4ZDEtOWFiZjA5YjQtODU1Mzk1MjMtYWVmODJmNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:15.729944Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YmQ2MTI4ZDEtOWFiZjA5YjQtODU1Mzk1MjMtYWVmODJmNGI=, ActorId: [13:7494343214834510288:2590], ActorState: ExecuteState, TraceId: 01js2ef9m34nq1b2140wss7eer, Create QueryResponse for error on request, msg: 2025-04-17T17:59:15.738994Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2efasf0ym951fp7d0ajevs" } } YdbStatus: UNAVAILABLE ConsumedRu: 788 } 2025-04-17T17:59:16.344601Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720682. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:16.344750Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7494343221505548068:2489] TxId: 281474976720682. Ctx: { TraceId: 01js2efa6vc4csfhx6x2dvjnr7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=NmM1NmMyZTUtZjJiMjc1ZTQtNWZiZWMyZTYtMWVjZDBhYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:16.345005Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NmM1NmMyZTUtZjJiMjc1ZTQtNWZiZWMyZTYtMWVjZDBhYTA=, ActorId: [14:7494343212915613457:2489], ActorState: ExecuteState, TraceId: 01js2efa6vc4csfhx6x2dvjnr7, Create QueryResponse for error on request, msg: 2025-04-17T17:59:16.347830Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2efbcp87b3f25jtxqc66g3" } } YdbStatus: UNAVAILABLE ConsumedRu: 789 } 2025-04-17T17:59:16.454274Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Write session: close. Timeout = 0 ms 2025-04-17T17:59:16.454346Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Write session will now close 2025-04-17T17:59:16.454424Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Write session: aborting 2025-04-17T17:59:16.455252Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-17T17:59:16.455314Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|72c3ccb3-56ac262-7c314c6f-26b0c4a9_0] Write session: destroy 2025-04-17T17:59:16.646331Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715703. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:16.646483Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7494343223424445104:2607] TxId: 281474976715703. Ctx: { TraceId: 01js2efbpt433h8gzr0d5r51n1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZTk4YmY5NDItZmNkNGVlN2ItODA2MTY4Yy1iYWFjZDg2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:16.646728Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZTk4YmY5NDItZmNkNGVlN2ItODA2MTY4Yy1iYWFjZDg2NQ==, ActorId: [13:7494343223424445101:2607], ActorState: ExecuteState, TraceId: 01js2efbpt433h8gzr0d5r51n1, Create QueryResponse for error on request, msg: 2025-04-17T17:59:16.648752Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2efbpv9cve03eqg3tzd4p1" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-17T17:59:16.691393Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720684. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:16.691544Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7494343221505548153:2500] TxId: 281474976720684. Ctx: { TraceId: 01js2efbqza9xpc0s2a7tdybp4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZTNlYzE3YjgtZDlhNGY2My00ZWYwYzBlMy00MjkyZWUyYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T17:59:16.691775Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZTNlYzE3YjgtZDlhNGY2My00ZWYwYzBlMy00MjkyZWUyYg==, ActorId: [14:7494343221505548150:2500], ActorState: ExecuteState, TraceId: 01js2efbqza9xpc0s2a7tdybp4, Create QueryResponse for error on request, msg: 2025-04-17T17:59:16.693059Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2efbr01xhx2eg53jaj07ap" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TPartitionTests::CorrectRange_Rollback >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace >> TPQTest::TestAccountReadQuota >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2025-04-17T17:58:55.182829Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:58:55.188088Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:58:55.188406Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:58:55.188485Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:58:55.188540Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:58:55.188587Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:58:55.188697Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:55.188794Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:55.221915Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:206:2212], now have 1 active actors on pipe 2025-04-17T17:58:55.222046Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:58:55.251682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-17T17:58:55.254954Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-17T17:58:55.255146Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:55.256184Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-17T17:58:55.256356Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:58:55.256834Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:58:55.257230Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:214:2218] 2025-04-17T17:58:55.258267Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T17:58:55.258339Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:214:2218] 2025-04-17T17:58:55.258400Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:58:55.259290Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:58:55.259420Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-17T17:58:55.259465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:58:55.259506Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-04-17T17:58:55.259529Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-04-17T17:58:55.259715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:58:55.259765Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:58:55.259824Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:58:55.259879Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:58:55.259911Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:58:55.259932Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:58:55.259953Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-04-17T17:58:55.259977Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-04-17T17:58:55.260039Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:58:55.260087Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:58:55.260205Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:55.260242Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:58:55.260499Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:58:55.263389Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:58:55.263829Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:221:2223], now have 1 active actors on pipe 2025-04-17T17:58:55.264581Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:224:2225], now have 1 active actors on pipe 2025-04-17T17:58:55.265453Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-17T17:58:55.265516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-17T17:58:55.265586Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-17T17:58:55.265643Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-17T17:58:55.265681Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-17T17:58:55.265719Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-17T17:58:55.265759Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-17T17:58:55.265920Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-17T17:58:55.266007Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:55.275250Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:55.275331Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-17T17:58:55.275367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-17T17:58:55.275413Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-17T17:58:55.275817Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67891 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-17T17:58:55.275877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-17T17:58:55.275955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-04-17T17:58:55.275996Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-17T17:58:55.276039Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-17T17:58:55.276075Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-17T17:58:55.276111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-17T17:58:55.276256Z 
node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-17T17:58:55.276342Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:58:55.279961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:58:55.280025Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPA ... aaa... (TRUNCATED) ...aaa-2496" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaa... (TRUNCATED) ...aaa-2497" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaa... (TRUNCATED) ...aaa-2498" Generation: 2 Important: false } Consumers { Name: ... (TRUNCATED)
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2499" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-17T17:59:18.912392Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T17:59:18.945602Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T17:59:18.945674Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATED 2025-04-17T17:59:18.945710Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-17T17:59:18.945760Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-17T17:59:18.945807Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-17T17:59:18.945861Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-17T17:59:18.945939Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-17T17:59:18.945988Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 >> TPartitionTests::ShadowPartitionCountersRestore >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWriteSplit >> KqpBatchUpdate::ManyPartitions_1 [GOOD] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> TPartitionTests::DataTxCalcPredicateOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T17:59:10.154333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:10.154432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:10.154471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:10.154511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:10.154552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:10.154580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:10.154642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:10.154729Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:10.155054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:10.343738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:10.343789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:10.395699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:10.396186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:10.396359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:10.425420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:10.425591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:10.426299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:10.426512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:10.440015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:10.441408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:10.441473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:10.441666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:10.441724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:10.441767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:10.441920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:10.471119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T17:59:10.791338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:10.791608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:10.791825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:10.792057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:10.792123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T17:59:10.799091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:10.799235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:10.799465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:10.799523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:10.799566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:10.799598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:10.807130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:10.807216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:10.807254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:10.819086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:10.819169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:10.819230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:10.819302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:10.826111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:10.838662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:10.838881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:10.839865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:10.839993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:10.840052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:10.840337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
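The traces above show the same persist-then-advance pattern twice: the PERSQUEUE tablet walks a distributed transaction through UNKNOWN, PREPARING, PREPARED and later CALCULATED and WAIT_RS, writing each step to its key-value store (WRITE_TX_COOKIE) before acting on the next one, while schemeshard moves operation 1:0 through the numeric states 2, 3, 128 and 240. A minimal C++ sketch of that pattern follows; the type and function names (ETxState, PersistAndAdvance) are hypothetical stand-ins for illustration, not the actual YDB classes.

#include <cstdint>
#include <functional>
#include <iostream>

// Simplified model of the state machine in the log: every transition is
// persisted before the transaction is allowed to make further progress.
enum class ETxState { Unknown, Preparing, Prepared, Calculated, WaitRs };

struct TTx {
    uint64_t TxId;
    ETxState State = ETxState::Unknown;
};

// Stand-in for the TEvKeyValue WRITE_TX_COOKIE round-trip: the new state is
// written durably first and applied only once the write is acknowledged.
void PersistAndAdvance(TTx& tx, ETxState next,
                       const std::function<void(const TTx&, ETxState)>& persist) {
    persist(tx, next);  // TEvKeyValue::TEvRequest ... TEvResponse
    tx.State = next;
    std::cout << "TxId " << tx.TxId << ", NewState " << static_cast<int>(next) << "\n";
}

int main() {
    TTx tx{67890};
    auto persist = [](const TTx&, ETxState) { /* local KV write */ };
    PersistAndAdvance(tx, ETxState::Preparing, persist);
    PersistAndAdvance(tx, ETxState::Prepared, persist);
    PersistAndAdvance(tx, ETxState::Calculated, persist);
    PersistAndAdvance(tx, ETxState::WaitRs, persist);  // now wait for TEvReadSet
}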
2025-04-17T17:59:10.840387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:10.840580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:10.840671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:10.847331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:10.847385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:10.847598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:10.847644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:10.848098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:10.848155Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:10.848269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:10.848318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:10.848374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:10.848405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:10.848445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:10.848504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:10.848561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:10.848594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:10.848673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:10.848711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:10.848741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:10.850781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:10.850964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:10.851007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1.147091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:21.147378Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:21.147414Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:21.147552Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:59:21.147713Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:21.147749Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-17T17:59:21.147786Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:59:21.148046Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:21.148097Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-17T17:59:21.148164Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-17T17:59:21.149583Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:21.149672Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:21.149699Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:59:21.149730Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:59:21.149761Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:21.155230Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:21.155350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:21.155382Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:59:21.155415Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:59:21.155452Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:21.155562Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:59:21.156803Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:59:21.156863Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:59:21.156894Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T17:59:21.157271Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:21.157325Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:59:21.157464Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:21.157510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:21.157556Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:21.157601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:21.157662Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:59:21.157709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:21.157759Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:59:21.157795Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:59:21.157989Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:59:21.159801Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T17:59:21.160127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:21.160478Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-17T17:59:21.162008Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:21.166793Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T17:59:21.167497Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T17:59:21.167772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:59:21.168381Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-17T17:59:21.170675Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:59:21.170908Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-17T17:59:21.171636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:59:21.172028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:21.172094Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:21.172223Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:59:21.172879Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:59:21.172991Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:21.173040Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:21.173116Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:21.177290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:59:21.177370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T17:59:21.177505Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:59:21.177531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T17:59:21.177652Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:59:21.177690Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T17:59:21.178167Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:59:21.178328Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:59:21.178609Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:59:21.178657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:59:21.179171Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:59:21.179284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:59:21.179329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:537:2487] 
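In the deletion sequence above, every freed shard decrements the dropped path's reference count (DecrementPathDbRefCount ... was 4, 3, 2), and only when the count reaches zero do TTxCleanDroppedSubDomains and TTxCleanDroppedPaths persist the removal. A rough C++ illustration of that refcounted cleanup, under the assumption that this is plain reference counting; the names here are invented for the sketch.

#include <cstdint>
#include <cstdio>
#include <map>

// Each shard that referenced a dropped path keeps it alive; the path record
// itself is removed only after the last reference is gone.
struct TPathEntry {
    int RefCount = 0;
    bool Dropped = false;
};

std::map<uint64_t, TPathEntry> Paths;

void OnShardDeleted(uint64_t pathId) {
    auto it = Paths.find(pathId);
    if (it == Paths.end())
        return;
    if (--it->second.RefCount == 0 && it->second.Dropped) {
        std::printf("PersistRemovePath for PathId# %llu\n",
                    static_cast<unsigned long long>(pathId));
        Paths.erase(it);
    }
}

int main() {
    Paths[2] = TPathEntry{3, true};  // three shards still reference dropped path 2
    OnShardDeleted(2);               // tablet 72075186233409546 freed
    OnShardDeleted(2);               // tablet 72075186233409548 freed
    OnShardDeleted(2);               // tablet 72075186233409547 freed -> path removed
}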
TestWaitNotification: OK eventTxId 103 2025-04-17T17:59:21.179858Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:21.185904Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 251us result status StatusPathDoesNotExist 2025-04-17T17:59:21.186195Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] >> TPartitionTests::TestTxBatchInFederation >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TPersQueueTest::WriteAfterAlter [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed >> TPartitionTests::TestNonConflictingActsBatchOk >> TopicService::AccessRights |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 20734, MsgBus: 5892 2025-04-17T17:53:58.134718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341856915823395:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:53:58.135413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c90/r3tmp/tmpYXmT1l/pdisk_1.dat 2025-04-17T17:53:58.954612Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:53:58.997282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:53:59.003609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:53:59.008777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20734, node 1 2025-04-17T17:53:59.246602Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:53:59.246624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:53:59.246633Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:53:59.246737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5892 TClient is connected to server localhost:5892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:00.243251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:00.285628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:00.569515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:00.952783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:01.236319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:03.101896Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341856915823395:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:03.101964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:03.570302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341878390661515:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:03.570465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:03.989472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.069849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.122483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.189694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.275067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.353769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.433001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.589154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341882685629486:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:04.589227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:04.589722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341882685629491:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:04.593534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:04.610463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341882685629493:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:04.673772Z node 1 :TX_PROXY ERROR: Actor# [1:7494341882685629547:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:54:06.696439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 15786, MsgBus: 30264 2025-04-17T17:54:12.095791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341917020050407:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:12.095842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c90/r3tmp/tmpyKVvD5/pdisk_1.dat 2025-04-17T17:54:12.266283Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:12.294483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:12.294566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:12.295916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15786, node 2 2025-04-17T17:54:12.378733Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:12.378757Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:12.378768Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:12.378911Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30264 TClient is connected to server localhost:30264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:12.951776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
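The KQP_WORKLOAD_SERVICE warnings above reflect a benign race: two actors try to create the default resource pool, one hits "path exist, request accepts it", and the pool creator schedules a retry to doublecheck that the object really landed. A generic create-then-doublecheck helper might look like the following sketch; EStatus and the callbacks are assumptions for illustration, not the actual workload-service API.

#include <chrono>
#include <functional>
#include <thread>

enum class EStatus { Ok, NotFound, AlreadyExists };

// Treat a lost creation race as success once a follow-up read confirms the
// object exists; otherwise back off and retry a bounded number of times.
bool CreateWithDoublecheck(const std::function<EStatus()>& create,
                           const std::function<EStatus()>& describe,
                           int attempts = 5) {
    auto delay = std::chrono::milliseconds(50);
    for (int i = 0; i < attempts; ++i) {
        EStatus st = create();
        if (st == EStatus::Ok)
            return true;
        if (st == EStatus::AlreadyExists && describe() == EStatus::Ok)
            return true;                     // lost the race but the object is there
        std::this_thread::sleep_for(delay);  // "Scheduled retry for error"
        delay *= 2;
    }
    return false;
}

int main() {
    bool created = false;
    CreateWithDoublecheck(
        [&] { if (created) return EStatus::AlreadyExists; created = true; return EStatus::Ok; },
        [&] { return created ? EStatus::Ok : EStatus::NotFound; });
}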
2025-04-17T17:54:12.958964Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:12.974764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T17:54:13.039454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... ard: 72057594046644480 2025-04-17T17:58:49.391509Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:49.486009Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:49.594703Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:49.736897Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:49.962070Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:50.177282Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7494343111608327936:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.177468Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.178324Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7494343111608327941:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.185931Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:50.236959Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [19:7494343111608327943:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:50.308716Z node 19 :TX_PROXY ERROR: Actor# [19:7494343111608328001:3590] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:52.705025Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:55.961747Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:58:55.961782Z node 19 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 13441, MsgBus: 12832 2025-04-17T17:58:59.762642Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7494343151665424939:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:59.762759Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c90/r3tmp/tmpd3S2hh/pdisk_1.dat 2025-04-17T17:59:00.137410Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:00.193451Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:00.193624Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:00.200301Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13441, node 20 2025-04-17T17:59:00.352786Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:00.352822Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:00.352839Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:00.353075Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12832 TClient is connected to server localhost:12832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
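Each "TClient is connected ... WaitRootIsUp" block above is the same bootstrap dance: start the server, then poll the scheme root with Ls requests until it resolves. A plausible shape for that polling step is sketched below, with TClient reduced to a hypothetical callback rather than the real test_client.cpp interface.

#include <chrono>
#include <functional>
#include <string>
#include <thread>

struct TLsResult { bool Success = false; };

// Poll the scheme root until it answers or the deadline expires.
bool WaitRootIsUp(const std::function<TLsResult(const std::string&)>& ls,
                  const std::string& root,
                  std::chrono::seconds deadline = std::chrono::seconds(10)) {
    const auto until = std::chrono::steady_clock::now() + deadline;
    while (std::chrono::steady_clock::now() < until) {
        if (ls(root).Success)
            return true;  // corresponds to "WaitRootIsUp 'Root' success."
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}

int main() {
    int calls = 0;
    bool ok = WaitRootIsUp(
        [&](const std::string&) { return TLsResult{++calls >= 3}; }, "Root");
    return ok ? 0 : 1;
}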
2025-04-17T17:59:01.709884Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:01.733695Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:01.923753Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:02.346314Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:02.550135Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:04.764547Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7494343151665424939:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:04.783963Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:10.136150Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494343198910067020:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:10.136313Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:10.489829Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:10.632530Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:10.739220Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:10.878731Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:10.982687Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:11.108449Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:11.280916Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:11.478719Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494343203205035006:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:11.478912Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:11.479097Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494343203205035011:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:11.485325Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:11.516270Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7494343203205035013:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:11.609365Z node 20 :TX_PROXY ERROR: Actor# [20:7494343203205035070:3595] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:14.419086Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:15.039700Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:59:15.039733Z node 20 :IMPORT WARN: Table profiles were not loaded >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TPartitionTests::FailedTxsDontBlock >> TSchemeShardExtSubDomainTest::Create >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestCheckACL >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> PQCountersSimple::Partition >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> PQCountersSimple::Partition [GOOD] >> IcbAsActorTests::TestHttpPostReaction >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::BadTopicName >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> IcbAsActorTests::TestHttpPostReaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::Partition [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:131:2057] recipient: [1:124:2158] 2025-04-17T17:58:51.178783Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:51.178884Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927938 is [1:176:2197] sender: [1:177:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:202:2057] recipient: [1:14:2061] 2025-04-17T17:58:51.198496Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:51.219916Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:200:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:51.220869Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:208:2221] 2025-04-17T17:58:51.223413Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:208:2221] 2025-04-17T17:58:51.225443Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:209:2222] 2025-04-17T17:58:51.226989Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:209:2222] 2025-04-17T17:58:51.234502Z node 1 :PERSQUEUE INFO: new Cookie default|ee81c931-9e99274f-fc113336-99a3537d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:51.241765Z node 1 :PERSQUEUE INFO: new Cookie default|709c9b2a-82e0a155-8aa62211-bebf7827_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:51.247806Z node 1 :PERSQUEUE INFO: new Cookie default|ed050db7-123a5462-3d2ede6d-c616e9fd_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": 
"rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalMessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalSizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalTimeLagMsByLastRead" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/UserPartitionsAnswered" }, "value": 2 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastRead" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastReadOld" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": 
"total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind" ... titions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-04-17T17:59:02.576331Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:02.584206Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/0/total ANS GROUP user/total/total ANS GROUP user/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP total CHECKING GROUP user/0/rt3.dc1--asdfgs--topic 2025-04-17T17:59:04.794309Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:04.809538Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 6 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 6 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" 
Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user2/1/total ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/1/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/1/rt3.dc1--asdfgs--topic 2025-04-17T17:59:07.199737Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:07.205566Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 7 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 7 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user2/0/total ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/0/rt3.dc1--asdfgs--topic 2025-04-17T17:59:09.512140Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:09.517884Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 8 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 8 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP 
user/1/rt3.dc1--asdfgs--topic 2025-04-17T17:59:12.799974Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:12.800064Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:12.849447Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:12.850470Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [4:200:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 3600 ImportantClientId: "client" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } Consumers { Name: "client" Generation: 9 Important: true } 2025-04-17T17:59:12.851287Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:208:2221] 2025-04-17T17:59:12.852249Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [4:208:2221] 2025-04-17T17:59:12.853408Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:209:2222] 2025-04-17T17:59:12.854320Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [4:209:2222] 2025-04-17T17:59:12.878793Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [4:253:2252] connected; active server actors: 1 2025-04-17T17:59:12.886144Z node 4 :PERSQUEUE INFO: new Cookie default|aab0a0a6-f1e53674-cb0c6c4b-746776fd_0 generated for partition 0 topic 'topic' owner default 2025-04-17T17:59:12.893783Z node 4 :PERSQUEUE INFO: new Cookie default|182e4665-133db009-480346d6-4c8c5963_1 generated for partition 0 topic 'topic' owner default 2025-04-17T17:59:12.915581Z node 4 :PERSQUEUE INFO: new Cookie default|6669b037-863b01bc-b372d146-f7858113_2 generated for partition 0 topic 'topic' owner default 2025-04-17T17:59:12.923404Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:300:2293] connected; active server actors: 1 2025-04-17T17:59:19.139674Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:399:2379] connected; active server actors: 1 2025-04-17T17:59:26.915475Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:26.915585Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:26.956821Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:26.957778Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:198:2213] txId 
12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2025-04-17T17:59:26.958551Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:206:2219] 2025-04-17T17:59:26.961426Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [5:206:2219] 2025-04-17T17:59:26.963759Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:207:2220] 2025-04-17T17:59:26.965745Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [5:207:2220] 2025-04-17T17:59:26.973559Z node 5 :PERSQUEUE INFO: new Cookie default|357d445a-937bfb90-d78d0b60-23d65a8c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:26.981312Z node 5 :PERSQUEUE INFO: new Cookie default|865f4e36-49098f24-3351095e-331f1b73_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:26.990129Z node 5 :PERSQUEUE INFO: new Cookie default|4bfb7de8-cf45f9d6-d0c0405b-6ff1409d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:26.996689Z node 5 :PERSQUEUE INFO: new Cookie default|520bf84-2d0d0408-c10f0c32-d3185d83_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:26.998148Z node 5 :PERSQUEUE INFO: new Cookie default|ad103dae-9ee6476e-c89cfc62-9aab710c_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [FAIL] >> TPersQueueTest::SetupWriteSession >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> KqpBatchDelete::SimpleOnePartition [GOOD] >> TPartitionTests::DataTxCalcPredicateError >> TPQTest::TestWriteTimeLag [GOOD] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |82.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub >> TPartitionTests::TestTxBatchInFederation 
[GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message >> TPartitionTests::TestBatchingWithChangeConfig >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteTimeLag [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:58:53.040857Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:53.040950Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:58:53.082332Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:53.128491Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 
2025-04-17T17:58:53.129223Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:58:53.130906Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:58:53.133156Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-17T17:58:53.135112Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:58:53.164113Z node 1 :PERSQUEUE INFO: new Cookie default|6315c062-e90111da-9eab02a4-6d93bf53_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:53.313662Z node 1 :PERSQUEUE INFO: new Cookie default|4160363d-e14c275-cf32a5f1-bdf34822_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:53.404351Z node 1 :PERSQUEUE INFO: new Cookie default|a0983705-441471e1-6dbac851-3979814f_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:53.452465Z node 1 :PERSQUEUE INFO: new Cookie default|fd8fd1b0-ed8922b8-8aa5eb20-863b5a7d_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:53.460659Z node 1 :PERSQUEUE INFO: new Cookie default|81c66125-b050a410-6061ea93-ce9849c5_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:53.470782Z node 1 :PERSQUEUE INFO: new Cookie default|e948e29e-2f0ef646-dc3d687e-7508e5a9_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:58:54.025381Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:54.025466Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-04-17T17:58:54.043066Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:54.044016Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" 
ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:58:54.044705Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-04-17T17:58:54.046861Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-04-17T17:58:54.048720Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-04-17T17:58:54.049833Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-04-17T17:58:54.074653Z node 2 :PERSQUEUE INFO: new Cookie default|72ebbbca-1e22d1d5-fad1a859-8d817a1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:54.209178Z node 2 :PERSQUEUE INFO: new Cookie default|2853f95d-c74dfb28-58964754-5107bee9_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:54.312500Z node 2 :PERSQUEUE INFO: new Cookie default|f6c66582-b49b5f85-b606b14c-f9457ca2_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:54.379851Z node 2 :PERSQUEUE INFO: new Cookie default|3cb40bcd-ddc7ff8c-16a8bea1-a2f947b7_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:54.401164Z node 2 :PERSQUEUE INFO: new Cookie default|7761b05b-a698cc8b-df4c4b58-4d12cb17_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:293:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:296:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:297:2057] recipient: [2:295:2293] Leader for TabletID 72057594037927937 is [2:298:2294] sender: [2:299:2057] recipient: [2:295:2293] 2025-04-17T17:58:54.461385Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:54.461460Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:54.468038Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:347:2335] 2025-04-17T17:58:54.470957Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:348:2336] 2025-04-17T17:58:54.482520Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:58:54.482614Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:348:2336] 2025-04-17T17:58:54.483806Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T17:58:54.483873Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:347:2335] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
new actor is[2:298:2294] Leader for TabletID 72057594037927937 is [2:298:2294] sender: [2:391:2057] recipient: [2:14:2061] 2025-04-17T17:58:55.822577Z node 2 :PERSQUEUE INFO: new Cookie default|4597494a-3b1b1345-76866a30-803155ed_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:58:56.562796Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:56.562871Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-17T17:58:56.583762Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:56.584582Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-17T17:58:56.585155Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-17T17:58:56.587704Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-17T17:58:56.589424Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-04-17T17:58:56.591650Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-04-17T17:58:56.615549Z node 3 :PERSQUEUE INFO: new Cookie default|22ec9adc-dedd660d-f8cbfe1a-ed3ccdb7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--top ... 
Q: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:59:29.187945Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 20(current 19) received from actor [16:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 20 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 17 ReadRuleGenerations: 17 ReadRuleGenerations: 19 ReadRuleGenerations: 18 ReadRuleGenerations: 20 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 17 Important: false } Consumers { Name: "aaa" Generation: 17 Important: false } Consumers { Name: "another1" Generation: 19 Important: true } Consumers { Name: "important" Generation: 18 Important: true } Consumers { Name: "another" Generation: 20 Important: false } 2025-04-17T17:59:29.191684Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 20 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 17 ReadRuleGenerations: 17 ReadRuleGenerations: 19 ReadRuleGenerations: 18 ReadRuleGenerations: 20 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 17 Important: false } Consumers { Name: "aaa" Generation: 17 Important: false } Consumers { Name: "another1" Generation: 19 Important: true } Consumers { Name: "important" Generation: 18 Important: true } Consumers { Name: "another" Generation: 20 Important: false } 2025-04-17T17:59:29.191802Z node 16 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:29.192085Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 20 done 2025-04-17T17:59:29.192422Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:29.192501Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:29.192567Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:29.192625Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:29.192671Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, 
State: StateIdle] m0000000000cuser 2025-04-17T17:59:29.192701Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:59:29.192732Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa 2025-04-17T17:59:29.192762Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa 2025-04-17T17:59:29.192792Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother1 2025-04-17T17:59:29.192824Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother1 2025-04-17T17:59:29.192859Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother 2025-04-17T17:59:29.192890Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother 2025-04-17T17:59:29.192919Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cimportant 2025-04-17T17:59:29.192950Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uimportant 2025-04-17T17:59:29.192980Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-04-17T17:59:29.193019Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:29.193077Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:59:29.193213Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 20 done 2025-04-17T17:59:29.193444Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:29.193481Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:29.193513Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:59:29.193549Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:59:29.193579Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-17T17:59:29.193609Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-17T17:59:29.193638Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa 2025-04-17T17:59:29.193668Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa 2025-04-17T17:59:29.193699Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother1 2025-04-17T17:59:29.193729Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother1 2025-04-17T17:59:29.193759Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother 2025-04-17T17:59:29.193789Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother 2025-04-17T17:59:29.193821Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cimportant 2025-04-17T17:59:29.193854Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, 
Partition: 1, State: StateIdle] m0000000001uimportant 2025-04-17T17:59:29.193887Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-04-17T17:59:29.193919Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:29.193953Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:59:29.194041Z node 16 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:59:29.194334Z node 16 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:59:29.200726Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:29.201366Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-17T17:59:29.202588Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:29.202760Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-17T17:59:29.203094Z node 16 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 20 actor [16:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 20 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 17 ReadRuleGenerations: 17 ReadRuleGenerations: 19 ReadRuleGenerations: 18 ReadRuleGenerations: 20 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 17 Important: false } Consumers { Name: "aaa" Generation: 17 Important: false } Consumers { Name: "another1" Generation: 19 Important: true } Consumers { Name: "important" Generation: 18 Important: true } Consumers { Name: "another" Generation: 20 Important: false } 2025-04-17T17:59:29.203961Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [16:615:2562], now have 1 active actors on pipe 2025-04-17T17:59:29.204950Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [16:618:2564], now have 1 active actors on pipe 2025-04-17T17:59:29.205042Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:59:29.205101Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T17:59:29.205232Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:59:29.205757Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [16:620:2566], now have 1 active actors on pipe 2025-04-17T17:59:29.205895Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:59:29.205945Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message 
batch for topic 'topic' partition 0 2025-04-17T17:59:29.206046Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:59:29.206652Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [16:622:2568], now have 1 active actors on pipe 2025-04-17T17:59:29.206819Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:59:29.206883Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T17:59:29.206991Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:59:29.207542Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [16:624:2570], now have 1 active actors on pipe 2025-04-17T17:59:29.207741Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T17:59:29.207801Z node 16 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T17:59:29.207925Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] >> TPersQueueTest::CheckACLForGrpcRead >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TSubDomainTest::StartAndStopTenanNode >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:59:07.415371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:07.415517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:07.415566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:07.415600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:07.421853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:07.421929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:07.422018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:07.422106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:07.429522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-17T17:59:07.565977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:07.566054Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:07.592179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:07.592334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:07.592532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:07.621583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:07.622120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:07.644879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:07.645462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:07.689748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:07.754752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:07.754860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:07.755015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:07.755081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:07.755180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:07.755430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:07.772097Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:59:07.993353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:07.993639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:07.993894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:07.994164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:07.994249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.000531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:08.000698Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:08.000941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.001014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:08.001062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:08.001105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:08.007552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.007622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:08.007657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:08.010646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.010723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.010780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:08.010875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.022101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:08.039499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:08.039768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:08.041112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:08.041323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:08.041401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:08.054104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:08.054257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:08.054523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-17T17:59:08.054735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:08.062371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:08.062447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:08.062662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:08.062735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:08.063116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:08.063184Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:08.063372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:08.063420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.063463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:08.063501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.063543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:08.063592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:08.063634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:08.063666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:08.063740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:08.063784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:08.063843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:08.066170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:08.066331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:08.066372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-04-17T17:59:29.781403Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-04-17T17:59:29.782009Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-04-17T17:59:29.782072Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:59:29.782239Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-04-17T17:59:29.782314Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-04-17T17:59:29.785436Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-04-17T17:59:29.785665Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-17T17:59:29.786007Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T17:59:29.786070Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T17:59:29.786364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-04-17T17:59:29.786484Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T17:59:29.786542Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:729:2631], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-04-17T17:59:29.786610Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:729:2631], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-04-17T17:59:29.787133Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:59:29.787210Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-04-17T17:59:29.787500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-17T17:59:29.788580Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:59:29.788733Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:59:29.788785Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-17T17:59:29.788846Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-04-17T17:59:29.790416Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-04-17T17:59:29.797487Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:59:29.797614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-17T17:59:29.797646Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-17T17:59:29.797682Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-04-17T17:59:29.797720Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-04-17T17:59:29.797827Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-04-17T17:59:29.803816Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-04-17T17:59:29.804024Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-04-17T17:59:29.804088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-17T17:59:29.804670Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-17T17:59:29.804968Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-04-17T17:59:29.805197Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-17T17:59:29.805253Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-17T17:59:29.805410Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2025-04-17T17:59:29.805479Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-04-17T17:59:29.805571Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-17T17:59:29.805691Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-04-17T17:59:29.807219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-17T17:59:29.813723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-17T17:59:29.823061Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:59:29.823526Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-17T17:59:29.823609Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-04-17T17:59:29.823711Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-04-17T17:59:29.824198Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 678 RawX2: 30064773663 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-04-17T17:59:29.829210Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 
2025-04-17T17:59:29.829411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-04-17T17:59:29.972038Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-04-17T17:59:29.982101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-17T17:59:29.982352Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 18262, MsgBus: 24456 2025-04-17T17:58:31.966580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343031374144847:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:31.991916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000bdf/r3tmp/tmpKgQgfU/pdisk_1.dat 2025-04-17T17:58:32.769629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:32.779915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:32.780010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:32.782489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18262, node 1 2025-04-17T17:58:33.118770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:33.118794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:33.118807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:33.118915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24456 TClient is connected to server localhost:24456 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:34.387429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:34.424738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:34.665016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:34.969978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:35.106153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.894703Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343031374144847:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:36.894782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:37.212300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343057143950257:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:37.212404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:37.539751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:37.611848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:37.674643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:37.759636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:37.831297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:37.898808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:37.996641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:38.074105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343061438918233:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:38.074199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:38.076045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343061438918238:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:38.079245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:38.093314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343061438918240:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:38.153026Z node 1 :TX_PROXY ERROR: Actor# [1:7494343061438918295:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27043, MsgBus: 20662 2025-04-17T17:58:44.914128Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343088108964377:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000bdf/r3tmp/tmp9OPbkp/pdisk_1.dat 2025-04-17T17:58:44.971313Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:58:45.292715Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:45.293850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:45.293912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:45.307324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27043, node 2 2025-04-17T17:58:45.466451Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:45.466473Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:45.466480Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:45.466608Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20662 TClient is connected to server localhost:20662 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:46.132691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:58:46.144847Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:58:46.157357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:46.265958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:46.480323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:02.899670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:02.985780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.037950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.110954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.204027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.308832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.366875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.546674Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343168047145616:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:03.546776Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:03.547286Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343168047145621:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:03.552063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:03.578660Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343168047145623:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:03.641047Z node 3 :TX_PROXY ERROR: Actor# [3:7494343168047145677:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21379, MsgBus: 5834 2025-04-17T17:59:12.060901Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343202589382816:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:12.061237Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000bdf/r3tmp/tmpy1w674/pdisk_1.dat 2025-04-17T17:59:12.429396Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:12.429489Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:12.450062Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21379, node 4 2025-04-17T17:59:12.558358Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:12.576144Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:12.576165Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:12.576173Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:12.576290Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5834 TClient is connected to server localhost:5834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T17:59:13.443439Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:59:13.467300Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:59:13.481287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:13.617419Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:13.882498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:14.104370Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:16.962632Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343202589382816:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:16.962695Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:18.016483Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343232654155484:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:18.016583Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:18.115857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:18.226372Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:18.325826Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:18.423769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:18.487736Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:18.594466Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:18.702851Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:18.847390Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343232654156179:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:18.847479Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:18.847854Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343232654156184:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:18.852537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:18.887836Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343232654156186:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:18.965247Z node 4 :TX_PROXY ERROR: Actor# [4:7494343232654156243:3570] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:27.474695Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:59:27.474721Z node 4 :IMPORT WARN: Table profiles were not loaded >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |82.8%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TPartitionTests::EndWriteTimestamp_FromMeta ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] Test command err: 2025-04-17T17:58:47.828125Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:47.828208Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:58:47.872068Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-04-17T17:58:47.872940Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: 
"i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: 
"\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: 
"\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageCh ... 
SQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-17T17:59:29.838526Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:29.838587Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:59:29.838691Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:59:29.838736Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:59:29.838849Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:29.838904Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:29.838967Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:59:29.839046Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:29.839115Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'rt3.dc1--account--topic' Partition: 0 SourceId: 'src4'. Message seqNo: 7. Committed seqNo: (NULL). Writing seqNo: 7. EndOffset: 50. CurOffset: 50. 
Offset: 50 2025-04-17T17:59:29.839223Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-04-17T17:59:29.840096Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 84 count 1 nextOffset 52 batches 1 2025-04-17T17:59:29.840188Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-04-17T17:59:29.840240Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 136 count 2 nextOffset 53 batches 1 2025-04-17T17:59:29.840279Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-04-17T17:59:29.840319Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 188 count 3 nextOffset 54 batches 1 2025-04-17T17:59:29.840361Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-04-17T17:59:29.840397Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 240 count 4 nextOffset 55 batches 1 2025-04-17T17:59:29.840435Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 12 partNo 0 2025-04-17T17:59:29.840487Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-04-17T17:59:29.840527Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:29.840564Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:29.840600Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-04-17T17:59:29.840641Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:29.840693Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 10 2025-04-17T17:59:29.840729Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 
0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:29.840766Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-04-17T17:59:29.841418Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 51,5 HeadOffset 50 endOffset 50 curOffset 56 d0000000000_00000000000000000051_00000_0000000005_00000| size 189 WTime 21151 2025-04-17T17:59:29.841679Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:29.841753Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:29.841810Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:59:29.841860Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:29.841911Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-04-17T17:59:29.841944Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc4 2025-04-17T17:59:29.841970Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:59:29.841997Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000005_00000| 2025-04-17T17:59:29.842022Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:29.842047Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:29.842070Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:29.842100Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:29.842144Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 0 2025-04-17T17:59:29.878237Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 85 WriteNewSizeFromSupportivePartitions# 4 2025-04-17T17:59:29.878325Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.878409Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 50 is already written 2025-04-17T17:59:29.878455Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:59:29.878497Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 50 is already written 2025-04-17T17:59:29.878526Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.878560Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 50 is already written 2025-04-17T17:59:29.878586Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.878623Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 50 is already written 2025-04-17T17:59:29.878650Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.878683Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 50 is already written 2025-04-17T17:59:29.878708Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.878741Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 12, partNo: 0, Offset: 50 is already written Wait immediate tx complete 3 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 2025-04-17T17:59:30.515105Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:30.515187Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:30.557841Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {1, {2, 3}, 4}, State: StateInit] bootstrapping {1, {2, 3}, 4} [5:179:2194] 2025-04-17T17:59:30.558893Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {1, {2, 3}, 4}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {1, {2, 3}, 4} generation 0 [5:179:2194] Got cmd write: CmdDeleteRange { Range { From: "M0000000004" IncludeFrom: true To: "M0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "D0000000004" IncludeFrom: true To: "D0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "X0000000004" IncludeFrom: true To: "X0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "J0000000004" IncludeFrom: true To: "J0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "K0000000004" IncludeFrom: true To: "K0000000005" IncludeTo: false } } >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TPQTest::TestCheckACL [GOOD] >> TPQTest::TestAlreadyWritten >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TFstClassSrcIdPQTest::NoMapping [GOOD] >> TFstClassSrcIdPQTest::ProperPartitionSelected >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TPartitionTests::DataTxCalcPredicateOrder >> TPartitionTests::EndWriteTimestamp_HeadKeys >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD] >> TPersQueueTest::ReadRuleDisallowDefaultServiceType >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQ >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> KqpJoinOrder::TPCDS16+ColumnStore >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TPartitionTests::TestBatchingWithProposeConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 6624286263486640923 Reassign# 3 -- VSlotId { NodeId: 4 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 3 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 4 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:34:0] Put# [1:1:2:0:0:8:0] Put# [1:1:3:0:0:67:0] Put# [1:1:4:0:0:7:0] Put# [1:1:5:0:0:94:0] 2025-04-17T17:56:31.444323Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T17:56:31.457094Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13636188177978238463] 2025-04-17T17:56:31.493468Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:3:0:0:67:1] 2025-04-17T17:56:31.493565Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:4:0:0:7:1] 2025-04-17T17:56:31.493601Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:5:0:0:94:2] 2025-04-17T17:56:31.493859Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 3 PartsResurrected# 3 Put# [1:1:6:0:0:7:0] Put# [1:1:7:0:0:87:0] Put# [1:1:8:0:0:58:0] Put# [1:1:9:0:0:81:0] Put# [1:1:10:0:0:54:0] Put# [1:1:11:0:0:17:0] Put# [1:1:12:0:0:5:0] Put# 
[1:1:13:0:0:99:0] Put# [1:1:14:0:0:58:0] Put# [1:1:15:0:0:48:0] Put# [1:1:16:0:0:12:0] Put# [1:1:17:0:0:13:0] Put# [1:1:18:0:0:55:0] Put# [1:1:19:0:0:18:0] Put# [1:1:20:0:0:9:0] Put# [1:1:21:0:0:40:0] Put# [1:1:22:0:0:20:0] Put# [1:1:23:0:0:26:0] Put# [1:1:24:0:0:20:0] Put# [1:1:25:0:0:13:0] Put# [1:1:26:0:0:30:0] Put# [1:1:27:0:0:91:0] Put# [1:1:28:0:0:24:0] Put# [1:1:29:0:0:23:0] Put# [1:1:30:0:0:12:0] Put# [1:1:31:0:0:67:0] Put# [1:1:32:0:0:38:0] Put# [1:1:33:0:0:90:0] Put# [1:1:34:0:0:50:0] Put# [1:1:35:0:0:12:0] Put# [1:1:36:0:0:56:0] Put# [1:1:37:0:0:29:0] Put# [1:1:38:0:0:64:0] Put# [1:1:39:0:0:31:0] Put# [1:1:40:0:0:56:0] Put# [1:1:41:0:0:24:0] Put# [1:1:42:0:0:72:0] Put# [1:1:43:0:0:61:0] Put# [1:1:44:0:0:6:0] Put# [1:1:45:0:0:23:0] Put# [1:1:46:0:0:97:0] Put# [1:1:47:0:0:99:0] Put# [1:1:48:0:0:74:0] Put# [1:1:49:0:0:87:0] Put# [1:1:50:0:0:81:0] Put# [1:1:51:0:0:81:0] Put# [1:1:52:0:0:70:0] Put# [1:1:53:0:0:69:0] Put# [1:1:54:0:0:81:0] Put# [1:1:55:0:0:92:0] Put# [1:1:56:0:0:44:0] Put# [1:1:57:0:0:91:0] Put# [1:1:58:0:0:26:0] Put# [1:1:59:0:0:33:0] Put# [1:1:60:0:0:73:0] Put# [1:1:61:0:0:85:0] Put# [1:1:62:0:0:62:0] Put# [1:1:63:0:0:2:0] Put# [1:1:64:0:0:94:0] Put# [1:1:65:0:0:66:0] Put# [1:1:66:0:0:99:0] Put# [1:1:67:0:0:25:0] Put# [1:1:68:0:0:13:0] Put# [1:1:69:0:0:48:0] Put# [1:1:70:0:0:58:0] Put# [1:1:71:0:0:3:0] Put# [1:1:72:0:0:16:0] Put# [1:1:73:0:0:39:0] Put# [1:1:74:0:0:21:0] Put# [1:1:75:0:0:43:0] Put# [1:1:76:0:0:31:0] Put# [1:1:77:0:0:53:0] Put# [1:1:78:0:0:72:0] Put# [1:1:79:0:0:83:0] Put# [1:1:80:0:0:54:0] Put# [1:1:81:0:0:67:0] Put# [1:1:82:0:0:43:0] Put# [1:1:83:0:0:55:0] Put# [1:1:84:0:0:57:0] Put# [1:1:85:0:0:85:0] Put# [1:1:86:0:0:90:0] Put# [1:1:87:0:0:25:0] Put# [1:1:88:0:0:50:0] Put# [1:1:89:0:0:11:0] Put# [1:1:90:0:0:69:0] Put# [1:1:91:0:0:26:0] Put# [1:1:92:0:0:72:0] Put# [1:1:93:0:0:99:0] Put# [1:1:94:0:0:8:0] Put# [1:1:95:0:0:47:0] Put# [1:1:96:0:0:99:0] Put# [1:1:97:0:0:81:0] Put# [1:1:98:0:0:72:0] Put# [1:1:99:0:0:21:0] Put# [1:1:100:0:0:44:0] Put# [1:1:101:0:0:55:0] Put# [1:1:102:0:0:48:0] Put# [1:1:103:0:0:65:0] Put# [1:1:104:0:0:13:0] Put# [1:1:105:0:0:82:0] Put# [1:1:106:0:0:17:0] Put# [1:1:107:0:0:17:0] Put# [1:1:108:0:0:12:0] Put# [1:1:109:0:0:30:0] Put# [1:1:110:0:0:70:0] Put# [1:1:111:0:0:36:0] Put# [1:1:112:0:0:17:0] Put# [1:1:113:0:0:4:0] Put# [1:1:114:0:0:15:0] Put# [1:1:115:0:0:90:0] Put# [1:1:116:0:0:19:0] Put# [1:1:117:0:0:25:0] Put# [1:1:118:0:0:69:0] Put# [1:1:119:0:0:12:0] Put# [1:1:120:0:0:13:0] Put# [1:1:121:0:0:45:0] Put# [1:1:122:0:0:50:0] Put# [1:1:123:0:0:50:0] Put# [1:1:124:0:0:63:0] Put# [1:1:125:0:0:86:0] Put# [1:1:126:0:0:85:0] Put# [1:1:127:0:0:92:0] Put# [1:1:128:0:0:40:0] Put# [1:1:129:0:0:8:0] Put# [1:1:130:0:0:20:0] Put# [1:1:131:0:0:100:0] Put# [1:1:132:0:0:13:0] Put# [1:1:133:0:0:24:0] Put# [1:1:134:0:0:100:0] Put# [1:1:135:0:0:4:0] Put# [1:1:136:0:0:81:0] Put# [1:1:137:0:0:91:0] Put# [1:1:138:0:0:56:0] Put# [1:1:139:0:0:2:0] Put# [1:1:140:0:0:52:0] Put# [1:1:141:0:0:10:0] Put# [1:1:142:0:0:85:0] Put# [1:1:143:0:0:48:0] Put# [1:1:144:0:0:46:0] Put# [1:1:145:0:0:60:0] Put# [1:1:146:0:0:40:0] Put# [1:1:147:0:0:77:0] Put# [1:1:148:0:0:32:0] Put# [1:1:149:0:0:22:0] Put# [1:1:150:0:0:63:0] Put# [1:1:151:0:0:41:0] Put# [1:1:152:0:0:2:0] Put# [1:1:153:0:0:8:0] Put# [1:1:154:0:0:41:0] Put# [1:1:155:0:0:58:0] Put# [1:1:156:0:0:18:0] Put# [1:1:157:0:0:45:0] Put# [1:1:158:0:0:47:0] Put# [1:1:159:0:0:53:0] Put# [1:1:160:0:0:87:0] Put# [1:1:161:0:0:36:0] Put# [1:1:162:0:0:12:0] Put# [1:1:163:0:0:48:0] Put# [1:1:164:0:0:32:0] Put# 
[1:1:165:0:0:12:0] Put# [1:1:166:0:0:24:0] Put# [1:1:167:0:0:77:0] Put# [1:1:168:0:0:19:0] Put# [1:1:169:0:0:99:0] Put# [1:1:170:0:0:66:0] Put# [1:1:171:0:0:68:0] Put# [1:1:172:0:0:83:0] Put# [1:1:173:0:0:55:0] Put# [1:1:174:0:0:26:0] Put# [1:1:175:0:0:95:0] Put# [1:1:176:0:0:38:0] Put# [1:1:177:0:0:7:0] Put# [1:1:178:0:0:99:0] Put# [1:1:179:0:0:82:0] Put# [1:1:180:0:0:2:0] Put# [1:1:181:0:0:29:0] Put# [1:1:182:0:0:34:0] Put# [1:1:183:0:0:9:0] Put# [1:1:184:0:0:52:0] Put# [1:1:185:0:0:30:0] Put# [1:1:186:0:0:52:0] Put# [1:1:187:0:0:81:0] Put# [1:1:188:0:0:6:0] Put# [1:1:189:0:0:81:0] Put# [1:1:190:0:0:61:0] Put# [1:1:191:0:0:25:0] Put# [1:1:192:0:0:8:0] Put# [1:1:193:0:0:18:0] Put# [1:1:194:0:0:29:0] Put# [1:1:195:0:0:19:0] Put# [1:1:196:0:0:18:0] Put# [1:1:197:0:0:70:0] Put# [1:1:198:0:0:25:0] Put# [1:1:199:0:0:34:0] Put# [1:1:200:0:0:67:0] Put# [1:1:201:0:0:56:0] Put# [1:1:202:0:0:39:0] Put# [1:1:203:0:0:81:0] Put# [1:1:204:0:0:77:0] Put# [1:1:205:0:0:63:0] Put# [1:1:206:0:0:80:0] Put# [1:1:207:0:0:21:0] Put# [1:1:208:0:0:89:0] Put# [1:1:209:0:0:26:0] Put# [1:1:210:0:0:36:0] Put# [1:1:211:0:0:39:0] Put# [1:1:212:0:0:20:0] Put# [1:1:213:0:0:19:0] Put# [1:1:214:0:0:2:0] Put# [1:1:215:0:0:63:0] Put# [1:1:216:0:0:39:0] Put# [1:1:217:0:0:93:0] Put# [1:1:218:0:0:95:0] Put# [1:1:219:0:0:87:0] Put# [1:1:220:0:0:26:0] Put# [1:1:221:0:0:98:0] Put# [1:1:222:0:0:57:0] Put# [1:1:223:0:0:31:0] Put# [1:1:224:0:0:100:0] Put# [1:1:225:0:0:46:0] Put# [1:1:226:0:0:70:0] Put# [1:1:227:0:0:89:0] Put# [1:1:228:0:0:84:0] Put# [1:1:229:0:0:47:0] Put# [1:1:230:0:0:10:0] Put# [1:1:231:0:0:67:0] Put# [1:1:232:0:0:92:0] Put# [1:1:233:0:0:99:0] Put# [1:1:234:0:0:7:0] Put# [1:1:235:0:0:17:0] Put# [1:1:236:0:0:60:0] Put# [1:1:237:0:0:10:0] Put# [1:1:238:0:0:3:0] Put# [1:1:239:0:0:55:0] Put# [1:1:240:0:0:71:0] Put# [1:1:241:0:0:93:0] Put# [1:1:242:0:0:2:0] Put# [1:1:243:0:0:9:0] Put# [1:1:244:0:0:94:0] Put# [1:1:245:0:0:78:0] Put# [1:1:246:0:0:57:0] Put# [1:1:247:0:0:58:0] Put# [1:1:248:0:0:4:0] Put# [1:1:249:0:0:16:0] Put# [1:1:250:0:0:98:0] Put# [1:1:251:0:0:18:0] Put# [1:1:252:0:0:84:0] Put# [1:1:253:0:0:84:0] Put# [1:1:254:0:0:35:0] Put# [1:1:255:0:0:33:0] Put# [1:1:256:0:0:33:0] Put# [1:1:257:0:0:68:0] Put# [1:1:258:0:0:84:0] Put# [1:1:259:0:0:50:0] Put# [1:1:260:0:0:40:0] Put# [1:1:261:0:0:89:0] Put# [1:1:262:0:0:5:0] Put# [1:1:263:0:0:6:0] Put# [1:1:264:0:0:16:0] Put# [1:1:265:0:0:12:0] Put# [1:1:266:0:0:55:0] Put# [1:1:267:0:0:65:0] Put# [1:1:268:0:0:21:0] Put# [1:1:269:0:0:80:0] Put# [1:1:270:0:0:3:0] Put# [1:1:271:0:0:47:0] Put# [1:1:272:0:0:83:0] Put# [1:1:273:0:0:72:0] Put# [1:1:274:0:0:68:0] Put# [1:1:275:0:0:41:0] Put# [1:1:276:0:0:54:0] Put# [1:1:277:0:0:50:0] Put# [1:1:278:0:0:54:0] Put# [1:1:279:0:0:60:0] Put# [1:1:280:0:0:28:0] Put# [1:1:281:0:0:23:0] Put# [1:1:282:0:0:96:0] Put# [1:1:283:0:0:59:0] Put# [1:1:284:0:0:64:0] Put# [1:1:285:0:0:46:0] Put# [1:1:286:0:0:35:0] Put# [1:1:287:0:0:71:0] Put# [1:1:288:0:0:62:0] Put# [1:1:289:0:0:5:0] Put# [1:1:290:0:0:70:0] Put# [1:1:291:0:0:16:0] Put# [1:1:292:0:0:5:0] Put# [1:1:293:0:0:77:0] Put# [1:1:294:0:0:84:0] Put# [1:1:295:0:0:88:0] Put# [1:1:296:0:0:69:0] Put# [1:1:297:0:0:75:0] Put# [1:1:298:0:0:31:0] Put# [1:1:299:0:0:19:0] Put# [1:1:300:0:0:88:0] Put# [1:1:301:0:0:83:0] Put# [1:1:302:0:0:10:0] Put# [1:1:303:0:0:6:0] Put# [1:1:304:0:0:20:0] Put# [1:1:305:0:0:17:0] Put# [1:1:306:0:0:90:0] Put# [1:1:307:0:0:51:0] Put# [1:1:308:0:0:76:0] Put# [1:1:309:0:0:27:0] Put# [1:1:310:0:0:27:0] Put# [1:1:311:0:0:73:0] Put# [1:1:312:0:0:5:0] Put# [1:1:313:0:0:59:0] 
Put# [1:1:314:0:0:58:0] Put# [1:1:315:0:0:15:0] Put# [1:1:316:0:0:5:0] Put# [1:1:317:0:0:2:0] Put# [1:1:318:0:0:4:0] Put# [1:1:319:0:0:58:0] Put# [1:1:320:0:0:77:0] Put# [1:1:321:0:0:56:0] Put# [1:1:322:0:0:97:0] Put# [1:1:323:0:0:18:0] Put# [1:1:324:0:0:91:0] Put# [1:1:325:0:0:88:0] Put# [1:1:326:0:0:45:0] Put# [1:1:327:0:0:49:0] Put# [1:1:328:0:0:15:0] Put# [1:1:329:0:0:98:0] Put# [1:1:330:0:0:71:0] Put# [1:1:331:0:0:10:0] Put# [1:1:332:0:0:31:0] Put# [1:1:333:0:0:83:0] Put# [1:1:334:0:0:61:0] Put# [1:1:335:0:0:73:0] Put# [1:1:336:0:0:48:0] Put# [1:1:337:0:0:66:0] Put# [1:1:338:0:0:19:0] Put# [1:1:339:0:0:69:0] Put# [1:1:340:0:0:50:0] Put# [1:1:341:0:0:24:0] Put# [1:1:342:0:0:29:0] Put# [1:1:343:0:0:68:0] Put# [1:1:344:0:0:94:0] Put# [1:1:345:0:0:11:0] Put# [1:1:346:0:0:84:0] Put# [1:1:347:0:0:60:0] Put# [1:1:348:0:0:68:0] Put# [1:1:349:0:0:30:0] Put# [1:1:350:0:0:19:0] Put# [1:1:351:0:0:38:0] Put# [1:1:352:0:0:48:0] Put# [1:1:353:0:0:63:0] Put# [1:1:354:0:0:32:0] Put# [1:1:355:0:0:89:0] Put# [1:1:356:0:0:55:0] Put# [1:1:357:0:0:10:0] Put# [1:1:358:0:0:92:0] Put# [1:1:359:0:0:23:0] Put# [1:1:360:0:0:81:0] Put# [1:1:361:0:0:42:0] Put# [1:1:362:0:0:72:0] Put# [1:1:363:0:0:12:0] Put# [1:1:364:0:0:42:0] Put# [1:1:365:0:0:14:0] Put# [1:1:366:0:0:49:0] Put# [1:1:367:0:0:53:0] Put# [1:1:368:0:0:34:0] Put# [1:1:369:0:0:64:0] Put# [1:1:370:0:0:88:0] Put# [1:1:371:0:0:72:0] Put# [1:1:372:0:0:56:0] Put# [1:1:373:0:0:29:0] Put# [1:1:374:0:0:89:0] Put# [1:1:375:0:0:45:0] Put# [1:1:376:0:0:72:0] Put# [1:1:377:0:0:55:0] Put# [1:1:378:0:0:37:0] Put# [1:1:379:0:0:23:0] Put# [1:1:380:0:0:99:0] Put# [1:1:381:0:0:38:0] Put# [1:1:382:0:0:26:0] Put# [1:1:383:0:0:76:0] Put# [1:1:384:0:0:76:0] Put# [1:1:385:0:0:39:0] Put# [1:1:386:0:0:67:0] Put# [1:1:387:0:0:71:0] Put# [1:1:388:0:0:68:0] Put# [1:1:389:0:0:99:0] Put# [1:1:390:0:0:24:0] Put# [1:1:391:0:0:98:0] Put# [1:1:392:0:0:27:0] Put# [1:1:393:0:0:75:0] Put# [1:1:394:0:0:72:0] Put# [1:1:395:0:0:69:0] Put# [1:1:396:0:0:93:0] Put# [1:1:397:0:0:89:0] Put# [1:1:398:0:0:15:0] Put# [1:1:399:0:0:97:0] Put# [1:1:400:0:0:6:0] Put# [1:1:401:0:0:80:0] Put# [1:1:402:0:0:43:0] Put# [1:1:403:0:0:35:0] Put# [1:1:404:0:0:68:0] Put# [1:1:405:0:0:13:0] Put# [1:1:406:0:0:10:0] Put# [1:1:407:0:0:54:0] Put# [1:1:408:0:0:73:0] Put# [1:1:409:0:0:1:0] Put# [1:1:410:0:0:76:0] Put# [1:1:411:0:0:7:0] Put# [1:1:412:0:0:78:0] Put# [1:1:413:0:0:3:0] Put# [1:1:414:0:0:73:0] Put# [1:1:415:0:0:54:0] Put# [1:1:416:0:0:4:0] Put# [1:1:417:0:0:12:0] Put# [1:1:418:0:0:1:0] Put# [1:1:419:0:0:99:0] Put# [1:1:420:0:0:67:0] Put# [1:1:421:0:0:51:0] Put# [1:1:422:0:0:38:0] Put# [1:1:423:0:0:46:0] Put# [1:1:424:0:0:40:0] Put# [1:1:425:0:0:53:0] Put# [1:1:426:0:0:10:0] Put# [1:1:427:0:0:73:0] Put# [1:1:428:0:0:91:0] Put# [1:1:429:0:0:68:0] Put# [1:1:430:0:0:20:0] Put# [1:1:431:0:0:40:0] Put# [1:1:432:0:0:42:0] Put# [1:1:433:0:0:97:0] Put# [1:1:434:0:0:57:0] Put# [1:1:435:0:0:46:0] Put# [1:1:436:0:0:17:0] Put# [1:1:437:0:0:99:0] Put# [1:1:438:0:0:10:0] Put# [1:1:439:0:0:14:0] Put# [1:1:440:0:0:14:0] Put# [1:1:441:0:0:5:0] Put# [1:1:442:0:0:7:0] Put# [1:1:443:0:0:29:0] Put# [1:1:444:0:0:93:0] Put# [1:1:445:0:0:73:0] Put# [1:1:446:0:0:1 ... 
h02m43.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.586579Z 2 00h02m44.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.586788Z 2 00h02m44.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.586943Z 2 00h02m44.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.587076Z 2 00h02m44.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.587202Z 2 00h02m44.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.587324Z 2 00h02m44.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.587453Z 2 00h02m44.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.667786Z 2 00h02m45.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.668005Z 2 00h02m45.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.668145Z 2 00h02m45.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.668284Z 2 00h02m45.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.668420Z 2 00h02m45.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.668584Z 2 00h02m45.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.668714Z 2 00h02m45.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.736030Z 2 00h02m46.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.736226Z 2 00h02m46.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.736358Z 2 00h02m46.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.736500Z 2 00h02m46.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 
2025-04-17T17:59:30.736630Z 2 00h02m46.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.736749Z 2 00h02m46.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.736871Z 2 00h02m46.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.797963Z 2 00h02m47.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.798197Z 2 00h02m47.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.798336Z 2 00h02m47.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.798477Z 2 00h02m47.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.798602Z 2 00h02m47.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.798728Z 2 00h02m47.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.798849Z 2 00h02m47.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.871727Z 2 00h02m48.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.871931Z 2 00h02m48.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.872063Z 2 00h02m48.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.872191Z 2 00h02m48.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.872321Z 2 00h02m48.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.872463Z 2 00h02m48.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.872583Z 2 00h02m48.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.933168Z 2 00h02m49.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.933366Z 2 00h02m49.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): 
data.empty() && !EndOfStream 2025-04-17T17:59:30.933497Z 2 00h02m49.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.933628Z 2 00h02m49.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.933800Z 2 00h02m49.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.933929Z 2 00h02m49.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:30.934049Z 2 00h02m49.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.025806Z 2 00h02m50.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.025995Z 2 00h02m50.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.026112Z 2 00h02m50.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.026513Z 2 00h02m50.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.026662Z 2 00h02m50.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.026792Z 2 00h02m50.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.026934Z 2 00h02m50.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.108899Z 2 00h02m51.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.109117Z 2 00h02m51.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.109266Z 2 00h02m51.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.109446Z 2 00h02m51.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.109612Z 2 00h02m51.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.109772Z 2 00h02m51.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.109951Z 2 00h02m51.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) 
TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.177978Z 2 00h02m52.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.178338Z 2 00h02m52.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.178512Z 2 00h02m52.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.178650Z 2 00h02m52.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.178784Z 2 00h02m52.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.178914Z 2 00h02m52.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-17T17:59:31.179041Z 2 00h02m52.800571s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-04-17T17:59:10.569817Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:10.569912Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:10.603037Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:59:10.603288Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:10.603707Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:178:2193] 2025-04-17T17:59:10.604618Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:59:10.604795Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:10.604948Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:10.605136Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:10.605543Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-17T17:59:10.605637Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:10.605681Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:10.605757Z node 1 :PERSQUEUE INFO: 
[Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:10.000000Z 2025-04-17T17:59:10.605797Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:10.605847Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:178:2193] 2025-04-17T17:59:10.605913Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-17T17:59:10.605967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:10.968998Z node 1 :PERSQUEUE INFO: new Cookie src1|f72f3ec4-bff73872-22d522e7-e98e373a_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-17T17:59:10.969186Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 2025-04-17T17:59:12.026485Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:13.446697Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Wait batch completion 2025-04-17T17:59:13.446953Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:13.447015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:13.447152Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 1 partNo 0 2025-04-17T17:59:13.448136Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-04-17T17:59:13.448233Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:13.448286Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:13.448337Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-04-17T17:59:13.475656Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:13.475805Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' 
seqNo 3 partNo 0 2025-04-17T17:59:13.477257Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 3 partNo 0 result is x0000000000_00000000000000000020_00000_0000000001_00000 size 70 2025-04-17T17:59:13.477347Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000020_00000_0000000001_00000 new key d0000000000_00000000000000000020_00000_0000000001_00000 size 70 WTime 10139 2025-04-17T17:59:13.478287Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 3 partNo 0 FormedBlobsCount 1 NewHead: Offset 50 PartNo 0 PackedSize 84 count 1 nextOffset 51 batches 1 2025-04-17T17:59:13.478367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:13.478400Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:13.478427Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 84 count 1 nextOffset 51 batches 1 2025-04-17T17:59:13.478471Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:13.478961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 50,1 HeadOffset 1 endOffset 1 curOffset 51 d0000000000_00000000000000000050_00000_0000000001_00000| size 70 WTime 10139 2025-04-17T17:59:13.479174Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:13.479232Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:13.479264Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:59:13.479293Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:13.479329Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000020_00000_0000000001_00000 2025-04-17T17:59:13.479355Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:59:13.479369Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000050_00000_0000000001_00000| 2025-04-17T17:59:13.479393Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:13.479407Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:13.479419Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:13.479437Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:13.479507Z node 1 :PERSQUEUE 
DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 5 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 3 2025-04-17T17:59:13.491725Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 34 WriteNewSizeFromSupportivePartitions# 2 2025-04-17T17:59:13.491791Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:13.491861Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 1 is already written 2025-04-17T17:59:13.491895Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:13.491929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 1 is already written Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 2025-04-17T17:59:14.853162Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-04-17T17:59:14.853316Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 12 2025-04-17T17:59:16.318724Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Wait batch completion 2025-04-17T17:59:16.318917Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:16.318971Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:16.319050Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:16.319138Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:16.319186Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 70 count 1 nextOffset 51 batches 1, NewHead=Offset 51 PartNo 0 PackedSize 0 count 0 nextOffset 51 batches 0 2025-04-17T17:59:16.319279Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedu ... KV request Got KV request Send disk status response with cookie: 0 2025-04-17T17:59:29.709805Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 170 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:29.709907Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:59:29.709991Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 70 is stored on disk 2025-04-17T17:59:29.710039Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710078Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 71 is stored on disk 2025-04-17T17:59:29.710104Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710140Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 72 is stored on disk 2025-04-17T17:59:29.710197Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710231Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 73 is stored on disk 2025-04-17T17:59:29.710261Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710296Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 74 is stored on disk 2025-04-17T17:59:29.710328Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710362Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 75 is stored on disk 2025-04-17T17:59:29.710393Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710428Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 76 is stored on disk 2025-04-17T17:59:29.710472Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710506Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 77 is stored on disk 2025-04-17T17:59:29.710533Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:59:29.710571Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 78 is stored on disk 2025-04-17T17:59:29.710603Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T17:59:29.710638Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 79 is stored on disk 2025-04-17T17:59:29.710902Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:29.710950Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:29.710995Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000020_00000_0000000005_00000|, d0000000000_00000000000000000020_00000_0000000005_00000|] 2025-04-17T17:59:29.711034Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:29.711077Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:29.711116Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:29.711165Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 2025-04-17T17:59:29.711452Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-04-17T17:59:29.711555Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 12 2025-04-17T17:59:30.753064Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:30.753176Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-04-17T17:59:30.753339Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:30.753395Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:30.753460Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=MinSeqNo violation failure on src2 Got batch complete: 3 2025-04-17T17:59:31.018720Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 12 2025-04-17T17:59:31.018806Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 12 2025-04-17T17:59:31.018873Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 
2025-04-17T17:59:31.018928Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 299 count 10 nextOffset 80 batches 1, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2025-04-17T17:59:31.019136Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:31.019178Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:31.019217Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:31.019252Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrcId2 2025-04-17T17:59:31.019285Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:31.019309Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:31.019331Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:31.019361Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:31.019398Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 10 2025-04-17T17:59:31.054917Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2025-04-17T17:59:31.845154Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:31.845218Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:31.860586Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [3:179:2194] 2025-04-17T17:59:31.862319Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:31.000000Z 2025-04-17T17:59:31.862383Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [3:179:2194] 2025-04-17T17:59:32.888685Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:32.888753Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:32.905091Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [4:179:2194] 2025-04-17T17:59:32.906891Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-17T17:59:32.906963Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:179:2194] 2025-04-17T17:59:33.875439Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:33.875539Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:33.923160Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [5:179:2194] >>>> ADD BLOB 0 writeTimestamp=2025-04-17T17:59:33.915139Z >>>> ADD BLOB 1 writeTimestamp=2025-04-17T17:59:33.915166Z >>>> ADD BLOB 2 writeTimestamp=2025-04-17T17:59:33.915183Z >>>> ADD BLOB 3 writeTimestamp=2025-04-17T17:59:33.915199Z >>>> ADD BLOB 4 writeTimestamp=2025-04-17T17:59:33.915213Z >>>> ADD BLOB 5 writeTimestamp=2025-04-17T17:59:33.915229Z >>>> ADD BLOB 6 writeTimestamp=2025-04-17T17:59:33.915243Z >>>> ADD BLOB 7 writeTimestamp=2025-04-17T17:59:33.915256Z >>>> ADD BLOB 8 writeTimestamp=2025-04-17T17:59:33.915268Z >>>> ADD BLOB 9 writeTimestamp=2025-04-17T17:59:33.915286Z 2025-04-17T17:59:33.931222Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:33.000000Z 2025-04-17T17:59:33.931312Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:179:2194] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/etcd_proxy |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |82.8%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy >> TPersQueueTest::DirectReadRestartPQRB [GOOD] >> TPersQueueTest::DirectReadRestartTablet >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:59:26.563369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:26.563481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:26.563517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:26.563556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:26.563610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:26.563640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:26.563765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:26.563856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:26.564166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:26.699818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:26.699878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:26.735995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:26.736180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:26.736355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:26.764637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:26.765039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:26.766408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:26.766788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:26.777691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:26.779162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:26.779249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:26.779348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:26.779395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:26.779432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:26.779661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.800805Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:59:26.971267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
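The most compact summary of what the schemeshard does below is the "Change state for txid X:Y A -> B" trail (2 -> 3 -> 128 -> 240 for operation 1:0 in this test). A sketch that reconstructs that trail per operation id, assuming only the line shape visible in this log:

    import re
    from collections import defaultdict

    STATE_RE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

    def state_paths(log_text):
        """Map each operation id (e.g. '1:0') to its ordered transitions."""
        paths = defaultdict(list)
        for op_id, src, dst in STATE_RE.findall(log_text):
            paths[op_id].append((int(src), int(dst)))
        return paths

    # Against this excerpt, state_paths(text)['1:0'] comes back as
    # [(2, 3), (3, 128), (128, 240)].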
2025-04-17T17:59:26.971501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.971708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:26.971964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:26.972040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.978150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:26.978339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:26.978549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.978620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:26.978662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:26.978695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:26.981159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.981220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:26.981253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:26.991017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.991074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.991108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:26.991158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:26.993814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:26.999058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:26.999259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:27.000225Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:27.000379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:27.000431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:27.000720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:27.000767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:27.000912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:27.000992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:27.008043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:27.008102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:27.008295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:27.008337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:27.008674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.008723Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:27.008811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:27.008843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:27.008882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:27.008915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:27.008952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:27.009012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:27.009052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:27.009098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:27.009173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:27.009209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:27.009244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:27.011270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:27.011413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:27.011459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:35.338925Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_0', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter), operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-17T17:59:35.347645Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:35.354313Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxAlterExtSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:35.354422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 ProgressState no shards to create, do next state 2025-04-17T17:59:35.354472Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-04-17T17:59:35.363265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:35.363345Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:35.363393Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-04-17T17:59:35.367362Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:35.367432Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:35.367492Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-04-17T17:59:35.367559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-17T17:59:35.367754Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:35.369820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-17T17:59:35.369983Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 
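The FAKE_COORDINATOR lines above pin each transaction to a mediator step (txId 1 at step 5000001, txId 103 at step 5000004). When a test hangs, it helps to know which transactions were added but never planned; a hedged sketch under the same only-what-is-visible assumption:

    import re

    ADD_RE = re.compile(r"FAKE_COORDINATOR: Add transaction: (\d+) at step: (\d+)")
    PLAN_RE = re.compile(
        r"FAKE_COORDINATOR: Send Plan to tablet \d+ for txId: (\d+) at step: (\d+)")

    def coordinator_progress(log_text):
        """Pair each txId with its 'added' step and, if present, its 'planned' step."""
        added = dict(ADD_RE.findall(log_text))
        planned = dict(PLAN_RE.findall(log_text))
        return {tx: (int(step), int(planned[tx]) if tx in planned else None)
                for tx, step in added.items()}

A txId whose second element stays None was accepted by the coordinator but never shipped in a plan, which is usually where a stuck schemeshard test is waiting.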
2025-04-17T17:59:35.370458Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:35.370605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:35.370669Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-17T17:59:35.370966Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-17T17:59:35.371031Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-17T17:59:35.371183Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:35.371369Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:396:2367], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 103 2025-04-17T17:59:35.379501Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-04-17T17:59:35.379664Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-04-17T17:59:35.379869Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-17T17:59:35.380172Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:35.380225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:59:35.380463Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:35.380514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:59:35.381025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:35.381097Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-04-17T17:59:35.381139Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-04-17T17:59:35.382319Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:35.382454Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:35.382502Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation 
in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:59:35.382553Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-17T17:59:35.382607Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T17:59:35.382727Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:59:35.395130Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-17T17:59:35.395260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:35.395377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:396:2367], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:59:35.395497Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-17T17:59:35.395534Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-17T17:59:35.395668Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-17T17:59:35.395701Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:491:2434], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-17T17:59:35.396959Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-17T17:59:35.397063Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:35.397111Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:59:35.397254Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:35.397299Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:35.397357Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:35.397397Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:35.397443Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:59:35.397501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:35.397547Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:59:35.397589Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:59:35.397686Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:35.407647Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:59:35.407766Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-17T17:59:35.420645Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:59:35.420721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:59:35.421205Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:59:35.421322Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:59:35.421363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:571:2512] TestWaitNotification: OK eventTxId 103 >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpLimits::CancelAfterRoTx [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TopicService::AccessRights [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] Test command err: 2025-04-17T17:59:20.571555Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:20.571653Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:20.639899Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-17T17:59:20.641821Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-17T17:59:20.000000Z 2025-04-17T17:59:20.641913Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\300\371\236\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\300\371\236\247\3442" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-17T17:59:21.937868Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:21.937942Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:21.977812Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] 2025-04-17T17:59:21.979473Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:21.000000Z 2025-04-17T17:59:21.979536Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\201\237\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-17T17:59:23.383209Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:23.383287Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:23.415040Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:59:23.415347Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:23.415678Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:178:2193] 2025-04-17T17:59:23.416635Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:59:23.416819Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:23.416999Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:23.417203Z node 3 :PERSQUEUE DEBUG: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:23.417635Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:59:23.417735Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:23.417806Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:23.417854Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:23.000000Z 2025-04-17T17:59:23.417898Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:23.417949Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:178:2193] 2025-04-17T17:59:23.418013Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:59:23.418060Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:24.874390Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client session is set to 0 (startOffset 0) session session 2025-04-17T17:59:24.874605Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:24.874667Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:24.874722Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:24.874772Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:24.874805Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient 2025-04-17T17:59:24.874832Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient 2025-04-17T17:59:24.874873Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:24.874936Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\370\220\237\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-17T17:59:24.899065Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0
WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 2025-04-17T17:59:24.899393Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait first predicate result 2025-04-17T17:59:26.421276Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-04-17T17:59:26.421576Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 Wait second predicate result 2025-04-17T17:59:27.943806Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:27.943897Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-17T17:59:27.943953Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-17T17:59:27.944021Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:27.944079Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 Got batch complete: 1 2025-04-17T17:59:27.944284Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-04-17T17:59:27.944342Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-04-17T17:59:27.944397Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:27.944442Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:59:27.944682Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:27.944735Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:27.944774Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:27.944814Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:59:27.944860Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:27.944885Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:27.944906Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:27.944936Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:27.944974Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-04-17T17:59:27.945201Z node 3 
:PERSQUEUE INFO: new Cookie owner1|8c62d5e4-956dc094-6c202de1-b7a4983e_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-04-17T17:59:27.961647Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 2025-04-17T17:59:27.961781Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 2025-04-17T17:59:27.964085Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 5 partNo 0 2025-04-1 ... E DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:30.329362Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:30.329569Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:30.330020Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-17T17:59:30.330118Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:30.334281Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:30.334385Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:30.000000Z 2025-04-17T17:59:30.334434Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:30.334490Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-04-17T17:59:30.334574Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-17T17:59:30.334629Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:31.815532Z node 4 :PERSQUEUE INFO: new Cookie SourceId|812a3344-f5cddf0e-4bb770ed-8fd2f841_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId 2025-04-17T17:59:31.815707Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk.
Partition: 0 Got batch complete: 1 Wait write response Wait kv request 2025-04-17T17:59:31.816029Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-04-17T17:59:31.816930Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 11 PartNo 0 PackedSize 118 count 1 nextOffset 12 batches 1 2025-04-17T17:59:31.817544Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 11,1 HeadOffset 1 endOffset 1 curOffset 12 d0000000000_00000000000000000011_00000_0000000001_00000| size 104 WTime 5132 2025-04-17T17:59:31.817694Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:31.817740Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:31.817804Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T17:59:31.817850Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:31.817892Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-04-17T17:59:31.817928Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000011_00000_0000000001_00000| 2025-04-17T17:59:31.817952Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:31.817982Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:31.818023Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request 2025-04-17T17:59:31.842647Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:31.842797Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T17:59:31.842882Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 11 is stored on disk Wait second predicate result Create distr tx with id = 0 and act no: 1 2025-04-17T17:59:31.843268Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:33.276415Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-04-17T17:59:33.774700Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:33.774759Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:33.791988Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:59:33.792234Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:33.792523Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-04-17T17:59:33.793490Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:59:33.793667Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:33.793847Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:33.794042Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:33.794621Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:59:33.794753Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:33.794801Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:33.794855Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:33.000000Z 2025-04-17T17:59:33.794896Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:33.794947Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-04-17T17:59:33.795012Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:59:33.795063Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events.
Count 0 Create distr tx with id = 0 and act no: 1 2025-04-17T17:59:35.129533Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:36.594106Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-04-17T17:59:36.594481Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-17T17:59:36.594606Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-17T17:59:36.594661Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-17T17:59:36.594723Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:36.594775Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:59:38.018696Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-04-17T17:59:38.018947Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-04-17T17:59:38.019005Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-04-17T17:59:38.019071Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:38.019125Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:59:38.019333Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:38.019380Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:38.019420Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:38.019464Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-17T17:59:38.019497Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:38.019524Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:38.019547Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:38.019582Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:38.019628Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 0 2025-04-17T17:59:38.042334Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:59:26.806503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:26.806609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:26.806655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:26.806691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:26.806733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:26.806765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:26.806849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:26.806931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:26.807326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:26.915864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:26.915923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:26.947793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:26.947921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:26.948108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:26.958167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:26.958568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:26.959266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:26.959604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:26.971074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:26.972594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:26.972660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:26.972785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:26.972843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:26.972885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:26.973092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:26.981671Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:59:27.206544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:27.206893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.207088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:27.207345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:27.207402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.219330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:27.219443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:27.219630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.219690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:27.219756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:27.219783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:27.225570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.225845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:27.225897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:27.228390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.228468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.234255Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:27.234408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:27.238227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:27.253676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:27.253977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:27.255143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:27.255278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:27.255347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:27.255647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:27.255713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:27.255892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:27.255980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:27.275609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:27.275672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:27.275854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:27.275896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:27.276431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:27.276524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:27.276622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:27.276668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-17T17:59:27.276705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:27.276794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:27.276830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:27.276873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:27.276910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:27.276938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:27.277012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:27.277045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:27.277074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:27.292943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:27.293106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:27.293150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... RD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:37.385969Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 104:0, stepId:5000005, at schemeshard: 72057594046678944 2025-04-17T17:59:37.386307Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:59:37.386379Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:59:37.386444Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T17:59:37.386495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:59:37.386588Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:59:37.386687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T17:59:37.386888Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:352:2330], msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72057594046678944 2025-04-17T17:59:37.386957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T17:59:37.387017Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T17:59:37.387079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T17:59:37.387152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:37.387207Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-04-17T17:59:37.387262Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-04-17T17:59:37.390380Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-04-17T17:59:37.390558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-04-17T17:59:37.390871Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-17T17:59:37.391154Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:37.391237Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:59:37.391517Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:37.391585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T17:59:37.392876Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:59:37.393011Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T17:59:37.393060Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T17:59:37.393117Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-17T17:59:37.393164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:59:37.393266Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T17:59:37.396188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-17T17:59:37.396296Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:37.396401Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:352:2330], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:59:37.396519Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T17:59:37.396549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T17:59:37.396700Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T17:59:37.396731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:446:2398], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-17T17:59:37.397519Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T17:59:37.397608Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:37.397756Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T17:59:37.398055Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T17:59:37.398108Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T17:59:37.398623Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T17:59:37.398732Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T17:59:37.398777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:543:2493] TestWaitNotification: OK eventTxId 104 2025-04-17T17:59:37.399358Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:37.399612Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 284us result status StatusSuccess 2025-04-17T17:59:37.400035Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:37.400759Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-17T17:59:37.400966Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 234us result status StatusSuccess 2025-04-17T17:59:37.401390Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 20778, MsgBus: 15389 2025-04-17T17:57:04.962563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342655687112357:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:04.968020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1a/r3tmp/tmpMor185/pdisk_1.dat 2025-04-17T17:57:05.646326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:05.662441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:05.662592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:05.681081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20778, node 1 2025-04-17T17:57:05.862661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:05.862678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:05.862684Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:05.862774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15389 TClient is connected to server localhost:15389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:06.856779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:06.870604Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:06.883229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:57:07.113807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:07.369840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:07.502878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:09.966507Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342655687112357:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:09.966587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:10.474743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342681456917896:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:10.474920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:10.915985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:10.973507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.051838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.134552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.198343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.253788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.321034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:11.410313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342685751885871:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.410464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.410990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342685751885876:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:11.414757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:11.434333Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:57:11.434595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342685751885879:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:11.517918Z node 1 :TX_PROXY ERROR: Actor# [1:7494342685751885934:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:12.847408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:57:13.408809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:57:13.409064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:57:13.409317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:57:13.409423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:57:13.409529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:57:13.409627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037946;self_id=[1:7494342694341821179:2536];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:57:13.409676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:57:13.409688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037946;self_id=[1:7494342694341821179:2536];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:57:13.414939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:57:13.415120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:57:13.415252Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:57:13.415361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7494342694341821204:2539];tablet_id=72075186224037949;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:57:13.415470Z node 1 :T ...
: Error: Cancelling after 449ms during execution } ] 2025-04-17T17:59:15.177515Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343219377431431:6047], TxId: 281474976710924, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efa164y79ept8e92h8n1h. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343219377431422:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:15.177640Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343219377431439:6053], TxId: 281474976710924, task: 8. Ctx: { CustomerSuppliedId : . TraceId : 01js2efa164y79ept8e92h8n1h. SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343219377431422:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:15.177756Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343219377431432:6048], TxId: 281474976710924, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efa164y79ept8e92h8n1h. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343219377431422:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:15.177874Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343219377431441:6054], TxId: 281474976710924, task: 9. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CustomerSuppliedId : . TraceId : 01js2efa164y79ept8e92h8n1h. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343219377431422:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:15.177946Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343219377431433:6049], TxId: 281474976710924, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efa164y79ept8e92h8n1h. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494343219377431422:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:15.178525Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efa164y79ept8e92h8n1h, Create QueryResponse for error on request, msg: 2025-04-17T17:59:17.055274Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efbvtbqkaspxbphw7jynx, Create QueryResponse for error on request, msg: 2025-04-17T17:59:18.004518Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efcs97q24m9crqw6dzcc2, Create QueryResponse for error on request, msg: 2025-04-17T17:59:18.616068Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efdce03153r31ypqj57r1, Create QueryResponse for error on request, msg: 2025-04-17T17:59:22.453773Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494343249442203601:2517] TxId: 281474976710942. Ctx: { TraceId: 01js2efh347da710hphj77f5ed, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 468ms } {
: Error: Cancelling after 473ms during execution } ] 2025-04-17T17:59:22.454391Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203608:6305], TxId: 281474976710942, task: 1. Ctx: { TraceId : 01js2efh347da710hphj77f5ed. SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.454777Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203609:6306], TxId: 281474976710942, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2efh347da710hphj77f5ed. SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.454982Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203610:6307], TxId: 281474976710942, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01js2efh347da710hphj77f5ed. SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.455177Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203611:6308], TxId: 281474976710942, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efh347da710hphj77f5ed. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.455361Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203612:6309], TxId: 281474976710942, task: 5. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efh347da710hphj77f5ed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.455549Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203613:6310], TxId: 281474976710942, task: 6. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efh347da710hphj77f5ed. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.455732Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203614:6311], TxId: 281474976710942, task: 7. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efh347da710hphj77f5ed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.455913Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203615:6312], TxId: 281474976710942, task: 8. Ctx: { TraceId : 01js2efh347da710hphj77f5ed. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.456080Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203616:6313], TxId: 281474976710942, task: 9. Ctx: { TraceId : 01js2efh347da710hphj77f5ed. SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203601:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.456804Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efh347da710hphj77f5ed, Create QueryResponse for error on request, msg: 2025-04-17T17:59:22.970593Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494343249442203673:2517] TxId: 281474976710944. Ctx: { TraceId: 01js2efhm58jeyrmm8k3zxhqhw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 469ms } {
: Error: Cancelling after 468ms during execution } ] 2025-04-17T17:59:22.970726Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203683:6323], TxId: 281474976710944, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01js2efhm58jeyrmm8k3zxhqhw. SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494343249442203673:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.971003Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343249442203688:6328], TxId: 281474976710944, task: 9. Ctx: { SessionId : ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=. TraceId : 01js2efhm58jeyrmm8k3zxhqhw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494343249442203673:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:22.971514Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efhm58jeyrmm8k3zxhqhw, Create QueryResponse for error on request, msg: 2025-04-17T17:59:24.962538Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efkj316t1033g1yzppkxh, Create QueryResponse for error on request, msg: 2025-04-17T17:59:26.170547Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efmqs9n5jay3dr7nge1pd, Create QueryResponse for error on request, msg: 2025-04-17T17:59:27.678828Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efp6wcvj7abnafk7h62ca, Create QueryResponse for error on request, msg: 2025-04-17T17:59:34.874111Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGFhMDQ0MzQtYWQ2MzI1M2YtMWU4YzQyNTItZmU5ZDhkMTA=, ActorId: [4:7494342828535392750:2517], ActorState: ExecuteState, TraceId: 01js2efx72a9ngxabqf3x0jmb0, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] Test command err: 2025-04-17T17:57:46.019251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342832488109497:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:46.047295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:46.554601Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e5/r3tmp/tmprf0z8D/pdisk_1.dat 2025-04-17T17:57:46.607851Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:46.713990Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:47.216464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:47.452969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:47.453644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:47.454782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:47.454849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:47.461078Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:47.469101Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:47.470426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:47.471921Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25094, node 1 2025-04-17T17:57:48.010099Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015e5/r3tmp/yandextTugB5.tmp 2025-04-17T17:57:48.010124Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0015e5/r3tmp/yandextTugB5.tmp 2025-04-17T17:57:48.010306Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015e5/r3tmp/yandextTugB5.tmp 2025-04-17T17:57:48.010445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:48.188052Z INFO: TTestServer started on Port 18023 GrpcPort 25094 TClient is connected to server localhost:18023 PQClient connected to localhost:25094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:48.596173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:57:48.718897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:57:50.994317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342832488109497:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:50.994413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:51.594482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342858760266822:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.594614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.598304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342858257914390:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.598518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.598620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342858760266834:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.603528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342858257914403:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.637618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342858257914435:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.637694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:51.643562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:57:51.690622Z node 2 :TX_PROXY ERROR: Actor# [2:7494342858760266837:2174] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:57:51.755428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342858257914405:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:57:51.758500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342858760266836:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:57:51.843628Z node 1 :TX_PROXY ERROR: Actor# [1:7494342858257914497:2774] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:51.851674Z node 2 :TX_PROXY ERROR: Actor# [2:7494342858760266863:2180] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:53.061964Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342858257914520:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:53.061412Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342858760266877:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:53.087138Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQ2ZmEwODktNmU5ZDUxY2YtMzE4ZmJlNTItNmNiMmQ2ZTI=, ActorId: [2:7494342858760266820:2313], ActorState: ExecuteState, TraceId: 01js2ecrte953r8f0z69x34evz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:53.099735Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmI0MDUwZWYtNTI0ZGIwN2MtYzA2Yzk1MjQtOTNiZTI5MTk=, ActorId: [1:7494342858257914388:2339], ActorState: ExecuteState, TraceId: 01js2ecrta0s9ercvzd4284xbs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:53.110160Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:53.111136Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ ... , status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:59:19.785391Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=Yzg5MGIyZDktZWM1MTliOTgtMzQwZTRjNzEtYmE3MmVmNzI=, ActorId: [9:7494343237369298952:2347], ActorState: ExecuteState, TraceId: 01js2efepq75pe2m26p9n9b4cj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:59:19.785922Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:59:19.791974Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:20.214163Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:59:20.864231Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2effkd7esk2yvjd9bk1y60, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGQ0YWRjZWEtNjA1NGIzYTgtYzUwOTM3ZTItNGM4NGQ3Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7494343241664266877:3143] === CheckClustersList. 
Ok 2025-04-17T17:59:26.110541Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480 2025-04-17T17:59:26.519745Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:59:26.519776Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:27.522385Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T17:59:28.925050Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T17:59:30.079167Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-17T17:59:31.213027Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-04-17T17:59:32.367725Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710703:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1744912773295, 1744912773295, 0, 13); 2025-04-17T17:59:33.516377Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710708. Ctx: { TraceId: 01js2efw7hec2edzq8pesh3k8g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YmQ0ZjhiM2EtZmU5NjM0NGEtOTUzMTAzODEtOWI0NzlmY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:59:33.570117Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T17:59:33.570142Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T17:59:33.570154Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T17:59:33.570199Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343297498843375:4127] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2025-04-17T17:59:33.570382Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7494343297498843376:4127], Recipient [9:7494343267434071007:3343]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7494343297498843375:4127] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-17T17:59:33.570481Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7494343297498843375:4127], Recipient [9:7494343267434071007:3343]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2025-04-17T17:59:33.570577Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7494343267434071007:3343], Recipient [9:7494343297498843375:4127]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-17T17:59:33.570615Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7494343297498843375:4127] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-17T17:59:33.570679Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7494343297498843375:4127], Recipient [9:7494343267434071007:3343]: NActors::TEvents::TEvPoison 2025-04-17T17:59:33.578385Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7494343203009559447:2070], Recipient [9:7494343297498843375:4127]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-17T17:59:33.578431Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343297498843375:4127] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2025-04-17T17:59:33.782373Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7494343203009559519:2126], Recipient [9:7494343297498843375:4127]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=NWNmNmI4YTUtMmUzOWY3YS1kMjk3YmU1ZC1mMjk2Y2Y4Mg==" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-17T17:59:33.782424Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343297498843375:4127] (SourceId=A_Source_10, PreferedPartition=1) Select from the table 2025-04-17T17:59:34.145927Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7494343203009559519:2126], Recipient [9:7494343297498843375:4127]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=NWNmNmI4YTUtMmUzOWY3YS1kMjk3YmU1ZC1mMjk2Y2Y4Mg==" PreparedQuery: "6bcf96da-68ea6c91-367dd0b2-b8790104" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01js2efwzc14a40wmw4ckq8pnb" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744912773295 } items { uint64_value: 1744912773295 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 215 Received TEvChooseError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2025-04-17T17:59:34.146169Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7494343297498843375:4127] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2025-04-17T17:59:34.146220Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7494343297498843375:4127] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen 2025-04-17T17:59:34.146265Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7494343297498843375:4127] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. 2025-04-17T17:59:34.523797Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710714. Ctx: { TraceId: 01js2efx21fcmsbtwr7e3bxfch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZTc4NDlhZWYtNThlMTRiYzktNmQ3NTQwMzUtODdiZDAxZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:35.442408Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7494343306088778177:2733] TxId: 281474976710715. Ctx: { TraceId: 01js2efy079jjqbhcww7hj4fse, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YjNmN2I1OTMtYWY5YWJhZTctM2YzMzU4YzItNmU2YTUyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-04-17T17:59:35.442565Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7494343306088778181:2733], TxId: 281474976710715, task: 2. Ctx: { SessionId : ydb://session/3?node_id=9&id=YjNmN2I1OTMtYWY5YWJhZTctM2YzMzU4YzItNmU2YTUyYjI=. CustomerSuppliedId : . TraceId : 01js2efy079jjqbhcww7hj4fse. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7494343306088778177:2733], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> TModifyUserTest::ModifyUser >> KqpBatchUpdate::SimplePartitions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 14489, MsgBus: 11123 2025-04-17T17:55:03.705717Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342138375660567:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:03.705830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d97/r3tmp/tmpRYsRcK/pdisk_1.dat 2025-04-17T17:55:04.161580Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:04.166210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:04.166301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:04.169525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14489, node 1 2025-04-17T17:55:04.266788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:04.266811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:04.266821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:04.266934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11123 TClient is connected to server localhost:11123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:04.876595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:04.922586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:04.964685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:55:05.168962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.398865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.490132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:06.971445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342151260564265:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:06.971582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.233332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.264740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.294947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.325160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.366331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.401621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.432338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.497367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342155555532217:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.497424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.497885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342155555532222:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.502965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:07.513408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342155555532224:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:07.584434Z node 1 :TX_PROXY ERROR: Actor# [1:7494342155555532278:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:08.705678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342138375660567:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:08.706030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:08.709309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:55:09.902974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2e7ty84hgsgvw3n0w5fcfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzcyYmU1MGMtNDhmZDAxMTgtYzgyNjk1ZjItZTFjNzJlMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.918818Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2e7ty84hgsgvw3n0w5fcfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzcyYmU1MGMtNDhmZDAxMTgtYzgyNjk1ZjItZTFjNzJlMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.926713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2e7tyj9krztva4t8287v94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc3OTE3NjMtZDU0ZDFmZmEtMTEzZWZkMDItMzU3NzhmNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.928979Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01js2e7tyj0hhcq8tayb189ffp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNjZWM0MjQtMzAyMTdjMDktZWMwYmI2N2UtM2YxM2ZjY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.929000Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e7tyjbw3k0xxg5brxhw1v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzViYzhlMjItYWUzOTU4MjAtZmM5MWYzMTItODIxOTIxYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.938006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2e7tyjav66157xvrr3vv5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDEzNjA5MGYtMmQ5YTY2LWFiOThkODBlLTc0NmE2ZGNh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.940864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2e7tyj4sskfdckpjqzv1d9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjljNTA3ZDYtMTZmYmNmZGUtYjE4MzE1NDgtY2Y0YjM0NGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.957498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. 
Ctx: { TraceId: 01js2e7tyqdtbt799ynz1cksry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RhNDEwNDgtZmFlZjgwODktNmYyMmY3YTYtNDg5MGZhMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.958423Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01js2e7tyq8rehyd9e20xn7fhh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY3ZDhlNmUtZTA3MDg4YjUtZDY3ODAyN2EtNjJmY2NjYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.975685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01js2e7tyq7b6d6kns886psx6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRiZDQtNzRmNjUzMjUtNjUyNDRkZDItOWM0YzE2MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:09.981354Z node 1 :KQP_EXECUTER ERROR: T ... sion/3?node_id=2&id=ZmI1OTEyMzItYzdjYTljMzQtNDlhYTVhYzktYTUyZGQ3Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.928711Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727119. Ctx: { TraceId: 01js2efqwbb0ge561t9gygwjjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.929853Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727120. Ctx: { TraceId: 01js2efqwb5j2hb9j985z2sqje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiM2Y0MzQtNWYyZGMzMDgtNjE0ZDAzYjktYTQ4MTkyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.939529Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727121. Ctx: { TraceId: 01js2efqvje7ajxxhqp3j4bb88, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmI1OTEyMzItYzdjYTljMzQtNDlhYTVhYzktYTUyZGQ3Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.940621Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727124. Ctx: { TraceId: 01js2efqwb5j2hb9j985z2sqje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiM2Y0MzQtNWYyZGMzMDgtNjE0ZDAzYjktYTQ4MTkyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.941309Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727122. Ctx: { TraceId: 01js2efqwv43z906kbmxa1b187, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZThhODNhN2MtNzQ4MjY3ZTgtZDc2MmMwOGQtNjRkYjdkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.942011Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727123. Ctx: { TraceId: 01js2efqwbb0ge561t9gygwjjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.957175Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727125. Ctx: { TraceId: 01js2efqwb5j2hb9j985z2sqje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiM2Y0MzQtNWYyZGMzMDgtNjE0ZDAzYjktYTQ4MTkyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:59:28.957567Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727126. Ctx: { TraceId: 01js2efqwbb0ge561t9gygwjjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.965463Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727127. Ctx: { TraceId: 01js2efqwv43z906kbmxa1b187, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZThhODNhN2MtNzQ4MjY3ZTgtZDc2MmMwOGQtNjRkYjdkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.967875Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727128. Ctx: { TraceId: 01js2efqwb5j2hb9j985z2sqje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiM2Y0MzQtNWYyZGMzMDgtNjE0ZDAzYjktYTQ4MTkyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.967878Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727129. Ctx: { TraceId: 01js2efqxkcrjz6wgf3ggz8xda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOTk5NGQtMjU4NmE3NjUtMzhmYjg0MWYtZTRlYjgxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.975282Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727130. Ctx: { TraceId: 01js2efqwbb0ge561t9gygwjjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.985807Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727131. Ctx: { TraceId: 01js2efqwv43z906kbmxa1b187, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZThhODNhN2MtNzQ4MjY3ZTgtZDc2MmMwOGQtNjRkYjdkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.989301Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727132. Ctx: { TraceId: 01js2efqxkcrjz6wgf3ggz8xda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOTk5NGQtMjU4NmE3NjUtMzhmYjg0MWYtZTRlYjgxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:28.995618Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727133. Ctx: { TraceId: 01js2efqxkcrjz6wgf3ggz8xda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOTk5NGQtMjU4NmE3NjUtMzhmYjg0MWYtZTRlYjgxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.027396Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727134. Ctx: { TraceId: 01js2efqz7etajcxthewb8eaqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGU3NTdjODYtOWE4MTVlNDEtZjE4NTQ0OWQtNjQ2Y2I0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.038669Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727135. Ctx: { TraceId: 01js2efqzj08bhqx0bgmpjy50h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmI1OTEyMzItYzdjYTljMzQtNDlhYTVhYzktYTUyZGQ3Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.046603Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727136. 
Ctx: { TraceId: 01js2efqze2c9t6g9h7dkyvzgf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjFjODdlZjctNmU2YTFjYjgtZmM0NTYxZTUtZjIwMTI3ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.052108Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727137. Ctx: { TraceId: 01js2efr0b9c8b99h9zs53v1x7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.064539Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727138. Ctx: { TraceId: 01js2efqz7etajcxthewb8eaqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGU3NTdjODYtOWE4MTVlNDEtZjE4NTQ0OWQtNjQ2Y2I0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.075792Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727139. Ctx: { TraceId: 01js2efqz7etajcxthewb8eaqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGU3NTdjODYtOWE4MTVlNDEtZjE4NTQ0OWQtNjQ2Y2I0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.096007Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727141. Ctx: { TraceId: 01js2efqze2c9t6g9h7dkyvzgf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjFjODdlZjctNmU2YTFjYjgtZmM0NTYxZTUtZjIwMTI3ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.097115Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727140. Ctx: { TraceId: 01js2efr0vc40z5zcs5djhfc16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZThhODNhN2MtNzQ4MjY3ZTgtZDc2MmMwOGQtNjRkYjdkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.104428Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727142. Ctx: { TraceId: 01js2efr0vc40z5zcs5djhfc16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZThhODNhN2MtNzQ4MjY3ZTgtZDc2MmMwOGQtNjRkYjdkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.109910Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727143. Ctx: { TraceId: 01js2efr0vc40z5zcs5djhfc16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZThhODNhN2MtNzQ4MjY3ZTgtZDc2MmMwOGQtNjRkYjdkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:59:29.117111Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727144. Ctx: { TraceId: 01js2efr2n4q78bzr11eghsare, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiM2Y0MzQtNWYyZGMzMDgtNjE0ZDAzYjktYTQ4MTkyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.126004Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727145. Ctx: { TraceId: 01js2efr2n4q78bzr11eghsare, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiM2Y0MzQtNWYyZGMzMDgtNjE0ZDAzYjktYTQ4MTkyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.129021Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727146. 
Ctx: { TraceId: 01js2efr2v0gad2z6dw35tp84j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOTk5NGQtMjU4NmE3NjUtMzhmYjg0MWYtZTRlYjgxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.130073Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727147. Ctx: { TraceId: 01js2efr2n4q78bzr11eghsare, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiM2Y0MzQtNWYyZGMzMDgtNjE0ZDAzYjktYTQ4MTkyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.135910Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727148. Ctx: { TraceId: 01js2efr3673zbgjw8g0m6vdeh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmI1OTEyMzItYzdjYTljMzQtNDlhYTVhYzktYTUyZGQ3Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.142376Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727149. Ctx: { TraceId: 01js2efr2v0gad2z6dw35tp84j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOTk5NGQtMjU4NmE3NjUtMzhmYjg0MWYtZTRlYjgxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:59:29.147262Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727150. Ctx: { TraceId: 01js2efr3a98vz4mkcvrbmdt5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:59:29.157283Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727151. Ctx: { TraceId: 01js2efr2v0gad2z6dw35tp84j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgwOTk5NGQtMjU4NmE3NjUtMzhmYjg0MWYtZTRlYjgxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-17T17:59:29.186458Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727152. Ctx: { TraceId: 01js2efr3a98vz4mkcvrbmdt5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:29.191085Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727153. Ctx: { TraceId: 01js2efr3a98vz4mkcvrbmdt5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU0NTFjNTktODQ5Njk1NTAtY2Y4NDk4MDYtZjgyZjVmYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:59:28.918519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:28.918618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:28.918674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:28.918705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:28.918763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:28.918786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:28.918860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:28.918934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:28.919290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:29.069090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:29.069157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:29.101956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:29.102068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:29.120789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:29.158983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:29.159408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:29.160063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-04-17T17:59:29.160383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:29.189384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:29.190948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:29.191029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:29.191168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:29.191218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:29.191257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:29.191448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.213661Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:59:29.361782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:29.362000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.362190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:29.362414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:29.362473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.367683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:29.367823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:29.368008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.368082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:29.368129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:29.368198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:29.374133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.374226Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:29.374268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:29.378004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.378063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.378117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:29.378199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:29.381754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:29.395061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:29.395253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:29.396174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:29.396311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:29.396367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:29.396665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:29.396735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:29.396926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:29.397015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:29.403333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:29.403389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:29.403583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:29.403649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:29.403991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:29.404044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:29.404144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:29.404179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:29.404214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:29.404242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:29.404277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:29.404313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:29.404349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:29.404419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:29.404521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:29.404557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:29.404598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:29.406628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:29.406730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:29.406776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
9.223211Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-17T17:59:39.223359Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T17:59:39.223408Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:59:39.223470Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T17:59:39.223521Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:59:39.223578Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-17T17:59:39.223634Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T17:59:39.223681Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T17:59:39.223727Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T17:59:39.223949Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:39.227710Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T17:59:39.228168Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T17:59:39.228387Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:39.228745Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T17:59:39.229188Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 Forgetting tablet 72075186234409547 2025-04-17T17:59:39.229625Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T17:59:39.229858Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T17:59:39.231051Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-17T17:59:39.231398Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T17:59:39.231585Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:59:39.232697Z node 6 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2025-04-17T17:59:39.251715Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-04-17T17:59:39.252081Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed 
while no messages to sent at 72075186234409550 2025-04-17T17:59:39.253163Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409549 Forgetting tablet 72075186234409548 2025-04-17T17:59:39.253468Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T17:59:39.253723Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:59:39.262708Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T17:59:39.263717Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:39.263804Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:39.263995Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:59:39.264770Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:39.264832Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:39.264924Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:39.273854Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T17:59:39.273953Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T17:59:39.274074Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T17:59:39.274116Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-17T17:59:39.274812Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T17:59:39.274862Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-17T17:59:39.276888Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T17:59:39.276963Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-17T17:59:39.277222Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:59:39.277349Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T17:59:39.277747Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T17:59:39.277809Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 
2025-04-17T17:59:39.283033Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T17:59:39.283206Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T17:59:39.283285Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:782:2691] TestWaitNotification: OK eventTxId 105 2025-04-17T17:59:39.284098Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:39.284354Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 302us result status StatusPathDoesNotExist 2025-04-17T17:59:39.284534Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:59:39.285288Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:39.285479Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 209us result status StatusPathDoesNotExist 2025-04-17T17:59:39.285642Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:59:39.288900Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:39.289143Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 279us result status StatusSuccess 2025-04-17T17:59:39.289532Z node 6 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2025-04-17T17:59:21.469072Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:21.469171Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:21.487249Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-17T17:59:21.489281Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-17T17:59:21.000000Z 2025-04-17T17:59:21.489358Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\201\237\247\3442" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2025-04-17T17:59:22.623254Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:22.623319Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:22.636346Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:177:2192] 2025-04-17T17:59:22.640041Z node 2 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-17T17:59:22.640108Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:177:2192] 2025-04-17T17:59:23.991629Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:23.991708Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:24.032499Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:59:24.032751Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:24.033012Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:178:2193] 2025-04-17T17:59:24.033995Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:59:24.034214Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:24.034412Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:24.034650Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:24.035123Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:59:24.035226Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:24.035272Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:24.035329Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:24.000000Z 2025-04-17T17:59:24.035378Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:24.035435Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:178:2193] 2025-04-17T17:59:24.035505Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:59:24.035564Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:24.394704Z node 3 :PERSQUEUE INFO: new Cookie src3|af54d093-cb6bcb36-e4d6911-31874da1_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src3 2025-04-17T17:59:24.394830Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk.
Partition: 0 Got batch complete: 1 2025-04-17T17:59:24.395029Z node 3 :PERSQUEUE INFO: new Cookie src4|bcf7ba41-f8a6f846-f2f79ee3-b96aa2ea_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 2025-04-17T17:59:24.395109Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 2025-04-17T17:59:26.947021Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Create distr tx with id = 8 and act no: 9 Create distr tx with id = 10 and act no: 11 2025-04-17T17:59:27.464123Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-04-17T17:59:27.464313Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 10 2025-04-17T17:59:27.464395Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:27.464472Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:28.312388Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:29.742812Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:29.743031Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 17 Wait batch completion 2025-04-17T17:59:29.743268Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 10 Wait kv request 2025-04-17T17:59:30.015328Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-17T17:59:30.015424Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-17T17:59:30.015498Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:30.015551Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:59:30.015646Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T17:59:30.015687Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 
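
The TEvTxCalcPredicate / TEvTxCommit pairs traced above are the two phases of a distributed transaction on a PersQueue partition: predicates for several planned transactions are evaluated first, the results are batched ("Got batch complete: 17"), and only the commit phase mutates partition state and contributes to a single KV write. A minimal sketch of that flow follows; everything except the event names quoted from the log is an invented illustration, not the real TPartition actor.

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <vector>

    // Hypothetical, heavily simplified model of the two-phase flow above.
    struct TTx { uint64_t Step = 0; uint64_t TxId = 0; bool Predicate = false; };

    class TPartitionModel {
    public:
        // Phase 1: evaluate the predicate for a planned (Step, TxId) pair.
        void HandleTxCalcPredicate(uint64_t step, uint64_t txId) {
            Pending[txId] = TTx{step, txId, /*Predicate=*/true};
            std::cout << "Handle TEvTxCalcPredicate Step " << step
                      << ", TxId " << txId << "\n";
        }

        // Phase 2: commit a transaction whose predicate was already computed;
        // the mutation joins the current batch instead of writing immediately.
        void HandleTxCommit(uint64_t step, uint64_t txId) {
            auto it = Pending.find(txId);
            if (it == Pending.end() || !it->second.Predicate) return;
            Batch.push_back(it->second);
            Pending.erase(it);
            std::cout << "Handle TEvTxCommit Step " << step
                      << ", TxId " << txId << "\n";
        }

        // One KV request is issued per completed batch, not per transaction.
        void FlushBatch() {
            std::cout << "Got batch complete: " << Batch.size() << "\n";
            Batch.clear();
        }

    private:
        std::map<uint64_t, TTx> Pending;
        std::vector<TTx> Batch;
    };

    int main() {
        TPartitionModel p;
        p.HandleTxCalcPredicate(1, 0);   // "TEvTxCalcPredicate Step 1, TxId 0"
        p.HandleTxCalcPredicate(1, 10);  // "TEvTxCalcPredicate Step 1, TxId 10"
        p.HandleTxCommit(1, 10);
        p.HandleTxCommit(1, 0);
        p.FlushBatch();                  // single batched KV write
    }
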
2025-04-17T17:59:30.015797Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:30.015839Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-17T17:59:30.015885Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-17T17:59:30.036108Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:30.036269Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src4'. Message seqNo: 7. Committed seqNo: (NULL). Writing seqNo: 7. EndOffset: 50. CurOffset: 50. Offset: 50 2025-04-17T17:59:30.036401Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-04-17T17:59:30.037438Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 84 count 1 nextOffset 52 batches 1 2025-04-17T17:59:30.037526Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-04-17T17:59:30.037569Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 136 count 2 nextOffset 53 batches 1 2025-04-17T17:59:30.037615Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-04-17T17:59:30.037654Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 188 count 3 nextOffset 54 batches 1 2025-04-17T17:59:30.037732Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-04-17T17:59:30.037765Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 240 count 4 nextOffset 55 batches 1 2025-04-17T ... 
have tx writes info 2025-04-17T17:59:35.439692Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-17T17:59:35.440118Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:35.440494Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-04-17T17:59:35.441589Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-17T17:59:35.441802Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-17T17:59:35.441975Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-17T17:59:35.446956Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-17T17:59:35.447584Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-17T17:59:35.447698Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T17:59:35.447763Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T17:59:35.447821Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-17T17:59:35.000000Z 2025-04-17T17:59:35.447872Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:35.447923Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-04-17T17:59:35.448005Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-17T17:59:35.448066Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events.
Count 0 2025-04-17T17:59:35.448161Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:35.448214Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T17:59:35.448263Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T17:59:35.448681Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-04-17T17:59:35.448869Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-04-17T17:59:35.448927Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Create distr tx with id = 0 and act no: 1 2025-04-17T17:59:36.934843Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-17T17:59:36.934998Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1, TxId 3 Wait batch completion 2025-04-17T17:59:38.457902Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-17T17:59:38.458088Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:38.458147Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:38.458407Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:38.458478Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:38.458529Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:38.458576Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:38.458612Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:38.458641Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-17T17:59:38.458668Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-17T17:59:38.458705Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:38.458750Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send 
disk status response with cookie: 0 Wait immediate tx complete 2 2025-04-17T17:59:39.926913Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got batch complete: 1 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion 2025-04-17T17:59:39.927265Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-04-17T17:59:39.927426Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2025-04-17T17:59:39.927751Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:39.927850Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:39.927896Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cclient-1, m0000000000cclient-1] 2025-04-17T17:59:39.927937Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uclient-1, m0000000000uclient-1] 2025-04-17T17:59:39.927975Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:39.928021Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:39.928058Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-17T17:59:39.928084Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-17T17:59:39.928109Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-17T17:59:39.928133Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-04-17T17:59:39.928168Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:39.928212Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Send disk status response with cookie: 0 2025-04-17T17:59:39.942366Z node 5 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request.
Consumer client-1 is gone from partition 2025-04-17T17:59:39.942557Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:59:39.942656Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-17T17:59:39.942726Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-17T17:59:39.942904Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:39.942952Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:39.943003Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:39.943051Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:39.943087Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-17T17:59:39.943118Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-17T17:59:39.943154Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:39.943197Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request 2025-04-17T17:59:39.943740Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:39.943809Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request 2025-04-17T17:59:39.944340Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2025-04-17T17:59:39.944559Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 1 blobs, size 0 count 45 last offset 6, current partition end offset: 50 2025-04-17T17:59:39.944615Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. Send blob request. 
Got KV request Got KV request Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 2025-04-17T17:59:39.987049Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 4 >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageTestClean >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> TopicService::ThereAreGapsInTheOffsetRanges |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.9%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> TSubDomainTest::UserAttributes >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TSubDomainTest::FailIfAffectedSetNotInterior >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 64803, MsgBus: 28349 2025-04-17T17:54:02.270482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341874067284868:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:02.278665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d21/r3tmp/tmp1Zy0ZU/pdisk_1.dat 2025-04-17T17:54:02.980721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:03.029561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:03.029693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:03.037509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64803, node 1
2025-04-17T17:54:03.283513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:03.283534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:03.283540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:03.283646Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28349 TClient is connected to server localhost:28349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:04.082718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:04.113463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:04.301266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:54:04.490544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:54:04.573857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:54:07.111696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341895542122993:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:07.111839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:07.202275Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494341874067284868:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:54:07.202337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:54:07.520813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.571097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.600836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.642250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.684652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.757341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.798560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:54:07.890833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341895542123663:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:07.890934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:07.893176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494341895542123668:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:54:07.897189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:54:07.909454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494341895542123670:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:54:07.969465Z node 1 :TX_PROXY ERROR: Actor# [1:7494341895542123727:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27540, MsgBus: 17933 2025-04-17T17:54:18.012501Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494341944855341267:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d21/r3tmp/tmpbTNmQT/pdisk_1.dat 2025-04-17T17:54:18.090478Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:54:18.198129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:54:18.198233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:54:18.201039Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:54:18.215549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27540, node 2 2025-04-17T17:54:18.391119Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:54:18.391140Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:54:18.391148Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:54:18.391298Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17933 TClient is connected to server localhost:17933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:54:19.186100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
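
The warnings above show a benign race on first use of the workload manager: a session fails to fetch /Root/.metadata/workload_manager/pools/default (NOT_FOUND), proposes ESchemeOpCreateResourcePool, retries on "Transaction ... completed, doublechecking", and the loser of a concurrent creation gets "path exist, request accepts it", which is treated as success. A minimal sketch of that create-if-absent pattern, with all names invented for illustration (this is not the real YDB API):

    #include <iostream>
    #include <set>
    #include <string>

    // Hypothetical model of the create-if-absent pattern in the log above.
    enum class EStatus { Success, NotFound, AlreadyExists };

    struct TSchemeModel {
        std::set<std::string> Paths;

        EStatus Fetch(const std::string& path) const {
            return Paths.count(path) ? EStatus::Success : EStatus::NotFound;
        }
        EStatus Create(const std::string& path) {
            return Paths.insert(path).second ? EStatus::Success
                                             : EStatus::AlreadyExists;
        }
    };

    // "path exist, request accepts it": losing the creation race is success.
    EStatus EnsureDefaultPool(TSchemeModel& scheme) {
        const std::string pool =
            "/Root/.metadata/workload_manager/pools/default";
        if (scheme.Fetch(pool) == EStatus::Success) return EStatus::Success;
        const EStatus st = scheme.Create(pool);
        return st == EStatus::AlreadyExists ? EStatus::Success : st;
    }

    int main() {
        TSchemeModel scheme;
        EnsureDefaultPool(scheme); // first session creates the pool
        std::cout << (EnsureDefaultPool(scheme) == EStatus::Success)
                  << "\n";         // second session finds it and succeeds
    }
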
2025-04-17T17:54:19.194254Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:54:19.212898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:54:19.298526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T17:54:19.477213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... rvice] [TPoolFetcherActor] ActorId: [15:7494343128104722270:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:54.069868Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:54.162830Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.222137Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.279618Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.340664Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.395237Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.546043Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.647594Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:54.749875Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [15:7494343128104722960:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:54.749978Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:54.750426Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [15:7494343128104722965:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:54.756082Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:54.778675Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [15:7494343128104722967:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:54.833179Z node 15 :TX_PROXY ERROR: Actor# [15:7494343128104723021:3560] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:02.602582Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:59:02.602621Z node 15 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 20012, MsgBus: 23152 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d21/r3tmp/tmpnVHc9J/pdisk_1.dat 2025-04-17T17:59:13.247379Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:13.248789Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:13.267649Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:13.267760Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:13.272489Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20012, node 16 2025-04-17T17:59:13.414478Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:13.414501Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:13.414513Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:13.414712Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23152 TClient is connected to server localhost:23152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:14.423431Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:59:14.443020Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:59:14.453628Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:14.623028Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:14.985282Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:15.249303Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:20.355621Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494343241217540377:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:20.355715Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:20.504954Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:20.602260Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:21.024872Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:21.149467Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:21.255473Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:21.368261Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:21.463528Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:21.582916Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494343245512508371:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:21.583043Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:21.590417Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494343245512508376:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:21.599620Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:21.621525Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7494343245512508378:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:21.734107Z node 16 :TX_PROXY ERROR: Actor# [16:7494343245512508435:3570] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:28.219655Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:59:28.219685Z node 16 :IMPORT WARN: Table profiles were not loaded >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:59:31.358336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:31.358434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:31.358486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:31.358523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:31.358560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:31.358586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:31.358635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:31.358708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:31.358998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:31.455763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:31.455823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:31.469967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:31.470071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:31.470391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:31.490559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:31.490952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:31.491632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-04-17T17:59:31.491964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:31.497792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:31.499529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:31.499613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:31.499753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:31.499805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:31.499846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:31.500073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.508743Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:59:31.630110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:31.630394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.630627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:31.630855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:31.630904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.635277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:31.635435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:31.635698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.635766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:31.635810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:31.635844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:31.640326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.640385Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:31.640414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:31.642889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.642939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.642969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:31.643012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:31.645761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:31.651994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:31.652211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:31.653294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:31.653425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:31.653485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:31.653714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:31.653758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:31.653904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:31.653974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:31.659412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:31.659474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:31.659669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:31.659705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:31.660012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:31.660064Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:31.660160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:31.660195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:31.660227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:31.660248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:31.660275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:31.660310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:31.660343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:31.660368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:31.660436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:31.660489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:31.660515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:31.662149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:31.662322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:31.662366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
vConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-17T17:59:41.794913Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-17T17:59:41.797548Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:59:41.797811Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:59:41.797867Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:59:41.798349Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:59:41.798404Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:59:41.798460Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:59:41.843155Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-17T17:59:41.843422Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-04-17T17:59:41.843506Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-17T17:59:41.843575Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-17T17:59:41.843639Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-17T17:59:41.851189Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:41.851441Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:41.851506Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:41.851567Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-04-17T17:59:41.851639Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T17:59:41.851818Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:41.859268Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:59:41.859476Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T17:59:41.859887Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:41.860036Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 34359740523 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:41.860100Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-17T17:59:41.860454Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-17T17:59:41.860525Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-17T17:59:41.860673Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:41.860790Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:360:2334], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:59:41.871610Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:41.871698Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:59:41.871940Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:41.871997Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:203:2205], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:59:41.872404Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:41.872493Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-04-17T17:59:41.872540Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-17T17:59:41.873374Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:59:41.873505Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, 
cookie: 102 2025-04-17T17:59:41.873558Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:59:41.873622Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T17:59:41.873699Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T17:59:41.873810Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:59:41.887273Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:41.887359Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:59:41.887524Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:59:41.887576Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:59:41.887624Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:59:41.887668Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:59:41.887718Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:59:41.887813Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:299:2290] message: TxId: 102 2025-04-17T17:59:41.887879Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:59:41.887933Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:59:41.887974Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:59:41.888190Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:41.888794Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:59:41.895301Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:59:41.895384Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:505:2444] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-17T17:59:41.898762Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:41.898959Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-04-17T17:59:41.899012Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-04-17T17:59:41.899158Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 103:0, explain: Invalid 
AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-17T17:59:41.899218Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-17T17:59:41.906824Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:41.907033Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestReadRuleVersions >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> BackupRestore::RestoreExternalDataSourceWithoutSecret [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TSubDomainTest::LsLs >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> TSubDomainTest::CreateTablet >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |82.9%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> TModifyUserTest::ModifyUser [FAIL] >> TModifyUserTest::ModifyLdapUser >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestPartitionedBlobFails >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink >> KqpStats::DataQueryWithEffects+UseSink >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta >> ShowCreateView::Basic >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2025-04-17T17:59:32.129874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343294098027462:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:32.130907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015de/r3tmp/tmpdMnVaO/pdisk_1.dat 2025-04-17T17:59:32.918798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:32.951848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:32.951953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:32.970939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17399 WaitRootIsUp 'dc-1'... 
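The rejection recorded for txId 103 above shows a one-way feature flag in action: AlterExtSubDomain may add an ExternalStatisticsAggregator to a subdomain that lacks one, but once the aggregator tablet exists (TenantStatisticsAggregator 72075186233409549 in this run) a request carrying ExternalStatisticsAggregator: false is refused with StatusInvalidParameter before any state is touched. The following is a minimal, self-contained sketch of that shape of check; TAlterRequest and ValidateAlter are hypothetical names for illustration, not YDB's actual types.

    // Sketch only: models the "could only be added, not removed" validation
    // seen in the log for txId 103. Not taken from YDB sources.
    #include <iostream>
    #include <optional>
    #include <string>

    enum class EStatus { StatusAccepted, StatusInvalidParameter };

    struct TAlterRequest {
        // std::nullopt means the field was not mentioned in the ALTER at all.
        std::optional<bool> ExternalStatisticsAggregator;
    };

    EStatus ValidateAlter(bool currentlyEnabled, const TAlterRequest& req,
                          std::string& reason) {
        // Enabling (false -> true) and leaving the field untouched are fine;
        // only an explicit removal of an existing aggregator is rejected.
        if (req.ExternalStatisticsAggregator.has_value() &&
            currentlyEnabled && !*req.ExternalStatisticsAggregator) {
            reason = "ExternalStatisticsAggregator could only be added, not removed";
            return EStatus::StatusInvalidParameter;
        }
        return EStatus::StatusAccepted;
    }

    int main() {
        std::string reason;
        TAlterRequest req;
        req.ExternalStatisticsAggregator = false;  // as in the txId 103 request
        if (ValidateAlter(/*currentlyEnabled=*/true, req, reason) ==
            EStatus::StatusInvalidParameter) {
            std::cout << "StatusInvalidParameter: " << reason << "\n";
        }
        return 0;
    }

Because the check fires during Propose, the operation never ignites: the log shows IgniteOperation reporting StatusInvalidParameter and the AUDIT line for txId 103, with no subsequent state transitions.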
TClient::Ls request: dc-1 2025-04-17T17:59:33.221519Z node 1 :TX_PROXY DEBUG: actor# [1:7494343294098027558:2117] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:33.221613Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343298392995338:2438] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:33.221901Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343294098027581:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:33.222072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343298392995320:2432][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343294098027581:2130], cookie# 1 2025-04-17T17:59:33.226590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343298392995324:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343298392995321:2432], cookie# 1 2025-04-17T17:59:33.226663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343298392995325:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343298392995322:2432], cookie# 1 2025-04-17T17:59:33.226678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343298392995326:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343298392995323:2432], cookie# 1 2025-04-17T17:59:33.226715Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343289803059934:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343298392995324:2432], cookie# 1 2025-04-17T17:59:33.226739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343289803059937:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343298392995325:2432], cookie# 1 2025-04-17T17:59:33.226760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343289803059940:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343298392995326:2432], cookie# 1 2025-04-17T17:59:33.226784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343298392995324:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343289803059934:2050], cookie# 1 2025-04-17T17:59:33.226798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343298392995325:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343289803059937:2053], cookie# 1 2025-04-17T17:59:33.226824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343298392995326:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343289803059940:2056], cookie# 1 2025-04-17T17:59:33.226857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343298392995320:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343298392995321:2432], cookie# 1 2025-04-17T17:59:33.226874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343298392995320:2432][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:33.226886Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343298392995320:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7494343298392995322:2432], cookie# 1 2025-04-17T17:59:33.226903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343298392995320:2432][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:33.226921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343298392995320:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343298392995323:2432], cookie# 1 2025-04-17T17:59:33.226941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343298392995320:2432][/dc-1] Unexpected sync response: sender# [1:7494343298392995323:2432], cookie# 1 2025-04-17T17:59:33.227000Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343294098027581:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:33.235574Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343294098027581:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343298392995320:2432] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:33.235717Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343294098027581:2130], cacheItem# { Subscriber: { Subscriber: [1:7494343298392995320:2432] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T17:59:33.237914Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343298392995339:2439], recipient# [1:7494343298392995338:2438], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:33.237980Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343298392995338:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:33.290950Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343298392995338:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T17:59:33.294574Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343298392995338:2438] Handle TEvDescribeSchemeResult Forward to# [1:7494343298392995337:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
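The SCHEME_BOARD_SUBSCRIBER lines above trace a version sync against three replicas: with size# 3 and half# 1 the subscriber reports "Sync is in progress" after the first reply, declares "Sync is done" once successes exceed half (the second reply), and logs the third, late reply as "Unexpected sync response". Below is a simplified, runnable model of that majority accounting, assuming the thresholds behave as the log output suggests; the real subscriber also tracks partial results (the partial# field), which this sketch omits.

    // Sketch only: quorum accounting as implied by the subscriber log lines.
    #include <cstdio>

    struct TSyncQuorum {
        int Size;           // number of replicas asked (size# 3 in the log)
        int Successes = 0;
        int Failures = 0;
        bool Done = false;

        // Returns true exactly once, when a majority of replicas has answered.
        bool OnResponse(bool ok) {
            if (Done) {
                std::puts("Unexpected sync response");  // late reply after quorum
                return false;
            }
            if (ok) ++Successes; else ++Failures;
            const int half = Size / 2;                  // half# 1 for three replicas
            if (Successes > half || Failures > half) {
                Done = true;
                std::printf("Sync is done: size# %d, half# %d, successes# %d\n",
                            Size, half, Successes);
                return true;
            }
            std::printf("Sync is in progress: size# %d, half# %d, successes# %d\n",
                        Size, half, Successes);
            return false;
        }
    };

    int main() {
        TSyncQuorum quorum{3};
        quorum.OnResponse(true);  // first replica: still in progress
        quorum.OnResponse(true);  // second replica: quorum reached, sync done
        quorum.OnResponse(true);  // third replica: reported as unexpected
        return 0;
    }

This explains why the trace for cookie# 1 completes after two of the three TEvSyncVersionResponse messages and still consumes the third without re-running the sync.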
2025-04-17T17:59:33.334696Z node 1 :TX_PROXY DEBUG: actor# [1:7494343294098027558:2117] Handle TEvProposeTransaction 2025-04-17T17:59:33.334731Z node 1 :TX_PROXY DEBUG: actor# [1:7494343294098027558:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T17:59:33.334843Z node 1 :TX_PROXY DEBUG: actor# [1:7494343294098027558:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494343298392995346:2445] 2025-04-17T17:59:33.452205Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343298392995346:2445] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-17T17:59:33.452257Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343298392995346:2445] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:59:33.452314Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343298392995346:2445] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:59:33.452454Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343294098027581:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... NotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494343344063274415:2543] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:44.399418Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343326883404604:2125], cacheItem# { Subscriber: { Subscriber: [3:7494343344063274415:2543] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:44.399475Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343344063274436:2546], recipient# [3:7494343344063274414:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:44.399554Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343344063274437:2547], recipient# [3:7494343344063274412:2313], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true 
ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:44.399578Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494343322588436979:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494343344063274421:2543] 2025-04-17T17:59:45.008093Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343326883404339:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:45.008190Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:45.034375Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494343326883404604:2125], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:45.034536Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343326883404604:2125], cacheItem# { Subscriber: { Subscriber: [3:7494343331178372449:2509] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:45.034627Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343348358241741:2551], recipient# [3:7494343348358241740:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:45.403570Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494343326883404604:2125], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:45.403753Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343326883404604:2125], cacheItem# { Subscriber: { Subscriber: [3:7494343344063274417:2545] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 
0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:45.403854Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343348358241748:2554], recipient# [3:7494343348358241747:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.022530Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494343326883404604:2125], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.022710Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343326883404604:2125], cacheItem# { Subscriber: { Subscriber: [3:7494343331178372449:2509] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:46.022806Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343352653209054:2560], recipient# [3:7494343352653209053:2318], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.040099Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494343326883404604:2125], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.040252Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343326883404604:2125], cacheItem# { Subscriber: { Subscriber: [3:7494343331178372449:2509] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:46.040328Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343352653209056:2561], recipient# [3:7494343352653209055:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.414538Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494343326883404604:2125], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.414707Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343326883404604:2125], cacheItem# { Subscriber: { Subscriber: [3:7494343344063274417:2545] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:46.414803Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343352653209061:2562], recipient# [3:7494343352653209060:2320], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:59:38.923164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:38.923300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-04-17T17:59:38.923341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:38.931642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:38.931722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:38.931808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:38.931945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:38.932054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:38.932451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:39.092167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T17:59:39.092241Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:39.130917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:39.131129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:39.131329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:39.173134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:39.173551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:39.174262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:39.174635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:39.197796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:39.199328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:39.199408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:39.199542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:39.199601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:39.199645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:39.199859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.222566Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:59:39.410845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:39.411143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.411389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:39.411678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:39.411735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.420474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:39.420642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T17:59:39.420895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.420979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:39.421038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:39.421072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:39.424117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.424224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:39.424272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:39.431898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.431972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.432018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:39.432121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:39.437251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:39.439931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:39.440218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:39.441278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:39.441444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:39.441503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:39.441801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:39.441854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:39.442016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:39.442095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:39.444917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:39.444964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:39.445173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:39.445240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:39.445593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:39.445644Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:39.445734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:39.445817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:39.445874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:39.445910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:39.445973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:39.446015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:39.446049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:39.446077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:39.446154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:39.446208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:39.446264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:39.448168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:39.448281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:39.448320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T17:59:48.840742Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:48.840813Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-04-17T17:59:48.840858Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-04-17T17:59:48.841935Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:48.842048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T17:59:48.842088Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T17:59:48.842131Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-17T17:59:48.842501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T17:59:48.842631Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T17:59:48.848718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-17T17:59:48.848824Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:48.848929Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:400:2369], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 
18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T17:59:48.849046Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-17T17:59:48.849076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-17T17:59:48.849209Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-17T17:59:48.849242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:496:2438], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-17T17:59:48.851053Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-17T17:59:48.851667Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T17:59:48.851721Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T17:59:48.851882Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:48.851931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:48.851979Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T17:59:48.852030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:48.852091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T17:59:48.852137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T17:59:48.852178Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T17:59:48.852213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T17:59:48.852304Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T17:59:48.852693Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T17:59:48.852763Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T17:59:48.854768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T17:59:48.854842Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T17:59:48.855291Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T17:59:48.855405Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T17:59:48.855443Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:569:2509] TestWaitNotification: OK eventTxId 103 2025-04-17T17:59:48.856023Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:48.856225Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 236us result status StatusSuccess 2025-04-17T17:59:48.856591Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:48.857095Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:48.857246Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 176us result status StatusSuccess 2025-04-17T17:59:48.857543Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } 
PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:48.858121Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-04-17T17:59:48.858434Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 320us result status StatusSuccess 2025-04-17T17:59:48.858776Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered >> TPersQueueTest::CheckACLForGrpcRead [GOOD] >> TPersQueueTest::CheckKillBalancer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2025-04-17T17:59:42.343598Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343337313967676:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:42.348011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015bd/r3tmp/tmpla1MEj/pdisk_1.dat 2025-04-17T17:59:42.839221Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:42.856799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:42.856932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:42.858780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3512 WaitRootIsUp 
'dc-1'... TClient::Ls request: dc-1 2025-04-17T17:59:43.098601Z node 1 :TX_PROXY DEBUG: actor# [1:7494343337313967819:2103] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:43.098642Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343341608935397:2257] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:43.098763Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343337313967843:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:43.098823Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494343337313967843:2117], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T17:59:43.099032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:59:43.101112Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967514:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494343341608935402:2258] 2025-04-17T17:59:43.101172Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494343337313967514:2049] Subscribe: subscriber# [1:7494343341608935402:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:43.101230Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967517:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494343341608935403:2258] 2025-04-17T17:59:43.101247Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494343337313967517:2052] Subscribe: subscriber# [1:7494343341608935403:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:43.101266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967520:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494343341608935404:2258] 2025-04-17T17:59:43.101280Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494343337313967520:2055] Subscribe: subscriber# [1:7494343341608935404:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:43.101331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935402:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494343337313967514:2049] 2025-04-17T17:59:43.101374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935403:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494343337313967517:2052] 2025-04-17T17:59:43.101412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935404:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494343337313967520:2055] 2025-04-17T17:59:43.101471Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494343341608935399:2258] 2025-04-17T17:59:43.101496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7494343341608935398:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494343341608935400:2258] 2025-04-17T17:59:43.101539Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494343341608935398:2258][/dc-1] Set up state: owner# [1:7494343337313967843:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:43.101788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494343341608935401:2258] 2025-04-17T17:59:43.101838Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494343341608935398:2258][/dc-1] Path was already updated: owner# [1:7494343337313967843:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:43.101877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935402:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343341608935399:2258], cookie# 1 2025-04-17T17:59:43.101891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935403:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343341608935400:2258], cookie# 1 2025-04-17T17:59:43.101904Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935404:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343341608935401:2258], cookie# 1 2025-04-17T17:59:43.101996Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967514:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494343341608935402:2258] 2025-04-17T17:59:43.102018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967514:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343341608935402:2258], cookie# 1 2025-04-17T17:59:43.102062Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967517:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494343341608935403:2258] 2025-04-17T17:59:43.102090Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967517:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343341608935403:2258], cookie# 1 2025-04-17T17:59:43.102108Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967520:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494343341608935404:2258] 2025-04-17T17:59:43.102119Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343337313967520:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343341608935404:2258], cookie# 1 2025-04-17T17:59:43.102250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935402:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343337313967514:2049], cookie# 1 2025-04-17T17:59:43.102267Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935403:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343337313967517:2052], cookie# 1 2025-04-17T17:59:43.102281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343341608935404:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343337313967520:2055], cookie# 1 2025-04-17T17:59:43.102313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343341608935399:2258], cookie# 1 2025-04-17T17:59:43.102345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:43.102372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343341608935400:2258], cookie# 1 2025-04-17T17:59:43.102405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:43.102429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343341608935401:2258], cookie# 1 2025-04-17T17:59:43.102457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343341608935398:2258][/dc-1] Unexpected sync response: sender# [1:7494343341608935401:2258], cookie# 1 2025-04-17T17:59:43.166616Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343337313967843:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T17:59:43.166977Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343337313967843:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... athId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-04-17T17:59:47.118439Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494343358270479568:2355], recipient# [2:7494343358270479567:2354], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:47.118463Z node 2 :TX_PROXY DEBUG: Actor# [2:7494343358270479567:2354] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:47.118511Z node 2 :TX_PROXY DEBUG: Actor# [2:7494343358270479567:2354] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-04-17T17:59:47.121117Z node 2 :TX_PROXY DEBUG: Actor# [2:7494343358270479567:2354] Handle TEvDescribeSchemeResult Forward to# [2:7494343358270479566:2353] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744912786995 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744912786995 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... (TRUNCATED) 2025-04-17T17:59:47.162322Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494343353975511884:2115], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:47.162391Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [2:7494343353975511884:2115], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-04-17T17:59:47.162552Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343358270479570:2356][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:59:47.162930Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343353975511563:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7494343358270479574:2356] 2025-04-17T17:59:47.162944Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7494343353975511563:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-17T17:59:47.163003Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7494343353975511563:2049] Subscribe: subscriber# [2:7494343358270479574:2356], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:47.163035Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343353975511566:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7494343358270479575:2356] 2025-04-17T17:59:47.163042Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7494343353975511566:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-17T17:59:47.163061Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7494343353975511566:2052] Subscribe: subscriber# [2:7494343358270479575:2356], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:47.163087Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343353975511569:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7494343358270479576:2356] 2025-04-17T17:59:47.163094Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7494343353975511569:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-17T17:59:47.163112Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7494343353975511569:2055] Subscribe: subscriber# [2:7494343358270479576:2356], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 
2025-04-17T17:59:47.163144Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343358270479574:2356][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7494343353975511563:2049] 2025-04-17T17:59:47.163166Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343358270479575:2356][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7494343353975511566:2052] 2025-04-17T17:59:47.163195Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343358270479576:2356][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7494343353975511569:2055] 2025-04-17T17:59:47.163224Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343358270479570:2356][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7494343358270479571:2356] 2025-04-17T17:59:47.163265Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343358270479570:2356][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7494343358270479572:2356] 2025-04-17T17:59:47.163291Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7494343358270479570:2356][/dc-1/.metadata/initialization/migrations] Set up state: owner# [2:7494343353975511884:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:47.163314Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343358270479570:2356][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7494343358270479573:2356] 2025-04-17T17:59:47.163336Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7494343358270479570:2356][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7494343353975511884:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:47.163360Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343353975511563:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7494343358270479574:2356] 2025-04-17T17:59:47.163374Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343353975511566:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7494343358270479575:2356] 2025-04-17T17:59:47.163385Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343353975511569:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7494343358270479576:2356] 2025-04-17T17:59:47.163422Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7494343353975511884:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-04-17T17:59:47.163496Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7494343353975511884:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7494343358270479570:2356] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } 
Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:47.163569Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494343353975511884:2115], cacheItem# { Subscriber: { Subscriber: [2:7494343358270479570:2356] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:47.163628Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494343358270479577:2357], recipient# [2:7494343358270479569:2300], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |82.9%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |82.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay >> KqpParams::ImplicitParameterTypes |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> KqpQuery::QueryTimeout >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> KqpQuery::CurrentUtcTimestamp >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T17:59:42.664005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T17:59:42.664106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:42.664141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T17:59:42.664175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T17:59:42.664235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T17:59:42.664259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T17:59:42.664314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T17:59:42.664379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T17:59:42.664705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:59:42.751759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-04-17T17:59:42.751829Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:42.766172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:59:42.766412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T17:59:42.766588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T17:59:42.773355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T17:59:42.773642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T17:59:42.774323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:42.774610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T17:59:42.781469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:42.782746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:42.782805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:42.782907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T17:59:42.782948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:42.782981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T17:59:42.783155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.789982Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T17:59:42.935698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T17:59:42.935936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.936125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T17:59:42.936401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T17:59:42.936470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.938833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:42.938984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-04-17T17:59:42.939168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.939241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T17:59:42.939300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T17:59:42.939330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T17:59:42.941389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.941457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:42.941490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T17:59:42.943281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.943327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.943368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:42.943431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T17:59:42.947087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:59:42.949070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T17:59:42.949264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T17:59:42.950254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:42.950397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:42.950456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:42.950724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T17:59:42.950776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T17:59:42.950922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:42.950993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T17:59:42.953223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:42.953267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:42.953450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:42.953491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T17:59:42.953795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T17:59:42.953838Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T17:59:42.953933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:42.953962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:42.954005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T17:59:42.954053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:42.954085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T17:59:42.954155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T17:59:42.954229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T17:59:42.954259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T17:59:42.954327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:42.954368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T17:59:42.954401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T17:59:42.956253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:42.956346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T17:59:42.956379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
D INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T17:59:53.857911Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:59:53.858014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:59:53.860350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T17:59:53.860538Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T17:59:53.861303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:59:53.861669Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T17:59:53.861822Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773229 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T17:59:53.861926Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T17:59:53.862040Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:53.862094Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:59:53.862152Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-04-17T17:59:53.865436Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:59:53.866093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:53.866164Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T17:59:53.866339Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 2025-04-17T17:59:53.866636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:53.866716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T17:59:53.868647Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T17:59:53.868690Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T17:59:53.868827Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T17:59:53.868983Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T17:59:53.869015Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T17:59:53.869070Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T17:59:53.869125Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:53.869181Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-17T17:59:53.869236Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-04-17T17:59:53.870581Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:59:53.870667Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:59:53.870696Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:59:53.870728Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T17:59:53.870762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T17:59:53.878679Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:59:53.878822Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T17:59:53.878856Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T17:59:53.878893Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T17:59:53.878933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T17:59:53.879036Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T17:59:53.884134Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T17:59:53.884216Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T17:59:53.884398Z node 7 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:59:53.884464Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:59:53.884535Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T17:59:53.884581Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:59:53.884635Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T17:59:53.884685Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T17:59:53.884744Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T17:59:53.884794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T17:59:53.884898Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T17:59:53.885414Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:53.885476Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:53.885554Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T17:59:53.886028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T17:59:53.886098Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T17:59:53.886197Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T17:59:53.887797Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:59:53.888429Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T17:59:53.890917Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T17:59:53.891007Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T17:59:53.891287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T17:59:53.891342Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T17:59:53.891833Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T17:59:53.891950Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T17:59:53.892001Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [7:344:2335] TestWaitNotification: OK eventTxId 102 2025-04-17T17:59:53.892628Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T17:59:53.892868Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 284us result status StatusPathDoesNotExist 2025-04-17T17:59:53.893064Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TopicService::ThereAreGapsInTheOffsetRanges [GOOD] >> TSubDomainTest::LsAltered [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionTests::AfterRestart_1 >> TPersQueueTest::SetupWriteSession [GOOD] >> TPersQueueTest::StoreNoMoreThanXSourceIDs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2025-04-17T17:59:45.375282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343346032745283:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:45.375335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00159f/r3tmp/tmp7rTv2t/pdisk_1.dat 2025-04-17T17:59:46.076156Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:46.099116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:46.099202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:46.113262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29610 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T17:59:46.434545Z node 1 :TX_PROXY DEBUG: actor# [1:7494343346032745354:2105] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:46.434583Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343350327712948:2266] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:46.434690Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343346032745389:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.434781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343350327712930:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343346032745389:2121], cookie# 1 2025-04-17T17:59:46.436283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343350327712935:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343350327712931:2260], cookie# 1 2025-04-17T17:59:46.436313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343350327712936:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343350327712932:2260], cookie# 1 2025-04-17T17:59:46.436326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343350327712937:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343350327712933:2260], cookie# 1 2025-04-17T17:59:46.436361Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343346032745046:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343350327712935:2260], cookie# 1 2025-04-17T17:59:46.436387Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343346032745049:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343350327712936:2260], cookie# 1 2025-04-17T17:59:46.436401Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343346032745052:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343350327712937:2260], cookie# 1 2025-04-17T17:59:46.436442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343350327712935:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343346032745046:2049], cookie# 1 2025-04-17T17:59:46.436454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343350327712936:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343346032745049:2052], cookie# 1 2025-04-17T17:59:46.436466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343350327712937:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343346032745052:2055], cookie# 1 2025-04-17T17:59:46.436494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343350327712930:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343350327712931:2260], cookie# 1 2025-04-17T17:59:46.436513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343350327712930:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:46.436547Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343350327712930:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7494343350327712932:2260], cookie# 1 2025-04-17T17:59:46.436568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343350327712930:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:46.436602Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343350327712930:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343350327712933:2260], cookie# 1 2025-04-17T17:59:46.436616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343350327712930:2260][/dc-1] Unexpected sync response: sender# [1:7494343350327712933:2260], cookie# 1 2025-04-17T17:59:46.436672Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343346032745389:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:46.455499Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343346032745389:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343350327712930:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:46.455625Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343346032745389:2121], cacheItem# { Subscriber: { Subscriber: [1:7494343350327712930:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T17:59:46.470829Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343350327712949:2267], recipient# [1:7494343350327712948:2266], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:46.470914Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343350327712948:2266] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:46.573038Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343350327712948:2266] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } TClient::Ls response: 2025-04-17T17:59:46.576106Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343350327712948:2266] Handle TEvDescribeSchemeResult Forward to# [1:7494343350327712947:2265] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
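The subscriber sync exchange above (cookie# 1) shows the scheme board's majority rule in action: with size# 3 replica responses expected and half# = 1, "Sync is done" fires once successes# reaches 2, and the third, late reply is logged as an "Unexpected sync response". A minimal sketch of that quorum accounting, with the semantics inferred from these log lines only (assumed behaviour, not the NKikimr scheme board code):

#include <cstdint>
#include <iostream>

// Assumed semantics reconstructed from the log: ask Size replicas, Half is
// Size / 2, declare the sync done once Successes exceeds Half, and treat any
// reply arriving after completion (or with a stale cookie) as unexpected.
class TSyncQuorum {
public:
    TSyncQuorum(uint64_t cookie, int size) : Cookie(cookie), Size(size) {}

    void OnResponse(uint64_t cookie, bool success) {
        if (Done || cookie != Cookie) {
            std::cout << "Unexpected sync response: cookie# " << cookie << '\n';
            return;
        }
        success ? ++Successes : ++Failures;
        if (Successes > Size / 2) {
            Done = true;
            std::cout << "Sync is done: cookie# " << Cookie
                      << ", successes# " << Successes << '\n';
        } else {
            std::cout << "Sync is in progress: cookie# " << Cookie
                      << ", size# " << Size << ", half# " << Size / 2
                      << ", successes# " << Successes << '\n';
        }
    }

private:
    const uint64_t Cookie;
    const int Size;
    int Successes = 0;
    int Failures = 0;
    bool Done = false;
};

int main() {
    TSyncQuorum sync(/*cookie=*/1, /*size=*/3);
    sync.OnResponse(1, true); // in progress: successes# 1
    sync.OnResponse(1, true); // done: successes# 2 > half# 1
    sync.OnResponse(1, true); // unexpected: quorum already reached
}
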
2025-04-17T17:59:46.610657Z node 1 :TX_PROXY DEBUG: actor# [1:7494343346032745354:2105] Handle TEvProposeTransaction 2025-04-17T17:59:46.610703Z node 1 :TX_PROXY DEBUG: actor# [1:7494343346032745354:2105] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T17:59:46.610789Z node 1 :TX_PROXY DEBUG: actor# [1:7494343346032745354:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494343350327712955:2272] 2025-04-17T17:59:46.750619Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343350327712955:2272] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-17T17:59:46.750666Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343350327712955:2272] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:59:46.750734Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343350327712955:2272] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:59:46.750823Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343346032745389:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... 4912791517 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) TClient::Ls request: /dc-1 2025-04-17T17:59:52.504052Z node 2 :TX_PROXY DEBUG: actor# [2:7494343371548979341:2086] Handle TEvNavigate describe path /dc-1 2025-04-17T17:59:52.504097Z node 2 :TX_PROXY DEBUG: Actor# [2:7494343380138914426:2335] HANDLE EvNavigateScheme /dc-1 2025-04-17T17:59:52.504192Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494343371548979441:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:52.504278Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343375843947000:2261][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7494343371548979441:2115], cookie# 4 2025-04-17T17:59:52.504326Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343375843947004:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7494343375843947001:2261], cookie# 4 2025-04-17T17:59:52.504341Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343375843947005:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7494343375843947002:2261], cookie# 4 2025-04-17T17:59:52.504354Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343375843947006:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7494343375843947003:2261], cookie# 4 2025-04-17T17:59:52.504380Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343371548979123:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7494343375843947004:2261], cookie# 4 2025-04-17T17:59:52.504402Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343371548979126:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7494343375843947005:2261], cookie# 4 2025-04-17T17:59:52.504432Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7494343371548979129:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7494343375843947006:2261], cookie# 4 2025-04-17T17:59:52.504455Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343375843947004:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7494343371548979123:2049], cookie# 4 2025-04-17T17:59:52.504485Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343375843947005:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7494343371548979126:2052], cookie# 4 2025-04-17T17:59:52.504501Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7494343375843947006:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7494343371548979129:2055], cookie# 4 2025-04-17T17:59:52.504526Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343375843947000:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7494343375843947001:2261], cookie# 4 2025-04-17T17:59:52.504544Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343375843947000:2261][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:52.504555Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343375843947000:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: 
sender# [2:7494343375843947002:2261], cookie# 4 2025-04-17T17:59:52.504574Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343375843947000:2261][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:52.504591Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343375843947000:2261][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7494343375843947003:2261], cookie# 4 2025-04-17T17:59:52.504602Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7494343375843947000:2261][/dc-1] Unexpected sync response: sender# [2:7494343375843947003:2261], cookie# 4 2025-04-17T17:59:52.504653Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7494343371548979441:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:52.504712Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7494343371548979441:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7494343375843947000:2261] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744912791482 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:52.504790Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494343371548979441:2115], cacheItem# { Subscriber: { Subscriber: [2:7494343375843947000:2261] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744912791482 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-17T17:59:52.504916Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494343380138914427:2336], recipient# [2:7494343380138914426:2335], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:52.504942Z node 2 :TX_PROXY DEBUG: Actor# [2:7494343380138914426:2335] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:52.505013Z node 2 :TX_PROXY DEBUG: Actor# [2:7494343380138914426:2335] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-17T17:59:52.505468Z node 2 :TX_PROXY DEBUG: Actor# [2:7494343380138914426:2335] Handle TEvDescribeSchemeResult Forward to# [2:7494343380138914425:2334] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912791482 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912791482 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912791517 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... 
(TRUNCATED) 2025-04-17T17:59:52.684983Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494343371548979441:2115], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:52.685122Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494343371548979441:2115], cacheItem# { Subscriber: { Subscriber: [2:7494343375843947102:2325] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:52.685188Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494343380138914429:2337], recipient# [2:7494343380138914428:2300], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain >> TPartitionTests::AfterRestart_1 [GOOD] >> TModifyUserTest::ModifyUserIsEnabled [GOOD] >> TopicService::OnePartitionAndNoGapsInTheOffsets >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryReplySize >> KqpJoinOrder::CanonizedJoinOrderTPCH2 >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestore::PrefixedVectorIndex >> TPQTest::TestMaxTimeLagRewind [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::AfterRestart_1 [GOOD] Test command err: 2025-04-17T17:58:44.572337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343085945859995:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:44.572529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:58:44.591106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343084711267554:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:44.591160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:58:45.023946Z node 2 :PQ_READ_PROXY DEBUG: 
Direct read cache: : Created 2025-04-17T17:58:45.031151Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001594/r3tmp/tmpmAzYZf/pdisk_1.dat 2025-04-17T17:58:45.685278Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:58:45.689720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:58:45.789534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:45.789634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:45.798600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:45.798787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:45.802003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:58:45.808562Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:58:45.809840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8392, node 1 2025-04-17T17:58:45.874045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:46.034111Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:58:46.070882Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:58:46.071555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001594/r3tmp/yandex1GWkHS.tmp 2025-04-17T17:58:46.071564Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001594/r3tmp/yandex1GWkHS.tmp 2025-04-17T17:58:46.071692Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001594/r3tmp/yandex1GWkHS.tmp 2025-04-17T17:58:46.071804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:58:46.204295Z INFO: TTestServer started on Port 20843 GrpcPort 8392 TClient is connected to server localhost:20843 PQClient connected to localhost:8392 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:47.049789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:58:47.182086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:58:49.518325Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343085945859995:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:49.518420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:49.594394Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343084711267554:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:49.594480Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:50.748081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343111715664752:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.748200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.751150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343111715664772:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.756856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:58:50.769984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343111715664813:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.770151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:50.805567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343111715664781:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:58:51.010551Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.160392s 2025-04-17T17:58:51.010598Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.160510s 2025-04-17T17:58:51.012054Z node 1 :TX_PROXY ERROR: Actor# [1:7494343111715664864:2783] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:51.054315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:51.254783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:51.268562Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343114776039052:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:58:51.270673Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343116010632174:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:58:51.272626Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWU2ZWQ5ZDktZTdkMWFkZC0xYzIxNjFjYy0zM2E3NWIwNg==, ActorId: [1:7494343111715664749:2341], ActorState: ExecuteState, TraceId: 01js2eejkn6btpbxvkgswrfgya, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:58:51.276566Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTE4OGU4NGQtMjEzYjQ2M2UtNzgyYTRiMmYtMjJmYWRiMzU=, ActorId: [2:7494343114776039027:2316], ActorState: ExecuteState, TraceId: 01js2eejyb7g58n4h2kmk5hntc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:58:51.288816Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:58:51.289395Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:58:51.469409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation ... 
RD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:1, at schemeshard: 72057594046644480 2025-04-17T17:59:46.757209Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720685:0, at schemeshard: 72057594046644480 2025-04-17T17:59:47.464198Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720689:0, at schemeshard: 72057594046644480 2025-04-17T17:59:48.231081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:59:48.231110Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:49.267796Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976720695:0, at schemeshard: 72057594046644480 2025-04-17T17:59:50.216536Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976720700:0, at schemeshard: 72057594046644480 2025-04-17T17:59:51.614528Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720706:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1744912792922, 1744912792922, 0, 13); 2025-04-17T17:59:53.210094Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976720712. Ctx: { TraceId: 01js2egfc89y48y5am7jcwq6sj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ODBlOGQ4ZDEtYWZkZWMxZDktYzU4OTZlYjUtN2NhMmJhY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T17:59:53.256731Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T17:59:53.256754Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T17:59:53.256765Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T17:59:53.256787Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-04-17T17:59:53.256907Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [5:7494343382430430852:4192], Recipient [5:7494343348070691211:3458]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [5:7494343382430430851:4192] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-17T17:59:53.256997Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [5:7494343382430430851:4192], Recipient [5:7494343348070691211:3458]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2025-04-17T17:59:53.257060Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [5:7494343348070691211:3458], Recipient [5:7494343382430430851:4192]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-17T17:59:53.257085Z node 5 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-17T17:59:53.257150Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [5:7494343382430430851:4192], Recipient [5:7494343348070691211:3458]: NActors::TEvents::TEvPoison 2025-04-17T17:59:53.257756Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [5:7494343292236114114:2069], Recipient [5:7494343382430430851:4192]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-17T17:59:53.257785Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T17:59:53.262574Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [5:7494343292236114212:2156], Recipient [5:7494343382430430851:4192]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=5&id=MzU4NTU3ZTYtN2UyN2RjZjItMTQ1MWZjNWMtMmNlYTQ2OTQ=" NodeId: 5 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-17T17:59:53.262614Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2025-04-17T17:59:53.494077Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [5:7494343292236114212:2156], Recipient [5:7494343382430430851:4192]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=5&id=MzU4NTU3ZTYtN2UyN2RjZjItMTQ1MWZjNWMtMmNlYTQ2OTQ=" PreparedQuery: "af49df9c-f97b7a8f-b7df311c-d4956858" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01js2egfwa5wvsp3sb9xpqce9a" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744912792922 } items { uint64_value: 1744912792922 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 142 2025-04-17T17:59:53.494451Z node 5 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-04-17T17:59:53.494473Z node 5 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2025-04-17T17:59:53.494603Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [5:7494343382430430893:4192], Recipient [5:7494343348070691211:3458]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [5:7494343382430430851:4192] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-17T17:59:53.494666Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [5:7494343382430430851:4192], Recipient [5:7494343348070691211:3458]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-04-17T17:59:53.494726Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [5:7494343348070691211:3458], Recipient [5:7494343382430430851:4192]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-17T17:59:53.494770Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2025-04-17T17:59:53.495073Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [5:7494343382430430851:4192], Recipient [5:7494343348070691211:3458]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2025-04-17T17:59:53.636933Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [5:7494343292236114212:2156], Recipient [5:7494343382430430851:4192]: NKikimrKqp.TEvQueryResponse Response { SessionId: 
"ydb://session/3?node_id=5&id=MzU4NTU3ZTYtN2UyN2RjZjItMTQ1MWZjNWMtMmNlYTQ2OTQ=" PreparedQuery: "b580225e-1db8cf77-4864b5bb-614afb30" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 78 2025-04-17T17:59:53.636984Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T17:59:53.637015Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2025-04-17T17:59:53.637040Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7494343382430430851:4192] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle 2025-04-17T17:59:53.949676Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976720715. Ctx: { TraceId: 01js2egg2ycejyxt9m2qqn2tnc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NmRhY2U5NGItZmIxYjFhMzktY2ExYjEyNTMtZDY3NGZiZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:59:56.822468Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:56.822605Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:56.857718Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [7:179:2194] 2025-04-17T17:59:56.865118Z node 7 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-17T17:59:56.000000Z 2025-04-17T17:59:56.865230Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [7:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\222\241\247\3442" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestMaxTimeLagRewind [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:58:47.410997Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:47.411105Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:58:47.433608Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:47.454213Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:58:47.455258Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:58:47.458271Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:58:47.460527Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-17T17:58:47.462609Z node 1 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:58:47.469267Z node 1 :PERSQUEUE INFO: new Cookie default|9b046deb-9626442b-a8ca46bd-2a8f7888_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:58:48.429231Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:48.429315Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-04-17T17:58:48.472562Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:48.477373Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:58:48.482521Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-04-17T17:58:48.485197Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-04-17T17:58:48.498465Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-04-17T17:58:48.500496Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-04-17T17:58:48.519849Z node 2 :PERSQUEUE INFO: new Cookie default|15ff8c76-46b8c499-4a3b542d-96e5594_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:58:49.435095Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:49.435173Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] 
doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-17T17:58:49.463617Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:49.464445Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-17T17:58:49.465028Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-17T17:58:49.467372Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-17T17:58:49.468926Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-04-17T17:58:49.472299Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-04-17T17:58:49.479711Z node 3 :PERSQUEUE INFO: new Cookie default|1c6276a6-b8b5388-ce20ee41-562af3ec_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-17T17:58:50.386455Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:50.386554Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-17T17:58:50.408220Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:50.409135Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-04-17T17:58:50.409858Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-04-17T17:58:50.412792Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-04-17T17:58:50.414578Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-04-17T17:58:50.416675Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-04-17T17:58:50.422468Z node 4 :PERSQUEUE INFO: new Cookie default|7aa02041-7be5e312-60d579c7-9e1f3c15_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:58:50.423049Z node 4 :PERSQUEUE INFO: new Cookie default|ef7a811f-45e73ad4-2ef98a01-5ee2c4bd_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] Leader for TabletID 72057594037927937 is [5:107:2139] sender: [5:108:2057] recipient: [5:101:2135] 2025-04-17T17:58:51.285896Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:51.285979Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] Leader for TabletID 72057594037927938 is [5:153:2174] sender: [5:154:2057] recipient: [5:147:2170] Leader for TabletID 7205759403792793 ... 
0] Leader for TabletID 72057594037927937 is [41:107:2139] sender: [41:179:2057] recipient: [41:14:2061] 2025-04-17T17:59:56.606432Z node 41 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:56.607584Z node 41 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 58 actor [41:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 58 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 58 ReadRuleGenerations: 58 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 58 Important: false } Consumers { Name: "aaa" Generation: 58 Important: true } 2025-04-17T17:59:56.608587Z node 41 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [41:185:2198] 2025-04-17T17:59:56.611403Z node 41 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [41:185:2198] 2025-04-17T17:59:56.613669Z node 41 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [41:186:2199] 2025-04-17T17:59:56.615723Z node 41 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [41:186:2199] 2025-04-17T17:59:56.643036Z node 41 :PERSQUEUE INFO: new Cookie default|168d1db2-1d332d51-f6dad2c5-20497a5a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:56.747536Z node 41 :PERSQUEUE INFO: new Cookie default|2c3d5d-ee941b6f-44ad2648-ea475a9b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:56.849477Z node 41 :PERSQUEUE INFO: new Cookie default|44e409ac-bc4a9c4-ecba4425-27d04c23_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:56.946382Z node 41 :PERSQUEUE INFO: new Cookie default|320eaed3-dbcf714f-140532da-41631e7d_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:57.043415Z node 41 :PERSQUEUE INFO: new Cookie default|b72bb752-36361468-adb24922-33c0139b_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 2147483647 } Cookie: 123 } via pipe: [41:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 2147483647 MaxTimeLagMs: 180000 } Cookie: 123 } via pipe: [41:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 22 Count: 1 Bytes: 2147483647 MaxTimeLagMs: 180000 } Cookie: 123 } via pipe: [41:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 1 Bytes: 2147483647 MaxTimeLagMs: 1000 } Cookie: 123 } via pipe: [41:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 
Count: 1 Bytes: 2147483647 ReadTimestampMs: 120250 } Cookie: 123 } via pipe: [41:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 22 Count: 1 Bytes: 2147483647 ReadTimestampMs: 120250 } Cookie: 123 } via pipe: [41:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 1 Bytes: 2147483647 ReadTimestampMs: 299250 } Cookie: 123 } via pipe: [41:177:2192] 2025-04-17T17:59:57.286519Z node 41 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:57.295080Z node 41 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 59 actor [41:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 59 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 58 ReadRuleGenerations: 58 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 299250 Generation: 58 Important: false } Consumers { Name: "aaa" Generation: 58 Important: true } Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 2147483647 } Cookie: 123 } via pipe: [41:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:103:2057] recipient: [42:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:103:2057] recipient: [42:101:2135] Leader for TabletID 72057594037927937 is [42:107:2139] sender: [42:108:2057] recipient: [42:101:2135] 2025-04-17T17:59:57.937809Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:57.937893Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [42:149:2057] recipient: [42:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [42:149:2057] recipient: [42:147:2170] Leader for TabletID 72057594037927938 is [42:153:2174] sender: [42:154:2057] recipient: [42:147:2170] Leader for TabletID 72057594037927937 is [42:107:2139] sender: [42:177:2057] recipient: [42:14:2061] 2025-04-17T17:59:57.958744Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:57.960025Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 60 actor [42:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 60 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers 
{ Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: true } 2025-04-17T17:59:57.960992Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [42:184:2197] 2025-04-17T17:59:57.963666Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [42:184:2197] 2025-04-17T17:59:57.967760Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [42:183:2196] 2025-04-17T17:59:57.969702Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [42:183:2196] 2025-04-17T17:59:57.993711Z node 42 :PERSQUEUE INFO: new Cookie default|40f1bf96-a25b3b81-fd762d37-822cee67_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:58.177994Z node 42 :PERSQUEUE INFO: new Cookie default|17408e3f-def42a56-6cce0333-1057a71f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:58.334059Z node 42 :PERSQUEUE INFO: new Cookie default|85d74d26-6ce064c4-1e28db3b-b4283cd4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:58.436917Z node 42 :PERSQUEUE INFO: new Cookie default|bc95c26a-292dd321-3474be56-76fbfabe_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:58.538000Z node 42 :PERSQUEUE INFO: new Cookie default|a892404a-2eb48502-ac1af75e-621c74c0_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 2147483647 } Cookie: 123 } via pipe: [42:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 2147483647 MaxTimeLagMs: 180000 } Cookie: 123 } via pipe: [42:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 22 Count: 1 Bytes: 2147483647 MaxTimeLagMs: 180000 } Cookie: 123 } via pipe: [42:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 1 Bytes: 2147483647 MaxTimeLagMs: 1000 } Cookie: 123 } via pipe: [42:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 2147483647 ReadTimestampMs: 120249 } Cookie: 123 } via pipe: [42:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 22 Count: 1 Bytes: 2147483647 ReadTimestampMs: 120249 } Cookie: 123 } via pipe: [42:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 1 Bytes: 2147483647 ReadTimestampMs: 299249 } Cookie: 123 } via pipe: [42:175:2190] 2025-04-17T17:59:58.770701Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:58.776890Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 61 actor [42:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 
PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 61 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 299249 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: true }
Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 2147483647 } Cookie: 123 } via pipe: [42:175:2190]
>> TSubDomainTest::GenericCases [GOOD]
>> KqpParams::ImplicitParameterTypes [GOOD]
>> KqpParams::ImplicitSameParameterTypesQueryCacheCheck
>> TSubDomainTest::Boot
>> KqpQuery::QueryTimeout [GOOD]
>> KqpQuery::RandomNumber
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false
>> KqpQuery::CurrentUtcTimestamp [GOOD]
>> KqpQuery::DdlInDataQuery
>> KqpStats::RequestUnitForBadRequestExecute
>> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD]
>> TSubDomainTest::CreateTabletForUnknownDomain [GOOD]
>> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD]
Test command err:
2025-04-17T17:59:43.540346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343341324494591:2276];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:59:43.540569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ca/r3tmp/tmpqJ09yS/pdisk_1.dat
2025-04-17T17:59:44.515147Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:59:44.565444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:59:44.567380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:59:44.570298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T17:59:44.576763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:15541
WaitRootIsUp 'dc-1'...
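Each timestamped entry above has a fixed shape — ISO-8601 timestamp, node id, component tag, severity, free-form message — so a run of this size can be split and filtered mechanically when triaging. A minimal sketch in stdlib Python (the regex is an assumption inferred from the lines above, not a contract of the test framework):

import re
from datetime import datetime

# Shape observed in this log (assumed, not guaranteed by the framework):
#   2025-04-17T17:59:44.565444Z node 1 :HIVE WARN: <message>
ENTRY = re.compile(
    r"^(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>\S+) (?P<level>[A-Z]+): "
    r"(?P<msg>.*)$"
)

def parse_entry(line):
    """Return (timestamp, node, component, level, message), or None for
    continuation lines, test verdicts, and other non-entry text."""
    m = ENTRY.match(line)
    if m is None:
        return None
    ts = datetime.strptime(m["ts"], "%Y-%m-%dT%H:%M:%S.%fZ")
    return ts, int(m["node"]), m["component"], m["level"], m["msg"]

# A line taken verbatim from the output above:
print(parse_entry("2025-04-17T17:59:44.515147Z node 1 :IMPORT WARN: Table profiles were not loaded"))

Filtering on the parsed level (ERROR, WARN) is usually enough to surface the few interesting entries among the DEBUG noise.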
TClient::Ls request: dc-1 2025-04-17T17:59:44.789454Z node 1 :TX_PROXY DEBUG: actor# [1:7494343341324494606:2141] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:44.789516Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343345619462358:2449] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:44.789633Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343341324494631:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:44.789731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343345619462257:2369][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343341324494631:2155], cookie# 1 2025-04-17T17:59:44.791279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343345619462264:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343345619462261:2369], cookie# 1 2025-04-17T17:59:44.791312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343345619462265:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343345619462262:2369], cookie# 1 2025-04-17T17:59:44.791330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343345619462266:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343345619462263:2369], cookie# 1 2025-04-17T17:59:44.791362Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343341324494245:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343345619462264:2369], cookie# 1 2025-04-17T17:59:44.791389Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343341324494248:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343345619462265:2369], cookie# 1 2025-04-17T17:59:44.791404Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343341324494251:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343345619462266:2369], cookie# 1 2025-04-17T17:59:44.791448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343345619462264:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343341324494245:2051], cookie# 1 2025-04-17T17:59:44.791480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343345619462265:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343341324494248:2054], cookie# 1 2025-04-17T17:59:44.791519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343345619462266:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343341324494251:2057], cookie# 1 2025-04-17T17:59:44.791553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343345619462257:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343345619462261:2369], cookie# 1 2025-04-17T17:59:44.791581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343345619462257:2369][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:44.791607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343345619462257:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7494343345619462262:2369], cookie# 1 2025-04-17T17:59:44.791627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343345619462257:2369][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:44.791650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343345619462257:2369][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343345619462263:2369], cookie# 1 2025-04-17T17:59:44.791661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343345619462257:2369][/dc-1] Unexpected sync response: sender# [1:7494343345619462263:2369], cookie# 1 2025-04-17T17:59:44.791715Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343341324494631:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:44.808470Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343341324494631:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343345619462257:2369] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:44.808605Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343341324494631:2155], cacheItem# { Subscriber: { Subscriber: [1:7494343345619462257:2369] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T17:59:44.815052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343345619462359:2450], recipient# [1:7494343345619462358:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:44.815148Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343345619462358:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:44.860919Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343345619462358:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T17:59:44.864074Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343345619462358:2449] Handle TEvDescribeSchemeResult Forward to# [1:7494343345619462357:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:59:44.888057Z node 1 :TX_PROXY DEBUG: actor# [1:7494343341324494606:2141] Handle TEvProposeTransaction 2025-04-17T17:59:44.888088Z node 1 :TX_PROXY DEBUG: actor# [1:7494343341324494606:2141] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T17:59:44.888190Z node 1 :TX_PROXY DEBUG: actor# [1:7494343341324494606:2141] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494343345619462364:2454] 2025-04-17T17:59:45.019247Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343345619462364:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-17T17:59:45.019300Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343345619462364:2454] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:59:45.019378Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343345619462364:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:59:45.019471Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343341324494631:2155], request# { ErrorCount: 0 Databa ... 
d.TEvNotifyAck { Version: 0 }: sender# [4:7494343405610522852:3032] 2025-04-17T17:59:58.950904Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7494343388430652096:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7494343405610522864:3034] 2025-04-17T17:59:58.950920Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7494343388430652099:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7494343405610522853:3032] 2025-04-17T17:59:58.950935Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7494343388430652099:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7494343405610522865:3034] 2025-04-17T17:59:58.950998Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7494343388430652432:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-17T17:59:58.951484Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7494343405610522857:3033][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7494343388430652093:2050] 2025-04-17T17:59:58.951532Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7494343405610522858:3033][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7494343388430652096:2053] 2025-04-17T17:59:58.951554Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7494343405610522859:3033][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7494343388430652099:2056] 2025-04-17T17:59:58.951590Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7494343405610522846:3033][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7494343405610522854:3033] 2025-04-17T17:59:58.954285Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7494343388430652432:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7494343405610522845:3032] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:58.954444Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343388430652432:2127], cacheItem# { Subscriber: { Subscriber: [4:7494343405610522845:3032] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:58.954517Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7494343388430652432:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: 
/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-17T17:59:58.954584Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7494343388430652432:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7494343405610522847:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:58.954647Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343388430652432:2127], cacheItem# { Subscriber: { Subscriber: [4:7494343405610522847:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:58.954738Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343405610522866:3035], recipient# [4:7494343405610522844:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:58.954784Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7494343388430652093:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7494343405610522857:3033] 2025-04-17T17:59:58.954810Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7494343388430652096:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7494343405610522858:3033] 2025-04-17T17:59:58.954830Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7494343388430652099:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7494343405610522859:3033] 2025-04-17T17:59:58.956483Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7494343405610522846:3033][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7494343405610522855:3033] 2025-04-17T17:59:58.956564Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7494343405610522846:3033][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [4:7494343388430652432:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:58.956616Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7494343405610522846:3033][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7494343405610522856:3033] 2025-04-17T17:59:58.956648Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7494343405610522846:3033][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# 
[4:7494343388430652432:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:58.956670Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7494343388430652432:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-17T17:59:58.956760Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7494343388430652432:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7494343405610522846:3033] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:58.962709Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343388430652432:2127], cacheItem# { Subscriber: { Subscriber: [4:7494343405610522846:3033] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:58.974662Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343405610522870:3039], recipient# [4:7494343405610522843:2332], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:59.332642Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343388430652197:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:59.332728Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:59.346393Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494343388430652432:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:59.346532Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343388430652432:2127], cacheItem# { Subscriber: 
{ Subscriber: [4:7494343392725620344:2558] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-04-17T17:59:59.346654Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343409905490170:3042], recipient# [4:7494343409905490169:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true
>> KqpTypes::QuerySpecialTypes
>> StreamCreator::Basic
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false
>> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD]
>> RetryPolicy::TWriteSession_SeqNoShift
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD]
Test command err:
2025-04-17T17:59:42.938850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343336324048814:2211];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:59:42.938991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b4/r3tmp/tmp9klSXT/pdisk_1.dat
2025-04-17T17:59:43.795285Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:59:43.862036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:59:43.862136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:59:43.872108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:2137
WaitRootIsUp 'dc-1'...
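The ">> Suite::Test [GOOD]" verdict lines interleaved with the tablet output above are the quickest way to get a pass/fail picture of a run this size; a bare ">> Name" with no bracketed verdict only announces a test that is starting. A small tally sketch (stdlib Python; the verdict pattern is inferred from this log, and several verdicts can share one physical line in the raw stream):

import re
import sys
from collections import Counter

# ">> Suite::Test [GOOD]" (also e.g. [FAIL], [TIMEOUT]); pattern inferred
# from the text of this log, not from the ya test tooling itself.
VERDICT = re.compile(r">> (?P<test>\S+) \[(?P<verdict>[A-Z]+)\]")

def tally(lines):
    counts, suspects = Counter(), []
    for line in lines:
        for m in VERDICT.finditer(line):  # handle several verdicts per line
            counts[m["verdict"]] += 1
            if m["verdict"] != "GOOD":
                suspects.append(m["test"])
    return counts, suspects

if __name__ == "__main__":
    counts, suspects = tally(sys.stdin)
    print(dict(counts))
    for name in suspects:
        print("non-GOOD:", name)

Run as, e.g., `python tally.py < test.log` (the file name here is hypothetical).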
TClient::Ls request: dc-1 2025-04-17T17:59:44.322514Z node 1 :TX_PROXY DEBUG: actor# [1:7494343336324048900:2116] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:44.322563Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343344913983984:2443] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:44.322684Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343340619016224:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:44.322766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343340619016615:2394][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343340619016224:2130], cookie# 1 2025-04-17T17:59:44.324203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343340619016622:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343340619016619:2394], cookie# 1 2025-04-17T17:59:44.324239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343340619016623:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343340619016620:2394], cookie# 1 2025-04-17T17:59:44.324254Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343340619016624:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343340619016621:2394], cookie# 1 2025-04-17T17:59:44.324284Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343336324048579:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343340619016622:2394], cookie# 1 2025-04-17T17:59:44.324309Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343336324048582:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343340619016623:2394], cookie# 1 2025-04-17T17:59:44.324325Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343336324048585:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343340619016624:2394], cookie# 1 2025-04-17T17:59:44.324357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343340619016622:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343336324048579:2050], cookie# 1 2025-04-17T17:59:44.324372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343340619016623:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343336324048582:2053], cookie# 1 2025-04-17T17:59:44.324384Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343340619016624:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343336324048585:2056], cookie# 1 2025-04-17T17:59:44.324416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343340619016615:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343340619016619:2394], cookie# 1 2025-04-17T17:59:44.324456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343340619016615:2394][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:44.324469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343340619016615:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7494343340619016620:2394], cookie# 1 2025-04-17T17:59:44.324499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343340619016615:2394][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:44.324525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343340619016615:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343340619016621:2394], cookie# 1 2025-04-17T17:59:44.324554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343340619016615:2394][/dc-1] Unexpected sync response: sender# [1:7494343340619016621:2394], cookie# 1 2025-04-17T17:59:44.324611Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343340619016224:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:44.338482Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343340619016224:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343340619016615:2394] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:44.338617Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343340619016224:2130], cacheItem# { Subscriber: { Subscriber: [1:7494343340619016615:2394] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T17:59:44.344533Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343344913983985:2444], recipient# [1:7494343344913983984:2443], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:44.344673Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343344913983984:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:44.402946Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343344913983984:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T17:59:44.405289Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343344913983984:2443] Handle TEvDescribeSchemeResult Forward to# [1:7494343344913983983:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
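The scheme-board sync exchanges above make the replication quorum explicit: the subscriber fans one sync request out to three replicas, reports "Sync is in progress" after the first success (size# 3, half# 1), declares "Sync is done" once successes exceed half, and drops the late third reply as an "Unexpected sync response". A toy tracker mirroring that arithmetic (inferred from the log output; not YDB's actual implementation):

class SyncQuorum:
    """Toy majority tracker mirroring the scheme-board sync log above.
    Inferred from the log output; not the YDB implementation."""

    def __init__(self, size=3):
        self.size = size
        self.half = size // 2      # "half# 1" for three replicas
        self.successes = 0
        self.failures = 0
        self.done = False

    def on_reply(self, ok=True):
        if self.done:
            return "Unexpected sync response"  # the third reply in the log
        if ok:
            self.successes += 1
        else:
            self.failures += 1
        if self.successes > self.half:
            self.done = True
            return "Sync is done"
        return "Sync is in progress"

q = SyncQuorum()
print(q.on_reply())  # Sync is in progress (successes# 1)
print(q.on_reply())  # Sync is done (successes# 2)
print(q.on_reply())  # Unexpected sync response

Requiring only a majority keeps root-path resolution available while one of the three replicas is slow or down.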
2025-04-17T17:59:44.449444Z node 1 :TX_PROXY DEBUG: actor# [1:7494343336324048900:2116] Handle TEvProposeTransaction 2025-04-17T17:59:44.449474Z node 1 :TX_PROXY DEBUG: actor# [1:7494343336324048900:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T17:59:44.449568Z node 1 :TX_PROXY DEBUG: actor# [1:7494343336324048900:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494343344913983994:2449] 2025-04-17T17:59:44.561780Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343344913983994:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-17T17:59:44.561840Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343344913983994:2449] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:59:44.561933Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343344913983994:2449] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:59:44.562106Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343340619016224:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status ... avigate: self# [4:7494343387366571571:2107], cacheItem# { Subscriber: { Subscriber: [4:7494343413136375791:2360] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:00.517790Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343413136375813:2364], recipient# [4:7494343413136375787:2333], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.518308Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7494343413136375787:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:00.699121Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494343387366571571:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.699310Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343387366571571:2107], cacheItem# { Subscriber: { Subscriber: [4:7494343413136375790:2359] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:00.699376Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343387366571571:2107], cacheItem# { Subscriber: { Subscriber: [4:7494343413136375791:2360] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:00.699501Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343413136375814:2365], recipient# [4:7494343413136375787:2333], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.702690Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7494343413136375787:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:00.710797Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494343387366571571:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.710935Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343387366571571:2107], cacheItem# { Subscriber: { Subscriber: [4:7494343391661539239:2333] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:00.711030Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343413136375816:2366], recipient# [4:7494343413136375815:2336], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.938409Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494343387366571571:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.938537Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343387366571571:2107], cacheItem# { Subscriber: { Subscriber: [4:7494343391661539239:2333] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:00.938629Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343413136375818:2367], recipient# [4:7494343413136375817:2337], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown 
DomainInfo }] } 2025-04-17T18:00:00.954658Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494343387366571571:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.954778Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343387366571571:2107], cacheItem# { Subscriber: { Subscriber: [4:7494343413136375790:2359] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:00.954841Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7494343387366571571:2107], cacheItem# { Subscriber: { Subscriber: [4:7494343413136375791:2360] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:00.954961Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494343413136375819:2368], recipient# [4:7494343413136375787:2333], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:00.955369Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7494343413136375787:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> KqpLimits::TooBigKey+useSink >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> KqpStats::DataQueryMulti >> KqpQuery::YqlSyntaxV0 >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:00:00.219065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:00:00.219169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:00:00.219212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:00:00.219248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:00:00.219293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:00:00.219342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:00:00.219408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:00:00.219493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:00:00.219848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:00:00.342974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:00:00.343040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:00.369730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:00:00.371027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:00:00.371221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:00:00.388923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:00:00.389137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:00:00.389825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:00:00.390051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:00:00.406916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:00:00.408370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:00:00.408455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:00:00.408649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:00:00.408722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:00:00.408766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:00:00.408900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.427065Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:00:00.575225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:00:00.575494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.575680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:00:00.575940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:00:00.576003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.579125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:00:00.579272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:00:00.579488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.579550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:00:00.579597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:00:00.579630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:00:00.583252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.583329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:00:00.583364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:00:00.585333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.585390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.585429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:00:00.585490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:00:00.589143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:00:00.591174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:00:00.591339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:00:00.592360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:00:00.592509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:00:00.592585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:00:00.592857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:00:00.592906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:00:00.593050Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:00:00.593130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:00:00.595355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:00:00.595398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:00:00.595609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:00:00.595648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:00:00.596007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:00.596050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:00:00.596145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:00:00.596174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:00:00.596208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:00:00.596239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:00:00.596286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:00:00.596325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:00:00.596371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:00:00.596422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:00:00.596491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:00:00.596531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:00:00.596560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:00:00.598404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:00:00.598533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:00:00.598573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
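The "Change state for txid" lines above trace each suboperation through the numeric states 2 -> 3 -> 128 -> 240, which the surrounding messages associate with TCreateParts, TConfigureParts, TPropose and TDone. A minimal sketch of that progression follows; the numeric codes are taken from the log, but the enum labels and the Advance() helper are illustrative assumptions, not the schemeshard's actual types:

    #include <cstdint>
    #include <stdexcept>

    // Hypothetical labels for the numeric states seen in the log above.
    enum class ESubOpState : uint32_t {
        CreateParts    = 2,    // "Change state for txid 1:0 2 -> 3"
        ConfigureParts = 3,    // "3 -> 128"
        Propose        = 128,  // "128 -> 240"
        Done           = 240,  // "[...] TDone opId# 1:0 ProgressState"
    };

    // Advance one step, mirroring the "Change state for txid" lines.
    ESubOpState Advance(ESubOpState s) {
        switch (s) {
            case ESubOpState::CreateParts:    return ESubOpState::ConfigureParts;
            case ESubOpState::ConfigureParts: return ESubOpState::Propose;
            case ESubOpState::Propose:        return ESubOpState::Done;
            case ESubOpState::Done:           throw std::logic_error("already done");
        }
        throw std::logic_error("unknown state");
    }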
Z node 7 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [7:124:2150] sender: [7:240:2058] recipient: [7:15:2062] 2025-04-17T18:00:07.273085Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:00:07.273344Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:07.273582Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:00:07.273809Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:00:07.273871Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:07.282419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:00:07.282560Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:00:07.282788Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:07.282865Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:00:07.282922Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:00:07.282969Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:00:07.289432Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:07.289584Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:00:07.289650Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:00:07.292536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:07.292600Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:07.292659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:00:07.292730Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:00:07.292916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:00:07.299025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:00:07.299283Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:00:07.300426Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:00:07.300606Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773229 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:00:07.300676Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:00:07.306937Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:00:07.307079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:00:07.307375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:00:07.307492Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:00:07.310529Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:00:07.310600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:00:07.310838Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:00:07.310894Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:00:07.311278Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:00:07.311349Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:00:07.311516Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:00:07.311564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:00:07.311625Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:00:07.311674Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:00:07.311724Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:00:07.311777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:00:07.311824Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:00:07.318378Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-04-17T18:00:07.318531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:00:07.318596Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:00:07.318655Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:00:07.319640Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:00:07.319797Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:00:07.319847Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:00:07.319909Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:00:07.319963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:00:07.320094Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:00:07.324773Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:00:07.325378Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:00:07.326105Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] Bootstrap 2025-04-17T18:00:07.405082Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] Become StateWork (SchemeCache [7:275:2266]) 2025-04-17T18:00:07.414906Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:00:07.415157Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-04-17T18:00:07.415215Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-04-17T18:00:07.415403Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-17T18:00:07.415466Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path 
hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-17T18:00:07.416471Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:00:07.443959Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:00:07.444182Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-04-17T18:00:07.444873Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery >> StreamCreator::Basic [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> ReadLoad::ShouldReadKqp >> UpsertLoad::ShouldWriteKqpUpsert >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD] >> TPersQueueTest::TestWriteStat >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> BackupRestore::PrefixedVectorIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-04-17T18:00:04.179767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343430860919805:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:04.179823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ab6/r3tmp/tmpyOH0pn/pdisk_1.dat 2025-04-17T18:00:05.501021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:05.618481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:05.731255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:05.731362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:05.747035Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4498 TServer::EnableGrpc on GrpcPort 2640, node 1 2025-04-17T18:00:07.171882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:07.171906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:07.171915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:07.172006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:08.216369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:08.245479Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:08.274940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912808408 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912808282 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912808408 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-17T18:00:08.456368Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:00:08.456654Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:00:08.456673Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:00:08.457628Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:00:09.200236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343430860919805:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:09.200598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:09.262368Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744912808408, tx_id: 281474976710658 } } } 2025-04-17T18:00:09.263692Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:00:09.265161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:00:09.266405Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T18:00:09.266417Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T18:00:09.327737Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T18:00:09.327763Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-17T18:00:09.334857Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-17T18:00:09.467255Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7494343452335757208:2347] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T18:00:09.536727Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T18:00:09.536748Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-17T18:00:09.562274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:09.600425Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T18:00:09.600447Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744912808408 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink |83.0%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |83.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable >> KqpQuery::RowsLimitServiceOverride >> KqpStats::DataQueryMulti [GOOD] >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> TFetchRequestTests::CheckAccess [GOOD] >> PQCountersSimple::PartitionWriteQuota >> TPQCompatTest::DiscoverTopics [GOOD] >> TPQCompatTest::SetupLockSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD] Test command err: Trying to start YDB, gRPC: 18617, MsgBus: 17457 2025-04-17T17:59:49.199318Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343364338693481:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:49.206833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b08/r3tmp/tmpHiRARG/pdisk_1.dat 2025-04-17T17:59:50.093319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:50.127448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:50.127546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:50.129198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18617, node 1 2025-04-17T17:59:50.410661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:50.410684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:50.410690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:50.410781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17457 TClient is connected to server localhost:17457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:51.576176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:51.604545Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:59:51.624563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:51.901043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:52.322583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:52.450304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:54.187920Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343364338693481:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:54.187974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:54.688300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343385813531588:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:54.688384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:55.014515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.057942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.119026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.211541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.294264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.341198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.405693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.513132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343390108499563:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:55.513199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:55.515797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343390108499568:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:55.519957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:55.534490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343390108499570:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:55.620440Z node 1 :TX_PROXY ERROR: Actor# [1:7494343390108499625:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29529, MsgBus: 30047 2025-04-17T17:59:58.414923Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343403917057148:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:58.415426Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b08/r3tmp/tmpsrSTuW/pdisk_1.dat 2025-04-17T17:59:58.648669Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:58.660535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:58.660589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:58.664785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29529, node 2 2025-04-17T17:59:58.854640Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:58.854661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:58.854667Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:58.854764Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30047 TClient is connected to server localhost:30047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:59.626418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
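The TX_PROXY error just above ("path exist, request accepts it") is benign: the CREATE for /Root/.metadata/workload_manager/pools/default raced with another creator, found the path already present, and the request was accepted anyway. A hedged sketch of that accept-if-exists decision, with illustrative status names rather than the actual tx-proxy enums:

    // Hypothetical outcome handling for the "path exist, request accepts it"
    // case seen in the TX_PROXY lines above: a concurrent creator already made
    // the resource pool, so the second CREATE is treated as success.
    enum class ECreateResult { Created, AlreadyExists, Error };

    bool AcceptCreateOutcome(ECreateResult r) {
        switch (r) {
            case ECreateResult::Created:       return true;
            case ECreateResult::AlreadyExists: return true;  // accepted, not fatal
            case ECreateResult::Error:         return false;
        }
        return false;
    }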
2025-04-17T17:59:59.634801Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:59:59.644142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:59.741005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:02.913449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.049634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.116644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.148914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.243175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.311244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.379567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343425391896055:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:03.379630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:03.379806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343425391896060:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:03.383822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:03.397181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343425391896062:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:03.415597Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343403917057148:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:03.415838Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:03.491014Z node 2 :TX_PROXY ERROR: Actor# [2:7494343425391896123:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2160, MsgBus: 22255 2025-04-17T18:00:06.518680Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343440410314650:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b08/r3tmp/tmpdGvLjo/pdisk_1.dat 2025-04-17T18:00:06.613806Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:00:06.659965Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:06.698742Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:06.698829Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:06.700253Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2160, node 3 2025-04-17T18:00:06.835667Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:06.835692Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:06.835699Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:06.835798Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22255 TClient is connected to server localhost:22255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
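The connection sequence above ends with "WaitRootIsUp 'Root' success." after polling the root path with Ls requests. A generic sketch of such a readiness poll, under the assumption that checkRoot wraps an Ls("/Root")-style call; this is an illustrative pattern, not the actual TClient API:

    #include <chrono>
    #include <functional>
    #include <thread>

    // Poll until the scheme root resolves, as in the "WaitRootIsUp 'Root'" step.
    bool WaitRootIsUp(const std::function<bool()>& checkRoot,
                      std::chrono::seconds timeout = std::chrono::seconds(30)) {
        const auto deadline = std::chrono::steady_clock::now() + timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (checkRoot()) {
                return true;  // corresponds to "WaitRootIsUp 'Root' success."
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false;
    }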
2025-04-17T18:00:07.686079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:07.698990Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:07.711384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:07.803621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:08.040502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:08.145634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:10.743857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343457590185408:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:10.743951Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:10.820777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.861930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.901529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.975924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.017504Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.127103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.191263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.302334Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343461885153377:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:11.302457Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:11.306352Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343461885153382:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:11.311119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:11.327800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343461885153384:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:11.430321Z node 3 :TX_PROXY ERROR: Actor# [3:7494343461885153442:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:11.513227Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343440410314650:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:11.513296Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::InvalidJson >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::PrefixedVectorIndex [GOOD] Test command err: 2025-04-17T17:56:44.295014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342570395022202:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:44.295073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpkvm7OF/pdisk_1.dat 2025-04-17T17:56:45.015391Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4920, node 1 2025-04-17T17:56:45.063492Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:45.063511Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:45.161794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:45.161877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:45.162130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:45.162143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:45.162150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:45.162278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:45.171855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12061 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:45.614352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:47.699102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342583279925015:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:47.699231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:47.995359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/"Create temporary directory "/Root/~backup_20250417T175648" in databaseProcess "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250417T175648/table" }Backup table "/Root/~backup_20250417T175648/table" to "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table"Describe table "/Root/~backup_20250417T175648/table"Write scheme into "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table/permissions.pb"Read table "/Root/~backup_20250417T175648/table"Write data into "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table/data_00.csv"Drop table "/Root/~backup_20250417T175648/table"Remove temporary directory "/Root/~backup_20250417T175648" in database2025-04-17T17:56:48.989844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:56:49.007785Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Backup completed successfully2025-04-17T17:56:49.036759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342591869860199:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:49.036823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/" to "/Root"2025-04-17T17:56:49.135563Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table"Read scheme from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table" to "/Root/table"2025-04-17T17:56:49.227028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:56:49.234633Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342570395022202:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:49.234770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table/data_00.csv"Restore ACL "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmp0oSRRZ/table/permissions.pb"2025-04-17T17:56:49.430235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-17T17:56:51.780596Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342599619118756:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:51.780928Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpoROyyf/pdisk_1.dat 2025-04-17T17:56:52.322558Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:56:52.393639Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:52.393730Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:52.403759Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26917, node 4 2025-04-17T17:56:52.610985Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:56:52.611012Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:56:52.611019Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:56:52.611181Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30374 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:53.021045Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:56:56.780102Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494342599619118756:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:56.780188Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:57.201642Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342625388923511:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:57.201735Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:57.230611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T17:56:57.538307Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494342625388923746:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-1 ... d 2025-04-17T17:59:55.893678Z node 37 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 37, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpdkaHzt/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpdkaHzt/table"Read scheme from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpdkaHzt/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpdkaHzt/table" to "/Root/table"2025-04-17T17:59:55.989669Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpdkaHzt/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-04-17T17:59:56.247586Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T17:59:56.469714Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpdkaHzt/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpdkaHzt/table/permissions.pb"2025-04-17T17:59:56.647397Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-17T17:59:59.102960Z node 40 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[40:7494343409394095156:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:59.103073Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpTC4fnV/pdisk_1.dat 2025-04-17T17:59:59.485034Z node 40 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:59.557072Z node 40 :HIVE WARN: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:59.557211Z node 40 :HIVE WARN: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:59.567838Z node 40 :HIVE WARN: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13097, node 40 2025-04-17T17:59:59.712089Z node 40 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:59.712118Z node 40 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:59.712138Z node 40 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:59.712364Z node 40 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12044 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:00.357186Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:04.106366Z node 40 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[40:7494343409394095156:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:04.106506Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:06.018478Z node 40 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [40:7494343439458867382:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:06.018720Z node 40 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:06.164409Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/"Create temporary directory "/Root/~backup_20250417T180006" in databaseProcess "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250417T180006/table" }Backup table "/Root/~backup_20250417T180006/table" to "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table"Describe table "/Root/~backup_20250417T180006/table"Write scheme into "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table/permissions.pb"Read table "/Root/~backup_20250417T180006/table"Write data into "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table/data_00.csv"Drop table "/Root/~backup_20250417T180006/table"2025-04-17T18:00:07.484027Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037892 not found 2025-04-17T18:00:07.484078Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037894 not found 2025-04-17T18:00:07.492579Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037893 not found 2025-04-17T18:00:07.492631Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037895 not found Remove temporary directory "/Root/~backup_20250417T180006" in database2025-04-17T18:00:07.539454Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-17T18:00:07.610559Z node 40 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [40:7494343443753835995:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:07.610751Z node 40 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/" to "/Root"2025-04-17T18:00:07.827043Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037890 not found 2025-04-17T18:00:07.829311Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037889 not found 2025-04-17T18:00:07.829834Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037891 not found 2025-04-17T18:00:07.829870Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table"Read scheme from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table" to "/Root/table"2025-04-17T18:00:07.922554Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-04-17T18:00:08.221791Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:00:08.479759Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T18:00:08.772588Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-04-17T18:00:09.003303Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2025-04-17T18:00:09.042920Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037900 not found 2025-04-17T18:00:09.153496Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-17T18:00:09.298778Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037902 not found 2025-04-17T18:00:09.299195Z node 40 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 40, TabletId: 72075186224037901 not found Restore ACL "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/6873/000942/r3tmp/tmpyzdJEg/table/permissions.pb"2025-04-17T18:00:09.915975Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpLimits::TooBigQuery+useSink >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> KqpQuery::QueryCacheTtl >> KqpLimits::TooBigKey-useSink [GOOD] >> KqpLimits::TooBigColumn+useSink >> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 |83.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] >> TPersQueueTest::CheckKillBalancer [GOOD] >> TPersQueueTest::CheckDeleteTopic >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery >> TSubDomainTest::ConsistentCopyTable [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectWhereInSubquery >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> KqpStats::RequestUnitForExecute [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2025-04-17T18:00:01.301798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343418037636022:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:01.301839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00159d/r3tmp/tmprVHM2z/pdisk_1.dat 2025-04-17T18:00:02.067994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:02.068081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:02.086298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:02.089144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15791 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-17T18:00:02.416813Z node 1 :TX_PROXY DEBUG: actor# [1:7494343418037636285:2104] Handle TEvNavigate describe path dc-1 2025-04-17T18:00:02.416861Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343422332603882:2265] HANDLE EvNavigateScheme dc-1 2025-04-17T18:00:02.416993Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343418037636309:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:02.417155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343422332603865:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343418037636309:2117], cookie# 1 2025-04-17T18:00:02.418999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343422332603869:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343422332603866:2260], cookie# 1 2025-04-17T18:00:02.419040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343422332603870:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343422332603867:2260], cookie# 1 2025-04-17T18:00:02.419067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343422332603871:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343422332603868:2260], cookie# 1 2025-04-17T18:00:02.419115Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343418037635977:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343422332603869:2260], cookie# 1 2025-04-17T18:00:02.419156Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343418037635980:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343422332603870:2260], cookie# 1 2025-04-17T18:00:02.419173Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343418037635983:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343422332603871:2260], cookie# 1 2025-04-17T18:00:02.419201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343422332603869:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343418037635977:2049], cookie# 1 2025-04-17T18:00:02.419215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343422332603870:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343418037635980:2052], cookie# 1 2025-04-17T18:00:02.419228Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343422332603871:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343418037635983:2055], cookie# 1 2025-04-17T18:00:02.419268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343422332603865:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343422332603866:2260], cookie# 1 2025-04-17T18:00:02.419295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343422332603865:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:00:02.419310Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343422332603865:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343422332603867:2260], cookie# 1 2025-04-17T18:00:02.419331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343422332603865:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:00:02.419356Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343422332603865:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343422332603868:2260], cookie# 1 2025-04-17T18:00:02.419370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343422332603865:2260][/dc-1] Unexpected sync response: sender# [1:7494343422332603868:2260], cookie# 1 2025-04-17T18:00:02.419437Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343418037636309:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T18:00:02.440301Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343418037636309:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343422332603865:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:00:02.440475Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343418037636309:2117], cacheItem# { Subscriber: { Subscriber: [1:7494343422332603865:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T18:00:02.446793Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343422332603883:2266], recipient# [1:7494343422332603882:2265], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:00:02.446894Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343422332603882:2265] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:00:02.549731Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343422332603882:2265] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } TClient::Ls response: 2025-04-17T18:00:02.552658Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343422332603882:2265] Handle TEvDescribeSchemeResult Forward to# [1:7494343422332603881:2264] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success.
2025-04-17T18:00:02.586668Z node 1 :TX_PROXY DEBUG: actor# [1:7494343418037636285:2104] Handle TEvProposeTransaction 2025-04-17T18:00:02.586694Z node 1 :TX_PROXY DEBUG: actor# [1:7494343418037636285:2104] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:00:02.586772Z node 1 :TX_PROXY DEBUG: actor# [1:7494343418037636285:2104] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494343422332603890:2272] 2025-04-17T18:00:02.802517Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343422332603890:2272] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-17T18:00:02.802583Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343422332603890:2272] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:00:02.802651Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343422332603890:2272] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:00:02.802750Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343418037636309:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... sult: self# [6:7494343498114370162:2256], recipient# [6:7494343498114370137:2322], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:20.315796Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7494343498114370137:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:20.408294Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494343476639533164:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:20.408402Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:20.483393Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7494343476639533431:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:20.483528Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7494343476639533431:2116], cacheItem# { Subscriber: { Subscriber: [6:7494343480934500906:2234] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:20.483633Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7494343498114370164:2257], recipient# [6:7494343498114370163:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:20.510339Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7494343476639533431:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:20.510489Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7494343476639533431:2116], cacheItem# { Subscriber: { Subscriber: [6:7494343498114370139:2251] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:20.510540Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7494343476639533431:2116], cacheItem# { Subscriber: { Subscriber: [6:7494343498114370140:2252] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:20.510659Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7494343498114370165:2258], recipient# [6:7494343498114370137:2322], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:20.510874Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7494343498114370137:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:20.774388Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7494343476639533431:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:20.774546Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7494343476639533431:2116], cacheItem# { Subscriber: { Subscriber: [6:7494343498114370139:2251] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:20.774594Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7494343476639533431:2116], cacheItem# { Subscriber: { Subscriber: [6:7494343498114370140:2252] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:20.774706Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7494343498114370166:2259], recipient# [6:7494343498114370137:2322], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:20.780108Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7494343498114370137:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:21.246964Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7494343476639533431:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:21.247106Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7494343476639533431:2116], cacheItem# { Subscriber: { Subscriber: [6:7494343498114370141:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:21.247201Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7494343502409337464:2260], recipient# [6:7494343502409337463:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:21.247595Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> KqpParams::InvalidJson [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |83.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |83.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 29327, MsgBus: 7620 2025-04-17T18:00:02.548291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343420194984062:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:02.548348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aea/r3tmp/tmpo0vMdk/pdisk_1.dat 2025-04-17T18:00:03.311706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:03.335296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:03.335377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:03.337943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29327, node 1 2025-04-17T18:00:03.490884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:03.490903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:03.490923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:03.491038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7620 TClient is connected to server localhost:7620 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:04.279730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:04.342348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:04.636571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:00:04.907141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:05.019905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:07.312082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343441669822313:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:07.312186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:07.554348Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343420194984062:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:07.554445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:07.748561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:07.818987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:07.879382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:07.918947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:07.977883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:08.056642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:08.103837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:08.209090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343445964790279:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:08.209161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:08.209660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343445964790284:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:08.213567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:08.227229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343445964790286:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:08.296170Z node 1 :TX_PROXY ERROR: Actor# [1:7494343445964790344:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8375, MsgBus: 1334 2025-04-17T18:00:10.914399Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343456860732086:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:10.978087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aea/r3tmp/tmptvr217/pdisk_1.dat 2025-04-17T18:00:11.116727Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:11.193422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:11.193509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:11.195334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8375, node 2 2025-04-17T18:00:11.342825Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:11.342848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:11.342854Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:11.342980Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1334 TClient is connected to server localhost:1334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:11.947598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
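Note: the sequence above ("Failed to fetch pool default ... NOT_FOUND", then TPoolCreatorActor's doublechecking retry, then TX_PROXY's "path exist, request accepts it") is a benign bootstrap race: several actors race to create the default resource pool and the losers see "path exist". A hypothetical sketch of treating that outcome as success; CreatePool and its result type are invented for illustration.

// Hypothetical sketch: idempotent creation of the default resource pool.
#include <iostream>
#include <string>

enum class CreateResult { Created, AlreadyExists, Error };

CreateResult CreatePool(const std::string& path);  // stub below

bool EnsureDefaultPool(const std::string& path) {
    switch (CreatePool(path)) {
        case CreateResult::Created:       return true;   // we won the race
        case CreateResult::AlreadyExists: return true;   // someone else did
        case CreateResult::Error:         return false;  // real failure
    }
    return false;
}

CreateResult CreatePool(const std::string&) {
    // Simulate the log's outcome: the pool was created concurrently.
    return CreateResult::AlreadyExists;
}

int main() {
    bool ok = EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default");
    std::cout << (ok ? "pool available\n" : "failed\n");
}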
2025-04-17T18:00:11.983524Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:12.065524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:12.152926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:12.340355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:12.445076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:14.551339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343474040602902:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:14.551423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:14.627450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.670746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.717650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.757703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.831630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.916796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.955796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.056259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343478335570870:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.056330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.056402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343478335570876:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.060697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:15.079013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343478335570878:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:15.134723Z node 2 :TX_PROXY ERROR: Actor# [2:7494343478335570934:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:15.972539Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343456860732086:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:15.972747Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12833, MsgBus: 18001 2025-04-17T18:00:17.091678Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343487397885009:2108];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aea/r3tmp/tmpxliaS6/pdisk_1.dat 2025-04-17T18:00:17.227437Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:00:17.326962Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:17.378822Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:17.378925Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:17.383041Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12833, node 3 2025-04-17T18:00:17.470449Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:17.470466Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:17.470474Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:17.470586Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18001 TClient is connected to server localhost:18001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
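Note: the test above, KqpTypes::Time64Columns+EnableTableDatetime64, exercises tables with 64-bit date/time columns. The log does not show the actual schema, so the YQL text below is a hypothetical example of that shape, held in a C++ raw string as such tests typically do; table path and column names are invented.

// Hypothetical schema for a Datetime64/Date32 column test.
#include <iostream>
#include <string>

int main() {
    const std::string query = R"(
        CREATE TABLE `/Root/Time64Test` (
            Key Uint64,
            EventTime Datetime64,   -- 64-bit datetime column
            EventDate Date32,       -- extended-range date column
            PRIMARY KEY (Key)
        );
    )";
    std::cout << query << "\n";  // would be submitted through a YDB session
}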
2025-04-17T18:00:18.054798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.077347Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:20.791104Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343500282787505:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.799432Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.803093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:00:20.881975Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343500282787605:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.882069Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.885292Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343500282787610:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.889598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:00:20.900803Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343500282787612:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:00:21.001304Z node 3 :TX_PROXY ERROR: Actor# [3:7494343500282787663:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 24300, MsgBus: 25648 2025-04-17T18:00:01.632041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343416800634324:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:01.632092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aee/r3tmp/tmpUUSM8o/pdisk_1.dat 2025-04-17T18:00:02.120658Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:02.124507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:02.124586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:02.126333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24300, node 1 2025-04-17T18:00:02.251039Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:02.251060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:02.251075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:02.251195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25648 TClient is connected to server localhost:25648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:02.988281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
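Note: the GENERIC_ERROR compile failures below (":2:12: Error: mismatched input 'INCORRECT_STMT' ...") come from a deliberately invalid statement. The log omits the query itself, so the text below is a hypothetical reconstruction: a query whose second line carries an unknown token at the reported column reproduces exactly that class of parser error.

// Hypothetical reconstruction of the invalid query behind the errors below.
#include <string>

const std::string kInvalidQuery =
    "SELECT 1;\n"
    "           INCORRECT_STMT;\n";  // line 2, column 12: not a YQL keyword

int main() { return 0; }  // sending kInvalidQuery fails at query compile time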
2025-04-17T18:00:03.014621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.196369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:03.374405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:03.466385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:05.511578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343433980505296:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:05.511727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:06.099363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:06.167987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:06.252550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:06.287850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:06.317461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:06.391225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:06.433881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:06.523771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343438275473263:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:06.523865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:06.524154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343438275473268:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:06.528528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:06.545489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343438275473270:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:06.609974Z node 1 :TX_PROXY ERROR: Actor# [1:7494343438275473326:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:06.634492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343416800634324:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:06.634559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:08.168631Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343446865408270:2521], status: GENERIC_ERROR, issues:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-04-17T18:00:08.169972Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGE0OWZiMzYtZDEzNDA0YjAtZGNjNjJhYjMtZWMwYjAzYmQ=, ActorId: [1:7494343446865408262:2516], ActorState: ExecuteState, TraceId: 01js2egy6k9brzdb5xxm060dfb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} Trying to start YDB, gRPC: 15472, MsgBus: 10728 2025-04-17T18:00:09.016390Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343451185548961:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:09.017156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aee/r3tmp/tmpXMzmGw/pdisk_1.dat 2025-04-17T18:00:09.130975Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15472, node 2 2025-04-17T18:00:09.161924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:09.162027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:09.165080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:09.204176Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:09.204200Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:09.204207Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:09.204318Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10728 TClient is connected to server localhost:10728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 720575940466444 ... 76715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:12.901075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.000449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343464070453259:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:13.000535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:13.000751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343468365420560:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:13.004905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:13.018539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343468365420562:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:13.080122Z node 2 :TX_PROXY ERROR: Actor# [2:7494343468365420620:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:14.018511Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343451185548961:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:14.018582Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:14.144689Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343472660388258:2518], status: GENERIC_ERROR, issues:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-04-17T18:00:14.146043Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWY4ZDY0OC1lNTE4MjYyZi01NTc1MDNjMy1hOGE0NTI1MA==, ActorId: [2:7494343472660388250:2513], ActorState: ExecuteState, TraceId: 01js2eh41r3f2w5ex0qsv9f2ab, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} Trying to start YDB, gRPC: 65323, MsgBus: 23355 2025-04-17T18:00:15.017279Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343475126425647:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:15.017382Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aee/r3tmp/tmpRil372/pdisk_1.dat 2025-04-17T18:00:15.137279Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:15.165304Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:15.165382Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:15.166853Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65323, node 3 2025-04-17T18:00:15.203925Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:15.203952Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:15.203960Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:15.204049Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23355 TClient is connected to server localhost:23355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:15.711031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:15.717635Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:15.724879Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:15.842055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:16.024810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:16.122154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:18.987763Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343488011329310:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.987869Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.155207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.246259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.328666Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.415749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.473654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.568301Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.647364Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.776640Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343492306297276:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.776761Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.782355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343492306297281:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.786364Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:19.803491Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343492306297283:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:19.870907Z node 3 :TX_PROXY ERROR: Actor# [3:7494343492306297338:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:20.020643Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343475126425647:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:20.020720Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 462 Consumed units: 6 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2025-04-17T17:59:05.840783Z :HappyWay INFO: Random seed for debugging is 1744912745840734 2025-04-17T17:59:06.470046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343178913699937:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:06.470114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:59:06.816845Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:59:06.835693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343181985980222:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:06.836078Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156d/r3tmp/tmpVBXlp1/pdisk_1.dat 2025-04-17T17:59:06.965866Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:59:07.508858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:07.535869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:07.535952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:07.537876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:07.538242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:07.538340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:07.542534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:59:07.547111Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:59:07.548105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9386, node 1 2025-04-17T17:59:07.730910Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00156d/r3tmp/yandexfl5met.tmp 2025-04-17T17:59:07.730931Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00156d/r3tmp/yandexfl5met.tmp 2025-04-17T17:59:07.731112Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00156d/r3tmp/yandexfl5met.tmp 2025-04-17T17:59:07.731244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:59:07.835939Z INFO: TTestServer started on Port 26520 GrpcPort 9386 TClient is connected to server localhost:26520 PQClient connected to localhost:9386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:08.201097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T17:59:11.207500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343203460816853:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:11.207595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343203460816873:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:11.207666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:11.285304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-17T17:59:11.318205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343203460816876:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-17T17:59:11.408323Z node 2 :TX_PROXY ERROR: Actor# [2:7494343203460816904:2131] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:11.614677Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343181985980222:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:11.614786Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:11.808500Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343178913699937:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:11.808798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:11.841667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:59:11.867534Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343203460816919:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:59:11.869466Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTIwYmIxYTQtMjZkMmY3OGQtY2QxZjk2ZjQtZTNhOWFjNjg=, ActorId: [2:7494343203460816835:2307], ActorState: ExecuteState, TraceId: 01js2ef6j92jch9m6hkpr14ee1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:59:11.880117Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:59:11.884717Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343200388537485:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:59:11.886280Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRmNGQwYS00YzQ3Y2QxOC0yZTdkYTVjNS0yZmFhZDFkYg==, ActorId: [1:7494343200388537457:2337], ActorState: ExecuteState, TraceId: 01js2ef6qs7pmp29wjrg22hhpm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:59:11.886796Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:59:12.168115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:12.455828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:9386", true, true, 1000); 2025-04-17T17:59:13.060199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2ef80k8bx08rg06k37b1vc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRkNGVkOGItMzZlMWM2YmQtM2JlNjNkM2EtYzUzYzRhN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494343208978472537:2995] === CheckClustersList. Ok 2025-04-17T17:59:19.394241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:9386 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T17:59:19.610359Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:9386 MetaRequest ... 
answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic1" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic1" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-17T18:00:03.011671Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.028195Z node 5 :PERSQUEUE DEBUG: sending HasDataInfoResponse Partition: 1 Offset: 1 Deadline: 1744912813026 2025-04-17T18:00:03.030516Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [5:7494343423274529449:3800], now have 1 active actors on pipe 2025-04-17T18:00:13.030283Z node 5 :PERSQUEUE DEBUG: got HasDatainfoResponse 2025-04-17T18:00:13.034375Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: request-id-0-1 2025-04-17T18:00:13.034430Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-04-17T18:00:13.034518Z node 6 :PERSQUEUE ERROR: [PQ: 72075186224037894, Partition: 1, State: StateIdle] reading from too big offset - topic rt3.dc1--topic1 partition 1 client $without_consumer EndOffset 0 offset 1 2025-04-17T18:00:13.034556Z node 6 :PERSQUEUE DEBUG: tablet 72075186224037894 topic 'rt3.dc1--topic1' partition 1 error: trying to read from future. ReadOffset 1, 0 EndOffset 0 2025-04-17T18:00:13.034593Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvError Cookie 1, Error trying to read from future. ReadOffset 1, 0 EndOffset 0 2025-04-17T18:00:13.034609Z node 6 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: request-id-0-1 error: trying to read from future. 
ReadOffset 1, 0 EndOffset 0 2025-04-17T18:00:13.035333Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [5:7494343423274529449:3800] destroyed 2025-04-17T18:00:15.430299Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:00:15.430421Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:00:15.475455Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:00:15.476463Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [7:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T18:00:15.477256Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [7:207:2220] 2025-04-17T18:00:15.484256Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [7:207:2220] 2025-04-17T18:00:15.501199Z node 7 :PERSQUEUE INFO: new Cookie default|9d7c2824-7e526076-6a18f1d7-7d36a85a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-17T18:00:15.511523Z node 7 :PERSQUEUE INFO: new Cookie default|cc70f16c-2f325b8e-49cd8bbb-4706da9f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-17T18:00:15.875297Z node 7 :PERSQUEUE INFO: new Cookie default|742da477-578406dc-8abbfa55-6817de17_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-17T18:00:16.186877Z node 7 :PERSQUEUE INFO: new Cookie default|a380431a-485770a5-2e99b151-991bbe96_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-17T18:00:16.534063Z node 7 :PERSQUEUE INFO: new Cookie default|1244c5bd-85e63fb6-2ade7980-41ca7b00_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-17T18:00:16.847368Z node 7 :PERSQUEUE INFO: new Cookie default|81a914b5-479e99af-69881153-1267c8c0_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] **** Total histogram: ****
Interval=0ms: 1
Interval=1ms: 0
Interval=5ms: 0
Interval=10ms: 0
Interval=20ms: 0
Interval=50ms: 0
Interval=100ms: 0
Interval=500ms: 0
Interval=1000ms: 3
Interval=2500ms: 2
Interval=5000ms: 0
Interval=10000ms: 0
Interval=999999ms: 0
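The histogram above (re-sorted numerically here; the harness prints buckets in string order) shows one wait completing immediately, three landing in the 1000ms bucket and two in the 2500ms bucket. A minimal sketch of this kind of fixed-bound interval counter follows. The bucket bounds are copied from the printout, but the class and method names are hypothetical, and the rule "a sample is counted in the first bucket whose bound is >= the sample" is an assumption about the harness's semantics:

```cpp
#include <cstdio>
#include <map>
#include <vector>

// Hypothetical interval histogram; bounds match the buckets printed above.
// Assumption: a sample is counted in the first bucket whose bound >= sample.
class TIntervalHistogram {
public:
    explicit TIntervalHistogram(const std::vector<long>& boundsMs) {
        for (long b : boundsMs)
            Counts[b] = 0;                      // std::map keeps bounds sorted
    }
    void Add(long sampleMs) {
        auto it = Counts.lower_bound(sampleMs); // first bound >= sample
        if (it != Counts.end())
            ++it->second;
    }
    void Print() const {
        for (const auto& [bound, count] : Counts)
            std::printf("Interval=%ldms: %ld\n", bound, count);
    }
private:
    std::map<long, long> Counts;                // bound -> sample count
};

int main() {
    TIntervalHistogram h({0, 1, 5, 10, 20, 50, 100, 500, 1000, 2500, 5000, 10000, 999999});
    for (long ms : {0L, 600L, 700L, 900L, 1200L, 2400L})
        h.Add(ms);                              // reproduces the counts above
    h.Print();
}
```

With those six example samples the printout matches the totals in the log (1 / 3 / 2); every other bucket stays at zero.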
**** **** **** **** 2025-04-17T18:00:17.993049Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:00:17.993157Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:00:18.018291Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:00:18.019472Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T18:00:18.020443Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:206:2219] 2025-04-17T18:00:18.021698Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [8:206:2219] 2025-04-17T18:00:18.023294Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [8:207:2220] 2025-04-17T18:00:18.024149Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [8:207:2220] 2025-04-17T18:00:18.031519Z node 8 :PERSQUEUE INFO: new Cookie default|fd4be72c-2f09a53b-f2777342-c1709d25_0 generated for partition 0 topic 'topic' owner default 2025-04-17T18:00:18.040884Z node 8 :PERSQUEUE INFO: new Cookie default|96745b8a-1fe3dda1-bc79d850-c66bc283_1 generated for partition 0 topic 'topic' owner default 2025-04-17T18:00:18.058567Z node 8 :PERSQUEUE INFO: new Cookie default|f2f2f6ff-c4bf42c9-90f8d2c1-2fa23e1c_2 generated for partition 0 topic 'topic' owner default 2025-04-17T18:00:18.066716Z node 8 :PERSQUEUE INFO: new Cookie default|35d7faa8-45209795-8b2cdcc6-7b79c9dc_3 generated for partition 0 topic 'topic' owner default 2025-04-17T18:00:18.999693Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:00:18.999795Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:00:19.024938Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:00:19.025711Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [9:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } 
ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-17T18:00:19.026333Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [9:207:2220] 2025-04-17T18:00:19.029606Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [9:207:2220] 2025-04-17T18:00:19.037654Z node 9 :PERSQUEUE INFO: new Cookie default|60c90438-9af3c00d-4b51c332-28407759_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-17T18:00:19.055280Z node 9 :PERSQUEUE INFO: new Cookie default|b133a3d2-4a3328ed-7e26aca-c5162a6a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-17T18:00:19.424596Z node 9 :PERSQUEUE INFO: new Cookie default|fed5a40d-65ea81a5-a3010011-9c952578_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-17T18:00:19.732342Z node 9 :PERSQUEUE INFO: new Cookie default|161780dd-8644dea3-274c6d1-b3d03581_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 1 Captured TEvRequest, cmd write size: 1 2025-04-17T18:00:20.034649Z node 9 :PERSQUEUE INFO: new Cookie default|98f9076a-dcb67ccb-b19ce998-74d9b7e2_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-17T18:00:20.324037Z node 9 :PERSQUEUE INFO: new Cookie default|9955f9d3-6fb07f2c-d9a19898-8e338d9b_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 >> KqpQuery::NoEvaluate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 5054, MsgBus: 22320 2025-04-17T17:59:53.300444Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343381607564119:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:53.301314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001af6/r3tmp/tmpriDCSd/pdisk_1.dat 2025-04-17T17:59:53.756561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:53.793796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:53.793900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
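The PERSQUEUE errors earlier in this section ("reading from too big offset", "trying to read from future. ReadOffset 1, 0 EndOffset 0") come down to a bounds check: a consumer asked for offset 1 while the partition's end offset was still 0, i.e. nothing had been written there yet. A hedged sketch of that validation follows; the function, parameters, and message wording are illustrative, not YDB's actual internals:

```cpp
#include <cstdint>
#include <optional>
#include <string>

// Illustrative offset validation for a partition read request. In the log,
// ReadOffset 1 against EndOffset 0 triggers the "trying to read from future"
// error; the names and error text here are assumptions.
std::optional<std::string> ValidateReadOffset(uint64_t readOffset,
                                              uint64_t startOffset,
                                              uint64_t endOffset) {
    if (readOffset >= endOffset)   // nothing written at or past this offset yet
        return "trying to read from future. ReadOffset " +
               std::to_string(readOffset) + ", EndOffset " +
               std::to_string(endOffset);
    if (readOffset < startOffset)  // already removed by retention
        return "offset is below the partition start offset";
    return std::nullopt;           // request is readable
}
```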
TServer::EnableGrpc on GrpcPort 5054, node 1 2025-04-17T17:59:53.813986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:59:53.954692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:53.954711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:53.954723Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:53.954812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22320 TClient is connected to server localhost:22320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:54.576301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:54.617336Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:59:54.638210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:54.846551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.045236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.137364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:57.418868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343398787434940:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:57.419089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:57.926707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:57.990923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.034020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.081037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.129335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.183495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.249953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.288596Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343381607564119:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:58.288695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:58.358426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343403082402902:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.358528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.359361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343403082402907:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.363642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:58.376367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343403082402909:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:58.465404Z node 1 :TX_PROXY ERROR: Actor# [1:7494343403082402966:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1459, MsgBus: 20064 2025-04-17T18:00:00.978512Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343414537113427:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:00.978710Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001af6/r3tmp/tmpvr946R/pdisk_1.dat 2025-04-17T18:00:01.116003Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:01.133385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:01.133613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:01.135574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1459, node 2 2025-04-17T18:00:01.206063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:01.206086Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:01.206094Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:01.206232Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20064 TClient is connected to server localhost:20064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:01.686605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
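The KQP_WORKLOAD_SERVICE lines above show a common pattern: creating the default resource pool can race with a concurrent creator, so "path exist, request accepts it" is treated as success while "Transaction ... completed, doublechecking" is scheduled for retry. A generic sketch of that create-with-retry logic, with all names, statuses, and backoff parameters invented for illustration:

```cpp
#include <chrono>
#include <functional>
#include <thread>

// Sketch of the create-then-doublecheck pattern visible in the log: an
// attempt to create a shared object may race with a concurrent creator, so
// "already exists" counts as success and transient errors are retried.
enum class EStatus { Ok, AlreadyExists, Retryable, Fatal };

bool CreateWithRetry(const std::function<EStatus()>& create, int maxAttempts = 5) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        switch (create()) {
            case EStatus::Ok:
            case EStatus::AlreadyExists: // another actor won the race; accept it
                return true;
            case EStatus::Retryable:     // e.g. "completed, doublechecking"
                std::this_thread::sleep_for(delay);
                delay *= 2;              // exponential backoff
                break;
            case EStatus::Fatal:
                return false;
        }
    }
    return false;
}
```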
2025-04-17T18:00:01.702693Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:01.717306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:01.801786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T ... pId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.119890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.160782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.235609Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.313177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.384579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.482706Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343468882277683:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:13.482807Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:13.484195Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343468882277688:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:13.488203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:13.506923Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343468882277690:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:13.592702Z node 3 :TX_PROXY ERROR: Actor# [3:7494343468882277746:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1351, MsgBus: 10478 2025-04-17T18:00:16.235084Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343483058173154:2204];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001af6/r3tmp/tmpBppjbi/pdisk_1.dat 2025-04-17T18:00:16.257933Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:00:16.339818Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:16.356210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:16.356294Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:16.357937Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1351, node 4 2025-04-17T18:00:16.404051Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:16.404077Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:16.404086Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:16.404221Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10478 TClient is connected to server localhost:10478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
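Each of these tests starts a fresh YDB server and then blocks in WaitRootIsUp, polling TClient::Ls on 'Root' until it reports SUCCESS, as the transcript above shows. A minimal readiness-poll sketch under assumptions: clientLs stands in for the real Ls call, and the timeout and poll interval are invented defaults:

```cpp
#include <chrono>
#include <functional>
#include <string>
#include <thread>

// Sketch of a WaitRootIsUp-style readiness probe: poll the scheme ("Ls")
// until the root path answers successfully or the deadline passes.
bool WaitRootIsUp(const std::function<bool(const std::string&)>& clientLs,
                  const std::string& root,
                  std::chrono::seconds timeout = std::chrono::seconds(30)) {
    auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (clientLs(root))  // e.g. Ls("Root") returned StatusCode: SUCCESS
            return true;
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;            // cluster did not come up in time
}
```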
2025-04-17T18:00:16.956546Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:00:16.982758Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:17.000865Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:17.076548Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:17.366498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:00:17.446736Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:00:20.875504Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343500238043968:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.875583Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:21.012851Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.073386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.147876Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.192333Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343483058173154:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:21.192414Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:21.235951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.318263Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.404810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.519838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.683797Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343504533011944:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:21.683886Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:21.684259Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343504533011949:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:21.690197Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:21.703017Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343504533011951:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:21.769491Z node 4 :TX_PROXY ERROR: Actor# [4:7494343504533012006:3560] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:23.480301Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:00:23.806633Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjA1NTAyN2UtZTViNDkzMTctYjY1NjdkMGItZGNmOTFlNGY=, ActorId: [4:7494343513122946958:2518], ActorState: ExecuteState, TraceId: 01js2ehd947b9e1317hxxpjpws, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: Invalid Json value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-04-17T18:00:15.806213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:15.806907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:15.806971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001921/r3tmp/tmp9gGoUP/pdisk_1.dat 2025-04-17T18:00:17.315901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:00:17.463353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:17.546867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:17.550983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:17.571956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:17.753298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.530585Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-17T18:00:18.530712Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-17T18:00:18.603693Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-04-17T18:00:18.603794Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T18:00:18.603856Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T18:00:18.603906Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T18:00:18.603935Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T18:00:18.603960Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-17T18:00:18.649420Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=OGNlZmQ4OWUtYTU2ZWQ3ZjEtNzgxYTNiMzgtYzA0YzYyOGE= 2025-04-17T18:00:18.652124Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=MTBhYWMzMDUtYzgzOTk0YTQtZWNkNTU4MDItZThmYTY0OTk= 2025-04-17T18:00:18.653835Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=YTc3NjJhNWItMWZiMzJhNTQtNmY4NjUxZi1iZDNmM2RlYw== 2025-04-17T18:00:18.656134Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=Mjg2NTk0MzktNDQ1MjQ0ODAtMmQ4OGVlNWItMjRmZWEzZGY= 2025-04-17T18:00:18.657736Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=M2MyYzAzYS1lZGJiYzQ3OS1lNTM5YWY1LTg2YTcxODYz 2025-04-17T18:00:18.705279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.705432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.705502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.705558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.705621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.705665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.705775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.780209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:00:18.841394Z node 1 :TX_PROXY ERROR: Actor# [1:792:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:18.841962Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:18.846986Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:18.847448Z node 1 :TX_PROXY ERROR: Actor# [1:802:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:19.039384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:00:19.039654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:00:19.039716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:00:19.039819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:00:19.039905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:00:19.081853Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:23.187907Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1744912823.187830s, errors=0 2025-04-17T18:00:23.188366Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744912823187 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:00:23.202031Z node 1 :TX_PROXY ERROR: Actor# [1:973:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:23.305329Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1744912823.305265s, errors=0 2025-04-17T18:00:23.305635Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744912823305 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:00:23.324843Z node 1 :TX_PROXY ERROR: Actor# [1:1024:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:23.420888Z node 1 :TX_PROXY ERROR: Actor# [1:1074:2822] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:23.439191Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1744912823.439138s, errors=0 2025-04-17T18:00:23.439567Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744912823439 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:00:23.480932Z node 1 :TX_PROXY ERROR: Actor# [1:1113:2839] txid# 281474976715682, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:23.526302Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1744912823.526251s, errors=0 2025-04-17T18:00:23.530672Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744912823526 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:00:23.563650Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1744912823.563585s, errors=0 2025-04-17T18:00:23.564260Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744912823563 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:00:23.564325Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 4.960907s, oks# 20, errors# 0 2025-04-17T18:00:23.564538Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> KqpQuery::QueryResultsTruncated >> KqpQuery::DictJoin [GOOD] >> KqpLimits::TooBigColumn+useSink [GOOD] >> KqpLimits::TooBigColumn-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::NoEvaluate [GOOD] Test command err: Trying to start YDB, gRPC: 20090, MsgBus: 65523 2025-04-17T17:59:49.422644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343365737627278:2233];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:49.423186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aff/r3tmp/tmpmB7InL/pdisk_1.dat 2025-04-17T17:59:50.266422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:50.268882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:50.268948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:50.273028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20090, node 1 2025-04-17T17:59:50.510874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:50.510896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:50.510913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:50.511061Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65523 TClient is connected to server localhost:65523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:51.799344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
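In the UpsertLoad output above, TKqpUpsertActorMultiSession turns RowCount: 20 with Inflight: 5 into five child actors, each writing four rows ("started# 5 actors each with inflight# 4"), and the parent reports success once all children finish (oks# 20, errors# 0). A sketch of that even split; the remainder-distribution rule is an assumption, since the log only shows the evenly divisible case:

```cpp
#include <cstdio>
#include <vector>

// Sketch of the work split TKqpUpsertActorMultiSession reports above:
// RowCount: 20 with Inflight: 5 becomes 5 child actors of 4 rows each.
std::vector<int> SplitRows(int rowCount, int inflight) {
    std::vector<int> perActor(inflight, rowCount / inflight);
    for (int i = 0; i < rowCount % inflight; ++i)
        ++perActor[i]; // spread any remainder over the first actors
    return perActor;
}

int main() {
    for (int rows : SplitRows(20, 5))
        std::printf("actor gets %d rows\n", rows); // 4 rows x 5 actors
}
```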
2025-04-17T17:59:51.890143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:52.221002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:52.420573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:59:52.517556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T17:59:54.425180Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343365737627278:2233];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:54.425249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:54.462454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343387212465336:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:54.462583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:54.851096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:54.927050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:54.974350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.020106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.077880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.172592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.263167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:55.365881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343391507433312:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:55.365963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:55.366339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343391507433317:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:55.374919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:55.417293Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:59:55.419627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343391507433319:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:55.511902Z node 1 :TX_PROXY ERROR: Actor# [1:7494343391507433386:3566] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:57.054388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27855, MsgBus: 24404 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aff/r3tmp/tmp0SfekJ/pdisk_1.dat 2025-04-17T17:59:58.641231Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:58.647559Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:58.663399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:58.666366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:58.672456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27855, node 2 2025-04-17T17:59:58.862487Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:58.862512Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:58.862518Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:58.862636Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24404 TClient is connected to server localhost:24404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:59.804474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:59:59.826557Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:59:59.838323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:59.941040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644 ... oadService] [TPoolFetcherActor] ActorId: [3:7494343464829384582:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:12.156626Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:12.157019Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343464829384587:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:12.161335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:12.177430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343464829384589:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:12.265073Z node 3 :TX_PROXY ERROR: Actor# [3:7494343464829384646:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:13.553080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 18269, MsgBus: 9152 2025-04-17T18:00:16.472575Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343482057197590:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:16.472630Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aff/r3tmp/tmpbPyt0f/pdisk_1.dat 2025-04-17T18:00:16.728813Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:16.750131Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:16.758340Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:16.759659Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18269, node 4 2025-04-17T18:00:16.926814Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:16.926838Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:16.926845Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:16.926986Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9152 TClient is connected to server localhost:9152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:17.806009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:17.819646Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:17.837507Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:17.931010Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:18.349457Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:18.470049Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:21.474295Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343482057197590:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:21.474374Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:22.467490Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343507827003084:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:22.467605Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:22.577358Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:22.675417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:22.758824Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:22.841205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:22.919169Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:23.094213Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:23.197530Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:23.332784Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343512121971069:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:23.332915Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:23.333459Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343512121971074:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:23.339321Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:23.379757Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343512121971076:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:23.449655Z node 4 :TX_PROXY ERROR: Actor# [4:7494343512121971140:3569] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:25.433347Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343520711906082:2526], status: UNSUPPORTED, issues:
: Error: Default error
:7:24: Error: EVALUATE IF is not supported in YDB queries., code: 2030
2025-04-17T18:00:25.434508Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTA0ODgzNWYtYjRjZGNiMzEtM2E0MmVhZmMtMmQ2MzdmYjE=, ActorId: [4:7494343520711906074:2521], ActorState: ExecuteState, TraceId: 01js2ehf0819wtvxcgqgejp86h, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id:
2025-04-17T18:00:25.516102Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343520711906106:2529], status: UNSUPPORTED, issues:
: Error: Default error
:4:28: Error: EVALUATE is not supported in YDB queries., code: 2030
2025-04-17T18:00:25.518427Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTA0ODgzNWYtYjRjZGNiMzEtM2E0MmVhZmMtMmQ2MzdmYjE=, ActorId: [4:7494343520711906074:2521], ActorState: ExecuteState, TraceId: 01js2ehf3d0e0cswtbhp1rvgg8, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id:
2025-04-17T18:00:25.758045Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343520711906140:2535], status: UNSUPPORTED, issues:
: Error: Default error
:8:78: Error: ATOM evaluation is not supported in YDB queries., code: 2030
2025-04-17T18:00:25.759211Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTA0ODgzNWYtYjRjZGNiMzEtM2E0MmVhZmMtMmQ2MzdmYjE=, ActorId: [4:7494343520711906074:2521], ActorState: ExecuteState, TraceId: 01js2ehfafapq6v524ehym4mdd, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id:
>> KqpBatchDelete::ManyPartitions_2 [GOOD]
>> TTopicYqlTest::DropTopicYql
>> KqpLimits::DatashardProgramSize+useSink
>> KqpLimits::OutOfSpaceBulkUpsertFail
>> KqpQuery::QueryClientTimeout
>> KqpLimits::KqpMkqlMemoryLimitException
>> KqpExplain::LimitOffset
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 6125, MsgBus: 22344
2025-04-17T17:59:53.946505Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343382367453703:2190];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:59:53.946562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001af2/r3tmp/tmp74eATy/pdisk_1.dat
2025-04-17T17:59:54.520395Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:59:54.576256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:59:54.576381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:59:54.578344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6125, node 1
2025-04-17T17:59:54.794665Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:59:54.794684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:59:54.794692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:59:54.794812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22344
TClient is connected to server localhost:22344
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
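The three UNSUPPORTED compile errors above (code 2030) are the KQP compiler rejecting YQL's compile-time evaluation constructs: EVALUATE IF, EVALUATE, and ATOM evaluation are script-level features that transactional YDB queries may not use. A minimal hedged sketch of the shape that trips the first of them (the actual test queries are not shown in this log; the table and action names are invented):

    -- Hypothetical YQL: EVALUATE IF runs an action at compile time, which a
    -- YDB data query rejects with "EVALUATE IF is not supported", code 2030.
    DEFINE ACTION $insert_row() AS
        UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "one");
    END DEFINE;

    $flag = 1 > 0;
    EVALUATE IF $flag DO $insert_row();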
2025-04-17T17:59:55.366353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.410136Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:59:55.426123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.667219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.963109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:56.082276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:57.942638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343399547324553:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:57.942769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.248503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.291164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.329656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.364887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.442468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.532035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.576600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.677047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343403842292519:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.677164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.677434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343403842292524:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.681895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:58.712329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343403842292526:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:58.799041Z node 1 :TX_PROXY ERROR: Actor# [1:7494343403842292584:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:58.952191Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343382367453703:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:58.952353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27874, MsgBus: 9790 2025-04-17T18:00:01.381795Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343414858089682:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:01.381861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001af2/r3tmp/tmpx5cYeH/pdisk_1.dat 2025-04-17T18:00:01.528684Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27874, node 2 2025-04-17T18:00:01.564151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:01.564241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:01.570717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:01.650758Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:01.650784Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:01.650792Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:01.650899Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9790 TClient is connected to server localhost:9790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:00:02.263806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:02.278921Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:02.300055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:02.415350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T1 ... tatus: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:13.913593Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.016392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.079595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.137835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.217353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.284153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.400285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:14.497520Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343474185817245:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:14.497586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:14.497777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343474185817250:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:14.502025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:14.520644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343474185817252:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:14.577984Z node 3 :TX_PROXY ERROR: Actor# [3:7494343474185817308:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25355, MsgBus: 22150 2025-04-17T18:00:18.089216Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343488205076473:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:18.089333Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001af2/r3tmp/tmpqcmXur/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25355, node 4 2025-04-17T18:00:18.381403Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:18.450938Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:00:18.450990Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:00:18.473512Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:18.473617Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:18.474693Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:18.538841Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:18.538864Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:18.538872Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:18.539004Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22150 TClient is connected to server localhost:22150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:19.378331Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
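The repeated "ESchemeOpCreateTable ... waiting..." pairs throughout this log are the test fixture issuing one CREATE TABLE per test table and blocking until the schemeshard transaction completes. As an illustration only (table name and columns invented, not read from the fixture), each such suboperation corresponds to DDL of roughly this shape:

    -- Hypothetical YQL DDL of the kind behind each ESchemeOpCreateTable entry.
    CREATE TABLE `/Root/ExampleTable` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );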
2025-04-17T18:00:19.385107Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:19.390048Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:19.526462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:19.864661Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:20.031880Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.092572Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343488205076473:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:23.092649Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:24.010268Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343509679914733:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.010372Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.067929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.138485Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.188860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.232982Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.294744Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.383471Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.479479Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.600449Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343513974882705:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.600555Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.600887Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343513974882710:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.605560Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:24.622999Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343513974882712:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:00:24.699250Z node 4 :TX_PROXY ERROR: Actor# [4:7494343513974882767:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpQuery::SelectWhereInSubquery [GOOD]
>> KqpQuery::SelectCountAsteriskFromVar
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test
>> ShowCreateView::Basic [GOOD]
>> ShowCreateView::FromTable
>> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD]
>> RetryPolicy::TWriteSession_SwitchBackToLocalCluster
>> KqpQuery::QueryCacheTtl [GOOD]
>> KqpQuery::QueryCacheInvalidate
>> KqpQuery::PreparedQueryInvalidate
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 7874, MsgBus: 15842
2025-04-17T17:57:16.029804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342707132576998:2068];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:57:16.058411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c8e/r3tmp/tmpkaINol/pdisk_1.dat
2025-04-17T17:57:16.819214Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:57:16.830143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:57:16.830252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:57:16.843335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7874, node 1
2025-04-17T17:57:17.059086Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:57:17.059109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:57:17.059116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:57:17.059215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15842
TClient is connected to server localhost:15842
WaitRootIsUp 'Root'...
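Judging only by their names, KqpQuery::SelectWhereInSubquery and KqpQuery::UpdateWhereInSubquery in the list above exercise WHERE ... IN (subquery) predicates; a hedged sketch of that pattern (tables, columns, and values invented):

    -- Hypothetical YQL for the pattern the *WhereInSubquery tests appear to cover.
    SELECT Key, Value FROM `/Root/KeyValue`
    WHERE Key IN (SELECT Key FROM `/Root/KeyValueLargePartition` WHERE Value = "x");

    UPDATE `/Root/KeyValue` SET Value = "updated"
    WHERE Key IN (SELECT Key FROM `/Root/KeyValueLargePartition` WHERE Value = "x");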
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:18.093212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:18.105812Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:18.118634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:18.344041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:18.701105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:18.892549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:21.028268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342707132576998:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:21.028368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:21.051434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342728607415267:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:21.051560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:21.420118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.475330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.527977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.574555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.615045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.670599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.761071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.950777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342728607415942:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:21.950864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:21.951249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342728607415948:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:21.956161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:57:21.984040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342728607415950:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:57:22.074465Z node 1 :TX_PROXY ERROR: Actor# [1:7494342732902383307:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:23.243002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 9415, MsgBus: 15976 2025-04-17T17:57:31.027390Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342769577537501:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:31.077835Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c8e/r3tmp/tmpOS4qnR/pdisk_1.dat 2025-04-17T17:57:31.241657Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:31.258579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:31.258678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:31.265125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9415, node 2 2025-04-17T17:57:31.438886Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:57:31.438911Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:57:31.438917Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:57:31.439054Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15976 TClient is connected to server localhost:15976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:32.096115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:57:32.102628Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T17:57:32.109094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T ... k, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.137952Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.234262Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.302898Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.354625Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.422081Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.572416Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.692678Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494343423358565560:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:03.692843Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:03.693607Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494343423358565565:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:03.701450Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:03.730371Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494343423358565567:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:03.793780Z node 11 :TX_PROXY ERROR: Actor# [11:7494343423358565623:3578] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:06.036075Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 14606, MsgBus: 24499 2025-04-17T18:00:12.112195Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494343462494435535:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:12.113408Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c8e/r3tmp/tmpseets9/pdisk_1.dat 2025-04-17T18:00:12.459521Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:12.544295Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:12.544450Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:12.546086Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14606, node 12 2025-04-17T18:00:12.824950Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:12.824979Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:12.824991Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:12.825164Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24499 TClient is connected to server localhost:24499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:14.008769Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:14.048914Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:14.241077Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:14.659921Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:00:14.806580Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:00:17.120661Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7494343462494435535:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:17.138127Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:19.088275Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494343492559208352:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.088432Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.261889Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.361589Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.438137Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.509709Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.657186Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.769773Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.895664Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:20.043923Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494343496854176335:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.044087Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.044651Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494343496854176340:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:20.053101Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:20.097031Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494343496854176342:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:00:20.181006Z node 12 :TX_PROXY ERROR: Actor# [12:7494343496854176407:3579] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:00:22.512804Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
waiting...
>> KqpQuery::UpdateWhereInSubquery [GOOD]
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
|83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
|83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD]
Test command err:
Trying to start YDB, gRPC: 10522, MsgBus: 14756
2025-04-17T18:00:06.696655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343438197500891:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:00:06.697261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ae1/r3tmp/tmpTohyc6/pdisk_1.dat
2025-04-17T18:00:07.215030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:00:07.215098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:00:07.221056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:00:07.251995Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 10522, node 1
2025-04-17T18:00:07.475009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:00:07.475033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:00:07.475041Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:00:07.475158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14756
TClient is connected to server localhost:14756
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:00:08.314864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:08.360578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:08.598076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:08.760840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:08.846461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:10.752094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343455377371828:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:10.752217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:11.080902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.123268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.176843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.222283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.263573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.317909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.387595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.471751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343459672339785:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:11.471821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:11.472043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343459672339790:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:11.476147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:11.489559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343459672339792:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:11.592284Z node 1 :TX_PROXY ERROR: Actor# [1:7494343459672339849:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:11.711943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343438197500891:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:11.712016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:13.233077Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343468262274853:2525], status: GENERIC_ERROR, issues:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED} 2025-04-17T18:00:13.234518Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGEzMjhiMjYtMTI4N2ZhZWItZmZkYjJiNC03OWMyYjAyZA==, ActorId: [1:7494343463967307488:2516], ActorState: ExecuteState, TraceId: 01js2eh3467kdpcdbzs7b0vpvy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMIL ... 5-04-17T18:00:19.157161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.200290Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343471407842918:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:19.200364Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:19.224909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.337820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.428776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:19.537053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343492882681829:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.537148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.537831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343492882681834:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:19.541067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:19.569127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343492882681836:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:19.666644Z node 2 :TX_PROXY ERROR: Actor# [2:7494343492882681894:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:21.134998Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343501472616851:2521], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-04-17T18:00:21.136648Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGZlNWE4NTQtYzI1NzJjNWItNzAwMWE3ODMtNWEzZjk1MTE=, ActorId: [2:7494343501472616843:2516], ActorState: ExecuteState, TraceId: 01js2ehav5btg9tb27r28spfqd, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: Trying to start YDB, gRPC: 3679, MsgBus: 29929 2025-04-17T18:00:22.066743Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343507612226927:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:22.068473Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ae1/r3tmp/tmp2sK3Cm/pdisk_1.dat 2025-04-17T18:00:22.223756Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:22.251597Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:22.251707Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:22.255300Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3679, node 3 2025-04-17T18:00:22.430138Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:22.430170Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:22.430201Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:22.430342Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29929 TClient is connected to server localhost:29929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:23.632045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:23.646778Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:23.665010Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.808744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:24.087619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:24.197913Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:27.066325Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343507612226927:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:27.112301Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:27.555858Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343529087065197:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:27.555970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:27.634048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.690373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.753688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.804759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.898203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.948914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:28.014840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:28.138197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343533382033168:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:28.138290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:28.149648Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343533382033173:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:28.163197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:28.186449Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343533382033175:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:28.279929Z node 3 :TX_PROXY ERROR: Actor# [3:7494343533382033234:3563] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersQueueTest::AllEqual [GOOD] >> TPersQueueTest::BadSids >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows >> KqpQuery::RewriteIfPresentToMap >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpTypes::UnsafeTimestampCastV0 >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig >> TPersQueueTest::DirectReadRestartTablet [GOOD] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2754, MsgBus: 26938 2025-04-17T17:59:53.448608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343382591845044:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:53.448657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001afb/r3tmp/tmpfL4X8e/pdisk_1.dat 2025-04-17T17:59:54.011444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:54.011556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:54.013584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:59:54.029251Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2754, node 1 2025-04-17T17:59:54.158864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:54.158918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:54.158927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:54.159061Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26938 TClient is connected to server localhost:26938 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:54.930669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:54.956304Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:59:54.965187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.197239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.434226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.517915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:57.383228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343399771715765:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:57.383363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:57.809511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:57.877377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:57.951872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.038509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.093631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.158507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.239477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:58.314423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343404066683734:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.314496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.314828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343404066683739:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:58.322719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:58.367863Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:59:58.369404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343404066683741:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:58.449143Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343382591845044:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:58.449235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:58.476377Z node 1 :TX_PROXY ERROR: Actor# [1:7494343404066683805:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:00.061006Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWNmMTU0NzctOGYzMGY0NGUtNjVmMjIxYi00MGFiZmVjNQ==, ActorId: [1:7494343408361651434:2516], ActorState: ExecuteState, TraceId: 01js2egp88064pbqg3v7qxx0e1, Create QueryResponse for error on request, msg:
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 50ms during compilation Trying to start YDB, gRPC: 3752, MsgBus: 27259 2025-04-17T18:00:01.242445Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343417582610465:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:01.242525Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001afb/r3tmp/tmpSs4gB6/pdisk_1.dat 2025-04-17T18:00:01.364375Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:01.391122Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:01.391199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:01.393109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3752, node 2 2025-04-17T18:00:01.527135Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:01.527160Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:01.527167Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:01.527278Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27259 TClient is connected to server localhost:27259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:01.966477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:01.984886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 14.222362Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343474203229053:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:14.321795Z node 3 :TX_PROXY ERROR: Actor# [3:7494343474203229110:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9891, MsgBus: 24561 2025-04-17T18:00:21.661313Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:21.661776Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:00:21.661926Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001afb/r3tmp/tmpIMncSm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9891, node 4 2025-04-17T18:00:22.420594Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:22.421655Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:22.421718Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:22.421764Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:22.428699Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:00:22.473802Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:22.473958Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:22.488239Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24561 TClient is connected to server localhost:24561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:23.156435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.285449Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.722531Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:24.336202Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:24.717695Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:25.574585Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1825:3419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:25.574848Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:25.608945Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:25.811015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.149394Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.502279Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.868895Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.223492Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.619890Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:28.053349Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2563:3981], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:28.053482Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:28.053863Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2568:3986], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:28.066500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:28.264082Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2570:3988], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:28.311530Z node 4 :TX_PROXY ERROR: Actor# [4:2637:4036] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:30.450956Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:00:30.904116Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:00:31.375840Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:00:33.947531Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:3503:4727], TxId: 281474976715675, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2ehnjv3t0h6xjntv2vmsqg. SessionId : ydb://session/3?node_id=4&id=NTc1N2NlNjUtZDM3MmY5MjctOWJhOTFkYzctNDE3OGMzNWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys/Index/indexImplTable' retry limit exceeded. } 2025-04-17T18:00:33.948124Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:3503:4727], TxId: 281474976715675, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2ehnjv3t0h6xjntv2vmsqg. SessionId : ydb://session/3?node_id=4&id=NTc1N2NlNjUtZDM3MmY5MjctOWJhOTFkYzctNDE3OGMzNWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys/Index/indexImplTable' retry limit exceeded. }. 2025-04-17T18:00:33.949263Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:3504:4728], TxId: 281474976715675, task: 2. Ctx: { TraceId : 01js2ehnjv3t0h6xjntv2vmsqg. SessionId : ydb://session/3?node_id=4&id=NTc1N2NlNjUtZDM3MmY5MjctOWJhOTFkYzctNDE3OGMzNWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:3497:4259], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:33.949602Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:3505:4729], TxId: 281474976715675, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01js2ehnjv3t0h6xjntv2vmsqg. SessionId : ydb://session/3?node_id=4&id=NTc1N2NlNjUtZDM3MmY5MjctOWJhOTFkYzctNDE3OGMzNWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:3497:4259], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:33.949900Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:3506:4730], TxId: 281474976715675, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=NTc1N2NlNjUtZDM3MmY5MjctOWJhOTFkYzctNDE3OGMzNWY=. TraceId : 01js2ehnjv3t0h6xjntv2vmsqg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:3497:4259], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:33.956605Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTc1N2NlNjUtZDM3MmY5MjctOWJhOTFkYzctNDE3OGMzNWY=, ActorId: [4:2919:4259], ActorState: ExecuteState, TraceId: 01js2ehnjv3t0h6xjntv2vmsqg, Create QueryResponse for error on request, msg: |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> TPQTest::TestLowWatermark [GOOD] >> TPQTest::TestGetTimestamps >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> TPQCompatTest::SetupLockSession [GOOD] >> TPQCompatTest::BadTopics |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large >> KqpLimits::TooBigColumn-useSink [GOOD] >> TPQTest::TestChangeConfig [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 25799, MsgBus: 1172 2025-04-17T18:00:14.081853Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343474286608607:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:14.092738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ad6/r3tmp/tmpBsTXHT/pdisk_1.dat 2025-04-17T18:00:14.719567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:14.721314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:14.721383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:14.725039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25799, node 1 2025-04-17T18:00:14.830632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:14.830656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:00:14.830665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:14.830774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1172 TClient is connected to server localhost:1172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:15.349279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:15.396736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:15.588957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:15.778959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:15.868640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:18.033195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343491466479562:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.033293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.341820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.381031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.417079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.474044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.542133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.615972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.673382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.761539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343491466480226:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.761612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.761907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343491466480231:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.765347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:18.779339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343491466480233:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:18.847148Z node 1 :TX_PROXY ERROR: Actor# [1:7494343491466480289:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:19.086642Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343474286608607:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:19.086803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19893, MsgBus: 17868 2025-04-17T18:00:22.238724Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343507151334432:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:22.238807Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ad6/r3tmp/tmp9coJ4h/pdisk_1.dat 2025-04-17T18:00:22.363599Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:22.394669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:22.394749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:22.397649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19893, node 2 2025-04-17T18:00:22.607508Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:22.607528Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:22.607535Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:22.607667Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17868 TClient is connected to server localhost:17868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:00:23.378443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.409094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.492192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.705182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.797436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.507315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.588806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.640174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.709837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.790814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:26.891363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.014254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343528626173381:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:27.014360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:27.014690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343528626173386:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:27.019507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:27.039939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343528626173388:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:27.137439Z node 2 :TX_PROXY ERROR: Actor# [2:7494343528626173445:3559] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:27.239188Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343507151334432:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:27.239259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23684, MsgBus: 25068 2025-04-17T18:00:30.188788Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343539779657060:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:30.189341Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ad6/r3tmp/tmp0MxTmd/pdisk_1.dat 2025-04-17T18:00:30.362539Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23684, node 3 2025-04-17T18:00:30.467820Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:30.467934Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:30.479467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:30.495284Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:30.495306Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:30.495313Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:30.495475Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25068 TClient is connected to server localhost:25068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:00:31.349916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.366528Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:31.378930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.483452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.692008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.806053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.139482Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343556959527829:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.139586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.212557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:34.266138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:34.313385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:34.399606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:34.471992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:34.554231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:34.676781Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:34.859421Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343556959528509:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.859515Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.860112Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343556959528514:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.865303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:34.893981Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343556959528517:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:34.973392Z node 3 :TX_PROXY ERROR: Actor# [3:7494343556959528571:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:35.155494Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343539779657060:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:35.155613Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::SysViewClientLost |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MultiUsedStage >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::QueryCache |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |83.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 8521, MsgBus: 2367 2025-04-17T18:00:04.753326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343427791038879:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:04.753383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ae5/r3tmp/tmpAMW9YU/pdisk_1.dat 2025-04-17T18:00:05.371724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:05.371849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:05.377584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:05.416837Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8521, node 1 2025-04-17T18:00:05.626984Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:05.627005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:05.627016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:05.627114Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2367 TClient is connected to server localhost:2367 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:06.690794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:06.732478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:06.937424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:07.225199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:07.382353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:09.551005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343449265877119:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:09.551100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:09.754408Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343427791038879:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:09.754462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:09.994143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.044504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.103899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.163482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.229097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.297294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.343687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:10.446333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343453560845080:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:10.446444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:10.446736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343453560845085:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:10.451426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:10.469230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343453560845087:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:10.551622Z node 1 :TX_PROXY ERROR: Actor# [1:7494343453560845145:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:11.879679Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914;tx_id=3; 2025-04-17T18:00:11.888759Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 2025-04-17T18:00:11.889305Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343457855812827:2517], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7494343457855812777:2517]Got BAD REQUEST for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7494343457855812827:2517].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } 2025-04-17T18:00:11.889752Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343457855812820:2517], SessionActorId: [1:7494343457855812777:2517], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 . sessionActorId=[1:7494343457855812777:2517]. isRollback=0 2025-04-17T18:00:11.890699Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGZlYWQwMWUtYzBkNzM1ZGEtMWZlOGU5NWMtZTU0OGY0YWU=, ActorId: [1:7494343457855812777:2517], ActorState: ExecuteState, TraceId: 01js2eh1pc5stt3gb97ecns41p, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [1:7494343457855812821:2517] from: [1:7494343457855812820:2517] 2025-04-17T18:00:11.890796Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494343457855812821:2517] TxId: 281474976710672. Ctx: { TraceId: 01js2eh1pc5stt3gb97ecns41p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZlYWQwMWUtYzBkNzM1ZGEtMWZlOGU5NWMtZTU0OGY0YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/Test`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } } 2025-04-17T18:00:11.890965Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGZlYWQwMWUtYzBkNzM1ZGEtMWZlOGU5NWMtZTU0OGY0YWU=, ActorId: [1:7494343457855812777:2517], ActorState: ExecuteState, TraceId: 01js2eh1pc5stt3gb97ecns41p, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 Trying to start YDB, gRPC: 3447, MsgBus: 10177 2025-04-17T18:00:12.625418Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343462775692836:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ae5/r3tmp/tmpQWCtth/pdisk_1.dat 2025-04-17T18:00:12.793314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:00:12.853005Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:12.854549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:12.854620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18: ... EST for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[3:7494343526547463623:2516].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } 2025-04-17T18:00:27.334531Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494343522252496280:2516], SessionActorId: [3:7494343522252496206:2516], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 . sessionActorId=[3:7494343522252496206:2516]. isRollback=0 2025-04-17T18:00:27.408952Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjBjMWMxMzMtM2UyZTQ2NGUtZWRlZTU3NGYtOTBhZjYxODA=, ActorId: [3:7494343522252496206:2516], ActorState: ExecuteState, TraceId: 01js2ehg9j1x5nqtfc6kj7drna, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [3:7494343522252496281:2516] from: [3:7494343522252496280:2516] 2025-04-17T18:00:27.409053Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494343522252496281:2516] TxId: 281474976710672. Ctx: { TraceId: 01js2ehg9j1x5nqtfc6kj7drna, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjBjMWMxMzMtM2UyZTQ2NGUtZWRlZTU3NGYtOTBhZjYxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } } 2025-04-17T18:00:27.409236Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjBjMWMxMzMtM2UyZTQ2NGUtZWRlZTU3NGYtOTBhZjYxODA=, ActorId: [3:7494343522252496206:2516], ActorState: ExecuteState, TraceId: 01js2ehg9j1x5nqtfc6kj7drna, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 Trying to start YDB, gRPC: 17225, MsgBus: 25879 2025-04-17T18:00:28.297283Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343534155427842:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:28.297348Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ae5/r3tmp/tmpaGCKqU/pdisk_1.dat 2025-04-17T18:00:28.445507Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:28.479502Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:28.479593Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:28.483188Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17225, node 4 2025-04-17T18:00:28.627851Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:28.627879Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:28.627886Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:28.628039Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25879 TClient is connected to server localhost:25879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:29.377989Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:29.386899Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:29.401332Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
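A sketch of the oversized write that KqpLimits::TooBigColumn exercises. The endpoint, database, and KeyValue table layout (Key Uint64, Value String) are assumptions; the thresholds come from the errors above — row keys are capped at 1049600 bytes and individual cells at 16777216 bytes:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

with ydb.SessionPool(driver) as pool:
    def oversized_cell(session):
        query = session.prepare(
            "DECLARE $key AS Uint64; DECLARE $value AS String; "
            "UPSERT INTO KeyValue (Key, Value) VALUES ($key, $value);"
        )
        # 20 MiB (20971520 bytes) exceeds the 16 MiB cell threshold, so the
        # datashard rejects the transaction and the session surfaces
        # BAD_REQUEST (or EXEC_ERROR on the non-sink path), as logged above.
        session.transaction(ydb.SerializableReadWrite()).execute(
            query, {"$key": 1, "$value": b"x" * (20 << 20)}, commit_tx=True
        )

    try:
        pool.retry_operation_sync(oversized_cell)
    except ydb.issues.BadRequest as e:
        print("rejected as expected:", e)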
2025-04-17T18:00:29.531207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:29.918446Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:30.055440Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:33.298518Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343534155427842:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:33.298593Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:34.826301Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343559925233405:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.826389Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.968814Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.084522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.184670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.256672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.343420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.472171Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.575280Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.703196Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343564220201399:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:35.703297Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:35.703514Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343564220201404:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:35.708522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:35.739788Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343564220201406:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:35.801416Z node 4 :TX_PROXY ERROR: Actor# [4:7494343564220201464:3567] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:39.351122Z node 4 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-04-17T18:00:39.351251Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715672 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2025-04-17T18:00:39.351477Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494343577105103790:2513] TxId: 281474976715672. Ctx: { TraceId: 01js2ehvkve8r2n4q894nnqe3c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Zjc0YzU0NzktMmZlNDE0NGEtOTY1MjFlYWUtYTI1ZGNlZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2025-04-17T18:00:39.351854Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Zjc0YzU0NzktMmZlNDE0NGEtOTY1MjFlYWUtYTI1ZGNlZTQ=, ActorId: [4:7494343572810136349:2513], ActorState: ExecuteState, TraceId: 01js2ehvkve8r2n4q894nnqe3c, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] Test command err: 2025-04-17T17:59:22.431700Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:22.431786Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:59:22.455861Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:22.507715Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-04-17T17:59:22.508667Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-17T17:59:22.509587Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] Run 1 CmdWrite 2025-04-17T17:59:22.520872Z node 1 :PERSQUEUE INFO: new Cookie default|eb1b3e3f-910c7c-28371ce-7b939e22_0 generated for partition 0 topic 'topic' owner default Captured kesus quota request event from [1:207:2216] Captured kesus quota request event from [1:208:2217] CmdRead Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Captured kesus quota request event from [1:207:2216] Currently have 3 quoter requests Run 2 CmdWrite 2025-04-17T17:59:23.561156Z node 1 :PERSQUEUE INFO: new Cookie default|e81ac14b-9af5ed1-bc2984d1-d9f0d221_1 generated for partition 0 topic 'topic' owner default CmdRead Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Captured kesus quota request event from [1:207:2216] Currently have 4 quoter requests Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:59:26.518955Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:26.519035Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have 
tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:207:2057] recipient: [2:14:2061] 2025-04-17T17:59:26.560193Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [2:205:2211] connected; active server actors: 1 2025-04-17T17:59:26.560295Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [2:206:2212] connected; active server actors: 1 Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:218:2057] recipient: [2:145:2169] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:221:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:222:2057] recipient: [2:220:2221] Leader for TabletID 72057594037927938 is [2:223:2222] sender: [2:224:2057] recipient: [2:220:2221] Leader for TabletID 72057594037927938 is [2:223:2222] sender: [2:276:2057] recipient: [2:14:2061] 2025-04-17T17:59:27.609444Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:275:2260] connected; active server actors: 1 2025-04-17T17:59:28.032567Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:281:2265] connected; active server actors: 1 2025-04-17T17:59:28.032965Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:283:2267] connected; active server actors: 1 2025-04-17T17:59:28.033283Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:285:2269] connected; active server actors: 1 2025-04-17T17:59:28.033619Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:287:2271] connected; active server actors: 1 2025-04-17T17:59:28.033954Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:289:2273] connected; active server actors: 1 Leader for TabletID 72057594037927938 is [2:223:2222] sender: [2:294:2057] recipient: [2:145:2169] Leader for TabletID 72057594037927938 is [2:223:2222] sender: [2:297:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927938 is [2:223:2222] sender: [2:298:2057] recipient: [2:296:2278] Leader for TabletID 72057594037927938 is [2:299:2279] sender: [2:300:2057] recipient: [2:296:2278] Leader for TabletID 72057594037927938 is [2:299:2279] sender: [2:341:2057] recipient: [2:14:2061] 2025-04-17T17:59:28.076035Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:340:2313] connected; active server actors: 1 2025-04-17T17:59:28.076381Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:343:2315] connected; active server actors: 1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:59:29.046667Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:29.046748Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: 
[3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:209:2057] recipient: [3:14:2061] 2025-04-17T17:59:29.078498Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [3:207:2213] connected; active server actors: 1 2025-04-17T17:59:29.078649Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [3:208:2214] connected; active server actors: 1 Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:220:2057] recipient: [3:145:2169] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:223:2057] recipient: [3:222:2223] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:224:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927938 is [3:225:2224] sender: [3:226:2057] recipient: [3:222:2223] Leader for TabletID 72057594037927938 is [3:225:2224] sender: [3:277:2057] recipient: [3:14:2061] 2025-04-17T17:59:30.069608Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:276:2262] connected; active server actors: 1 2025-04-17T17:59:30.481256Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:283:2267] connected; active server actors: 1 2025-04-17T17:59:30.481690Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:285:2269] connected; active server actors: 1 2025-04-17T17:59:30.482118Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:287:2271] connected; active server actors: 1 2025-04-17T17:59:30.482560Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:289:2273] connected; active server actors: 1 2025-04-17T17:59:30.482943Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:291:2275] connected; active server actors: 1 Leader for TabletID 72057594037927938 is [3:225:2224] sender: [3:296:2057] recipient: [3:145:2169] Leader for TabletID 72057594037927938 is [3:225:2224] sender: [3:299:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927938 is [3:225:2224] sender: [3:300:2057] recipient: [3:298:2280] Leader for TabletID 72057594037927938 is [3:301:2281] sender: [3:302:2057] recipient: [3:298:2280] Leader for TabletID 72057594037927938 is [3:301:2281] sender: [3:343:2057] recipient: [3:14:2061] 2025-04-17T17:59:30.582623Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:342:2315] connected; active server actors: 1 2025-04-17T17:59:30.583124Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:345:2317] connected; active server actors: 1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-17T17:59:31.530143Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:31.530272Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: 
[4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:209:2057] recipient: [4:14:2061] 2025-04-17T17:59:31.565096Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [4:207:2213] connected; active server actors: 1 2025-04-17T17:59:31.565157Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [4:208:2214] connected; active server actors: 1 Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:219:2057] recipient: [4:145:2169] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:222:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:223:2057] recipient: [4:221:2222] Leader for TabletID 72057594037927938 is [4:224:2223] sender: [4:225:2057] recipient: [4:221:2222] Leader for TabletID 72057594037927938 is [4:224:2223] sender: [4:275:2057] recipient: [4:14:2061] 2025-04-17T17:59:32.593558Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [4:274:2260] connected; active server actors: 1 2025-04-17T17:59:32.969842Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [4:281:2265] connected; active server actors: 1 2025-04-17T17:59:32.970266Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [4:283:2267] connected; active server actors: 1 2025-04-17T17:59:32.970577Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [4:285:2269] connected; active ... .size=0 2025-04-17T18:00:41.676755Z node 39 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:00:41.676819Z node 39 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:00:41.676969Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:00:41.677417Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:00:41.682497Z node 39 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [39:276:2270] 2025-04-17T18:00:41.683688Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T18:00:41.685313Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-17T18:00:41.685662Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T18:00:41.687686Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T18:00:41.688752Z node 39 :PERSQUEUE DEBUG: [topic:0:TInitDataRangeStep] Got data offset 0 count 2 size 1049870 so 0 eo 2 d0000000000_00000000000000000000_00000_0000000002_00002| 2025-04-17T18:00:41.694440Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-17T18:00:41.703039Z node 39 :PERSQUEUE DEBUG: [topic:0:TInitDataStep] read res partition offset 0 endOffset 2 key 0,2 valuesize 1049870 expected 1049870 2025-04-17T18:00:41.709960Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T18:00:41.710091Z node 39 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-17T18:00:41.710142Z node 39 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-17T18:00:41.710232Z node 39 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [39:276:2270] 2025-04-17T18:00:41.710331Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 2 Head Offset 0 PartNo 0 PackedSize 1049870 count 2 nextOffset 2 batches 4 SYNC INIT sourceId sourceid1 seqNo 2 offset 1 SYNC INIT HEAD KEY: d0000000000_00000000000000000000_00000_0000000002_00002| size 1049870 2025-04-17T18:00:41.710415Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:00:41.710559Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:00:41.710623Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:00:41.710688Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000000_00000_0000000001_00000|, d0000000000_00000000000000000000_00000_0000000001_00000|] 2025-04-17T18:00:41.710749Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:00:41.710830Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:00:41.710888Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:00:41.710948Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:00:41.711051Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Init complete for topic 'topic' Partition: 0 SourceId: sourceid1 SeqNo: 2 offset: 1 MaxOffset: 2 2025-04-17T18:00:41.711129Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 39 2025-04-17T18:00:41.711202Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 39 2025-04-17T18:00:41.711646Z node 39 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:00:41.711719Z node 39 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00000|(+) to d0000000000_00000000000000000000_00000_0000000001_00000|(+) 2025-04-17T18:00:41.712253Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-04-17T18:00:41.712347Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-04-17T18:00:41.718820Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
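The PERSQUEUE trace above is the tablet-side view of an ordinary topic write and read: the message is appended to the partition head blob, written through to KV storage, and cached (L1/L2) so hot reads are served "from uncompacted head" without touching disk. A client-side sketch of the same interaction — endpoint, database, topic path, producer id, and consumer name are all assumptions, using the ydb Python SDK topic API:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

# Write one message; the tablet assigns per-producer seqnos like the
# 'sourceid1' seqNo values in the trace above.
with driver.topic_client.writer("topic", producer_id="sourceid1") as writer:
    writer.write(ydb.TopicWriterMessage(data=b"payload"))

# Read it back; when the data is still in the partition head, the tablet
# answers from its blob cache rather than from disk.
with driver.topic_client.reader("topic", consumer="user") as reader:
    msg = reader.receive_message()
    print(msg.data)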
2025-04-17T18:00:41.718930Z node 39 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:00:41.719332Z node 39 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user user readTimeStamp done, result 129 queuesize 0 startOffset 0 2025-04-17T18:00:41.729346Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 >>> write #3 2025-04-17T18:00:41.740040Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [39:301:2288], now have 1 active actors on pipe 2025-04-17T18:00:41.740216Z node 39 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T18:00:41.740280Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T18:00:41.740381Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 1 size 1024 offset: 2 2025-04-17T18:00:41.740485Z node 39 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-04-17T18:00:41.740589Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-17T18:00:41.740640Z node 39 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-04-17T18:00:41.741100Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [39:304:2290], now have 1 active actors on pipe 2025-04-17T18:00:41.741193Z node 39 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T18:00:41.741249Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T18:00:41.741368Z node 39 :PERSQUEUE INFO: new Cookie default|696eb77f-c6d1dcb5-e85b1b01-c494e27a_0 generated for partition 0 topic 'topic' owner default 2025-04-17T18:00:41.741489Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T18:00:41.741590Z node 39 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:00:41.741961Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [39:306:2292], now have 1 active actors on pipe 2025-04-17T18:00:41.742049Z node 39 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T18:00:41.742089Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-17T18:00:41.746169Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 0 size 1024 offset: 2 2025-04-17T18:00:41.746395Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 1033. Cookie: 1 2025-04-17T18:00:41.746537Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 1 2025-04-17T18:00:41.746736Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 3 partNo 0 2025-04-17T18:00:41.750817Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 1096 count 1 nextOffset 3 batches 1 2025-04-17T18:00:41.751691Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 1084 WTime 252 2025-04-17T18:00:41.751900Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:00:41.751961Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:00:41.752015Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T18:00:41.752073Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:00:41.752126Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-04-17T18:00:41.752176Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-17T18:00:41.752207Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:00:41.752250Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:00:41.752345Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:00:41.752459Z node 39 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:00:41.752578Z node 39 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 1084 2025-04-17T18:00:41.770453Z node 39 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 1084 actorID [39:267:2263] 2025-04-17T18:00:41.770620Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1033 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:00:41.770699Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:00:41.770793Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-17T18:00:41.771114Z node 39 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 2 partno 0 count 1 parts 0 size 1084 2025-04-17T18:00:41.771237Z node 39 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >>> write #1 2025-04-17T18:00:41.771808Z node 39 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [39:313:2298], now have 1 active actors on pipe >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestManyConsumers |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |83.2%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |83.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> KqpQuery::UdfTerminate >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD] >> 
TPersQueueTest::SetMeteringMode >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpQuery::QueryFromSqs >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] Test command err: 2025-04-17T17:59:45.150064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343349053712847:2240];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:45.150125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015af/r3tmp/tmpUbWSea/pdisk_1.dat 2025-04-17T17:59:46.179316Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:46.182363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:46.186112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:46.195051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:46.205534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14443 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T17:59:46.486516Z node 1 :TX_PROXY DEBUG: actor# [1:7494343349053712904:2117] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:46.486555Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343353348680681:2440] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:46.486645Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343349053712929:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:46.486723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343353348680575:2355][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343349053712929:2131], cookie# 1 2025-04-17T17:59:46.488046Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343353348680583:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343353348680580:2355], cookie# 1 2025-04-17T17:59:46.488086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343353348680584:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343353348680581:2355], cookie# 1 2025-04-17T17:59:46.488102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343353348680585:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343353348680582:2355], cookie# 1 2025-04-17T17:59:46.488135Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343344758745282:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343353348680583:2355], cookie# 1 2025-04-17T17:59:46.488155Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343344758745285:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343353348680584:2355], cookie# 1 2025-04-17T17:59:46.488167Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343344758745288:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343353348680585:2355], cookie# 1 2025-04-17T17:59:46.488191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343353348680583:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343344758745282:2051], cookie# 1 2025-04-17T17:59:46.488203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343353348680584:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343344758745285:2054], cookie# 1 2025-04-17T17:59:46.488228Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343353348680585:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343344758745288:2057], cookie# 1 2025-04-17T17:59:46.488267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343353348680575:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343353348680580:2355], cookie# 1 2025-04-17T17:59:46.488303Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343353348680575:2355][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:46.488320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343353348680575:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7494343353348680581:2355], cookie# 1 2025-04-17T17:59:46.488346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343353348680575:2355][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:46.488367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343353348680575:2355][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343353348680582:2355], cookie# 1 2025-04-17T17:59:46.488382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343353348680575:2355][/dc-1] Unexpected sync response: sender# [1:7494343353348680582:2355], cookie# 1 2025-04-17T17:59:46.488469Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343349053712929:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:46.497620Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343349053712929:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343353348680575:2355] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:46.497791Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343349053712929:2131], cacheItem# { Subscriber: { Subscriber: [1:7494343353348680575:2355] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T17:59:46.500997Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343353348680682:2441], recipient# [1:7494343353348680681:2440], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:46.501061Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343353348680681:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 TClient::Ls response: 2025-04-17T17:59:46.530615Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343353348680681:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T17:59:46.532926Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343353348680681:2440] Handle TEvDescribeSchemeResult Forward to# [1:7494343353348680680:2439] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:59:46.548169Z node 1 :TX_PROXY DEBUG: actor# [1:7494343349053712904:2117] Handle TEvProposeTransaction 2025-04-17T17:59:46.548188Z node 1 :TX_PROXY DEBUG: actor# [1:7494343349053712904:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T17:59:46.548250Z node 1 :TX_PROXY DEBUG: actor# [1:7494343349053712904:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494343353348680689:2447] 2025-04-17T17:59:46.674661Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343353348680689:2447] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-17T17:59:46.674712Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343353348680689:2447] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T17:59:46.674787Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343353348680689:2447] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T17:59:46.674892Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343349053712929:2131], request# { ErrorCount: 0 Databa ... 
bscriber: { Subscriber: [13:7494343528147573413:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:45.637665Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7494343605456984884:2174], recipient# [13:7494343605456984883:2341], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:45.789201Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7494343523324539251:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:45.789346Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7494343523324539251:2108], cacheItem# { Subscriber: { Subscriber: [11:7494343531914473921:2143] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:45.789443Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7494343604928918132:2233], recipient# [11:7494343604928918131:2342], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.238241Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7494343523324539251:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.238398Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7494343523324539251:2108], cacheItem# { Subscriber: { Subscriber: 
[11:7494343531914473921:2143] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:46.238509Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7494343609223885430:2234], recipient# [11:7494343609223885429:2343], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.325722Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7494343523324539251:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.325854Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7494343523324539251:2108], cacheItem# { Subscriber: { Subscriber: [11:7494343596338983507:2219] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:46.326109Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7494343523324539251:2108], cacheItem# { Subscriber: { Subscriber: [11:7494343596338983508:2220] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:46.326242Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7494343609223885431:2235], recipient# [11:7494343596338983504:2335], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ 
Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.326492Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7494343596338983504:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:46.507583Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7494343523324539251:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.507716Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7494343523324539251:2108], cacheItem# { Subscriber: { Subscriber: [11:7494343596338983489:2217] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:46.507798Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7494343609223885433:2236], recipient# [11:7494343609223885432:2344], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.508049Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:00:46.789920Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7494343523324539251:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:00:46.790088Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7494343523324539251:2108], cacheItem# { Subscriber: { Subscriber: [11:7494343531914473921:2143] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:00:46.790205Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7494343609223885435:2237], recipient# [11:7494343609223885434:2345], 
result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> KqpQuery::QueryCache [GOOD] >> KqpQuery::Pure >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MergeConnection |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |83.3%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] >> TTopicYqlTest::DropTopicYql [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::QueryCancelWrite |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpExplain::SortStage >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> TPersQueueTest::BadSids [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2025-04-17T17:59:05.730687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343178261212053:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:05.730731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:59:05.826699Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343176738928254:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:05.826757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:59:06.456303Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:59:06.479405Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001571/r3tmp/tmpbACIjG/pdisk_1.dat 
2025-04-17T17:59:06.769086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:06.842359Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:07.408884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:07.408973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:07.414966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:07.415059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:07.434254Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:59:07.434384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:59:07.440150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:59:07.543423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65053, node 1 2025-04-17T17:59:07.838596Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:07.893071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:07.910517Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:59:07.910548Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:59:07.974722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001571/r3tmp/yandexsD1p76.tmp 2025-04-17T17:59:07.974748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001571/r3tmp/yandexsD1p76.tmp 2025-04-17T17:59:07.974886Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001571/r3tmp/yandexsD1p76.tmp 2025-04-17T17:59:07.975005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:59:08.117592Z INFO: TTestServer started on Port 22340 GrpcPort 65053 TClient is connected to server localhost:22340 PQClient connected to localhost:65053 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:08.966837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:59:09.096218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:59:10.734847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343178261212053:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:10.734911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:10.827714Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343176738928254:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:10.827778Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:13.067857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343211098667041:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:13.067932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343211098667063:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:13.075586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:13.055838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343208325984301:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:13.063308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343212620951615:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:13.070680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:13.101561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:59:13.137417Z node 2 :TX_PROXY ERROR: Actor# [2:7494343211098667068:2177] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T17:59:13.149985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-17T17:59:13.153753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343211098667067:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:59:13.158568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343212620951617:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:59:13.261540Z node 1 :TX_PROXY ERROR: Actor# [1:7494343212620951707:2787] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:13.306450Z node 2 :TX_PROXY ERROR: Actor# [2:7494343211098667095:2184] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:13.462019Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343211098667103:2327], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:59:13.462365Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQ1NmNhYWYtZDBmMzZhMzktM2QyMmRhMmQtNGYzOWZhM2E=, ActorId: [2:7494343211098667039:2318], ActorState: ExecuteState, TraceId: 01js2ef8cgewmwq479b63ysx8y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:59:13.471870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:13.474820Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343212620951724:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:59:13.476195Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWM1YmI4NjMtNDNhNThmMjMtNmZmYTkzZmMtNzZmODZlM2M=, ActorId: [1:7494343208325984298:2343], ActorState: ExecuteState, TraceId: 01js2ef8ah99bbhx2rxbebck42, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:59:13.4 ... 00:27.345028Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7494343499390934226:3405] (SourceId=A_Source, PreferedPartition=0) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T18:00:27.345056Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7494343499390934226:3405] (SourceId=A_Source, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T18:00:27.345074Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7494343499390934226:3405] (SourceId=A_Source, PreferedPartition=0) Start idle 2025-04-17T18:00:28.106352Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7494343533750673938:2719] TxId: 281474976710714. Ctx: { TraceId: 01js2ehhgn9jp2jdr98zk5gjfx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzM1MzJmMTAtN2Q4NGIzY2MtNDhlYzYyNjQtM2IwZDNiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 8 2025-04-17T18:00:28.106530Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7494343533750673942:2719], TxId: 281474976710714, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2ehhgn9jp2jdr98zk5gjfx. SessionId : ydb://session/3?node_id=7&id=NzM1MzJmMTAtN2Q4NGIzY2MtNDhlYzYyNjQtM2IwZDNiYTc=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7494343533750673938:2719], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T18:00:30.758426Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494343539864362920:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:30.758522Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:00:31.075641Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:00:31.107967Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001571/r3tmp/tmpE8ERLs/pdisk_1.dat 2025-04-17T18:00:31.165532Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:31.405838Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:31.468854Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:31.468964Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:31.473631Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:31.473723Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:31.475817Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:31.480890Z node 9 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-04-17T18:00:31.482115Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9720, node 9 2025-04-17T18:00:31.674933Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001571/r3tmp/yandexzDJCgk.tmp 2025-04-17T18:00:31.674964Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001571/r3tmp/yandexzDJCgk.tmp 2025-04-17T18:00:31.675093Z node 9 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001571/r3tmp/yandexzDJCgk.tmp 2025-04-17T18:00:31.675250Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:00:31.935435Z INFO: TTestServer started on Port 31478 GrpcPort 9720 TClient is connected to server localhost:31478 PQClient connected to localhost:9720 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:32.419603Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:00:32.534602Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:00:35.762475Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7494343539864362920:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:35.762547Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:38.545358Z node 9 :KQP_PROXY ERROR: TraceId: "01js2ehp9k5ymma5yqk0614xw3", Request deadline has expired for 0.737141s seconds 2025-04-17T18:00:38.564576Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494343574224102430:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.567359Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494343574224102451:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.569081Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.572215Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:00:38.586681Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494343574224102486:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.586908Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.626474Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7494343574224102453:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:00:38.649605Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.698307Z node 9 :TX_PROXY ERROR: Actor# [9:7494343574224102598:2817] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:38.860861Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7494343574224102612:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:00:38.865718Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YjM2YWI5ZjQtYmE1YzhhNTctYTM4Y2U5NzAtY2M2ZTIzMTc=, ActorId: [9:7494343574224102411:2344], ActorState: ExecuteState, TraceId: 01js2ehvwq7mybg9nkfrbxyy85, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:00:38.866163Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:00:38.988641Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:39.246861Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:00:39.841331Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2ehwtzd85qszp6vambmcdd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=M2FjMTA1NmEtMTZjNzE3YzktODYzMjRhMWItNzI3YjRmN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7494343578519070321:3131] 2025-04-17T18:00:46.358312Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:00:46.358341Z node 9 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Ok Received TEvChooseError: Bad SourceId 2025-04-17T18:00:46.517208Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343608583841889:3416] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-04-17T18:00:46.517250Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7494343608583841889:3416] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2025-04-17T18:00:15.803600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:15.804290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:15.804350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001911/r3tmp/tmpvWumHI/pdisk_1.dat 2025-04-17T18:00:17.315967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:00:17.461884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:17.546272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:17.550404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:17.570967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:17.754900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:00:18.651654Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-04-17T18:00:18.653569Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-17T18:00:18.707186Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.053254s, errors=0 2025-04-17T18:00:18.707482Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-04-17T18:00:18.707589Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-04-17T18:00:18.708503Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-17T18:00:18.716509Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started fullscan actor# [1:750:2632] 2025-04-17T18:00:18.716696Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 100 2025-04-17T18:00:18.716737Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-04-17T18:00:18.717791Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected called, Status# OK 
2025-04-17T18:00:18.751282Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.033285s, sampled# 100, iter finished# 1, oks# 100 2025-04-17T18:00:18.751508Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 100 2025-04-17T18:00:18.751827Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started# 10 actors each with inflight# 1 2025-04-17T18:00:18.751921Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called 2025-04-17T18:00:18.751963Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752040Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called 2025-04-17T18:00:18.752066Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752094Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called 2025-04-17T18:00:18.752132Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752162Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called 2025-04-17T18:00:18.752195Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752237Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} Bootstrap called 2025-04-17T18:00:18.752261Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752289Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} Bootstrap called 2025-04-17T18:00:18.752312Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752342Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} Bootstrap called 2025-04-17T18:00:18.752391Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752418Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} Bootstrap called 2025-04-17T18:00:18.752454Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752506Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} Bootstrap called 2025-04-17T18:00:18.752541Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.752570Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} Bootstrap called 2025-04-17T18:00:18.752594Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-17T18:00:18.756645Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} session: ydb://session/3?node_id=1&id=YzdjNmM1NS1jZjNmZmQ5Ni05ODFkZGQ4MC1hMGVjOTE2Ng== 2025-04-17T18:00:18.756914Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} session: ydb://session/3?node_id=1&id=ZjVhM2E1MDgtYmI4NTdjMDctNTNkMDE2Y2ItMzhhYWQ2OGI= 2025-04-17T18:00:18.758119Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} session: ydb://session/3?node_id=1&id=N2VjNjBlZGQtMzNmMzI3N2YtMjRiYWZiZjktZjNlMGY2YTQ= 2025-04-17T18:00:18.759749Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} session: ydb://session/3?node_id=1&id=NGQ0NDcyMzMtMjBiNGQwNzEtMWRhMjdiNzUtYThlZDA1Y2U= 2025-04-17T18:00:18.760932Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} session: ydb://session/3?node_id=1&id=M2RkOTIwZWYtZTY0ZWFmNWItODkwOGI2MzYtNmRkYTYyNTg= 2025-04-17T18:00:18.762340Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} session: ydb://session/3?node_id=1&id=NDUzMWVjZmMtZDYzOGZjYWYtZjM1YjZhNWQtMmNhMGI1ZTA= 2025-04-17T18:00:18.763842Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} session: ydb://session/3?node_id=1&id=YjZlNmQzOTAtOWY3OTA1YTItMjI1OGFhZi1iMDZlY2FiZg== 2025-04-17T18:00:18.765508Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} session: ydb://session/3?node_id=1&id=MzJhYzE1Y2UtZGM2Mzc4M2ItYzliYTMzYTItYTk5MzYwNjg= 2025-04-17T18:00:18.766868Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} session: ydb://session/3?node_id=1&id=OThiNmVmOGYtMWFkZGM4NzAtOWVhNzMwZTMtOGFlM2RiMTY= 2025-04-17T18:00:18.768710Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} session: ydb://session/3?node_id=1&id=ODdhNjNkMzItZjg2YWFjNjktNTdmN2I1MDMtMTk2ZmUyOWU= 2025-04-17T18:00:18.774347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:775:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.774480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.774562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:809:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.774622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:810:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.774674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:811:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.774719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:812:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.774804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:813:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.774868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:816:2692], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:18.776182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:818:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Erro ... 76715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:40.888678Z node 2 :TX_PROXY ERROR: Actor# [2:854:2724] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:40.889470Z node 2 :TX_PROXY ERROR: Actor# [2:855:2725] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:40.890415Z node 2 :TX_PROXY ERROR: Actor# [2:863:2728] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:40.890870Z node 2 :TX_PROXY ERROR: Actor# [2:866:2729] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:00:41.069980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:829:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.070129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:830:2706], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:831:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:832:2708], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:833:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2710], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:835:2711], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:837:2713], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:843:2719], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.080748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:856:2726], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:00:41.124180Z node 2 :TX_PROXY ERROR: Actor# [2:986:2822] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:42.116147Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 1.358336s, errors=0 2025-04-17T18:00:42.116515Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 2 { Tag: 2 DurationMs: 1358 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:42.134431Z node 2 :TX_PROXY ERROR: Actor# [2:1915:3144] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:42.958102Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 2.193815s, errors=0 2025-04-17T18:00:42.958353Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 3 { Tag: 3 DurationMs: 2193 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:42.973869Z node 2 :TX_PROXY ERROR: Actor# [2:2822:3450] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:44.249440Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 10} finished in 3.434710s, errors=0 2025-04-17T18:00:44.249812Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 10 { Tag: 10 DurationMs: 3434 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:44.265126Z node 2 :TX_PROXY ERROR: Actor# [2:3729:3756] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:45.280968Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 5} finished in 4.503840s, errors=0 2025-04-17T18:00:45.281375Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 5 { Tag: 5 DurationMs: 4503 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:45.300154Z node 2 :TX_PROXY ERROR: Actor# [2:4636:4062] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:46.569551Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 11} finished in 5.754630s, errors=0 2025-04-17T18:00:46.569955Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 11 { Tag: 11 DurationMs: 5754 OperationsOK: 100 
OperationsError: 0 } 2025-04-17T18:00:46.592992Z node 2 :TX_PROXY ERROR: Actor# [2:5543:4368] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:47.401960Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 6} finished in 6.618447s, errors=0 2025-04-17T18:00:47.402374Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 6 { Tag: 6 DurationMs: 6618 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:47.421184Z node 2 :TX_PROXY ERROR: Actor# [2:6450:4674] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:48.257831Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 7.483137s, errors=0 2025-04-17T18:00:48.258497Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 4 { Tag: 4 DurationMs: 7483 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:48.281039Z node 2 :TX_PROXY ERROR: Actor# [2:7357:4980] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:50.390609Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 9} finished in 9.596475s, errors=0 2025-04-17T18:00:50.391114Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 9 { Tag: 9 DurationMs: 9596 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:50.419056Z node 2 :TX_PROXY ERROR: Actor# [2:8264:5286] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:51.788140Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 7} finished in 10.996320s, errors=0 2025-04-17T18:00:51.788678Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 7 { Tag: 7 DurationMs: 10996 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:51.821103Z node 2 :TX_PROXY ERROR: Actor# [2:9171:5592] txid# 281474976716577, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:51.861476Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01js2ehy3ge92y2qya7d3gtxj6", SessionId: ydb://session/3?node_id=2&id=OGY1OWE0ZDYtYjJjYjgwZDQtOTBhNmU3ZGUtOGQ4YjM0ZGQ=, Slow query, duration: 11.044463s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n --!syntax_v1\n\n DECLARE $key AS Text;\n\n SELECT * FROM `usertable` WHERE id == 
$key;\n ", parameters: 36b 2025-04-17T18:00:53.781891Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 8} finished in 12.989941s, errors=0 2025-04-17T18:00:53.791638Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 8 { Tag: 8 DurationMs: 12989 OperationsOK: 100 OperationsError: 0 } 2025-04-17T18:00:53.791757Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 13.037143s, oks# 1000, errors# 0 2025-04-17T18:00:53.792367Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3 >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |83.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris >> KqpExplain::PrecomputeRange >> KqpQuery::CreateAsSelect_BadCases |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> KqpQuery::QueryFromSqs [GOOD] >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::MultiJoinCteLinks >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 24159, MsgBus: 14657 2025-04-17T18:00:27.405938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343530543113884:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:27.410641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a98/r3tmp/tmpGRbz3M/pdisk_1.dat 2025-04-17T18:00:27.876173Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:27.894142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:27.894268Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:27.896593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24159, node 1 2025-04-17T18:00:28.021883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:28.021905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:28.021934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:28.022056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14657 TClient is connected to server localhost:14657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:28.711342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:28.746852Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:28.769297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:29.001046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:29.188198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:29.266362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.330568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343547722984838:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:31.330694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:31.729650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:31.804693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:31.845615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:31.928635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:31.972475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:32.022067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:32.088922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:32.153411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343552017952803:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:32.153481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:32.153866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343552017952808:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:32.158058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:32.173816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343552017952810:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:32.234060Z node 1 :TX_PROXY ERROR: Actor# [1:7494343552017952864:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:32.404801Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343530543113884:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:32.404861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:33.355490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16895, MsgBus: 17192 2025-04-17T18:00:35.306942Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343564670101716:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:35.307792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a98/r3tmp/tmpRWKk3A/pdisk_1.dat 2025-04-17T18:00:35.705909Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:35.709628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:35.709698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:35.715224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16895, node 2 2025-04-17T18:00:35.866302Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:35.866321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:35.866326Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:35.866431Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17192 TClient is connected to server localhost:17192 WaitRootIsUp 'Root'... 
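The repeated "Resource pool default not found" warnings and "doublechecking" retries above are the first-use bootstrap of the workload manager's default pool: concurrent TPoolCreatorActor instances all propose the same create, one transaction wins, and the rest observe "path exist, request accepts it" and proceed. For comparison only, an explicitly declared pool would look roughly like the following YQL sketch; the pool name and limits are illustrative and not taken from this test:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );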
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:36.531403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:36.539514Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:36.549897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:3 ... :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:46.734199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:46.807523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:46.855686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:46.919075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343609406209605:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:46.919161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:46.919396Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343609406209610:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:46.923203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:46.936018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343609406209612:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:47.033236Z node 3 :TX_PROXY ERROR: Actor# [3:7494343613701176966:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:47.398470Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343592226338028:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:47.398541Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 5322 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3683 affected_shards: 1 } query_phases { duration_us: 9470 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 2228 affected_shards: 2 } compilation { duration_us: 264504 cpu_time_us: 261090 } process_cpu_time_us: 590 total_duration_us: 283671 total_cpu_time_us: 267591 Trying to start YDB, gRPC: 8832, MsgBus: 12864 2025-04-17T18:00:49.616535Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343623613092865:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:49.616594Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a98/r3tmp/tmpuXJAEk/pdisk_1.dat 2025-04-17T18:00:49.753818Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:49.759275Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:49.759355Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:49.765226Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8832, node 4 2025-04-17T18:00:49.828380Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:49.828401Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:49.828408Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:49.828519Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12864 TClient is connected to server localhost:12864 WaitRootIsUp 'Root'... 
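A quick consistency check on the query_phases summary above: the per-phase CPU times sum exactly to the reported total, while the wall-clock durations do not, presumably because total_duration_us also counts time spent between phases:

    3683 (TwoShard read) + 2228 (EightShard update) + 261090 (compilation) + 590 (process)
        = 267591 us == total_cpu_time_us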
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:50.409572Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:50.440089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:50.585939Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:50.894953Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:00:51.017077Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:00:54.618058Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343623613092865:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:54.618162Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:55.378336Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343649382898428:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:55.378444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:55.431669Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:55.525142Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:55.631086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:55.757366Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:55.872356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:55.959809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:56.051449Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:56.148654Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343653677866411:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:56.148735Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:56.148987Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343653677866416:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:56.152262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:56.170550Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343653677866418:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:56.233625Z node 4 :TX_PROXY ERROR: Actor# [4:7494343653677866473:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:58.298621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 >> KqpQuery::Pure [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpQuery::QueryCancelWrite [GOOD] >> TPQTest::TestManyConsumers [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::BadSids [GOOD] Test command err: 2025-04-17T17:57:18.060451Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:18.060527Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T17:57:18.516997Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:18.517062Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info === Server->StartServer(false); 2025-04-17T17:57:19.026993Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494342721737710116:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:19.027064Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:19.343122Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:19.355620Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494342719660870792:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:19.358559Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:19.358669Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0007f5/r3tmp/tmpXaWK3b/pdisk_1.dat 2025-04-17T17:57:19.758778Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:19.758930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:19.759924Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:19.760005Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:19.763677Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:19.771245Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T17:57:19.771390Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-17T17:57:19.772160Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23342, node 3 2025-04-17T17:57:20.006075Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0007f5/r3tmp/yandexNbq6OM.tmp 2025-04-17T17:57:20.006114Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0007f5/r3tmp/yandexNbq6OM.tmp 2025-04-17T17:57:20.006277Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0007f5/r3tmp/yandexNbq6OM.tmp 2025-04-17T17:57:20.006398Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:20.070609Z INFO: TTestServer started on Port 1602 GrpcPort 23342 TClient is connected to server localhost:1602 PQClient connected to localhost:23342 === TenantModeEnabled() = 0 === Init PQ - start server on port 23342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:20.520849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:57:20.521086Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.521317Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:57:20.521531Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:57:20.521558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
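PQ test fixtures like this one seed the cluster registry during setup; the statement appears verbatim under "=== Init DC" earlier in this section and reflows to:

    UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
    VALUES ("dc1", "localhost",                true,  true, 1000),
           ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);

The earlier SCHEME_ERROR ("Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]'") is the cluster tracker polling before this table exists; it clears once init completes ("=== CheckClustersList. Ok").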
2025-04-17T17:57:20.530978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:20.531103Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:57:20.531256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.531292Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:57:20.531311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T17:57:20.531321Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-17T17:57:20.539061Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.539099Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:57:20.539124Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T17:57:20.546914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.546949Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.546980Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:20.547013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:20.551576Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:57:20.551904Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:20.551917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T17:57:20.551955Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:20.553679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T17:57:20.553795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:57:20.556525Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912640604, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:20.556654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912640604 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:57:20.556695Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:20.556930Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T17:57:20.556953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:20.557113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:57:20.557173Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:57:20.558991Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:57:20.559013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:57:20.559169Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:57:20.559188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7494342721737710773:2404], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T17:57:20.559227Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.559247Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T17:57:20.559315Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:57:20.559332Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:20.559365Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:57:20.559382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:20.559395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2 ... 
ms 2025-04-17T18:00:51.256664Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Write session: try to update token 2025-04-17T18:00:51.256737Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-17T18:00:51.266418Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:00:51.266823Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=1) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-17T18:00:51.267176Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T18:00:51.267213Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-04-17T18:00:51.267333Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 1 2025-04-17T18:00:51.267412Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=1) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:00:51.267760Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T18:00:51.267786Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-04-17T18:00:51.268200Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--topic1 partition: 1 SourceId: '\0base64:aa' SeqNo: 1 partNo : 0 messageNo: 1 size 92 offset: -1 2025-04-17T18:00:51.268490Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic 'rt3.dc1--topic1' partition 1 part blob processing sourceId '\0base64:aa' seqNo 1 partNo 0 2025-04-17T18:00:51.275424Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic 'rt3.dc1--topic1' partition 1 part blob complete sourceId '\0base64:aa' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 169 count 1 nextOffset 1 batches 1 2025-04-17T18:00:51.276441Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Add new write blob: topic 'rt3.dc1--topic1' partition 1 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000001_00000000000000000000_00000_0000000001_00000| size 157 WTime 1744912851270 2025-04-17T18:00:51.276657Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:00:51.276699Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T18:00:51.276739Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-04-17T18:00:51.276784Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T18:00:51.276821Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] m0000000001pbase64:aa 2025-04-17T18:00:51.276837Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] d0000000001_00000000000000000000_00000_0000000001_00000| 2025-04-17T18:00:51.276853Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] i0000000001 2025-04-17T18:00:51.276899Z node 21 :PERSQUEUE 
DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T18:00:51.276935Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] =========================== 2025-04-17T18:00:51.277011Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:00:51.277109Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 1 offset 0 partNo 0 count 1 size 157 2025-04-17T18:00:51.401833Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 1 offset 0 count 1 size 157 actorID [21:7494343622492354324:2488] 2025-04-17T18:00:51.401985Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:00:51.402042Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-04-17T18:00:51.402107Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Answering for message sourceid: '\0base64:aa', Topic: 'rt3.dc1--topic1', Partition: 1, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-17T18:00:51.402331Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic 'rt3.dc1--topic1' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T18:00:51.402365Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic 'rt3.dc1--topic1' partition 1 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T18:00:51.402472Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 1 requestId: cookie: 1 2025-04-17T18:00:51.402625Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=1) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:00:51.403040Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 0 Topic 'rt3.dc1--topic1' partition 1 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-17T18:00:51.403073Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-17T18:00:51.403489Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 1 write_statistics { persist_duration_ms: 130 queued_in_partition_duration_ms: 3 } 2025-04-17T18:00:51.403119Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-17T18:00:51.403141Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:00:51.403545Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Write session: acknoledged message 1 2025-04-17T18:00:51.403217Z node 21 :PERSQUEUE DEBUG: Topic 'rt3.dc1--topic1' partition 1 user user readTimeStamp done, result 1744912851267 queuesize 0 startOffset 0 2025-04-17T18:00:51.403657Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 1 offset 0 partno 0 count 1 parts 0 size 157 2025-04-17T18:00:51.464159Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Write session will now close 2025-04-17T18:00:51.464249Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Write session: aborting 2025-04-17T18:00:51.464903Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:00:51.464963Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0] Write session: destroy 2025-04-17T18:00:51.469462Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0 grpc read done: success: 0 data: 2025-04-17T18:00:51.469498Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0 grpc read failed 2025-04-17T18:00:51.469542Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0 grpc closed 2025-04-17T18:00:51.469567Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|ee880738-8a5aa87-606cedbc-c428362d_0 is DEAD 2025-04-17T18:00:51.470578Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:00:51.471348Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [21:7494343631082289395:2554] destroyed 2025-04-17T18:00:51.471398Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::DropOwner. 2025-04-17T18:00:52.131824Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-04-17T18:00:52.166408Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:52.224310Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:52.301058Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:52.396559Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:52.523134Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:52.759934Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:53.372004Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:54.080317Z node 21 :KQP_COMPUTE WARN: TxId: 281474976715687, task: 1, CA Id [21:7494343635377256742:2564]. 
Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-17T18:00:54.156653Z node 21 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715688. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-17T18:00:54.156843Z node 21 :KQP_EXECUTER WARN: ActorId: [21:7494343639672224135:2566] TxId: 281474976715688. Ctx: { TraceId: 01js2ej9h74v9fj6vkhp46945n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=21&id=OGMwYTdlOTMtMjU4ZGMzZjMtYmQ0OTlkZmUtMjE2MDUwZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-17T18:00:54.157195Z node 21 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=21&id=OGMwYTdlOTMtMjU4ZGMzZjMtYmQ0OTlkZmUtMjE2MDUwZWM=, ActorId: [21:7494343635377256786:2566], ActorState: ExecuteState, TraceId: 01js2ej9h74v9fj6vkhp46945n, Create QueryResponse for error on request, msg: 2025-04-17T18:00:54.159384Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01js2ejayaa6n90a46hr7fm6mg" } } YdbStatus: UNAVAILABLE ConsumedRu: 950 } >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 9503, MsgBus: 23502 2025-04-17T18:00:35.570508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343563547980326:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:35.575194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a7d/r3tmp/tmpZ9afje/pdisk_1.dat 2025-04-17T18:00:36.244518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:36.244613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:36.269012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:36.269309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9503, node 1 2025-04-17T18:00:36.666909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:36.666940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:36.666948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:36.667062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23502 TClient is connected to server localhost:23502 WaitRootIsUp 'Root'... 
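The EvDeliveryProblem sequence above is a textbook capped exponential backoff: the resolver retries roughly 35 ms apart at first, stretching to ~700 ms before KQP_EXECUTER gives up with UNAVAILABLE after about two seconds. A minimal sketch of that pattern in plain Python, with illustrative names and constants chosen to echo the retry spacing in the log (not KQP's actual retry code):

import random
import time

def resolve_with_backoff(resolve, attempts=9, base_ms=35, cap_ms=1000):
    # Capped exponential backoff with full jitter. All parameters here are
    # assumptions made for illustration, not values taken from KQP.
    delay_ms = base_ms
    for _ in range(attempts):
        result = resolve()          # e.g. attempt to resolve the tablet
        if result is not None:
            return result
        time.sleep(random.uniform(0, delay_ms) / 1000.0)
        delay_ms = min(delay_ms * 2, cap_ms)
    raise RuntimeError("Failed to resolve tablet after several retries.")

With these constants the worst-case wait budget is a few seconds, which roughly matches the ~2 s window between the first delivery problem and the UNAVAILABLE response above.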
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:37.699847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:37.727750Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:37.759009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:37.996971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:38.247090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:38.392713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:40.570305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343563547980326:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:40.570382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:40.606558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343585022818433:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:40.606735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:41.168228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:41.217512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:41.264862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:41.305037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:41.347522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:41.430712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:41.543004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:41.654593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343589317786403:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:41.654663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:41.654913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343589317786408:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:41.659353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:41.678213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343589317786410:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:41.738833Z node 1 :TX_PROXY ERROR: Actor# [1:7494343589317786465:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:44.062745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 14318, MsgBus: 29293 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a7d/r3tmp/tmpCl73X9/pdisk_1.dat 2025-04-17T18:00:46.235860Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:46.425620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:46.434392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:46.435057Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:46.463555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14318, node 2 2025-04-17T18:00:46.558422Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:46.558448Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:46.558455Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:46.558570Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29293 TClient is connected to server localhost:29293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:47.053389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.062884Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:47.082581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.149368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
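Warning 1102 above fires when an integral literal lands in a Timestamp position; the KqpTypes run later in this log shows the same construct becoming a hard error under SQL v1 ("Unsafe timestamp cast restricted from SQL v1"). A hedged YQL repro, with the table and column names invented for illustration:

-- Implicit integer -> Timestamp conversion: warning 1102 here, a compile
-- error under SQL v1. Table `t` and its columns are hypothetical.
UPSERT INTO t (k, ts) VALUES (1u, 1744912851);

-- Preferred: build the value with an explicit date/time type instead
-- (YDB Timestamp counts microseconds since the epoch).
UPSERT INTO t (k, ts) VALUES (1u, CAST(1744912851000000ul AS Timestamp));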
2025-04-17T18:00:47.372170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.546383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:51.789828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343633167701649:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:51.790019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:51.826443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:51.870919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:51.919274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:51.954125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.011496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.068480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.149706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.262561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343637462669608:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:52.262643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:52.263031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343637462669613:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:52.266584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:52.281196Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:00:52.283739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343637462669615:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:52.385161Z node 2 :TX_PROXY ERROR: Actor# [2:7494343637462669681:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:53.545520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:00:53.652633Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343641757637401:2527], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2025-04-17T18:00:53.654761Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTY2MDU1ZWUtOTBmNDc5ZTMtZDJjYWMyM2UtZGM1YjNiZWI=, ActorId: [2:7494343641757637322:2516], ActorState: ExecuteState, TraceId: 01js2ejak63hh3wnfcj7txgh2s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. Trying to start YDB, gRPC: 4991, MsgBus: 13893 2025-04-17T18:00:55.713569Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343649551274457:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:55.827019Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a7d/r3tmp/tmppMXZV1/pdisk_1.dat 2025-04-17T18:00:56.137622Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:56.224011Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:56.224106Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:56.235449Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4991, node 3 2025-04-17T18:00:56.384175Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:56.384196Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:56.384202Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:56.384344Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13893 TClient is connected to server localhost:13893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:57.344559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:00.594776Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343649551274457:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:00.594850Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:01.848679Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343675321078742:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.848775Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.913441Z node 3 :TX_PROXY ERROR: Actor# [3:7494343675321078765:2315] txid# 281474976710658, issues: { message: "Type \'Datetime64\' specified for column \'DatetimePK\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> ShowCreateView::FromTable 2025-04-17 18:00:45,324 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 18:00:45,729 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 62474 46.4M 46.1M 23.3M test_tool run_ut @/home/runner/.ya/build/build_root/6873/00055a/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args 62814 1.8G 1.7G 1.5G └─ ydb-core-sys_view-ut --trace-path-append /home/runner/.ya/build/build_root/6873/00055a/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/chunk0/ytest.report.tr Test command err: 2025-04-17T17:50:46.963692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494341033123960324:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:46.963865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00055a/r3tmp/tmpCK87GG/pdisk_1.dat 2025-04-17T17:50:47.306052Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:47.317928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:47.318025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:47.329048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12252, node 1 2025-04-17T17:50:47.418959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:50:47.419019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:50:47.419029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:50:47.419169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2264 WaitRootIsUp 'Root'... 
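The TX_PROXY rejection above is the expected outcome of KqpTypes::Time64Columns-EnableTableDatetime64: with the flag off, any schema using the wide date/time types is refused at proposal time. A hedged repro of such a schema; only the column name DatetimePK, the type Datetime64, and the flag name come from the log, the rest is illustrative:

-- Rejected with "support for new date/time 64 types is disabled
-- (EnableTableDatetime64 feature flag is off)" unless the flag is on.
CREATE TABLE t64 (
    DatetimePK Datetime64,
    PRIMARY KEY (DatetimePK)
);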
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:50:47.689982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:47.729044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:50:47.744104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T17:50:47.750675Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494341038607469777:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:50:47.750947Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2025-04-17T17:50:47.801394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:50:47.801487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:50:47.820246Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:50:47.824945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:50:47.904570Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7494341038607469841:2196], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-17T17:50:47.906268Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-17T17:50:47.906326Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-17T17:50:47.912395Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-17T17:50:47.912506Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:50:47.914193Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7494341038607469841:2196], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1 2025-04-17T17:50:47.915087Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7494341038607469841:2196], database# /Root/Database1, no sysview processor 2025-04-17T17:50:47.917733Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-17T17:50:47.917820Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-04-17T17:50:47.918552Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-17T17:50:47.918616Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-04-17T17:50:47.918654Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-04-17T17:50:47.918717Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-17T17:50:47.918748Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-04-17T17:50:47.918775Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-04-17T17:50:47.918803Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-04-17T17:50:47.918864Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-04-17T17:50:47.918901Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-04-17T17:50:47.918938Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-04-17T17:50:47.918973Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-04-17T17:50:47.919037Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-04-17T17:50:47.919070Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-04-17T17:50:47.919286Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2025-04-17T17:50:47.919341Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 
2025-04-17T17:50:47.919379Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-04-17T17:50:47.919421Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2025-04-17T17:50:47.919475Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2025-04-17T17:50:47.919516Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 20, result count# 0 2025-04-17T17:50:47.919549Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 21, result count# 0 2025-04-17T17:50:47.919642Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-04-17T17:50:47.000000Z 2025-04-17T17:50:47.938343Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2025-04-17T17:50:47.941262Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-04-17T17:50:47.941305Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2025-04-17T17:50:47.941773Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2025-04-17T17:50:47.000000Z, query count# 0 2025-04-17T17:50:47.941798Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2025-04-17T17:50:47.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.941816Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2025-04-17T17:50:47.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.941832Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2025-04-17T17:50:47.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.941941Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2025-04-17T17:50:47.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.941959Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.941974Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.943756Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.943789Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-04-17T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-17T17:50:47.943989Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1 2025-04-17T17:50:47.991249Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2025-04-17T17:50:47.991292Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-04-17T17:50:48.000404Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494341033123960475:2216], interval end# 2025-04-17T17:50:48.000000Z, event interval end# 2025-04-17T17:50:48.000000Z 2025-04-17T17:50:48.000446Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494341033123960475:2216], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:50:48.000474Z node 1 :SYSTEM_VIEWS 
DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494341033123960212:2070], interval end# 2025-04-17T17:50:48.000000Z, event interval end# 2025-04-17T17:50:48.000000Z 2025-04-17T17:50:48.000492Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494341033123960212:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:50:48.000738Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [3:7494341034312502343:2061], interval end# 2025-04-17T17:50:48.000000Z, event interval end# 2025-04-17T17:50:48.000000Z 2025-04-17T17:50:48.000759Z node 3 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [3:7494341034312502343:2061], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T17:50:48.000907Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [3:7494341038607469841:2196], interval end# 2025-04-17T17:50:48.000000Z, event interval end# 2025-04-17T17:50:48.000000Z 2025-04-17T17:50:48.000919Z node 3 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [3:7494341038607469841:2196], query logs count# 0, processor ids count# 1, processor id t ... Empty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:53.095993Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:55.708214Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7494343369373841334:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:55.708334Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:59.033575Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7494343403733580904:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:59.053128Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:59.070659Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7494343408028548212:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:59.391615Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T17:59:59.578524Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [19:7494343408028548214:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T17:59:59.791079Z node 19 :TX_PROXY ERROR: Actor# [19:7494343408028548307:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:04.331335Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-17T18:00:05.974226Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:00:05.974256Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:08.819194Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:09.756951Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480 2025-04-17T18:00:10.921595Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:00:11.816450Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:00:12.686480Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-17T18:00:13.765729Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:00:13.889502Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.183491Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715721:0, at schemeshard: 72057594046644480 2025-04-17T18:00:21.218218Z node 19 :KQP_SLOW_LOG WARN: TraceId: "01js2egn5n0mvzyz0rpkgmzj94", SessionId: ydb://session/3?node_id=19&id=YTg4NzEzMWItNWQyZGNiOGYtNzY0MTlhY2UtN2NkNzg5ZDE=, Slow query, duration: 22.213052s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", parameters: 0b 2025-04-17T18:00:27.052013Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-04-17T18:00:27.666494Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7494343528287637356:3257], owner: [19:7494343528287637340:3250], scan id: 0, table id: [1:0:0:show_create] 2025-04-17T18:00:27.674578Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7494343528287637356:3257] 2025-04-17T18:00:27.846276Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7494343528287637356:3257], row count: 1, finished: 1 2025-04-17T18:00:27.846355Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7494343528287637356:3257], owner: [19:7494343528287637340:3250], scan id: 0, table id: [1:0:0:show_create] 2025-04-17T18:00:32.029553Z node 24 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[24:7494343551035164114:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:32.041125Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00055a/r3tmp/tmpX3oY0H/pdisk_1.dat 2025-04-17T18:00:32.309918Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:32.366539Z node 24 :HIVE WARN: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:32.366698Z node 24 :HIVE WARN: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:32.370312Z node 24 :HIVE WARN: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13997, node 24 2025-04-17T18:00:32.507007Z node 24 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:32.507039Z node 24 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:32.507053Z node 24 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:32.507251Z node 24 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1641 TClient is connected to server localhost:1641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:33.861176Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
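For readability, the statement behind the 22.2-second KQP_SLOW_LOG entry above is the following DDL batch, unescaped verbatim from the log text:

UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);
UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);
CREATE EXTERNAL DATA SOURCE `tier1` WITH (
    SOURCE_TYPE = "ObjectStorage",
    LOCATION = "http://fake.fake/olap-tier1",
    AUTH_METHOD = "AWS",
    AWS_ACCESS_KEY_ID_SECRET_NAME = "accessKey",
    AWS_SECRET_ACCESS_KEY_SECRET_NAME = "secretKey",
    AWS_REGION = "ru-central1"
);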
2025-04-17T18:00:37.018394Z node 24 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[24:7494343551035164114:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:37.018513Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; GRpc shutdown warning: left infly: 1, spent: 3.386495 sec 2025-04-17T18:00:44.163607Z node 24 :KQP_PROXY ERROR: TraceId: "01js2ehqcy5kp5p3jpv0awyksk", Request deadline has expired for 5.227375s seconds Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/00055a/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/00055a/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWrite [GOOD] Test command err: Trying to start YDB, gRPC: 27029, MsgBus: 9341 2025-04-17T18:00:18.982331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343491070684015:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:18.983365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a9b/r3tmp/tmpOUCD7P/pdisk_1.dat 2025-04-17T18:00:19.652265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:19.652399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:19.675824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:19.706911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27029, node 1 2025-04-17T18:00:19.914799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:19.914825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:19.914838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
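The yatest timeout traceback above reduces to a bounded wait on the test binary. The same behaviour using only the Python standard library (yatest.common.process wraps its own variant of this; the binary path below is illustrative):

import subprocess

try:
    # 600-second budget, mirroring the run_ut invocation in the traceback.
    subprocess.run(["./ydb-core-sys_view-ut"], timeout=600, check=False)
except subprocess.TimeoutExpired as err:
    # Raised once the budget is exhausted, analogous to yatest's
    # ExecutionTimeoutError above.
    print(f"stopped by {err.timeout} seconds timeout: {err.cmd}")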
2025-04-17T18:00:19.914985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9341 TClient is connected to server localhost:9341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:20.624696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:20.640525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:20.656660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:20.821368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:21.074727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:21.176582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.491097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343512545522283:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:23.491211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:23.823836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:23.904397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:23.969485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:23.990347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343491070684015:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:23.990467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:24.026424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.111041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.187877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.268592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.355526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343516840490258:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.355645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.356221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343516840490263:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.360997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:24.416150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343516840490265:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:24.502325Z node 1 :TX_PROXY ERROR: Actor# [1:7494343516840490326:3566] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1894, MsgBus: 62594 2025-04-17T18:00:31.161630Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343543852128106:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:31.161664Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a9b/r3tmp/tmps0EQdh/pdisk_1.dat 2025-04-17T18:00:31.437460Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:31.453692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:31.453767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:31.457977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1894, node 2 2025-04-17T18:00:31.604101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:31.604119Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:31.604127Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:31.604228Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62594 TClient is connected to server localhost:62594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:32.221050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:32.232640Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
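The pool-creation warnings above trace a create-on-first-use race: several sessions observe NOT_FOUND for the default resource pool, one TPoolCreatorActor creates it, schedules a retry until its transaction's result is visible ("doublechecking"), and the eventual "path exist, request accepts it" message is treated as success rather than failure. A minimal sketch of that idempotent-create pattern, with hypothetical pool_exists/create_pool callables standing in for the actual C++ actors:

    import time

    class AlreadyExists(Exception):
        """A concurrent creator won the race."""

    class NotVisibleYet(Exception):
        """Create committed but the path is not readable yet."""

    def ensure_default_pool(pool_exists, create_pool, retries=5, backoff_s=0.1):
        # Create-on-first-use: racing callers may all observe NOT_FOUND; only
        # one create succeeds, the rest treat "already exists" as success, and
        # a just-committed create is re-checked ("doublechecking" in the log).
        for attempt in range(1, retries + 1):
            if pool_exists():
                return
            try:
                create_pool()
                return
            except AlreadyExists:
                return  # another racer created it first; nothing left to do
            except NotVisibleYet:
                time.sleep(backoff_s * attempt)  # retry the existence check
        raise RuntimeError(f"default pool still missing after {retries} attempts")

This is why the "path exist, request accepts it" line appears at TX_PROXY ERROR severity yet the test proceeds normally: the caller takes it as the success branch of the race.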
2025-04-17T18:00:32.251544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:00:32.330553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2 ... jVhYTY=, ActorId: [3:7494343636142285816:2660], ActorState: ExecuteState, TraceId: 01js2ej9w6d2xwkmj83h7516xe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:00:52.910816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.991177Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494343636142285870:4083], for# user0@builtin, access# DescribeSchema 2025-04-17T18:00:52.991211Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494343636142285870:4083], for# user0@builtin, access# DescribeSchema 2025-04-17T18:00:52.992838Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494343636142285867:2681], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:00:52.994472Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTk2MWJjNjctNjZmMDY4Mi1lNmZmZjM2My01NGFhNjFhNg==, ActorId: [3:7494343636142285863:2679], ActorState: ExecuteState, TraceId: 01js2ej9zg5qzyqzfzhd93g3rq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:00:53.066688Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494343640437253185:4088], for# user0@builtin, access# DescribeSchema 2025-04-17T18:00:53.066719Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494343640437253185:4088], for# user0@builtin, access# DescribeSchema 2025-04-17T18:00:53.072910Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494343640437253182:2691], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:00:53.074468Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmZkNzg0ZmItMTUzOWM5MTctZWVhNjNlMjAtNDE5YjEyODE=, ActorId: [3:7494343640437253178:2689], ActorState: ExecuteState, TraceId: 01js2eja181trtza6w9dgkkq67, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:00:53.164927Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494343640437253204:4093], for# user0@builtin, access# DescribeSchema 2025-04-17T18:00:53.164972Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7494343640437253204:4093], for# user0@builtin, access# DescribeSchema 2025-04-17T18:00:53.167967Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494343640437253201:2701], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:00:53.169616Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGQ3NTc5MzAtNmZmYWYxN2MtNzlmNzY2YjctNWY3MjJhMmQ=, ActorId: [3:7494343640437253197:2699], ActorState: ExecuteState, TraceId: 01js2eja4javrrdqc7wy7eg4rr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 19425, MsgBus: 14970 2025-04-17T18:00:54.523184Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343646348303714:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:54.523223Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a9b/r3tmp/tmp0X9p2V/pdisk_1.dat 2025-04-17T18:00:55.036243Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:55.069773Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:55.069868Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:55.079549Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19425, node 4 2025-04-17T18:00:55.258800Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:55.258827Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:55.258835Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:55.258952Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14970 TClient is connected to server localhost:14970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:55.918594Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:55.926570Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:00:55.940803Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:56.079959Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:56.334928Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:56.459493Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:59.523616Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343646348303714:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:59.523683Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:59.698338Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343667823141956:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:59.698439Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:59.939360Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.073081Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.199148Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.333444Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.433085Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.561477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.677051Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.898956Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343672118109947:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:00.899065Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:00.899388Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343672118109953:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:00.904635Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:00.933954Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343672118109955:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:01.001286Z node 4 :TX_PROXY ERROR: Actor# [4:7494343672118110010:3569] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut >> KqpLimits::StreamWrite+Allowed |83.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut >> TPQCompatTest::BadTopics [GOOD] >> TPQCompatTest::CommitOffsets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] Test command err: 2025-04-17T17:58:57.933351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343142336827547:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:57.933393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:58:58.004098Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343144455982804:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:58.004144Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001579/r3tmp/tmpYZhr08/pdisk_1.dat 2025-04-17T17:58:58.335894Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:58:58.339502Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:58:58.784789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:58.784917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:58.787938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:58.788007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:58.792790Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:58:58.792961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:58:58.793966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27951, node 1 2025-04-17T17:58:58.858960Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:59.086423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:58:59.126976Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:58:59.230779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001579/r3tmp/yandexk70yLO.tmp 2025-04-17T17:58:59.230822Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001579/r3tmp/yandexk70yLO.tmp 2025-04-17T17:58:59.230972Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001579/r3tmp/yandexk70yLO.tmp 2025-04-17T17:58:59.231091Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:58:59.281640Z INFO: TTestServer started on Port 7721 GrpcPort 27951 TClient is connected to server localhost:7721 PQClient connected to localhost:27951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:59.609282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:58:59.695881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:59:02.937039Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343142336827547:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:02.937141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:02.962585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343161635852375:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:02.962707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343161635852393:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:02.962907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:02.993010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T17:59:03.004506Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343144455982804:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:03.004588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:03.078545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343161635852398:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T17:59:03.159768Z node 2 :TX_PROXY ERROR: Actor# [2:7494343165930819723:2181] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:03.548908Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343168106632509:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:59:03.551088Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmE1MTkwYjktY2E2ZmViZjktYjYzMjZlZS1jZDM1Mzk2ZQ==, ActorId: [1:7494343168106632482:2338], ActorState: ExecuteState, TraceId: 01js2eeyzs0g0rqdkmv3yqq6rx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:59:03.553367Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:59:03.552589Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343165930819730:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:59:03.556015Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWFmM2M0YjYtOWIzZWMxZWUtYTVmMDQyMy1jNDQyZWI1Nw==, ActorId: [2:7494343161635852357:2311], ActorState: ExecuteState, TraceId: 01js2eeygp7h4x56jxxysan8pg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:59:03.556458Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:59:03.606650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.722128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:03.880274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:59:04.226773Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2eezmhd0qayzhyemk9v4f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE2NjgzZjgtN2MxY2ZlNzQtZjBiZGY5MzItNjEyYTIyODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494343172401600277:3106] === CheckClustersList. Ok 2025-04-17T17:59:10.739091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:1, at schemeshard: 72057594046644480 2025-04-17T17:59:11.599064Z node 1 :FLAT_TX_SCHEMESH ... 
e: ESchemeOpCreateTable, opId: 281474976720688:0, at schemeshard: 72057594046644480 2025-04-17T18:00:53.408732Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976720694:0, at schemeshard: 72057594046644480 2025-04-17T18:00:55.427577Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976720700:0, at schemeshard: 72057594046644480 2025-04-17T18:00:56.803916Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720703:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3059208687299993691, "Root", "00595F536F757263655F37", 1744912858158, 1744912858158, 0, 13); 2025-04-17T18:00:58.383041Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976720711. Ctx: { TraceId: 01js2ejf368bv0589ca0dktxjb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YjA4ZDNkYjgtNTIzMTQ2OGUtZjc2YTA1MTMtYWEwNmYwMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:00:58.429738Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T18:00:58.429780Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:00:58.429796Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:00:58.429826Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-04-17T18:00:58.429999Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7494343659769889780:4222], Recipient [9:7494343616820215424:3395]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7494343659769889779:4222] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-17T18:00:58.430114Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7494343659769889779:4222], Recipient [9:7494343616820215424:3395]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "Y_Source_7" 2025-04-17T18:00:58.430232Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender 
[9:7494343616820215424:3395], Recipient [9:7494343659769889779:4222]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-17T18:00:58.430276Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) InitTable: SourceId=Y_Source_7 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-17T18:00:58.430369Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7494343659769889779:4222], Recipient [9:7494343616820215424:3395]: NActors::TEvents::TEvPoison 2025-04-17T18:00:58.437418Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7494343539510801866:2064], Recipient [9:7494343659769889779:4222]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-17T18:00:58.437484Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T18:00:58.446096Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7494343539510801962:2157], Recipient [9:7494343659769889779:4222]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=9&id=ODhjNTQ1MTAtZjM0NjE0ZmItNWQ4M2ZmYjUtN2IyNWI0MGY=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-17T18:00:58.446147Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Select from the table 2025-04-17T18:00:58.912072Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7494343539510801962:2157], Recipient [9:7494343659769889779:4222]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ODhjNTQ1MTAtZjM0NjE0ZmItNWQ4M2ZmYjUtN2IyNWI0MGY=" PreparedQuery: "1b6849e4-b1d2c92a-4ffff4c-8088c01a" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01js2ejfra10bgv0xpehearpp3" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744912858158 } items { uint64_value: 1744912858158 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 260 2025-04-17T18:00:58.912283Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-04-17T18:00:58.912324Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOldSeqNo 2025-04-17T18:00:58.912481Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7494343659769889838:4222], Recipient [9:7494343616820215423:3394]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1000 Status: OK ServerId: [9:7494343659769889779:4222] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-17T18:00:58.912592Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 
271187968, Sender [9:7494343659769889779:4222], Recipient [9:7494343616820215423:3394]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000Y_Source_7" } PipeClient { RawX1: 7494343659769889838 RawX2: 38654709886 } } 2025-04-17T18:00:58.912638Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) OnPartitionChosen 2025-04-17T18:00:58.912720Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7494343659769889779:4222], Recipient [9:7494343616820215423:3394]: NActors::TEvents::TEvPoison 2025-04-17T18:00:58.912772Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7494343659769889839:4222], Recipient [9:7494343616820215425:3396]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1002 Status: OK ServerId: [9:7494343659769889779:4222] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-17T18:00:58.912825Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7494343659769889779:4222], Recipient [9:7494343616820215425:3396]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 2 2025-04-17T18:00:58.912892Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [9:7494343616820215425:3396], Recipient [9:7494343659769889779:4222]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-17T18:00:58.912939Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Update the table 2025-04-17T18:00:58.913165Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7494343659769889779:4222], Recipient [9:7494343616820215425:3396]: NActors::TEvents::TEvPoison Received TEvChooseResult: 2 Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3059208687299993691 AND Topic = "Root" AND ProducerId = "00595F536F757263655F37" 2025-04-17T18:00:59.132622Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7494343539510801962:2157], Recipient [9:7494343659769889779:4222]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ODhjNTQ1MTAtZjM0NjE0ZmItNWQ4M2ZmYjUtN2IyNWI0MGY=" PreparedQuery: "193aaca2-b60b085a-49f06905-e6f038a8" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 121 2025-04-17T18:00:59.132677Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T18:00:59.132722Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] (SourceId=Y_Source_7, PreferedPartition=(NULL)) ReplyResult: Partition=2, SeqNo=157 2025-04-17T18:00:59.132744Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7494343659769889779:4222] 
(SourceId=Y_Source_7, PreferedPartition=(NULL)) Start idle 2025-04-17T18:00:59.523344Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976720715. Ctx: { TraceId: 01js2ejg24escdfyry938tpp53, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ODBlZjkxMTUtMmM1ZDc2ZjctNmFmMTk1MzgtNWZjZjk5NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:01.279525Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7494343672654791951:2776] TxId: 281474976720718. Ctx: { TraceId: 01js2ejhm79a4r7dj6eg7y4bdc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YzdkMzNiYTUtYWUyNDljYjktNThmODJmYTctNGI3MmU0Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-04-17T18:01:01.279702Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7494343672654791955:2776], TxId: 281474976720718, task: 2. Ctx: { TraceId : 01js2ejhm79a4r7dj6eg7y4bdc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=9&id=YzdkMzNiYTUtYWUyNDljYjktNThmODJmYTctNGI3MmU0Y2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7494343672654791951:2776], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQSmallRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:57:48.526696Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:57:48.530018Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:57:48.530319Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-17T17:57:48.530367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:57:48.530404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-17T17:57:48.530455Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:57:48.530492Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:48.530564Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-04-17T17:57:48.545928Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:179:2194], now have 1 active actors on pipe 2025-04-17T17:57:48.546087Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:57:48.569931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-17T17:57:48.585635Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" 
Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-17T17:57:48.594315Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:48.603220Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-17T17:57:48.603368Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:57:48.603881Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:57:48.604312Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-17T17:57:48.606891Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-04-17T17:57:48.606997Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2199] 2025-04-17T17:57:48.607066Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:57:48.616056Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:57:48.616234Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-17T17:57:48.616281Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:57:48.616320Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-04-17T17:57:48.616344Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-04-17T17:57:48.616520Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:57:48.616562Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:57:48.616601Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:57:48.616682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:57:48.616721Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:57:48.616745Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:57:48.616766Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser1 2025-04-17T17:57:48.616785Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser1 2025-04-17T17:57:48.616817Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:57:48.616858Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:57:48.616961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:57:48.617004Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:57:48.617182Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T17:57:48.634532Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T17:57:48.635117Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-04-17T17:57:48.637926Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-04-17T17:57:48.638059Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-17T17:57:48.638107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-17T17:57:48.646997Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-04-17T17:57:48.647545Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-04-17T17:57:48.648189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-04-17T17:57:48.648775Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-04-17T17:57:48.648877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-04-17T17:57:48.648918Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-04-17T17:57:48.649088Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-04-17T17:57:48.650963Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-17T17:57:48.651041Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-04-17T17:57:48.651602Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2208], now have 1 active actors on pipe 2025-04-17T17:57:48.651724Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-17T17:57:48.651769Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-17T17:57:48.651884Z node 1 :PERSQUEUE INFO: new Cookie default|c31c9ad7-4ce97fbb-1f2476b0-5c4efcbb_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:57:48.652033Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T17:57:48.665561Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T17:57:48.666109Z node 1 :PERSQUEUE DEBUG: [ ... 
6 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.399693Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.504939Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:999:2995], now have 1 active actors on pipe 2025-04-17T18:01:00.506999Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.558054Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.651613Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1002:2998], now have 1 active actors on pipe 2025-04-17T18:01:00.659132Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.698861Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.794016Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1005:3001], now have 1 active actors on pipe 2025-04-17T18:01:00.796396Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.836914Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:00.976662Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1008:3004], now have 1 active actors on pipe 2025-04-17T18:01:00.978777Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.017104Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.062750Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1011:3007], now have 1 active actors on pipe 2025-04-17T18:01:01.064847Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.138855Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.234789Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1014:3010], now have 1 active actors on pipe 2025-04-17T18:01:01.236921Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.292211Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.429563Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1017:3013], now have 1 active actors on pipe 2025-04-17T18:01:01.436188Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.551229Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.659711Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1020:3016], now have 1 active actors on pipe 2025-04-17T18:01:01.662004Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 
2025-04-17T18:01:01.706461Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.756133Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1023:3019], now have 1 active actors on pipe 2025-04-17T18:01:01.758171Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.781994Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.825013Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1026:3022], now have 1 active actors on pipe 2025-04-17T18:01:01.827570Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.847896Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.907692Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1029:3025], now have 1 active actors on pipe 2025-04-17T18:01:01.909688Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:01.929364Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:02.024429Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1032:3028], now have 1 active actors on pipe 2025-04-17T18:01:02.027189Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:02.056160Z node 71 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:02.099495Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [71:1035:3031], now have 1 active actors on pipe 2025-04-17T18:01:02.101545Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:02.117565Z node 71 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:01:02.160898Z node 71 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [71:1038:3034] connected; active server actors: 1 >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpParams::RowsList >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::QueryExplain >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> KqpParams::BadParameterType >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::Decimal+QueryService-UseSink >> KqpExplain::ExplainStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 6030, MsgBus: 30836 2025-04-17T18:00:33.637541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343554394805940:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:33.638886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a83/r3tmp/tmppYFKv0/pdisk_1.dat 2025-04-17T18:00:34.551976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:34.606015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:34.606152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:34.652519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6030, node 1 2025-04-17T18:00:34.774872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T18:00:34.774905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:34.774919Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:34.775046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30836 TClient is connected to server localhost:30836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:35.874459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:35.941036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:36.237043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:36.570505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:36.726408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:38.622937Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343554394805940:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:38.623037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:38.996535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343575869644061:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.996639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:39.677477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:39.724925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:39.779173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:39.805876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:39.882779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:39.961116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:40.012914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:40.093669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343584459579327:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:40.093775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:40.094745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343584459579332:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:40.099548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:40.115011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343584459579334:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:40.217301Z node 1 :TX_PROXY ERROR: Actor# [1:7494343584459579391:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15926, MsgBus: 20192 2025-04-17T18:00:43.087110Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343598305033324:2222];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a83/r3tmp/tmpgY4NTJ/pdisk_1.dat 2025-04-17T18:00:43.141436Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:00:43.324240Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:43.329135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:43.329218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:43.330813Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15926, node 2 2025-04-17T18:00:43.414880Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:43.414898Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:43.414904Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:43.415018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20192 TClient is connected to server localhost:20192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:44.212201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:44.228595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:44.338366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:44.626716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:44.700977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... ], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:48.647803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:48.648131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343619779872090:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:48.652110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:48.663463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343619779872092:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:48.738711Z node 2 :TX_PROXY ERROR: Actor# [2:7494343619779872148:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21217, MsgBus: 13733 2025-04-17T18:00:54.888558Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:00:54.888826Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:54.888984Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a83/r3tmp/tmpGJ2feK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21217, node 3 2025-04-17T18:00:55.675596Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:55.676812Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:55.676878Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:55.676925Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:55.677437Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:00:55.716245Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:55.716426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:55.728229Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13733 TClient is connected to server localhost:13733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:56.073232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:56.218024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:56.602816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:57.634978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:58.117020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:59.242913Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1807:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:59.243176Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:59.277406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:59.537330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:59.852538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.180148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.531624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.906840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:01.268771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:01.681638Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2559:3976], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.681789Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.682141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2564:3981], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.690206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:01.900401Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2566:3983], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:01.970661Z node 3 :TX_PROXY ERROR: Actor# [3:2635:4033] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:03.512917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.985984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.452445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:01:06.683905Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3497:4721], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZDExNzIyZjktMTYxODcxYmUtZGY2Nzg1NjEtZDg2YzRkNTc=. TraceId : 01js2ejnnz5d87p8gpr2b9jr5p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-04-17T18:01:06.715657Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3497:4721], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZDExNzIyZjktMTYxODcxYmUtZGY2Nzg1NjEtZDg2YzRkNTc=. TraceId : 01js2ejnnz5d87p8gpr2b9jr5p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-04-17T18:01:06.716940Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3498:4722], TxId: 281474976715675, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2ejnnz5d87p8gpr2b9jr5p. SessionId : ydb://session/3?node_id=3&id=ZDExNzIyZjktMTYxODcxYmUtZGY2Nzg1NjEtZDg2YzRkNTc=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:3491:4256], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:06.717697Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDExNzIyZjktMTYxODcxYmUtZGY2Nzg1NjEtZDg2YzRkNTc=, ActorId: [3:2916:4256], ActorState: ExecuteState, TraceId: 01js2ejnnz5d87p8gpr2b9jr5p, Create QueryResponse for error on request, msg: |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> KqpExplain::MultiJoinCteLinks [GOOD] >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> KqpExplain::FewEffects+UseSink >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD] >> TTopicYqlTest::CreateAndAlterTopicYql >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::PureExpr >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] Test command err: Trying to start YDB, gRPC: 10093, MsgBus: 11993 2025-04-17T18:00:46.063611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343611276358731:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:46.064044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a72/r3tmp/tmppxJjux/pdisk_1.dat 2025-04-17T18:00:46.477819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:46.486413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:46.486512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:46.492056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10093, node 1 2025-04-17T18:00:46.591889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:46.591909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:46.591919Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:46.592017Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11993 TClient is connected to server localhost:11993 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:47.236912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.266342Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:47.285391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.461788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.634936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.729446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:50.487077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343628456229701:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:50.487185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:50.750856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:50.809058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:50.862253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:50.945811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:51.003266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:51.055555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:51.067115Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343611276358731:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:51.067168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:51.132409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:51.275061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343632751197672:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:51.275139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:51.275369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343632751197677:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:51.359449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:51.389717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343632751197679:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:51.503287Z node 1 :TX_PROXY ERROR: Actor# [1:7494343632751197746:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:54.288447Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343645636100074:2526], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmM2ODFiNjktZDA1OWI1NzEtMjA4M2JiNjAtZGYzNzljOWY=. TraceId : 01js2ejamc3nq0jf7gsyr980v8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-04-17T18:00:54.289229Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343645636100075:2527], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmM2ODFiNjktZDA1OWI1NzEtMjA4M2JiNjAtZGYzNzljOWY=. CustomerSuppliedId : . TraceId : 01js2ejamc3nq0jf7gsyr980v8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494343645636100070:2518], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:54.289708Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmM2ODFiNjktZDA1OWI1NzEtMjA4M2JiNjAtZGYzNzljOWY=, ActorId: [1:7494343641341132691:2518], ActorState: ExecuteState, TraceId: 01js2ejamc3nq0jf7gsyr980v8, Create QueryResponse for error on request, msg:
: Error: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 12072, MsgBus: 25747 2025-04-17T18:00:55.749265Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343647697490207:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:55.749312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a72/r3tmp/tmpIxBXPO/pdisk_1.dat 2025-04-17T18:00:55.948451Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:55.992092Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:55.992177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:55.993563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12072, node 2 2025-04-17T18:00:56.092490Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:56.092545Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:56.092556Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:56.092686Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25747 TClient is connected to server localhost:25747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion ... 
path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:00.771910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.818493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.904481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.997828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:01.103116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:01.196425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343673467296395:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.196521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.196816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343673467296400:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:01.200966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:01.213849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343673467296402:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:01.306098Z node 2 :TX_PROXY ERROR: Actor# [2:7494343673467296457:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16310, MsgBus: 13632 2025-04-17T18:01:04.214701Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343688112278114:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:04.234899Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a72/r3tmp/tmp6x6KCt/pdisk_1.dat 2025-04-17T18:01:04.410552Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:04.455127Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:04.455208Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:04.460610Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16310, node 3 2025-04-17T18:01:04.578769Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:04.578795Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:04.578803Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:04.578916Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13632 TClient is connected to server localhost:13632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:05.168656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:05.186696Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:01:05.197081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:05.329505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:05.599509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:05.721902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:08.155741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343705292149065:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.155843Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.246676Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.321745Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.371755Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.430731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.556961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.655235Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.735774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.876103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343705292149734:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.876199Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.876687Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343705292149740:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.881885Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:08.925360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343705292149742:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:09.017507Z node 3 :TX_PROXY ERROR: Actor# [3:7494343709587117099:3555] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:09.244053Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343688112278114:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:09.244177Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:10.821873Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494343713882084749:2521], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2025-04-17T18:01:10.822208Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzY3MDIyMmQtYzg4YmY4YWMtNTBhZTVmZTEtYWFiYmUyOGQ=, ActorId: [3:7494343713882084741:2516], ActorState: ExecuteState, TraceId: 01js2ejvc727y09a1978pxez04, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 3657, MsgBus: 23605 2025-04-17T18:00:30.031979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343543025670050:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:30.032303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a8c/r3tmp/tmpIUIkUH/pdisk_1.dat 2025-04-17T18:00:30.818293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:30.821322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:30.821408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:30.836348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3657, node 1 2025-04-17T18:00:31.119468Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:31.119489Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:31.119503Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:31.119605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23605 TClient is connected to server localhost:23605 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:00:31.995434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:32.100618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:32.373345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:32.683282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:32.833831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:35.021781Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343543025670050:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:35.021874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:36.551451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343568795475462:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:36.551568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.283481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.349573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.424349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.508326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.601812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.723442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.832173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.952273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343573090443449:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.952359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.952731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343573090443454:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.956534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:37.969111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343573090443456:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:38.061424Z node 1 :TX_PROXY ERROR: Actor# [1:7494343577385410810:3571] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 30765, MsgBus: 14238 2025-04-17T18:00:42.056724Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343593817136052:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:42.082840Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a8c/r3tmp/tmpA6uIie/pdisk_1.dat 2025-04-17T18:00:42.296573Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:42.340956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:42.341040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-17T18:00:42.342439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30765, node 2 2025-04-17T18:00:42.568733Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:42.568759Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:42.568766Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:42.568873Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to se ... f_id=[4:7494343680058537976:2152];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a8c/r3tmp/tmpmBvXHh/pdisk_1.dat 2025-04-17T18:01:02.390085Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:01:02.535930Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:02.545293Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:02.545402Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:02.547264Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11323, node 4 2025-04-17T18:01:02.666819Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:02.666846Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:02.666857Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:02.667013Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22352 TClient is connected to server localhost:22352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:03.362269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:03.389638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:03.561356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:03.936102Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:04.178129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:07.203493Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343701533376123:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:07.203603Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:07.244911Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343680058537976:2152];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:07.244998Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:07.280945Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.364724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.431599Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.524540Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.584136Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.692275Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.870872Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.988540Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343701533376803:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:07.988659Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:07.989057Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343701533376808:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:07.994244Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:08.009168Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T18:01:08.012161Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343701533376810:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:08.089070Z node 4 :TX_PROXY ERROR: Actor# [4:7494343705828344163:3561] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"No estimate","Table":"EightShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Data","Key","Text"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 
+∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 29515, MsgBus: 27644 2025-04-17T18:00:43.808265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343598579377248:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:43.808475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a78/r3tmp/tmpebYuyE/pdisk_1.dat 2025-04-17T18:00:44.380043Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29515, node 1 2025-04-17T18:00:44.408010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:44.408102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:44.409575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:44.480637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:44.480654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:44.480667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:44.480785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27644 TClient is connected to server localhost:27644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:00:44.999850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:45.019048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:45.045339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.563342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343615759247414:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:47.563491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:47.569984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343615759247426:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:47.576297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:00:47.598441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343615759247428:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:00:47.710345Z node 1 :TX_PROXY ERROR: Actor# [1:7494343615759247479:2607] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:48.449838Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-04-17T18:00:48.449872Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-04-17T18:00:48.468804Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343620054214821:2370], TxId: 281474976710661, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2ej4p75av0mkgrfme74yq0. SessionId : ydb://session/3?node_id=1&id=ZjEwYWQxYzgtYzY3Y2NlOGUtMzRhNmM4MzMtMjA2NmQ5MmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-gkdwnlcfnq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-17T18:00:48.442655Z }, code: 2029 }. 2025-04-17T18:00:48.471990Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710661, task: 1. Output channel actor is unavailable 2025-04-17T18:00:48.472137Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343620054214820:2369], TxId: 281474976710661, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjEwYWQxYzgtYzY3Y2NlOGUtMzRhNmM4MzMtMjA2NmQ5MmE=. TraceId : 01js2ej4p75av0mkgrfme74yq0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494343620054214808:2353], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:48.474019Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEwYWQxYzgtYzY3Y2NlOGUtMzRhNmM4MzMtMjA2NmQ5MmE=, ActorId: [1:7494343615759247387:2353], ActorState: ExecuteState, TraceId: 01js2ej4p75av0mkgrfme74yq0, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-gkdwnlcfnq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-17T18:00:48.442655Z } , code: 2029 Trying to start YDB, gRPC: 21560, MsgBus: 9421 2025-04-17T18:00:49.615842Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343622700360449:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:49.616389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a78/r3tmp/tmpOgBgSJ/pdisk_1.dat 2025-04-17T18:00:49.845957Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:49.849795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:49.849884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:49.855624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21560, node 2 2025-04-17T18:00:49.918298Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:49.918316Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:49.918322Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:49.918427Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9421 TClient is connected to server localhost:9421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:50.416057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:50.448772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:53.047096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343639880230543:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:53.047234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343639880230551:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:53.047330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:53.059223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... e.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:10.750049Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343714460832368:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:10.750300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:10.750867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343714460832403:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:10.755024Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:01:10.779934Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343714460832405:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:01:10.865732Z node 4 :TX_PROXY ERROR: Actor# [4:7494343714460832457:2617] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:10.984296Z node 4 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-04-17T18:01:10.984322Z node 4 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-04-17T18:01:10.984651Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343714460832513:2374], TxId: 281474976710661, task: 2. Ctx: { TraceId : 01js2ejpaj7f0nqb0v6vrqasse. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTRhYTY2M2ItOTk1ZDYyZjMtODAwZjA3OWEtNGI3NjJhYjQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-gkdwnlcfnq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-17T18:01:10.982484Z }, code: 2029 }. 2025-04-17T18:01:10.985083Z node 4 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=3;memory=1048576; 2025-04-17T18:01:10.985096Z node 4 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 3. [Mem] memory 1048576 NOT granted 2025-04-17T18:01:10.985385Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343714460832514:2375], TxId: 281474976710661, task: 3. Ctx: { TraceId : 01js2ejpaj7f0nqb0v6vrqasse. SessionId : ydb://session/3?node_id=4&id=MTRhYTY2M2ItOTk1ZDYyZjMtODAwZjA3OWEtNGI3NjJhYjQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 3: 10, host: ghrun-gkdwnlcfnq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 40B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 4, started at: 2025-04-17T18:01:10.982484Z }, code: 2029 }. 2025-04-17T18:01:10.985712Z node 4 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=4;memory=1048576; 2025-04-17T18:01:10.985721Z node 4 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 4. [Mem] memory 1048576 NOT granted 2025-04-17T18:01:10.985972Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343714460832515:2376], TxId: 281474976710661, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01js2ejpaj7f0nqb0v6vrqasse. SessionId : ydb://session/3?node_id=4&id=MTRhYTY2M2ItOTk1ZDYyZjMtODAwZjA3OWEtNGI3NjJhYjQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 4: 10, host: ghrun-gkdwnlcfnq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 30B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 3, started at: 2025-04-17T18:01:10.982484Z }, code: 2029 }. 2025-04-17T18:01:10.986291Z node 4 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=5;memory=1048576; 2025-04-17T18:01:10.986302Z node 4 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 5. [Mem] memory 1048576 NOT granted 2025-04-17T18:01:10.986596Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343714460832516:2377], TxId: 281474976710661, task: 5. Ctx: { TraceId : 01js2ejpaj7f0nqb0v6vrqasse. SessionId : ydb://session/3?node_id=4&id=MTRhYTY2M2ItOTk1ZDYyZjMtODAwZjA3OWEtNGI3NjJhYjQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 5: 10, host: ghrun-gkdwnlcfnq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-17T18:01:10.982484Z }, code: 2029 }. 2025-04-17T18:01:10.987445Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494343714460832512:2373], TxId: 281474976710661, task: 1. Ctx: { TraceId : 01js2ejpaj7f0nqb0v6vrqasse. SessionId : ydb://session/3?node_id=4&id=MTRhYTY2M2ItOTk1ZDYyZjMtODAwZjA3OWEtNGI3NjJhYjQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494343714460832488:2358], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:10.989475Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTRhYTY2M2ItOTk1ZDYyZjMtODAwZjA3OWEtNGI3NjJhYjQ=, ActorId: [4:7494343714460832364:2358], ActorState: ExecuteState, TraceId: 01js2ejpaj7f0nqb0v6vrqasse, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-gkdwnlcfnq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-17T18:01:10.982484Z } , code: 2029 query_phases { duration_us: 8724 table_access { name: "/Root/LargeTable" partitions_count: 1 } cpu_time_us: 5278 affected_shards: 8 } compilation { duration_us: 108650 cpu_time_us: 103118 } process_cpu_time_us: 344 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Data\",\"DataText\",\"Key\",\"KeyText\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":1048616,\"Max\":1048586,\"Min\":10},\"Tasks\":4,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/LargeTable\"}],\"BaseTimeMs\":1744912870983,\"CpuTimeUs\":{\"Count\":4,\"Sum\":742,\"Max\":671,\"Min\":20},\"Ingress\":[{\"Pop\":{},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":3367,\"Max\":3367,\"Min\":3367}}}],\"UpdateTimeMs\":4}}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":10,\"Max\":10,\"Min\":10},\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"BaseTimeMs\":1744912870983,\"CpuTimeUs\":{\"Count\":1,\"Sum\":17,\"Max\":17,\"Min\":17},\"UpdateTimeMs\":3}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":108650,\"CpuTimeUs\":103118},\"ProcessCpuTimeUs\":344,\"TotalDurationUs\":245044,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":122734},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Data\",\"DataText\",\"Key\",\"KeyText\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/LargeTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 \'(\'\"Data\" \'\"DataText\" \'\"Key\" \'\"KeyText\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'() (Void) 
\'()))\n(let $4 (lambda \'($10) $10))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) $4 \'(\'(\'\"_logical_id\" \'354) \'(\'\"_id\" \'\"dce6a1d8-4282b2ef-94dc2b50-c7113fd0\"))))\n(let $6 (DqCnUnionAll (TDqOutput $5 \'\"0\")))\n(let $7 (DqPhyStage \'($6) $4 \'(\'(\'\"_logical_id\" \'377) \'(\'\"_id\" \'\"34f626a7-fb4e7033-e4763d09-2090f36f\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'()))\n(let $9 (OptionalType (DataType \'String)))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"Data\" (OptionalType (DataType \'Int64))) \'(\'\"DataText\" $9) \'(\'\"Key\" (OptionalType (DataType \'Uint64))) \'(\'\"KeyText\" $9))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 245044 total_cpu_time_us: 108740 >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] Test command err: Trying to start YDB, gRPC: 32029, MsgBus: 15962 2025-04-17T18:00:31.610351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343547340947590:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:31.617730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a87/r3tmp/tmpT0FrRT/pdisk_1.dat 2025-04-17T18:00:32.211819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:32.236769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:32.236872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:32.243472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32029, node 1 2025-04-17T18:00:32.545415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:32.545441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:32.545448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:32.545540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15962 TClient is connected to server localhost:15962 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:33.704883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:33.758430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.223400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.425931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.533948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:36.595044Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343547340947590:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:36.595114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:37.412134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343573110752909:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.412222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.744477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.822941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.859841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.904132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.954838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.017286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.080136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.194612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343577405720878:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.194684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.195123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343577405720883:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.199150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:38.222553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343577405720885:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:38.291147Z node 1 :TX_PROXY ERROR: Actor# [1:7494343577405720939:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:40.719515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15582, MsgBus: 10601 2025-04-17T18:00:42.273855Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343593864909090:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:42.273989Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a87/r3tmp/tmpoEnpTu/pdisk_1.dat 2025-04-17T18:00:42.765514Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:42.823996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:42.824071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:42.840444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15582, node 2 2025-04-17T18:00:43.054552Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:43.054573Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:43.054579Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:43.054677Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10601 TClient is connected to server localhost:10601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:44.164435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:44.176117Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:44.191119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:44.366283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2814749 ... ion { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:53.679584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:53.690855Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:53.704533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:53.815336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:54.154726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:54.310437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:56.835881Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343631983202311:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:56.835969Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:59.726370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343666342942352:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:59.726491Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:59.829593Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:59.904453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:59.966224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.064991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.130530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.241163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.368787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:00.550890Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343670637910329:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:00.551012Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:00.551689Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343670637910334:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:00.557226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:00.572692Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343670637910337:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:00.649915Z node 3 :TX_PROXY ERROR: Actor# [3:7494343670637910393:3573] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10240, MsgBus: 11104 2025-04-17T18:01:03.344172Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343682940724303:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:03.344444Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a87/r3tmp/tmpguFjkK/pdisk_1.dat 2025-04-17T18:01:03.626210Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:03.692933Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:03.693033Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:03.695109Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10240, node 4 2025-04-17T18:01:03.862244Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:03.862265Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:03.862273Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:03.862408Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11104 TClient is connected to server localhost:11104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:04.685099Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:04.702573Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:01:04.720516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.749024Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.353123Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343682940724303:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:08.353204Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:09.795020Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343708710528771:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:09.795108Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:09.802356Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343708710528783:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:09.809922Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:01:09.855300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343708710528785:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:01:09.921361Z node 4 :TX_PROXY ERROR: Actor# [4:7494343708710528836:2358] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:09.973089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:10.632293Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> KqpParams::RowsList [GOOD] >> KqpParams::MissingParameter >> KqpStats::MultiTxStatsFullExpYql >> KqpQuery::QueryExplain [GOOD] >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> KqpExplain::SqlIn >> KqpStats::MultiTxStatsFullYql |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] Test command err: 2025-04-17T17:57:33.626411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342783005454912:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:33.626466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:33.870601Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342780601290000:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:33.870658Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005f9/r3tmp/tmpPgaXdz/pdisk_1.dat 2025-04-17T17:57:34.356892Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:34.362341Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:34.667797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:34.916035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:35.002834Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:35.025038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:35.025131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:35.027561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:35.027627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:35.031803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:35.034165Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:35.038646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30366, node 1 2025-04-17T17:57:35.303664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0005f9/r3tmp/yandexkx9qhl.tmp 2025-04-17T17:57:35.303684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0005f9/r3tmp/yandexkx9qhl.tmp 2025-04-17T17:57:35.303833Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0005f9/r3tmp/yandexkx9qhl.tmp 2025-04-17T17:57:35.303935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:35.398651Z INFO: TTestServer started on Port 4268 GrpcPort 30366 TClient is connected to server localhost:4268 PQClient connected to localhost:30366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:36.065241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:57:36.186901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-17T17:57:38.630648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342783005454912:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:38.630718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:38.864361Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342780601290000:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:38.864439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:39.798853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342808775259841:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:39.798994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:39.799278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342808775259853:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:39.803295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:57:39.848978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342808775259855:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:57:40.090435Z node 1 :TX_PROXY ERROR: Actor# [1:7494342808775259946:2794] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:40.129158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:40.283539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:40.304754Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342813070227266:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:40.306142Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGMzNGEzMjYtNmYzY2I5NDQtYmFiODYxNGMtMzFhM2NkMTM=, ActorId: [1:7494342808775259823:2341], ActorState: ExecuteState, TraceId: 01js2ecd8a875vg8kkvjbn0vsw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:40.308071Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:40.469856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:57:40.803431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2ece3x9g67s8tn7kh6znrt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjcyOGZmN2UtNTY5ZjhiOTEtYzZmYWYxNjItN2JkMDEyMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342813070227724:3140] === CheckClustersList. 
Ok 2025-04-17T17:57:46.010082Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDcxNDA1OTQtNzk5N2NmZjgtZWQ4MjNhZmYtNzNhOWQxYmU=, ActorId: [1:7494342834545064510:2445], ActorState: unknown state, TraceId: 01js2ecjx0759jyqtk45a3rcfs, Session actor destroyed 2025-04-17T17:57:46.010413Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=NDcxNDA1OTQtNzk5N2NmZjgtZWQ4MjNhZmYtNzNhOWQxYmU=, workerId: [1:7494342834545064510:2445], local sessions count: 0 2025-04-17T17:57:46.010524Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494342787300422499:2141], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:57:46.010705Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494342787300422499:2141], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-17T17:57:46.010772Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494342787300422499:2141], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494342787300422687:2264] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744912656151 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 720575940466 ... Seconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-17T18:01:11.569718Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2025-04-17T18:01:11.569843Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976715678 2025-04-17T18:01:11.569869Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678, NewState EXECUTED 2025-04-17T18:01:11.569898Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678 moved from EXECUTING to EXECUTED 2025-04-17T18:01:11.570466Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976715678] save tx TxId: 281474976715678 State: EXECUTED MinStep: 1744912871002 MaxStep: 18446744073709551615 Step: 1744912871513 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7494343646002242708 RawX2: 107374184663 } Partitions { Partition { PartitionId: 1 } } 2025-04-17T18:01:11.570749Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:01:11.572052Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:01:11.572101Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::CheckScaleStatus splitMergeAvgWriteBytes# 0 writeSpeedUsagePercent# 0 scaleThresholdSeconds# 300 totalPartitionWriteSpeed# 1048576 sourceIdCount=0 canSplit=0 Topic: "rt3.dc1--legacy--topic1". 
Partition: 1 2025-04-17T18:01:11.618812Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:01:11.618851Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-17T18:01:11.618871Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678, State EXECUTED 2025-04-17T18:01:11.618893Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678 State EXECUTED FrontTxId 281474976715678 2025-04-17T18:01:11.618918Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T18:01:11.618938Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678, NewState WAIT_RS_ACKS 2025-04-17T18:01:11.618956Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T18:01:11.618981Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976715678] PredicateAcks: 0/0 2025-04-17T18:01:11.618991Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T18:01:11.619006Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976715678] PredicateAcks: 0/0 2025-04-17T18:01:11.619026Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715678 to the list for deletion 2025-04-17T18:01:11.619049Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678, NewState DELETING 2025-04-17T18:01:11.619075Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715678 2025-04-17T18:01:11.619134Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:01:11.635055Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:01:11.635090Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-17T18:01:11.635109Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715678, State DELETING 2025-04-17T18:01:11.635131Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715678 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715678 CreateStep: 1744912871513 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 10... 
(TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 2025-04-17T18:01:12.297500Z node 25 :KQP_EXECUTER ERROR: ActorId: [25:7494343723311656159:2517] TxId: 281474976715680. Ctx: { TraceId: 01js2ejwtq58k3bkdg1fs8z1xx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=ZThhYzEyNjAtNTFkNGYyYjktNGE5MmMxYi1mMmI5MTgzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-04-17T18:01:12.297723Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7494343723311656165:2517], TxId: 281474976715680, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2ejwtq58k3bkdg1fs8z1xx. SessionId : ydb://session/3?node_id=25&id=ZThhYzEyNjAtNTFkNGYyYjktNGE5MmMxYi1mMmI5MTgzNQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [25:7494343723311656159:2517], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T18:01:13.296930Z node 25 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=25&id=ZThhYzEyNjAtNTFkNGYyYjktNGE5MmMxYi1mMmI5MTgzNQ==, ActorId: [25:7494343723311656147:2517], ActorState: ExecuteState, TraceId: 01js2ejwtq58k3bkdg1fs8z1xx, Create QueryResponse for error on request, msg: 2025-04-17T18:01:13.302354Z node 25 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 26" severity: 1 } } TxMeta { id: "01js2ejwty98fwc1jcy4128q06" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryExplain [GOOD] Test command err: Trying to start YDB, gRPC: 27711, MsgBus: 61089 2025-04-17T18:00:29.525831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343537342602748:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:29.526296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a95/r3tmp/tmpAP34Ec/pdisk_1.dat 2025-04-17T18:00:30.207776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:30.208464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:30.208555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:30.215542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27711, node 1 2025-04-17T18:00:30.410674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:30.410695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:30.410701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:30.410794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61089 TClient is connected to server localhost:61089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:00:31.487365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.540546Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:31.567190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.868511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:00:32.256651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:00:32.403820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.526328Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343537342602748:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:34.526405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:35.048042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343563112408295:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:35.048130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:35.514843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.570959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.650078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.740877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.800528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.902059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:35.990089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:36.106635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343567407376275:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:36.106706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:36.106912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343567407376280:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:36.118939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:36.149416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343567407376282:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:36.222108Z node 1 :TX_PROXY ERROR: Actor# [1:7494343567407376337:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:37.812784Z node 1 :GRPC_SERVER DEBUG: [0x51b000310780] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=OWZlNTdlMDktMTIwMDQ5ZmYtYTY5Nzg1MzItOWRiMDNhNjg=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:43322 2025-04-17T18:00:37.812859Z node 1 :GRPC_SERVER DEBUG: [0x51b000220b80] created request Name# ExecuteDataQuery 2025-04-17T18:00:37.813006Z node 1 :GRPC_SERVER DEBUG: [0x51b000310780] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=OWZlNTdlMDktMTIwMDQ5ZmYtYTY5Nzg1MzItOWRiMDNhNjg=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:43322 database# /Root 2025-04-17T18:00:37.813346Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01js2ehv5n7fnk86d16qxwabh1, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:43322, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.987859s
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27711 2025-04-17T18:00:40.802665Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494343575997311351:2520] TxId: 281474976710672. Ctx: { TraceId: 01js2ehv5n7fnk86d16qxwabh1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWZlNTdlMDktMTIwMDQ5ZmYtYTY5Nzg1MzItOWRiMDNhNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:00:40.803309Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343575997311358:2531], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OWZlNTdlMDktMTIwMDQ5ZmYtYTY5Nzg1MzItOWRiMDNhNjg=. TraceId : 01js2ehv5n7fnk86d16qxwabh1. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494343575997311351:2520], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:40.803754Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343575997311359:2532], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01js2ehv5n7fnk86d16qxwabh1. SessionId : ydb://session/3?node_id=1&id=OWZlNTdlMDktMTIwMDQ5ZmYtYTY5Nzg1MzItOWRiMDNhNjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494343575997311351:2520], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:40.804179Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWZlNTdlMDktMTIwMDQ5ZmYtYTY5Nzg1MzItOWRiMDNhNjg=, ActorId: [1:7494343571702343990:2520], ActorState: ExecuteState, TraceId: 01js2ehv5n7fnk86d16qxwabh1, Create QueryResponse for error on request, msg: 2025-04-17T18:00:40.812371Z node 1 :GRPC_SERVER DEBUG: [0x51b000220b80] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=OWZlNTdlMDktMTIwMDQ5ZmYtYTY5Nzg1MzItOWRiMDNhNjg=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:43328 2025-04-17T18:00:40.812441Z node 1 :GRPC_SERVER DEBU ... blechecking } 2025-04-17T18:01:03.204831Z node 3 :TX_PROXY ERROR: Actor# [3:7494343684526837755:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8198, MsgBus: 1299 2025-04-17T18:01:06.893701Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343696086380070:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:06.893746Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a95/r3tmp/tmp0ugRUb/pdisk_1.dat 2025-04-17T18:01:07.247380Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:07.307275Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:07.307374Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:07.319970Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8198, node 4 2025-04-17T18:01:07.530735Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:07.530761Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:07.530770Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:07.530912Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1299 TClient is connected to server localhost:1299 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:08.254044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:08.270563Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:08.281944Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:08.505852Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:08.781812Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:08.930760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.897666Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343696086380070:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:11.897747Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:12.866570Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343721856185635:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:12.866677Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:13.020904Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.118146Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.169948Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.220609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.266142Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.342923Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.425121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.523961Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343726151153605:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:13.524059Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:13.524137Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343726151153610:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:13.530077Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:13.548152Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343726151153612:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:13.637469Z node 4 :TX_PROXY ERROR: Actor# [4:7494343726151153667:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '710) '('"_id" '"26b709e7-f566cc74-3182d410-e99d4f1a") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '723) '('"_id" '"349e234d-89075a3f-f321b8f3-45da773f")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node 
Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::ReadsetCountLimit >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::StatsProfile |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire |83.5%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |83.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage >> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRanges >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:58:36.815743Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:36.815843Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:58:36.863363Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:36.881757Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-04-17T17:58:36.882809Z node 1 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:58:36.885632Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:58:36.904506Z node 1 :PERSQUEUE INFO: new Cookie default|35382fa6-bd786e1e-e8439c86-9bef09f1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:58:46.022582Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:46.022681Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-04-17T17:58:46.053829Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:46.054539Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "important_user" Generation: 2 Important: true } 2025-04-17T17:58:46.055177Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-04-17T17:58:46.057789Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-04-17T17:58:46.083314Z node 2 :PERSQUEUE INFO: new Cookie default|5c816c58-577d79e2-36a02161-ba981a25_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { 
ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:58:55.088972Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:55.089043Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-17T17:58:55.106914Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:58:55.107457Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "important_user" Generation: 3 Important: true } 2025-04-17T17:58:55.107974Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-17T17:58:55.110149Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-17T17:58:55.119778Z node 3 :PERSQUEUE INFO: new Cookie default|7764ea74-782a5b99-8845076c-295cb3b0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-17T17:59:03.866119Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:03.866233Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] 
sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-17T17:59:03.889880Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:03.890742Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 4 ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } Consumers { Name: "important_user" Generation: 4 Important: true } 2025-04-17T17:59:03.891413Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:186:2199] 2025-04-17T17:59:03.894561Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:186:2199] 2025-04-17T17:59:03.909636Z node 4 :PERSQUEUE INFO: new Cookie default|97cd6a6a-5427d6fc-6d5fde84-5c41e176_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [4:202:2212] 2025-04-17T17:59:03.956562Z node 4 :PERSQUEUE INFO: new Cookie default|e053a7ad-ca964a47-2af072f3-376de18d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [4:202:2212] 2025-04-17T17:59:09.767877Z node 4 :PERSQUEUE INFO: new Cookie default|d9d7a94-e1e48ac5-94f50f6c-90e2c972_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [4:202:2212] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101 ... gConfig 2025-04-17T18:01:13.712240Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:01:13.713052Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [19:294:2285] 2025-04-17T18:01:13.750976Z node 19 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:01:13.751120Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [19:294:2285] 2025-04-17T18:01:13.797010Z node 19 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [19:245:2244] sender: [19:317:2057] recipient: [19:14:2061] 2025-04-17T18:01:13.807584Z node 19 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:13.812059Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1001 actor [19:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1001 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1000 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1000 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:108:2057] recipient: [20:101:2135] 2025-04-17T18:01:15.199760Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:15.199867Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927938 is [20:153:2174] sender: [20:154:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:179:2057] recipient: [20:14:2061] 2025-04-17T18:01:15.262842Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:15.263714Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1002 actor [20:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 Important: false } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2025-04-17T18:01:15.264555Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:185:2198] 2025-04-17T18:01:15.272642Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [20:185:2198] 2025-04-17T18:01:15.289698Z node 20 :PERSQUEUE INFO: new Cookie default|5651aa58-b90ecbc9-f401ba7c-71c581ca_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:01:17.184723Z node 20 
:PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-17T18:01:17.263298Z node 20 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:240:2057] recipient: [20:99:2134] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:243:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:244:2057] recipient: [20:242:2243] Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:246:2057] recipient: [20:242:2243] 2025-04-17T18:01:17.318280Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:17.318364Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:01:17.319064Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:294:2285] 2025-04-17T18:01:17.358447Z node 20 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:01:17.358587Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [20:294:2285] 2025-04-17T18:01:17.397495Z node 20 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:317:2057] recipient: [20:14:2061] 2025-04-17T18:01:17.406814Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:17.411351Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1003 actor [20:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:108:2057] recipient: [21:101:2135] 2025-04-17T18:01:18.451257Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:18.451352Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927938 is [21:153:2174] sender: [21:154:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:179:2057] recipient: [21:14:2061] 2025-04-17T18:01:18.510586Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:18.511284Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1004 actor [21:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 Important: false } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2025-04-17T18:01:18.511922Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:185:2198] 2025-04-17T18:01:18.523321Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [21:185:2198] 2025-04-17T18:01:18.550704Z node 21 :PERSQUEUE INFO: new Cookie default|24dfa100-2dec9639-21bfb1-352f383f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:01:20.120079Z node 21 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-17T18:01:20.216084Z node 21 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:240:2057] recipient: [21:99:2134] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:243:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:244:2057] recipient: [21:242:2243] Leader for TabletID 72057594037927937 is [21:245:2244] sender: [21:246:2057] recipient: [21:242:2243] 2025-04-17T18:01:20.256584Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:20.256650Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:01:20.257165Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:294:2285] 2025-04-17T18:01:20.287011Z node 21 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:01:20.287114Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [21:294:2285] 2025-04-17T18:01:20.349794Z node 21 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [21:245:2244] sender: [21:317:2057] recipient: [21:14:2061] 2025-04-17T18:01:20.368268Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:20.375589Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1005 actor [21:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } >> KqpLimits::DatashardReplySize [GOOD] >> KqpLimits::ManyPartitions >> KqpParams::MissingParameter [GOOD] >> KqpParams::ParameterTypes >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> TPersQueueTest::Delete [GOOD] >> TPersQueueTest::FetchRequest >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::ExplainDataQuery >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2025-04-17T17:59:40.422351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343327938355552:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:40.422820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cc/r3tmp/tmpAOuiAN/pdisk_1.dat 2025-04-17T17:59:41.354686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:41.479784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:41.479891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:41.481213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:41.487561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61685 WaitRootIsUp 'dc-1'... 
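For context on the TModifyUserTest::ModifyUserIsEnabled output that follows: the test creates user1 and user2 on /dc-1 and then checks that a schema-modifying request issued as user2 is rejected ("Access denied for user2 on path /dc-1, with access AlterSchema" further down in this log). A rough client-side equivalent through the public SDK, as a sketch only; the auth token and password are hypothetical placeholders, and the actual test drives the TX proxy actors directly rather than going through the SDK:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

int main() {
    auto driver = NYdb::TDriver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:61685")   // port taken from the log line above
        .SetDatabase("/dc-1")
        .SetAuthToken("user2-token"));    // hypothetical credential acting as user2
    NYdb::NTable::TTableClient client(driver);

    auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) {
        // ALTER USER is a schema operation, so it needs AlterSchema rights on /dc-1
        return session.ExecuteSchemeQuery(
            "ALTER USER user1 PASSWORD 'passwd';").GetValueSync();
    });

    // Expected outcome per the log: the request fails with "Access denied for user2"
    driver.Stop(true);
    return status.IsSuccess() ? 1 : 0;    // rejection is the success condition here
}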
TClient::Ls request: dc-1 2025-04-17T17:59:42.014836Z node 1 :TX_PROXY DEBUG: actor# [1:7494343327938355646:2097] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:42.014901Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343336528290550:2266] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:42.015060Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343327938355679:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:42.015144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343332233323227:2245][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343327938355679:2115], cookie# 1 2025-04-17T17:59:42.016946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343332233323232:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343332233323229:2245], cookie# 1 2025-04-17T17:59:42.017001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343332233323237:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343332233323230:2245], cookie# 1 2025-04-17T17:59:42.017019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343332233323238:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343332233323231:2245], cookie# 1 2025-04-17T17:59:42.017074Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343327938355365:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343332233323232:2245], cookie# 1 2025-04-17T17:59:42.017100Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343327938355368:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343332233323237:2245], cookie# 1 2025-04-17T17:59:42.017115Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343327938355371:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343332233323238:2245], cookie# 1 2025-04-17T17:59:42.017138Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343332233323232:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343327938355365:2049], cookie# 1 2025-04-17T17:59:42.017151Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343332233323237:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343327938355368:2052], cookie# 1 2025-04-17T17:59:42.017163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343332233323238:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343327938355371:2055], cookie# 1 2025-04-17T17:59:42.017192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343332233323227:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343332233323229:2245], cookie# 1 2025-04-17T17:59:42.017214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343332233323227:2245][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:42.017227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343332233323227:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7494343332233323230:2245], cookie# 1 2025-04-17T17:59:42.017262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343332233323227:2245][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:42.017284Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343332233323227:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343332233323231:2245], cookie# 1 2025-04-17T17:59:42.017312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343332233323227:2245][/dc-1] Unexpected sync response: sender# [1:7494343332233323231:2245], cookie# 1 2025-04-17T17:59:42.017364Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343327938355679:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:42.033448Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343327938355679:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343332233323227:2245] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:42.042370Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343327938355679:2115], cacheItem# { Subscriber: { Subscriber: [1:7494343332233323227:2245] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T17:59:42.044860Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343336528290551:2267], recipient# [1:7494343336528290550:2266], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:42.044932Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343336528290550:2266] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:42.178569Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343336528290550:2266] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T17:59:42.181584Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343336528290550:2266] Handle TEvDescribeSchemeResult Forward to# [1:7494343336528290549:2265] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T17:59:42.255078Z node 1 :TX_PROXY DEBUG: actor# [1:7494343327938355646:2097] Handle TEvProposeTransaction 2025-04-17T17:59:42.255111Z node 1 :TX_PROXY DEBUG: actor# [1:7494343327938355646:2097] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T17:59:42.255229Z node 1 :TX_PROXY DEBUG: actor# [1:7494343327938355646:2097] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494343336528290557:2272] 2025-04-17T17:59:42.482476Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343327938355679:2115], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:42.482563Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494343327938355679:2115], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-04-17T17:59:42.482734Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343336528290559:2273][/dc-1/.metadata/initialization/migrations] Handle NKikimr::T ... 
es# 0 2025-04-17T17:59:53.872702Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110109:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7494343380394110111:2260], cookie# 10 2025-04-17T17:59:53.872717Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110109:2260][/dc-1] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:53.872735Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110109:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7494343380394110112:2260], cookie# 10 2025-04-17T17:59:53.872746Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110109:2260][/dc-1] Unexpected sync response: sender# [3:7494343380394110112:2260], cookie# 10 2025-04-17T17:59:53.872780Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494343376099142611:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:53.872865Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494343376099142611:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7494343380394110109:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744912793554 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:53.872934Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343376099142611:2137], cacheItem# { Subscriber: { Subscriber: [3:7494343380394110109:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744912793554 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-04-17T17:59:53.873059Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343380394110197:2332], recipient# [3:7494343380394110196:2331], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-04-17T17:59:53.873090Z node 3 :TX_PROXY DEBUG: Actor# [3:7494343380394110196:2331] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:53.873123Z node 3 :TX_PROXY ERROR: Actor# [3:7494343380394110196:2331] txid# 281474976710663, Access denied for user2 on path /dc-1, with 
access AlterSchema 2025-04-17T17:59:53.873193Z node 3 :TX_PROXY ERROR: Actor# [3:7494343380394110196:2331] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-17T17:59:53.873212Z node 3 :TX_PROXY DEBUG: Actor# [3:7494343380394110196:2331] txid# 281474976710663 SEND to# [3:7494343380394110195:2330] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T17:59:53.910610Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494343376099142611:2137], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:53.910676Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7494343376099142611:2137], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-04-17T17:59:53.910843Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110200:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T17:59:53.911295Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494343376099142232:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7494343380394110204:2334] 2025-04-17T17:59:53.911312Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494343376099142232:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-17T17:59:53.911368Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494343376099142232:2049] Subscribe: subscriber# [3:7494343380394110204:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:53.911399Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494343376099142235:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7494343380394110205:2334] 2025-04-17T17:59:53.911406Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494343376099142235:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-17T17:59:53.911424Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494343376099142235:2052] Subscribe: subscriber# [3:7494343380394110205:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:53.911446Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494343376099142238:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7494343380394110206:2334] 2025-04-17T17:59:53.911454Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494343376099142238:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-17T17:59:53.911471Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494343376099142238:2055] Subscribe: subscriber# [3:7494343380394110206:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T17:59:53.911508Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494343380394110204:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7494343376099142232:2049] 2025-04-17T17:59:53.911528Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494343380394110205:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7494343376099142235:2052] 2025-04-17T17:59:53.911545Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494343380394110206:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7494343376099142238:2055] 2025-04-17T17:59:53.911600Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110200:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7494343380394110201:2334] 2025-04-17T17:59:53.911641Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110200:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7494343380394110202:2334] 2025-04-17T17:59:53.911667Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7494343380394110200:2334][/dc-1/.metadata/initialization/migrations] Set up state: owner# [3:7494343376099142611:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:53.911687Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494343380394110200:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7494343380394110203:2334] 2025-04-17T17:59:53.911707Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7494343380394110200:2334][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [3:7494343376099142611:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T17:59:53.911732Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494343376099142232:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494343380394110204:2334] 2025-04-17T17:59:53.911746Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494343376099142235:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494343380394110205:2334] 2025-04-17T17:59:53.911758Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494343376099142238:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494343380394110206:2334] 2025-04-17T17:59:53.911796Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494343376099142611:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-04-17T17:59:53.911854Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494343376099142611:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494343380394110200:2334] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:53.911925Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494343376099142611:2137], cacheItem# { Subscriber: { Subscriber: [3:7494343380394110200:2334] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T17:59:53.912001Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494343380394110207:2335], recipient# [3:7494343380394110199:2297], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpExplain::SqlIn [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckCacheByAst >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> KqpStats::StatsProfile [GOOD] >> KqpStats::SelfJoin >> TPQCompatTest::CommitOffsets [GOOD] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestPQReadAhead >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpExplain::FullOuterJoin >> KqpLimits::ManyPartitions [GOOD] >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitions [GOOD] Test command err: Trying to start YDB, gRPC: 8818, MsgBus: 23533 2025-04-17T18:00:29.976184Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343538575109356:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:29.982077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a92/r3tmp/tmp2Lozd4/pdisk_1.dat 2025-04-17T18:00:31.019076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:31.037531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:31.043959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:31.044049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:31.055648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8818, node 1 2025-04-17T18:00:31.272980Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:31.273004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:31.273010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:31.273137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23533 TClient is connected to server localhost:23533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:32.618881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:32.651538Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:32.667287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:33.095592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:33.345898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:33.446542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.970972Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343538575109356:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:34.971033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:36.293692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343568639882062:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:36.293792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:36.788440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:36.838781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:36.891840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:36.930382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:36.973869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.051615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.121975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:37.230954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343572934850027:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.231028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.231200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343572934850032:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.235016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:37.247510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:00:37.247764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343572934850034:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:37.322338Z node 1 :TX_PROXY ERROR: Actor# [1:7494343572934850091:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:39.181768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:44.042880Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494343602999622534:2654] TxId: 281474976710673. Ctx: { TraceId: 01js2ej11q2gr1cj1dgdwedzjj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU4OGIyNjgtZjY0YTVhZGItZTZhZTBkMzYtNTgzNzYwNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2025-04-17T18:00:44.043127Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWU4OGIyNjgtZjY0YTVhZGItZTZhZTBkMzYtNTgzNzYwNjA=, ActorId: [1:7494343598704655222:2654], ActorState: ExecuteState, TraceId: 01js2ej11q2gr1cj1dgdwedzjj, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 11710, MsgBus: 25659 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a92/r3tmp/tmp8NFexZ/pdisk_1.dat 2025-04-17T18:00:45.766008Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343606434957135:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:45.770250Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:00:46.034139Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:46.076688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:46.076783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:46.079075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11710, node 2 2025-04-17T18:00:46.263023Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:46.263044Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:46.263051Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:46.263154Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25659 TClient is connected to server localhost:25659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 Ef ... Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:58.376991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:58.384497Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:58.397310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:58.569173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:59.025978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:59.181777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:01.601299Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343654817605437:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:01.619571Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:04.044738Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343689177345631:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:04.044817Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:04.178872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.279818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.335112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.428859Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.488730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.569961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.668816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:04.786391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343689177346315:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:04.786501Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:04.790724Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343689177346320:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:04.794828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:04.814330Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:01:04.818457Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343689177346322:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:04.890798Z node 3 :TX_PROXY ERROR: Actor# [3:7494343689177346379:3566] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:06.556100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.906646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:01:11.906672Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:20.892818Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDI5NzRiYTYtN2MwMmI5MjQtNTIyNWI1YjgtYmM5Nzk4YWU=, ActorId: [3:7494343749306890592:2807], ActorState: ExecuteState, TraceId: 01js2ek2k36eqhmzak3e59mtgx, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (200003953 > 50331648), code: 2013 Trying to start YDB, gRPC: 16820, MsgBus: 21170 2025-04-17T18:01:22.358385Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343766690532894:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:22.358444Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a92/r3tmp/tmpykXBFO/pdisk_1.dat 2025-04-17T18:01:22.613716Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:22.617236Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:22.617347Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:22.619828Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 4 2025-04-17T18:01:22.695945Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:22.695968Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:22.695976Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:22.696184Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:23.358532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:23.390567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:27.199062Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343788165374747:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.199153Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.205706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343788165374786:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.215789Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:01:27.245956Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343788165374788:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:01:27.325302Z node 4 :TX_PROXY ERROR: Actor# [4:7494343788165374844:5657] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:27.362593Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343766690532894:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:27.362729Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD]
Test command err:
Trying to start YDB, gRPC: 4771, MsgBus: 22513 2025-04-17T17:55:04.180333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342141862983284:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:04.180380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c4d/r3tmp/tmpBn0K1O/pdisk_1.dat 2025-04-17T17:55:04.581884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:04.591775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:04.591873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:04.593236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4771, node 1 2025-04-17T17:55:04.709777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:04.709808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:04.709822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:04.709954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22513 TClient is connected to server localhost:22513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:55:05.457106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.498150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.641499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.781324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:05.857041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:07.590659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342154747886945:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.590760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:07.898385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.937488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:07.975891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:08.022949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:08.070101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:08.107022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:08.174731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:08.245865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342159042854910:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:08.245963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:08.246353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342159042854915:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:08.250314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:08.262360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342159042854917:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:08.351111Z node 1 :TX_PROXY ERROR: Actor# [1:7494342159042854974:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:09.182323Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342141862983284:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:09.182406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:09.467357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T17:55:10.104935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2e7v4g454zpwb69vp3sh7s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyYWY2N2QtODMwYmQ2MTEtZjJhNjQ3NDMtYmVjMzY2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.116071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2e7v4g454zpwb69vp3sh7s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyYWY2N2QtODMwYmQ2MTEtZjJhNjQ3NDMtYmVjMzY2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.165394Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2e7v6e4pv6v6db0j883v7a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM5MjZmZWItZDRmNTM2NjMtYjMwOWJkODMtODZjY2RkOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.174289Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2e7v6e4pv6v6db0j883v7a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM5MjZmZWItZDRmNTM2NjMtYjMwOWJkODMtODZjY2RkOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.226544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01js2e7v8e76v24xjssxqvqb1d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyYWY2N2QtODMwYmQ2MTEtZjJhNjQ3NDMtYmVjMzY2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.233605Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2e7v8e76v24xjssxqvqb1d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyYWY2N2QtODMwYmQ2MTEtZjJhNjQ3NDMtYmVjMzY2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.271300Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2e7v9s4aqb4rt2pwemga4b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM5MjZmZWItZDRmNTM2NjMtYjMwOWJkODMtODZjY2RkOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.277508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. 
Ctx: { TraceId: 01js2e7v9s4aqb4rt2pwemga4b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM5MjZmZWItZDRmNTM2NjMtYjMwOWJkODMtODZjY2RkOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.312590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01js2e7vb4etyz8tz42kpatss4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyYWY2N2QtODMwYmQ2MTEtZjJhNjQ3NDMtYmVjMzY2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.317067Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01js2e7vb4etyz8tz42kpatss4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyYWY2N2QtODMwYmQ2MTEtZjJhNjQ3NDMtYmVjMzY2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T17:55:10.357681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01js2e7vch8rskf863wwdhwwds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM5MjZmZWItZDRmNTM2N ... : TxId: 281474976715636. Ctx: { TraceId: 01js2ek0y77v7wqbygjrvfyd3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.523147Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715637. Ctx: { TraceId: 01js2ek0y77v7wqbygjrvfyd3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.626095Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715638. Ctx: { TraceId: 01js2ek124as8fatdnpfmnkhn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.636794Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715639. Ctx: { TraceId: 01js2ek124as8fatdnpfmnkhn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.681558Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715640. Ctx: { TraceId: 01js2ek1404j67nzd7362ea43q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.691201Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715641. Ctx: { TraceId: 01js2ek1404j67nzd7362ea43q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.727220Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715642. Ctx: { TraceId: 01js2ek15j42t553wrnc218ggt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.733880Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715643. 
Ctx: { TraceId: 01js2ek15j42t553wrnc218ggt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.780613Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715644. Ctx: { TraceId: 01js2ek17626s2nf1pa6h13852, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.790145Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715645. Ctx: { TraceId: 01js2ek17626s2nf1pa6h13852, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.845968Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715646. Ctx: { TraceId: 01js2ek1943b4wmaj1tk8s0fq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.865630Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715647. Ctx: { TraceId: 01js2ek1943b4wmaj1tk8s0fq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.914525Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715648. Ctx: { TraceId: 01js2ek1ba5q63xtzfjrkzfhd0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.938973Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715649. Ctx: { TraceId: 01js2ek1ba5q63xtzfjrkzfhd0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:16.998544Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715650. Ctx: { TraceId: 01js2ek1dy73wxa8azzc0g224n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.005482Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715651. Ctx: { TraceId: 01js2ek1dy73wxa8azzc0g224n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.043730Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715652. Ctx: { TraceId: 01js2ek1fe4rm5x8z67qfd9shq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.050746Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715653. 
Ctx: { TraceId: 01js2ek1fe4rm5x8z67qfd9shq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.091342Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715654. Ctx: { TraceId: 01js2ek1gydextsjj5qywhdzfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.096445Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715655. Ctx: { TraceId: 01js2ek1gydextsjj5qywhdzfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.131572Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715656. Ctx: { TraceId: 01js2ek1j773n60dzs3gveqnzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.138457Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720657. Ctx: { TraceId: 01js2ek1j773n60dzs3gveqnzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.182759Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720658. Ctx: { TraceId: 01js2ek1kjexhv98vnwkcbzggf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.188049Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720659. Ctx: { TraceId: 01js2ek1kjexhv98vnwkcbzggf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.238114Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720660. Ctx: { TraceId: 01js2ek1nf52rs98a2n927vswr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.249622Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720661. Ctx: { TraceId: 01js2ek1nf52rs98a2n927vswr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.287825Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720662. Ctx: { TraceId: 01js2ek1q39x0v3d7dqq796hg2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.295317Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720663. 
Ctx: { TraceId: 01js2ek1q39x0v3d7dqq796hg2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.342910Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01js2ek1rk10e1knj5wq77hqmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.351179Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01js2ek1rk10e1knj5wq77hqmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.388842Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01js2ek1t7315vez2tqh726pa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.399246Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01js2ek1t7315vez2tqh726pa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.442286Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720668. Ctx: { TraceId: 01js2ek1vtd0a10cm5199gm669, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.451397Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720669. Ctx: { TraceId: 01js2ek1vtd0a10cm5199gm669, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhOThmY2EtNzI0MTZkYzgtZDZjOGFkM2EtMmVhMjk1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.487335Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720670. Ctx: { TraceId: 01js2ek1xa2xr17x1wcw71rfwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:17.495577Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720671. Ctx: { TraceId: 01js2ek1xa2xr17x1wcw71rfwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTA0ZjMxYTYtYWFlMGNkZTgtNDIwYzUyMTgtMjgwMjIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS
>> KqpParams::ParameterTypes [GOOD]
>> KqpQuery::CreateAsSelectBadTypes+IsOlap
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 12830, MsgBus: 7575 2025-04-17T18:00:45.682403Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343604745912692:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:45.683012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a75/r3tmp/tmp7s4TXQ/pdisk_1.dat 2025-04-17T18:00:46.414299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:46.414407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:46.441860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:46.453290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12830, node 1 2025-04-17T18:00:46.766649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:46.766673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:46.766680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:46.766788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7575 TClient is connected to server localhost:7575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:47.733793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:47.792677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:47.811432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:48.087253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:48.346029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:48.416069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:50.639008Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343604745912692:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:50.639082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:51.907184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343630515718124:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:51.907308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:52.546129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.623471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.677633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.722468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.801102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.898386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:52.981612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:53.110612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343639105653402:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:53.110710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:53.111131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343639105653407:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:53.115318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:53.143340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343639105653409:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:53.236713Z node 1 :TX_PROXY ERROR: Actor# [1:7494343639105653468:3570] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 65225, MsgBus: 28077 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a75/r3tmp/tmpjgGEWI/pdisk_1.dat 2025-04-17T18:00:57.950412Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:00:57.974881Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:58.066987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:58.067066Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:58.070979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65225, node 2 2025-04-17T18:00:58.265076Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:58.265097Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:58.265103Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:58.265206Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28077 TClient is connected to server localhost:28077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:59.303860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:59.327329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:59.417054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:59.671576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:59.807240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 0: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:01:19.050828Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWE3NDU2OGQtMzk0NDQ2N2YtNmNkNTMzYmYtYTA5MGVlOTY=, ActorId: [3:7494343753035028168:2577], ActorState: ExecuteState, TraceId: 01js2ek3dtdmkdwhrp3axmcq37, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 7969, MsgBus: 17055 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a75/r3tmp/tmpYrB1H0/pdisk_1.dat 2025-04-17T18:01:20.934405Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:20.946656Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:20.946748Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:20.951774Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:20.972258Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7969, node 4 2025-04-17T18:01:21.089209Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:21.089232Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:21.089241Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:21.089405Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17055 TClient is connected to server localhost:17055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:22.054457Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:22.063172Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:22.087015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:01:22.203315Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:22.406553Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:22.503713Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:25.136240Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343775634699046:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:25.136349Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:25.211434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:25.262526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:25.313283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:25.365594Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:25.448912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:25.518924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:25.626334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:25.861923Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343775634699721:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:25.862021Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:25.862394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343775634699726:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:25.866899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:25.880864Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:01:25.881143Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343775634699728:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:25.977172Z node 4 :TX_PROXY ERROR: Actor# [4:7494343775634699789:3560] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:27.312646Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:01:28.468516Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343788519602371:2565], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-04-17T18:01:28.470203Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTZkOGQyOS04YWUxZDM5LTY1NDI1Y2Y2LWY5YTBiNmRm, ActorId: [4:7494343788519602369:2564], ActorState: ExecuteState, TraceId: 01js2ekckv2kwe58ycjjwf7xsg, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:01:28.565194Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTJiMTBjNjQtN2JlMjdkNGItODkyNjI2NjktZDE0MzI5N2Y=, ActorId: [4:7494343788519602375:2567], ActorState: ExecuteState, TraceId: 01js2ekcn0cvy595tek8qe9bt9, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1320: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-04-17T18:01:28.656826Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343788519602391:2573], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:01:28.658432Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTNhNjVhZGEtYjE5ODlkMTctNGI1N2ViNzUtYTM1YWNkNDU=, ActorId: [4:7494343788519602389:2572], ActorState: ExecuteState, TraceId: 01js2ekcr57466jer1avycrtyc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:01:28.685837Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343788519602402:2578], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:01:28.686109Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTVhMTQ2NjEtNDZkNjAxMDgtMTQzOWY1ZGEtZmVkMWU5NGM=, ActorId: [4:7494343788519602400:2577], ActorState: ExecuteState, TraceId: 01js2ekcttd7bp0ke5jhvrvjm4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpQuery::TableSinkWithSubquery >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD] >> TPersQueueTest::PartitionsMapping >> TTopicYqlTest::CreateAndAlterTopicYql [GOOD] >> TTopicYqlTest::AlterAutopartitioning >> KqpStats::OneShardLocalExec-UseSink >> KqpStats::SelfJoin [GOOD] >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpStats::OneShardLocalExec+UseSink >> KqpExplain::ReadTableRangesFullScan [GOOD] >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |83.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> TPersQueueTest::SetMeteringMode [GOOD] >> TPersQueueTest::ReadWithoutConsumerFederation >> SelfHealActorTest::SingleErrorDisk >> BsControllerTest::DecommitRejected >> SelfHealActorTest::NoMoreThanOneReplicating >> BsControllerTest::TestLocalSelfHeal |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> SelfHealActorTest::SingleErrorDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: Trying to start YDB, gRPC: 11990, MsgBus: 11207 2025-04-17T18:01:07.941662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343702166394377:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:07.946048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a60/r3tmp/tmp3Adu04/pdisk_1.dat 2025-04-17T18:01:08.987561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:09.038285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:09.055364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:09.055457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:09.069333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11990, node 1 2025-04-17T18:01:09.366749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T18:01:09.366772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:09.366779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:09.366879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11207 TClient is connected to server localhost:11207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:10.584546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:10.632168Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:10.646273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.049074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.295989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.386073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:12.938492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343702166394377:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:12.938564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:14.403115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343732231167108:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:14.403224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:14.924410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:14.978064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.026418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.124775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.179391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.288393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.363062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.434352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343736526135082:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:15.434554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:15.438533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343736526135087:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:15.442753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:15.461561Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:01:15.462007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343736526135090:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:15.534504Z node 1 :TX_PROXY ERROR: Actor# [1:7494343736526135148:3568] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2947, MsgBus: 3709 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a60/r3tmp/tmpoDzlQF/pdisk_1.dat 2025-04-17T18:01:19.268019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:19.301767Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:19.304488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:19.304578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:19.327936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2947, node 2 2025-04-17T18:01:19.582667Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:19.582691Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:19.582697Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:19.582808Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3709 TClient is connected to server localhost:3709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:20.344888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:20.366965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:20.456762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 wait ... 
ullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":48,"Max":24,"Min":24}}],"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"FinishedTasks":2,"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576},"BaseTimeMs":1744912893475,"Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"ActiveMessageMs":{"Count":2,"Max":52,"Min":4},"FirstMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"FirstMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"ActiveMessageMs":{"Count":2,"Max":52,"Min":4},"PauseMessageMs":{"Count":2,"Sum":3,"Max":3,"Min":0},"WaitTimeUs":{"Count":2,"Sum":53262,"Max":52655,"Min":607},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"CpuTimeUs":{"Count":2,"Sum":853,"Max":646,"Min":207},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"ActiveMessageMs":{"Count":2,"Max":52,"Min":4},"FirstMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"FirstMessageMs":{"Count":2,"Sum":56,"Max":52,"Min":4},"ActiveMessageMs":{"Count":2,"Max":52,"Min":4},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48},"PauseMessageMs":{"Count":2,"Sum":3,"Max":3,"Min":0},"WaitTimeUs":{"Count":2,"Sum":53312,"Max":52656,"Min":656},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"StageDurationUs":49000,"ResultRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResultBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"UpdateTimeMs":53,"Tasks":2}}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":24,"Max":12,"Min":12}}],"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":1,"FinishedTasks":2,"InputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"DurationUs":{"Count":2,"Sum":9000,"Max":7000,"Min":2000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576},"BaseTimeMs":1744912893475,"Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":61,"Max":55,"Min":6},"ActiveMessageMs":{"Count":2,"Max":55,"Min":6},"FirstMessageMs":{"Count":2,"Sum":61,"Max":55,"Min":6},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12}},"Name":"6","Push":{"LastMessageMs":{"Count":2,"Sum":60,"Max":54,"Min":6},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":60,"Max":54,"Min":6},"FirstMessageMs":{"Count":2,"Sum":60,"Max":54,"Min":6},"ActiveMessageMs":{"Count":2,"Max":54,"Min":6},"PauseMessageMs":{"Count":2,"Sum":57,"Max":53,"Min":4},"WaitTimeUs":{"Count":2,"Sum":59525,"Max":53627,"Min":5898},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":54,"Min":4}}}],"CpuTimeUs":{"Count":2,"Sum":619,"Max":418,"Min":201},"StageDurationUs":51000,"WaitInputTimeUs":{"Count":2,"Sum":59305,"Max":53529,"Min":5776},"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"Input":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":57,"Max":53,"Min":4},"ActiveMessageMs":{"Count":2,"Max":53,"Min":4},"FirstMessageMs":{"Count":2,"Sum":57,"Max":53,"Min":4},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12}},"Name":"2","Push":{"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":57,"Max":53,"Min":4},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":57,"Max":53,"Min":4},"FirstMessageMs":{"Count":2,"Sum":57,"Max":53,"Min":4},"ActiveMessageMs":{"Count":2,"Max":53,"Min":4},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"PauseMessageMs":{"Count":2,"Sum":1,"Max":1,"Min":0},"WaitTimeUs":{"Count":2,"Sum":55587,"Max":51818,"Min":3769},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"UpdateTimeMs":55,"InputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Tasks":2}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"ActiveMessageMs":{"Count":1,"Max":55,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"ActiveTimeUs":{"Count":1,"Sum":44000,"Max":44000,"Min":44000}},"Name":"8","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"ActiveMessageMs":{"Count":1,"Max":55,"Min":11},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"ActiveTimeUs":{"Count":1,"Sum":44000,"Max":44000,"Min":44000},"WaitTimeUs":{"Count":1,"Sum":53101,"Max":53101,"Min":53101},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":55,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"DurationUs":{"Count":1,"Sum":49000,"Max":49000,"Min":49000},"InputBytes":{"Count":1,"Sum":48,"Max":48,"Min":48},"Tasks":1,"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"StageDurationUs":49000,"BaseTimeMs":1744912893475,"WaitInputTimeUs":{"Count":1,"Sum":43592,"Max":43592,"Min":43592},"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"CpuTimeUs":{"Count":1,"Sum":5045,"Max":5045,"Min":5045},"UpdateTimeMs":55,"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"ActiveMessageMs":{"Count":1,"Max":55,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48},"ActiveTimeUs":{"Count":1,"Sum":44000,"Max":44000,"Min":44000}},"Name":"4","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"ActiveMessageMs":{"Count":1,"Max":55,"Min":6},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":49000,"Max":49000,"Min":49000},"WaitTimeUs":{"Count":1,"Sum":29077,"Max":29077,"Min":29077},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":55,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"DurationUs":{"Count":1,"Sum":44000,"Max":44000,"Min":44000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"BaseTimeMs":1744912893475,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"ActiveMessageMs":{"Count":1,"Max":55,"Min":12},"FirstMessageMs":{"Count":1,"Sum":12,"Max":12,"Min":12},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"ActiveTimeUs":{"Count":1,"Sum":43000,"Max":43000,"Min":43000}},"Name":"RESULT","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"FirstMessageMs":{"Count":1,"Sum":12,"Max":12,"Min":12},"ActiveMessageMs":{"Count":1,"Max":55,"Min":12},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"ActiveTimeUs":{"Count":1,"Sum":43000,"Max":43000,"Min":43000},"WaitTimeUs":{"Count":1,"Sum":52978,"Max":52978,"Min":52978},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":55,"Min":2}}}],"CpuTimeUs":{"Count":1,"Sum":496,"Max":496,"Min":496},"StageDurationUs":44000,"WaitInputTimeUs":{"Count":1,"Sum":42771,"Max":42771,"Min":42771},"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"ActiveMessageMs":{"Count":1,"Max":55,"Min":12},"FirstMessageMs":{"Count":1,"Sum":12,"Max":12,"Min":12},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"ActiveTimeUs":{"Count":1,"Sum":43000,"Max":43000,"Min":43000}},"Name":"6","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":55,"Max":55,"Min":55},"FirstMessageMs":{"Count":1,"Sum":12,"Max":12,"Min":12},"ActiveMessageMs":{"Count":1,"Max":55,"Min":12},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"ActiveTimeUs":{"Count":1,"Sum":43000,"Max":43000,"Min":43000},"WaitTimeUs":{"Count":1,"Sum":52813,"Max":52813,"Min":52813},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":55,"Min":2}}}],"UpdateTimeMs":56,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":345721,"CpuTimeUs":341678},"ProcessCpuTimeUs":392,"TotalDurationUs":435361,"ResourcePoolId":"default","QueuedTimeUs":4563},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Key"],"E-Size":"No 
estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-Rows":6,"A-SelfCpu":5.045,"A-Cpu":5.045,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.496,"A-Cpu":5.541,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRangesFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 15551, MsgBus: 2149 2025-04-17T18:00:55.011375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343644598438470:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:55.011713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a71/r3tmp/tmpyTUzJC/pdisk_1.dat 2025-04-17T18:00:55.822145Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:55.824329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:55.824421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:55.841042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15551, node 1 2025-04-17T18:00:56.089480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:56.089499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:56.089505Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:56.089595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2149 TClient is connected to server localhost:2149 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:57.585275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:57.622414Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:57.636337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:57.920791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:58.236901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:58.431241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:00.013610Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343644598438470:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:00.013689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:02.424415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343678958178458:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:02.424524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:03.030797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.071865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.127957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.160975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.202677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.266056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.347450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:03.497169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343683253146438:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:03.497236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:03.497509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343683253146443:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:03.501355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:03.519202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343683253146445:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:03.626245Z node 1 :TX_PROXY ERROR: Actor# [1:7494343683253146502:3562] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 17447, MsgBus: 20503 2025-04-17T18:01:07.331057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343699102305467:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:07.331862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a71/r3tmp/tmp5A0VFt/pdisk_1.dat 2025-04-17T18:01:07.760298Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:07.819282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:07.819359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:07.827186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17447, node 2 2025-04-17T18:01:08.074798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:08.074818Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:08.074826Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:08.074923Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20503 TClient is connected to server localhost:20503 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVe ... te","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 65197, MsgBus: 10457 2025-04-17T18:01:25.286781Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343778638595144:2149];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a71/r3tmp/tmpJM6aa7/pdisk_1.dat 2025-04-17T18:01:25.517362Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:01:25.661312Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:25.663293Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:25.663363Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:25.671615Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65197, node 4 2025-04-17T18:01:25.894697Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:25.894717Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:25.894724Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:25.894846Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10457 TClient is connected to server localhost:10457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:26.901160Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:26.908755Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:01:26.912222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:27.006326Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:27.286542Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T18:01:27.501127Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T18:01:30.286292Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343778638595144:2149];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:01:30.286381Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:01:31.289949Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343804408400591:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:31.290064Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:31.377409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:01:31.448755Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:01:31.541574Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:01:31.586873Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:01:31.632869Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:01:31.714223Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:01:31.806625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:01:31.911656Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343804408401273:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:31.911762Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:31.911975Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343804408401278:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:31.915506Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:01:31.930457Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343804408401280:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:01:31.991044Z node 4 :TX_PROXY ERROR: Actor# [4:7494343804408401338:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:01:33.222995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
{"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}}
{"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}}
>> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap [GOOD]
>> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap
>> BsControllerTest::DecommitRejected [GOOD]
>> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD]
Test command err:
2025-04-17T18:01:35.392334Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap
2025-04-17T18:01:35.399044Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect
2025-04-17T18:01:35.417497Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap
2025-04-17T18:01:35.417561Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect
2025-04-17T18:01:35.417643Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap
2025-04-17T18:01:35.417671Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect
2025-04-17T18:01:35.417730Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap
2025-04-17T18:01:35.417759Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect
2025-04-17T18:01:35.417797Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap
2025-04-17T18:01:35.417821Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect
2025-04-17T18:01:35.417880Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap
2025-04-17T18:01:35.417917Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect
2025-04-17T18:01:35.417961Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap
2025-04-17T18:01:35.417998Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect
2025-04-17T18:01:35.418050Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap
2025-04-17T18:01:35.418073Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect
2025-04-17T18:01:35.418118Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap
2025-04-17T18:01:35.418152Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect
2025-04-17T18:01:35.418212Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap
2025-04-17T18:01:35.418236Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect
2025-04-17T18:01:35.418291Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap
2025-04-17T18:01:35.418317Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect
2025-04-17T18:01:35.418359Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap
2025-04-17T18:01:35.418381Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect
2025-04-17T18:01:35.418418Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap
2025-04-17T18:01:35.418440Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect
2025-04-17T18:01:35.418486Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap
2025-04-17T18:01:35.418512Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect
2025-04-17T18:01:35.418551Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap
2025-04-17T18:01:35.418578Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect
2025-04-17T18:01:35.515740Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:508:32] Status# ERROR ClientId# [1:508:32] ServerId# [0:0:0] PipeClient# [1:508:32]
2025-04-17T18:01:35.516433Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:509:20] Status# ERROR ClientId# [2:509:20] ServerId# [0:0:0] PipeClient# [2:509:20]
2025-04-17T18:01:35.516508Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:510:20] Status# ERROR ClientId# [3:510:20] ServerId# [0:0:0] PipeClient# [3:510:20]
2025-04-17T18:01:35.516555Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:511:20] Status# ERROR ClientId# [4:511:20] ServerId# [0:0:0] PipeClient# [4:511:20]
2025-04-17T18:01:35.516602Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:512:20] Status# ERROR ClientId# [5:512:20] ServerId# [0:0:0] PipeClient# [5:512:20]
2025-04-17T18:01:35.516651Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:513:20] Status# ERROR ClientId# [6:513:20] ServerId# [0:0:0] PipeClient# [6:513:20]
2025-04-17T18:01:35.516697Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:514:20] Status# ERROR ClientId# [7:514:20] ServerId# [0:0:0] PipeClient# [7:514:20]
2025-04-17T18:01:35.516732Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:515:20] Status# ERROR ClientId# [8:515:20] ServerId# [0:0:0] PipeClient# [8:515:20]
2025-04-17T18:01:35.516784Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:516:20] Status# ERROR ClientId# [9:516:20] ServerId# [0:0:0] PipeClient# [9:516:20]
2025-04-17T18:01:35.516825Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:517:20] Status# ERROR ClientId# [10:517:20] ServerId# [0:0:0] PipeClient# [10:517:20]
2025-04-17T18:01:35.516863Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:518:20] Status# ERROR ClientId# [11:518:20] ServerId# [0:0:0] PipeClient# [11:518:20]
2025-04-17T18:01:35.516903Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:519:20] Status# ERROR ClientId# [12:519:20] ServerId# [0:0:0] PipeClient# [12:519:20]
2025-04-17T18:01:35.516975Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:520:20] Status# ERROR ClientId# [13:520:20] ServerId# [0:0:0] PipeClient# [13:520:20]
2025-04-17T18:01:35.517018Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:521:20] Status# ERROR ClientId# [14:521:20] ServerId# [0:0:0] PipeClient# [14:521:20]
2025-04-17T18:01:35.517051Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:522:20] Status# ERROR ClientId# [15:522:20] ServerId# [0:0:0] PipeClient# [15:522:20]
2025-04-17T18:01:36.248569Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.209657s
2025-04-17T18:01:36.248798Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.209900s
2025-04-17T18:01:36.251040Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect
2025-04-17T18:01:36.251121Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect
2025-04-17T18:01:36.251260Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect
2025-04-17T18:01:36.251312Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect
2025-04-17T18:01:36.251358Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect
2025-04-17T18:01:36.251412Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect
2025-04-17T18:01:36.251468Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect
2025-04-17T18:01:36.251510Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect
2025-04-17T18:01:36.251565Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect
2025-04-17T18:01:36.251607Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect
2025-04-17T18:01:36.251645Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect
2025-04-17T18:01:36.252897Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect
2025-04-17T18:01:36.252995Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect
2025-04-17T18:01:36.253054Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect
2025-04-17T18:01:36.253110Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect
2025-04-17T18:01:36.255617Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:581:60] Status# OK ClientId# [1:581:60] ServerId# [1:610:61] PipeClient# [1:581:60]
2025-04-17T18:01:36.255671Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1
2025-04-17T18:01:36.259908Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:582:21] Status# OK ClientId# [2:582:21] ServerId# [1:611:62] PipeClient# [2:582:21]
2025-04-17T18:01:36.259958Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1
2025-04-17T18:01:36.260008Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:583:21] Status# OK ClientId# [3:583:21] ServerId# [1:612:63] PipeClient# [3:583:21]
2025-04-17T18:01:36.260031Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1
2025-04-17T18:01:36.260071Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:584:21] Status# OK ClientId# [4:584:21] ServerId# [1:613:64] PipeClient# [4:584:21]
2025-04-17T18:01:36.260096Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1
2025-04-17T18:01:36.260145Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:585:21] Status# OK ClientId# [5:585:21] ServerId# [1:614:65] PipeClient# [5:585:21]
2025-04-17T18:01:36.260187Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1
2025-04-17T18:01:36.260252Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:586:21] Status# OK ClientId# [6:586:21] ServerId# [1:615:66] PipeClient# [6:586:21]
2025-04-17T18:01:36.260275Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1
2025-04-17T18:01:36.260308Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:587:21] Status# OK ClientId# [7:587:21] ServerId# [1:616:67] PipeClient# [7:587:21]
2025-04-17T18:01:36.260340Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1
2025-04-17T18:01:36.260383Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:588:21] Status# OK ClientId# [8:588:21] ServerId# [1:617:68] PipeClient# [8:588:21]
2025-04-17T18:01:36.260410Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1
2025-04-17T18:01:36.260447Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:589:21] Status# OK ClientId# [9:589:21] ServerId# [1:618:69] PipeClient# [9:589:21]
2025-04-17T18:01:36.260470Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1
2025-04-17T18:01:36.260503Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:590:21] Status# OK ClientId# [10:590:21] ServerId# [1:619:70] PipeClient# [10:590:21]
2025-04-17T18:01:36.260529Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1
2025-04-17T18:01:36.260568Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:591:21] Status# OK ClientId# [11:591:21] ServerId# [1:620:71] PipeClient# [11:591:21]
2025-04-17T18:01:36.260596Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1
2025-04-17T18:01:36.260648Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:592:21] Status# OK ClientId# [12:592:21] ServerId# [1:621:72] PipeClient# [12:592:21]
2025-04-17T18:01:36.260672Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1
2025-04-17T18:01:36.260706Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:593:21] Status# OK ClientId# [13:593:21] ServerId# [1:622:73] PipeClient# [13:593:21]
2025-04-17T18:01:36.260728Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1
2025-04-17T18:01:36.260761Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:594:21] Status# OK ClientId# [14:594:21] ServerId# [1:623:74] PipeClient# [14:594:21]
2025-04-17T18:01:36.260783Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1
2025-04-17T18:01:36.260828Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:595:21] Status# OK ClientId# [15:595:21] ServerId# [1:624:75] PipeClient# [15:595:21]
2025-04-17T18:01:36.260854Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1
2025-04-17T18:01:36.263792Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate
2025-04-17T18:01:36.263919Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created
2025-04-17T18:01:36.306440Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING
2025-04-17T18:01:36.307960Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate
2025-04-17T18:01:36.308025Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created
2025-04-17T18:01:36.308137Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING
2025-04-17T18:01:36.308262Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate
2025-04-17T18:01:36.308305Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created
2025-04-17T18:01:36.308368Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING
2025-04-17T18:01:36.308452Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate
2025-04-17T18:01:36.308483Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created
2025-04-17T18:01:36.308520Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2 ...
ady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.689697Z 14 00h01m11.195536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY
2025-04-17T18:01:36.690067Z 1 00h01m11.195536s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.690668Z 8 00h01m11.196048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate
2025-04-17T18:01:36.690723Z 8 00h01m11.196048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed
2025-04-17T18:01:36.690925Z 11 00h01m14.790512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY
2025-04-17T18:01:36.691195Z 1 00h01m14.790512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.691732Z 15 00h01m16.259512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY
2025-04-17T18:01:36.692137Z 1 00h01m16.259512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.692399Z 12 00h01m18.510512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY
2025-04-17T18:01:36.692684Z 1 00h01m18.510512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.692986Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.693107Z 2 00h01m21.248512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY
2025-04-17T18:01:36.693495Z 1 00h01m21.248512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.693726Z 13 00h01m21.353512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY
2025-04-17T18:01:36.694136Z 1 00h01m21.353512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.694358Z 10 00h01m22.899512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY
2025-04-17T18:01:36.694686Z 1 00h01m22.899512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.694885Z 3 00h01m24.656512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY
2025-04-17T18:01:36.695276Z 1 00h01m24.656512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.695674Z 1 00h01m26.195536s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] Ready},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648
2025-04-17T18:01:36.695899Z 13 00h01m28.436024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY
2025-04-17T18:01:36.696345Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648
2025-04-17T18:01:36.697013Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697063Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true
2025-04-17T18:01:36.697350Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697392Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true
2025-04-17T18:01:36.697441Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697470Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true
2025-04-17T18:01:36.697500Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697527Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true
2025-04-17T18:01:36.697556Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697581Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true
2025-04-17T18:01:36.697610Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697644Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true
2025-04-17T18:01:36.697695Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697722Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true
2025-04-17T18:01:36.697749Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true
2025-04-17T18:01:36.697788Z 1 00h01m28.436024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true
2025-04-17T18:01:36.700305Z 1 00h01m28.436536s :BS_NODE DEBUG: [1] NodeServiceSetUpdate
2025-04-17T18:01:36.700379Z 1 00h01m28.436536s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0]
2025-04-17T18:01:36.700984Z 1 00h01m28.436536s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23
2025-04-17T18:01:36.701020Z 1 00h01m28.436536s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483648 Success# true
2025-04-17T18:01:36.701138Z 7 00h01m28.436536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate
2025-04-17T18:01:36.701185Z 7 00h01m28.436536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed
2025-04-17T18:01:36.701287Z 2 00h01m28.436536s :BS_NODE DEBUG: [2] NodeServiceSetUpdate
2025-04-17T18:01:36.701340Z 2 00h01m28.436536s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0]
2025-04-17T18:01:36.701462Z 3 00h01m28.436536s :BS_NODE DEBUG: [3] NodeServiceSetUpdate
2025-04-17T18:01:36.701511Z 3 00h01m28.436536s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0]
2025-04-17T18:01:36.701588Z 4 00h01m28.436536s :BS_NODE DEBUG: [4] NodeServiceSetUpdate
2025-04-17T18:01:36.701641Z 4 00h01m28.436536s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0]
2025-04-17T18:01:36.701729Z 5 00h01m28.436536s :BS_NODE DEBUG: [5] NodeServiceSetUpdate
2025-04-17T18:01:36.701773Z 5 00h01m28.436536s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0]
2025-04-17T18:01:36.701868Z 6 00h01m28.436536s :BS_NODE DEBUG: [6] NodeServiceSetUpdate
2025-04-17T18:01:36.701926Z 6 00h01m28.436536s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0]
2025-04-17T18:01:36.701982Z 9 00h01m28.436536s :BS_NODE DEBUG: [9] NodeServiceSetUpdate
2025-04-17T18:01:36.702046Z 13 00h01m28.436536s :BS_NODE DEBUG: [13] NodeServiceSetUpdate
2025-04-17T18:01:36.702091Z 13 00h01m28.436536s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0]
2025-04-17T18:01:36.702231Z 14 00h01m28.436536s :BS_NODE DEBUG: [14] NodeServiceSetUpdate
2025-04-17T18:01:36.702280Z 14 00h01m28.436536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0]
2025-04-17T18:01:36.702369Z 15 00h01m28.436536s :BS_NODE DEBUG: [15] NodeServiceSetUpdate
2025-04-17T18:01:36.702406Z 15 00h01m28.436536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created
2025-04-17T18:01:36.702483Z 15 00h01m28.436536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING
2025-04-17T18:01:36.705014Z 15 00h01m34.023536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING
2025-04-17T18:01:36.705444Z 1 00h01m34.302512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY
2025-04-17T18:01:36.705818Z 14 00h01m34.944512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY
2025-04-17T18:01:36.707229Z 15 00h01m39.647536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY
2025-04-17T18:01:36.707980Z 9 00h01m39.648048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate
2025-04-17T18:01:36.708033Z 9 00h01m39.648048s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed
>> KqpExplain::FullOuterJoin [GOOD]
>> KqpExplain::IdxFullscan
>> KqpQuery::TableSinkWithSubquery [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD]
Test command err:
Trying to start YDB, gRPC: 9222, MsgBus: 16172
2025-04-17T18:01:06.755242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343694326794300:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:01:06.755298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a61/r3tmp/tmpowuyNU/pdisk_1.dat
2025-04-17T18:01:07.357602Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:01:07.377943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:01:07.378066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:01:07.381411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9222, node 1
2025-04-17T18:01:07.527133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:01:07.527159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:01:07.527171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:01:07.527290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16172
TClient is connected to server localhost:16172
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:01:09.109155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:09.190837Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:01:09.215081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:09.445166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:09.678211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:09.772455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:11.758368Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343694326794300:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:01:11.758427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:01:11.931189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343715801632548:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:11.931305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:12.719357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:01:12.752461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:01:12.788269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:01:12.823848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:01:12.850718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:01:12.887962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:01:12.957443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:01:13.013460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343724391567810:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:13.013536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:13.013754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343724391567815:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:13.017509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:01:13.027405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343724391567817:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:01:13.081881Z node 1 :TX_PROXY ERROR: Actor# [1:7494343724391567872:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 63536, MsgBus: 19844
2025-04-17T18:01:15.630420Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343733735138342:2072];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:01:15.630472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a61/r3tmp/tmpA549Ja/pdisk_1.dat
2025-04-17T18:01:15.829033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:01:15.829122Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:01:15.831975Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:01:15.845188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 63536, node 2
2025-04-17T18:01:15.982777Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:01:15.982795Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:01:15.982803Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:01:15.982914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19844
TClient is connected to server localhost:19844
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:01:16.532192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:16.543779Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:01:16.553124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:16.678764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-1 ... 474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:01:23.838582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:24.142641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:24.251722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:26.339374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343780007658730:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:26.339492Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:26.419618Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:01:26.469692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:01:26.535384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:01:26.618913Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:01:26.685461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:01:26.780407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:01:26.886946Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:01:27.082397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343784302626702:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:27.082519Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:27.083026Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343784302626707:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:27.087559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:01:27.120668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343784302626709:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:01:27.229185Z node 3 :TX_PROXY ERROR: Actor# [3:7494343784302626771:3566] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:01:27.598427Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343762827787796:2066];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:01:27.598490Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 29595, MsgBus: 28283
2025-04-17T18:01:31.669235Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343802250772750:2169];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a61/r3tmp/tmpz5ihik/pdisk_1.dat
2025-04-17T18:01:31.728316Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T18:01:31.819192Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:01:31.828037Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:01:31.828140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:01:31.833476Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29595, node 4
2025-04-17T18:01:31.867808Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:01:31.867826Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:01:31.867835Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:01:31.867927Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28283
TClient is connected to server localhost:28283
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:01:32.493052Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:32.504391Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:01:35.730225Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343819430642449:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:35.730361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:35.734110Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343819430642485:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:35.741245Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T18:01:35.759992Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343819430642487:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T18:01:35.821435Z node 4 :TX_PROXY ERROR: Actor# [4:7494343819430642540:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:01:35.914997Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343819430642561:2341], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:62: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031
2025-04-17T18:01:35.916898Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YzM3MmFmMGUtOGFhMTk2ZC00NTcwM2NiZC05OGI2ODFmZQ==, ActorId: [4:7494343819430642446:2327], ActorState: ExecuteState, TraceId: 01js2ekgkca51ac8g7936j2kpc, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id:
2025-04-17T18:01:35.975365Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343819430642586:2350], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:45: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031
2025-04-17T18:01:35.977041Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTY0OTllMC1kYTMwYjU1MC03Nzc1Y2M0My1mOGNkOGM2Nw==, ActorId: [4:7494343819430642574:2344], ActorState: ExecuteState, TraceId: 01js2ekkxye5kwbppcrb230bh9, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id:
2025-04-17T18:01:36.027962Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494343823725609894:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031
2025-04-17T18:01:36.029605Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzUyNTM0MDktNGI5Mjk3NzgtOWVkOGNiOTItM2Y1NDBiNTM=, ActorId: [4:7494343819430642592:2353], ActorState: ExecuteState, TraceId: 01js2ekkzj544st1tcq63px1zj, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id:
>> KqpExplain::Predicates [GOOD]
>> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSinkWithSubquery [GOOD]
Test command err:
Trying to start YDB, gRPC: 29465, MsgBus: 13532
2025-04-17T18:01:11.142977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343718976492615:2196];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:01:11.143438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a58/r3tmp/tmp2XPfkp/pdisk_1.dat
2025-04-17T18:01:12.541221Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:01:12.588857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:01:12.588949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:01:12.594341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:01:12.601682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29465, node 1
2025-04-17T18:01:13.021895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:01:13.021911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:01:13.021917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:01:13.022004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13532
TClient is connected to server localhost:13532
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:01:14.576634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:01:16.130311Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343718976492615:2196];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:01:16.130372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:01:18.032896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343749041264230:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:18.032995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:18.407314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:01:18.754935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-17T18:01:19.042724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343753336232899:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:19.045374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343753336232894:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:01:19.045481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:19.048660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T18:01:19.059439Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T18:01:19.060601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343753336232901:2451], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T18:01:19.132199Z node 1 :TX_PROXY ERROR: Actor# [1:7494343753336232961:3254] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6417, MsgBus: 29393 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a58/r3tmp/tmpNGMApq/pdisk_1.dat 2025-04-17T18:01:23.126418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:23.181143Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:23.184457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:23.184544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:23.191197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6417, node 2 2025-04-17T18:01:23.431264Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:23.431288Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:23.431295Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:23.431401Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29393 TClient is connected to server localhost:29393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:23.968358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:23.981477Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:01:27.494331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343788007305052:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.494432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.547064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:01:27.881972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:01:28.212213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343792302273716:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:28.212324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:28.212613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343792302273721:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:28.216440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:01:28.248375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343792302273723:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:01:28.339134Z node 2 :TX_PROXY ERROR: Actor# [2:7494343792302273795:3259] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62764, MsgBus: 2214 2025-04-17T18:01:32.019496Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343806466174409:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:32.019583Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a58/r3tmp/tmpltcE6K/pdisk_1.dat 2025-04-17T18:01:32.158796Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:32.168787Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:32.168869Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:32.176884Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62764, node 3 2025-04-17T18:01:32.415575Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:32.415599Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:32.415609Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:32.415728Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2214 TClient is connected to server localhost:2214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:33.170616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:36.164486Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343823646044239:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:36.164578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:36.191595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:01:36.263246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:01:36.319048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343823646044413:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:36.319163Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:36.319504Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343823646044418:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:36.323738Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:01:36.332713Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343823646044420:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:01:36.417777Z node 3 :TX_PROXY ERROR: Actor# [3:7494343823646044471:2444] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:37.019725Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343806466174409:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:37.019873Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged >> BsControllerTest::TestLocalSelfHeal [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2025-04-17T18:01:35.525759Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-17T18:01:35.525836Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-17T18:01:35.525946Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-17T18:01:35.525972Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-17T18:01:35.526030Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-17T18:01:35.526054Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-17T18:01:35.526097Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-17T18:01:35.526139Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-17T18:01:35.526202Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-17T18:01:35.526225Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-17T18:01:35.526263Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-17T18:01:35.526287Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-17T18:01:35.526325Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-17T18:01:35.526360Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-17T18:01:35.526417Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-17T18:01:35.526442Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-17T18:01:35.526480Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-17T18:01:35.526517Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-17T18:01:35.526562Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-17T18:01:35.526585Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-17T18:01:35.526644Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-17T18:01:35.526669Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-17T18:01:35.526709Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-17T18:01:35.526732Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-17T18:01:35.526768Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-17T18:01:35.526793Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-17T18:01:35.526848Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-17T18:01:35.526874Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-17T18:01:35.526922Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] 
Bootstrap 2025-04-17T18:01:35.526948Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-17T18:01:35.526997Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-17T18:01:35.527049Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-17T18:01:35.527097Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-17T18:01:35.527120Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-17T18:01:35.527156Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-17T18:01:35.527177Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-17T18:01:35.527229Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-17T18:01:35.527253Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-17T18:01:35.527295Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-17T18:01:35.527323Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-17T18:01:35.527357Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-17T18:01:35.527382Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-17T18:01:35.527417Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-17T18:01:35.527438Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-17T18:01:35.527473Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-17T18:01:35.527495Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-17T18:01:35.527545Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-17T18:01:35.527570Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-17T18:01:35.527628Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-17T18:01:35.527664Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-17T18:01:35.527720Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-17T18:01:35.527744Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-17T18:01:35.527806Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-17T18:01:35.527832Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-17T18:01:35.527869Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-17T18:01:35.527892Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-17T18:01:35.527933Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-17T18:01:35.527964Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-17T18:01:35.528018Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-17T18:01:35.528041Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-17T18:01:35.528079Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-17T18:01:35.528102Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-17T18:01:35.528154Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-17T18:01:35.528176Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-17T18:01:35.528221Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-17T18:01:35.528255Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-17T18:01:35.528302Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-17T18:01:35.528325Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-17T18:01:35.528377Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-17T18:01:35.528401Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-17T18:01:35.528441Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-17T18:01:35.528474Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-04-17T18:01:35.546521Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 
2025-04-17T18:01:35.547972Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-17T18:01:35.548046Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-17T18:01:35.548102Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-17T18:01:35.548166Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-17T18:01:35.548220Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-17T18:01:35.548262Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-17T18:01:35.548302Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-17T18:01:35.548338Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-17T18:01:35.548377Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-17T18:01:35.548417Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-17T18:01:35.548456Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-17T18:01:35.548491Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-17T18:01:35.548530Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-17T18:01:35.548569Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-17T18:01:35.548607Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-17T18:01:35.548642Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-17T18:01:35.548680Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-17T18:01:35.548721Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-17T18:01:35.548794Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-17T18:01:35.548836Z 21 00h00m00.000000s :BS_NODE 
DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-17T18:01:35.548878Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-17T18:01:35.548933Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-17T18:01:35.548995Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-17T18:01:35.549035Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-17T18:01:35.549080Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-17T18:01:35.549120Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-17T18:01:35.549160Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-17T18:01:35.549224Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-17T18:01:35.549262Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-17T18:01:35.549299Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-17T18:01:35.549335Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-17T18:01:35.549378Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-17T18:01:35.549418Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-17T18:01:35.549459Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
0.102048s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-17T18:01:38.347965Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] VDiskId# [80000043:1:2:1:0] -> [80000043:2:2:1:0] 2025-04-17T18:01:38.348132Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T18:01:38.348185Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] VDiskId# [80000043:1:1:0:0] -> [80000043:2:1:0:0] 2025-04-17T18:01:38.348324Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-04-17T18:01:38.348370Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] VDiskId# [80000033:1:2:2:0] -> [80000033:2:2:2:0] 2025-04-17T18:01:38.348441Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-17T18:01:38.348496Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] VDiskId# [80000033:1:1:1:0] -> [80000033:2:1:1:0] 2025-04-17T18:01:38.348581Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-17T18:01:38.348628Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] VDiskId# [80000033:1:0:0:0] -> [80000033:2:0:0:0] 2025-04-17T18:01:38.348704Z 22 00h05m00.102048s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-17T18:01:38.348744Z 22 00h05m00.102048s :BS_NODE DEBUG: [22] VDiskId# [80000033:1:1:2:0] -> [80000033:2:1:2:0] 2025-04-17T18:01:38.348859Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-17T18:01:38.348917Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] VDiskId# [80000033:1:0:1:0] -> [80000033:2:0:1:0] 2025-04-17T18:01:38.349018Z 10 00h05m00.102048s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T18:01:38.349076Z 10 00h05m00.102048s :BS_NODE DEBUG: [10] VDiskId# [80000033:1:0:2:0] -> [80000033:2:0:2:0] 2025-04-17T18:01:38.349154Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.349194Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000033:2:2:0:0] PDiskId# 1001 VSlotId# 1009 created 2025-04-17T18:01:38.349257Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000033:2:2:0:0] status changed to INIT_PENDING 2025-04-17T18:01:38.349351Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-17T18:01:38.349404Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] VDiskId# [80000033:1:2:1:0] -> [80000033:2:2:1:0] 2025-04-17T18:01:38.349491Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T18:01:38.349533Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] VDiskId# [80000033:1:1:0:0] -> [80000033:2:1:0:0] 2025-04-17T18:01:38.349656Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-04-17T18:01:38.349705Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] VDiskId# [80000023:1:2:2:0] -> [80000023:2:2:2:0] 2025-04-17T18:01:38.349801Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-17T18:01:38.349859Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] VDiskId# [80000023:1:1:1:0] -> [80000023:2:1:1:0] 2025-04-17T18:01:38.349970Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-17T18:01:38.350019Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] VDiskId# [80000023:1:0:0:0] -> [80000023:2:0:0:0] 2025-04-17T18:01:38.350108Z 22 00h05m00.102048s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-17T18:01:38.350156Z 22 00h05m00.102048s :BS_NODE DEBUG: [22] VDiskId# [80000023:1:1:2:0] -> [80000023:2:1:2:0] 2025-04-17T18:01:38.350309Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-17T18:01:38.350374Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] VDiskId# [80000023:1:0:1:0] -> [80000023:2:0:1:0] 2025-04-17T18:01:38.350490Z 10 00h05m00.102048s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T18:01:38.350537Z 10 
00h05m00.102048s :BS_NODE DEBUG: [10] VDiskId# [80000023:1:0:2:0] -> [80000023:2:0:2:0] 2025-04-17T18:01:38.350637Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.350680Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000023:2:2:0:0] PDiskId# 1003 VSlotId# 1009 created 2025-04-17T18:01:38.350737Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000023:2:2:0:0] status changed to INIT_PENDING 2025-04-17T18:01:38.350831Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-17T18:01:38.350883Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] VDiskId# [80000023:1:2:1:0] -> [80000023:2:2:1:0] 2025-04-17T18:01:38.350973Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T18:01:38.351032Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] VDiskId# [80000023:1:1:0:0] -> [80000023:2:1:0:0] 2025-04-17T18:01:38.351173Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-04-17T18:01:38.351221Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] VDiskId# [80000013:1:2:2:0] -> [80000013:2:2:2:0] 2025-04-17T18:01:38.351319Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-17T18:01:38.351366Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] VDiskId# [80000013:1:1:1:0] -> [80000013:2:1:1:0] 2025-04-17T18:01:38.351444Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-17T18:01:38.351489Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] VDiskId# [80000013:1:0:0:0] -> [80000013:2:0:0:0] 2025-04-17T18:01:38.351576Z 22 00h05m00.102048s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-17T18:01:38.351623Z 22 00h05m00.102048s :BS_NODE DEBUG: [22] VDiskId# [80000013:1:1:2:0] -> [80000013:2:1:2:0] 2025-04-17T18:01:38.351701Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-17T18:01:38.351742Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] VDiskId# [80000013:1:0:1:0] -> [80000013:2:0:1:0] 2025-04-17T18:01:38.351856Z 10 00h05m00.102048s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T18:01:38.351902Z 10 00h05m00.102048s :BS_NODE DEBUG: [10] VDiskId# [80000013:1:0:2:0] -> [80000013:2:0:2:0] 2025-04-17T18:01:38.351978Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.352015Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000013:2:2:0:0] PDiskId# 1000 VSlotId# 1010 created 2025-04-17T18:01:38.352112Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000013:2:2:0:0] status changed to INIT_PENDING 2025-04-17T18:01:38.352203Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-17T18:01:38.352251Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] VDiskId# [80000013:1:2:1:0] -> [80000013:2:2:1:0] 2025-04-17T18:01:38.352345Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T18:01:38.352391Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] VDiskId# [80000013:1:1:0:0] -> [80000013:2:1:0:0] 2025-04-17T18:01:38.352502Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-04-17T18:01:38.352546Z 34 00h05m00.102048s :BS_NODE DEBUG: [34] VDiskId# [80000003:1:2:2:0] -> [80000003:2:2:2:0] 2025-04-17T18:01:38.352681Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-17T18:01:38.352729Z 19 00h05m00.102048s :BS_NODE DEBUG: [19] VDiskId# [80000003:1:1:1:0] -> [80000003:2:1:1:0] 2025-04-17T18:01:38.352822Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-17T18:01:38.352868Z 4 00h05m00.102048s :BS_NODE DEBUG: [4] VDiskId# [80000003:1:0:0:0] -> [80000003:2:0:0:0] 2025-04-17T18:01:38.352955Z 22 00h05m00.102048s :BS_NODE 
DEBUG: [22] NodeServiceSetUpdate 2025-04-17T18:01:38.353004Z 22 00h05m00.102048s :BS_NODE DEBUG: [22] VDiskId# [80000003:1:1:2:0] -> [80000003:2:1:2:0] 2025-04-17T18:01:38.353082Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-17T18:01:38.353124Z 7 00h05m00.102048s :BS_NODE DEBUG: [7] VDiskId# [80000003:1:0:1:0] -> [80000003:2:0:1:0] 2025-04-17T18:01:38.353208Z 10 00h05m00.102048s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-17T18:01:38.353269Z 10 00h05m00.102048s :BS_NODE DEBUG: [10] VDiskId# [80000003:1:0:2:0] -> [80000003:2:0:2:0] 2025-04-17T18:01:38.353360Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.353407Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000003:2:2:0:0] PDiskId# 1001 VSlotId# 1010 created 2025-04-17T18:01:38.353479Z 28 00h05m00.102048s :BS_NODE DEBUG: [28] VDiskId# [80000003:2:2:0:0] status changed to INIT_PENDING 2025-04-17T18:01:38.353568Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-17T18:01:38.353628Z 31 00h05m00.102048s :BS_NODE DEBUG: [31] VDiskId# [80000003:1:2:1:0] -> [80000003:2:2:1:0] 2025-04-17T18:01:38.353726Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-17T18:01:38.353773Z 16 00h05m00.102048s :BS_NODE DEBUG: [16] VDiskId# [80000003:1:1:0:0] -> [80000003:2:1:0:0] 2025-04-17T18:01:38.360715Z 28 00h05m01.925048s :BS_NODE DEBUG: [28] VDiskId# [80000063:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.361676Z 28 00h05m02.886048s :BS_NODE DEBUG: [28] VDiskId# [80000043:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.377707Z 28 00h05m02.895048s :BS_NODE DEBUG: [28] VDiskId# [80000033:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.382633Z 28 00h05m04.062048s :BS_NODE DEBUG: [28] VDiskId# [80000053:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.383679Z 28 00h05m04.537048s :BS_NODE DEBUG: [28] VDiskId# [80000013:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.384644Z 28 00h05m04.871048s :BS_NODE DEBUG: [28] VDiskId# [80000073:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.386520Z 28 00h05m05.431048s :BS_NODE DEBUG: [28] VDiskId# [80000023:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.387460Z 28 00h05m05.779048s :BS_NODE DEBUG: [28] VDiskId# [80000003:2:2:0:0] status changed to REPLICATING 2025-04-17T18:01:38.388732Z 28 00h05m13.966048s :BS_NODE DEBUG: [28] VDiskId# [80000073:2:2:0:0] status changed to READY 2025-04-17T18:01:38.390483Z 28 00h05m13.966560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.390552Z 28 00h05m13.966560s :BS_NODE DEBUG: [28] VDiskId# [80000073:1:2:0:0] destroyed 2025-04-17T18:01:38.391483Z 28 00h05m15.524048s :BS_NODE DEBUG: [28] VDiskId# [80000023:2:2:0:0] status changed to READY 2025-04-17T18:01:38.393036Z 28 00h05m15.524560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.393094Z 28 00h05m15.524560s :BS_NODE DEBUG: [28] VDiskId# [80000023:1:2:0:0] destroyed 2025-04-17T18:01:38.393322Z 28 00h05m19.808048s :BS_NODE DEBUG: [28] VDiskId# [80000053:2:2:0:0] status changed to READY 2025-04-17T18:01:38.395182Z 28 00h05m19.808560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.395248Z 28 00h05m19.808560s :BS_NODE DEBUG: [28] VDiskId# [80000053:1:2:0:0] destroyed 2025-04-17T18:01:38.395766Z 28 00h05m20.867048s :BS_NODE DEBUG: [28] VDiskId# [80000013:2:2:0:0] status changed to READY 2025-04-17T18:01:38.397698Z 28 00h05m20.867560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.397778Z 28 
00h05m20.867560s :BS_NODE DEBUG: [28] VDiskId# [80000013:1:2:0:0] destroyed 2025-04-17T18:01:38.398465Z 28 00h05m28.592048s :BS_NODE DEBUG: [28] VDiskId# [80000003:2:2:0:0] status changed to READY 2025-04-17T18:01:38.400841Z 28 00h05m28.592560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.400917Z 28 00h05m28.592560s :BS_NODE DEBUG: [28] VDiskId# [80000003:1:2:0:0] destroyed 2025-04-17T18:01:38.402527Z 28 00h05m31.843048s :BS_NODE DEBUG: [28] VDiskId# [80000033:2:2:0:0] status changed to READY 2025-04-17T18:01:38.404529Z 28 00h05m31.843560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.404587Z 28 00h05m31.843560s :BS_NODE DEBUG: [28] VDiskId# [80000033:1:2:0:0] destroyed 2025-04-17T18:01:38.404784Z 28 00h05m33.836048s :BS_NODE DEBUG: [28] VDiskId# [80000043:2:2:0:0] status changed to READY 2025-04-17T18:01:38.406755Z 28 00h05m33.836560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.406826Z 28 00h05m33.836560s :BS_NODE DEBUG: [28] VDiskId# [80000043:1:2:0:0] destroyed 2025-04-17T18:01:38.407391Z 28 00h05m34.884048s :BS_NODE DEBUG: [28] VDiskId# [80000063:2:2:0:0] status changed to READY 2025-04-17T18:01:38.409352Z 28 00h05m34.884560s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-17T18:01:38.409410Z 28 00h05m34.884560s :BS_NODE DEBUG: [28] VDiskId# [80000063:1:2:0:0] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 31090, MsgBus: 63080 2025-04-17T18:01:00.265043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343670510133185:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:00.265095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a6a/r3tmp/tmp8RejqE/pdisk_1.dat 2025-04-17T18:01:01.147916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:01.179554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:01.179647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:01.187533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31090, node 1 2025-04-17T18:01:01.492444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:01.492470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:01.492476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:01.492578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63080 TClient is connected to server localhost:63080 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:03.079713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:03.122612Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:03.145442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:03.483974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:03.902773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:04.077894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:05.246461Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343670510133185:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:05.266038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:07.328756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343700574905849:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:07.328919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:07.724180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.796355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.846450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.920767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.001605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.100033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.253230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.440535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343704869873825:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.440630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.446317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343704869873830:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:08.454106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:08.472734Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:01:08.473007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343704869873832:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:08.562097Z node 1 :TX_PROXY ERROR: Actor# [1:7494343704869873889:3569] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 2025, MsgBus: 16409 2025-04-17T18:01:12.124867Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343723753837982:2090];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:12.148044Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a6a/r3tmp/tmpooTbML/pdisk_1.dat 2025-04-17T18:01:12.517134Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:12.553761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:12.553841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:12.563450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2025, node 2 2025-04-17T18:01:12.762693Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:12.762718Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:12.762725Z node 2 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-17T18:01:12.762830Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16409 TClient is connected to server localhost:16409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDesc ... 7.110360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.110884Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343785504487644:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.115220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:27.131932Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343785504487646:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:27.209274Z node 3 :TX_PROXY ERROR: Actor# [3:7494343785504487702:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:29.133376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:29.473475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:01:29.568018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 14356, MsgBus: 5401 2025-04-17T18:01:31.032798Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343802179534210:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:31.033179Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a6a/r3tmp/tmphE7pn0/pdisk_1.dat 2025-04-17T18:01:31.205077Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:31.219980Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:31.220095Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:31.226047Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14356, node 4 2025-04-17T18:01:31.299667Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:31.299685Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:31.299692Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:31.299801Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5401 TClient is connected to server localhost:5401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:31.890777Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:31.918224Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:32.013552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:32.207827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:32.377687Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.041845Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343819359405144:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.041946Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.115625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.215059Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.260590Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.337253Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.400954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.453450Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.501600Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.590687Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343819359405811:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.590773Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.590968Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343819359405816:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.595643Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:35.614563Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343819359405818:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:35.678632Z node 4 :TX_PROXY ERROR: Actor# [4:7494343819359405875:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:36.028528Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343802179534210:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:36.028608Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:37.014820Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 25593, MsgBus: 21261 2025-04-17T18:01:16.045321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343735396557846:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:16.045710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4e/r3tmp/tmp3UZ0Az/pdisk_1.dat 2025-04-17T18:01:16.559610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:16.587378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:16.587491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:16.589603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25593, node 1 2025-04-17T18:01:16.826998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:16.827026Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:16.827043Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:16.827185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21261 TClient is connected to server localhost:21261 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:18.053768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:18.092061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:18.260425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:18.510380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:18.604087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:20.423437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343756871395954:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:20.423561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:20.902324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:20.998457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:21.036961Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343735396557846:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:21.037009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:21.042902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:21.117109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:21.185694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:21.262920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:21.333945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:21.427745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343761166363931:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:21.427853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:21.428183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343761166363938:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:21.431303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:21.442836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343761166363940:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:21.501223Z node 1 :TX_PROXY ERROR: Actor# [1:7494343761166363995:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:23.203019Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912883077, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 8111, MsgBus: 8868 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4e/r3tmp/tmphVJ4MW/pdisk_1.dat 2025-04-17T18:01:24.298027Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:24.298734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:24.344400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:24.344467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:24.351223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8111, node 2 2025-04-17T18:01:24.430686Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:24.430704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:24.430712Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:24.430814Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8868 TClient is connected to server localhost:8868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:24.985583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:24.992488Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:01:25.007303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:25.111587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:25.282411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schem ... s permissions } 2025-04-17T18:01:27.631735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:27.682689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:27.758913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:27.804906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:27.849789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:27.898778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:28.001485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:28.082088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:28.213441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343789715876523:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:28.213548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:28.213983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343789715876528:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:28.218371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:28.235290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343789715876530:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:28.310826Z node 2 :TX_PROXY ERROR: Actor# [2:7494343789715876585:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:30.142335Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912890105, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 27581, MsgBus: 12674 2025-04-17T18:01:31.239829Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343801637919482:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:31.262852Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4e/r3tmp/tmpFlCiQl/pdisk_1.dat 2025-04-17T18:01:31.586545Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:31.597028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:31.597200Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:31.599477Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27581, node 3 2025-04-17T18:01:31.742842Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:31.742867Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:31.742881Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:31.743003Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12674 TClient is connected to server localhost:12674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:32.421376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:32.452644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:01:32.552148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:32.788680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:01:32.863715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.178336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343818817790435:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.178422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.245093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.281314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.315811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.348638Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.433223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.488141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.596285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.739998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343818817791112:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.740173Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.740569Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343818817791118:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:35.745181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:35.758482Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343818817791120:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:35.849927Z node 3 :TX_PROXY ERROR: Actor# [3:7494343818817791176:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:36.238326Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343801637919482:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:36.238416Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> KqpLimits::QueryExecTimeout [GOOD] >> KqpExplain::ExplainDataQueryWithParams [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardLocalExec+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8737, MsgBus: 12179 2025-04-17T18:01:17.476049Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343743028948377:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:17.476372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4a/r3tmp/tmpS8y3XC/pdisk_1.dat 2025-04-17T18:01:18.237638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:18.237763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:18.242082Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:18.269181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8737, node 1 2025-04-17T18:01:18.483889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:18.483910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:18.483920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:18.484005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12179 TClient is connected to server localhost:12179 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:19.716942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:19.754921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:19.769298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:20.030731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:20.337682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:20.473358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:22.432023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343764503786490:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:22.432149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:22.467397Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343743028948377:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:22.476197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:22.924744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:22.974645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:23.054199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:23.092555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:23.155221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:23.240667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:23.308217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:23.439603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343768798754467:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:23.439694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:23.440064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343768798754472:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:23.444477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:23.462610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343768798754474:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:23.553246Z node 1 :TX_PROXY ERROR: Actor# [1:7494343768798754531:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:25.484172Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912885436, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 27176, MsgBus: 62792 2025-04-17T18:01:26.518305Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343782048293718:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:26.518397Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4a/r3tmp/tmpOPpLKY/pdisk_1.dat 2025-04-17T18:01:26.804800Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:26.805278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:26.805341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:26.847823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27176, node 2 2025-04-17T18:01:27.042239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:27.042261Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:27.042270Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:27.042371Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62792 TClient is connected to server localhost:62792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:28.106693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:28.115026Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:28.136291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:28.317959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operati ... us: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:31.908729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:31.959164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:31.997026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:32.047897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:32.084307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:32.129652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:32.166457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:32.257853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343807818099948:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:32.257946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:32.258140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343807818099953:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:32.261800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:32.275715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343807818099955:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:32.376157Z node 2 :TX_PROXY ERROR: Actor# [2:7494343807818100013:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:33.578321Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912893591, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 26166, MsgBus: 24567 2025-04-17T18:01:34.542563Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343817032366414:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:34.542617Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a4a/r3tmp/tmp1Nx5mj/pdisk_1.dat 2025-04-17T18:01:34.734357Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:34.764866Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:34.764959Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:34.766863Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26166, node 3 2025-04-17T18:01:34.822562Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:34.822584Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:34.822592Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:34.822718Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24567 TClient is connected to server localhost:24567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:01:35.367735Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:01:35.383880Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:35.397627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.492552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.705126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.812944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:38.083318Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343834212237365:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:38.083432Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:38.136155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:38.196248Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:38.240445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:38.272627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:38.310965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:38.412560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:38.485726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:38.604808Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343834212238039:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:38.604930Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:38.605310Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343834212238044:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:38.609281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:38.620649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343834212238046:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:38.721308Z node 3 :TX_PROXY ERROR: Actor# [3:7494343834212238104:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:39.545745Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343817032366414:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:39.545820Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 20707, MsgBus: 20529 2025-04-17T18:01:08.878813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343705570599452:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:08.901437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a56/r3tmp/tmpKVzKhp/pdisk_1.dat 2025-04-17T18:01:09.757438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:09.758079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:09.759485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:01:09.770412Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20707, node 1 2025-04-17T18:01:09.994779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:09.994802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:09.994809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:09.994922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20529 TClient is connected to server localhost:20529 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:10.807734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:10.824393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:10.854498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.072604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.289833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.416638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:13.431773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343727045437560:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:13.431882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:13.788892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.821478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.851778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.867851Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343705570599452:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:13.867907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:13.919627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:13.962906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:14.034661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:14.123180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:14.220133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343731340405530:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:14.220226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:14.220499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343731340405535:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:14.230103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:14.252487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343731340405537:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:14.350365Z node 1 :TX_PROXY ERROR: Actor# [1:7494343731340405593:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No 
estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase": ... imit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":19,"Plans":[{"PlanNodeId":20,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":23,"Operators":[{"Scan":"Parallel","ReadRange":["Key (20, 120]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":24,"Plans":[{"PlanNodeId":28,"Plans":[{"PlanNodeId":29,"Plans":[{"PlanNodeId":30,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":34,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":36,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":37,"Plans":[{"PlanNodeId":41,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Plans":[{"PlanNodeId":45,"Plans":[{"PlanNodeId":46,"Plans":[{"PlanNodeId":47,"Plans":[{"PlanNodeId":48,"Plans":[{"PlanNodeId":49,"Operators":[{"Scan":"Parallel","ReadRange":["Key [10, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: 
[\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 2789, MsgBus: 19031 2025-04-17T18:01:34.939080Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343816059063309:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:34.939143Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a56/r3tmp/tmpeS3cDs/pdisk_1.dat 2025-04-17T18:01:35.101733Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:35.133727Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:35.133831Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:35.135520Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2789, node 4 2025-04-17T18:01:35.186636Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:35.186669Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:35.186681Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:35.186843Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19031 TClient is connected to server localhost:19031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:35.779440Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:35.791899Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:35.797931Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.912055Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:01:36.158805Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:01:36.263156Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:38.925658Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343833238934259:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:38.925779Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:39.008657Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:39.049544Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:39.098817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:39.177456Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:39.254070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:39.308834Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:39.386256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:39.474865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343837533902226:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:39.475059Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343837533902231:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:39.475534Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:39.478934Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:39.492469Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343837533902233:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:39.593294Z node 4 :TX_PROXY ERROR: Actor# [4:7494343837533902289:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:39.941247Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343816059063309:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:39.941315Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 17044, MsgBus: 10001 2025-04-17T17:59:48.208524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343361351399308:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:48.208660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b06/r3tmp/tmphX5ABI/pdisk_1.dat 2025-04-17T17:59:48.903105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:48.903267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:48.924410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:48.934536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17044, node 1 2025-04-17T17:59:49.058628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:49.058649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:49.058656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:49.058760Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10001 TClient is connected to server localhost:10001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
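The recurring "Resource pool default not found or you don't have access permissions" warnings above come from the workload service probing for the built-in default pool before it exists; TPoolCreatorActor then creates it, and a concurrent creator loses the race, producing the benign "path exist, request accepts it" TX_PROXY message. Below is a hedged sketch of creating a pool explicitly: the DDL follows the documented CREATE RESOURCE POOL syntax, the pool name and limits are made up, and routing the statement through ExecuteSchemeQuery is an assumption (newer servers may require the query service instead).

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:17044") // assumed; ports are allocated per test run
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);

    auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) -> NYdb::TStatus {
        // Hypothetical pool name and limits; the server auto-creates the "default"
        // pool itself, which is exactly the doublechecking/path-exist sequence
        // visible in the log above.
        return session.ExecuteSchemeQuery(R"(
            CREATE RESOURCE POOL test_pool WITH (
                CONCURRENT_QUERY_LIMIT = 10,
                QUEUE_SIZE = 100
            );
        )").GetValueSync();
    });
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}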
2025-04-17T17:59:50.058246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:50.111912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:50.441002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:50.701774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:50.824074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:52.884645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343378531270130:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:52.884821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:53.248848Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343361351399308:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:53.248932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:53.260255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:59:53.327979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:59:53.376380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:59:53.435523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:59:53.479397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:59:53.549745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:59:53.616423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:59:53.762649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343382826238096:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:53.762726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:53.763076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343382826238101:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:53.767365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:59:53.784864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343382826238103:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:59:53.880312Z node 1 :TX_PROXY ERROR: Actor# [1:7494343382826238162:3560] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:55.333690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:59:57.018639Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494343395711140951:2544] TxId: 281474976710673. Ctx: { TraceId: 01js2egjww1bm67cd0hznnq32z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVjYTAxNjEtYjBmMGRlMzQtYjk0MjM5ODgtZWI5YzEzYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. } 2025-04-17T17:59:57.021198Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343395711140962:2556], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01js2egjww1bm67cd0hznnq32z. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YTVjYTAxNjEtYjBmMGRlMzQtYjk0MjM5ODgtZWI5YzEzYzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494343395711140951:2544], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:57.024723Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343395711140961:2555], TxId: 281474976710673, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YTVjYTAxNjEtYjBmMGRlMzQtYjk0MjM5ODgtZWI5YzEzYzE=. CustomerSuppliedId : . TraceId : 01js2egjww1bm67cd0hznnq32z. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494343395711140951:2544], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:57.029743Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343395711140965:2559], TxId: 281474976710673, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01js2egjww1bm67cd0hznnq32z. SessionId : ydb://session/3?node_id=1&id=YTVjYTAxNjEtYjBmMGRlMzQtYjk0MjM5ODgtZWI5YzEzYzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494343395711140951:2544], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T17:59:57.030569Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTVjYTAxNjEtYjBmMGRlMzQtYjk0MjM5ODgtZWI5YzEzYzE=, ActorId: [1:7494343395711140933:2544], ActorState: ExecuteState, TraceId: 01js2egjww1bm67cd0hznnq32z, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 25191, MsgBus: 25285 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b06/r3tmp/tmpxU99cH/pdisk_1.dat 2025-04-17T17:59:58.020768Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343401875708918:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:58.021632Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:59:58.380610Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:58.388903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:58.388968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:58.390510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25191, node 2 2025-04-17T17:59:58.488934Z node 2 :NET_CLASSIFIER WARN: distributable config ... nse to: [3:7494343676170580660:2819] 2025-04-17T18:01:02.131130Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmJjM2JhNzItYzZlZjdjOTUtNDVkMjM5OWMtMTUzYmNjOTY=, ActorId: [3:7494343676170580660:2819], ActorState: ExecuteState, TraceId: 01js2ejjtc4nekz95v58qkqs6y, Create QueryResponse for error on request, msg: 2025-04-17T18:01:02.131771Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494343680465548000:2832], TxId: 281474976715673, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZmJjM2JhNzItYzZlZjdjOTUtNDVkMjM5OWMtMTUzYmNjOTY=. TraceId : 01js2ejjtc4nekz95v58qkqs6y. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494343680465547980:2819], status: TIMEOUT, reason: [ {
: Error: Request timeout 100ms exceeded } {
: Error: Cancelling after 101ms during execution } ] 2025-04-17T18:01:02.132179Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494343680465548008:2840], TxId: 281474976715673, task: 9. Ctx: { TraceId : 01js2ejjtc4nekz95v58qkqs6y. SessionId : ydb://session/3?node_id=3&id=ZmJjM2JhNzItYzZlZjdjOTUtNDVkMjM5OWMtMTUzYmNjOTY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494343680465547980:2819], status: TIMEOUT, reason: [ {
: Error: Request timeout 100ms exceeded } {
: Error: Cancelling after 101ms during execution } ]
: Error: Request timeout 100ms exceeded
: Error: Cancelling after 101ms during execution 2025-04-17T18:01:02.345185Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494343680465548052:2819] TxId: 281474976715675. Ctx: { TraceId: 01js2ejk134ryrrhfd1j1e2k70, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmJjM2JhNzItYzZlZjdjOTUtNDVkMjM5OWMtMTUzYmNjOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 101ms during execution } ] 2025-04-17T18:01:02.345331Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494343680465548083:2852], TxId: 281474976715675, task: 9. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZmJjM2JhNzItYzZlZjdjOTUtNDVkMjM5OWMtMTUzYmNjOTY=. TraceId : 01js2ejk134ryrrhfd1j1e2k70. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494343680465548052:2819], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:02.439388Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494343680465548059:2844], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZmJjM2JhNzItYzZlZjdjOTUtNDVkMjM5OWMtMTUzYmNjOTY=. TraceId : 01js2ejk134ryrrhfd1j1e2k70. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494343680465548052:2819], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:02.447886Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmJjM2JhNzItYzZlZjdjOTUtNDVkMjM5OWMtMTUzYmNjOTY=, ActorId: [3:7494343676170580660:2819], ActorState: ExecuteState, TraceId: 01js2ejk134ryrrhfd1j1e2k70, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 101ms during execution Trying to start YDB, gRPC: 24949, MsgBus: 5201 2025-04-17T18:01:04.740454Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343685845209533:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:04.740911Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b06/r3tmp/tmpvorZIc/pdisk_1.dat 2025-04-17T18:01:05.061656Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:05.061765Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:05.065346Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:01:05.084486Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24949, node 4 2025-04-17T18:01:05.238838Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:05.238861Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:05.238871Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:05.239026Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5201 TClient is connected to server localhost:5201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:06.064032Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:06.094128Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:06.250255Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:06.835673Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
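The paired issues earlier in this test output ("Request timeout 100ms exceeded" together with "Cancelling after 101ms during execution", then "Request canceled after 100ms") are consistent with a client that sets an operation timeout and a cancel-after deadline one millisecond apart, which appears to be what this KqpLimits test exercises. A minimal sketch of setting both knobs with the C++ SDK follows; the query text and endpoint are placeholders, not taken from the test source.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:24949") // assumed endpoint
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);

    auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) -> NYdb::TStatus {
        auto settings = NYdb::NTable::TExecDataQuerySettings()
            .OperationTimeout(TDuration::MilliSeconds(100)) // -> "Request timeout 100ms exceeded"
            .CancelAfter(TDuration::MilliSeconds(101));     // -> "Cancelling after 101ms during execution"
        return session.ExecuteDataQuery(
            "SELECT COUNT(*) FROM `/Root/KeyValue`;",
            NYdb::NTable::TTxControl::BeginTx().CommitTx(),
            settings).GetValueSync();
    });
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}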
2025-04-17T18:01:07.025992Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:09.747767Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343685845209533:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:09.747846Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:11.836587Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343715909982315:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:11.836981Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:11.883049Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:11.941642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:11.999207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:12.070500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:12.132519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:12.220122Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:12.392416Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:12.604237Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343720204950306:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:12.604353Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:12.604822Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343720204950311:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:12.619455Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:12.696374Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343720204950313:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:12.762406Z node 4 :TX_PROXY ERROR: Actor# [4:7494343720204950377:3579] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:19.999599Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:01:19.999627Z node 4 :IMPORT WARN: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=4&id=ODZiYjdiOGUtZWYwMzhmZmQtNDcxNTliODgtOTc4MjIzNg== >> KqpExplain::IdxFullscan [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [GOOD] |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 5781, MsgBus: 1854 2025-04-17T18:01:11.822599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343719417013342:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:11.823294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a50/r3tmp/tmpsYRX38/pdisk_1.dat 2025-04-17T18:01:12.898715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:12.982687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:12.988093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:12.988188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:12.991487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5781, node 1 2025-04-17T18:01:13.274784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:13.274807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:13.274814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:13.274942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1854 TClient is connected to server localhost:1854 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:14.561914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:01:14.612880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:01:14.917207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:15.189611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:15.435060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:16.762953Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343719417013342:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:16.763016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:17.483388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343745186818774:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:17.483522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:17.972338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:18.023237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:18.066862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:18.108813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:18.151296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:18.225769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:18.318990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:18.460035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343749481786747:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:18.460111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:18.460448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343749481786752:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:18.464958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:18.494929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343749481786754:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:18.567619Z node 1 :TX_PROXY ERROR: Actor# [1:7494343749481786811:3568] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":17,"Plans":[{"Tables":["EightShard"],"PlanNodeId":16,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Delete","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":14,"Plans":[{"Tables":["EightShard"],"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"},{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Update","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","ReadRangesPointPrefixLen":"1","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"},{"PlanNodeId":5,"Plans":[{"Tables":["EightShard"],"PlanNodeId":4,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Update","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","ReadRangesPointPrefixLen":"0","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"type":"MultiErase"},{"columns":["Data","Key"],"type":"MultiUpdate"},{"columns":["Data","Key"],"type":"MultiUpdate"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/EightShard","Name":"Delete","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Delete"}],"Node 
Type":"Sink"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":4,"Operators":[{"Path":"\/Root\/EightShard","Name":"Update","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Update"}],"Node Type":"Sink"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"Path":"\/Root\/EightShard","Name":"Update","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Update"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} Trying to start YDB, gRPC: 16334, MsgBus: 3990 2025-04-17T18:01:22.114680Z node 2 :METADAT ... 968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:37.902950Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7971, node 4 2025-04-17T18:01:37.958780Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:37.958809Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:37.958824Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:37.958959Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63084 TClient is connected to server localhost:63084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:38.519572Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:38.536165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:38.605637Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:38.762017Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:38.877473Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:01:41.406329Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343844846827747:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:41.406429Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:41.467436Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:41.500046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:41.530831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:41.562356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:41.592848Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:41.641784Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:41.673850Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:41.737535Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343844846828410:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:41.737616Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343844846828415:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:41.737653Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:41.741443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:41.751873Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343844846828417:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:41.841941Z node 4 :TX_PROXY ERROR: Actor# [4:7494343844846828474:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:42.658967Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343827666956785:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:42.659068Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-17T18:01:42.892326Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:01:43.199705Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:01:43.233468Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["id"],"Node Type":"TableLookup","Path":"\/Root\/test_table_idx","Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node 
Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |83.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD] >> TPQCompatTest::ReadWriteSessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 4617, MsgBus: 4969 2025-04-17T18:01:00.582095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343668424054052:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:00.614641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a6b/r3tmp/tmptXbADC/pdisk_1.dat 2025-04-17T18:01:01.575058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:01.581866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:01.581956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:01.584203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 4617, node 1 2025-04-17T18:01:01.914759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:01.914778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:01.914790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:01.914899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4969 TClient is connected to server localhost:4969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:03.403664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:03.435934Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:05.579965Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343668424054052:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:05.602739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:06.161492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343694193858335:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:06.161688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:06.162122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343694193858370:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:06.165864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:01:06.179216Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:01:06.179463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343694193858372:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:01:06.242098Z node 1 :TX_PROXY ERROR: Actor# [1:7494343694193858423:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:06.844746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:01:07.432061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:01:07.432299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:01:07.432532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:01:07.432629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:01:07.432712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:01:07.432803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:01:07.432880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:01:07.432972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:01:07.433072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:01:07.433163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:01:07.433349Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:01:07.433453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494343698488825910:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:01:07.436013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:01:07.436057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:01:07.436244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:01:07.436345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:01:07.436435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:01:07.436517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:01:07.436594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:01:07.436676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:01:07.436763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:01:07.436846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:01:07.436932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:01:07.437018Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7494343698488825917:2353];tablet_id=72075186224037890;p ... =TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.246722Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.252902Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.255348Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.262956Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.262992Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.270505Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.270904Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.276477Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.278599Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.283267Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.286117Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.289165Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.293898Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.295890Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.301387Z node 4 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.303867Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.308585Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.310704Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.316029Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.317694Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.323525Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.325531Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.331939Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.339140Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.345093Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.351712Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.358618Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.359649Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.364517Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.365620Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.371065Z 
node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.372459Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.376643Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.377923Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.383282Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.383292Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.389093Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.390252Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.394915Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.397325Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.400160Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.404255Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.406686Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.411148Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.412386Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.417023Z node 4 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.417112Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.422017Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.422350Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.429111Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.429751Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.436101Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:01:44.436169Z node 4 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; >> TPersQueueTest::FetchRequest [GOOD] >> TPersQueueTest::EventBatching >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> TTopicYqlTest::AlterAutopartitioning [GOOD] >> TTopicYqlTest::BadRequests >> TKeyValueTracingTest::WriteHuge [FAIL] >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-17T17:55:21.128092Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.128184Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.128205Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:55:21.128590Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-17T17:55:21.128631Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.128665Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.129812Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008859s 2025-04-17T17:55:21.130306Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-17T17:55:21.130357Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.130382Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.130431Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008899s 2025-04-17T17:55:21.137024Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-17T17:55:21.137063Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.137088Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:55:21.137144Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005042s 2025-04-17T17:55:21.242256Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1744912521242224 2025-04-17T17:55:21.990533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342214406971922:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:21.990840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:22.268960Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342218217182904:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:22.269318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:55:22.588363Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:55:22.598510Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017b5/r3tmp/tmpSqLza1/pdisk_1.dat 2025-04-17T17:55:23.217400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.280636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:55:23.431988Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:23.441475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:23.441580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:23.450877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:23.450946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:23.464075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:55:23.470433Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:55:23.472718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 15268, node 1 2025-04-17T17:55:23.772613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0017b5/r3tmp/yandex8GR3iy.tmp 2025-04-17T17:55:23.772636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0017b5/r3tmp/yandex8GR3iy.tmp 2025-04-17T17:55:23.772777Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0017b5/r3tmp/yandex8GR3iy.tmp 2025-04-17T17:55:23.772874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:55:23.913658Z INFO: TTestServer started on Port 64478 GrpcPort 15268 TClient is connected to server localhost:64478 PQClient connected to localhost:15268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:55:24.469976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:24.521828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:55:26.987759Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342214406971922:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:26.987834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:27.221192Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342218217182904:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:27.221276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:27.858755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342240176776607:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:27.858865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342240176776592:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:27.858943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:27.861713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:55:27.894753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342240176776615:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:55:28.278391Z node 1 :TX_PROXY ERROR: Actor# [1:7494342240176776698:2679] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:28.322883Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342244471744008:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:28.330484Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTlkNGJmZTYtZGZkYTE4OTItZTdlOTQ3NWItN2I4MDllNzQ=, ActorId: [1:7494342240176776573:2336], ActorState: ExecuteState, TraceId: 01js2e8ceq1d2r3epe2nxh26s1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:28.339909Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342239692019577:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:55:28.341139Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTIwMjAxOGItNGMxMDFmNzYtM2JlYWY3NjgtZTUzOWUxZGY=, ActorId: [2:7494342239692019545:2310], ActorState: ExecuteState, TraceId: 01js2e8ckqa01zqn4p9hqyatyg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:55:28.341545Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:55:28.332639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:28.336076Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issu ... -04-17T18:01:37.513872Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000ptest-message-group-id 2025-04-17T18:01:37.513888Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-04-17T18:01:37.513903Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:01:37.513934Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:01:37.513970Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:01:37.514025Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:01:37.514126Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-04-17T18:01:37.518246Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7494343820854487597:2623] 2025-04-17T18:01:37.518381Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:01:37.518439Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518527Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-17T18:01:37.518565Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T18:01:37.518599Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-17T18:01:37.518620Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518653Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-17T18:01:37.518677Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518708Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-17T18:01:37.518728Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518759Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-17T18:01:37.518781Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518812Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-17T18:01:37.518834Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518866Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-17T18:01:37.518889Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518924Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-17T18:01:37.518943Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:01:37.518977Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-17T18:01:37.518998Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T18:01:37.519031Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-17T18:01:37.519259Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-04-17T18:01:37.519275Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T18:01:37.519317Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T18:01:37.519469Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T18:01:37.519473Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-17T18:01:37.519515Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-04-17T18:01:37.519556Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:01:37.519732Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
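In the write path traced above, the partition persists SeqNo 1 through 10 at offsets 0 through 9, answers each write, and the client session then marks every message acknowledged in order. A generic sketch of that in-flight bookkeeping (not the actual SDK's internal structures):

    class InflightTracker:
        """Sequence numbers written but not yet acknowledged by the partition."""

        def __init__(self):
            self._inflight = {}  # seq_no -> payload or per-message metadata

        def on_write(self, seq_no, payload):
            self._inflight[seq_no] = payload

        def on_ack_batch(self, seq_nos, offsets):
            # Acks arrive batched, like sequence_numbers 1..10 / offsets 0..9 above.
            for seq_no, offset in zip(seq_nos, offsets):
                self._inflight.pop(seq_no, None)
                print(f"acknowledged message {seq_no} at offset {offset}")

        def pending(self):
            return sorted(self._inflight)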
2025-04-17T18:01:37.519759Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:01:37.519836Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744912897510 queuesize 0 startOffset 0 2025-04-17T18:01:37.526564Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 6 queued_in_partition_duration_ms: 2 } 2025-04-17T18:01:37.526647Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 1 2025-04-17T18:01:37.526697Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 2 2025-04-17T18:01:37.526732Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 3 2025-04-17T18:01:37.526764Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 4 2025-04-17T18:01:37.526791Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 5 2025-04-17T18:01:37.526825Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 6 2025-04-17T18:01:37.526856Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 7 2025-04-17T18:01:37.526884Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 8 2025-04-17T18:01:37.526918Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 9 2025-04-17T18:01:37.526940Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: acknoledged message 10 2025-04-17T18:01:37.530316Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: close. 
Timeout = 0 ms 2025-04-17T18:01:37.530398Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session will now close 2025-04-17T18:01:37.530469Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: aborting 2025-04-17T18:01:37.531111Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:01:37.531172Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0] Write session: destroy 2025-04-17T18:01:37.535627Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0 grpc read done: success: 0 data: 2025-04-17T18:01:37.535678Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0 grpc read failed 2025-04-17T18:01:37.535750Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0 grpc closed 2025-04-17T18:01:37.535790Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|f1ef605e-88d5d6c5-a70d48a5-4ada0384_0 is DEAD 2025-04-17T18:01:37.537183Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:01:37.542348Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7494343829444422445:2647] destroyed 2025-04-17T18:01:37.542475Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |83.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |83.7%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail >> OperationMapping::IndexBuildRejected >> OperationMapping::IndexBuildSuccess [GOOD] >> OperationMapping::IndexBuildRejected [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |83.7%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::SetupReadSession [GOOD] >> TPersQueueTest::TestBigMessage |83.7%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] |83.7%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::ReadWithoutConsumerFederation [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> KqpJoinOrder::TPCDS16+ColumnStore [GOOD] >> TPQTest::TestPQReadAhead [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |83.7%| 
[LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> TPQTest::TestTimeRetention |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |83.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |83.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |83.8%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-17T18:01:59.134483Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.134555Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.134583Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:01:59.136331Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-17T18:01:59.136393Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.136431Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.142902Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005956s 2025-04-17T18:01:59.143593Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:01:59.152774Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T18:01:59.154892Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.158515Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.158546Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.158582Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:01:59.158904Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-17T18:01:59.158952Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.158978Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.159030Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008751s 2025-04-17T18:01:59.159502Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:01:59.159940Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T18:01:59.160037Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.160888Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.160910Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.160927Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:01:59.161425Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-17T18:01:59.161465Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.161485Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.161544Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.224757s 2025-04-17T18:01:59.161898Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:01:59.162499Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T18:01:59.162557Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.167514Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.167537Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.167565Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:01:59.167850Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-17T18:01:59.167909Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.167931Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.167986Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.178018s 2025-04-17T18:01:59.168536Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:01:59.168912Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-17T18:01:59.169017Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.169919Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.169946Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.169965Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:01:59.170272Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:01:59.171036Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:01:59.188068Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.189316Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-17T18:01:59.189354Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.189374Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.189499Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.255102s 2025-04-17T18:01:59.189690Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-17T18:01:59.197025Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.197064Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.197142Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:01:59.197537Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:01:59.198050Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:01:59.198263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.205377Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T18:01:59.308316Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.317172Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T18:01:59.317318Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T18:01:59.317386Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-17T18:01:59.317463Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T18:01:59.417822Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T18:01:59.417958Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-17T18:01:59.418871Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.418899Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.418918Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:01:59.419269Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:01:59.419914Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:01:59.420129Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.420667Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T18:01:59.521507Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:01:59.521685Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-17T18:01:59.521741Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T18:01:59.521778Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-17T18:01:59.521835Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-17T18:01:59.531093Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-17T18:01:59.531209Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-17T18:01:59.531312Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-17T18:01:59.531969Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013ed/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk7 Trying to start YDB, gRPC: 10978, MsgBus: 2637 2025-04-17T17:55:33.883618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342265759526505:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:33.883709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ed/r3tmp/tmpm6lfS8/pdisk_1.dat 2025-04-17T17:55:34.586671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:55:34.611339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:55:34.611415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:55:34.625241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10978, node 1 2025-04-17T17:55:34.966804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:55:34.966838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:55:34.966850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:55:34.966969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2637 TClient is connected to server localhost:2637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:55:36.221424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.242714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:55:36.273350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.582131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.774026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:36.888981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:55:38.880896Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342265759526505:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:55:38.881013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:55:39.468469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342291529331930:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:39.468571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:39.852669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:55:39.911108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:55:39.954258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:55:40.044292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:55:40.132939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:55:40.209586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:55:40.301393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:55:40.433680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342295824299905:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:40.433768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:40.434271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342295824299910:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:55:40.446939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:55:40.466240Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T17:55:40.466603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342295824299912:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:55:40.568979Z node 1 :TX_PROXY ERROR: Actor# [1:7494342295824299970:3565] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:55:49.582391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:55:49.582424Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"12b47ec1-7786672e-cbed8c86-985c5592") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"c989902c-11396f65-92b01fbb-bd1c798e") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"7b33d5d0-16fa3a14-eb9372f0-42ee5ec")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17147, MsgBus: 22123 2025-04-17T18:01:33.618767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343810321885027:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:33.618816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a45/r3tmp/tmpkLuqiE/pdisk_1.dat 2025-04-17T18:01:34.078943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:34.100551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:34.100642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:34.103299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 
17147, node 1 2025-04-17T18:01:34.153093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:34.153114Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:34.153121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:34.153236Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22123 TClient is connected to server localhost:22123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:34.797670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:34.818607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:34.838739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.005173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.194366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:35.285950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:37.032253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343827501756001:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:37.032374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:37.352166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:37.381674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:37.404294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:37.431256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:37.466059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:37.552364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:37.596925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:37.680185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343827501756666:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:37.680259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:37.680374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343827501756671:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:37.683951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:37.693800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343827501756673:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:37.762941Z node 1 :TX_PROXY ERROR: Actor# [1:7494343827501756729:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:38.620446Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343810321885027:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:38.620519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22964, MsgBus: 23130 2025-04-17T18:01:40.456106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343839983349402:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:40.456185Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:01:40.461571Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343842786144586:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:40.461645Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a45/r3tmp/tmp1bJmNT/pdisk_1.dat 2025-04-17T18:01:40.587163Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:40.619390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:40.619480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:40.620278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:40.620347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:40.624321Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:01:40.624469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:01:40.625416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22964, node 2 2025-04-17T18:01:40.691829Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:40.691846Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:40.691850Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:40.691944Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23130 TClient is connected to server localhost:23130 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { V ... [2:7494343857163221703:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:44.122714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:44.122933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343857163221708:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:44.126767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:44.141536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343857163221710:2427], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:44.219523Z node 2 :TX_PROXY ERROR: Actor# [2:7494343857163221785:4454] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:45.458276Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343839983349402:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:45.458370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:45.462277Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494343842786144586:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:45.463319Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5308, MsgBus: 22932 2025-04-17T18:01:48.967239Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343876262958730:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:48.967314Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:01:48.975624Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494343875188918504:2243];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:48.975696Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a45/r3tmp/tmptSREY8/pdisk_1.dat 2025-04-17T18:01:49.286985Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:49.335245Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:49.335375Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:49.336914Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:49.337039Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:49.341808Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T18:01:49.341976Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:01:49.351084Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5308, node 4 2025-04-17T18:01:49.518750Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:49.518774Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:49.518781Z node 4 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:49.518937Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22932 TClient is connected to server localhost:22932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:50.097380Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:50.136755Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:50.253543Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:50.469495Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:50.613725Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:52.993450Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343893442830123:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:52.993527Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.057735Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.107592Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.157901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.203863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.289602Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.406226Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.465465Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.568326Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343897737798357:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.568442Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.568688Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343897737798362:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.572421Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:53.600092Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343897737798364:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:53.665359Z node 4 :TX_PROXY ERROR: Actor# [4:7494343897737798447:4463] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:53.967511Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343876262958730:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:53.967578Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:53.975222Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494343875188918504:2243];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:53.975305Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 13569, MsgBus: 17564 2025-04-17T18:01:08.271332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343704814096885:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:08.271363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5c/r3tmp/tmpJ47kxc/pdisk_1.dat 2025-04-17T18:01:09.453003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:09.528396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:09.528481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:09.530371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:09.545528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13569, node 1 2025-04-17T18:01:09.822323Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:09.822358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:09.822368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:09.822472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17564 TClient is connected to server localhost:17564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:11.427304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.453373Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:11.472366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:11.757758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:12.102016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:12.297804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:13.274395Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343704814096885:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:13.274470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:14.832022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343730583902473:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:14.832170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:15.153149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.212377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.267547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.306623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.346663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.433502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.487536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:15.585325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343734878870439:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:15.585377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:15.585509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343734878870444:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:15.589785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:15.608109Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:01:15.608501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343734878870446:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:01:15.666507Z node 1 :TX_PROXY ERROR: Actor# [1:7494343734878870503:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:17.531402Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjM5N2ExNWUtNmZkNzFmYTUtMzdlNjQ1NmYtNzYwYjY4YjA=, ActorId: [1:7494343743468805448:2521], ActorState: ExecuteState, TraceId: 01js2ek1r6emk92rt72wmqxzg9, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1320: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $group type mismatch, expected: { Kind: Data Data { Scheme: 2 } }, actual: Type (Data), schemeType: Int32, schemeTypeId: 1 Trying to start YDB, gRPC: 25012, MsgBus: 16406 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5c/r3tmp/tmp8NfoHX/pdisk_1.dat 2025-04-17T18:01:18.932246Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:19.079245Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:19.110994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:19.111071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:19.119415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25012, node 2 2025-04-17T18:01:19.366050Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:19.366073Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:19.366082Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:19.366225Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16406 TClient is connected to server localhost:16406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:20.038237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, op ... 
false data# peer# 2025-04-17T18:01:53.632989Z node 6 :GRPC_SERVER DEBUG: [0x51b00030f280] received request Name# Coordination/CreateNode ok# false data# peer# 2025-04-17T18:01:53.633062Z node 6 :GRPC_SERVER DEBUG: [0x51b00030eb80] received request Name# Coordination/AlterNode ok# false data# peer# 2025-04-17T18:01:53.633187Z node 6 :GRPC_SERVER DEBUG: [0x51b0001afd80] received request Name# Coordination/DropNode ok# false data# peer# 2025-04-17T18:01:53.633356Z node 6 :GRPC_SERVER DEBUG: [0x51b0001aef80] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-04-17T18:01:53.633385Z node 6 :GRPC_SERVER DEBUG: [0x51b000216a80] received request Name# CreateDatabase ok# false data# peer# 2025-04-17T18:01:53.633548Z node 6 :GRPC_SERVER DEBUG: [0x51b000218680] received request Name# GetDatabaseStatus ok# false data# peer# 2025-04-17T18:01:53.633581Z node 6 :GRPC_SERVER DEBUG: [0x51b00025a780] received request Name# AlterDatabase ok# false data# peer# 2025-04-17T18:01:53.633688Z node 6 :GRPC_SERVER DEBUG: [0x51b00019c980] received request Name# ListDatabases ok# false data# peer# 2025-04-17T18:01:53.633780Z node 6 :GRPC_SERVER DEBUG: [0x51b000214e80] received request Name# RemoveDatabase ok# false data# peer# 2025-04-17T18:01:53.633899Z node 6 :GRPC_SERVER DEBUG: [0x51b00019bb80] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-04-17T18:01:53.633979Z node 6 :GRPC_SERVER DEBUG: [0x51b0004b9480] received request Name# GetScaleRecommendation ok# false data# peer# 2025-04-17T18:01:53.634097Z node 6 :GRPC_SERVER DEBUG: [0x51b00022c180] received request Name# ListEndpoints ok# false data# peer# 2025-04-17T18:01:53.634278Z node 6 :GRPC_SERVER DEBUG: [0x51b00015c480] received request Name# NodeRegistration ok# false data# peer# 2025-04-17T18:01:53.634346Z node 6 :GRPC_SERVER DEBUG: [0x51b0002c6880] received request Name# WhoAmI ok# false data# peer# 2025-04-17T18:01:53.634491Z node 6 :GRPC_SERVER DEBUG: [0x51b0002af580] received request Name# Scan ok# false data# peer# 2025-04-17T18:01:53.634559Z node 6 :GRPC_SERVER DEBUG: [0x51b00029dd80] received request Name# GetShardLocations ok# false data# peer# 2025-04-17T18:01:53.634671Z node 6 :GRPC_SERVER DEBUG: [0x51b0002afc80] received request Name# DescribeTable ok# false data# peer# 2025-04-17T18:01:53.634744Z node 6 :GRPC_SERVER DEBUG: [0x51b00029cf80] received request Name# CreateSnapshot ok# false data# peer# 2025-04-17T18:01:53.634837Z node 6 :GRPC_SERVER DEBUG: [0x51b00029d680] received request Name# RefreshSnapshot ok# false data# peer# 2025-04-17T18:01:53.634963Z node 6 :GRPC_SERVER DEBUG: [0x51b00029e480] received request Name# DiscardSnapshot ok# false data# peer# 2025-04-17T18:01:53.634984Z node 6 :GRPC_SERVER DEBUG: [0x51b000241180] received request Name# List ok# false data# peer# 2025-04-17T18:01:53.635126Z node 6 :GRPC_SERVER DEBUG: [0x51b000241f80] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-04-17T18:01:53.635159Z node 6 :GRPC_SERVER DEBUG: [0x51b000240380] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-04-17T18:01:53.635257Z node 6 :GRPC_SERVER DEBUG: [0x51b0001e4580] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-04-17T18:01:53.635359Z node 6 :GRPC_SERVER DEBUG: [0x51b0001e3780] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-04-17T18:01:53.635406Z node 6 :GRPC_SERVER DEBUG: [0x51b0001a9480] received request Name# RateLimiter/DescribeResource ok# false data# peer# 
2025-04-17T18:01:53.635540Z node 6 :GRPC_SERVER DEBUG: [0x51b00041fb80] received request Name# CreateStream ok# false data# peer# 2025-04-17T18:01:53.635548Z node 6 :GRPC_SERVER DEBUG: [0x51b0001a7f80] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-04-17T18:01:53.635680Z node 6 :GRPC_SERVER DEBUG: [0x51b000306d80] received request Name# ListStreams ok# false data# peer# 2025-04-17T18:01:53.635739Z node 6 :GRPC_SERVER DEBUG: [0x51b0002aaf80] received request Name# DeleteStream ok# false data# peer# 2025-04-17T18:01:53.635811Z node 6 :GRPC_SERVER DEBUG: [0x51b0001aa980] received request Name# DescribeStream ok# false data# peer# 2025-04-17T18:01:53.635914Z node 6 :GRPC_SERVER DEBUG: [0x51b000309080] received request Name# ListShards ok# false data# peer# 2025-04-17T18:01:53.635984Z node 6 :GRPC_SERVER DEBUG: [0x51b0003cfa80] received request Name# SetWriteQuota ok# false data# peer# 2025-04-17T18:01:53.636169Z node 6 :GRPC_SERVER DEBUG: [0x51b0003cec80] received request Name# UpdateStream ok# false data# peer# 2025-04-17T18:01:53.636212Z node 6 :GRPC_SERVER DEBUG: [0x51b000245780] received request Name# PutRecord ok# false data# peer# 2025-04-17T18:01:53.636371Z node 6 :GRPC_SERVER DEBUG: [0x51b000308280] received request Name# PutRecords ok# false data# peer# 2025-04-17T18:01:53.636410Z node 6 :GRPC_SERVER DEBUG: [0x51b0002e5980] received request Name# GetRecords ok# false data# peer# 2025-04-17T18:01:53.636598Z node 6 :GRPC_SERVER DEBUG: [0x51b0004a7580] received request Name# GetShardIterator ok# false data# peer# 2025-04-17T18:01:53.636608Z node 6 :GRPC_SERVER DEBUG: [0x51b0001c8c80] received request Name# SubscribeToShard ok# false data# peer# 2025-04-17T18:01:53.636814Z node 6 :GRPC_SERVER DEBUG: [0x51b000253080] received request Name# DescribeLimits ok# false data# peer# 2025-04-17T18:01:53.636816Z node 6 :GRPC_SERVER DEBUG: [0x51b0004abb80] received request Name# DescribeStreamSummary ok# false data# peer# 2025-04-17T18:01:53.637017Z node 6 :GRPC_SERVER DEBUG: [0x51b000501080] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-04-17T18:01:53.637054Z node 6 :GRPC_SERVER DEBUG: [0x51b00016c080] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-04-17T18:01:53.637260Z node 6 :GRPC_SERVER DEBUG: [0x51b000504180] received request Name# UpdateShardCount ok# false data# peer# 2025-04-17T18:01:53.637279Z node 6 :GRPC_SERVER DEBUG: [0x51b00046e780] received request Name# UpdateStreamMode ok# false data# peer# 2025-04-17T18:01:53.637470Z node 6 :GRPC_SERVER DEBUG: [0x51b000505d80] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-04-17T18:01:53.637512Z node 6 :GRPC_SERVER DEBUG: [0x51b0004f7d80] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-04-17T18:01:53.637688Z node 6 :GRPC_SERVER DEBUG: [0x51b000251b80] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-04-17T18:01:53.637720Z node 6 :GRPC_SERVER DEBUG: [0x51b0004fa780] received request Name# ListStreamConsumers ok# false data# peer# 2025-04-17T18:01:53.637882Z node 6 :GRPC_SERVER DEBUG: [0x51b000461580] received request Name# AddTagsToStream ok# false data# peer# 2025-04-17T18:01:53.637924Z node 6 :GRPC_SERVER DEBUG: [0x51b00050bf80] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-04-17T18:01:53.638081Z node 6 :GRPC_SERVER DEBUG: [0x51b000460780] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-04-17T18:01:53.638121Z node 6 
:GRPC_SERVER DEBUG: [0x51b0004fbc80] received request Name# ListTagsForStream ok# false data# peer# 2025-04-17T18:01:53.638313Z node 6 :GRPC_SERVER DEBUG: [0x51b000507980] received request Name# MergeShards ok# false data# peer# 2025-04-17T18:01:53.638520Z node 6 :GRPC_SERVER DEBUG: [0x51b000500980] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-17T18:01:53.638741Z node 6 :GRPC_SERVER DEBUG: [0x51b00050cd80] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-17T18:01:53.638746Z node 6 :GRPC_SERVER DEBUG: [0x51b000506b80] received request Name# SplitShard ok# false data# peer# 2025-04-17T18:01:53.638946Z node 6 :GRPC_SERVER DEBUG: [0x51b000508080] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-17T18:01:53.638963Z node 6 :GRPC_SERVER DEBUG: [0x51b0003d1d80] received request Name# SelfCheck ok# false data# peer# 2025-04-17T18:01:53.639168Z node 6 :GRPC_SERVER DEBUG: [0x51b0003d3280] received request Name# NodeCheck ok# false data# peer# 2025-04-17T18:01:53.639200Z node 6 :GRPC_SERVER DEBUG: [0x51b0003db080] received request Name# CreateSession ok# false data# peer# 2025-04-17T18:01:53.639399Z node 6 :GRPC_SERVER DEBUG: [0x51b0003dcc80] received request Name# DeleteSession ok# false data# peer# 2025-04-17T18:01:53.639414Z node 6 :GRPC_SERVER DEBUG: [0x51b0003dda80] received request Name# AttachSession ok# false data# peer# 2025-04-17T18:01:53.639624Z node 6 :GRPC_SERVER DEBUG: [0x51b000391880] received request Name# CommitTransaction ok# false data# peer# 2025-04-17T18:01:53.639633Z node 6 :GRPC_SERVER DEBUG: [0x51b0003ef280] received request Name# BeginTransaction ok# false data# peer# 2025-04-17T18:01:53.639817Z node 6 :GRPC_SERVER DEBUG: [0x51b000392680] received request Name# RollbackTransaction ok# false data# peer# 2025-04-17T18:01:53.639833Z node 6 :GRPC_SERVER DEBUG: [0x51b0003ee480] received request Name# ExecuteQuery ok# false data# peer# 2025-04-17T18:01:53.640013Z node 6 :GRPC_SERVER DEBUG: [0x51b0003d4e80] received request Name# ExecuteScript ok# false data# peer# 2025-04-17T18:01:53.640048Z node 6 :GRPC_SERVER DEBUG: [0x51b0003d9b80] received request Name# FetchScriptResults ok# false data# peer# 2025-04-17T18:01:53.640251Z node 6 :GRPC_SERVER DEBUG: [0x51b000392d80] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-17T18:01:53.640285Z node 6 :GRPC_SERVER DEBUG: [0x51b000393b80] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-17T18:01:53.640441Z node 6 :GRPC_SERVER DEBUG: [0x51b000395780] received request Name# RestartTablet ok# false data# peer# 2025-04-17T18:01:53.640472Z node 6 :GRPC_SERVER DEBUG: [0x51b000394980] received request Name# CreateLogStore ok# false data# peer# 2025-04-17T18:01:53.640622Z node 6 :GRPC_SERVER DEBUG: [0x51b00032b980] received request Name# DescribeLogStore ok# false data# peer# 2025-04-17T18:01:53.640664Z node 6 :GRPC_SERVER DEBUG: [0x51b00032d580] received request Name# DropLogStore ok# false data# peer# 2025-04-17T18:01:53.640790Z node 6 :GRPC_SERVER DEBUG: [0x51b000098b80] received request Name# AlterLogStore ok# false data# peer# 2025-04-17T18:01:53.640874Z node 6 :GRPC_SERVER DEBUG: [0x51b000265d80] received request Name# CreateLogTable ok# false data# peer# 2025-04-17T18:01:53.640966Z node 6 :GRPC_SERVER DEBUG: [0x51b000015780] received request Name# DescribeLogTable ok# false data# peer# 2025-04-17T18:01:53.641068Z node 6 :GRPC_SERVER DEBUG: [0x51b000327a80] received request Name# DropLogTable ok# false data# peer# 
2025-04-17T18:01:53.641158Z node 6 :GRPC_SERVER DEBUG: [0x51b000294a80] received request Name# AlterLogTable ok# false data# peer# 2025-04-17T18:01:53.641275Z node 6 :GRPC_SERVER DEBUG: [0x51b000372780] received request Name# Login ok# false data# peer# 2025-04-17T18:01:53.641343Z node 6 :GRPC_SERVER DEBUG: [0x51b000381580] received request Name# DescribeReplication ok# false data# peer# 2025-04-17T18:01:53.641485Z node 6 :GRPC_SERVER DEBUG: [0x51b000378280] received request Name# DescribeView ok# false data# peer# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3318, MsgBus: 20744 2025-04-17T17:59:35.074858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343305541698115:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:35.074894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e64/r3tmp/tmph9WSB3/pdisk_1.dat 2025-04-17T17:59:35.941869Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:35.977828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:35.977923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:35.985922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3318, node 1 2025-04-17T17:59:36.233594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:36.233628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:36.233636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:36.233747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20744 TClient is connected to server localhost:20744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:59:36.908379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T17:59:36.938748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:59:39.480029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343322721567841:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:39.480196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:39.487716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343322721567852:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:59:39.496701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T17:59:39.528803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343322721567855:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T17:59:39.609495Z node 1 :TX_PROXY ERROR: Actor# [1:7494343322721567906:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:59:40.075684Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343305541698115:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:40.075794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:59:40.454624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T17:59:40.793869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:59:40.794072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:59:40.794386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:59:40.794561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:59:40.794678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:59:40.794755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T17:59:40.794822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:59:40.794850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T17:59:40.794936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:59:40.795300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T17:59:40.795439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T17:59:40.795555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T17:59:40.795644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T17:59:40.795737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T17:59:40.795853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:59:40.795968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:59:40.796085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:59:40.796193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:59:40.796353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494343327016535430:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:59:40.796882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T17:59:40.797043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T17:59:40.797173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T17:59:40.797283Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T17:59:40.797380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494343327016535418:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T17:59:40.836072Z node 1 :TX_ ... p:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.625513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.628989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.634022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.640364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.640767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.647992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.650955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.656654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.658961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.662938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.668969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.668991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.676133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.676366Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.690070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.694552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.698886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.700888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.706028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.707938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.712794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.713583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.719697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.720146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.726788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.726911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.738781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.740417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.746393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.747381Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.755712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.759243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.764688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.769192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.774087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.778817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.789696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:18.792367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:19.009637Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ehj0340mcbnaay9yqdj6e", SessionId: ydb://session/3?node_id=1&id=NTE5ZTAxYzUtOWJlOTUzZmItMTdiOGY5MzYtOTBhZTI2OGE=, Slow query, duration: 50.589564s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:01:20.687779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:01:20.688281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:01:20.689081Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;self_id=[1:7494343610484423116:9343];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:01:20.689465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:01:48.113057Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ekmys412y2yqhmyyz1dgd", SessionId: ydb://session/3?node_id=1&id=NTE5ZTAxYzUtOWJlOTUzZmItMTdiOGY5MzYtOTBhZTI2OGE=, Slow query, duration: 11.127386s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- NB: Subquerys\n$orders_with_several_warehouses = (\n select cs_order_number\n from `/Root/test/ds/catalog_sales`\n group by cs_order_number\n having count(distinct cs_warehouse_sk) > 1\n);\n\n-- start query 1 in stream 0 using template query16.tpl and seed 171719422\nselect\n count(distinct cs1.cs_order_number) as `order count`\n ,sum(cs_ext_ship_cost) as `total shipping cost`\n ,sum(cs_net_profit) as `total net profit`\nfrom\n `/Root/test/ds/catalog_sales` cs1\n cross join `/Root/test/ds/date_dim`\n cross join `/Root/test/ds/customer_address`\n cross join `/Root/test/ds/call_center`\n left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number\n left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand cs1.cs_ship_date_sk = d_date_sk\nand cs1.cs_ship_addr_sk = ca_address_sk\nand ca_state = 'IL'\nand cs1.cs_call_center_sk = cc_call_center_sk\nand cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',\n 'Raleigh County'\n)\norder by `order count`\nlimit 100;\n", parameters: 0b >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> TPQTest::TestTimeRetention [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD] Test command err: Trying to start YDB, gRPC: 13940, MsgBus: 17502 2025-04-17T17:59:58.250106Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343402997003797:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:58.250164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e63/r3tmp/tmptUewby/pdisk_1.dat 2025-04-17T17:59:58.964376Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:58.984925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:58.985011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:58.996107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13940, node 1 2025-04-17T17:59:59.313566Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:59.313589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:59.313597Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:59.313711Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17502 TClient is connected to server localhost:17502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:00.301223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:00.348765Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:02.624010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343420176873654:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:02.624114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:02.624568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343420176873666:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:02.628661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:00:02.647119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343420176873668:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:00:02.705028Z node 1 :TX_PROXY ERROR: Actor# [1:7494343420176873719:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:03.040004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:00:03.282375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:00:03.282522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:00:03.282783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:00:03.282893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:00:03.282962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:00:03.283021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:00:03.283074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:00:03.283146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:00:03.283234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:00:03.283319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:00:03.283399Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:00:03.283464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494343424471841278:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:00:03.292984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:00:03.293045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:00:03.293222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:00:03.293326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:00:03.293413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:00:03.293481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:00:03.293579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:00:03.293646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:00:03.293706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:00:03.293777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:00:03.293868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:00:03.293946Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7494343424471841286:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:00:03.322639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494343424471841280:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:00:03.322708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494343424471841280:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abs ... ARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.312697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.313284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.316665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.318632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.320250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.323680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.323966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.329106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.329595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.335160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.337681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.341362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.343345Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.347203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.347718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.352851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.353196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.357636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.359146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.363124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.364861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.368645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.370690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039224;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.373891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.376149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.379545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.381700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.385005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:01:41.386558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.389379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.391746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.394867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.397425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.400450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.403048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.406130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.408702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.411803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.414425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.419756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.420279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.425831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.425831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.432218Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.432219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.438497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:01:41.530550Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ejhcqdp9c2f9e25e2tr8y", SessionId: ydb://session/3?node_id=1&id=MmRkODUzLWFiOTA4MTlhLTM0NTVmNGUtZjkzNzFlMmU=, Slow query, duration: 40.962128s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:01:41.746077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:01:41.746851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:01:41.747033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 61983, MsgBus: 65215 2025-04-17T18:00:29.439246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343536176395106:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:29.439370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a89/r3tmp/tmp4M64df/pdisk_1.dat 2025-04-17T18:00:30.046494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:30.048417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:30.048521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:30.055256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61983, node 1 2025-04-17T18:00:30.226640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:30.226674Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-17T18:00:30.226682Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:30.226800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65215 TClient is connected to server localhost:65215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:31.031623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:31.084479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.440591Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343536176395106:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:34.440676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:34.711943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343557651232633:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.712092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.712521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343557651232645:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:34.716963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:00:34.741174Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T18:00:34.741680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343557651232647:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:00:34.829766Z node 1 :TX_PROXY ERROR: Actor# [1:7494343557651232698:2614] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15434, MsgBus: 18170 2025-04-17T18:00:38.397016Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343573719167358:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:38.414087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a89/r3tmp/tmpxTmXiU/pdisk_1.dat 2025-04-17T18:00:38.566314Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:38.566687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:38.566734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:38.574601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15434, node 2 2025-04-17T18:00:38.746593Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:38.746608Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:38.746614Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:38.746705Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18170 TClient is connected to server localhost:18170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:39.242920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:39.273953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
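A note on the DDL quoted in the 40.96 s KQP_SLOW_LOG record further above (TraceId "01js2ejhcqdp9c2f9e25e2tr8y"): the query text is logged with escaped newlines, which makes it hard to read in place. Unescaped, it is the following YQL, reconstructed verbatim from the log with nothing added:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Each statement creates a column-store table pre-split into at least 240 partitions, which is consistent with the long runs of per-tablet TX_COLUMNSHARD_TX finished_tx warnings preceding the slow-query record: each column shard reports completion of tx 281474976710714 individually, so creating three such tables yields hundreds of near-identical records.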
2025-04-17T18:00:43.398516Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343573719167358:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:43.398609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:43.835515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343595194004747:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:43.835624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:43.835698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343595194004759:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:43.840367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:00:43.854521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343595194004761:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:00:43.951318Z node 2 :TX_PROXY ERROR: Actor# [2:7494343595194004812:2609] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:45.868195Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494343599488972150:2354] TxId: 281474976715661. Ctx: { TraceId: 01js2ej11mbw7s8mv1w9xg82eg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTBmNWEwMTMtOGM3ZjBkZGYtNjRjMjczMzEtZWQ2OGZjNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442499 > 50331648 2025-04-17T18:00:45.868526Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTBmNWEwMTMtOGM3ZjBkZGYtNjRjMjczMzEtZWQ2OGZjNjk=, ActorId: [2:7494343590899037432:2354], ActorState: ExecuteState, TraceId: 01js2ej11mbw7s8mv1w9xg82eg, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442499 > 50331648), code: 200509 Trying to start YDB, gRPC: 6621, MsgBus: 21322 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a89/r3tmp/tmpwBck4q/pdisk_1.dat 2025-04-17T18:00:46.824480Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/m ... d or you don't have access permissions } 2025-04-17T18:00:52.780248Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494343637473863182:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:52.784326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:52.800886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494343637473863184:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:00:52.883253Z node 3 :TX_PROXY ERROR: Actor# [3:7494343637473863242:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:01.802587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:01:01.802614Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:47.391966Z node 3 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2ejc105c8zf4kra4a59p7z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTVlNjZhYjItZWNmNmE4YTMtYmUwMGE1YzQtOTcyYTFiNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded. 2025-04-17T18:01:47.400606Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTVlNjZhYjItZWNmNmE4YTMtYmUwMGE1YzQtOTcyYTFiNjI=, ActorId: [3:7494343650358765480:2517], ActorState: ExecuteState, TraceId: 01js2ejc105c8zf4kra4a59p7z, Create QueryResponse for error on request, msg: 2025-04-17T18:01:47.400813Z node 3 :KQP_SLOW_LOG WARN: TraceId: "01js2ejc105c8zf4kra4a59p7z", SessionId: ydb://session/3?node_id=3&id=YTVlNjZhYjItZWNmNmE4YTMtYmUwMGE1YzQtOTcyYTFiNjI=, Slow query, duration: 52.327750s, status: PRECONDITION_FAILED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT ToDict(\n ListMap(\n ListFromRange(0ul, 5000000ul),\n ($x) -> { RETURN AsTuple($x, $x + 1); }\n )\n );\n ", parameters: 0b
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
: Error: Memory limit exceeded, code: 2029 Trying to start YDB, gRPC: 28782, MsgBus: 21651 2025-04-17T18:01:48.251949Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343876215473736:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:48.252024Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a89/r3tmp/tmpXnoEmu/pdisk_1.dat 2025-04-17T18:01:48.406758Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:48.442403Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:48.442521Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:48.444374Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28782, node 4 2025-04-17T18:01:48.495696Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:48.495747Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:48.495758Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:48.495910Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21651 TClient is connected to server localhost:21651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:49.011809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:49.039556Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:01:49.048825Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
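For readability, the failing query from the 52.33 s PRECONDITION_FAILED slow-query record above (TKqpLiteralExecuter, memory limit exceeded) unescapes to the following YQL, again reconstructed verbatim from the log:

SELECT ToDict(
    ListMap(
        ListFromRange(0ul, 5000000ul),
        ($x) -> { RETURN AsTuple($x, $x + 1); }
    )
);

The :5:53 warning in the issue list (code 1107) points at the expression $x + 1: $x is Uint64, produced by ListFromRange(0ul, 5000000ul), while the bare literal 1 is Int32, so the addition implies an integral bitcast. Writing the literal with an explicit suffix, AsTuple($x, $x + 1ul), would presumably avoid that warning; this is an editorial suggestion, not something the test does. The error itself (Memory limit exceeded, code 2029) follows from materializing a five-million-entry dictionary inside the literal executer.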
2025-04-17T18:01:49.140291Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:01:49.372294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:49.474078Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:52.660186Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343893395344685:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:52.660283Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:52.712222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:52.782200Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:52.831967Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:52.861397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:52.902284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:52.948011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:52.996479Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.067674Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343897690312639:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.067746Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.067829Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343897690312644:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.071459Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:53.081639Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494343897690312646:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:53.139286Z node 4 :TX_PROXY ERROR: Actor# [4:7494343897690312700:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:53.252290Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494343876215473736:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:53.252374Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:54.201844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:01:57.518676Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmZkN2JmNC03MGJiNzc4Mi1lYjZmNzIwMC1lMWRlMjIxYg==, ActorId: [4:7494343901985280335:2513], ActorState: ExecuteState, TraceId: 01js2em8px8d812t3tb5pw6y5t, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQReadAhead [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:59:07.569422Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:07.569503Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:59:07.606752Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:07.627175Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-04-17T17:59:07.628124Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: 
StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:59:07.630602Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:59:07.642327Z node 1 :PERSQUEUE INFO: new Cookie default|226d19f2-15d20b55-58d892d4-8d71dbdc_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user3" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:59:11.646239Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:11.646321Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-04-17T17:59:11.664251Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:11.664874Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "important_user" Generation: 2 Important: true } 2025-04-17T17:59:11.665429Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-04-17T17:59:11.667801Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-04-17T17:59:11.677490Z node 2 :PERSQUEUE INFO: new Cookie default|88a7504f-4a509770-fce5c93e-21fe369f_0 generated for 
partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user3" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:59:15.863159Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:15.863225Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-17T17:59:15.880013Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:15.880494Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "important_user" Generation: 3 Important: true } 2025-04-17T17:59:15.880981Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-17T17:59:15.883254Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-17T17:59:15.893257Z node 3 :PERSQUEUE INFO: new Cookie default|97e37b71-800867f3-70a46ac-6d987a7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 
2147483647 } Cookie: 123 } via pipe: [3:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user3" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-17T17:59:19.881360Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:19.881439Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-17T17:59:19.907604Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:19.908645Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } Consumers { Name: "aaa" Generati ... ie: 123 } via pipe: [77:177:2192] !Reboot 72057594037927937 (actor [77:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! 
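The read pattern that TPQTest::TestPQReadAhead drives is visible in the repeated "Send read request" records above. As an annotated sketch, here is one of those requests in text-format protobuf, copied from the log with comments added; the comments are inferences from the field names and values, not documented semantics:

PartitionRequest {
  Partition: 0
  CmdRead {
    ClientId: "user1"     # client/consumer identifier
    SessionId: ""
    Offset: 0             # starting offset within partition 0
    Count: 2147483647     # INT32_MAX, effectively unbounded message count
    Bytes: 2147483647     # INT32_MAX, effectively unbounded byte cap
  }
  Cookie: 123
}

Later requests in the same test tighten the caps to Bytes: 102400 (100 KiB) and to Count: 1 with Bytes: 104857600 (100 MiB), which is plausibly what exercises the read-ahead path, given the "PQ Cache (L2). Same blob insertion" messages interleaved with them.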
Leader for TabletID 72057594037927937 is [77:107:2139] sender: [77:249:2057] recipient: [77:99:2134] Leader for TabletID 72057594037927937 is [77:107:2139] sender: [77:252:2057] recipient: [77:251:2254] Leader for TabletID 72057594037927937 is [77:107:2139] sender: [77:253:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:254:2255] sender: [77:255:2057] recipient: [77:251:2254] 2025-04-17T18:01:52.741871Z node 77 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:52.741971Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:01:52.743029Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [77:303:2296] 2025-04-17T18:01:52.747238Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [77:304:2297] 2025-04-17T18:01:52.761979Z node 77 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:01:52.762086Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [77:304:2297] 2025-04-17T18:01:52.776069Z node 77 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:01:52.776194Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [77:303:2296] 2025-04-17T18:01:52.819981Z node 77 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 12 parts 16 size 8365317 !Reboot 72057594037927937 (actor [77:107:2139]) rebooted! !Reboot 72057594037927937 (actor [77:107:2139]) tablet resolver refreshed! new actor is[77:254:2255] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [77:177:2192] Leader for TabletID 72057594037927937 is [77:254:2255] sender: [77:356:2057] recipient: [77:14:2061] 2025-04-17T18:01:54.159914Z node 77 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 12 partno 2 count 8 parts 15 size 7877895 Leader for TabletID 72057594037927937 is [0:0:0] sender: [78:103:2057] recipient: [78:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [78:103:2057] recipient: [78:101:2135] Leader for TabletID 72057594037927937 is [78:107:2139] sender: [78:108:2057] recipient: [78:101:2135] 2025-04-17T18:01:54.787763Z node 78 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:54.787848Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [78:149:2057] recipient: [78:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [78:149:2057] recipient: [78:147:2170] Leader for TabletID 72057594037927938 is [78:153:2174] sender: [78:154:2057] recipient: [78:147:2170] Leader for TabletID 72057594037927937 is [78:107:2139] sender: [78:179:2057] recipient: [78:14:2061] 2025-04-17T18:01:54.813574Z node 78 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:54.814506Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 78 actor [78:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 78 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 78 ReadRuleGenerations: 78 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 78 Important: false } Consumers { Name: "aaa" Generation: 78 Important: true } 2025-04-17T18:01:54.815134Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [78:185:2198] 2025-04-17T18:01:54.817703Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [78:185:2198] 2025-04-17T18:01:54.820512Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [78:186:2199] 2025-04-17T18:01:54.822533Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [78:186:2199] 2025-04-17T18:01:54.861579Z node 78 :PERSQUEUE INFO: new Cookie default|4dd16db5-3a56fbbc-f9c185b2-a8f167df_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 
Bytes: 104857600 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [78:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [78:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [79:103:2057] recipient: [79:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [79:103:2057] recipient: [79:101:2135] Leader for TabletID 72057594037927937 is [79:107:2139] sender: [79:108:2057] recipient: [79:101:2135] 2025-04-17T18:01:56.030463Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:56.030521Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [79:149:2057] recipient: [79:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [79:149:2057] recipient: [79:147:2170] Leader for TabletID 72057594037927938 is [79:153:2174] sender: [79:154:2057] recipient: [79:147:2170] Leader for TabletID 72057594037927937 is [79:107:2139] sender: [79:179:2057] recipient: [79:14:2061] 2025-04-17T18:01:56.050489Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:01:56.051327Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 79 actor [79:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 79 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 79 ReadRuleGenerations: 79 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 79 Important: false } Consumers { Name: "aaa" Generation: 79 Important: true } 2025-04-17T18:01:56.051919Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:185:2198] 2025-04-17T18:01:56.054684Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [79:185:2198] 2025-04-17T18:01:56.057334Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:186:2199] 2025-04-17T18:01:56.059417Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' 
partition 1 generation 2 [79:186:2199] 2025-04-17T18:01:56.099142Z node 79 :PERSQUEUE INFO: new Cookie default|67ac6b89-c117300e-c96e0b3d-14c3d2e8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [79:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [79:177:2192] |83.8%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQCompatTest::ReadWriteSessions [GOOD] >> TPersQueueTest::EventBatching [GOOD] >> TPersQueueTest::DisableWrongSettings |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> TTopicYqlTest::BadRequests [GOOD] |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTimeRetention [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:59:22.095237Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:22.095330Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:59:22.132374Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-17T17:59:22.132523Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T17:59:22.188917Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-17T17:59:22.200328Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-17T17:59:22.200542Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:22.202831Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-17T17:59:22.202944Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:59:22.203024Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:59:22.203059Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:59:22.203087Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-04-17T17:59:22.203640Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:22.203982Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:59:22.210583Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-04-17T17:59:22.210677Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:59:22.210749Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:59:22.212996Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-17T17:59:22.213125Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-17T17:59:22.213167Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-17T17:59:22.213206Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-04-17T17:59:22.213241Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-04-17T17:59:22.213386Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:22.213426Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:22.213469Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T17:59:22.213514Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-17T17:59:22.213562Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T17:59:22.213602Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T17:59:22.213628Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000ctest 2025-04-17T17:59:22.213649Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000utest 2025-04-17T17:59:22.213682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:22.213723Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-17T17:59:22.213801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:22.213848Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:22.213991Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T17:59:22.218549Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T17:59:22.218853Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-17T17:59:22.220696Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 
2025-04-17T17:59:22.220754Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:59:22.220804Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T17:59:22.226648Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T17:59:22.226740Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-04-17T17:59:22.226774Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-04-17T17:59:22.226814Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2025-04-17T17:59:22.226875Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2025-04-17T17:59:22.226982Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T17:59:22.227012Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-17T17:59:22.227041Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-17T17:59:22.227081Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-17T17:59:22.227111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-17T17:59:22.227132Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-17T17:59:22.227150Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001ctest 2025-04-17T17:59:22.227172Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001utest 2025-04-17T17:59:22.227193Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-17T17:59:22.227221Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-17T17:59:22.227297Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T17:59:22.227328Z node 1 :PERSQUEUE DEBUG: [P ... 
imestampsMs: 0 Generation: 75 Important: false } 2025-04-17T18:02:01.731955Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [77:185:2198] 2025-04-17T18:02:01.735127Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [77:185:2198] 2025-04-17T18:02:01.737819Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [77:186:2199] 2025-04-17T18:02:01.739676Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [77:186:2199] 2025-04-17T18:02:01.755921Z node 77 :PERSQUEUE INFO: new Cookie default|64b0194f-545fb363-1a0bec61-8e4eecda_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:01.763553Z node 77 :PERSQUEUE INFO: new Cookie default|60e17344-df8e4802-de1490c4-d5b1d773_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:01.773397Z node 77 :PERSQUEUE INFO: new Cookie default|9f3c2a74-ae743dca-b2dc4260-864ab9f2_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:01.784356Z node 77 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:01.789805Z node 77 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 76 actor [77:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 76 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 75 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 75 Important: false } 2025-04-17T18:02:01.796630Z node 77 :PERSQUEUE INFO: new Cookie default|7ecae53c-4988663a-80c16df8-66903c73_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:01.810366Z node 77 :PERSQUEUE INFO: new Cookie default|abf13f99-ab64a0e9-18a334a9-8b08b51e_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:01.827406Z node 77 :PERSQUEUE INFO: new Cookie default|6e58df1a-c6385a9f-68bf79d7-2aff48a9_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [78:103:2057] recipient: [78:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [78:103:2057] recipient: [78:101:2135] Leader for TabletID 72057594037927937 is [78:107:2139] sender: [78:108:2057] recipient: [78:101:2135] 2025-04-17T18:02:02.393088Z node 78 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:02.393185Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [78:149:2057] recipient: [78:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [78:149:2057] recipient: [78:147:2170] Leader for TabletID 72057594037927938 is [78:153:2174] sender: 
[78:154:2057] recipient: [78:147:2170] Leader for TabletID 72057594037927937 is [78:107:2139] sender: [78:179:2057] recipient: [78:14:2061] 2025-04-17T18:02:02.426111Z node 78 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:02.427087Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 77 actor [78:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 77 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 77 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 77 Important: false } 2025-04-17T18:02:02.427796Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [78:185:2198] 2025-04-17T18:02:02.430068Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [78:185:2198] 2025-04-17T18:02:02.431651Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [78:186:2199] 2025-04-17T18:02:02.433241Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [78:186:2199] 2025-04-17T18:02:02.440925Z node 78 :PERSQUEUE INFO: new Cookie default|8cf985fe-7df21fc1-afe4dd48-3800f2e9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:02.450661Z node 78 :PERSQUEUE INFO: new Cookie default|fb9a5fd0-674edc68-98f2a499-2fd256c_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:02.458983Z node 78 :PERSQUEUE INFO: new Cookie default|3d8434f1-f2be847f-6e3ff60f-8676098c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:02.470377Z node 78 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:02.474652Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 78 actor [78:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 78 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 77 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 77 Important: false } 2025-04-17T18:02:02.484028Z node 78 :PERSQUEUE INFO: new Cookie default|6433fe8-3ab9a216-2ab41ab6-50086538_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:02.501860Z node 78 :PERSQUEUE INFO: new Cookie default|579da141-ece0e1dc-c37db55f-460ac9ca_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner 
default 2025-04-17T18:02:02.518361Z node 78 :PERSQUEUE INFO: new Cookie default|c1f10f29-a00661df-bfb84521-a6d86209_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [79:103:2057] recipient: [79:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [79:103:2057] recipient: [79:101:2135] Leader for TabletID 72057594037927937 is [79:107:2139] sender: [79:108:2057] recipient: [79:101:2135] 2025-04-17T18:02:03.074011Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:03.074067Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [79:149:2057] recipient: [79:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [79:149:2057] recipient: [79:147:2170] Leader for TabletID 72057594037927938 is [79:153:2174] sender: [79:154:2057] recipient: [79:147:2170] Leader for TabletID 72057594037927937 is [79:107:2139] sender: [79:179:2057] recipient: [79:14:2061] 2025-04-17T18:02:03.090487Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:03.091328Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 79 actor [79:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 79 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 79 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 79 Important: false } 2025-04-17T18:02:03.091959Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:185:2198] 2025-04-17T18:02:03.094599Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [79:185:2198] 2025-04-17T18:02:03.095862Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:186:2199] 2025-04-17T18:02:03.097389Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [79:186:2199] 2025-04-17T18:02:03.102487Z node 79 :PERSQUEUE INFO: new Cookie default|65086801-46699550-421f1077-7758f668_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:03.109598Z node 79 :PERSQUEUE INFO: new Cookie default|3a72df93-ab94e7e1-251cb0d5-a7429230_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:03.115361Z node 79 :PERSQUEUE INFO: new Cookie default|62be8e9-9c0923dc-8b045c55-afa8ec7c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:03.125214Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:03.130028Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 80 actor 
[79:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 80 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 79 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 79 Important: false } 2025-04-17T18:02:03.136467Z node 79 :PERSQUEUE INFO: new Cookie default|40fb893a-2b42a60-85511fa2-2cefeedf_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:03.148411Z node 79 :PERSQUEUE INFO: new Cookie default|28d1fdea-2dd77fb8-86dbaacf-46e85e7_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T18:02:03.160114Z node 79 :PERSQUEUE INFO: new Cookie default|b5682ce3-69e94a76-8ab21932-815a6137_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.9%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |83.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2025-04-17T17:57:20.826617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342725740489881:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:20.826681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:21.252545Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000621/r3tmp/tmpIApO8y/pdisk_1.dat 2025-04-17T17:57:21.292756Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:21.400207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:21.846385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:22.130428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-17T17:57:22.130542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:22.136791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:22.136940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:22.144307Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:22.144473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:22.149355Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:22.151460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61056, node 1 2025-04-17T17:57:22.466886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/000621/r3tmp/yandexriXG6P.tmp 2025-04-17T17:57:22.466926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/000621/r3tmp/yandexriXG6P.tmp 2025-04-17T17:57:22.467072Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/000621/r3tmp/yandexriXG6P.tmp 2025-04-17T17:57:22.468726Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:22.629269Z INFO: TTestServer started on Port 22441 GrpcPort 61056 TClient is connected to server localhost:22441 PQClient connected to localhost:61056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:23.224607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:57:23.344983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
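Every record in these traces has the same shape: an ISO-8601 timestamp with microsecond precision, the node id, a :COMPONENT tag, a severity (DEBUG, INFO, NOTICE, WARN or ERROR in this output), then the free-form message. A small C++ sketch of a parser for that shape, handy for slicing a trace like this one by component or severity; the TLogRecord/ParseRecord names are illustrative, the format is inferred from this log alone, and records that wrap onto several lines would need to be rejoined first.

#include <iostream>
#include <optional>
#include <regex>
#include <string>

struct TLogRecord {
    std::string Timestamp;  // 2025-04-17T17:57:22.130542Z
    int Node = 0;           // 1
    std::string Component;  // HIVE, PERSQUEUE, KQP_SESSION, ...
    std::string Severity;   // DEBUG | INFO | NOTICE | WARN | ERROR
    std::string Message;    // everything after "<SEVERITY>: "
};

// Matches records like:
//   2025-04-17T17:57:22.130542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, ...) ...
std::optional<TLogRecord> ParseRecord(const std::string& line) {
    static const std::regex re(
        R"(^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) node (\d+) :(\S+) (DEBUG|INFO|NOTICE|WARN|ERROR): (.*)$)");
    std::smatch m;
    if (!std::regex_match(line, m, re)) {
        return std::nullopt;
    }
    return TLogRecord{m[1], std::stoi(m[2]), m[3], m[4], m[5]};
}

int main() {
    const std::string sample =
        "2025-04-17T17:57:22.130542Z node 1 :HIVE WARN: "
        "HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting";
    if (auto r = ParseRecord(sample)) {
        std::cout << r->Severity << " " << r->Component << " node " << r->Node
                  << ": " << r->Message << "\n";
    }
    return 0;
}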
2025-04-17T17:57:25.830279Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342725740489881:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:25.830340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:26.197678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342751510294793:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:26.197788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:26.198080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342751510294805:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:26.203245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:57:26.228560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342751510294807:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:57:26.306282Z node 1 :TX_PROXY ERROR: Actor# [1:7494342751510294903:2797] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:26.755738Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342751066062456:2319], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:26.761569Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342751510294920:2351], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:26.757545Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDE3ZjAzODUtMzUxMWRiYzMtYmEyMDNiZTYtYTA3YTNhODI=, ActorId: [2:7494342751066062417:2313], ActorState: ExecuteState, TraceId: 01js2ec06q6q3dxnf7m3r1fdnt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:26.770771Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjI1ZjJiM2YtZDlmYmI4ODMtNTg1ODM3YWUtOTBlZjI4MzM=, ActorId: [1:7494342751510294790:2339], ActorState: ExecuteState, TraceId: 01js2ec01j5a645dqsag2mxhd2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:26.773459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:26.776091Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:26.776660Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:26.925747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:27.100738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:57:27.448045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2ec11v9aca62ya737x8g1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIzODBjNjgtNTI4ZGQyOS03MTE1ZmMxZi1mM2Y5YTk5MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342755805262669:3129] === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:61056 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-17T17:57:32.638475Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:61056 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 2 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } 2025-04-17T17 ... eration: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7494343881070151536 RawX2: 107374184598 } Partitions { Partition { PartitionId: 0 } } 2025-04-17T18:02:01.653510Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:02:01.658416Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:02:01.658460Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-04-17T18:02:01.658486Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State CALCULATED 2025-04-17T18:02:01.658513Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State CALCULATED FrontTxId 281474976710676 2025-04-17T18:02:01.658538Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS 2025-04-17T18:02:01.658572Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from CALCULATED to WAIT_RS 2025-04-17T18:02:01.658631Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
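The PERSQUEUE records above and below walk TxId 281474976710676 through the PQ tablet's transaction chain, CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING, persisting the tx record to the KV tablet at the durable steps (the TEvKeyValue WRITE_TX_COOKIE requests). A compact C++ sketch of that progression as a table-driven state machine; the ETxState/NextState names are invented here and the transition set is read off this log only, so abort and restart edges of the real tablet logic are not represented.

#include <iostream>
#include <map>
#include <string>

// Transaction states exactly as they are printed in the PERSQUEUE records.
enum class ETxState { Calculated, WaitRs, Executing, Executed, WaitRsAcks, Deleting };

const char* Name(ETxState s) {
    switch (s) {
        case ETxState::Calculated: return "CALCULATED";
        case ETxState::WaitRs:     return "WAIT_RS";
        case ETxState::Executing:  return "EXECUTING";
        case ETxState::Executed:   return "EXECUTED";
        case ETxState::WaitRsAcks: return "WAIT_RS_ACKS";
        case ETxState::Deleting:   return "DELETING";
    }
    return "UNKNOWN";
}

// Forward edges observed for this transaction in the trace.
const std::map<ETxState, ETxState> NextState = {
    {ETxState::Calculated, ETxState::WaitRs},
    {ETxState::WaitRs,     ETxState::Executing},
    {ETxState::Executing,  ETxState::Executed},
    {ETxState::Executed,   ETxState::WaitRsAcks},
    {ETxState::WaitRsAcks, ETxState::Deleting},
};

int main() {
    ETxState s = ETxState::Calculated;
    for (auto it = NextState.find(s); it != NextState.end(); it = NextState.find(s)) {
        // Mirrors the "TxId ... moved from X to Y" lines in the trace.
        std::cout << "TxId 281474976710676 moved from " << Name(s)
                  << " to " << Name(it->second) << "\n";
        s = it->second;
    }
    return 0;
}

With no other participants ("Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders."), HaveParticipantsDecision is immediately 1, which is why WAIT_RS is passed through in the same batch of records below.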
2025-04-17T18:02:01.658665Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-04-17T18:02:01.658739Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState EXECUTING 2025-04-17T18:02:01.658776Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from WAIT_RS to EXECUTING 2025-04-17T18:02:01.658798Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-04-17T18:02:01.658856Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1744912921682, TxId 281474976710676 2025-04-17T18:02:01.659479Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:02:01.659517Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:02:01.659538Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:02:01.659562Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:02:01.659573Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-04-17T18:02:01.659586Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc1 2025-04-17T18:02:01.659597Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc1 2025-04-17T18:02:01.659610Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc2 2025-04-17T18:02:01.659621Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc2 2025-04-17T18:02:01.659633Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-04-17T18:02:01.659656Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:02:01.659679Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:02:01.659709Z node 26 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T18:02:01.662506Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:02:01.662572Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1744912921682, TxId 281474976710676, Partition 0 2025-04-17T18:02:01.662593Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-04-17T18:02:01.662612Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTING 2025-04-17T18:02:01.662637Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTING FrontTxId 281474976710676 2025-04-17T18:02:01.662652Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-17T18:02:01.662687Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976710676 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-17T18:02:01.662730Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976710676 2025-04-17T18:02:01.663227Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-17T18:02:01.663382Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:02:01.663516Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976710676 2025-04-17T18:02:01.663550Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState EXECUTED 2025-04-17T18:02:01.663585Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTING to EXECUTED 2025-04-17T18:02:01.664079Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] save tx TxId: 281474976710676 State: EXECUTED MinStep: 1744912921556 MaxStep: 18446744073709551615 Step: 1744912921682 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { 
MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7494343881070151536 RawX2: 107374184598 } Partitions { Partition { PartitionId: 0 } } 2025-04-17T18:02:01.664383Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:02:01.668234Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:02:01.668261Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-17T18:02:01.668278Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTED 2025-04-17T18:02:01.668297Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-04-17T18:02:01.668316Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T18:02:01.668335Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-04-17T18:02:01.668356Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T18:02:01.668381Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-17T18:02:01.668390Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T18:02:01.668402Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-17T18:02:01.668423Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710676 to the list for deletion 2025-04-17T18:02:01.668442Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState DELETING 2025-04-17T18:02:01.668466Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710676 2025-04-17T18:02:01.668520Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:02:01.673080Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:02:01.673128Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-17T18:02:01.673152Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State DELETING 2025-04-17T18:02:01.673178Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710676 2025-04-17T18:02:02.377637Z node 25 :KQP_EXECUTER ERROR: ActorId: [25:7494343936904728406:2511] TxId: 281474976710678. Ctx: { TraceId: 01js2emdr76y17e955s6ghetm4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=ZjEyMzExZTYtYjhhMWNjYzEtN2FhZDRkM2MtMmEwMmJkZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-04-17T18:02:02.377830Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7494343936904728410:2511], TxId: 281474976710678, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2emdr76y17e955s6ghetm4. SessionId : ydb://session/3?node_id=25&id=ZjEyMzExZTYtYjhhMWNjYzEtN2FhZDRkM2MtMmEwMmJkZDc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [25:7494343936904728406:2511], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2025-04-17T17:57:14.492278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342699463934151:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:14.492492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:14.637089Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342700006483421:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:14.637146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:14.906099Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:14.914758Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00089b/r3tmp/tmpNYJgjQ/pdisk_1.dat 2025-04-17T17:57:15.620871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:15.660239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:15.663081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:15.663221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:15.664755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:15.664860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:15.675213Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:15.675429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:15.694737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:15.747550Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24667, node 1 2025-04-17T17:57:15.864738Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:57:15.865769Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:57:16.101131Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00089b/r3tmp/yandexj9vzZz.tmp 2025-04-17T17:57:16.101163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00089b/r3tmp/yandexj9vzZz.tmp 2025-04-17T17:57:16.101323Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00089b/r3tmp/yandexj9vzZz.tmp 2025-04-17T17:57:16.101442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2025-04-17T17:57:16.209081Z INFO: TTestServer started on Port 5589 GrpcPort 24667 TClient is connected to server localhost:5589 PQClient connected to localhost:24667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:16.760180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:57:16.873722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:57:19.463951Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342699463934151:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:19.464023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:19.638295Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342700006483421:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:19.638349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:20.262680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342725233738908:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.262857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.263424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342725233738935:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:20.271054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:57:20.318765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342725233738937:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:57:20.387263Z node 1 :TX_PROXY ERROR: Actor# [1:7494342725233739029:2778] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:20.790389Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342725776287477:2320], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:20.792063Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342725233739046:2351], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:20.790870Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWU0YzBmNy1kNWY5NDFhNC05NzUyZmJhNS0xYjc2ZTdlMw==, ActorId: [2:7494342725776287457:2314], ActorState: ExecuteState, TraceId: 01js2ebte904fzkhfhf92qp3rh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:20.793511Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVhZDEyNDUtMzhkYTM5NzMtNGRmNDU3MTYtOWQ3NDVmYzA=, ActorId: [1:7494342725233738905:2339], ActorState: ExecuteState, TraceId: 01js2ebt6g4yap6awqmwsj5ebn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:20.793012Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:20.793830Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:20.799530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:20.910349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:21.086573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:57:21.481283Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2ebv6m8ta8cvbd2fpd92fq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzZiNjI5YjUtZDY1MTM0MzQtYTU5ZjE0ODQtMzhjZTg0MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342729528706794:3110] === CheckClustersList. 
Ok 2025-04-17T17:57:26.692576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose it ... d/user_27_5_14121183594543544853_v1 2025-04-17T18:02:01.767771Z node 27 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-17T18:02:01.767787Z node 27 :PQ_READ_PROXY DEBUG: new session created cookie 6 2025-04-17T18:02:01.767787Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [27:7494343933505789821:2710] disconnected; active server actors: 1 2025-04-17T18:02:01.767836Z node 28 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [27:7494343933505789821:2710] client user disconnected session shared/user_27_5_14121183594543544853_v1 2025-04-17T18:02:01.768084Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-17T18:02:01.768247Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 read init: from# ipv6:[::1]:52188, request# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-17T18:02:01.768515Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 auth for : user 2025-04-17T18:02:01.768548Z node 27 :PQ_METACACHE DEBUG: Handle describe topics 2025-04-17T18:02:01.768561Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2025-04-17T18:02:01.768602Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 requests infly, db = "Root/LbCommunal" 2025-04-17T18:02:01.769048Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: Root/LbCommunal DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:02:01.769159Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2025-04-17T18:02:01.769196Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 Handle describe topics response 2025-04-17T18:02:01.769383Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 auth is DEAD 2025-04-17T18:02:01.769409Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 auth ok: topics# 1, initDone# 0 2025-04-17T18:02:01.770354Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 register session: topic# rt3.dc2--account--topic2 ===Got response: status: SUCCESS init_response { session_id: "shared/user_27_6_11008965059196565007_v1" } 2025-04-17T18:02:01.770765Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7494343933505789832:2717] connected; active server actors: 1 2025-04-17T18:02:01.771026Z node 28 :PERSQUEUE_READ_BALANCER NOTICE: 
[72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7494343933505789832:2717] session shared/user_27_6_11008965059196565007_v1 2025-04-17T18:02:01.771102Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 2025-04-17T18:02:01.771188Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2025-04-17T18:02:01.771248Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_11008965059196565007_v1" (Sender=[27:7494343933505789829:2717], Pipe=[27:7494343933505789832:2717], Partitions=[], ActiveFamilyCount=0) 2025-04-17T18:02:01.771283Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 2025-04-17T18:02:01.771374Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-17T18:02:01.771486Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_11008965059196565007_v1" (Sender=[27:7494343933505789829:2717], Pipe=[27:7494343933505789832:2717], Partitions=[], ActiveFamilyCount=0) 2025-04-17T18:02:01.771599Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_11008965059196565007_v1" sender [27:7494343933505789829:2717] lock partition 0 for ReadingSession "shared/user_27_6_11008965059196565007_v1" (Sender=[27:7494343933505789829:2717], Pipe=[27:7494343933505789832:2717], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2025-04-17T18:02:01.771698Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-17T18:02:01.771765Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000332s 2025-04-17T18:02:01.772531Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_11008965059196565007_v1" ClientId: "user" PipeClient { RawX1: 7494343933505789832 RawX2: 4503715591490205 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2025-04-17T18:02:01.772629Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2025-04-17T18:02:01.772868Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7494343933505789834:2720], now have 1 active actors on pipe 2025-04-17T18:02:01.772872Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1, pipe: [27:7494343933505789834:2720] 2025-04-17T18:02:01.772904Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2025-04-17T18:02:01.772934Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2025-04-17T18:02:01.772982Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Created session shared/user_27_6_11008965059196565007_v1 on pipe: [27:7494343933505789834:2720] 2025-04-17T18:02:01.773029Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_27_6_11008965059196565007_v1:1 with generation 1 2025-04-17T18:02:01.773113Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_11008965059196565007_v1 2025-04-17T18:02:01.773269Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:02:01.773299Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:02:01.773323Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:02:01.773348Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:02:01.773357Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T18:02:01.773367Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T18:02:01.773393Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:02:01.773412Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:02:01.773437Z node 27 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T18:02:01.775989Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-04-17T18:02:01.776031Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:02:01.776162Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 SizeLag: 0 WriteTimestampEstimateMS: 0 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-17T18:02:01.776208Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-17T18:02:01.776275Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 sending to client partition status ===Got response: status: SUCCESS start_partition_session_request { partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2025-04-17T18:02:01.777715Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 grpc read done: success# 0, data# { } 2025-04-17T18:02:01.777741Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 grpc read failed 2025-04-17T18:02:01.777763Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 closed 2025-04-17T18:02:01.777811Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_11008965059196565007_v1 is DEAD 2025-04-17T18:02:01.778018Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_11008965059196565007_v1 2025-04-17T18:02:01.778076Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7494343933505789834:2720] destroyed 2025-04-17T18:02:01.778143Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_11008965059196565007_v1 2025-04-17T18:02:01.778310Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7494343933505789832:2717] disconnected; active server actors: 1 2025-04-17T18:02:01.778373Z node 28 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7494343933505789832:2717] client user disconnected session shared/user_27_6_11008965059196565007_v1 >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> YdbProxy::ReadTopic >> YdbProxy::CopyTable |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> YdbProxy::DescribePath >> YdbProxy::CreateTable >> YdbProxy::CreateTopic >> YdbProxy::ListDirectory >> 
YdbProxy::RemoveDirectory >> YdbProxy::MakeDirectory >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> VDiskBalancing::TestRandom_Block42 [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream >> TPersQueueTest::TestBigMessage [GOOD] >> TPersQueueTest::TClusterTrackerTest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TestScript::StepMerging [GOOD] >> TColumnEngineTestLogs::IndexTtl >> TestProgram::YqlKernelEndsWithScalar |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] >> KqpBatchDelete::ManyPartitions_3 [GOOD] |84.0%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] >> TColumnEngineTestLogs::IndexTtl [GOOD] >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TestProgram::YqlKernelEndsWithScalar [GOOD] >> TestProgram::SimpleFunction >> YdbProxy::MakeDirectory [GOOD] >> TestProgram::CountUIDByVAT >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] >> TestProgram::CountWithNulls >> YdbProxy::OAuthToken >> TestProgram::CountWithNulls [GOOD] >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::DescribePath [GOOD] >> TestProgram::SimpleFunction [GOOD] >> TestProgram::CountUIDByVAT [GOOD] >> YdbProxy::DescribeTable >> YdbProxy::DropTopic >> YdbProxy::StaticCreds >> YdbProxy::CopyTable [GOOD] >> YdbProxy::AlterTable [GOOD] >> YdbProxy::CopyTables >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." 
} } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [FAIL] >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> BackupRestoreS3::TestAllPrimitiveTypes-BOOL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8344;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8344;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8344;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8344;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING; ... ALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38008;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38040;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"2,4\",\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N1(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N4->N2->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"2,4","o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... 
t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
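Every portion in this run is visible at snapshot (plan_step=3, tx_id=1), yet the engine selects portions 1–9 and skips 10–20: after the visibility check, the selection step at column_engine_logs.cpp:460 additionally tests each portion against the read predicate on the string primary key. A hedged sketch of that second filter, with TPortionSketch and TPredicateSketch invented for illustration:

```cpp
#include <cstdint>
#include <string>
#include <vector>

// Invented stand-ins for the real engine types; only the control flow of
// "visible first, predicate-checked second" is taken from the log above.
struct TPortionSketch {
    uint64_t PortionId = 0;
    std::string MinKey;   // first PK value stored in the portion
    std::string MaxKey;   // last PK value stored in the portion
};

struct TPredicateSketch {
    std::string From;     // inclusive lower bound on the string PK
    std::string To;       // inclusive upper bound on the string PK
};

// Keep portions whose [MinKey, MaxKey] range can intersect the predicate;
// everything else is skipped without touching its column data.
std::vector<TPortionSketch> SelectPortions(
        const std::vector<TPortionSketch>& visiblePortions,
        const TPredicateSketch& pred) {
    std::vector<TPortionSketch> selected;
    for (const auto& portion : visiblePortions) {
        const bool intersects =
            portion.MaxKey >= pred.From && portion.MinKey <= pred.To;
        if (intersects) {
            selected.push_back(portion);  // logged as event=portion_selected
        }                                 // else logged as event=portion_skipped
    }
    return selected;
}
```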
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N2(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N2(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; 
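Both TestProgram dumps above compile a two-command program into the same four-node graph: FetchOriginalData feeds AssembleOriginalData, whose column drives a Calculation kernel, and a final Projection exposes the result — the red path N1->N2->N0->N3 in the digraph. A toy evaluator for such a chain is sketched below; the node kinds come from the log, while every code name is invented and the single-column data model is a simplification of the real executor.

```cpp
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <vector>

using TColumn = std::vector<int64_t>;

// Invented node type: a kind label, upstream node ids, and a body.
struct TNodeSketch {
    std::string Kind;
    std::vector<size_t> Inputs;
    std::function<TColumn(const std::vector<TColumn>&)> Run;
};

// Evaluate nodes along a topological order (the log's N1->N2->N0->N3).
TColumn RunGraph(const std::vector<TNodeSketch>& nodes,
                 const std::vector<size_t>& order) {
    std::map<size_t, TColumn> results;
    for (size_t id : order) {
        std::vector<TColumn> args;
        for (size_t input : nodes[id].Inputs) {
            args.push_back(results.at(input));
        }
        results[id] = nodes[id].Run(args);
    }
    return results.at(order.back());
}

int main() {
    std::vector<TNodeSketch> nodes(4);
    // N1: FetchOriginalData pulls the raw "uid" column.
    nodes[1] = {"FetchOriginalData", {}, [](const std::vector<TColumn>&) {
        return TColumn{7, 8, 9};
    }};
    // N2: AssembleOriginalData turns the fetched blob into a usable column.
    nodes[2] = {"AssembleOriginalData", {1}, [](const std::vector<TColumn>& in) {
        return in[0];
    }};
    // N0: Calculation applies the kernel (here a count, as in CountWithNulls).
    nodes[0] = {"Calculation", {2}, [](const std::vector<TColumn>& in) {
        return TColumn{static_cast<int64_t>(in[0].size())};
    }};
    // N3: Projection exposes the calculated column as the program's output.
    nodes[3] = {"Projection", {0}, [](const std::vector<TColumn>& in) {
        return in[0];
    }};
    std::cout << RunGraph(nodes, {1, 2, 0, 3})[0] << "\n";  // prints 3
}
```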
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 2799195350893196921 2025-04-17T17:58:18.693231Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.134722s 2025-04-17T17:58:18.693407Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.144130s Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-04-17T17:58:22.657676Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2025-04-17T17:58:23.267437Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] 
TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2025-04-17T17:58:25.923715Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:188:17] ServerId# [1:296:58] TabletId# 72057594037932033 PipeClientId# [3:188:17] 2025-04-17T17:58:25.923914Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:209:17] ServerId# [1:299:61] TabletId# 72057594037932033 PipeClientId# [6:209:17] 2025-04-17T17:58:25.924049Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7662:16] ServerId# [1:7671:1093] TabletId# 72057594037932033 PipeClientId# [5:7662:16] 2025-04-17T17:58:25.924141Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:195:17] ServerId# [1:297:59] TabletId# 72057594037932033 PipeClientId# [4:195:17] 2025-04-17T17:58:25.924318Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:181:17] ServerId# [1:295:57] TabletId# 72057594037932033 PipeClientId# [2:181:17] 2025-04-17T17:58:25.924415Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:216:17] ServerId# [1:300:62] TabletId# 72057594037932033 PipeClientId# [7:216:17] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key 
[1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407 ... # 0.999634} Step = 936 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Stop node 3 2025-04-17T18:01:26.057272Z 1 00h28m30.713448s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 4 2025-04-17T18:01:26.605182Z 1 00h28m40.741391s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 965 SEND TEvPut with key 
[1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 7 2025-04-17T18:01:29.407197Z 1 00h29m10.743636s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Stop node 1 2025-04-17T18:01:30.075006Z 1 00h29m20.743729s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 1 2025-04-17T18:01:31.423370Z 1 00h29m40.744753s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 983 SEND TEvPut with key 
[1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Starting nodes Start compaction 1 Start checking ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2025-04-17T17:57:18.330702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342718620229379:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:18.331202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00064d/r3tmp/tmpWdxqiH/pdisk_1.dat 2025-04-17T17:57:18.664206Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:18.949318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:18.949410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:18.952601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18440, node 1 2025-04-17T17:57:19.082627Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:57:19.083081Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:57:19.122157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:19.280855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00064d/r3tmp/yandexJ2I7GC.tmp 2025-04-17T17:57:19.280889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00064d/r3tmp/yandexJ2I7GC.tmp 2025-04-17T17:57:19.281037Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00064d/r3tmp/yandexJ2I7GC.tmp 2025-04-17T17:57:19.284101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:19.403518Z INFO: TTestServer started on Port 23932 GrpcPort 18440 TClient is connected to server localhost:23932 PQClient connected to localhost:18440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:19.945189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:19.982631Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:57:20.029263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-17T17:57:20.242842Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:57:22.480072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342735800099237:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:22.480245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:22.480772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342735800099264:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:22.484842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:57:22.510452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342735800099266:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:57:22.584034Z node 1 :TX_PROXY ERROR: Actor# [1:7494342735800099330:2455] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:22.963744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:22.965842Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342735800099338:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:22.968442Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjQzZmMwYmMtNjdlNDNhODgtOWNhZmM5NGEtOGZhM2EwMGY=, ActorId: [1:7494342735800099234:2337], ActorState: ExecuteState, TraceId: 01js2ebwcvbcg8ab6tywns57ek, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:22.973201Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:23.016256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:23.164631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:57:23.314501Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342718620229379:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:23.314562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:23.528034Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2ebx5n78z90vvtdw3ca58c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY1OWQyYzgtOGFjZTgwMWItMjFkYmI1ZWUtYmExMzA5NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342740095066935:2641] === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:18440 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-17T17:57:29.782615Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18440 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 2 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710681 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic1, dc = dc1 2025-04-17T17:57:29.839485Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T17:57:29.840178Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-17T17:57:29.840330Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T17:57:29.840530Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-17T17:57:29.840546Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T17:57:29.840558Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-17T17:57:29.840571Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-17T17:57:29.840594Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:57:29.840627Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-17T17:57:29.868548Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7494342765864871158:2880], now have 1 active actors on pipe 2025-04-17T17:57:29.869653Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7494342718620229676 RawX2: 4294969493 } TxId: 281474976710681 Config { TabletConfig { PartitionC ... ssion _26_1_8801625993888334515_v1 Process answer. 
Aval parts: 1 2025-04-17T18:02:10.573744Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 33 2025-04-17T18:02:10.580923Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2025-04-17T18:02:10.581101Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 got read request: guid# 4d139de0-d80d9d84-cf2c0a14-2f7efe11 2025-04-17T18:02:10.581150Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 performing read request: guid# 3915c450-4683cf36-ed83eae-5b085a34, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), count# 6, size# 304, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T18:02:10.581220Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2)maxCount 6 maxSize 304 maxTimeLagMs 0 readTimestampMs 0 readOffset 35 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid 3915c450-4683cf36-ed83eae-5b085a34 2025-04-17T18:02:10.584189Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T18:02:10.584251Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-04-17T18:02:10.584405Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 33 Topic 'rt3.dc1--topic1' partition 1 user $without_consumer offset 35 count 6 size 304 endOffset 40 max time lag 0ms effective offset 35 2025-04-17T18:02:10.584456Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 33 added 0 blobs, size 0 count 0 last offset 35, current partition end offset: 40 2025-04-17T18:02:10.584548Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 33. All data is from uncompacted head. 2025-04-17T18:02:10.584577Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:02:10.584727Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 35 2025-04-17T18:02:10.590729Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 35 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 37 WriteTimestampMS: 1744912930144 CreateTimestampMS: 1744912930142 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 36 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 38 WriteTimestampMS: 1744912930186 CreateTimestampMS: 1744912930183 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 37 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 39 WriteTimestampMS: 1744912930195 CreateTimestampMS: 1744912930193 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 372 RealReadOffset: 37 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 35 } 2025-04-17T18:02:10.590973Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) ready for read with readOffset 38 endOffset 40 2025-04-17T18:02:10.591010Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 40 ReadOffset 38 ReadGuid 3915c450-4683cf36-ed83eae-5b085a34 has messages 1 2025-04-17T18:02:10.591098Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), readOffset# 38, endOffset# 40, WTime# 1744912930195, sizeLag# 372 2025-04-17T18:02:10.591120Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1TEvPartitionReady. Aval parts: 0 2025-04-17T18:02:10.591166Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 read done: guid# 3915c450-4683cf36-ed83eae-5b085a34, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 516 2025-04-17T18:02:10.591189Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 response to read: guid# 3915c450-4683cf36-ed83eae-5b085a34 2025-04-17T18:02:10.591497Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 Process answer. 
Aval parts: 1 Bytes read: 516 Offset: 35 from session 2 Offset: 36 from session 2 Offset: 37 from session 2 2025-04-17T18:02:10.594248Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2025-04-17T18:02:10.594356Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 got read request: guid# 1003b8fa-19f61893-9224e5b9-eb662388 2025-04-17T18:02:10.594403Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 performing read request: guid# ede59b2b-af4923e8-84d9c18-47600051, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), count# 2, size# 188, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T18:02:10.594458Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2)maxCount 2 maxSize 188 maxTimeLagMs 0 readTimestampMs 0 readOffset 38 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid ede59b2b-af4923e8-84d9c18-47600051 2025-04-17T18:02:10.595011Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T18:02:10.595068Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-04-17T18:02:10.595224Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 1 user $without_consumer offset 38 count 2 size 188 endOffset 40 max time lag 0ms effective offset 38 2025-04-17T18:02:10.595274Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 38, current partition end offset: 40 2025-04-17T18:02:10.595359Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2025-04-17T18:02:10.595388Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:02:10.595537Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 38 Bytes read: 348 Offset: 38 from session 2 Offset: 39 from session 2 2025-04-17T18:02:10.596537Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1744912930216 CreateTimestampMS: 1744912930213 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1744912930232 CreateTimestampMS: 1744912930230 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 39 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 38 } 2025-04-17T18:02:10.596717Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) wait data in partition inited, cookie 1 from offset40 2025-04-17T18:02:10.596751Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 40 ReadOffset 40 ReadGuid ede59b2b-af4923e8-84d9c18-47600051 has messages 1 2025-04-17T18:02:10.596876Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 read done: guid# ede59b2b-af4923e8-84d9c18-47600051, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 348 2025-04-17T18:02:10.596900Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 response to read: guid# ede59b2b-af4923e8-84d9c18-47600051 2025-04-17T18:02:10.597075Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 Process answer. Aval parts: 0 2025-04-17T18:02:10.598219Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_8801625993888334515_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 2 offsets { end: 39 } } } } 2025-04-17T18:02:10.598242Z node 26 :PQ_READ_PROXY INFO: session cookie 1 consumer session _26_1_8801625993888334515_v1 closed with error: reason# can't commit when reading without a consumer 2025-04-17T18:02:10.598447Z node 26 :PQ_READ_PROXY INFO: session cookie 1 consumer session _26_1_8801625993888334515_v1 is DEAD 2025-04-17T18:02:10.602809Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_8801625993888334515_v1 2025-04-17T18:02:10.602872Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7494343970156345300:2586] destroyed 2025-04-17T18:02:10.602919Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_8801625993888334515_v1 2025-04-17T18:02:10.602952Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7494343970156345302:2583] destroyed 2025-04-17T18:02:10.603000Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_8801625993888334515_v1 2025-04-17T18:02:10.603057Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7494343970156345301:2582] destroyed 2025-04-17T18:02:10.603148Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_8801625993888334515_v1 2025-04-17T18:02:10.603194Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_8801625993888334515_v1 2025-04-17T18:02:10.603228Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_8801625993888334515_v1 2025-04-17T18:02:10.607435Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_8801625993888334515_v1 2025-04-17T18:02:10.607492Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7494343970156345299:2585] destroyed 
2025-04-17T18:02:10.607519Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_8801625993888334515_v1 2025-04-17T18:02:10.607537Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7494343970156345303:2584] destroyed 2025-04-17T18:02:10.607586Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_8801625993888334515_v1 2025-04-17T18:02:10.607606Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_8801625993888334515_v1 |84.1%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 17109, MsgBus: 15251 2025-04-17T17:58:34.809741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343043709861822:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:34.809795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b90/r3tmp/tmpUGzO6l/pdisk_1.dat 2025-04-17T17:58:35.240438Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:35.262234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:35.262368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:35.263739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17109, node 1 2025-04-17T17:58:35.530783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:35.530805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:35.530811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:35.530919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15251 TClient is connected to server localhost:15251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:58:36.264787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.293524Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:58:36.314056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.464345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.715737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:36.833752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:38.842979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343060889732537:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:38.843104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:39.099905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.138431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.164974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.187323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.218157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.302540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.341222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:39.392953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343065184700498:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:39.393006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:39.393216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343065184700503:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:39.397070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:39.413018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343065184700505:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:39.485991Z node 1 :TX_PROXY ERROR: Actor# [1:7494343065184700559:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:39.855694Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343043709861822:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:39.856095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:40.668241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:50.233166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:58:50.233205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:53.424426Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2eea0ddas258wh7ahv3mmm", SessionId: ydb://session/3?node_id=1&id=MTE3ZGQzNjQtNmNjYTM1MGYtZDk4YmViMDAtYTU5MWQ2, Slow query, duration: 11.490658s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n BATCH DELETE FROM ManyShardsTable\n WHERE Data >= 200;\n ", parameters: 0b Trying to start YDB, gRPC: 11762, MsgBus: 5321 2025-04-17T17:58:55.294061Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343132688952085:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:55.294111Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b90/r3tmp/tmpfxQGHE/pdisk_1.dat 2025-04-17T17:58:55.476633Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:55.483272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:55.483358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:55.485417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11762, node 2 2025-04-17T17:58:55.560859Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:55.560899Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:55.560919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:55.561139Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5321 TClient is connected to server localhost:5321 WaitRootIsUp 'Root'... 
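The KQP_SLOW_LOG entry a few messages up records the offending statement inline; as standalone YQL it is simply:

    BATCH DELETE FROM ManyShardsTable
    WHERE Data >= 200;

As I understand YDB's batch operations (an assumption, not something this log states), BATCH DELETE walks the shards in independent transactions, so an 11.5 s sweep over a many-shard table under ASan surfaces as a slow-query warning rather than a transaction timeout.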
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:56.060056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part ... k, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.071164Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.117692Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.166697Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.217366Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.267311Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.325794Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:01:53.425236Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494343899658775425:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.425366Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.425611Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494343899658775430:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:53.431347Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:01:53.447174Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494343899658775432:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:01:53.550472Z node 11 :TX_PROXY ERROR: Actor# [11:7494343899658775492:3567] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:55.344109Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 14477, MsgBus: 16871 2025-04-17T18:02:00.676382Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494343927803613727:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:00.676460Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b90/r3tmp/tmpZR6GcB/pdisk_1.dat 2025-04-17T18:02:00.849547Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:00.879220Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:00.879343Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:00.881306Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14477, node 12 2025-04-17T18:02:00.947682Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:00.947715Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:00.947729Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:00.948230Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16871 TClient is connected to server localhost:16871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:01.842764Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
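The recurring pattern above — "Resource pool default not found", then ESchemeOpCreateResourcePool, then a "path exist" race on /Root/.metadata/workload_manager/pools/default — is the workload service lazily creating the default pool on first query. For comparison, an explicit pool would be declared with YQL along these lines; the statement shape and property names follow YDB's workload-manager syntax as I recall it and are not taken from this log:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );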
2025-04-17T18:02:01.860386Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:01.944177Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:02.197609Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:02.317288Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:05.676567Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7494343927803613727:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:05.676676Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:05.818388Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494343949278451993:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:05.818554Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:05.948479Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:02:06.025286Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:06.075566Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:06.127722Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:06.191707Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:02:06.298372Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:02:06.367762Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:02:06.463451Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494343953573419962:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:06.463552Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494343953573419967:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:06.463591Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:06.468221Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:02:06.480217Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494343953573419969:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:02:06.567374Z node 12 :TX_PROXY ERROR: Actor# [12:7494343953573420024:3569] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:08.440543Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2025-04-17T18:02:09.551555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343966871242732:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.551628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018ae/r3tmp/tmp12W4VJ/pdisk_1.dat 2025-04-17T18:02:10.566358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.591050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.591200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.627286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:10.635380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:20743 TServer::EnableGrpc on GrpcPort 24242, node 1 2025-04-17T18:02:11.835128Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.835149Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.835155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.835309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
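The YdbProxy::AlterTable output below ends with "Can't drop unknown column: 'extra'" after one successful ESchemeOpAlterTable — that is, the test alters the table and then deliberately drops a column that no longer exists. The failing step corresponds to YQL of this shape (the table path is hypothetical; the log does not show the actual path):

    ALTER TABLE `/Root/table` DROP COLUMN extra;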
2025-04-17T18:02:14.116250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:14.481691Z node 1 :TX_PROXY ERROR: Actor# [1:7494343988346079869:2309] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T18:02:14.551578Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343966871242732:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.551676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:14.554458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:02:14.817214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:02:14.844456Z node 1 :TX_PROXY ERROR: Actor# [1:7494343988346079989:2392] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT8 >> TestProgram::YqlKernelStartsWith >> TColumnEngineTestLogs::IndexWriteOverload |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] >> TestProgram::JsonExistsBinary >> TColumnEngineTestLogs::IndexReadWithPredicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: 
"O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] >> 
TestProgram::JsonExists >> YdbProxy::OAuthToken [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] >> TestProgram::JsonExistsBinary [GOOD] >> TestProgram::JsonExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"3,4\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; 
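The TestProgram graphs above all share one shape: FetchOriginalData and AssembleOriginalData pull the input columns, a single YQL block kernel (the Calculation node) runs over them, and a Projection emits column 15. Read back into YQL, the two kernel programs amount to roughly the following; the column names (string/substring, ids 7 and 9; sum/vat, ids 3 and 4) come from the graph dumps, while the table name is a placeholder of mine:

    -- YqlKernelStartsWith: BlockFunc StartsWith over columns 7 and 9
    SELECT StartsWith(`string`, `substring`) FROM some_table;

    -- YqlKernel: BlockFunc Add over columns 3 and 4
    SELECT `sum` + `vat` FROM some_table;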
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UINT8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; 
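The records above repeat one indexation round per written batch: the batch and each of its columns are serialized (native.cpp:110), the index write is acknowledged (abstract.cpp:53, event=WriteIndexComplete;type=CS::INDEXATION;success=1), the new portion is registered in its granule (granule.cpp:19, event=upsert_portion), and the test then deliberately fires event=AbortEmergency;reason=testing (abstract.cpp:105). A minimal sketch for tallying those rounds from a saved copy of this output — "ya_log.txt" is an illustrative path (compare the --log-file flag of this run), not a file the run is known to have produced:

    import re
    from collections import Counter

    # Tally the recurring columnshard events; the event names are copied
    # verbatim from the FALLBACK_ACTOR_LOGGING records above.
    events = Counter()
    with open("ya_log.txt", encoding="utf-8", errors="replace") as log:
        for line in log:
            events.update(re.findall(
                r"event=(serialize|WriteIndexComplete|upsert_portion|AbortEmergency)",
                line))
    print(events)

In the section above the last three counters advance in lockstep: every indexed portion is immediately followed by the injected abort.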
FALLBACK_ACTOR_LOGGING; ... t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
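In the IsVisible records of this read pass (portion_info.h:377, continuing through portion 20 just below), every portion carries min_schema_snapshot (plan_step=1, tx_id=1), which is at or below the read snapshot (plan_step=3, tx_id=1), so all twenty portions report visible=1; the portion_skipped verdicts for portions 10-20 are therefore produced by the selection pass in column_engine_logs.cpp:460, not by the visibility check. A minimal sketch of the snapshot comparison these records imply — field names are taken from the log, and the TColumnEngineTestLogs::IndexWriteLoadRead output further down shows the real check is stricter (there a read at plan_step=1;tx_id=2 still reports visible=0), so treat this as an approximation only:

    def is_visible(portion_min_snapshot, read_snapshot):
        # Both arguments are (plan_step, tx_id) tuples, as printed in the
        # IsVisible records; lexicographic order reproduces this block.
        return portion_min_snapshot <= read_snapshot

    assert is_visible((1, 1), (3, 1))      # all portions above: visible=1
    assert not is_visible((1, 1), (1, 0))  # IndexWriteLoadRead below: visible=0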
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] >> YdbProxy::DropTopic [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> 
TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> YdbProxy::DescribeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> 
BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-04-17T18:02:09.553605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343967497636740:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.553662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001909/r3tmp/tmp802wlN/pdisk_1.dat 2025-04-17T18:02:10.542888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.613573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.613693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.625172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:10.626006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:16969 TServer::EnableGrpc on GrpcPort 3037, node 1 2025-04-17T18:02:11.834135Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.834167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.834173Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.834389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.114348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
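An aside on the two TestProgram::JsonExists* blocks above, before the YdbProxy::OAuthToken log continues: the json_string/json_binary arrays they print are the test inputs, and the json_binary values are hex dumps. The first pair decodes to the same two documents shown for json_string, while the second pair appears to be YDB's internal binary JSON encoding of those documents (the strings "key" and "value" are visible inside as 6B657900/76616C756500). The digraph program { ... } records in the same blocks are plain Graphviz and can be rendered with dot -Tsvg. A quick check of the hex payloads:

    import binascii

    # Hex strings copied from the json_binary output above.
    print(binascii.unhexlify("7B226B6579223A2276616C7565227D"))  # b'{"key":"value"}'
    print(binascii.unhexlify("5B5D"))                            # b'[]'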
2025-04-17T18:02:14.554338Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343967497636740:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.554449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:15.069107Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343990690211516:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.069146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001909/r3tmp/tmpzLShEz/pdisk_1.dat 2025-04-17T18:02:15.155253Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:15.197785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:15.197857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:15.199428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12437 TServer::EnableGrpc on GrpcPort 31882, node 2 2025-04-17T18:02:15.386802Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:15.386829Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:15.386837Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:15.386960Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:15.672575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:02:15.680215Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:02:15.711790Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> YdbProxy::StaticCreds [GOOD] >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] >> TestProgram::JsonValue |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] >> TestProgram::NumRowsWithNulls >> TestProgram::NumRowsWithNulls [GOOD] >> YdbProxy::DropTable >> YdbProxy::DescribeConsumer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
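Interleaved with the captured test output, ya's own status stream uses three markers, all visible in the line above: |NN.N%| progress entries tagged with the action kind ([AR] archiving, [CC] compiling, [LD] linking, [TM] lines carrying test results), bare '>> Suite::Test' when a test is scheduled followed by '>> Suite::Test [GOOD]' on success, and '-------' headers introducing a test's captured stderr ('Test command err: ...'). A sketch that pulls the verdicts back out of a saved copy of the log ("ya_log.txt" is again an illustrative path):

    import re

    # Collect "Suite::Test [VERDICT]" pairs; scheduled-but-unfinished entries
    # (no trailing bracketed verdict) are intentionally not matched.
    text = open("ya_log.txt", encoding="utf-8", errors="replace").read()
    for name, verdict in re.findall(r">> (\S+) \[([A-Z]+)\]", text):
        print(f"{verdict:8} {name}")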
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); >> TestProgram::JsonValueBinary >> TestProgram::JsonValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2025-04-17T18:02:09.550961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343968116964565:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.551046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018d5/r3tmp/tmpRzv4qA/pdisk_1.dat 2025-04-17T18:02:10.568608Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.625677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.625790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.626350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:10.627430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21276 TServer::EnableGrpc on GrpcPort 15049, node 1 2025-04-17T18:02:11.834957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.835005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.835012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T18:02:11.835171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.114392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:14.554378Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343968116964565:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.554464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:15.171440Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343992066516255:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.172033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018d5/r3tmp/tmpe2NkvL/pdisk_1.dat 2025-04-17T18:02:15.263613Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:15.313523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:15.313604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:15.315322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19489 TServer::EnableGrpc on GrpcPort 21710, node 2 2025-04-17T18:02:15.594989Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:15.595021Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:15.595029Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:15.595179Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19489 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:16.067216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:16.074874Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:02:16.311051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:02:16.344644Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:02:16.344676Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T18:02:16.359653Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-17T18:02:16.359790Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-17T18:02:16.380914Z node 2 :TX_PROXY ERROR: Actor# [2:7494343996361484341:2402] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N2(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, 
label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N4(8):{\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N5(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N1->N2->N0->N3->N4->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"p":{"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; >> YdbProxy::CreateCdcStream [GOOD] >> TPersQueueTest::DisableWrongSettings [GOOD] >> TPersQueueTest::DisableDeduplication ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"10,11\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; digraph program {N0[shape=box, label="N3(26):{\"i\":\"10,11\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, 
label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2025-04-17T18:02:09.550954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343966751211069:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.551014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00192d/r3tmp/tmprgUABt/pdisk_1.dat 2025-04-17T18:02:10.563613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.612331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.612441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.622260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:10.636270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14264 TServer::EnableGrpc on GrpcPort 15967, node 1 2025-04-17T18:02:11.834144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.834164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.834170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.834394Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14264 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.118712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:14.551112Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343966751211069:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.551243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:15.117341Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343993916374863:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.117406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00192d/r3tmp/tmpHp0KUA/pdisk_1.dat 2025-04-17T18:02:15.247855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:15.261814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:15.261910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:15.263411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19331 TServer::EnableGrpc on GrpcPort 8701, node 2 2025-04-17T18:02:15.497514Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:15.497544Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:15.497553Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:15.497666Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19331 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:15.871028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:18.592505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TestProgram::JsonValueBinary [GOOD] >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-04-17T18:02:09.551480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343966234760901:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.551517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001923/r3tmp/tmpwnfsMb/pdisk_1.dat 2025-04-17T18:02:10.506443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.562013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.570377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.635367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:10.655944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26176 TServer::EnableGrpc on GrpcPort 29339, node 1 2025-04-17T18:02:11.834380Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.834400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.834407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.834525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26176 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.117498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:14.351919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:02:14.368788Z node 1 :TX_PROXY ERROR: Actor# [1:7494343987709598078:2339] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T18:02:14.551564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343966234760901:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.551638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:15.156299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343993831746546:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.156344Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001923/r3tmp/tmpbSXBH8/pdisk_1.dat 2025-04-17T18:02:15.295666Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:15.321483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:15.321562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:15.323147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23061 TServer::EnableGrpc on GrpcPort 20421, node 2 2025-04-17T18:02:15.563127Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:15.563163Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:15.563171Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:15.563314Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23061 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:15.879247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:15.886750Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:15.958008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912935927 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912935927 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) 2025-04-17T18:02:16.295331Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 
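The JsonValue test above drives a single Calculation kernel (Json2.SqlValueConvertToUtf8 in this pass; Json2.SqlValueBool and Json2.SqlValueNumber in the later ones) over the six json_string documents with the path "$.key", then checks the projected column once per target type. The expected per-document behaviour can be approximated outside the harness; a minimal sketch in Python, assuming a plain top-level key lookup is an adequate stand-in for the JSON-path engine (the real UDF additionally handles full JSON-path syntax and type coercion):

    import json

    # The six documents printed as json_string in the records above.
    docs = ['{"key":"value"}', '{"key":10}', '{"key":0.1}',
            '{"key":false}', '{"another":"value"}', '[]']

    def sql_value(doc, key="key"):
        # Rough stand-in for SqlValue* over '$.key': return the value
        # under 'key', or None (the UDF yields NULL) when the document
        # is not an object or lacks the key.
        parsed = json.loads(doc)
        return parsed.get(key) if isinstance(parsed, dict) else None

    for d in docs:
        print(d, "->", sql_value(d))
    # -> value, 10, 0.1, False, None (missing key), None (not an object)
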
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\" ... 30\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_string: [ 
"{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-04-17T18:02:09.551567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343964959437283:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.551634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00191b/r3tmp/tmpA4gOuf/pdisk_1.dat 2025-04-17T18:02:10.593176Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.609907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.610025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.622253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:10.637115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32189 TServer::EnableGrpc on GrpcPort 12587, node 1 2025-04-17T18:02:11.834255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.834277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.834283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.834390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32189 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.135079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:14.410826Z node 1 :TX_PROXY ERROR: Actor# [1:7494343986434274419:2309] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2025-04-17T18:02:14.552514Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343964959437283:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.552615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:15.685392Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343990505427300:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.685447Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00191b/r3tmp/tmpVvqYlO/pdisk_1.dat 2025-04-17T18:02:15.916443Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:15.990119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:15.990217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:15.998027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9318 TServer::EnableGrpc on GrpcPort 28735, node 2 2025-04-17T18:02:16.361579Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:16.361597Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:16.361631Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:16.361740Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9318 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:17.136536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:17.144561Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_binary: [ 
7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; 
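The two json_binary dumps above print the same six inputs twice: first as hex of the raw UTF-8 JSON text, then as hex of YDB's serialized JsonDocument form. The first list can be decoded directly to confirm it matches the json_string inputs used by the JsonValue test; a minimal sketch (the JsonDocument encoding in the second list is an internal binary format and is left alone here):

    hex_docs = [
        "7B226B6579223A2276616C7565227D",
        "7B226B6579223A31307D",
        "7B226B6579223A302E317D",
        "7B226B6579223A66616C73657D",
        "7B22616E6F74686572223A2276616C7565227D",
        "5B5D",
    ]
    for h in hex_docs:
        print(bytes.fromhex(h).decode("utf-8"))
    # Prints: {"key":"value"}, {"key":10}, {"key":0.1},
    #         {"key":false}, {"another":"value"}, []
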
FALLBACK_ACTOR_LOGGING;prio ... 2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; 
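The program_parsed record that follows serializes the same graph as JSON: "edges" lists each node's inputs by owner_id, and "nodes" carries the per-node weights ("w"). A small sketch, using the edge list exactly as it appears in the log, confirming that a topological sort reproduces the red chain N0->N2->N3->N1->N4 from the DOT dump:

    import json

    payload = json.loads('{"edges":[{"owner_id":0,"inputs":[]},'
                         '{"owner_id":1,"inputs":[{"from":0},{"from":3}]},'
                         '{"owner_id":2,"inputs":[]},'
                         '{"owner_id":3,"inputs":[{"from":2}]},'
                         '{"owner_id":4,"inputs":[{"from":1}]}]}')

    deps = {e["owner_id"]: {i["from"] for i in e["inputs"]} for e in payload["edges"]}
    order = []
    while deps:  # Kahn's algorithm; assumes the graph is acyclic, as it is here.
        ready = sorted(n for n, d in deps.items() if d <= set(order))
        order += ready
        for n in ready:
            del deps[n]
    print(order)  # [0, 2, 3, 1, 4] -- matches N0->N2->N3->N1->N4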
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2025-04-17T18:02:09.551448Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343965114744853:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.551500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00189c/r3tmp/tmp7EcaRS/pdisk_1.dat 2025-04-17T18:02:10.504991Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.634362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.634473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.635384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
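The json_binary dumps above print each cell twice: first as hex-encoded UTF-8 JSON text, then in YDB's binary-indexed JsonDocument form (not plain UTF-8, left undecoded here). The T=N5arrow10BinaryTypeE / T=N5arrow10DoubleTypeE tags are Itanium-mangled C++ type names. A sketch decoding both, with values copied verbatim from the log:

    # First json_binary array: plain JSON, hex-encoded.
    cells = ["7B226B6579223A2276616C7565227D", "7B226B6579223A31307D",
             "7B226B6579223A302E317D", "7B226B6579223A66616C73657D",
             "7B22616E6F74686572223A2276616C7565227D", "5B5D"]
    for h in cells:
        print(bytes.fromhex(h).decode("utf-8"))
    # {"key":"value"} {"key":10} {"key":0.1} {"key":false} {"another":"value"} []

    def demangle_nested(t: str) -> str:
        # Minimal Itanium demangler: handles only the N<len><part>...E nested-name case.
        body, parts, i = t[1:-1], [], 0
        while i < len(body):
            j = i
            while body[j].isdigit():
                j += 1
            n = int(body[i:j])
            parts.append(body[j:j + n])
            i = j + n
        return "::".join(parts)

    print(demangle_nested("N5arrow10BinaryTypeE"))  # arrow::BinaryType
    print(demangle_nested("N5arrow10DoubleTypeE"))  # arrow::DoubleType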
2025-04-17T18:02:10.656366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13995 TServer::EnableGrpc on GrpcPort 16992, node 1 2025-04-17T18:02:11.834233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.834263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.834270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.834452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.118851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:02:14.546389Z node 1 :TX_PROXY ERROR: Actor# [1:7494343986589581989:2309] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2025-04-17T18:02:14.554648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343965114744853:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.554718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:14.566378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:02:14.808504Z node 1 :TX_PROXY ERROR: Actor# [1:7494343986589582081:2371] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:15.620717Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343991543289311:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.620868Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00189c/r3tmp/tmp8nH9Fl/pdisk_1.dat 2025-04-17T18:02:15.866024Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:15.891216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:15.891290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:15.894233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11916 TServer::EnableGrpc on GrpcPort 7504, node 2 2025-04-17T18:02:16.290793Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:16.290818Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:16.290825Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:16.290937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11916 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:16.718851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:16.734162Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:19.776843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:19.974300Z node 2 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][2:7494344008723159343:2346] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T18:02:20.047109Z node 2 :TX_PROXY ERROR: Actor# [2:7494344013018126702:2455] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> BackupRestore::TestAllPrimitiveTypes-BOOL |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> BackupRestoreS3::TestAllPrimitiveTypes-BOOL [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INT8 >> BackupRestoreS3::TestAllPrimitiveTypes-UINT8 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UTF8 >> YdbProxy::AlterTopic [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UINT8 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UINT32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-04-17T18:02:09.550989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343964745901806:2072];send_to=[0:7307199536658146131:7762515]; 
2025-04-17T18:02:09.551126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018a5/r3tmp/tmpA4rcsT/pdisk_1.dat 2025-04-17T18:02:10.560305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.617609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.617706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.622246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:10.647136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9022 TServer::EnableGrpc on GrpcPort 20907, node 1 2025-04-17T18:02:11.838349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.838378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.838393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.838562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.115262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:02:14.489377Z node 1 :TX_PROXY ERROR: Actor# [1:7494343986220738940:2309] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T18:02:14.551441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343964745901806:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.551641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:14.556025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:02:15.537160Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343992741545900:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.537224Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018a5/r3tmp/tmpdHGlG9/pdisk_1.dat 2025-04-17T18:02:15.732335Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:15.745090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:15.745191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:15.749254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6563 TServer::EnableGrpc on GrpcPort 20655, node 2 2025-04-17T18:02:16.074387Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:16.074411Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:16.074418Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:16.074552Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:02:16.799166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:16.818339Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:02:19.924919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:20.048397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:02:20.971843Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344015338587135:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:20.972037Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018a5/r3tmp/tmpjB6rbC/pdisk_1.dat 2025-04-17T18:02:21.074568Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:21.114620Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:21.114699Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:21.116353Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26900 TServer::EnableGrpc on GrpcPort 1807, node 3 2025-04-17T18:02:21.278835Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:21.278860Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:21.278868Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:21.278986Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:02:21.592816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:02:21.599401Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:02:21.780643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:02:21.805330Z node 3 :TX_PROXY ERROR: Actor# [3:7494344019633555199:2395] txid# 281474976715660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |84.3%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] Test command err: 2025-04-17T17:59:46.097025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343349813636389:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:46.122999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:59:46.152299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343350333431123:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:46.152347Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:59:46.243294Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494343351569591509:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:46.243368Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a6/r3tmp/tmp3ASiEh/pdisk_1.dat 2025-04-17T17:59:47.440441Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:47.446785Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:47.502355Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:59:48.004292Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:48.135756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:48.135846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:48.142727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:48.142795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:48.144387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:48.144446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:48.175830Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T17:59:48.175867Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:59:48.175986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:59:48.194748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:59:48.194980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13005 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T17:59:49.135245Z node 1 :TX_PROXY DEBUG: actor# [1:7494343354108603755:2139] Handle TEvNavigate describe path dc-1 2025-04-17T17:59:49.135294Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343366993506148:2470] HANDLE EvNavigateScheme dc-1 2025-04-17T17:59:49.135418Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494343354108603786:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T17:59:49.135550Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343358403571257:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494343354108603786:2155], cookie# 1 2025-04-17T17:59:49.138445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343358403571270:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343358403571267:2260], cookie# 1 2025-04-17T17:59:49.138486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343358403571271:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343358403571268:2260], cookie# 1 2025-04-17T17:59:49.138501Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343358403571272:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343358403571269:2260], cookie# 1 2025-04-17T17:59:49.138536Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343349813636118:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343358403571272:2260], cookie# 1 
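As a sanity check on the AlterTopic rejection above ("Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s"): the specified value is exactly one year of seconds while the allowed maximum is 31 days, which the arithmetic below confirms:

    specified, maximum = 31_536_000, 2_678_400  # seconds, from the log message
    assert specified == 365 * 86_400  # one year
    assert maximum == 31 * 86_400     # 31 days
    print(specified // 86_400, maximum // 86_400)  # 365 31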
2025-04-17T17:59:49.138566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343358403571272:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343349813636118:2059], cookie# 1 2025-04-17T17:59:49.138596Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343358403571257:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343358403571269:2260], cookie# 1 2025-04-17T17:59:49.138621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343358403571257:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T17:59:49.138637Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343349813636112:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343358403571270:2260], cookie# 1 2025-04-17T17:59:49.138652Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494343349813636115:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494343358403571271:2260], cookie# 1 2025-04-17T17:59:49.138668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343358403571270:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343349813636112:2053], cookie# 1 2025-04-17T17:59:49.138696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494343358403571271:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343349813636115:2056], cookie# 1 2025-04-17T17:59:49.138715Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343358403571257:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343358403571267:2260], cookie# 1 2025-04-17T17:59:49.138733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343358403571257:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T17:59:49.138752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343358403571257:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494343358403571268:2260], cookie# 1 2025-04-17T17:59:49.138772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494343358403571257:2260][/dc-1] Unexpected sync response: sender# [1:7494343358403571268:2260], cookie# 1 2025-04-17T17:59:49.138828Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494343354108603786:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T17:59:49.165285Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494343354108603786:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494343358403571257:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T17:59:49.165436Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494343354108603786:2155], cacheItem# { Subscriber: { Subscriber: [1:7494343358403571257:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T17:59:49.170262Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494343366993506149:2471], recipient# [1:7494343366993506148:2470], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T17:59:49.170332Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343366993506148:2470] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T17:59:49.355276Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343366993506148:2470] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T17:59:49.358113Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343366993506148:2470] Handle TEvDescribeSchemeResult Forward to# [1:7494343366993506147:2469] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response ... 
] } 2025-04-17T18:02:22.164405Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7494343420082521460:2127], cacheItem# { Subscriber: { Subscriber: [7:7494343424377489365:2549] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:02:22.164561Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7494344021377945968:3666], recipient# [7:7494344021377945967:2719], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:22.184736Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7494343420082521460:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:22.184906Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7494343420082521460:2127], cacheItem# { Subscriber: { Subscriber: [7:7494343424377489365:2549] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:02:22.185059Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7494344021377945970:3667], recipient# [7:7494344021377945969:2720], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:22.489973Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7494343431339509285:2104], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:22.490098Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[8:7494343431339509285:2104], cacheItem# { Subscriber: { Subscriber: [8:7494343435634476989:2342] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:02:22.490222Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7494344024044997233:2666], recipient# [8:7494344024044997232:2653], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:22.716091Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7494343420082521460:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:22.716194Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7494343420082521460:2127], cacheItem# { Subscriber: { Subscriber: [7:7494343441557358817:2745] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:02:22.716265Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7494344021377945979:3668], recipient# [7:7494344021377945978:2721], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:23.008587Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7494343431339509285:2104], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:23.008711Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for 
TNavigate: self# [8:7494343431339509285:2104], cacheItem# { Subscriber: { Subscriber: [8:7494343448519378904:2347] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:02:23.008850Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7494344028339964532:2667], recipient# [8:7494344028339964531:2654], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:23.165242Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7494343420082521460:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:23.165380Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7494343420082521460:2127], cacheItem# { Subscriber: { Subscriber: [7:7494343424377489365:2549] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:02:23.165469Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7494344025672913283:3675], recipient# [7:7494344025672913282:2722], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:23.185685Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7494343420082521460:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:02:23.185788Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7494343420082521460:2127], cacheItem# 
{ Subscriber: { Subscriber: [7:7494343424377489365:2549] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:02:23.185863Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7494344025672913285:3676], recipient# [7:7494344025672913284:2723], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> YdbProxy::DescribeTopic [GOOD] |84.3%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-04-17T18:02:20.414478Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344014034788686:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:20.421235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001894/r3tmp/tmpBRwBSe/pdisk_1.dat 2025-04-17T18:02:20.749211Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:20.754106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:20.754240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:20.757619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26078 TServer::EnableGrpc on GrpcPort 15322, node 1 2025-04-17T18:02:20.970769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:20.970795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:20.970803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:20.970914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26078 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:21.273337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:22.978843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:23.106294Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:02:23.107674Z node 1 :TX_PROXY ERROR: Actor# [1:7494344026919691352:2395] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T18:02:23.506665Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344027976595820:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:23.506740Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001894/r3tmp/tmpOLpykZ/pdisk_1.dat 2025-04-17T18:02:23.564050Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:23.625586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:23.625660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:23.627288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32563 TServer::EnableGrpc on GrpcPort 18935, node 2 2025-04-17T18:02:23.738125Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:23.738153Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:23.738163Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:23.738287Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32563 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:23.928679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> BackupRestore::TestAllPrimitiveTypes-BOOL [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INT16 >> YdbProxy::ReadNonExistentTopic [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INT8 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INT16 >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData >> BackupRestoreS3::TestAllPrimitiveTypes-UTF8 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-YSON ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2025-04-17T18:02:09.551793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343968045712574:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:09.551835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00193d/r3tmp/tmpuBwzLO/pdisk_1.dat 2025-04-17T18:02:10.506385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:10.578511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:10.578624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:10.611598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:10.624604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:24697 TServer::EnableGrpc on GrpcPort 26238, node 1 2025-04-17T18:02:11.840145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:11.840171Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:11.840178Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:11.840375Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:24697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:14.115231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:14.552197Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343968045712574:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:14.552267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:15.141218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T18:02:15.315281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343993815517280:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.315288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343993815517269:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.315383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.329313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-17T18:02:15.336209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343993815517283:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T18:02:15.420600Z node 1 :TX_PROXY ERROR: Actor# [1:7494343993815517324:2478] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:21.030260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:02:21.389665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:02:21.831559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:02:22.258756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:02:22.617452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:02:25.303098Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344036612818169:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:25.303176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00193d/r3tmp/tmpz5fySQ/pdisk_1.dat 2025-04-17T18:02:25.423621Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:25.445359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:25.445470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:25.446919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25608 TServer::EnableGrpc on GrpcPort 21073, node 2 2025-04-17T18:02:25.620154Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:25.620178Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:25.620187Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:25.620325Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25608 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:25.849398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback |84.3%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::SysViewClientLost [FAIL] >> KqpTypes::DyNumberCompare >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UINT32 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UINT64 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView >> TPersQueueTest::LOGBROKER_7820 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... onent=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler >> ReadSessionImplTest::CommonHandler [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INT16 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INT32 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::LOGBROKER_7820 [GOOD] Test command err: 2025-04-17T17:57:42.706116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342821783324637:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:42.706163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:42.764602Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342821453267990:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:42.764648Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:43.269216Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000570/r3tmp/tmpl2cPrN/pdisk_1.dat 2025-04-17T17:57:43.300944Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:43.751865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path 
status: LookupError; 2025-04-17T17:57:43.791750Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:43.882672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:43.894693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:43.894831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:43.903850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:43.903938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:43.911838Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:43.911991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:43.912814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17610, node 1 2025-04-17T17:57:44.234828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/000570/r3tmp/yandexjQMIKn.tmp 2025-04-17T17:57:44.234850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/000570/r3tmp/yandexjQMIKn.tmp 2025-04-17T17:57:44.234977Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/000570/r3tmp/yandexjQMIKn.tmp 2025-04-17T17:57:44.235092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:44.334610Z INFO: TTestServer started on Port 19763 GrpcPort 17610 TClient is connected to server localhost:19763 PQClient connected to localhost:17610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:44.943549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T17:57:45.023937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-17T17:57:45.418884Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T17:57:47.709882Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342821783324637:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:47.709988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:47.770314Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342821453267990:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:47.770377Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:48.353904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342847553129536:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:48.354056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:48.358336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342847553129552:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:48.372814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T17:57:48.374162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342847553129583:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:48.374239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:48.412236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342847553129554:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T17:57:48.909994Z node 1 :TX_PROXY ERROR: Actor# [1:7494342847553129638:2779] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:48.963848Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342847553129653:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:48.965188Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTEzNjVhZGYtMmYxNTU0Ni1jZDFhODlmNC1iMjIwZTc5, ActorId: [1:7494342847553129534:2339], ActorState: ExecuteState, TraceId: 01js2ecnj618w0r62p53j56fef, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:48.968482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:57:48.968709Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:48.968570Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342847223072177:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:48.970165Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjAzMjgzMTItMzIxNTIxYzctNWRhMmEzMzMtMjcwNTUwMGI=, ActorId: [2:7494342847223072145:2314], ActorState: ExecuteState, TraceId: 01js2ecnyn71es51c2xj97e1vv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:48.971004Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:49.133647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:57:49.390339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); ... ad: {15, 23} (653-653) 2025-04-17T18:02:26.882854Z :DEBUG: [] Take Data. Partition 0. Read: {16, 0} (654-654) 2025-04-17T18:02:26.882880Z :DEBUG: [] Take Data. Partition 0. Read: {17, 0} (655-655) 2025-04-17T18:02:26.882904Z :DEBUG: [] Take Data. Partition 0. Read: {17, 1} (656-656) 2025-04-17T18:02:26.882926Z :DEBUG: [] Take Data. Partition 0. Read: {17, 2} (657-657) 2025-04-17T18:02:26.882958Z :DEBUG: [] Take Data. Partition 0. Read: {17, 3} (658-658) 2025-04-17T18:02:26.882975Z :DEBUG: [] Take Data. Partition 0. Read: {17, 4} (659-659) 2025-04-17T18:02:26.882992Z :DEBUG: [] Take Data. Partition 0. Read: {17, 5} (660-660) 2025-04-17T18:02:26.883008Z :DEBUG: [] Take Data. Partition 0. Read: {17, 6} (661-661) 2025-04-17T18:02:26.883025Z :DEBUG: [] Take Data. Partition 0. Read: {17, 7} (662-662) 2025-04-17T18:02:26.883040Z :DEBUG: [] Take Data. Partition 0. Read: {17, 8} (663-663) 2025-04-17T18:02:26.883055Z :DEBUG: [] Take Data. Partition 0. Read: {17, 9} (664-664) 2025-04-17T18:02:26.883075Z :DEBUG: [] Take Data. Partition 0. Read: {17, 10} (665-665) 2025-04-17T18:02:26.883117Z :DEBUG: [] Take Data. Partition 0. Read: {17, 11} (666-666) 2025-04-17T18:02:26.883135Z :DEBUG: [] Take Data. Partition 0. Read: {17, 12} (667-667) 2025-04-17T18:02:26.883151Z :DEBUG: [] Take Data. Partition 0. Read: {17, 13} (668-668) 2025-04-17T18:02:26.883170Z :DEBUG: [] Take Data. Partition 0. Read: {17, 14} (669-669) 2025-04-17T18:02:26.883196Z :DEBUG: [] Take Data. Partition 0. Read: {17, 15} (670-670) 2025-04-17T18:02:26.883222Z :DEBUG: [] Take Data. Partition 0. Read: {17, 16} (671-671) 2025-04-17T18:02:26.883238Z :DEBUG: [] Take Data. Partition 0. 
Read: {17, 17} (672-672) 2025-04-17T18:02:26.883255Z :DEBUG: [] Take Data. Partition 0. Read: {18, 0} (673-673) 2025-04-17T18:02:26.883274Z :DEBUG: [] Take Data. Partition 0. Read: {19, 0} (674-674) 2025-04-17T18:02:26.883294Z :DEBUG: [] Take Data. Partition 0. Read: {20, 0} (675-675) 2025-04-17T18:02:26.883310Z :DEBUG: [] Take Data. Partition 0. Read: {20, 1} (676-676) 2025-04-17T18:02:26.883330Z :DEBUG: [] Take Data. Partition 0. Read: {20, 2} (677-677) 2025-04-17T18:02:26.883360Z :DEBUG: [] Take Data. Partition 0. Read: {20, 3} (678-678) 2025-04-17T18:02:26.883381Z :DEBUG: [] Take Data. Partition 0. Read: {20, 4} (679-679) 2025-04-17T18:02:26.883405Z :DEBUG: [] Take Data. Partition 0. Read: {20, 5} (680-680) 2025-04-17T18:02:26.883431Z :DEBUG: [] Take Data. Partition 0. Read: {20, 6} (681-681) 2025-04-17T18:02:26.883508Z :DEBUG: [] Take Data. Partition 0. Read: {20, 7} (682-682) 2025-04-17T18:02:26.883526Z :DEBUG: [] Take Data. Partition 0. Read: {21, 0} (683-683) 2025-04-17T18:02:26.883546Z :DEBUG: [] Take Data. Partition 0. Read: {21, 1} (684-684) 2025-04-17T18:02:26.883570Z :DEBUG: [] Take Data. Partition 0. Read: {21, 2} (685-685) 2025-04-17T18:02:26.883594Z :DEBUG: [] Take Data. Partition 0. Read: {21, 3} (686-686) 2025-04-17T18:02:26.883619Z :DEBUG: [] Take Data. Partition 0. Read: {21, 4} (687-687) 2025-04-17T18:02:26.883644Z :DEBUG: [] Take Data. Partition 0. Read: {21, 5} (688-688) 2025-04-17T18:02:26.883662Z :DEBUG: [] Take Data. Partition 0. Read: {21, 6} (689-689) 2025-04-17T18:02:26.883680Z :DEBUG: [] Take Data. Partition 0. Read: {21, 7} (690-690) 2025-04-17T18:02:26.883697Z :DEBUG: [] Take Data. Partition 0. Read: {21, 8} (691-691) 2025-04-17T18:02:26.883714Z :DEBUG: [] Take Data. Partition 0. Read: {21, 9} (692-692) 2025-04-17T18:02:26.883732Z :DEBUG: [] Take Data. Partition 0. Read: {21, 10} (693-693) 2025-04-17T18:02:26.883751Z :DEBUG: [] Take Data. Partition 0. Read: {22, 0} (694-694) 2025-04-17T18:02:26.883769Z :DEBUG: [] Take Data. Partition 0. Read: {23, 0} (695-695) 2025-04-17T18:02:26.883790Z :DEBUG: [] Take Data. Partition 0. Read: {23, 1} (696-696) 2025-04-17T18:02:26.883832Z :DEBUG: [] Take Data. Partition 0. Read: {23, 2} (697-697) 2025-04-17T18:02:26.883860Z :DEBUG: [] Take Data. Partition 0. Read: {23, 3} (698-698) 2025-04-17T18:02:26.883895Z :DEBUG: [] Take Data. Partition 0. Read: {23, 4} (699-699) 2025-04-17T18:02:27.878612Z :DEBUG: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] [null] Commit offsets [496, 650). Partition stream id: 1 2025-04-17T18:02:27.878827Z :DEBUG: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] [null] The application data is transferred to the client. Number of messages 154, size 308000 bytes 2025-04-17T18:02:27.879058Z :DEBUG: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] [null] Commit offsets [650, 700). Partition stream id: 1 2025-04-17T18:02:27.879447Z :DEBUG: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] [null] The application data is transferred to the client. 
Number of messages 50, size 100000 bytes 2025-04-17T18:02:27.879824Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 2 } } } 2025-04-17T18:02:27.879958Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 commit request from client for 2 in TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-17T18:02:27.879978Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 commit request from 2 to 2 in TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-17T18:02:27.880007Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) committing to position 700 prev 496 end 700 by cookie 2 2025-04-17T18:02:27.880438Z node 26 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T18:02:27.880489Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-17T18:02:27.880684Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 user user offset is set to 700 (startOffset 0) session shared/user_25_1_14112956993507285827_v1 2025-04-17T18:02:27.880953Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:02:27.881003Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:02:27.881047Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:02:27.881096Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:02:27.881119Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T18:02:27.881134Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T18:02:27.881179Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:02:27.881224Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:02:27.881302Z node 26 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-17T18:02:27.884428Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 user user readTimeStamp for offset 700 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T18:02:27.884503Z node 26 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-17T18:02:27.884546Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:02:27.884738Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-04-17T18:02:27.884782Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 700 endOffset 700 with cookie 2 2025-04-17T18:02:27.884853Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 700 2025-04-17T18:02:27.885379Z :DEBUG: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] [null] Committed response: { cookies { assign_id: 1 partition_cookie: 2 } } 2025-04-17T18:02:28.733573Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset700 2025-04-17T18:02:28.878749Z :INFO: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] Closing read session. Close timeout: 10.000000s 2025-04-17T18:02:28.878862Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:topic1:0:1:699:700 2025-04-17T18:02:28.878963Z :INFO: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 9267 BytesRead: 1400000 MessagesRead: 700 BytesReadCompressed: 1400000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:02:28.880302Z :INFO: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] Closing read session. Close timeout: 0.000000s 2025-04-17T18:02:28.880368Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:topic1:0:1:699:700 2025-04-17T18:02:28.880438Z :INFO: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 9269 BytesRead: 1400000 MessagesRead: 700 BytesReadCompressed: 1400000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:02:28.880482Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 grpc read done: success# 0, data# { } 2025-04-17T18:02:28.880606Z :NOTICE: [] [] [5c4102fa-f0a8ad29-e100d796-26ff713f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:02:28.880522Z node 25 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 grpc read failed 2025-04-17T18:02:28.880564Z node 25 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 grpc closed 2025-04-17T18:02:28.880615Z node 25 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_25_1_14112956993507285827_v1 is DEAD 2025-04-17T18:02:28.881195Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_25_1_14112956993507285827_v1 2025-04-17T18:02:28.881271Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [25:7494344009648840274:2524] destroyed 2025-04-17T18:02:28.881346Z node 26 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_25_1_14112956993507285827_v1 2025-04-17T18:02:28.881763Z node 25 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [25:7494344009648840266:2514] disconnected; active server actors: 1 2025-04-17T18:02:28.881799Z node 25 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [25:7494344009648840266:2514] client user disconnected session shared/user_25_1_14112956993507285827_v1 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-17T18:01:59.136640Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1744912919136604 2025-04-17T18:01:59.555983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343925259525360:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:59.556077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:01:59.722026Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343924999404109:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:59.722087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001598/r3tmp/tmpVSO6DT/pdisk_1.dat 2025-04-17T18:01:59.903043Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:01:59.906649Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:02:00.180653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:00.180746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:00.181425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:00.181486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:00.184725Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:02:00.184892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-17T18:02:00.185698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:00.217089Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5991, node 1 2025-04-17T18:02:00.254764Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:02:00.254783Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:02:00.480498Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001598/r3tmp/yandexnaaR9c.tmp 2025-04-17T18:02:00.480558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001598/r3tmp/yandexnaaR9c.tmp 2025-04-17T18:02:00.480755Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001598/r3tmp/yandexnaaR9c.tmp 2025-04-17T18:02:00.480897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:00.540557Z INFO: TTestServer started on Port 16914 GrpcPort 5991 TClient is connected to server localhost:16914 PQClient connected to localhost:5991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:00.822426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:02:02.650486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343938144428275:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:02.650691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343938144428267:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:02.651060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:02.654398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:02:02.683590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343938144428281:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:02:02.740605Z node 1 :TX_PROXY ERROR: Actor# [1:7494343938144428376:2682] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:03.159924Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494343937884306328:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:03.160165Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmJlYzM0YmEtYjg4YzdlYjgtNzI1ODQ2MzItM2NjMTRjYWE=, ActorId: [2:7494343937884306281:2307], ActorState: ExecuteState, TraceId: 01js2eme5b9kyzhncebb969xqc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:03.161894Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:03.169010Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494343938144428386:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:03.170643Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OThkYTE3ZTktNjc4ZWI1OTQtOGI4MDZkNjctZmZlNDdmMDg=, ActorId: [1:7494343938144428265:2336], ActorState: ExecuteState, TraceId: 01js2eme0r82r7nm0ct4rz6m3b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:03.171044Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:03.424848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:03.577847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:03.766013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:5991", true, true, 1000); 2025-04-17T18:02:04.119781Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2emf6n0wp9c5qn9a7xtk1d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY4ZTRiNmEtYjhmMmQ4MS0yMWIyYzU5OS03ZDA4NzdlZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494343946734363391:2987] 2025-04-17T18:02:04.553497Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343925259525360:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:04.553579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:04.722640Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494343924999404109:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:04.722741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-17T18:02:09.670054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:5991 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:02:09.739940Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:5991 MetaRequest { CmdCreateTopic { Topic: ... a. Partition 0. Read: {0, 0} (2-2) 2025-04-17T18:02:28.007141Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_8884559014547864654_v1 got read request: guid# 49093ec5-8c156a0c-ad884f82-8d41b326 2025-04-17T18:02:28.007190Z :DEBUG: [/Root] [/Root] [8d90c9d2-4e422bc0-4326f475-44fb7767] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-17T18:02:26.893000Z WriteTime: 2025-04-17T18:02:26.895000Z Ip: "ipv6:[::1]:35752" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:35752" } } } } 2025-04-17T18:02:28.007319Z :INFO: [/Root] [/Root] [8d90c9d2-4e422bc0-4326f475-44fb7767] Closing read session. Close timeout: 3.000000s 2025-04-17T18:02:28.007352Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-17T18:02:28.007384Z :INFO: [/Root] [/Root] [8d90c9d2-4e422bc0-4326f475-44fb7767] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1300 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:02:28.008163Z :INFO: [/Root] [/Root] [8d90c9d2-4e422bc0-4326f475-44fb7767] Closing read session. Close timeout: 0.000000s 2025-04-17T18:02:28.008205Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-17T18:02:28.008242Z :INFO: [/Root] [/Root] [8d90c9d2-4e422bc0-4326f475-44fb7767] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1300 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:02:28.008270Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_8884559014547864654_v1 grpc read done: success# 0, data# { } 2025-04-17T18:02:28.008315Z :NOTICE: [/Root] [/Root] [8d90c9d2-4e422bc0-4326f475-44fb7767] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:02:28.008289Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_8884559014547864654_v1 grpc read failed 2025-04-17T18:02:28.008304Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_8884559014547864654_v1 grpc closed 2025-04-17T18:02:28.008329Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_8884559014547864654_v1 is DEAD 2025-04-17T18:02:28.008474Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_8884559014547864654_v1 2025-04-17T18:02:28.008509Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494344038608485105:2569] destroyed 2025-04-17T18:02:28.008550Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_8884559014547864654_v1 2025-04-17T18:02:28.009105Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494344038608485103:2566] disconnected; active server actors: 1 2025-04-17T18:02:28.009138Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494344038608485103:2566] client user disconnected session shared/user_3_1_8884559014547864654_v1 2025-04-17T18:02:29.159362Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.159385Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.159406Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:02:29.159782Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:02:29.160313Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:02:29.160498Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.160799Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-17T18:02:29.161787Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.161816Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.161845Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:02:29.162108Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:02:29.162485Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:02:29.162594Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.162742Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T18:02:29.163737Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T18:02:29.248687Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-17T18:02:29.248825Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-3) 2025-04-17T18:02:29.249054Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T18:02:29.249102Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-17T18:02:29.249124Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-17T18:02:29.249164Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-17T18:02:29.308833Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.308867Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.308912Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:02:29.309211Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:02:29.309694Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:02:29.309842Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.310074Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T18:02:29.310873Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.311113Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T18:02:29.311265Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T18:02:29.311326Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T18:02:29.311413Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2025-04-17T18:02:29.350062Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.350110Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.350141Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:02:29.350494Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-17T18:02:29.350878Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:02:29.351001Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:29.351245Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-17T18:02:29.352009Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T18:02:29.352483Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-17T18:02:29.352631Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-17T18:02:29.352701Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T18:02:29.352748Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T18:02:29.352781Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-17T18:02:29.352958Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T18:02:29.353018Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-17T18:02:31.354222Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:31.354258Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:31.354307Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T18:02:31.354711Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-17T18:02:31.392377Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-17T18:02:31.392612Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:31.400113Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:02:31.403171Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-17T18:02:31.404738Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-17T18:02:31.404858Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> BackupRestoreS3::TestAllPrimitiveTypes-INT16 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INT32 >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> TPersQueueTest::DisableDeduplication [GOOD] >> TPersQueueTest::InflightLimit >> BackupRestoreS3::TestAllPrimitiveTypes-YSON [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UUID >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId >> BackupRestore::TestAllPrimitiveTypes-UINT64 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UTF8 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [FAIL] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> TestProgram::YqlKernelContains >> TestProgram::YqlKernelContains [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INT32 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-FLOAT ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:156;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; >> BackupRestoreS3::TestAllPrimitiveTypes-INT32 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INT64 |84.3%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1026092C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFEA342D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFEAF268) std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074) NUnitTest::TTestFactory::Execute()+2438 (0x10724C16) NUnitTest::RunMain(int, char**)+5213 (0x10744E6D) ??+0 (0x7F7C03E88D90) __libc_start_main+128 (0x7F7C03E88E40) _start+41 (0xD80D029) >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UUID [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UTF8 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-YSON >> KqpLimits::ReplySizeExceeded |84.4%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-UUID [GOOD] Test command err: 2025-04-17T18:02:05.010196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343950564442450:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:05.010326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005a8/r3tmp/tmpLqvRgc/pdisk_1.dat 2025-04-17T18:02:06.114631Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:06.115869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:06.155751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:06.155860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:06.167208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3976, node 1 2025-04-17T18:02:07.173272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:07.173323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:07.173343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:07.173471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21357 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:09.291774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:09.793589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343967744312525:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:09.793689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:10.010256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343950564442450:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:10.010322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:12.443731Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] Handle TEvProposeTransaction 2025-04-17T18:02:12.443772Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] TxId# 281474976715658 ProcessProposeTransaction 2025-04-17T18:02:12.443823Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7494343980629214515:2654] 2025-04-17T18:02:12.521546Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-17T18:02:12.521598Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:02:12.521913Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:02:12.522004Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:02:12.522155Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:02:12.522316Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:02:12.522383Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-04-17T18:02:12.522624Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 HANDLE EvClientConnected 2025-04-17T18:02:12.523916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:12.525923Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-04-17T18:02:12.525986Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980629214515:2654] txid# 281474976715658 SEND to# [1:7494343980629214514:2353] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-04-17T18:02:12.836924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7494343980629214659:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:12.836987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:13.192858Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] Handle TEvProposeTransaction 2025-04-17T18:02:13.192893Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] TxId# 281474976715659 ProcessProposeTransaction 2025-04-17T18:02:13.192937Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7494343984924181980:2784] 2025-04-17T18:02:13.195661Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "a" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-04-17T18:02:13.195715Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:02:13.195797Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:02:13.196221Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:02:13.196367Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:02:13.196409Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-04-17T18:02:13.196607Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 HANDLE EvClientConnected 2025-04-17T18:02:13.213707Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-17T18:02:13.213796Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924181980:2784] txid# 281474976715659 SEND to# [1:7494343984924181979:2368] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 53} 2025-04-17T18:02:13.297987Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7494343984924182156:2375] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T18:02:13.331147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343984924182265:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:13.331249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:13.342383Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] Handle TEvProposeTransaction 2025-04-17T18:02:13.342415Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] TxId# 281474976715660 ProcessProposeTransaction 2025-04-17T18:02:13.342487Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950564442494:2125] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [1:7494343984924182277:3001] 2025-04-17T18:02:13.344937Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343984924182277:3001] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "b" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } ... 6d2b726b9f37495ff31a0ae2bb8a0b8d25e2ea9ed1ed13dcbe474da6dc7db3f content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180237Z 2025-04-17T18:02:37.260078Z node 13 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [13:7494344085698533562:2209], result# No response body. REQUEST: GET /test_bucket?prefix=UuidTable HTTP/1.1 HEADERS: Host: localhost:63409 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C0BCB9B7-D57D-4112-888C-545F2B71C4BD amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=9b5435a105a37a3514018291333e2417e11f8be1413aa5bbe706d517792bd05f content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180237Z S3_MOCK::HttpServeList: UuidTable 2025-04-17T18:02:37.297756Z node 13 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [13:7494344085698533562:2209], result# ListObjectsResult { } 2025-04-17T18:02:37.297838Z node 13 :IMPORT INFO: Reply: self# [13:7494344085698533562:2209], success# 1, error# 2025-04-17T18:02:37.297962Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:02:37.297983Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976715665, itemIdx# 0, success# 1 2025-04-17T18:02:37.298361Z node 13 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/UuidTable' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T18:02:37.306672Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:02:37.306830Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:02:37.306854Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 281474976715665 2025-04-17T18:02:37.306911Z node 13 :IMPORT INFO: TImport::TTxProgress: CreateTable propose: info# { Id: 
281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/UuidTable' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710762 2025-04-17T18:02:37.307078Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:02:37.308345Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2025-04-17T18:02:37.310579Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:02:37.310600Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusAccepted 2025-04-17T18:02:37.310707Z node 13 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/UuidTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976710762 Issue: '' } 2025-04-17T18:02:37.312921Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:02:37.346999Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:02:37.347018Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710762 2025-04-17T18:02:37.347099Z node 13 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/UuidTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T18:02:37.348111Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:02:37.348181Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:02:37.348193Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710763, id# 281474976715665 2025-04-17T18:02:37.348235Z node 13 :IMPORT INFO: TImport::TTxProgress: Restore propose: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/UuidTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710763 2025-04-17T18:02:37.348808Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:02:37.349202Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRestore, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-17T18:02:37.350519Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:02:37.350536Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710763, status# StatusAccepted 2025-04-17T18:02:37.350593Z node 13 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/UuidTable' 
DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Subscribed WaitTxId: 281474976710763 Issue: '' } 2025-04-17T18:02:37.351549Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete REQUEST: HEAD /test_bucket/UuidTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:63409 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A1E856CA-770D-4850-AE5D-D76D3E5F3AF6 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=58b0c37b343692abb2c872da2ff5bd42e6c4362cfbf745018175bec8e2f3f6d8 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180237Z S3_MOCK::HttpServeRead: /test_bucket/UuidTable/data_00.csv / 39 REQUEST: GET /test_bucket/UuidTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:63409 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4994DFF5-2CA0-4682-8D31-9E07FD2A6693 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=b77ab21d173d784da56dc0183b0ff09458c75bf1494b1cf83d8c052f2c524024 content-type: application/xml range: bytes=0-38 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180237Z S3_MOCK::HttpServeRead: /test_bucket/UuidTable/data_00.csv / 39 2025-04-17T18:02:37.392385Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533738:2398] [0] Resolve database: name# /Root 2025-04-17T18:02:37.392627Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533738:2398] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:02:37.392645Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533738:2398] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T18:02:37.393014Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533738:2398] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715665 Status: SUCCESS Progress: PROGRESS_TRANSFER_DATA ImportFromS3Settings { endpoint: "localhost:63409" scheme: HTTP bucket: "test_bucket" items { source_prefix: "UuidTable" destination_path: "/Root/UuidTable" } } StartTime { seconds: 1744912957 } } 2025-04-17T18:02:37.398628Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 
2025-04-17T18:02:37.398664Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T18:02:37.399847Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:02:37.797771Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533778:2401] [0] Resolve database: name# /Root 2025-04-17T18:02:37.798089Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533778:2401] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:02:37.798116Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533778:2401] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T18:02:37.798590Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7494344085698533778:2401] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715665 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:63409" scheme: HTTP bucket: "test_bucket" items { source_prefix: "UuidTable" destination_path: "/Root/UuidTable" } } StartTime { seconds: 1744912957 } EndTime { seconds: 1744912957 } } 2025-04-17T18:02:37.903669Z node 13 :TX_PROXY DEBUG: actor# [13:7494344072813630058:2137] Handle TEvExecuteKqpTransaction 2025-04-17T18:02:37.903699Z node 13 :TX_PROXY DEBUG: actor# [13:7494344072813630058:2137] TxId# 281474976715666 ProcessProposeKqpTransaction 2025-04-17T18:02:37.904533Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2engba3gmg136q8zs1fz3d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTI3OTM5N2EtYTQwNWMzYy02OWNlNzY4MS1lMmI1NmEw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode >> BackupRestore::TestAllPrimitiveTypes-FLOAT [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DOUBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 29785, MsgBus: 29575 2025-04-17T18:00:41.926876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343589038420188:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:41.927154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a79/r3tmp/tmpQB5jCr/pdisk_1.dat 2025-04-17T18:00:42.796603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:42.798226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:42.798307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:42.807470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29785, node 1 2025-04-17T18:00:43.146777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:43.146793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:43.146803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:43.146893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29575 TClient is connected to server localhost:29575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:44.660699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:00:44.740513Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:44.755218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:44.904408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:45.131854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:00:45.246753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:00:46.923775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343589038420188:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:46.923834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:47.344734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343614808225593:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:47.344851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:48.359173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:48.441110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:48.494141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:48.568759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:48.630874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:48.700817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:48.795423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:48.978593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343619103193569:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:48.978676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:48.979061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343619103193574:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:48.982955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:49.001576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343619103193576:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:49.074319Z node 1 :TX_PROXY ERROR: Actor# [1:7494343623398160930:3566] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:50.860885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:57.718783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:00:57.718809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:35.690144Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912895630, txId: 281474976710673] shutting down 2025-04-17T18:01:35.758579Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-17T18:01:36.984350Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912896975, txId: 281474976710675] shutting down 2025-04-17T18:01:38.243354Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912898195, txId: 281474976710677] shutting down 2025-04-17T18:01:39.467575Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912899457, txId: 281474976710679] shutting down 2025-04-17T18:01:40.710325Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912900698, txId: 281474976710681] shutting down 2025-04-17T18:01:41.909230Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912901895, txId: 281474976710683] shutting down 2025-04-17T18:01:43.155842Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912903147, txId: 281474976710685] shutting down 2025-04-17T18:01:44.341436Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912904334, txId: 281474976710687] shutting down 2025-04-17T18:01:45.530998Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912905522, txId: 281474976710689] shutting down 2025-04-17T18:01:46.724970Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912906718, txId: 281474976710691] shutting down 2025-04-17T18:01:47.926880Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744912907917, txId: 281474976710693] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198368DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x19CFB43F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x193D3478 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x193E6487 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 5. 
/-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x193E6487 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x193E6487 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19D32635 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x ...
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-04-17T18:02:33.555256Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjQ3YjYzODUtN2YzZDgyNDQtNDgzZjEwYTMtY2M3MzJmNWY=, ActorId: [2:7494344070033589169:2511], ActorState: ExecuteState, TraceId: 01js2enc66994as6ydsj76cr5c, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-04-17T18:02:33.575182Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494344070033589212:2523], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2025-04-17T18:02:33.575448Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjQ3YjYzODUtN2YzZDgyNDQtNDgzZjEwYTMtY2M3MzJmNWY=, ActorId: [2:7494344070033589169:2511], ActorState: ExecuteState, TraceId: 01js2enc6r1evjft4kde8dpw20, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 10814, MsgBus: 18147 2025-04-17T18:02:34.235377Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344075151369051:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:34.235434Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a79/r3tmp/tmph3rE9H/pdisk_1.dat 2025-04-17T18:02:34.342101Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10814, node 3 2025-04-17T18:02:34.371866Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:34.371886Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:34.371898Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:34.372021Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:34.378146Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:34.378224Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:34.379740Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18147 TClient is connected to server localhost:18147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:34.744511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:34.760134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:34.830340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
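Note on the GENERIC_ERROR blocks above: "Uncompatible types in compare" (the spelling is YDB's own) is emitted by YQL's type annotation pass (code 1030) when the two sides of ==, !=, > or <= have no common comparable type, for example an optional column compared against a literal or optional of an unrelated type. A hypothetical YQL sketch that triggers the same class of error — the table and column names are assumptions, not taken from the test:

    -- Assume s has type Optional<Utf8>; comparing it with an Int32
    -- literal fails type annotation with
    --   "Uncompatible types in compare: Optional '==' Int32":
    SELECT
        s == 1 AS eq,
        s != 1 AS ne,
        s >  1 AS gt,
        s <= 1 AS le
    FROM some_table;

Making the two sides agree (e.g. CAST(s AS Int32) or comparing with a Utf8 literal) resolves the error.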
2025-04-17T18:02:34.952299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:35.010747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:36.794070Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344083741305420:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:36.794162Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:36.823106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:02:36.852902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:36.876669Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:36.905472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:36.934389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:02:36.964191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:02:36.992018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:02:37.068903Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344088036273375:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:37.069032Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:37.069133Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344088036273381:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:37.071730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:02:37.079003Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344088036273383:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:02:37.146067Z node 3 :TX_PROXY ERROR: Actor# [3:7494344088036273437:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> BackupRestoreS3::TestAllPrimitiveTypes-INT64 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter >> TPQCDTest::TestRelatedServicesAreRunning >> TPQCDTest::TestUnavailableWithoutBoth >> TPQCDTest::TestUnavailableWithoutClustersList >> TPQCDTest::TestDiscoverClusters >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> Balancing::Balancing_OneTopic_TopicApi >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> TopicAutoscaling::CommitTopPast >> TopicAutoscaling::PartitionSplit_PQv1 >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-04-17T18:02:05.013706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343949561387218:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:05.014041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004c8/r3tmp/tmpmkwb9j/pdisk_1.dat 2025-04-17T18:02:06.061155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:06.114460Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:06.156040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:06.156120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:06.167209Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29107, node 1 2025-04-17T18:02:07.173287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:07.173309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:07.173319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:07.173532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:09.291766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:09.786463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343966741257352:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:09.786572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:10.015511Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343949561387218:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:10.015575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:12.443774Z node 1 :TX_PROXY DEBUG: actor# [1:7494343949561387321:2125] Handle TEvProposeTransaction 2025-04-17T18:02:12.443814Z node 1 :TX_PROXY DEBUG: actor# [1:7494343949561387321:2125] TxId# 281474976715658 ProcessProposeTransaction 2025-04-17T18:02:12.443866Z node 1 :TX_PROXY DEBUG: actor# [1:7494343949561387321:2125] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7494343979626159341:2655] 2025-04-17T18:02:12.532281Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-17T18:02:12.532328Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:02:12.532723Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:02:12.532832Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:02:12.533076Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:02:12.533198Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:02:12.533240Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-04-17T18:02:12.533427Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 HANDLE EvClientConnected 2025-04-17T18:02:12.534757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:12.536703Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-04-17T18:02:12.536769Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159341:2655] txid# 281474976715658 SEND to# [1:7494343979626159340:2353] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-04-17T18:02:12.848601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7494343979626159488:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:12.848661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:12.848744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343979626159493:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:12.849098Z node 1 :TX_PROXY DEBUG: actor# [1:7494343949561387321:2125] Handle TEvProposeTransaction 2025-04-17T18:02:12.849126Z node 1 :TX_PROXY DEBUG: actor# [1:7494343949561387321:2125] TxId# 281474976715659 ProcessProposeTransaction 2025-04-17T18:02:12.849162Z node 1 :TX_PROXY DEBUG: actor# [1:7494343949561387321:2125] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7494343979626159496:2776] 2025-04-17T18:02:12.888517Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T18:02:12.898079Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:02:12.898134Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T18:02:12.899429Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:02:12.899547Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:02:12.899725Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:02:12.899895Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:02:12.899954Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-04-17T18:02:12.900172Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 HANDLE EvClientConnected 2025-04-17T18:02:12.901414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:02:12.903330Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 
281474976715659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-17T18:02:12.903373Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343979626159496:2776] txid# 281474976715659 SEND to# [1:7494343979626159495:2364] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 53} 2025-04-17T18:02:12.916827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343979626159495:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281 ... : 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:02:34.192949Z node 10 :TX_PROXY DEBUG: [CreateExport] [10:7494344074180704015:2360] [281474976715662] Send request: schemeShardId# 72057594046644480 2025-04-17T18:02:34.193258Z node 10 :EXPORT DEBUG: TExport::TTxCreate: DoExecute 2025-04-17T18:02:34.193636Z node 10 :EXPORT DEBUG: TExport::TTxCreate: Reply: status# SUCCESS, error# 2025-04-17T18:02:34.195377Z node 10 :EXPORT DEBUG: TExport::TTxCreate: DoComplete 2025-04-17T18:02:34.195499Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T18:02:34.195518Z node 10 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 281474976715662 2025-04-17T18:02:34.195556Z node 10 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976715662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:48998' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 } 2025-04-17T18:02:34.195597Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T18:02:34.195600Z node 10 :TX_PROXY DEBUG: [CreateExport] [10:7494344074180704015:2360] [281474976715662] Handle TEvExport::TEvCreateExportResponse: record# Entry { Id: 281474976715662 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:6482" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744912954 } } 2025-04-17T18:02:34.195667Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T18:02:34.195691Z node 10 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710757, id# 281474976715662 2025-04-17T18:02:34.195724Z node 10 :EXPORT INFO: TExport::TTxProgress: MkDir propose: info# { Id: 281474976715662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:48998' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, txId# 281474976710757 2025-04-17T18:02:34.195790Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T18:02:34.198205Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T18:02:34.198231Z node 10 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710757, status# StatusAccepted 2025-04-17T18:02:34.198323Z node 10 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976715662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:48998' State: CreateExportDir WaitTxId: 281474976710757 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710757 2025-04-17T18:02:34.199918Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T18:02:34.199992Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704034:2361] [0] Resolve database: name# /Root 2025-04-17T18:02:34.200319Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704034:2361] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:02:34.200346Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704034:2361] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T18:02:34.200823Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704034:2361] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976715662 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:6482" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744912954 } } 2025-04-17T18:02:34.205351Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T18:02:34.205377Z node 10 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710757 2025-04-17T18:02:34.205394Z node 10 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710757, id# 281474976715662, itemIdx# 4294967295 2025-04-17T18:02:34.205509Z node 10 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976715662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:48998' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/view' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T18:02:34.211791Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T18:02:34.211868Z node 10 :EXPORT DEBUG: HandleSchemeDescription, self: [10:7494344074180704054:2769], status: StatusSuccess REQUEST: PUT /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:6482 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 23987F17-E51E-4E92-BB23-3F6739964950 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=25e87a838eef1ae37acb7423e6714ed1db93d7e1632ba3024ae65d29eaf51791 content-length: 165 content-md5: VGI/U9aBQRGDg7M5DEll1Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 24d8c773c0010e0420ccc01cdf6767c51f2496ddc97edc8d89de1748e6cf32e9 x-amz-date: 20250417T180234Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/create_view.sql / / 165 2025-04-17T18:02:34.319696Z node 10 :EXPORT DEBUG: HandleSchemePutResponse, self: [10:7494344074180704054:2769], result: PutObjectResult { ETag: 54623f53d68141118383b3390c4965d5 } REQUEST: PUT /test_bucket/view/metadata.json HTTP/1.1 HEADERS: Host: localhost:6482 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A6C2A82A-5992-4A08-B132-FB2109E17ABF amz-sdk-request: 
attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=f8867dbd43e9352b594b78975e1ef4cb3d24b21fe2bba2466cc0e75ccf05778a content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T180234Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/metadata.json / / 31 2025-04-17T18:02:34.323566Z node 10 :EXPORT DEBUG: HandleMetadataPutResponse, self: [10:7494344074180704054:2769], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T18:02:34.323591Z node 10 :EXPORT INFO: Finish, self: [10:7494344074180704054:2769], success: 1, error: 2025-04-17T18:02:34.323730Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T18:02:34.323763Z node 10 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976715662, itemIdx# 0, success# 1, error# 2025-04-17T18:02:34.325588Z node 10 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T18:02:34.405524Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704062:2362] [0] Resolve database: name# /Root 2025-04-17T18:02:34.405919Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704062:2362] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:02:34.405953Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704062:2362] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T18:02:34.406525Z node 10 :TX_PROXY DEBUG: [GetExport] [10:7494344074180704062:2362] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976715662 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:6482" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744912954 } } assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, 
NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B85038) NTestSuiteBackupRestoreS3::TestViewBackupRestore()+1418 (0x18BC876A) NTestSuiteBackupRestoreS3::TTestCaseTestAllSchemeObjectTypes::Execute_(NUnitTest::TTestContext&)+393 (0x18BCA919) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7FCC1CBB4D90) __libc_start_main+128 (0x7FCC1CBB4E40) _start+41 (0x164AC029) |84.4%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> BackupRestore::TestAllPrimitiveTypes-YSON [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UUID >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase >> ColumnBuildTest::ValidDefaultValue >> ColumnBuildTest::AlreadyExists >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild >> Donor::MultipleEvicts |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> KqpLimits::ReplySizeExceeded [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DOUBLE [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DATE >> TPersQueueTest::TestWriteStat [GOOD] >> TPersQueueTest::TestWriteSessionsConflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 31829, MsgBus: 3970 2025-04-17T18:01:05.635874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343690925742012:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:05.637138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a66/r3tmp/tmpcr9MYh/pdisk_1.dat 2025-04-17T18:01:06.219367Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:06.275436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:06.275529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:06.278948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 31829, node 1 2025-04-17T18:01:06.406811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:06.406830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:06.406842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:06.406964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3970 TClient is connected to server localhost:3970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:01:07.236206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:07.262436Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:01:10.087182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343712400579159:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:10.087273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343712400579168:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:10.087325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:10.094009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:01:10.109607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343712400579173:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:01:10.182532Z node 1 :TX_PROXY ERROR: Actor# [1:7494343712400579224:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:10.532464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:01:10.638739Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343690925742012:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:10.638832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6172, MsgBus: 9387 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a66/r3tmp/tmpdiC5TL/pdisk_1.dat 2025-04-17T18:01:12.075511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:01:12.228231Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:12.279140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:12.279237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:12.280502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6172, node 2 2025-04-17T18:01:12.452127Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:12.452152Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:12.452161Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:12.452284Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9387 TClient is connected to server localhost:9387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:01:13.815006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:01:17.196172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343741680911594:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:17.196268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:17.202328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494343741680911621:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:17.206681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:01:17.230061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494343741680911623:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:01:17.288991Z node 2 :TX_PROXY ERROR: Actor# [2:7494343741680911675:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:01:17.320389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:01:17.529620Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494343741680911841:2351], SessionActorId: [2:7494343741680911827:2351], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[2:7494343741680911827:2351]. isRollback=0 2025-04-17T18:01:17.529885Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWUyMmYwNzItYTVhNDdjMjQtYjVhNDY5OTgtZTM1ZGViYg==, ActorId: [2:7494343741680911827:2351], ActorState: ExecuteState, TraceId: 01js2ek1v4d95pbaac4m6zsecy, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7494343741680911842:2351] from: [2:7494343741680911841:2351] 2025-04-17T18:01:17.529977Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494343741680911842:2351] TxId: 281474976710661. Ctx: { TraceId: 01js2ek1v4d95pbaac4m6zsecy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWUyMmYwNzItYTVhNDdjMjQtYjVhNDY5OTgtZTM1ZGViYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-04-17T18:01:17.530136Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494343741680911845:2351], TxId: 281474976710661, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2ek1v4d95pbaac4m6zsecy. SessionId : ydb://session/3?node_id=2&id=OWUyMmYwNzItYTVhNDdjMjQtYjVhNDY5OTgtZTM1ZGViYg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494343741680911842:2351], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:17.531042Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWUyMmYwNzIt ... CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7494343782034708302:2404], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:26.170494Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494343782034708305:2506], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=OTBmNTQzZTgtMjZjMmVhYS03NGY2YTk2Yy04YjgzMmZmNg==. TraceId : 01js2ek8yx3b6mmq8rwch4vgv8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494343782034708302:2404], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:26.175100Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494343782034708310:2511], TxId: 281474976710671, task: 6. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=OTBmNTQzZTgtMjZjMmVhYS03NGY2YTk2Yy04YjgzMmZmNg==. TraceId : 01js2ek8yx3b6mmq8rwch4vgv8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7494343782034708302:2404], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:01:26.175637Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTBmNTQzZTgtMjZjMmVhYS03NGY2YTk2Yy04YjgzMmZmNg==, ActorId: [3:7494343769149805488:2404], ActorState: ExecuteState, TraceId: 01js2ek8yx3b6mmq8rwch4vgv8, Create QueryResponse for error on request, msg: VERIFY failed (2025-04-17T18:01:26.185320Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Error: Stream write queries aren't allowed., code: 2029 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-04-17T18:01:33.958150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:01:33.958199Z node 3 :IMPORT WARN: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1987E508 1. /-S/util/system/yassert.cpp:55: Panic @ 0x1986C79A 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x19CFB6B1 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x18FD7CD6 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:360: CreateSampleTables @ 0x495ED106 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:574: operator() @ 0x49630423 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x49630423 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x49630423 8. /-S/util/thread/pool.h:71: Process @ 0x49630423 9. /-S/util/thread/pool.cpp:411: DoExecute @ 0x198912E5 10. /-S/util/thread/factory.h:15: Execute @ 0x1988DE7C 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x1988DE7C 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x19882584 13. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x1952A698 14. ??:0: ?? @ 0x7FBCDE1C5AC2 15. ??:0: ?? @ 0x7FBCDE25784F Trying to start YDB, gRPC: 26025, MsgBus: 30940 2025-04-17T18:02:39.692591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344097063249563:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:39.692653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a66/r3tmp/tmpeb6z8s/pdisk_1.dat 2025-04-17T18:02:39.904088Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26025, node 1 2025-04-17T18:02:39.967993Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:39.968009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:39.968015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:39.968096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:40.022820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:40.022934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:40.024709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30940 TClient is connected to server localhost:30940 WaitRootIsUp 'Root'... 
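The VERIFY crash in the trace above comes from a test helper asserting on a failed query result from a thread-pool thread (frames 6-9 run through library/cpp/threading/future and util/thread/pool), so the unittest registar cannot report a normal test failure ("requirement UnittestThread failed") and aborts the whole binary instead. A minimal sketch of the helper at kqp_ut_common.h:375, assuming the conventional UNIT_ASSERT_C form; only the signature and the checked condition (result.IsSuccess()) are visible in the trace, the body and include path are assumptions:

#include <library/cpp/testing/unittest/registar.h>
// NYdb::TStatus comes from the C++ SDK; the exact include path is an assumption.
#include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>

namespace NKikimr::NKqp {

inline void AssertSuccessResult(const NYdb::TStatus& result) {
    // Attach the server-side issues to the assertion so a failure prints the
    // underlying error (here: "Stream write queries aren't allowed., code: 2029").
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp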
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:40.305952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:40.324420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:40.418141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:40.527698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:40.596403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:41.778858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344105653185931:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:41.778992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:41.964365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:02:41.985515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.007137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.029795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.051425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.079876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.120117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.193141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344109948153891:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:42.193203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344109948153896:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:42.193224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:42.196405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:02:42.203145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344109948153898:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:02:42.272327Z node 1 :TX_PROXY ERROR: Actor# [1:7494344109948153952:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:42.930372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:02:44.694320Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344097063249563:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:44.694409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:46.244446Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTdhNTQ0Y2UtNmMxMjE4ZjEtNjQ4OTczYTEtM2QwMDUwMw==, ActorId: [1:7494344109948154273:2511], ActorState: ExecuteState, TraceId: 01js2enr9c721je55vqm4achc6, Create QueryResponse for error on request, msg: >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-DOUBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-17T17:56:48.679852Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.679883Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.679914Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-17T17:56:48.680393Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-17T17:56:48.680438Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.680469Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.681851Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007052s 2025-04-17T17:56:48.682570Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-17T17:56:48.682603Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.682647Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.682700Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008830s 2025-04-17T17:56:48.683146Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-17T17:56:48.683176Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.683200Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T17:56:48.683249Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006618s 2025-04-17T17:56:48.722268Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1744912608722228 2025-04-17T17:56:49.272712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342590285067105:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:49.272763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:49.410465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342590712360865:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:49.410637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:56:50.106167Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:56:50.113927Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ae/r3tmp/tmpH0eE0S/pdisk_1.dat 2025-04-17T17:56:50.397433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:50.459056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:56:50.757333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:50.757603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:50.767268Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:56:50.775495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:50.807221Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26149, node 1 2025-04-17T17:56:50.856078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:56:50.856143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:56:50.866553Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:50.866586Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:56:50.917737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:56:51.078912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0015ae/r3tmp/yandexdvWdre.tmp 2025-04-17T17:56:51.078938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/6873/0015ae/r3tmp/yandexdvWdre.tmp 2025-04-17T17:56:51.079069Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0015ae/r3tmp/yandexdvWdre.tmp 2025-04-17T17:56:51.079175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:56:51.275491Z INFO: TTestServer started on Port 24762 GrpcPort 26149 TClient is connected to server localhost:24762 PQClient connected to localhost:26149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:56:51.811615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T17:56:54.274429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342590285067105:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:54.274500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:54.410309Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342590712360865:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:56:54.410380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:56:55.331600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342616054871909:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:55.331680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342616054871920:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:55.331733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:55.352455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T17:56:55.390097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494342616054871967:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:55.390644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:56:55.403492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494342616054871938:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T17:56:55.710389Z node 1 :TX_PROXY ERROR: Actor# [1:7494342616054872022:2702] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:56:55.747990Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342616054872033:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:56:55.749548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:56:55.749653Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTJjYTg4NjEtN2FkN2UyMmItMzkyMjgwZDQtYjIzM2JiNjY=, ActorId: [1:7494342616054871897:2339], ActorState: ExecuteState, TraceId: 01js2eb1x1507xryr5prwmmgqz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:56:55.752493Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:56:55.753494Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342616482164976:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access p ... 224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:02:45.084714Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T18:02:45.084748Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:02:45.084781Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000ptest-message-group-id 2025-04-17T18:02:45.084798Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-04-17T18:02:45.084814Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:02:45.084853Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:02:45.084894Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:02:45.084990Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:02:45.085130Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-04-17T18:02:45.099949Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7494344110767135520:2620] 2025-04-17T18:02:45.100101Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:02:45.100136Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-04-17T18:02:45.100173Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100257Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-17T18:02:45.100298Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100333Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-17T18:02:45.100358Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100397Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-17T18:02:45.100431Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T18:02:45.100465Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-17T18:02:45.100492Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100526Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-17T18:02:45.100559Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100595Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-17T18:02:45.100619Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100654Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-17T18:02:45.100685Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100720Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-17T18:02:45.100749Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:02:45.100785Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-17T18:02:45.100815Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T18:02:45.100854Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-17T18:02:45.101098Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T18:02:45.101152Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T18:02:45.101146Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T18:02:45.101279Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:02:45.101333Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-17T18:02:45.101373Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-04-17T18:02:45.101598Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-17T18:02:45.101636Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:02:45.101730Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744912965071 queuesize 0 startOffset 0 2025-04-17T18:02:45.102569Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 16 queued_in_partition_duration_ms: 11 } 2025-04-17T18:02:45.102653Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 1 2025-04-17T18:02:45.102712Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 2 2025-04-17T18:02:45.102750Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 3 2025-04-17T18:02:45.102781Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 4 2025-04-17T18:02:45.102817Z 
:DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 5 2025-04-17T18:02:45.102849Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 6 2025-04-17T18:02:45.102879Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 7 2025-04-17T18:02:45.102910Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 8 2025-04-17T18:02:45.102942Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 9 2025-04-17T18:02:45.102969Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: acknowledged message 10 2025-04-17T18:02:45.106350Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: close. Timeout = 0 ms 2025-04-17T18:02:45.106442Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session will now close 2025-04-17T18:02:45.106524Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: aborting 2025-04-17T18:02:45.107202Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:02:45.107269Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0] Write session: destroy 2025-04-17T18:02:45.108707Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0 grpc closed 2025-04-17T18:02:45.108772Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|6d72b764-f26e1ba6-dcec4ec1-449d1ef8_0 is DEAD 2025-04-17T18:02:45.110100Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:02:45.110367Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7494344119357070438:2654] destroyed 2025-04-17T18:02:45.110436Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
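RetryPolicy::RetryWithBatching above passes once the write session drains all ten acknowledged messages and shuts down cleanly. The reconnect delays logged at the start of this test (0.006618s, 0.007052s, 0.008830s) are consistent with a jittered exponential backoff; a minimal sketch of such a policy, with illustrative constants that are not the SDK's actual defaults:

#include <algorithm>
#include <chrono>
#include <cmath>
#include <random>

// Delay for the given retry attempt: exponential growth from baseSeconds,
// capped at maxSeconds, with uniform jitter in [cap/2, cap) so concurrent
// sessions do not reconnect in lockstep.
std::chrono::duration<double> NextRetryDelay(unsigned attempt,
                                             double baseSeconds = 0.005,
                                             double maxSeconds = 30.0) {
    static thread_local std::mt19937 rng{std::random_device{}()};
    const double cap =
        std::min(maxSeconds, baseSeconds * std::pow(2.0, static_cast<double>(attempt)));
    std::uniform_real_distribution<double> jitter(cap / 2.0, cap);
    return std::chrono::duration<double>(jitter(rng));
}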
>> ColumnBuildTest::AlreadyExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-04-17T18:02:43.666712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344110567983626:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.666785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d0b/r3tmp/tmpmoedvW/pdisk_1.dat 2025-04-17T18:02:44.218196Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.221635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.221722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.223501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29724, node 1 2025-04-17T18:02:44.380130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:44.380151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:44.380157Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:44.380256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:46.699811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344123452886210:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.699919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.700211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344123452886237:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.706870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.721189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344123452886239:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-17T18:02:46.831590Z node 1 :TX_PROXY ERROR: Actor# [1:7494344123452886300:2361] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.350072Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344123452886317:2377], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.350473Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjZmNTQyY2MtMmU1ZmQ0MWUtNmI0Mzk2MmItZjZiMjQwZGU=, ActorId: [1:7494344123452886208:2366], ActorState: ExecuteState, TraceId: 01js2ens188t1c927vzc4v0kn3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.370628Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:02:45.494415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:02:45.494511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.494553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:02:45.494583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:02:45.494623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:02:45.494647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:02:45.494696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.494774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:02:45.495126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:02:45.576521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:02:45.576568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:45.590554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:02:45.590645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
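The schemeshard lines below walk transaction 1 (the implicit ESchemeOpAlterSubDomain) through its suboperation lifecycle: TCreateParts, then NSubDomainState::TConfigureParts, then TPropose, then TDone, visible as "Change state for txid 1:0 2 -> 3", "3 -> 128", and "128 -> 240". A rough sketch of that numbering as inferred from this log alone; the real enum lives in the schemeshard sources and has many more states:

// Reconstruction for reading the log, not the actual schemeshard enum;
// values and names are inferred from the "Change state for txid" lines below.
enum ETxStateSketch {
    CreateParts    = 2,   // create/prepare shards ("no shards to create" here)
    ConfigureParts = 3,   // configure the created parts
    Propose        = 128, // propose to the coordinator, wait for the plan step
    Done           = 240, // publish to the scheme board and finish
};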
2025-04-17T18:02:45.590789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:02:45.596101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:02:45.596360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:02:45.596914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.597186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:02:45.600813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.602133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:45.602201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.602317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:02:45.602371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.602420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:02:45.602589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.608717Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:02:45.727117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:02:45.727318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.727517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:02:45.727718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:02:45.727765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.730012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.730124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:02:45.730302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.730369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:02:45.730409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:02:45.730438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:02:45.732217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.732269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:02:45.732300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:02:45.734135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.734199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.734237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.734281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.737383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:02:45.742953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:02:45.743148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:02:45.744063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.744172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:02:45.744224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.744469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:02:45.744517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.744669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:02:45.744741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:02:45.746533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-17T18:02:45.746578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.746779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.746815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:02:45.747105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.747162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:02:45.747247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.747275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.747310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.747354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.747397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:02:45.747437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.747474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:02:45.747501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:02:45.747553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:02:45.747583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:02:45.747615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:02:45.749319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.749434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.749472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:793:2671] TestWaitNotification: OK eventTxId 105 2025-04-17T18:02:48.109710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-04-17T18:02:48.109974Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 307us result status StatusSuccess 2025-04-17T18:02:48.110410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-17T18:02:48.112681Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } 2025-04-17T18:02:48.114929Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 
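Below, the column build for ColumnBuildTest::AlreadyExists fails by design: the table already defines value as Utf8 (see the describe result above), while the build request supplies a Uint64 default, so the internal ESchemeOpAlterTable is rejected with "Cannot alter type for column 'value'" and the build replies BAD_REQUEST. A hedged reconstruction of that check; the function is illustrative, not the schemeshard code:

#include <optional>
#include <string>

// Returns an error message if a column-build request re-declares an existing
// column with a different type; std::nullopt means the request may proceed.
std::optional<std::string> ValidateBuildColumn(const std::string& name,
                                               const std::string& existingType,
                                               const std::string& requestedType) {
    if (existingType != requestedType) {
        return "Cannot alter type for column '" + name + "'";
    }
    return std::nullopt;
}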
2025-04-17T18:02:48.115025Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1155:3025], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:02:48.115168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2025-04-17T18:02:48.115237Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2025-04-17T18:02:48.115324Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1155:3025], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:02:48.117277Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T18:02:48.117361Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1155:3025], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:02:48.119766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" 
OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2025-04-17T18:02:48.119991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-04-17T18:02:48.120293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-04-17T18:02:48.133718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-17T18:02:48.133852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-04-17T18:02:48.133976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-04-17T18:02:48.134050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-04-17T18:02:48.134141Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2025-04-17T18:02:48.135582Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1155:3025], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-04-17T18:02:48.136261Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1155:3025] 2025-04-17T18:02:48.136581Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { 
message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-04-17T18:02:43.666766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344110804675277:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.666838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cc5/r3tmp/tmpHyaQYC/pdisk_1.dat 2025-04-17T18:02:44.143689Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.151770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.151923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.159260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14912, node 1 2025-04-17T18:02:44.316453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001cc5/r3tmp/yandexg4vWgW.tmp 2025-04-17T18:02:44.316480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001cc5/r3tmp/yandexg4vWgW.tmp 2025-04-17T18:02:44.321686Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001cc5/r3tmp/yandexg4vWgW.tmp 2025-04-17T18:02:44.321847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:46.612015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344123689577875:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.612087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.614166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344123689577887:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.627807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.647207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344123689577889:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-17T18:02:46.781692Z node 1 :TX_PROXY ERROR: Actor# [1:7494344123689577952:2359] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.350218Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344123689577973:2377], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.350510Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWM0MTdkMWMtMThmMzllMTEtMmRhOWRmZWItMzNiNDFlM2E=, ActorId: [1:7494344123689577873:2363], ActorState: ExecuteState, TraceId: 01js2enryhcdkwx4pjwf3a0epz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.370754Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> Donor::MultipleEvicts [GOOD] >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ColumnBuildTest::BaseCase [GOOD] >> ColumnBuildTest::CancelBuild [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 3871406552419604255 2025-04-17T18:02:47.597270Z 1 00h00m00.002560s :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.107806s 2025-04-17T18:02:47.597417Z 1 00h00m00.002560s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.107990s 0 donors: 2025-04-17T18:02:49.076556Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.076805Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.100752Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2025-04-17T18:02:49.181276Z 25 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.181498Z 25 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.206440Z 25 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-04-17T18:02:49.280728Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.280954Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.293612Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 
VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2025-04-17T18:02:49.353533Z 25 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.353688Z 25 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.366623Z 25 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-04-17T18:02:49.426413Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.426568Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.437396Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2025-04-17T18:02:49.485420Z 25 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.485567Z 25 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.494430Z 25 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-04-17T18:02:49.542495Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.542636Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.551807Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2025-04-17T18:02:49.602545Z 25 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.602705Z 25 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.611568Z 25 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-04-17T18:02:49.660524Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:02:49.660663Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6663112094606413040] 2025-04-17T18:02:49.671637Z 26 00h00m20.019216s :BS_SYNCER 
ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:02:45.335546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:02:45.335643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.335699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:02:45.335738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:02:45.335782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:02:45.335843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:02:45.335904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.335967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:02:45.336265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:02:45.420856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:02:45.420909Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:45.436893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:02:45.437354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:02:45.437516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:02:45.447510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:02:45.447737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:02:45.448375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.448567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:02:45.451232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.452487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:45.452541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T18:02:45.452723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:02:45.452771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.452828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:02:45.452953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.459206Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:02:45.574643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:02:45.574881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.575091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:02:45.575311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:02:45.575367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.577390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.577500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:02:45.577661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.577724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:02:45.577757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:02:45.577808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:02:45.579370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.579420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:02:45.579451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:02:45.580995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.581036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.581071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.581118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.584497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:02:45.586049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:02:45.586240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:02:45.587139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.587264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:02:45.587309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.587583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:02:45.587633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.587783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:02:45.587873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:02:45.589599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:45.589638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.589814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.589860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:02:45.590218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.590259Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:02:45.590352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.590385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.590420Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.590458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.590495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:02:45.590538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.590591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:02:45.590620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:02:45.590671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:02:45.590705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:02:45.590737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:02:45.592325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.592421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.592463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-17T18:02:49.622671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-17T18:02:49.622706Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-17T18:02:49.622752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-17T18:02:49.622835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-17T18:02:49.625628Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T18:02:49.625700Z node 1 :BUILD_INDEX DEBUG: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.625868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-17T18:02:49.625963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-17T18:02:49.626100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-17T18:02:49.626129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-04-17T18:02:49.626165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-17T18:02:49.638444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1814:3677], Recipient [1:752:2641]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1814:3677] ServerId: [1:1816:3679] } 2025-04-17T18:02:49.638510Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:02:49.696106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-17T18:02:49.696223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-17T18:02:49.696276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-17T18:02:49.696320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-17T18:02:49.698451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-17T18:02:49.698505Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-17T18:02:49.698577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-17T18:02:49.698619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 
2025-04-17T18:02:49.698666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-17T18:02:49.698689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-17T18:02:49.698715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-17T18:02:49.698772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:563:2502] message: TxId: 281474976725761 2025-04-17T18:02:49.698809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-17T18:02:49.698837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-17T18:02:49.698862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-17T18:02:49.698919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-17T18:02:49.702037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-17T18:02:49.702109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-17T18:02:49.702167Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-17T18:02:49.702263Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.704099Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T18:02:49.704191Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 
281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.704240Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:02:49.705454Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T18:02:49.705507Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.705547Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-04-17T18:02:49.705635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T18:02:49.705665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1161:3032] TestWaitNotification: OK eventTxId 106 2025-04-17T18:02:49.707875Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-17T18:02:49.708126Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:02:45.194111Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:02:45.194222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.194260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:02:45.194289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:02:45.195375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:02:45.195454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:02:45.195565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.195650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:02:45.200757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:02:45.291951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:02:45.292005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:45.321542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:02:45.321925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:02:45.322063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:02:45.353429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:02:45.353613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:02:45.354298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.354491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:02:45.358031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.366427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:45.366491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.366704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:02:45.366760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.366835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:02:45.367007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.373351Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:02:45.493542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:02:45.495162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.496299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:02:45.497486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:02:45.497556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.502975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.503123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:02:45.503337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.503457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:02:45.503495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:02:45.503531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:02:45.505432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.505514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:02:45.505550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:02:45.507340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.507389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.507427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.507473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.511761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:02:45.513679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:02:45.514744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:02:45.515784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.515955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:02:45.516003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.517744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:02:45.517813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.517974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:02:45.518069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:02:45.520943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:45.520988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.521192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.521233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:02:45.521708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.521755Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:02:45.521881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.521912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.521951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.521995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.522038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:02:45.522077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.522140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:02:45.522189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-04-17T18:02:45.522254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:02:45.522307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:02:45.522343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:02:45.524029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.524140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.524197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... , LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-17T18:02:49.596837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-17T18:02:49.596867Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-17T18:02:49.596908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-17T18:02:49.596985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-17T18:02:49.599444Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T18:02:49.599517Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 
281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.599680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-17T18:02:49.599781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-17T18:02:49.599933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-17T18:02:49.599964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-04-17T18:02:49.600014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-17T18:02:49.611882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1814:3677], Recipient [1:752:2641]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1814:3677] ServerId: [1:1816:3679] } 2025-04-17T18:02:49.611923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:02:49.668769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-17T18:02:49.668889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-17T18:02:49.668943Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-17T18:02:49.668982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-17T18:02:49.671212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-17T18:02:49.671278Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-17T18:02:49.671359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-17T18:02:49.671396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-17T18:02:49.671435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-17T18:02:49.671460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-17T18:02:49.671490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-17T18:02:49.671551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:563:2502] message: TxId: 281474976725761 2025-04-17T18:02:49.671590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready 
parts: 1/1 2025-04-17T18:02:49.671619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-17T18:02:49.671645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-17T18:02:49.671706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-17T18:02:49.674612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-17T18:02:49.674685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-17T18:02:49.674756Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-17T18:02:49.674832Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.676564Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T18:02:49.676650Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.676700Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:02:49.678221Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-17T18:02:49.678300Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, 
Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-17T18:02:49.678335Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-04-17T18:02:49.678456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T18:02:49.678497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1161:3032] TestWaitNotification: OK eventTxId 106 2025-04-17T18:02:49.680351Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-17T18:02:49.680611Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } >> BackupRestore::TestAllPrimitiveTypes-UUID [GOOD] |84.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:02:46.582996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:02:46.583073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:46.583113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:02:46.583143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:02:46.583181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:02:46.583206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:02:46.583254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:46.583342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:02:46.583703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:02:46.663568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:02:46.663625Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:46.676876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:02:46.676988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:02:46.677114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:02:46.683760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:02:46.684039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:02:46.684630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:46.684926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:02:46.689236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:46.690619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:46.690684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:46.690803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-04-17T18:02:46.690845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:46.690884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:02:46.691060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.697314Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:02:46.817934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:02:46.818140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.818363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:02:46.818627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:02:46.818678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.820981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:46.821139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:02:46.821308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.821353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:02:46.821391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:02:46.821420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:02:46.823314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.823372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:02:46.823405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:02:46.824997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.825038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.825071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-04-17T18:02:46.825109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:02:46.828881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:02:46.830680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:02:46.830876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:02:46.831797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:46.831916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:02:46.831965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:46.832193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:02:46.832236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:46.832392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:02:46.832487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:02:46.834436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:46.834481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:46.834660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:46.834697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:02:46.834987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:46.835030Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:02:46.835116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:46.835144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:46.835177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:46.835215Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:46.835264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:02:46.835300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:46.835334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:02:46.835363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:02:46.835416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:02:46.835446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:02:46.835488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:02:46.837303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:46.837409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:46.837446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:02:49.959902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-17T18:02:49.960066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:02:49.960258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T18:02:49.960290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-17T18:02:49.960337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-04-17T18:02:49.960598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:49.960682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:02:49.960725Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-04-17T18:02:49.960764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 
128 -> 240 2025-04-17T18:02:49.962391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T18:02:49.962437Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-04-17T18:02:49.962510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T18:02:49.962537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T18:02:49.962571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T18:02:49.962607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T18:02:49.962639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-04-17T18:02:49.962686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710761 2025-04-17T18:02:49.962720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T18:02:49.962746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-17T18:02:49.962770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-17T18:02:49.962832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-17T18:02:49.964212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T18:02:49.964291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T18:02:49.964341Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-04-17T18:02:49.964426Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1169:3022], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:02:49.965858Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:02:49.965929Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1169:3022], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:02:49.965968Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-17T18:02:49.967290Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:02:49.967361Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1169:3022], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:02:49.967389Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-17T18:02:49.967503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:02:49.967552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1193:3046] TestWaitNotification: OK eventTxId 102 2025-04-17T18:02:49.969689Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-17T18:02:49.969947Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } 
} ScanSettings { MaxBatchRows: 1 } } Progress: 0 } 2025-04-17T18:02:49.972070Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:02:49.972254Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 208us result status StatusSuccess 2025-04-17T18:02:49.972647Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueTest::TestReadPartitionByGroupId [GOOD] >> TPersQueueTest::SrcIdCompatibility |84.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllPrimitiveTypes-UUID [GOOD] Test command err: 2025-04-17T18:02:05.016578Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343949564425510:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:05.016883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpWX5kry/pdisk_1.dat 2025-04-17T18:02:06.116644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:06.118271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:06.156034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:06.156156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:06.167212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18782, node 1 2025-04-17T18:02:07.173388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:07.173410Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:07.173417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:07.173613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:09.291686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
Backup "/Root" to "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/"Create temporary directory "/Root/~backup_20250417T180209" in databaseProcess "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/dir"Create directory "/Root/~backup_20250417T180209/dir" in databaseWrite ACL into "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/dir/permissions.pb"Remove directory "/Root/~backup_20250417T180209/dir"2025-04-17T18:02:09.750382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715661:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250417T180209" in database2025-04-17T18:02:09.824052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-17T18:02:09.877992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715663:0, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/dir"Restore empty directory "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/dir" to "/Root/dir"Restore ACL "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpob0Qx0/dir/permissions.pb"2025-04-17T18:02:10.015942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:10.016079Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343949564425510:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:10.016299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Restore completed successfully 2025-04-17T18:02:11.851108Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343974243093985:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:11.851196Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpQtGv8q/pdisk_1.dat 2025-04-17T18:02:11.960341Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:11.994348Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:11.994423Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:11.996686Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22217, node 4 2025-04-17T18:02:12.036576Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:12.036600Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:12.036618Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:12.036771Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:12.316621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:14.665530Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343987127996755:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:14.665632Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:14.961609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:15.154375Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343991422964242:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.154458Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.324230Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7494343991422964426:2359] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T18:02:15.369902Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343991422964526:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.369986Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.428353Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7494343991422964705:2380] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T18:02:15.454420Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494343991422964810:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.454507Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:15.524330Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7494343991422964985:2401] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:8:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown ... /build_root/6873/000481/r3tmp/tmpYnrvuz/YsonTable/data_00.csv"Drop table "/Root/~backup_20250417T180242/YsonTable"2025-04-17T18:02:42.451883Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250417T180242" in database2025-04-17T18:02:42.459115Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/" to "/Root"Resolved db base path: "/Root"2025-04-17T18:02:42.546418Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037888 not found Restore folder "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/YsonTable"Read scheme from "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/YsonTable/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/YsonTable" to "/Root/YsonTable"2025-04-17T18:02:42.559925Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Created "/Root/YsonTable"Read data from "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/YsonTable/data_00.csv"2025-04-17T18:02:42.660927Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2enn169ktgnynswh7h69c7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=ODc1M2QwNWEtNzViNjI0MzItNTY2MTFiN2UtMjlhMGVlYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/YsonTable" to "/Root/YsonTable"Read ACL from "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpYnrvuz/YsonTable/permissions.pb"2025-04-17T18:02:42.676021Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T18:02:42.788558Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2enn3w9etrqh5qxrjkfhgq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=ZDc5NzhiZDYtMTQxOTJmMzktNTlhZjg0MWUtMjNhNjc0N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:02:44.752944Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7494344114839848401:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:44.788722Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000481/r3tmp/tmpmoksyx/pdisk_1.dat 2025-04-17T18:02:45.036326Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:45.088336Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:45.088450Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:45.100084Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16683, node 22 2025-04-17T18:02:45.214988Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:45.215019Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:45.215029Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:45.215234Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.429508Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.444984Z node 22 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:48.263670Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7494344132019718551:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:48.263696Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7494344132019718540:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:48.263792Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:48.268193Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:02:48.292324Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [22:7494344132019718554:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:02:48.363283Z node 22 :TX_PROXY ERROR: Actor# [22:7494344132019718631:2678] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:48.387326Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.542379Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2entry8d925g1asdejfcjv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=NGRiMDc4NWQtMWM2MDhlMDctZmJkN2M1MTYtZGRiZmY3MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:48.672389Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2entvf727sj1rsqgdffxrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=NGRiMDc4NWQtMWM2MDhlMDctZmJkN2M1MTYtZGRiZmY3MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/"Create temporary directory "/Root/~backup_20250417T180248" in databaseProcess "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable"Copy tables: { src: "/Root/UuidTable", dst: "/Root/~backup_20250417T180248/UuidTable" }Backup table "/Root/~backup_20250417T180248/UuidTable" to "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable"Describe table "/Root/~backup_20250417T180248/UuidTable"Write scheme into "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable/scheme.pb"Describe table "/Root/UuidTable"Write ACL into "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable/permissions.pb"Read table "/Root/~backup_20250417T180248/UuidTable"Write data into "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable/data_00.csv"Drop table "/Root/~backup_20250417T180248/UuidTable"Remove temporary directory "/Root/~backup_20250417T180248" in database2025-04-17T18:02:49.017380Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037889 not found 2025-04-17T18:02:49.026573Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/" to "/Root"Resolved db base path: "/Root"2025-04-17T18:02:49.104979Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037888 not found Restore folder "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable"Read scheme from "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable" to "/Root/UuidTable"2025-04-17T18:02:49.125010Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Created "/Root/UuidTable"Read data from "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable/data_00.csv"2025-04-17T18:02:49.279723Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2envfj1wmwqxsadnn6m4xz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=ZTJmOTIxMy1jMTE4OWMwMS0zN2I2MDMzZS1jYjY5NzliNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable" to "/Root/UuidTable"Read ACL from "/home/runner/.ya/build/build_root/6873/000481/r3tmp/tmp0v1uoS/UuidTable/permissions.pb"2025-04-17T18:02:49.303159Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T18:02:49.409616Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2envk2baq4tsq7ec4byaqy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=NGRiMDc4NWQtMWM2MDhlMDctZmJkN2M1MTYtZGRiZmY3MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:02:45.193662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:02:45.193850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.193893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:02:45.193927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:02:45.195387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:02:45.195445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:02:45.195528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:02:45.195621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:02:45.200748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:02:45.294087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:02:45.294135Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-17T18:02:45.320252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:02:45.320571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:02:45.320686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:02:45.340061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:02:45.340331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:02:45.348442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.349779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:02:45.359645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.365175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:45.365251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.368507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:02:45.368579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.368636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:02:45.368786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.377191Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:02:45.493576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:02:45.495499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.496653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:02:45.497731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:02:45.497779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.503652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.503773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:02:45.503933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.503992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:02:45.504026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:02:45.504066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:02:45.506990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.507057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:02:45.507096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:02:45.511262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.511325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.511359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.511414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.521471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:02:45.523399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:02:45.523566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:02:45.524464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:02:45.524573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:02:45.524620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.524944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:02:45.524995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:02:45.525143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:02:45.525216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:02:45.527029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:02:45.527072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:02:45.527254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:02:45.527297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:02:45.527636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:02:45.527688Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:02:45.527780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.527825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.527862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:02:45.527900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.527972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:02:45.528011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:02:45.528055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:02:45.528079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:02:45.528140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:02:45.528182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:02:45.528215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:02:45.529777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.529859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:02:45.529894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.589424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2053:3916], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.596602Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2054:3917], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.603659Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2055:3918], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.610599Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2056:3919], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.617626Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2057:3920], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.624729Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2058:3921], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.631797Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2059:3922], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.638843Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3923], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.645672Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3924], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.652475Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 268830210, Sender [1:2062:3925], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.659188Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3926], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.666058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3927], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.673117Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3928], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.680166Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3929], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.687183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3930], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.694546Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3931], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.701965Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3932], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.709434Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2070:3933], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.717039Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2071:3934], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult 
\'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.724470Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2072:3935], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.732182Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2073:3936], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.739679Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2074:3937], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-17T18:02:50.747060Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2075:3938], Recipient [1:752:2641]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } |84.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |84.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |84.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |84.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2025-04-17T18:02:43.671076Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344112737770976:2115];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.671397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d20/r3tmp/tmpq4Luuh/pdisk_1.dat 2025-04-17T18:02:44.169102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.171823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.171916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.177035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31338, node 1 2025-04-17T18:02:44.326453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001d20/r3tmp/yandexrsaPbC.tmp 2025-04-17T18:02:44.326474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001d20/r3tmp/yandexrsaPbC.tmp 2025-04-17T18:02:44.326627Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001d20/r3tmp/yandexrsaPbC.tmp 2025-04-17T18:02:44.326747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15408 PQClient connected to localhost:31338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:44.874719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:44.907647Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:44.929189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:02:44.948855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T18:02:46.898797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344125622673543:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.898902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.899357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344125622673555:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.904176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.915026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344125622673557:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:02:47.101512Z node 1 :TX_PROXY ERROR: Actor# [1:7494344129917640918:2391] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.350447Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344129917640933:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.388438Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGU0ODM1ZGUtYThkNjliZTctMjAzNmQ4OTktZmYwYWI3MzU=, ActorId: [1:7494344125622673540:2330], ActorState: ExecuteState, TraceId: 01js2ens6z6j9kdsgb916hj0px, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.390486Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:47.762547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:47.972362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.079388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.083293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344112737770976:2115];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.083535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:49.233162Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2entfrfjgs59vqxvsnsev7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmEyNDgyYzctNGNiZTMwZjYtOTJkZmJiYTctMjQwZTM3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:02:49.772946Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 889 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2025-04-17T18:02:43.674155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344113949296067:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.674728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d1d/r3tmp/tmpBNucsl/pdisk_1.dat 2025-04-17T18:02:44.159780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.159898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.162870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:44.185493Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22457, node 1 2025-04-17T18:02:44.318145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001d1d/r3tmp/yandexJnov9U.tmp 2025-04-17T18:02:44.318194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001d1d/r3tmp/yandexJnov9U.tmp 2025-04-17T18:02:44.318359Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001d1d/r3tmp/yandexJnov9U.tmp 2025-04-17T18:02:44.318479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9047 PQClient connected to localhost:22457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:44.868104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:44.908546Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:44.919357Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:46.775638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126834198688:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.775640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126834198696:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.775757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.781871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.809119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344126834198702:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:02:46.879699Z node 1 :TX_PROXY ERROR: Actor# [1:7494344126834198767:2391] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.350370Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344126834198782:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.379949Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjhiNDhhYTUtYjRmMTJhODYtODljMDc0YjktOWQwMjhhMDk=, ActorId: [1:7494344126834198685:2332], ActorState: ExecuteState, TraceId: 01js2ens3k3wvetqx06ph5k0r9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.381667Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:47.763217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:47.983877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.079890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.095491Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344113949296067:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.095842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:49.232966Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2entegc5d9h01g6c85qsdr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY2ZmI4NTktNzNjMTA0MGEtNWE3Njg1NjAtYzY4ZDk4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:02:49.772989Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 881 } >> BackupRestore::TestAllPrimitiveTypes-DATE [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DATETIME >> KqpBatchUpdate::ManyPartitions_3 [GOOD] |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |84.6%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2025-04-17T18:02:43.666698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344113768952974:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.666766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cd1/r3tmp/tmp5luuYx/pdisk_1.dat 2025-04-17T18:02:44.100413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.100555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.104537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17028, node 1 2025-04-17T18:02:44.161069Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:02:44.161096Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:02:44.162580Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.317117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:44.317147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:44.317156Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:44.317279Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9714 PQClient connected to localhost:17028 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:44.875396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:44.930484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:44.947860Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:44.976050Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T18:02:46.729658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126653855594:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.729694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126653855603:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.729763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.733246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.744414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344126653855609:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:02:46.807085Z node 1 :TX_PROXY ERROR: Actor# [1:7494344126653855674:2391] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.350143Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344126653855682:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.379830Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM1ZTk4ZjctNTY3MzIyMjItMTZmOTgwNDYtZDEwYzhjNzU=, ActorId: [1:7494344126653855590:2331], ActorState: ExecuteState, TraceId: 01js2ens275nk59xvmgs1svpt2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.381587Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:47.762511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:47.972620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.082923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.096348Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344113768952974:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.096690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:49.232663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2enteh8jsr7z1wgdf4d9jb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJlMWZjMDItMmFmMzU5MzQtYTQ1YzE0NGYtNjM2Y2YzMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:02:49.772989Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 859 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 9815, MsgBus: 15628 2025-04-17T17:58:18.203064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342975901159690:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:18.203117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c52/r3tmp/tmpwUGFhu/pdisk_1.dat 2025-04-17T17:58:19.430530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:19.433732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:19.433838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:19.434364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:58:19.457786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9815, node 1 2025-04-17T17:58:19.782740Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:19.782760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:19.782766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:19.782867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15628 TClient is connected to server localhost:15628 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:21.303734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:21.340807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:21.665802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:21.989930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:22.157793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:23.203097Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342975901159690:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:58:23.203168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:58:25.649202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343005965932326:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:25.649323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:26.193586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T17:58:26.265315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T17:58:26.344018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T17:58:26.392379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T17:58:26.451809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T17:58:26.514110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T17:58:26.592125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T17:58:26.698907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343010260900298:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:26.698961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:26.699370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343010260900303:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:58:26.703554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T17:58:26.728721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343010260900305:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T17:58:26.815596Z node 1 :TX_PROXY ERROR: Actor# [1:7494343010260900361:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:58:28.032347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:34.422430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T17:58:34.422470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:44.050052Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2edyat8th6eg5v14p8q9f7", SessionId: ydb://session/3?node_id=1&id=ODExNjliZTUtM2IyY2JiOWEtM2JhMWMxOTItYzA1MWYyODE=, Slow query, duration: 14.071400s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n BATCH UPDATE ManyShardsTable\n SET Data = -10;\n ", parameters: 0b 2025-04-17T17:58:58.706306Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2eecnrbjrnj6htb6er65bz", SessionId: ydb://session/3?node_id=1&id=ODExNjliZTUtM2IyY2JiOWEtM2JhMWMxOTItYzA1MWYyODE=, Slow query, duration: 14.040953s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n BATCH UPDATE ManyShardsTable\n SET Data = 2\n WHERE Key >= 2000;\n ", parameters: 0b Trying to start YDB, gRPC: 4407, MsgBus: 23660 2025-04-17T17:59:00.710744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343153885897352:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:59:00.722283Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c52/r3tmp/tmpH5rPkV/pdisk_1.dat 2025-04-17T17:59:01.075419Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:59:01.111353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:59:01.111459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:59:01.112811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4407, node 2 2025-04-17T17:59:01.370716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:59:01.370740Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:59:01.370748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:59:01.370880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23660 TClient is connected to server localhost:23660 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Childr ... :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494344039189775951:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:31.583550Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:31.603809Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:31.644013Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:31.682164Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:02:31.725032Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:02:31.795871Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:02:31.880085Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494344060664614884:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:31.880193Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:31.880198Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494344060664614889:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:31.886053Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:02:31.900676Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494344060664614891:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:02:31.972052Z node 11 :TX_PROXY ERROR: Actor# [11:7494344060664614948:3555] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:33.278836Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 62364, MsgBus: 24660 2025-04-17T18:02:39.448571Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494344094386038342:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:39.448633Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c52/r3tmp/tmpZRmtLi/pdisk_1.dat 2025-04-17T18:02:39.607655Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:39.623143Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:39.623248Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:39.624659Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62364, node 12 2025-04-17T18:02:39.677240Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:39.677266Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:39.677275Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:39.677416Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24660 TClient is connected to server localhost:24660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:40.312050Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:02:40.320594Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:40.393791Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:40.609346Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:40.685829Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:43.450624Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494344111565909328:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:43.450747Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:43.531061Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:02:43.579710Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:02:43.628389Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:02:43.679560Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:02:43.739558Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:02:43.789596Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:02:43.843526Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:02:43.953543Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494344111565909994:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:43.953721Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:43.953951Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494344111565909999:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:43.960936Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:02:43.988822Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494344111565910001:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:02:44.089398Z node 12 :TX_PROXY ERROR: Actor# [12:7494344115860877354:3555] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:02:44.452156Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7494344094386038342:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:44.452417Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:02:45.850684Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
waiting...
>> BackupRestoreS3::TestAllPrimitiveTypes-DOUBLE [GOOD]
>> BackupRestoreS3::TestAllPrimitiveTypes-DATE
>> TPQCDTest::TestDiscoverClusters [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest
>> TPQCDTest::TestDiscoverClusters [GOOD]
Test command err:
2025-04-17T18:02:43.666704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344114560866345:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:43.666766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d26/r3tmp/tmpXbVWDw/pdisk_1.dat
2025-04-17T18:02:44.108097Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:44.141820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:44.141931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:44.147987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13095, node 1
2025-04-17T18:02:44.329255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001d26/r3tmp/yandexdWBAQ8.tmp
2025-04-17T18:02:44.329285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001d26/r3tmp/yandexdWBAQ8.tmp
2025-04-17T18:02:44.329469Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001d26/r3tmp/yandexdWBAQ8.tmp
2025-04-17T18:02:44.329593Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7021
PQClient connected to localhost:13095
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:44.921701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:44.954650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:44.970933Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:46.774108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344127445768966:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.774244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.774282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344127445768971:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.777442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-04-17T18:02:46.793073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344127445768980:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-04-17T18:02:46.856403Z node 1 :TX_PROXY ERROR: Actor# [1:7494344127445769045:2391] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:02:47.350598Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344127445769060:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.379974Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzZkMzQ2ZS04M2ViYTFiYi1iZTFlMjM5MC1kODM1OGNl, ActorId: [1:7494344127445768963:2332], ActorState: ExecuteState, TraceId: 01js2ens320gc6b39qp6e32yy6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.382111Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:47.765083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:47.972249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.079680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.095099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344114560866345:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.095367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:49.233392Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2enteh1avb9xrzq2j8zr0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMyMjhiZTYtNDViN2YyNDQtZDBkZGJlYS1kMTVlYzRhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:02:49.772942Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 875 } 2025-04-17T18:02:51.324515Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2enxbdcy3qd746qtar41h1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk0OGQwNjQtYTI2YzE5M2QtNTZjYjRhMTctMTQ3ZGQxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:52.425526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2enygt50t09ja547wysjr1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjcyMTc5ZjAtNzBmZGQ3ZWUtYjU4NjUzZjAtMjNmMGQxY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:53.637214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2enzp45b1vx9s7ms8zw5y5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZkNDc1Y2EtMzIzNzU5N2MtZGY0M2E5NTQtYTYwMDZkNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:54.804764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2ep0twf76a7qtnadqkchws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjMzZTAxNzctYTIwMDA0YTctNDc5YjYxYTQtMzNhYTYxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> KqpInplaceUpdate::SingleRowIf+UseSink >> KqpEffects::InsertAbort_Literal_Success >> KqpWrite::UpsertNullKey >> KqpEffects::UpdateOn_Params >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> KqpImmediateEffects::Replace >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpImmediateEffects::UpdateAfterInsert >> KqpImmediateEffects::Insert >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpImmediateEffects::WriteThenReadWithCommit >> KqpWrite::ProjectReplace+UseSink >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpInplaceUpdate::SingleRowStr+UseSink >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink >> KqpWrite::Insert >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> KqpImmediateEffects::Delete >> KqpImmediateEffects::DeleteAfterUpsert >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KqpEffects::InsertRevert_Literal_Success >> KqpInplaceUpdate::SingleRowArithm+UseSink >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink |84.6%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::ImmediateUpdate >> BackupRestore::TestAllPrimitiveTypes-DATETIME [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DATE32 >> KqpBatchDelete::ManyPartitions_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows 2025-04-17 18:02:48,427 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 18:02:48,560 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 95862 46.1M 42.5M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/6873/00114a/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_too 96533 1.5G 1.5G 1.4G └─ ydb-core-tx-datashard-ut_read_iterator --trace-path-append /home/runner/.ya/build/build_root/6873/00114a/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/test Test command err: 2025-04-17T17:52:54.378562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T17:52:54.379192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:52:54.379251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00114a/r3tmp/tmpl8TpX6/pdisk_1.dat 2025-04-17T17:52:54.804432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T17:52:54.853123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:52:54.900062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:52:54.900213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:52:54.912682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:52:54.998996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T17:52:55.045603Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T17:52:55.047128Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T17:52:55.047653Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T17:52:55.047903Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T17:52:55.096521Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T17:52:55.097233Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T17:52:55.097353Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T17:52:55.099168Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T17:52:55.099254Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T17:52:55.099326Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T17:52:55.099626Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T17:52:55.099743Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T17:52:55.099805Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T17:52:55.111002Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T17:52:55.146613Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T17:52:55.146812Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T17:52:55.146940Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T17:52:55.146977Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T17:52:55.147013Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T17:52:55.147045Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:55.147278Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.147336Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.147677Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T17:52:55.147785Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T17:52:55.147893Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:55.147942Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T17:52:55.147994Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T17:52:55.148034Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T17:52:55.148075Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T17:52:55.148107Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T17:52:55.148148Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T17:52:55.148257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.148290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.148334Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T17:52:55.148709Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T17:52:55.148748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T17:52:55.148853Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T17:52:55.149094Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T17:52:55.149163Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T17:52:55.149251Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T17:52:55.149300Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T17:52:55.149337Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T17:52:55.149368Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T17:52:55.149398Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:55.149665Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T17:52:55.149700Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T17:52:55.149746Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T17:52:55.149790Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:55.149831Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T17:52:55.149857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T17:52:55.149887Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T17:52:55.149916Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:55.149939Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T17:52:55.151449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T17:52:55.151494Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T17:52:55.162271Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T17:52:55.162347Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T17:52:55.162381Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T17:52:55.162436Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T17:52:55.162527Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T17:52:55.319945Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.320009Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T17:52:55.320064Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T17:52:55.320483Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T17:52:55.320519Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T17:52:55.322365Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T17:52:55.322424Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T17:52:55.322472Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T17:52:55.322509Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T17:52:55.326842Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T17:52:55.326921Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T17:52:55.327805Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.327868Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T17:52:55.327924Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T17:52:5 ... LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL\");", parameters: 0b 2025-04-17T18:01:29.724384Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2ek5ey4wkd4ft9f2d15jec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=MjgyNjcyODMtMzE4NTZjOTAtMjQ2MmJmMmMtNTk4MmI1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:38.697668Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2ekdyp1ndw5js6yrnd4zxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=MjM2YTljNjUtNWY0YzEwYmItNDRmNDIwZTAtNGFiNWZlZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:46.575722Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2ekpq481xjmvg9886y6c6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=MmNkNGRiNGMtNDA0YWU3MjktMjMwZjZmN2YtNWIyNzA3ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:01:54.915488Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01js2ekycwc03p24g7r52a1p3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=Y2RiYjBmOTAtZTg1MmQwZjQtNzUyMDA1NjAtY2RkY2FjMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:01.892152Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2em6jc6ap1ja7pkv3nnesy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YTE3OGEzNjEtNWFmZDliYjktZWY5ZDQ4NDQtYzM1ZGJjOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:09.789533Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2emdbjbqq5tz6ynyte6v5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=NjhjMDdhMzgtNWY3MjdkYzMtOWQ0OWVmZmItNGRmNzc0MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:18.701847Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2emn6zf1v9q76mny13da4q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=Y2E1MjUwYS02ZDZmNmZmZC1lZjg5NjY4OC04MTYyZGMxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:26.204837Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01js2emxvh097xgtcv5m7713dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YzA0Zjc2MDItYTcxNGQ3ZmEtMjcwMjNjZDUtMTIxYTg4ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-04-17T18:02:28.848968Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:02:28.849129Z node 11 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 20972828 RowCount: 10 IndexSize: 0 InMemSize: 4195104 LastAccessTime: 1516 LastUpdateTime: 1516 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 12190 Memory: 4320316 Storage: 16778723 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 11 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... 
waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 20972828 RowCount: 10 IndexSize: 0 InMemSize: 4195104 LastAccessTime: 1516 LastUpdateTime: 1516 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4805 Memory: 126364 Storage: 20973341 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 11 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T18:02:31.989437Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2enafhfx5dv6gb39y5fw68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YjNlZWI2MzgtYjc3Yjk1OTEtNDNkMWQ0ODEtMjg4YWQzOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:02:36.622761Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T18:02:36.623012Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:02:36.623118Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00114a/r3tmp/tmpzk4aLJ/pdisk_1.dat
2025-04-17T18:02:36.916362Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T18:02:36.946333Z node 12 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:36.984100Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:36.984261Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:36.995825Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:02:37.088071Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T18:02:37.358648Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:37.358777Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:37.358885Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:37.364188Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T18:02:37.521031Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T18:02:37.555438Z node 12 :TX_PROXY ERROR: Actor# [12:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:02:40.733850Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2enfxc6hvfbsq8dz9pgsgz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=ZmM3NDlkOTYtYmI4YmJlMzktNWQyYWM3MzctYzZkMzQzODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T18:02:44.054099Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2enk8j6dyk43b7kgk39c5e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=ZTllZDY4NjktYjJiMzkzYmEtOGNjMjU5NjItZmE0NzgzMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T18:02:48.032369Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2enpgx1xpw4en09zc5tat4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MjNiNTE2NDQtZDdmMDY2ODctZTlkNzk5NjktYmE1ZDk0YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/00114a/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/00114a/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest
>> KqpBatchDelete::ManyPartitions_1 [GOOD]
Test command err:
Trying to start YDB, gRPC: 30579, MsgBus: 14178
2025-04-17T17:58:34.046500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343042390009376:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:58:34.046564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b86/r3tmp/tmpOwPoB8/pdisk_1.dat
2025-04-17T17:58:34.557880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:58:34.557975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:58:34.560348Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:58:34.564124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30579, node 1 2025-04-17T17:58:34.706690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T17:58:34.706711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T17:58:34.706718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T17:58:34.706813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14178 TClient is connected to server localhost:14178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:35.251624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:35.272017Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:58:35.287200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:35.493739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:35.848595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:35.970806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:37.784503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343055274913035:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:58:37.784612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:58:38.270301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T17:58:38.305543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T17:58:38.344854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T17:58:38.423677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T17:58:38.472801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T17:58:38.523307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T17:58:38.585922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T17:58:38.669228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343059569880995:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:58:38.669357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:58:38.671752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343059569881000:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T17:58:38.676550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T17:58:38.693392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343059569881002:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T17:58:38.772372Z node 1 :TX_PROXY ERROR: Actor# [1:7494343059569881060:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T17:58:39.048048Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343042390009376:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:58:39.048146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T17:58:39.992759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
waiting...
Trying to start YDB, gRPC: 28561, MsgBus: 32474
2025-04-17T17:58:42.604975Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343078860743004:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T17:58:42.605135Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b86/r3tmp/tmpv0HAZo/pdisk_1.dat
2025-04-17T17:58:42.796297Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T17:58:42.833126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T17:58:42.833205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T17:58:42.835181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28561, node 2
2025-04-17T17:58:42.930637Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T17:58:42.930658Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T17:58:42.930665Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T17:58:42.930769Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:32474
TClient is connected to server localhost:32474
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:58:43.399819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:43.468382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T17:58:43.564412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.179951Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.224297Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.265196Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.306758Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.383119Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.429465Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:02:42.519476Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7494344109710749150:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:42.519561Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7494344109710749155:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:42.519606Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:42.523925Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:02:42.534595Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [19:7494344109710749157:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:02:42.594912Z node 19 :TX_PROXY ERROR: Actor# [19:7494344109710749213:3557] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:44.342887Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 7330, MsgBus: 62191 2025-04-17T18:02:47.945029Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7494344131418355649:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:47.945230Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000b86/r3tmp/tmpXWIGjY/pdisk_1.dat 2025-04-17T18:02:48.157057Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:48.162247Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:48.162397Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:48.166332Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7330, node 20 2025-04-17T18:02:48.241142Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:48.241175Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:48.241195Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:48.241439Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62191 TClient is connected to server localhost:62191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:49.329578Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
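The recurring WARN/ERROR cycle around PoolId `default` in these records is the workload manager lazily bootstrapping its default resource pool: a TPoolFetcherActor looks up `/Root/.metadata/workload_manager/pools/default` and gets NOT_FOUND, a TPoolCreatorActor proposes ESchemeOpCreateResourcePool and schedules a retry once "Transaction ... completed, doublechecking", and a racing creator's "path exist, request accepts it" reply is accepted as success rather than treated as a failure. Below is a minimal sketch of that idempotent fetch-or-create pattern; all identifiers (PoolClient, Status, EnsureDefaultPool) are hypothetical illustrations, not the actual YDB actors.

// Sketch of the fetch -> create -> doublecheck cycle logged above.
// All names here are illustrative; the real flow is driven by
// TPoolFetcherActor / TPoolCreatorActor inside YDB.
#include <iostream>
#include <string>

enum class Status { Ok, NotFound, AlreadyExists };

struct PoolClient {
    bool exists = false;
    Status Fetch(const std::string&) { return exists ? Status::Ok : Status::NotFound; }
    Status Create(const std::string&) {
        if (exists) return Status::AlreadyExists;   // "path exist, request accepts it"
        exists = true;
        return Status::Ok;
    }
};

Status EnsureDefaultPool(PoolClient& client, const std::string& path) {
    if (client.Fetch(path) == Status::Ok) return Status::Ok;    // pool already there
    if (client.Create(path) == Status::AlreadyExists) {
        // Lost the creation race to another actor: treated as success, not an error.
    }
    return client.Fetch(path);                                   // the "doublechecking" re-read
}

int main() {
    PoolClient client;
    Status st = EnsureDefaultPool(client, "/Root/.metadata/workload_manager/pools/default");
    std::cout << (st == Status::Ok ? "pool ready" : "pool missing") << "\n";
}

Every fresh test database goes through this cycle once, which is consistent with the surrounding tests still reporting [GOOD]: the NOT_FOUND warnings and the single "path exist" TX_PROXY error read as expected bootstrap noise rather than failures.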
2025-04-17T18:02:49.350446Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:49.437336Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:49.681688Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:49.787735Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:52.945480Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7494344131418355649:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:52.945584Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:53.725527Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494344157188161192:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:53.725658Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:53.807820Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:02:53.850273Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:02:53.894394Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:53.936613Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:53.983787Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:02:54.026774Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:02:54.068897Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:02:54.147403Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494344161483129149:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:54.147418Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494344161483129154:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:54.147516Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:54.152988Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:02:54.164191Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7494344161483129156:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:02:54.252716Z node 20 :TX_PROXY ERROR: Actor# [20:7494344161483129213:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:55.577831Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... >> BackupRestoreS3::TestAllPrimitiveTypes-DATE [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME |84.6%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink |84.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpInplaceUpdate::SingleRowSimple+UseSink >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> TopicAutoscaling::CommitTopPast [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> TPersQueueTest::TestWriteSessionsConflicts [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::ProjectReplace-UseSink >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpInplaceUpdate::BigRow >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> KqpWrite::UpsertNullKey [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::DeletePkPrefixWithIndex >> KqpImmediateEffects::ManyFlushes >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> KqpEffects::UpdateOn_Literal >> 
KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> BackupRestore::TestAllPrimitiveTypes-DATE32 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DATETIME64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 17244, MsgBus: 18610 2025-04-17T18:02:57.703406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344170733680178:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.703486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3c/r3tmp/tmp0mvmkV/pdisk_1.dat 2025-04-17T18:02:58.512834Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.528388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.528754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.532084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17244, node 1 2025-04-17T18:02:58.758580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.758616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.758637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.758811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18610 TClient is connected to server localhost:18610 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.801795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.831093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.856875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.139066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.334649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:00.469122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.216956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344192208518442:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.217076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.706867Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344170733680178:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.706917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.714040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.745885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.778134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.851675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.899403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.950414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.040288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344196503486406:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.040350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.040477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344196503486411:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.044118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.056241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344196503486413:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.119957Z node 1 :TX_PROXY ERROR: Actor# [1:7494344196503486471:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK >> TPersQueueTest::SrcIdCompatibility [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::Interactive >> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-DATE32 >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink >> TPersQueueTest::DefaultMeteringMode [GOOD] >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpWrite::ProjectReplace-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpEffects::UpdateOn_Select [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> KqpInplaceUpdate::BigRow [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] >> KqpImmediateEffects::ManyFlushes [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DefaultMeteringMode [GOOD] Test command err: 2025-04-17T17:57:38.962799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342800832370660:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:38.962901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:38.999277Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342800506807256:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:38.999319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005b4/r3tmp/tmpiNCJZL/pdisk_1.dat 2025-04-17T17:57:39.298619Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:39.303461Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:39.972157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:39.972255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T17:57:39.997033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:39.997104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:40.008154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:40.032668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:40.038142Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:40.049351Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:40.055453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:40.079688Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17602, node 1 2025-04-17T17:57:40.139172Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:57:40.139199Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T17:57:40.401444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0005b4/r3tmp/yandex2LwU69.tmp 2025-04-17T17:57:40.401475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0005b4/r3tmp/yandex2LwU69.tmp 2025-04-17T17:57:40.463576Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0005b4/r3tmp/yandex2LwU69.tmp 2025-04-17T17:57:40.464038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:40.519150Z INFO: TTestServer started on Port 19667 GrpcPort 17602 TClient is connected to server localhost:19667 PQClient connected to localhost:17602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:41.223685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T17:57:41.360031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T17:57:43.968592Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494342800832370660:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:43.968679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:44.005364Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494342800506807256:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:44.005420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T17:57:45.182816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342830571578732:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:45.182884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494342830571578743:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:45.182937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T17:57:45.189049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-17T17:57:45.227096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494342830571578746:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-17T17:57:45.291923Z node 2 :TX_PROXY ERROR: Actor# [2:7494342830571578773:2183] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T17:57:45.817115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T17:57:45.824977Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494342830571578780:2325], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:45.826640Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQwYzM1OTAtMzllYTViYWYtYTU4N2VkMjMtY2ZlZjM2YWQ=, ActorId: [2:7494342830571578730:2316], ActorState: ExecuteState, TraceId: 01js2ecjjs7n7wwwwvk082vs52, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:45.826848Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494342830897142931:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T17:57:45.828091Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTMwNGJjYy01ZmMyN2FhNy00ZGE5NmZkZi03Yzg5MDM1Yg==, ActorId: [1:7494342830897142881:2340], ActorState: ExecuteState, TraceId: 01js2ecjqz4tvc8ka78ayc4r59, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T17:57:45.834808Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:45.835382Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T17:57:45.935547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T17:57:46.197535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T17:57:46.607324Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2ecks1fbnskx6fnv9qsjy0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U4ZjM5NDItNjYyYWE1MDYtZWU3MzhmNzItYTdjMzA3ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494342835192110692:3107] === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic with 1 partitions CallPersQueueGRPC request to localhost:17602 ... 
ute txs with state CALCULATING 2025-04-17T18:03:07.717788Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State CALCULATING 2025-04-17T18:03:07.717827Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State CALCULATING FrontTxId 281474976715672 2025-04-17T18:03:07.717862Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-17T18:03:07.717902Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState CALCULATED 2025-04-17T18:03:07.717942Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from CALCULATING to CALCULATED 2025-04-17T18:03:07.718546Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] save tx TxId: 281474976715672 State: CALCULATED MinStep: 1744912987615 MaxStep: 18446744073709551615 Step: 1744912987748 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7494344161341083651 RawX2: 124554053780 } Partitions { Partition { PartitionId: 0 } } 2025-04-17T18:03:07.718774Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:03:07.728215Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:03:07.728275Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-04-17T18:03:07.728313Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State CALCULATED 2025-04-17T18:03:07.728359Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State CALCULATED FrontTxId 281474976715672 2025-04-17T18:03:07.728402Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState WAIT_RS 2025-04-17T18:03:07.728445Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from CALCULATED to WAIT_RS 2025-04-17T18:03:07.728527Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
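The PERSQUEUE records here and immediately below walk a single tablet transaction through its full state machine: CALCULATING -> CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING, with a TEvKeyValue (WRITE_TX_COOKIE) write persisting the transaction before each move. A minimal sketch of that progression follows; the enum and Advance() are illustrative only, not the actual YDB implementation.

// Sketch of the transaction state progression visible in this trace.
// Each transition fires once the gate the log reports for it holds, e.g.
// "Received 1, Expected 1" (calculation done), "HaveParticipantsDecision 1"
// (readsets resolved), "PredicateAcks: 0/0" plus "HaveAllRecipientsReceive 1"
// (all acks in, safe to delete the tx key).
#include <cassert>

enum class ETxState {
    Calculating, Calculated, WaitRs, Executing, Executed, WaitRsAcks, Deleting
};

ETxState Advance(ETxState s) {
    switch (s) {
        case ETxState::Calculating: return ETxState::Calculated;
        case ETxState::Calculated:  return ETxState::WaitRs;
        case ETxState::WaitRs:      return ETxState::Executing;
        case ETxState::Executing:   return ETxState::Executed;
        case ETxState::Executed:    return ETxState::WaitRsAcks;
        case ETxState::WaitRsAcks:  return ETxState::Deleting;
        case ETxState::Deleting:    return ETxState::Deleting;   // terminal
    }
    return s;   // unreachable; keeps compilers quiet
}

int main() {
    ETxState s = ETxState::Calculating;
    for (int i = 0; i < 6; ++i) s = Advance(s);   // mirrors the log's six moves
    assert(s == ETxState::Deleting);
}

With zero remote participants, WAIT_RS closes immediately ("Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders."), which is why the trace below moves straight on to EXECUTING.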
2025-04-17T18:03:07.728595Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-04-17T18:03:07.728710Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState EXECUTING 2025-04-17T18:03:07.728759Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from WAIT_RS to EXECUTING 2025-04-17T18:03:07.728791Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-04-17T18:03:07.728931Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1744912987748, TxId 281474976715672 2025-04-17T18:03:07.729442Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:03:07.729507Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:03:07.729553Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:03:07.729602Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:03:07.729620Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-04-17T18:03:07.729639Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-04-17T18:03:07.729687Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:03:07.729734Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:03:07.729778Z node 30 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:03:07.733943Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:03:07.734130Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1744912987748, TxId 281474976715672, Partition 0 2025-04-17T18:03:07.734168Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-04-17T18:03:07.734244Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State EXECUTING 2025-04-17T18:03:07.734286Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State EXECUTING FrontTxId 281474976715672 2025-04-17T18:03:07.734317Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-17T18:03:07.734365Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976715672 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-17T18:03:07.734417Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976715672 2025-04-17T18:03:07.734862Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: 
"test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-04-17T18:03:07.734971Z node 30 :PERSQUEUE NOTICE: [PQ: 72075186224037892] metering mode METERING_MODE_REQUEST_UNITS 2025-04-17T18:03:07.735128Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976715672 2025-04-17T18:03:07.735166Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState EXECUTED 2025-04-17T18:03:07.735214Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from EXECUTING to EXECUTED 2025-04-17T18:03:07.735723Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] save tx TxId: 281474976715672 State: EXECUTED MinStep: 1744912987615 MaxStep: 18446744073709551615 Step: 1744912987748 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7494344161341083651 RawX2: 124554053780 } Partitions { Partition { PartitionId: 0 } } 2025-04-17T18:03:07.736055Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:03:07.744190Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:03:07.744241Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-17T18:03:07.744279Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State EXECUTED 2025-04-17T18:03:07.744332Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State EXECUTED FrontTxId 281474976715672 2025-04-17T18:03:07.744383Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T18:03:07.744424Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState WAIT_RS_ACKS 2025-04-17T18:03:07.744464Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 
281474976715672 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T18:03:07.744509Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-04-17T18:03:07.744529Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T18:03:07.744556Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-04-17T18:03:07.744586Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715672 to the list for deletion 2025-04-17T18:03:07.744623Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState DELETING 2025-04-17T18:03:07.744664Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715672 2025-04-17T18:03:07.744760Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:03:07.751504Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:03:07.751551Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-17T18:03:07.751589Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State DELETING 2025-04-17T18:03:07.751623Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715672 2025-04-17T18:03:07.760871Z node 29 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-17T18:03:07.760975Z node 29 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/PQ/ttt" 2025-04-17T18:03:07.761098Z node 29 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ/ttt >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpImmediateEffects::DeleteAfterInsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 15361, MsgBus: 17192 2025-04-17T18:02:57.703382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344172755545279:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.703441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a49/r3tmp/tmponYVE8/pdisk_1.dat 2025-04-17T18:02:58.578010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.578118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.583419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:58.614653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15361, node 1 2025-04-17T18:02:58.754651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.754687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.754715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.754803Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17192 TClient is connected to server localhost:17192 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.828544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.850806Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.876018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:00.148748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:00.435461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.561800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.239904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344194230383526:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.240045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.638268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.703802Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344172755545279:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.703855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.715023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.759784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.795787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.830484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.880022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.920506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.006548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198525351487:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.006625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.006773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198525351492:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.010707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.026572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344198525351494:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.097601Z node 1 :TX_PROXY ERROR: Actor# [1:7494344198525351551:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.243468Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:03:04.252581Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:04.252724Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:04.252925Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344202820319210:2516], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7494344202820319186:2516]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7494344202820319210:2516].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:03:04.253347Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344202820319203:2516], SessionActorId: [1:7494344202820319186:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344202820319186:2516]. isRollback=0 2025-04-17T18:03:04.253562Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmIyOTAwMmUtY2IwYWI5Mi1lNGI0YTE3OS05ZTgzZmY2Zg==, ActorId: [1:7494344202820319186:2516], ActorState: ExecuteState, TraceId: 01js2epa3vaxyws46f58mdwdp0, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344202820319204:2516] from: [1:7494344202820319203:2516] 2025-04-17T18:03:04.253653Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344202820319204:2516] TxId: 281474976710672. Ctx: { TraceId: 01js2epa3vaxyws46f58mdwdp0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIyOTAwMmUtY2IwYWI5Mi1lNGI0YTE3OS05ZTgzZmY2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:04.253872Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmIyOTAwMmUtY2IwYWI5Mi1lNGI0YTE3OS05ZTgzZmY2Zg==, ActorId: [1:7494344202820319186:2516], ActorState: ExecuteState, TraceId: 01js2epa3vaxyws46f58mdwdp0, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8203, MsgBus: 17707 2025-04-17T18:03:05.407598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206514551994:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.407651Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a49/r3tmp/tmpF7CaRN/pdisk_1.dat 2025-04-17T18:03:05.572974Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.608889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.608969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.611358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8203, node 2 2025-04-17T18:03:05.653417Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.653438Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.653446Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.653531Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17707 TClient is connected to server localhost:17707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:03:06.231456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.269785Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.288918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
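The PRECONDITION_FAILED sequence above (issue code 2012, table `/Root/TwoShard`) is the datashard rejecting an INSERT whose primary key is already stored: the shard reports STATUS_CONSTRAINT_VIOLATION ("Duplicate keys have been found"), the KQP compute actor wraps it as "Constraint violated", and the session actor turns it into the client-visible error. A minimal client-side sketch of the same failure follows; the endpoint, the preloaded key, and the column set are assumptions for illustration, not taken from the test source (the real trigger lives in the ydb/core/kqp/ut/effects unittests):

    // Hypothetical reproducer; endpoint and table contents are assumptions.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    #include <iostream>

    int main() {
        NYdb::TDriver driver(NYdb::TDriverConfig()
            .SetEndpoint("localhost:2136")   // assumption: local single node
            .SetDatabase("/Root"));
        NYdb::NTable::TTableClient client(driver);

        auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) -> NYdb::TStatus {
            // INSERT (unlike UPSERT) must fail if the key already exists;
            // this is the "Duplicate keys have been found" path in the log.
            // Key = 1u is assumed to be present in TwoShard already.
            return session.ExecuteDataQuery(
                R"(INSERT INTO TwoShard (Key, Value1) VALUES (1u, "One");)",
                NYdb::NTable::TTxControl::BeginTx(
                    NYdb::NTable::TTxSettings::SerializableRW()).CommitTx()
            ).GetValueSync();
        });

        if (status.GetStatus() == NYdb::EStatus::PRECONDITION_FAILED) {
            std::cerr << status.GetIssues().ToString();  // carries code: 2012
        }
        driver.Stop(true);
    }

UPSERT would succeed on the same input by overwriting the row; the tests use INSERT precisely to exercise this constraint path.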
2025-04-17T18:03:06.441491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.614557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.700711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.710763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219399455642:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.710868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.761084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.801452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.826142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.854027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.880440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.912010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.946812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.029267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223694423600:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.029337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.029419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223694423605:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.032427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.041237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223694423607:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:09.096481Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223694423661:3534] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.046499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.407832Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206514551994:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.407901Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 22195, MsgBus: 4355 2025-04-17T18:02:57.704223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344173455974498:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.704296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a37/r3tmp/tmplzySpQ/pdisk_1.dat 2025-04-17T18:02:58.500344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.518670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.518770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.523266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22195, node 1 2025-04-17T18:02:58.758620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.758641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.758647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.758746Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4355 TClient is connected to server localhost:4355 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.623658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.671097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.957543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.233996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.345681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:01.987062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344190635845440:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:01.987191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.680910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.704411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344173455974498:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.704604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.740867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.806570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.832479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.865468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.906924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.980807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344194930813402:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.980877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.981654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344194930813407:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.986058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:02.997154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344194930813409:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.096120Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199225780763:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.226437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22571, MsgBus: 18292 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a37/r3tmp/tmpl53VUK/pdisk_1.dat 2025-04-17T18:03:05.536553Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:05.554246Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.569000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.569085Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.570821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22571, node 2 2025-04-17T18:03:05.686475Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.686497Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.686510Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.686621Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18292 TClient is connected to server localhost:18292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.247709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.255084Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.258000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.364079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.640124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.724413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.627198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344217892811610:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.627279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.682938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.714878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.743960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.772134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.799594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.830378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.856531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.937801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344217892812269:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.937886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.937924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344217892812274:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.942159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:08.953976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344217892812276:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.009474Z node 2 :TX_PROXY ERROR: Actor# [2:7494344222187779626:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:09.923443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.429278Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2025-04-17T18:03:10.429458Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037922 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-17T18:03:10.429597Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037922 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-17T18:03:10.429785Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344226482747578:2514], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [2:7494344222187779961:2514]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[2:7494344226482747578:2514].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-04-17T18:03:10.430312Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344226482747526:2514], SessionActorId: [2:7494344222187779961:2514], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[2:7494344222187779961:2514]. isRollback=0 2025-04-17T18:03:10.430544Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjUwMDQxM2QtMzQ5ZjFiYTEtNTJiMjdhZTUtNDJhNzBkMGM=, ActorId: [2:7494344222187779961:2514], ActorState: ExecuteState, TraceId: 01js2epg4k7x040xxp6x9wq0mp, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494344226482747572:2514] from: [2:7494344226482747526:2514] 2025-04-17T18:03:10.430621Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344226482747572:2514] TxId: 281474976715677. Ctx: { TraceId: 01js2epg4k7x040xxp6x9wq0mp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjUwMDQxM2QtMzQ5ZjFiYTEtNTJiMjdhZTUtNDJhNzBkMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-04-17T18:03:10.430820Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjUwMDQxM2QtMzQ5ZjFiYTEtNTJiMjdhZTUtNDJhNzBkMGM=, ActorId: [2:7494344222187779961:2514], ActorState: ExecuteState, TraceId: 01js2epg4k7x040xxp6x9wq0mp, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7485, MsgBus: 2912 2025-04-17T18:02:57.706280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344173032106440:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.706429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a36/r3tmp/tmp8gHyYD/pdisk_1.dat 2025-04-17T18:02:58.582640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.586857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.586949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.602904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7485, node 1 2025-04-17T18:02:58.744384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.744410Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.744453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.744581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2912 TClient is connected to server localhost:2912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.754683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
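The ABORTED sequence above (issue code 2001, "Transaction locks invalidated" on `/Root/TestImmediateEffects`) is YDB's optimistic locking at work: a serializable transaction took read locks, a concurrent write broke them, and the commit was rejected with "Operation is aborting because it cannot acquire locks". The expected client response is to re-run the whole transaction, which the SDK retrier does automatically for ABORTED. A sketch of that pattern, assuming the same table with an integer Value column (the column name and key are assumptions modeled on the log):

    // Sketch of the retry pattern such aborts expect; table and column
    // names are assumptions, not taken from the test source.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    NYdb::TStatus IncrementWithRetry(NYdb::NTable::TTableClient& client) {
        return client.RetryOperationSync([](NYdb::NTable::TSession session) -> NYdb::TStatus {
            // Read-modify-write under a serializable transaction. If another
            // writer commits to Key = 1u between this transaction's read and
            // its commit, the commit fails with ABORTED / issue code 2001 and
            // the retrier re-runs this lambda from scratch.
            return session.ExecuteDataQuery(
                R"(UPDATE TestImmediateEffects SET Value = Value + 1 WHERE Key = 1u;)",
                NYdb::NTable::TTxControl::BeginTx(
                    NYdb::NTable::TTxSettings::SerializableRW()).CommitTx()
            ).GetValueSync();
        });
    }

Retrying the whole callable rather than the single query matters: after an abort the transaction's reads are stale and must be redone.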
2025-04-17T18:02:59.807078Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.820094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.036203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.229839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.352422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.134650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344194506944536:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.134735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.680059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.706655Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344173032106440:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.706760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.724053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.804557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.851466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.925837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.981103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.056090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198801912504:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.056158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.056372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198801912509:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.059680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.070244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344198801912511:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.161866Z node 1 :TX_PROXY ERROR: Actor# [1:7494344198801912569:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26858, MsgBus: 61924 2025-04-17T18:03:05.287975Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206403799866:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.288043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a36/r3tmp/tmpGF0Iv6/pdisk_1.dat 2025-04-17T18:03:05.379544Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26858, node 2 2025-04-17T18:03:05.415523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.415616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.417386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:05.450659Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.450680Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.450688Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.450785Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61924 TClient is connected to server localhost:61924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:05.923848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:05.930218Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:05.948802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.041330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.230714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.314031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.379950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219288703505:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.380056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.430057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.469656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.493586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.520226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.551107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.583281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.631357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.717000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219288704170:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.717131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.717283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219288704175:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.720673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:08.730674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344219288704177:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:08.802836Z node 2 :TX_PROXY ERROR: Actor# [2:7494344219288704233:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.232671Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344227878639201:2516], TxId: 281474976715673, task: 1. Ctx: { TraceId : 01js2epfqeejpwb0nxfh5mgvc8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzNmMDI4MGYtMzZhZWE3Ni04OTljZTctMjkwNDE4OTQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-17T18:03:10.233188Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344227878639203:2517], TxId: 281474976715673, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzNmMDI4MGYtMzZhZWE3Ni04OTljZTctMjkwNDE4OTQ=. TraceId : 01js2epfqeejpwb0nxfh5mgvc8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494344227878639198:2503], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:10.233629Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzNmMDI4MGYtMzZhZWE3Ni04OTljZTctMjkwNDE4OTQ=, ActorId: [2:7494344223583671825:2503], ActorState: ExecuteState, TraceId: 01js2epfqeejpwb0nxfh5mgvc8, Create QueryResponse for error on request, msg: 2025-04-17T18:03:10.288085Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206403799866:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.288158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23260, MsgBus: 65441 2025-04-17T18:02:57.710884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344171287120920:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.711037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a30/r3tmp/tmp2xx6Qf/pdisk_1.dat 2025-04-17T18:02:58.507423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.507558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.533477Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.539094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23260, node 1 2025-04-17T18:02:58.750512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.750537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.750547Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.750690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65441 TClient is connected to server localhost:65441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
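The KIKIMR_CONSTRAINT_VIOLATION above ("Duplicated keys found", code 2012) is the other duplicate-key variant: here the duplicate is inside the inserted row set itself, so the KQP compute actor rejects the statement before any datashard is involved, and the peer compute task is torn down with "Terminate execution". The trigger is simply a literal INSERT that names the same primary key twice; a hedged sketch of the offending statement shape, with table and column names assumed for illustration:

    // Two rows with the same primary key in one INSERT; rejected at the
    // compute layer with PRECONDITION_FAILED / issue code 2012 before any
    // datashard write. Table and column names are assumptions.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    NYdb::TStatus InsertLiteralDuplicates(NYdb::NTable::TSession session) {
        return session.ExecuteDataQuery(
            R"(INSERT INTO KeyValue (Key, Value) VALUES (1u, "a"), (1u, "b");)",
            NYdb::NTable::TTxControl::BeginTx(
                NYdb::NTable::TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
    }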
2025-04-17T18:02:59.801400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.829353Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.841295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.022202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.275168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.381723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.053869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344192761959022:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.053983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.637806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.687985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.714957Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344171287120920:2203];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:02.715015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:02.730119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.761772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.794102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.865036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.907565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.970446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344192761959688:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.970578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.972747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344192761959693:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.977866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:03.006763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344192761959695:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:03.095752Z node 1 :TX_PROXY ERROR: Actor# [1:7494344197056927051:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 13834, MsgBus: 16778
2025-04-17T18:03:05.221964Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206974559823:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:05.222024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a30/r3tmp/tmpqjIpGB/pdisk_1.dat
2025-04-17T18:03:05.333702Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:05.355993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:05.356062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:05.357420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13834, node 2
2025-04-17T18:03:05.442414Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:05.442435Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:05.442443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:05.442550Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16778
TClient is connected to server localhost:16778
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:05.948558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
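The block above shows the recurring default-pool bootstrap race: the workload service fetches pool "default", gets NOT_FOUND, a TPoolCreatorActor creates it, and a session that loses the creation race receives the benign TX_PROXY "path exist, request accepts it" message. A minimal sketch of that create-if-missing flow, assuming hypothetical lookup/create callables (none of these names are YDB APIs):

```python
import time

class PathExistsError(Exception):
    """Stand-in for the TX_PROXY 'path exist, request accepts it' outcome."""

class TransientError(Exception):
    """Stand-in for errors that trigger 'Scheduled retry for error'."""

def ensure_resource_pool(lookup, create, retries=3, backoff_s=0.1):
    """Create-if-missing with a race-tolerant create step."""
    for attempt in range(retries):
        if lookup():                # pool already visible: nothing to do
            return True
        try:
            create()                # propose the create operation
            return True
        except PathExistsError:     # a concurrent session created it first: success
            return True
        except TransientError:
            time.sleep(backoff_s * (attempt + 1))  # retry, like the log's pool creator
    return False
```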
2025-04-17T18:03:05.955828Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:03:05.966558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.070607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.304420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.418611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:08.801361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219859463465:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:08.801445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:08.876512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:08.913461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:08.950627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:08.995513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.037327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.073139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.103919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.165871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224154431416:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.165950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.166418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224154431421:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.169362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.180473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224154431423:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:09.273995Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224154431482:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:10.223693Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206974559823:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:10.223772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD]
Test command err:
Trying to start YDB, gRPC: 30014, MsgBus: 7001
2025-04-17T18:02:57.705211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344172537955237:2075];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:57.705295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1c/r3tmp/tmpa5RzlW/pdisk_1.dat
2025-04-17T18:02:58.584148Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:58.599392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:58.599495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:58.610651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 30014, node 1
2025-04-17T18:02:58.755355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:02:58.755375Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:02:58.755381Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:02:58.755491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7001
TClient is connected to server localhost:7001
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:02:59.765793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.782965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:02:59.810776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.066045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.444471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.557347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:02.082082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344194012793470:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.082234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.637916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.710719Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344172537955237:2075];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:02.710834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:02.711599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.744295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.779722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.813840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.854162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.927386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.006761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198307761432:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.006861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.007088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198307761437:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.011054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:03.037370Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480
2025-04-17T18:03:03.037661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344198307761439:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:03.107223Z node 1 :TX_PROXY ERROR: Actor# [1:7494344198307761498:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 8969, MsgBus: 6800
2025-04-17T18:03:05.375377Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207293731678:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:05.375425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1c/r3tmp/tmptXMlkG/pdisk_1.dat
2025-04-17T18:03:05.606614Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:05.625267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:05.625354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:05.631195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 8969, node 2
2025-04-17T18:03:05.785593Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:05.785616Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:05.785624Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:05.785739Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6800
TClient is connected to server localhost:6800
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:06.247596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
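When triaging a run like this, it helps to aggregate the repeated WARN/ERROR entries by component. A small sketch that assumes the reflowed one-entry-per-line format used above (`<ISO8601 timestamp>Z node <N> :<COMPONENT> <LEVEL>: <message>`); the regex is written against this log's visible shape only, not any YDB tooling:

```python
import re
import sys
from collections import Counter

# Matches e.g. "2025-04-17T18:03:08.913288Z node 2 :KQP_WORKLOAD_SERVICE WARN: ..."
ENTRY = re.compile(
    r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node (\d+) :(\w+) (WARN|ERROR|INFO|DEBUG):"
)

def summarize(lines):
    """Count log entries per (component, level) pair."""
    counts = Counter()
    for line in lines:
        m = ENTRY.match(line)
        if m:
            _node, component, level = m.groups()
            counts[(component, level)] += 1
    return counts

if __name__ == "__main__":
    for (component, level), n in summarize(sys.stdin).most_common():
        print(f"{n:6d}  {component} {level}")
```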
2025-04-17T18:03:06.254061Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:03:06.261201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.351994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.577221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.649707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:08.913288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344220178635295:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:08.913373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:08.988992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.015337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.050881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.086399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.116047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.155762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.190877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.284434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224473603259:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.284518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.284752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224473603264:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.288148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.297392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224473603266:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:09.395399Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224473603323:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:10.375751Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207293731678:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:10.375824Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Literal [GOOD]
Test command err:
Trying to start YDB, gRPC: 29491, MsgBus: 22006
2025-04-17T18:02:57.703396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344172257940860:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:57.703456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a18/r3tmp/tmpsHLvYS/pdisk_1.dat
2025-04-17T18:02:58.682752Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:58.683498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:58.683599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:58.688064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29491, node 1
2025-04-17T18:02:58.862759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:02:58.862789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:02:58.862801Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:02:58.862953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22006
TClient is connected to server localhost:22006
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:02:59.624040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.675215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:02:59.688633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.901117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T18:03:00.167882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T18:03:00.285461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:01.991618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344189437811816:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:01.991749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.639959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.706241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.709655Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344172257940860:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:02.709905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:02.747117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.817487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.845430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.890533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.981794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.062338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198027747083:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.062399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.062475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198027747088:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.065668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:03.078255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344198027747090:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:03.181434Z node 1 :TX_PROXY ERROR: Actor# [1:7494344198027747146:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 13797, MsgBus: 18588
2025-04-17T18:03:05.398632Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344205668376347:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:05.398674Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a18/r3tmp/tmptTxYXT/pdisk_1.dat
2025-04-17T18:03:05.725000Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:05.774494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:05.782329Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:05.786914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13797, node 2
2025-04-17T18:03:05.974645Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:05.974666Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:05.974674Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:05.974785Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18588
TClient is connected to server localhost:18588
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:06.619508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
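The METADATA_PROVIDER pair in these blocks follows a fixed shape: `event=undelivered` at startup, then `event=timeout` roughly five seconds later (here 18:03:05.398632 vs 18:03:10.399026 on node 2), after which path detection gives up with `error=timeout`. A deadline-bounded probe in the same spirit, as an illustrative sketch only (probe, timeout_s, and poll_s are invented parameters, not YDB internals):

```python
import time

def detect_path_existence(probe, timeout_s=5.0, poll_s=0.25):
    """Poll probe() until it returns True/False; give up after timeout_s.
    probe() returning None means 'no answer yet', e.g. an undelivered event."""
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        answer = probe()
        if answer is not None:
            return answer
        time.sleep(poll_s)
    # Mirrors the log's "cannot detect path existence; ... error=timeout"
    raise TimeoutError("cannot detect path existence: timeout")
```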
2025-04-17T18:03:06.630762Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:03:06.642270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.752179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.955479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.053954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:09.096508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222848247321:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.096613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.154752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.236172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.268633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.298832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.368037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.445270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.474627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.534795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222848247991:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.534851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.534995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222848247996:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.538346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.548090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344222848247998:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:03:09.612133Z node 2 :TX_PROXY ERROR: Actor# [2:7494344222848248053:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:10.399026Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344205668376347:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:10.399098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 9142, MsgBus: 30019
2025-04-17T18:02:57.708197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174641072053:2203];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:57.708322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a06/r3tmp/tmpqUvqdx/pdisk_1.dat
2025-04-17T18:02:58.574123Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:58.589925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:58.590054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:58.593066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9142, node 1
2025-04-17T18:02:58.778954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:02:58.778974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:02:58.778986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:02:58.779144Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:30019
TClient is connected to server localhost:30019
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:02:59.643443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.709757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.973372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.189194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.272213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:01.936726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344191820942867:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:01.936858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.641220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.707854Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174641072053:2203];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:02.707983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:02.710557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.745991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.815344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.851466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.908856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.961532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.037614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200410878127:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.037733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.037944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200410878132:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.041346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:03.054749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344200410878134:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:03.141726Z node 1 :TX_PROXY ERROR: Actor# [1:7494344200410878191:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:04.217409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 19366, MsgBus: 10241
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a06/r3tmp/tmpoNaiCd/pdisk_1.dat
2025-04-17T18:03:05.484057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:03:05.501817Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:05.543112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:05.543205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:05.544575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 19366, node 2
2025-04-17T18:03:05.674640Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:05.674665Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:05.674673Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:05.674868Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10241
TClient is connected to server localhost:10241
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:06.134992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
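The `NotifyTxCompletion, unknown transaction` warnings in these blocks appear to arise when the waiter subscribes after the schemeshard transaction has already finished, so "unknown txId" effectively means "already completed" rather than a failure. A hedged sketch of a waiter that treats it that way (status_of and its return values are invented for illustration, not a YDB interface):

```python
import time

def wait_for_tx(status_of, tx_id, timeout_s=10.0, poll_s=0.1):
    """status_of(tx_id) -> 'in_flight' or 'unknown' (hypothetical helper).
    'unknown' is treated as completion, matching the benign WARN in the log."""
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        if status_of(tx_id) == "unknown":   # tx no longer tracked: done
            return True
        time.sleep(poll_s)
    raise TimeoutError(f"txId {tx_id} still in flight after {timeout_s}s")
```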
2025-04-17T18:03:06.158837Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.184457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.273938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.541292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.659870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.700080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219540791870:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.700170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.780375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.812797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.844504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.912743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.940213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.984875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.019836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.072811Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223835759827:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.072872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223835759832:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.072883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.076077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.084651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223835759834:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.163627Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223835759890:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.193473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpWrite::InsertRevert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23971, MsgBus: 11968 2025-04-17T18:02:57.703396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174859973900:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.703449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a11/r3tmp/tmpYhnswo/pdisk_1.dat 2025-04-17T18:02:58.474882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.481896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.482006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.488211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23971, node 1 2025-04-17T18:02:58.754258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.754292Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.754299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.754389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11968 TClient is connected to server localhost:11968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
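The three warnings above — the PoolId `default` fetch returning NOT_FOUND, TPoolCreatorActor scheduling a retry after "Transaction ... completed, doublechecking", and TX_PROXY then rejecting the create with "path exist, request accepts it" — form one bootstrap sequence: on first use, several actors race to create `/Root/.metadata/workload_manager/pools/default`, and losing the race ("path exist") is accepted as success. A minimal sketch of that create-if-missing pattern, assuming hypothetical `fetch_pool`/`create_pool` callables and stand-in exception types (none of these are YDB SDK names):

```python
import time

class PathExistsError(Exception):
    """Stand-in for the 'path exist, request accepts it' outcome seen in the log."""

class TransientError(Exception):
    """Stand-in for a retriable failure (cf. 'Scheduled retry for error' above)."""

def ensure_default_pool(fetch_pool, create_pool, retries=5, backoff=0.1):
    # Idempotent bootstrap: every session may attempt the create on first use;
    # losing the race to a concurrent creator is treated the same as winning.
    for attempt in range(retries):
        if fetch_pool("default") is not None:      # pool already materialized
            return
        try:
            create_pool("default")
            return
        except PathExistsError:                    # a concurrent creator won -- fine
            return
        except TransientError:
            time.sleep(backoff * 2 ** attempt)     # scheduled retry with backoff
    raise RuntimeError("default resource pool was not created")
```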
2025-04-17T18:02:59.797773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.814594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.834255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.058845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.324510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.405851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.099998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344196334812137:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.100117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.680029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.706260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174859973900:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.706313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.764401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.797841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.831269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.869581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.905065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.988443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344196334812801:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.988538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.988754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344196334812806:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.992613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.003036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344196334812808:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.073889Z node 1 :TX_PROXY ERROR: Actor# [1:7494344200629780163:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.200577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24235, MsgBus: 1563 2025-04-17T18:03:05.506579Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207622480960:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.524876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a11/r3tmp/tmpZrN4Tb/pdisk_1.dat 2025-04-17T18:03:05.714325Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.735573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.735645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.743039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24235, node 2 2025-04-17T18:03:05.826698Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.826716Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.826724Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.826835Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1563 TClient is connected to server localhost:1563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.462811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.480935Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.489379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.574575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.744210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.816055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.730248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344220507384456:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.730348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.780331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.817451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.848046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.883365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.922153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.972101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.015055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.100770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224802352417:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.100861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224802352422:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.100896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.103956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.114391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224802352424:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:09.216795Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224802352482:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.323677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.461255Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207622480960:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.461322Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 15741, MsgBus: 27121 2025-04-17T18:02:57.703749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174178904796:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.703836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3d/r3tmp/tmp2Fesaz/pdisk_1.dat 2025-04-17T18:02:58.615903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.620395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.620514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.639089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15741, node 1 2025-04-17T18:02:58.809880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.809902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.809911Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.810054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27121 TClient is connected to server localhost:27121 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.824051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.890204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.082575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.320249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.429086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.111889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344195653743035:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.111991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.705025Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174178904796:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.705780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.725101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.784930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.823665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.853366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.912782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.951058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.034501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199948711001:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.034563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.034608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199948711006:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.038863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.049224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344199948711008:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.121718Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199948711067:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.221110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10567, MsgBus: 9478 2025-04-17T18:03:05.438715Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206061733739:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.438740Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a3d/r3tmp/tmpFok3Zs/pdisk_1.dat 2025-04-17T18:03:05.636510Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.652567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.652671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.663210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10567, node 2 2025-04-17T18:03:05.790672Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.790694Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.790700Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.790804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9478 TClient is connected to server localhost:9478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.308215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.314988Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.327794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.416764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.649227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.731086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.879258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344218946637383:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.879363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.929628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.973724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.009230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.078233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.106722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.178459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.248712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.333348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223241605357:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.333496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.333820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223241605362:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.336824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.345287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223241605364:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.400669Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223241605418:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.378225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.442422Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206061733739:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.442489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.843768Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715676; 2025-04-17T18:03:10.854484Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344227536573369:2541], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [2:7494344227536573275:2541]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[2:7494344227536573369:2541].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:03:10.855076Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344227536573362:2541], SessionActorId: [2:7494344227536573275:2541], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7494344227536573275:2541]. isRollback=0 2025-04-17T18:03:10.855382Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRkNzYyM2UtNmFmNjU2ZGYtOGNmNTI3ZWUtNGM4YTQyZjA=, ActorId: [2:7494344227536573275:2541], ActorState: ExecuteState, TraceId: 01js2epgkq48gp8qw88ccs6tnh, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494344227536573363:2541] from: [2:7494344227536573362:2541] 2025-04-17T18:03:10.855485Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344227536573363:2541] TxId: 281474976715676. Ctx: { TraceId: 01js2epgkq48gp8qw88ccs6tnh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmRkNzYyM2UtNmFmNjU2ZGYtOGNmNTI3ZWUtNGM4YTQyZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:03:10.855748Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRkNzYyM2UtNmFmNjU2ZGYtOGNmNTI3ZWUtNGM4YTQyZjA=, ActorId: [2:7494344227536573275:2541], ActorState: ExecuteState, TraceId: 01js2epgkq48gp8qw88ccs6tnh, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13353, MsgBus: 23591 2025-04-17T18:02:57.708514Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344173257678076:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.708648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a02/r3tmp/tmp7EhwVT/pdisk_1.dat 2025-04-17T18:02:58.562325Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.567434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.567562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.570008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13353, node 1 2025-04-17T18:02:58.748194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.748210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.748215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.748314Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23591 TClient is connected to server localhost:23591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.782247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
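The KqpImmediateEffects::UnobservedUncommittedChangeConflict failure path above is optimistic locking working as intended: the shard reports STATUS_LOCKS_BROKEN, KQP maps it to "Transaction locks invalidated ... code: 2001", and the session answers ABORTED. ABORTED is the retriable status, and the usual client-side answer is to re-run the whole transaction. A sketch with the YDB Python SDK — the endpoint/database values and the `Key`/`Value` schema of the test table are placeholders, and `retry_operation_sync` retrying ABORTED by default is my reading of the SDK, not something this log states:

```python
import ydb

def upsert_row(session):
    # Re-executed from scratch on every retry attempt, in a fresh transaction.
    session.transaction(ydb.SerializableReadWrite()).execute(
        'UPSERT INTO TestImmediateEffects (Key, Value) VALUES (42u, "v");',
        commit_tx=True,
    )

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholders
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
try:
    # retry_operation_sync re-runs the callee on retriable statuses such as
    # ABORTED ("Transaction locks invalidated"), with exponential backoff.
    pool.retry_operation_sync(upsert_row)
finally:
    pool.stop()
    driver.stop()
```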
2025-04-17T18:02:59.826861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.838380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.006766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.225203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.346119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:01.946762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344190437548878:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:01.946911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.710402Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344173257678076:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.710569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.723095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.779819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.853700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.888494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.974078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.016132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.071398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199027484149:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.071474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.071620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199027484154:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.074550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.084799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344199027484156:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.139782Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199027484210:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.312463Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:03:04.328380Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:04.328544Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:04.328725Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344203322451886:2516], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7494344203322451844:2516]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7494344203322451886:2516].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:03:04.329356Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344203322451879:2516], SessionActorId: [1:7494344203322451844:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344203322451844:2516]. isRollback=0 2025-04-17T18:03:04.329585Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE2NTNkYWMtYTZlZmUxMzgtMmZhZjdmLWZkZGE5Yjgy, ActorId: [1:7494344203322451844:2516], ActorState: ExecuteState, TraceId: 01js2epa3m8k3kp4g2d3h6qvy9, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344203322451880:2516] from: [1:7494344203322451879:2516] 2025-04-17T18:03:04.329674Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344203322451880:2516] TxId: 281474976710672. Ctx: { TraceId: 01js2epa3m8k3kp4g2d3h6qvy9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE2NTNkYWMtYTZlZmUxMzgtMmZhZjdmLWZkZGE5Yjgy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:04.329849Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE2NTNkYWMtYTZlZmUxMzgtMmZhZjdmLWZkZGE5Yjgy, ActorId: [1:7494344203322451844:2516], ActorState: ExecuteState, TraceId: 01js2epa3m8k3kp4g2d3h6qvy9, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 24533, MsgBus: 7787 2025-04-17T18:03:05.244932Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344205665543751:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.251497Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a02/r3tmp/tmpWO2O06/pdisk_1.dat 2025-04-17T18:03:05.348159Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.373548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.373633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.378249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24533, node 2 2025-04-17T18:03:05.550922Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.550940Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.550947Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.551053Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7787 TClient is connected to server localhost:7787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.173690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.180159Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.191266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
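The KqpEffects::InsertAbort_Literal_Conflict output above exercises the opposite case: INSERT into `/Root/TwoShard` with a key that already exists is rejected at the datashard (STATUS_CONSTRAINT_VIOLATION, "Duplicate keys have been found.", code 2012) and surfaces as PRECONDITION_FAILED, which retrying cannot fix — the statement itself has to change. A sketch of that distinction, in the same style as the retry example above; the `TwoShard` column names and `ydb.issues.PreconditionFailed` as the mapped Python exception are assumptions:

```python
import ydb

def insert_then_upsert(session):
    try:
        # INSERT enforces key uniqueness: an existing key fails the whole tx
        # with code 2012 -- retrying the identical statement is pointless.
        session.transaction(ydb.SerializableReadWrite()).execute(
            'INSERT INTO TwoShard (Key, Value1) VALUES (1u, "dup");',
            commit_tx=True,
        )
    except ydb.issues.PreconditionFailed:
        # UPSERT is the blind, idempotent alternative: it overwrites the row
        # instead of asserting that the key is absent.
        session.transaction(ydb.SerializableReadWrite()).execute(
            'UPSERT INTO TwoShard (Key, Value1) VALUES (1u, "dup");',
            commit_tx=True,
        )
```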
2025-04-17T18:03:06.314281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.507727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.640458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.776858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344218550447248:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.776967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.830393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.870534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.903935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.934314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.965717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.001768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.049300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.127209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222845415207:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.127293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.127531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222845415212:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.131003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.140053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344222845415214:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.224856Z node 2 :TX_PROXY ERROR: Actor# [2:7494344222845415271:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.240996Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344205665543751:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.241092Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.535379Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344227140382926:2517], TxId: 281474976715673, task: 1. Ctx: { TraceId : 01js2epg3p833e0gsdc6pvj581. SessionId : ydb://session/3?node_id=2&id=M2E4ZjYzMjAtNjMwZjE5ZmYtNjY2NjRhOWUtNzkyMjk5Nzc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:03:10.570787Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344227140382928:2518], TxId: 281474976715673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2E4ZjYzMjAtNjMwZjE5ZmYtNjY2NjRhOWUtNzkyMjk5Nzc=. TraceId : 01js2epg3p833e0gsdc6pvj581. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494344227140382923:2504], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:10.571234Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2E4ZjYzMjAtNjMwZjE5ZmYtNjY2NjRhOWUtNzkyMjk5Nzc=, ActorId: [2:7494344227140382867:2504], ActorState: ExecuteState, TraceId: 01js2epg3p833e0gsdc6pvj581, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9680, MsgBus: 2941 2025-04-17T18:02:57.708654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344172194746655:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.708703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a16/r3tmp/tmpeqncqs/pdisk_1.dat 2025-04-17T18:02:58.665670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.705739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.705812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.708108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9680, node 1 2025-04-17T18:02:58.926735Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.926757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.926764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.926875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2941 TClient is connected to server localhost:2941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.689959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
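Note that the node 2 run of the same scenario (the non-sink execution path) converges on the same code 2012 through different plumbing: the conflict is detected inside the compute actor as KIKIMR_CONSTRAINT_VIOLATION ("Conflict with existing key.") and the sibling task is torn down with "Terminate execution", whereas the sink path above received STATUS_CONSTRAINT_VIOLATION from the datashard itself. Either way the client-visible status is PRECONDITION_FAILED, so the INSERT-vs-UPSERT handling sketched above applies unchanged; this is a reading of the log rather than documented behavior.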
2025-04-17T18:02:59.738820Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.763248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.976728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.276907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.378368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.143617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344193669584703:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.143722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.710465Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344172194746655:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.710567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.711548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.754934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.791315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.867907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.927532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.985777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.070354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344197964552673:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.070455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.070815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344197964552678:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.074652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.084607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344197964552680:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.170076Z node 1 :TX_PROXY ERROR: Actor# [1:7494344197964552739:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.202941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25496, MsgBus: 16036 2025-04-17T18:03:05.391057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207127998001:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.391108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a16/r3tmp/tmpXmUGcI/pdisk_1.dat 2025-04-17T18:03:05.510642Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.529632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.529710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.531748Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25496, node 2 2025-04-17T18:03:05.602337Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.602356Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.602363Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.602463Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16036 TClient is connected to server localhost:16036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.065308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.088335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.181986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.382702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.464034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.812246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344220012901650:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.812334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.858818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.895250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.948688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.993482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.031213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.070670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.110638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.175812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224307869605:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.175866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.176032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224307869610:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.179035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.189377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224307869612:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.277269Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224307869670:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.391685Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207127998001:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.391753Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.433769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] Test command err: Trying to start YDB, gRPC: 26192, MsgBus: 23079 2025-04-17T18:02:57.707750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344171302076197:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.708329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a07/r3tmp/tmpwEziTw/pdisk_1.dat 2025-04-17T18:02:58.417420Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.453367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.454493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.457528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26192, node 1 2025-04-17T18:02:58.750272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.750291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.750297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.750397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23079 TClient is connected to server localhost:23079 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.731368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.759250Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.773741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.979361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.251594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.368337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.150870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344192776914427:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.150988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.640245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.680474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.720120Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344171302076197:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.720452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.732760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.767543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.801295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.853102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.890646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.961837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344192776915090:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.961917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.962115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344192776915095:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.969842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.001047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344192776915097:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.075873Z node 1 :TX_PROXY ERROR: Actor# [1:7494344197071882451:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.221187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9031, MsgBus: 63403 2025-04-17T18:03:05.433854Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206205765296:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.433938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a07/r3tmp/tmpmgYsHX/pdisk_1.dat 2025-04-17T18:03:05.539314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.539390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.541662Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.552627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9031, node 2 2025-04-17T18:03:05.626622Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.626642Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.626650Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.626750Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63403 TClient is connected to server localhost:63403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.235362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.243019Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.257304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:06.346062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.617552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:06.697664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.877344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219090668926:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.877441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.922140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.951723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.986030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.016194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.048568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.088273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.123609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.187611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223385636881:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.187661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223385636886:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.187704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.190812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.206412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223385636888:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.264834Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223385636942:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.251290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.434313Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206205765296:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.434380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24496, MsgBus: 13488 2025-04-17T18:02:57.704725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174541153179:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.705102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1d/r3tmp/tmpFayHWC/pdisk_1.dat 2025-04-17T18:02:58.486301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.486407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.491188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24496, node 1 2025-04-17T18:02:58.531775Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:02:58.531878Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:02:58.546835Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.822229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.822264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.822276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.822374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13488 TClient is connected to server localhost:13488 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.781296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.795263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.805399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.054736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.299904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.378488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.211110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344196015991412:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.211227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.640415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.683037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.706308Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174541153179:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.706484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.739462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.781134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.851383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.927300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.965195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.054263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200310959386:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.054356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.054589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200310959391:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.058481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.067668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344200310959393:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.137037Z node 1 :TX_PROXY ERROR: Actor# [1:7494344200310959450:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.198688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16451, MsgBus: 18844 2025-04-17T18:03:05.661979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207468039263:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.664963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1d/r3tmp/tmpOJbAdG/pdisk_1.dat 2025-04-17T18:03:05.855980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.856060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.857264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:05.878240Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16451, node 2 2025-04-17T18:03:06.030755Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:06.030779Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:06.030788Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:06.030900Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18844 TClient is connected to server localhost:18844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.787862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.799315Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.820738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.885827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:07.028365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:07.114859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.007857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224647910057:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.007943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.062397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.134735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.166058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.194373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.235624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.271296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.343277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.417049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224647910727:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.417131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.417201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224647910732:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.420707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.434682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224647910734:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:09.530982Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224647910791:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.567974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.659830Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207468039263:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.659907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31655, MsgBus: 27520 2025-04-17T18:02:57.707772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344172101547430:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.707944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fb/r3tmp/tmpNI38C2/pdisk_1.dat 2025-04-17T18:02:58.490257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.492566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.494662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:58.523520Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31655, node 1 2025-04-17T18:02:58.750654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.750675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.750680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.750787Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27520 TClient is connected to server localhost:27520 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.627919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:59.693138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:59.845826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.044924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.142544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:01.934571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344189281418231:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:01.934700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.640230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.684467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.707885Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344172101547430:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.707934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.727220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.763208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.796692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.839264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.884553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.972495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344193576386193:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.972586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.972807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344193576386198:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.976976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:02.994190Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:03:02.994567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344193576386200:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.054533Z node 1 :TX_PROXY ERROR: Actor# [1:7494344197871353552:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.200936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9352, MsgBus: 16302 2025-04-17T18:03:05.503331Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207889586937:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.518317Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fb/r3tmp/tmpkN5CcI/pdisk_1.dat 2025-04-17T18:03:05.678268Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.703238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.703318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.707628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9352, node 2 2025-04-17T18:03:05.867025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.867047Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.867054Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.867152Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16302 TClient is connected to server localhost:16302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.367297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.379217Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.397041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.508647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.728278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.805483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.039296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344225069457821:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.039408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.097171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.130063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.159213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.194759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.221805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.260884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.292827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.350773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344225069458479:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.350857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.351116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344225069458484:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.354935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.365396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344225069458486:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.436698Z node 2 :TX_PROXY ERROR: Actor# [2:7494344225069458541:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.506307Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207889586937:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.506844Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.553789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 12007, MsgBus: 5053 2025-04-17T18:02:57.703983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174411445464:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.704032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0d/r3tmp/tmpzOFZBb/pdisk_1.dat 2025-04-17T18:02:58.621741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.655691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.655800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.662518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12007, node 1 2025-04-17T18:02:58.770752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.770773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.770799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.770922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5053 TClient is connected to server localhost:5053 WaitRootIsUp 'Root'... 
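Each test block in this capture opens with a separator of the form `------- [TM] {tags} <ut path> >> <Suite::Test> [VERDICT]`, followed by "Test command err:" and the captured stderr. A sketch that tabulates verdicts from such a capture; the separator regex is an assumption inferred from the samples visible here:

```python
import re
import sys
from collections import Counter

# Separator shape seen in this capture, e.g.
#   ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD]
# The regex is inferred from these samples only.
SEP = re.compile(r"-+\s+\[TM\]\s+\{[^}]*\}\s+(\S+)\s+>>\s+(\S+)\s+\[(\w+)\]")

def verdicts(text):
    """Yield (ut_path, test_name, verdict) for every [TM] separator found."""
    for m in SEP.finditer(text):
        yield m.groups()

if __name__ == "__main__":
    found = list(verdicts(sys.stdin.read()))
    for path, name, verdict in found:
        print(f"{verdict:>6}  {name}  ({path})")
    print("totals:", dict(Counter(v for _, _, v in found)))
```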
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.635527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.670913Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.680950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.904672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.158888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.279287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.003284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344191591316399:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.003421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.679923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.704786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174411445464:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.704840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.759718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.830435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.876343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.932780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.015330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.104804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200181251675:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.104870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.105004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200181251680:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.109292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.121184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344200181251682:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.221971Z node 1 :TX_PROXY ERROR: Actor# [1:7494344200181251737:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.201604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:04.489618Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219617:2516] TxId: 281474976710674. Ctx: { TraceId: 01js2epa9kam7f672jyvxb1649, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-17T18:03:04.489671Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-17T18:03:04.489920Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710674. Resolved key sets: 1 2025-04-17T18:03:04.490082Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710674. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 17] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-17T18:03:04.490159Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219617:2516] TxId: 281474976710674. Ctx: { TraceId: 01js2epa9kam7f672jyvxb1649, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-17T18:03:04.490411Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-17T18:03:04.490559Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710674. Shard resolve complete, resolved shards: 1 2025-04-17T18:03:04.490594Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219617:2516] TxId: 281474976710674. Ctx: { TraceId: 01js2epa9kam7f672jyvxb1649, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-17T18:03:04.490653Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219617:2516] TxId: 281474976710674. Ctx: { TraceId: 01js2epa9kam7f672jyvxb1649, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037922] 2025-04-17T18:03:04.490705Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710674. 
Ctx: { TraceId: 01js2epa9kam7f672jyvxb1649, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1744912984409} 2025-04-17T18:03:04.490980Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710674. Ctx: { TraceId: 01js2epa9kam7f672jyvxb1649, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , P ... , Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494344204476219676:2516] 2025-04-17T18:03:04.655549Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219672:2516] TxId: 281474976710676. Ctx: { TraceId: 01js2epag7dnj5c1px6cy5hhp1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:04.655581Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219672:2516] TxId: 281474976710676. Ctx: { TraceId: 01js2epag7dnj5c1px6cy5hhp1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000589s ReadRows: 1 ReadBytes: 22 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:04.655952Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710677. Resolved key sets: 0 2025-04-17T18:03:04.656021Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710677. Ctx: { TraceId: 01js2epag7dnj5c1px6cy5hhp1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1744912984409} 2025-04-17T18:03:04.656049Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7494344204476219679:2516] TxId: 281474976710677. Ctx: { TraceId: 01js2epag7dnj5c1px6cy5hhp1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:03:04.656078Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219679:2516] TxId: 281474976710677. Ctx: { TraceId: 01js2epag7dnj5c1px6cy5hhp1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[1:7494344204476219663:2516] 2025-04-17T18:03:04.656119Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219679:2516] TxId: 281474976710677. Ctx: { TraceId: 01js2epag7dnj5c1px6cy5hhp1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:04.658257Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344204476219679:2516] TxId: 281474976710677. Ctx: { TraceId: 01js2epag7dnj5c1px6cy5hhp1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGRmNjMtMTcyNGQwNmEtMjRjNDNmZTEtODVmMzc5N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. Trying to start YDB, gRPC: 19904, MsgBus: 2109 2025-04-17T18:03:05.412520Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206800761256:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.412588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0d/r3tmp/tmp23BJx7/pdisk_1.dat 2025-04-17T18:03:05.650233Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.662463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.662539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.663771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19904, node 2 2025-04-17T18:03:05.752269Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.752293Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.752301Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.752389Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2109 TClient is connected to server localhost:2109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.343406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
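The KQP_EXECUTER DEBUG entries above walk a query through its phases: resolve key sets, resolve shards, launch compute tasks, commit through the buffer actor, then a "Resource usage for last stat interval" summary carrying ComputeTime, ReadRows, ReadBytes, and request units. Those summaries can be scraped per transaction. A sketch assuming, as in the entries above, that the TxId and its usage summary share one log entry:

```python
import re
import sys

# Example entry (one per line in the raw capture):
#   ... TxId: 281474976710676. Ctx: { ... }. Resource usage for last stat
#   interval: ComputeTime: 0.000589s ReadRows: 1 ReadBytes: 22 ru: 1 ...
# Pattern inferred from this log only.
USAGE = re.compile(
    r"TxId: (\d+)\..*?Resource usage for last stat interval: "
    r"ComputeTime: ([\d.]+)s ReadRows: (\d+) ReadBytes: (\d+) ru: (\d+)"
)

def usage(text):
    """Yield (txid, compute_seconds, read_rows, read_bytes, request_units)."""
    for line in text.splitlines():
        m = USAGE.search(line)
        if m:
            txid, ct, rows, nbytes, ru = m.groups()
            yield int(txid), float(ct), int(rows), int(nbytes), int(ru)

if __name__ == "__main__":
    for txid, ct, rows, nbytes, ru in usage(sys.stdin.read()):
        print(f"tx {txid}: compute={ct * 1e3:.3f} ms rows={rows} bytes={nbytes} ru={ru}")
```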
2025-04-17T18:03:06.349453Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.363456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.466162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.641723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.709817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.970763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219685664903:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.970844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.026469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.062491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.134539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.170624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.208342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.253307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.323393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.392256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223980632865:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.392330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.392498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223980632870:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.396046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.406069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223980632872:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.500921Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223980632931:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.417186Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206800761256:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.417256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.491341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 7897, MsgBus: 8449 2025-04-17T18:02:57.705801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344173662452033:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.707243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fa/r3tmp/tmpD6TeJI/pdisk_1.dat 2025-04-17T18:02:58.554821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.563102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.563211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.568231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7897, node 1 2025-04-17T18:02:58.782691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.782717Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.782731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.782845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8449 TClient is connected to server localhost:8449 WaitRootIsUp 'Root'... 
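The TClient::Ls exchanges in these blocks (another follows just below) print the schemeshard PathDescription in protobuf text format, truncated by the test framework. A few scalar fields can still be pulled out with a light scan; this sketch is not a real text-format parser, and the field list is just the ones visible here:

```python
import re
import sys

# The Ls response is protobuf text format (truncated in this capture).
# A light scan for a few scalar fields, quoted or bare.
FIELDS = ("Name", "PathId", "PathType", "PathState", "SchemeshardId")
FIELD = re.compile(r'\b(%s): ("[^"]*"|\S+)' % "|".join(FIELDS))

def scan(text):
    """Yield (field, value) pairs found anywhere in the Ls output."""
    for m in FIELD.finditer(text):
        yield m.group(1), m.group(2).strip('"')

if __name__ == "__main__":
    for key, value in scan(sys.stdin.read()):
        print(f"{key} = {value}")
```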
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.627465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.669686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.970363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.256207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.393111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.083065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344195137290277:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.083162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.684414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.709173Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344173662452033:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.709370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.740964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.783345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.855310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.946216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.024334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.095217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199432258249:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.095309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.095482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199432258254:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.099407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.108791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344199432258256:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.172160Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199432258309:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.192794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24785, MsgBus: 18276 2025-04-17T18:03:05.666024Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344208928401058:2205];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fa/r3tmp/tmphezchW/pdisk_1.dat 2025-04-17T18:03:05.691307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:03:05.769549Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.817698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.817783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.823963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24785, node 2 2025-04-17T18:03:05.962014Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.962051Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.962060Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.962160Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18276 TClient is connected to server localhost:18276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.621374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
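Every entry in the capture shares the shape `<timestamp> node <n> :<COMPONENT> <LEVEL>: <message>`, so a component-by-severity tally is a cheap first pass over a run this long. A sketch; the entry pattern and the level names are inferred from this log only:

```python
import re
import sys
from collections import Counter

# Entry shape seen throughout the capture:
#   <ISO timestamp> node <n> :<COMPONENT> <LEVEL>: <message>
ENTRY = re.compile(r"node \d+ :(\w+) (TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT):")

def tally(text):
    """Count entries per (component, level) pair."""
    return Counter(m.groups() for m in ENTRY.finditer(text))

if __name__ == "__main__":
    for (component, level), n in tally(sys.stdin.read()).most_common():
        print(f"{n:5d}  {component:22s} {level}")
```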
2025-04-17T18:03:06.634421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.807069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:07.016428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:07.088113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.172563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344226108271846:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.172632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.227145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.273053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.342904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.372109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.438248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.510603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.553836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.637395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344226108272521:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.637468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.637476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344226108272526:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.640556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.649869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344226108272528:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.706732Z node 2 :TX_PROXY ERROR: Actor# [2:7494344226108272583:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.568455Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344208928401058:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.568514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.630993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8512, MsgBus: 3026 2025-04-17T18:02:57.704121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344173937085895:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.704183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a43/r3tmp/tmpyXGTly/pdisk_1.dat 2025-04-17T18:02:58.468497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.468598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.478747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.479063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8512, node 1 2025-04-17T18:02:58.750664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.750695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.750705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.750795Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3026 TClient is connected to server localhost:3026 WaitRootIsUp 'Root'... 
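Each block carries microsecond ISO-8601 timestamps, so a rough wall-clock figure per block falls out of its first and last stamps; the KqpImmediateEffects::ImmediateUpdateSelect block above runs from about 18:02:57.7 to 18:03:10.6, roughly 13 seconds. A sketch assuming the trailing Z and six fractional digits seen here:

```python
import re
import sys
from datetime import datetime

# Timestamps in this capture look like 2025-04-17T18:03:10.630993Z
# (six fractional digits, trailing Z) -- an assumption this sketch relies on.
TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")

def wall_clock_span(text):
    """Seconds between the first and last timestamp seen in a block."""
    stamps = [datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%fZ") for t in TS.findall(text)]
    return (max(stamps) - min(stamps)).total_seconds() if len(stamps) > 1 else 0.0

if __name__ == "__main__":
    print(f"block spans ~{wall_clock_span(sys.stdin.read()):.1f}s of wall-clock time")
```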
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.905871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.969922Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.989510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.159679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.387480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.482939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.057101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344195411924147:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.057208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.642589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.695001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.704984Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344173937085895:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.705035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.734220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.770120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.805542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.843180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.917321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.012338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199706892120:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.012415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.012603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199706892125:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.016004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.025261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344199706892127:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.111407Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199706892184:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.375736Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:03:04.386477Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:04.386656Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:04.386863Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344204001859863:2516], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7494344204001859819:2516]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7494344204001859863:2516].{
: Error: Duplicate keys have been found., code: 2012 }
2025-04-17T18:03:04.387352Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344204001859856:2516], SessionActorId: [1:7494344204001859819:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344204001859819:2516]. isRollback=0
2025-04-17T18:03:04.387562Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGZmY2VlM2EtYzdhNzBjNTgtMjk3ZDM0ZWMtOWE0YTc4YTc=, ActorId: [1:7494344204001859819:2516], ActorState: ExecuteState, TraceId: 01js2epa3wdr39b8wb7ym934ma, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344204001859857:2516] from: [1:7494344204001859856:2516]
2025-04-17T18:03:04.387645Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344204001859857:2516] TxId: 281474976710672. Ctx: { TraceId: 01js2epa3wdr39b8wb7ym934ma, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZmY2VlM2EtYzdhNzBjNTgtMjk3ZDM0ZWMtOWE0YTc4YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:04.387857Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGZmY2VlM2EtYzdhNzBjNTgtMjk3ZDM0ZWMtOWE0YTc4YTc=, ActorId: [1:7494344204001859819:2516], ActorState: ExecuteState, TraceId: 01js2epa3wdr39b8wb7ym934ma, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6130, MsgBus: 21499 2025-04-17T18:03:05.345052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207518823696:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.345105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a43/r3tmp/tmp6sYQfE/pdisk_1.dat 2025-04-17T18:03:05.531106Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.566993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.567079Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.568731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6130, node 2 2025-04-17T18:03:05.734695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.734729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.734744Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.734860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21499 TClient is connected to server localhost:21499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.440273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.447669Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.457080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
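The PRECONDITION_FAILED block above is the expected failure path for KqpEffects::InsertAbort_Params_Duplicates: the datashard rejects the prepare with STATUS_CONSTRAINT_VIOLATION ("Duplicate keys have been found.", issue code 2012), KQP_COMPUTE wraps it in a "Constraint violated" issue for table `/Root/TwoShard`, and the session turns that into the query response. Issue messages print as `: Error: <message>, code: <n>`, with nesting marked by "subissue:", so the chain can be extracted; the pattern below is inferred from these lines:

```python
import re
import sys

# Issue lines in this capture print as ": Error: <message>, code: <n>";
# nesting shows up as "subissue:" on the parent line. Pattern inferred
# from these samples only.
ISSUE = re.compile(r": Error: (.+?), code: (\d+)")

def issues(text):
    """Yield (message, code) for every issue line in the capture."""
    for m in ISSUE.finditer(text):
        yield m.group(1).strip(), int(m.group(2))

if __name__ == "__main__":
    for msg, code in issues(sys.stdin.read()):
        print(f"code {code}: {msg}")
```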
2025-04-17T18:03:06.538801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.730077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:06.826086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.953786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344220403727351:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.953871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.997236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.024104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.054572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.082318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.109834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.146847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.222495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.290550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224698695310:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.290638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.290911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224698695315:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.294905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.307626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224698695317:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.405376Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224698695376:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.346284Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207518823696:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.346357Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.739424Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344228993663028:2516], TxId: 281474976715673, task: 1. Ctx: { TraceId : 01js2epg8r042wsnbz4bmrz6h1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NjFiNWIwMzEtYTZlMmI0MmMtNzNhZjljNjUtNTM0YWE2OTE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-17T18:03:10.739742Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344228993663030:2517], TxId: 281474976715673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NjFiNWIwMzEtYTZlMmI0MmMtNzNhZjljNjUtNTM0YWE2OTE=. TraceId : 01js2epg8r042wsnbz4bmrz6h1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494344228993663025:2504], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:10.740018Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjFiNWIwMzEtYTZlMmI0MmMtNzNhZjljNjUtNTM0YWE2OTE=, ActorId: [2:7494344228993662970:2504], ActorState: ExecuteState, TraceId: 01js2epg8r042wsnbz4bmrz6h1, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 29549, MsgBus: 9602 2025-04-17T18:02:57.703591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174164376104:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.703648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a13/r3tmp/tmp8T0f6y/pdisk_1.dat 2025-04-17T18:02:58.572543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.601745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.601860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.611168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29549, node 1 2025-04-17T18:02:58.795802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.795833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.795844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.795992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9602 TClient is connected to server localhost:9602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.809736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.838198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
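Note: the warning triplet that precedes every test body in this file is benign startup noise, not a failure. The KQP_WORKLOAD_SERVICE NOT_FOUND warnings ("Resource pool default not found or you don't have access permissions") fire while the first queries race with lazy creation of /Root/.metadata/workload_manager/pools/default; TPoolCreatorActor then retries ("Transaction ... completed, doublechecking"); and the TX_PROXY "path exist, request accepts it" message is that retry discovering a concurrent attempt already created the pool, which the request explicitly accepts as success. A hypothetical YQL sketch of the idempotent creation being raced (the real DDL is issued internally by the workload service; the pool name is from the log, the setting shown is an assumption):

    -- Two sessions race to create the same default pool; the loser's
    -- CREATE sees "path exist" and treats creation as already done.
    CREATE RESOURCE POOL default WITH (CONCURRENT_QUERY_LIMIT = -1);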
2025-04-17T18:03:00.059342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:00.432997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:00.526202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.446552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344195639214330:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.446648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.706286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174164376104:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.706358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.767236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.821372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.853611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.895873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.966829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.042284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.084331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.179490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199934182300:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.179584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.181341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199934182305:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.185181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.195651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344199934182307:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.248266Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199934182363:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.256167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5847, MsgBus: 16116 2025-04-17T18:03:05.513539Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207205391497:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.513579Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a13/r3tmp/tmpQ1llWx/pdisk_1.dat 2025-04-17T18:03:05.844279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.844357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.846160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:05.849243Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5847, node 2 2025-04-17T18:03:06.010076Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:06.010098Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:06.010105Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:06.010226Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16116 TClient is connected to server localhost:16116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:03:06.584270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.614524Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.624983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.738073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.942491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:07.027245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.235346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224385262313:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.235435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.292172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.337401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.372094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.399809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.439626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.511184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.543994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.604645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224385262977:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.604713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.604762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224385262982:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.608276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.620805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224385262984:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:09.725478Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224385263041:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.513592Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207205391497:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.513640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.656433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:11.088391Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=7; 2025-04-17T18:03:11.088707Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 7 at tablet 72075186224037922 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:03:11.094469Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 7 at tablet 72075186224037922 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:03:11.094644Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344228680230932:2515], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [2:7494344228680230672:2515]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[2:7494344228680230932:2515].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:03:11.095278Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344228680230925:2515], SessionActorId: [2:7494344228680230672:2515], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7494344228680230672:2515]. isRollback=0 2025-04-17T18:03:11.095548Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTFlZmJlOTAtZmMzZWJhNTUtYWRjNmM5ZTgtNDdhYTUwNTQ=, ActorId: [2:7494344228680230672:2515], ActorState: ExecuteState, TraceId: 01js2epgrk9k5ppdbjfp070mea, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494344232975198269:2515] from: [2:7494344228680230925:2515] 2025-04-17T18:03:11.095668Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344232975198269:2515] TxId: 281474976710678. Ctx: { TraceId: 01js2epgrk9k5ppdbjfp070mea, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFlZmJlOTAtZmMzZWJhNTUtYWRjNmM5ZTgtNDdhYTUwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:03:11.095854Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTFlZmJlOTAtZmMzZWJhNTUtYWRjNmM5ZTgtNDdhYTUwNTQ=, ActorId: [2:7494344228680230672:2515], ActorState: ExecuteState, TraceId: 01js2epgrk9k5ppdbjfp070mea, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25257, MsgBus: 16006 2025-04-17T18:02:57.704160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174335099456:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.704233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a01/r3tmp/tmpTtqCRC/pdisk_1.dat 2025-04-17T18:02:58.538926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.539052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.540600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:58.597144Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25257, node 1 2025-04-17T18:02:58.748635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.748665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.748690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.748811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16006 TClient is connected to server localhost:16006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.717532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
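Note: the ABORTED block above is the scenario KqpImmediateEffects::ConflictingKeyW1WRR2 is built to hit. STATUS_LOCKS_BROKEN / issue code 2001 ("Transaction locks invalidated") is YDB's optimistic-concurrency failure: it is raised when another committed transaction has touched keys in this transaction's read/write set, so the locks taken earlier can no longer be validated. A hedged two-session YQL sketch of the shape of the conflict (table name from the log; columns, values, and exact interleaving are illustrative):

    -- Session A, transaction A open:
    UPSERT INTO TestImmediateEffects (Key, Value) VALUES (1u, "a");
    -- Session B then writes the same Key = 1u and commits first.
    -- Session A, COMMIT:
    --   fails ABORTED, code 2001, "Transaction locks invalidated.
    --   Table: `/Root/TestImmediateEffects`.", because B's commit
    --   broke the locks A held on Key = 1u.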
2025-04-17T18:02:59.763851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.781888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.957546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.175680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.267490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:01.934505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344191514970406:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:01.934627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.637861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.678512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.705285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174335099456:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.705466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.732697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.770374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.796788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.870628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.924927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.010464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200104905668:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.010578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.010752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200104905673:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.016941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.028948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344200104905675:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.132407Z node 1 :TX_PROXY ERROR: Actor# [1:7494344200104905732:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.189878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17458, MsgBus: 20488 2025-04-17T18:03:05.483996Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344207670373856:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.484781Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a01/r3tmp/tmpGQA7ob/pdisk_1.dat 2025-04-17T18:03:05.687592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.687671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.692675Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.719469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17458, node 2 2025-04-17T18:03:05.918676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:05.918700Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:05.918707Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:05.918801Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20488 TClient is connected to server localhost:20488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.612218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.618932Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.629255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.732146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:06.902506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.063320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.136529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224850244744:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.136624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.188282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.216934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.243911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.276174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.310126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.379822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.459283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.557556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224850245419:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.557644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.557972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344224850245424:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.562521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.577415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344224850245426:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:09.651885Z node 2 :TX_PROXY ERROR: Actor# [2:7494344224850245481:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.484617Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344207670373856:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.484694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.739332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestStorageRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 29295, MsgBus: 32156 2025-04-17T18:02:57.703428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344173523832289:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.703503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a21/r3tmp/tmpT3NpSW/pdisk_1.dat 2025-04-17T18:02:58.568076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:58.572282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.572381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.577312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29295, node 1 2025-04-17T18:02:58.746661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.746680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.746688Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.746805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32156 TClient is connected to server localhost:32156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.635248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.694895Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.709918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.970414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.246548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.369344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.028689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344194998670538:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.028836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.642661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.697979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.704087Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344173523832289:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.704244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.757720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.826771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.898902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.980805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.034141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.112540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199293638512:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.112610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.112938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199293638517:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.115903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.124973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344199293638519:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.218085Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199293638576:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.223876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24233, MsgBus: 8295 2025-04-17T18:03:06.103710Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344209974881942:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:06.104018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a21/r3tmp/tmpODe4uH/pdisk_1.dat 2025-04-17T18:03:06.319338Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:06.328007Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:06.328086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:06.331204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24233, node 2 2025-04-17T18:03:06.474931Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:06.474954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:06.474962Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:06.475066Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8295 TClient is connected to server localhost:8295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
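Note: despite the volume of WARN/ERROR chatter, the KqpImmediateEffects tests in this file all report [GOOD]. "Immediate effects" refers to a statement's writes being visible to later statements of the same transaction, which is what KqpImmediateEffects::DeleteAfterInsert exercises. A sketch of that pattern, not the test's literal body (table and columns illustrative):

    -- All within a single YDB transaction:
    INSERT INTO TestImmediateEffects (Key, Value) VALUES (100u, "x");
    -- The DELETE already sees the row inserted one statement earlier:
    DELETE FROM TestImmediateEffects WHERE Key = 100u;
    SELECT COUNT(*) FROM TestImmediateEffects WHERE Key = 100u; -- 0
    COMMIT;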
2025-04-17T18:03:07.240155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T18:03:07.250419Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:03:07.272970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T18:03:07.351500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-04-17T18:03:07.546943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.637217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:09.429869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222859785525:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.429957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.471265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.496203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.522895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.555036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.589215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.624290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.661985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.739820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222859786185:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.739942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.740147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222859786190:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.743941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.755749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344222859786192:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:03:09.818062Z node 2 :TX_PROXY ERROR: Actor# [2:7494344222859786246:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:10.753700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-17T18:03:11.104184Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344209974881942:2139];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:11.104257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD]
Test command err:
Trying to start YDB, gRPC: 7345, MsgBus: 7893
2025-04-17T18:02:57.703360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344171583631447:2070];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:57.703481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a22/r3tmp/tmpcCDQx8/pdisk_1.dat
2025-04-17T18:02:58.578020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:58.585414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:58.602886Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:58.605645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7345, node 1
2025-04-17T18:02:58.745057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:02:58.745079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:02:58.745084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:02:58.745185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7893
TClient is connected to server localhost:7893
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:02:59.784931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.820851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:02:59.848116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.062081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.294190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T18:03:00.393215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T18:03:01.962658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344188763502391:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:01.962829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.639907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.691352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.704690Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344171583631447:2070];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:02.704744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:02.732645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.770883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.809457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.865822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.940437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.034084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344197353437655:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.034172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.034430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344197353437660:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.038162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:03.051184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344197353437662:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:03.120152Z node 1 :TX_PROXY ERROR: Actor# [1:7494344197353437717:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:04.198539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 4452, MsgBus: 1955
2025-04-17T18:03:05.731776Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206634806369:2056];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:05.731811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a22/r3tmp/tmpw8YPEP/pdisk_1.dat
2025-04-17T18:03:05.892129Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:05.899504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:05.899577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:05.903264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 4452, node 2
2025-04-17T18:03:06.042604Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:06.042631Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:06.042638Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:06.042745Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1955
TClient is connected to server localhost:1955
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:06.492169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.499245Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:03:06.521909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.653474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.898801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.048986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:09.388369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223814677348:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.388441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.443908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.475019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.499539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.531024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.598445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.640917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.681666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.764779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223814678012:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.764887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.764959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223814678017:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.768592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.779276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223814678019:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:03:09.860460Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223814678078:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:10.735508Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206634806369:2056];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:10.736276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:10.801462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-17T18:03:11.076072Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976715674;
2025-04-17T18:03:11.087472Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344232404613243:2512], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [2:7494344228109645707:2512]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[2:7494344232404613243:2512].{
: Error: Duplicate keys have been found., code: 2012 }
2025-04-17T18:03:11.087975Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344232404613230:2512], SessionActorId: [2:7494344228109645707:2512], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7494344228109645707:2512]. isRollback=0
2025-04-17T18:03:11.088221Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTU1MzU2ZjMtMzI1NmYwYWItNjEwYzRkMDktMmNiZjllZjU=, ActorId: [2:7494344228109645707:2512], ActorState: ExecuteState, TraceId: 01js2epgq8fa7620bgh6t05an3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7494344232404613231:2512] from: [2:7494344232404613230:2512]
2025-04-17T18:03:11.088298Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344232404613231:2512] TxId: 281474976715674. Ctx: { TraceId: 01js2epgq8fa7620bgh6t05an3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTU1MzU2ZjMtMzI1NmYwYWItNjEwYzRkMDktMmNiZjllZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } }
2025-04-17T18:03:11.088484Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTU1MzU2ZjMtMzI1NmYwYWItNjEwYzRkMDktMmNiZjllZjU=, ActorId: [2:7494344228109645707:2512], ActorState: ExecuteState, TraceId: 01js2epgq8fa7620bgh6t05an3, Create QueryResponse for error on request, msg:
2025-04-17T18:03:11.090997Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715674;
2025-04-17T18:03:11.091001Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715674;
2025-04-17T18:03:11.091180Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1744912991122 : 281474976715674] from 72075186224037923 at tablet 72075186224037923, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 }
2025-04-17T18:03:11.091203Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1744912991122 : 281474976715674] from 72075186224037924 at tablet 72075186224037924, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 6385, MsgBus: 13375
2025-04-17T18:02:57.703818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174644176718:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:57.703896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a31/r3tmp/tmpD3s629/pdisk_1.dat
2025-04-17T18:02:58.479078Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:58.489096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:58.489197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:58.491880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6385, node 1
2025-04-17T18:02:58.747787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:02:58.747807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:02:58.747814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:02:58.747934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13375
TClient is connected to server localhost:13375
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:02:59.689073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.738750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.996405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.158905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.256073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:02.422546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344196119014965:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.422665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.708247Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174644176718:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:02.709130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:02.855795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.888318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.920927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.956711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.991130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.047654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.082159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.146472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200413982922:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.146560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.147037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344200413982927:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.150901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:03.161430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344200413982929:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:03.251484Z node 1 :TX_PROXY ERROR: Actor# [1:7494344200413982986:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:04.330106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 19628, MsgBus: 23422
2025-04-17T18:03:06.015154Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344205426355645:2200];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:06.015267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a31/r3tmp/tmpRxFRDh/pdisk_1.dat
2025-04-17T18:03:06.275127Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:06.304396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:06.304491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:06.309345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 19628, node 2
2025-04-17T18:03:06.432905Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:06.432928Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:06.432938Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:06.433018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23422
TClient is connected to server localhost:23422
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:07.060431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.067181Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:03:07.093665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.184363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.378196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.457283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:09.578125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222606226465:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.578250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.637103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.707039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.741482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.775435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.805642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.834581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.904488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.983121Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222606227135:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.983223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.983497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222606227140:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.987471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.998162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344222606227142:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:03:10.090763Z node 2 :TX_PROXY ERROR: Actor# [2:7494344226901194497:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:10.979784Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344205426355645:2200];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:10.979858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:11.163669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
>> KqpInplaceUpdate::SingleRowPgNotNull+UseSink
>> BackupRestore::TestAllPrimitiveTypes-DATETIME64 [GOOD]
>> BackupRestore::TestAllPrimitiveTypes-DYNUMBER
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD]
Test command err:
Trying to start YDB, gRPC: 21530, MsgBus: 3831
2025-04-17T18:02:57.703412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344174040509763:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:57.703464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2b/r3tmp/tmpjFYRLs/pdisk_1.dat
2025-04-17T18:02:58.574659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:58.574752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:58.579214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 21530, node 1
2025-04-17T18:02:58.610655Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:58.774707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:02:58.774730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:02:58.774737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:02:58.774858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3831
TClient is connected to server localhost:3831
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:02:59.638608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.679107Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:02:59.690006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:02:59.918665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.195337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:00.297336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:02.291270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344195515348000:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.291407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:02.665397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.705958Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344174040509763:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:02.706019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:03:02.722228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.757925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.790256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.869090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.926023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:02.999685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:03.079081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199810315966:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.079182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.079475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344199810315971:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:03.082493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:03.091936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344199810315973:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:03.163195Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199810316029:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:04.188993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:03:04.389435Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710673;
2025-04-17T18:03:04.396786Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344204105283783:2516], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7494344204105283660:2516]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7494344204105283783:2516].{
: Error: Duplicate keys have been found., code: 2012 }
2025-04-17T18:03:04.397189Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344204105283772:2516], SessionActorId: [1:7494344204105283660:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344204105283660:2516]. isRollback=0
2025-04-17T18:03:04.397360Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhlM2U3NDItYzY3MDNkNGItMWQyMjJlNjgtYjU5ZjA0M2I=, ActorId: [1:7494344204105283660:2516], ActorState: ExecuteState, TraceId: 01js2epa5s1t3qetpkjjns1cfw, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344204105283773:2516] from: [1:7494344204105283772:2516]
2025-04-17T18:03:04.397415Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344204105283773:2516] TxId: 281474976710673. Ctx: { TraceId: 01js2epa5s1t3qetpkjjns1cfw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhlM2U3NDItYzY3MDNkNGItMWQyMjJlNjgtYjU5ZjA0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } }
2025-04-17T18:03:04.397554Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhlM2U3NDItYzY3MDNkNGItMWQyMjJlNjgtYjU5ZjA0M2I=, ActorId: [1:7494344204105283660:2516], ActorState: ExecuteState, TraceId: 01js2epa5s1t3qetpkjjns1cfw, Create QueryResponse for error on request, msg:
2025-04-17T18:03:04.397732Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673;
2025-04-17T18:03:04.397909Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744912984437 : 281474976710673] from 72075186224037922 at tablet 72075186224037922, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 }
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012
2025-04-17T18:03:04.522468Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710674;
2025-04-17T18:03:04.524028Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344204105283831:2516], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7494344204105283660:2516]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7494344204105283831:2516].{
: Error: Duplicate keys have been found., code: 2012 }
2025-04-17T18:03:04.524110Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344204105283823:2516], SessionActorId: [1:7494344204105283660:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344204105283660:2516]. isRollback=0
2025-04-17T18:03:04.524275Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhlM2U3NDItYzY3MDNkNGItMWQyMjJlNjgtYjU5ZjA0M2I=, ActorId: [1:7494344204105283660:2516], ActorState: ExecuteState, TraceId: 01js2epaba6xaj7wt7ec09rqqd, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344204105283824:2516] from: [1:7494344204105283823:2516]
2025-04-17T18:03:04.524339Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344204105283824:2516] TxId: 281474976710674. Ctx: { TraceId: 01js2epaba6xaj7wt7ec09rqqd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhlM2U3NDItYzY3MDNkNGItMWQyMjJlNjgtYjU5ZjA0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } }
2025-04-17T18:03:04.524472Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhlM2U3NDItYzY3MDNkNGItMWQyMjJlNjgtYjU5ZjA0M2I=, ActorId: [1:7494344204105283660:2516], ActorState: ExecuteState, TraceId: 01js2epaba6xaj7wt7ec09rqqd, Create QueryResponse for error on request, msg:
2025-04-17T18:03:04.525153Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710674;
2025-04-17T18:03:04.525285Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744912984570 : 281474976710674] from 72075186224037922 at tablet 72075186224037922, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 }
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012
Trying to start YDB, gRPC: 20610, MsgBus: 18685
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2b/r3tmp/tmpvaKYLh/pdisk_1.dat
2025-04-17T18:03:05.934326Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:03:05.970512Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:05.975262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:05.975356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:05.976548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20610, node 2
2025-04-17T18:03:06.146715Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:06.146740Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:06.146748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:06.146853Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18685
TClient is connected to server localhost:18685
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:06.808335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.815315Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:06.837124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:03:06.938617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:03:07.108551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T18:03:07.179959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.244545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223024018303:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.244635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.313779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.349570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.382527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.413446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.450529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.523545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.559145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:03:09.643557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223024018972:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.643646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.643855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223024018977:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:03:09.646765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:03:09.669331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223024018979:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:03:09.755964Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223024019037:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:10.735285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> KqpWrite::CastValues
>> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink
>> KqpImmediateEffects::InsertExistingKey+UseSink
>> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink
>> KqpImmediateEffects::InsertDuplicates+UseSink
>> KqpImmediateEffects::UpdateOn
>> KqpImmediateEffects::ConflictingKeyW1RR2
>> KqpEffects::InsertAbort_Select_Duplicates+UseSink
>> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD]
>> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention
>> KqpImmediateEffects::ConflictingKeyR1WR2
>> KqpEffects::InsertAbort_Params_Conflict+UseSink
>> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD]
>> KqpImmediateEffects::Upsert
>> KqpImmediateEffects::UpsertAfterInsertWithIndex
>> KqpImmediateEffects::ConflictingKeyRW1WR2
>> KqpImmediateEffects::ReplaceDuplicates
>> KqpImmediateEffects::Interactive [GOOD]
>> KqpEffects::InsertRevert_Literal_Duplicates
>> TPQTest::TestStorageRetention [GOOD]
>> TPQTest::TestStatusWithMultipleConsumers
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD]
>> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd
>> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD]
>> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt]
>> KqpImmediateEffects::UpsertDuplicates
>> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD]
Test command err:
Trying to start YDB, gRPC: 31723, MsgBus: 3693
2025-04-17T18:02:57.706570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344173778057876:2070];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:57.706661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0b/r3tmp/tmpadelFN/pdisk_1.dat
2025-04-17T18:02:58.526661Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:58.534206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:58.534288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:58.540782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on
GrpcPort 31723, node 1 2025-04-17T18:02:58.768030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.768070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.768078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.768223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3693 TClient is connected to server localhost:3693 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.685317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.723510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:02:59.740946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.925282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.141259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.224730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:01.937579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344190957928814:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:01.937688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.685419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.703623Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344173778057876:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.703731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.725047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.761520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.794258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.834123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.876426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.999637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344195252896781:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.999959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.000174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344195252896786:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.003635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.011720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344195252896788:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.077551Z node 1 :TX_PROXY ERROR: Actor# [1:7494344199547864139:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.196948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10707, MsgBus: 22144 2025-04-17T18:03:05.741404Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344206583345746:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.741463Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0b/r3tmp/tmpRYxSQy/pdisk_1.dat 2025-04-17T18:03:05.909815Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.915413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.915483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.922898Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10707, node 2 2025-04-17T18:03:06.068852Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:06.068874Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:06.068880Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:06.068984Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22144 TClient is connected to server localhost:22144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.611088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.623999Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:06.636151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.702632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.904285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.979445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:08.765370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344219468249374:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.765427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:08.819830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.844479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.868719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.894083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.928353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:08.972591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.026167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.102116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223763217337:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.102224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.102439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344223763217342:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.105243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.116428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344223763217344:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:09.210597Z node 2 :TX_PROXY ERROR: Actor# [2:7494344223763217401:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.081990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.120554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.187860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.741449Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344206583345746:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.741523Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 2881, MsgBus: 16160 2025-04-17T18:03:02.395419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344194621837819:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.395527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f5/r3tmp/tmpThP4w4/pdisk_1.dat 2025-04-17T18:03:02.777097Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2881, node 1 2025-04-17T18:03:02.816883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:02.817478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:02.821694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:02.958815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:02.958841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:02.958853Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:02.958960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:16160 TClient is connected to server localhost:16160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:03.564298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:03.586930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:03.725864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:03.898400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:03.972019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:05.680315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344207506741479:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:05.680404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:06.025643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.057669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.101457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.134643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.176071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.259192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.318032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.436005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344211801709449:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:06.436085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:06.436298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344211801709454:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:06.440602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:06.460125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344211801709456:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:06.567723Z node 1 :TX_PROXY ERROR: Actor# [1:7494344211801709512:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:07.402282Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344194621837819:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:07.402387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:07.660414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12717, MsgBus: 3203 2025-04-17T18:03:09.000431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344222210761366:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:09.000474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f5/r3tmp/tmpxltx8G/pdisk_1.dat 2025-04-17T18:03:09.132220Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:09.152239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:09.152311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:09.153669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12717, node 2 2025-04-17T18:03:09.204567Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:09.204589Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:09.204598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:09.204706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3203 TClient is connected to server localhost:3203 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:09.631005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.635568Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:09.648404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.727068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.912709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:10.001973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:12.029244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344235095665015:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.029414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.073882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.105094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.133019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.160709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.188615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.222829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.260181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.341020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344235095665679:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.341100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.341204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344235095665684:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.344722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:12.356431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344235095665686:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:12.451186Z node 2 :TX_PROXY ERROR: Actor# [2:7494344235095665743:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:13.443583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:14.007289Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344222210761366:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.007344Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore >> TPQTest::TestTabletRestoreEventsOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23903, MsgBus: 17549 2025-04-17T18:03:03.497685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344200289329835:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:03.498730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f0/r3tmp/tmp5z1Pxi/pdisk_1.dat 2025-04-17T18:03:03.898548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:03.917336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:03.917455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:03.922708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23903, node 1 2025-04-17T18:03:04.026822Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:04.026842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:04.026867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:04.026971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17549 TClient is connected to server localhost:17549 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:04.657231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:04.675577Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:04.686628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:04.801853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:04.942132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:05.009169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.084139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344217469200821:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:07.084247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:07.391295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.425911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.453298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.479411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.512255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.585725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.629423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:07.695869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344217469201487:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:07.695940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:07.696344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344217469201492:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:07.700002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:07.710680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344217469201494:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:07.772697Z node 1 :TX_PROXY ERROR: Actor# [1:7494344217469201548:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:08.497947Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344200289329835:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:08.498053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:08.684423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8451, MsgBus: 2899 2025-04-17T18:03:09.963696Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344222931971498:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:09.963899Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f0/r3tmp/tmpuXgiIW/pdisk_1.dat 2025-04-17T18:03:10.108704Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:10.123976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:10.124061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:10.125872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8451, node 2 2025-04-17T18:03:10.165238Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:10.165259Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:10.165268Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:10.165381Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2899 TClient is connected to server localhost:2899 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:10.606240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:10.624669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:10.702499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:10.823150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:10.886354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:13.121947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344240111842412:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:13.122057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:13.160362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.226586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.258066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.289356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.322231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.360876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.395469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.449193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344240111843074:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:13.449269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:13.449270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344240111843079:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:13.452149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:13.460873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344240111843081:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:13.553395Z node 2 :TX_PROXY ERROR: Actor# [2:7494344240111843136:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:14.704136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:14.969210Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344222931971498:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.969266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPersQueueTest::InflightLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-17T17:59:12.158280Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:12.158367Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-17T17:59:12.179126Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:12.205830Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-17T17:59:12.219065Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-17T17:59:12.221541Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-17T17:59:12.227916Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] 
bootstrapping 1 [1:186:2199] 2025-04-17T17:59:12.231312Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-17T17:59:12.242539Z node 1 :PERSQUEUE INFO: new Cookie default|57d08a93-14e33533-9a54bfcd-31e35840_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:12.249433Z node 1 :PERSQUEUE INFO: new Cookie default|46d0de90-6abfe242-5f3cd6a6-37ce03b4_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:12.259321Z node 1 :PERSQUEUE INFO: new Cookie default|d91c73da-b0032706-9ef4e8c4-511495c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:12.275366Z node 1 :PERSQUEUE INFO: new Cookie default|2e2c57d-33a111d3-500844af-e4a9ea53_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:12.281929Z node 1 :PERSQUEUE INFO: new Cookie default|4398fe8a-f74e23b-16f00544-a87d3418_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:12.287719Z node 1 :PERSQUEUE INFO: new Cookie default|992ad1cd-393c88d9-dd94bdc-d5ba0c90_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-17T17:59:13.192408Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:13.192498Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:182:2057] recipient: [2:181:2193] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] 2025-04-17T17:59:13.328492Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:13.328583Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
new actor is[2:184:2194] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:258:2057] recipient: [2:14:2061] 2025-04-17T17:59:15.057822Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:15.059001Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-17T17:59:15.060014Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:264:2256] 2025-04-17T17:59:15.063848Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:264:2256] 2025-04-17T17:59:15.066065Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:265:2257] 2025-04-17T17:59:15.067931Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:265:2257] 2025-04-17T17:59:15.084573Z node 2 :PERSQUEUE INFO: new Cookie default|7f4a27ff-335d9a78-b15b5298-351d945e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:15.105929Z node 2 :PERSQUEUE INFO: new Cookie default|601dd312-c9572f6f-af58298e-e353ee6f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:15.145245Z node 2 :PERSQUEUE INFO: new Cookie default|14679a1f-d95bc9a5-adef373b-55af4422_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:15.205799Z node 2 :PERSQUEUE INFO: new Cookie default|25dbc13a-9c5af023-28052c36-591f27f4_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:15.227758Z node 2 :PERSQUEUE INFO: new Cookie default|db2c5c1d-d9a995bb-af471644-d1bc004e_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-17T17:59:15.243007Z node 2 :PERSQUEUE INFO: new Cookie default|bd38a695-626d9641-f4273c05-420c79cd_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-17T17:59:16.078524Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:16.078593Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 
72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-04-17T17:59:16.117412Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:16.117475Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-04-17T17:59:17.920736Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T17:59:17.921463Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-17T17:59:17.922258Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-04-17T17:59:17.924504Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-04-17T17:59:17.926034Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-04-17T17:59:17.939425Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-04-17T17:59:17.959717Z node 3 :PERSQUEUE INFO: new Cookie default|35f3d2-1a4056e2-38b72a5e-4bf24d74_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owne ... 
nnected; active server actors: 1 2025-04-17T18:03:15.776423Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:507:2501] connected; active server actors: 1 2025-04-17T18:03:15.777780Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:512:2506] connected; active server actors: 1 2025-04-17T18:03:15.779316Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:517:2511] connected; active server actors: 1 2025-04-17T18:03:15.780940Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:522:2516] connected; active server actors: 1 2025-04-17T18:03:15.782680Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:527:2521] connected; active server actors: 1 2025-04-17T18:03:15.784468Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:532:2526] connected; active server actors: 1 2025-04-17T18:03:15.786249Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:537:2531] connected; active server actors: 1 2025-04-17T18:03:15.787972Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:542:2536] connected; active server actors: 1 2025-04-17T18:03:15.789662Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:547:2541] connected; active server actors: 1 2025-04-17T18:03:15.791270Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:552:2546] connected; active server actors: 1 2025-04-17T18:03:15.793080Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:557:2551] connected; active server actors: 1 2025-04-17T18:03:15.794980Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:562:2556] connected; active server actors: 1 2025-04-17T18:03:15.797135Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:567:2561] connected; active server actors: 1 2025-04-17T18:03:15.799445Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:572:2566] connected; active server actors: 1 2025-04-17T18:03:15.801019Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:577:2571] connected; active server actors: 1 2025-04-17T18:03:15.802661Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:582:2576] connected; active server actors: 1 2025-04-17T18:03:15.807305Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:587:2581] connected; active server actors: 1 2025-04-17T18:03:15.811152Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:592:2586] connected; active server actors: 1 2025-04-17T18:03:15.819790Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:597:2591] connected; active server actors: 1 2025-04-17T18:03:15.827920Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:602:2596] connected; active server actors: 1 2025-04-17T18:03:15.831556Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:607:2601] connected; active server actors: 1 2025-04-17T18:03:15.836122Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:612:2606] connected; active server actors: 1 
2025-04-17T18:03:15.844017Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:617:2611] connected; active server actors: 1 2025-04-17T18:03:15.847627Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:622:2616] connected; active server actors: 1 2025-04-17T18:03:15.849913Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:627:2621] connected; active server actors: 1 2025-04-17T18:03:15.851906Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:632:2626] connected; active server actors: 1 2025-04-17T18:03:15.853766Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:637:2631] connected; active server actors: 1 2025-04-17T18:03:15.855667Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:642:2636] connected; active server actors: 1 2025-04-17T18:03:15.857464Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:647:2641] connected; active server actors: 1 2025-04-17T18:03:15.859381Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:652:2646] connected; active server actors: 1 2025-04-17T18:03:15.861226Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:657:2651] connected; active server actors: 1 2025-04-17T18:03:15.863108Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:662:2656] connected; active server actors: 1 2025-04-17T18:03:15.865066Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:667:2661] connected; active server actors: 1 2025-04-17T18:03:15.866975Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:672:2666] connected; active server actors: 1 2025-04-17T18:03:15.868752Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:677:2671] connected; active server actors: 1 2025-04-17T18:03:15.870141Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:682:2676] connected; active server actors: 1 2025-04-17T18:03:15.871942Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:687:2681] connected; active server actors: 1 2025-04-17T18:03:15.873724Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:692:2686] connected; active server actors: 1 2025-04-17T18:03:15.875494Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:697:2691] connected; active server actors: 1 2025-04-17T18:03:15.879401Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:702:2696] connected; active server actors: 1 2025-04-17T18:03:15.881268Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:707:2701] connected; active server actors: 1 2025-04-17T18:03:15.883133Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:712:2706] connected; active server actors: 1 2025-04-17T18:03:15.885201Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:717:2711] connected; active server actors: 1 2025-04-17T18:03:15.886998Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:722:2716] connected; active server actors: 1 2025-04-17T18:03:15.888783Z node 131 
:PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:727:2721] connected; active server actors: 1 2025-04-17T18:03:15.890681Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:732:2726] connected; active server actors: 1 2025-04-17T18:03:15.892637Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:737:2731] connected; active server actors: 1 2025-04-17T18:03:15.894559Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:742:2736] connected; active server actors: 1 2025-04-17T18:03:15.899449Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:747:2741] connected; active server actors: 1 2025-04-17T18:03:15.901757Z node 131 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [131:752:2746], now have 1 active actors on pipe 2025-04-17T18:03:15.902907Z node 131 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [131:755:2749], now have 1 active actors on pipe 2025-04-17T18:03:15.904035Z node 131 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [131:758:2752], now have 1 active actors on pipe 2025-04-17T18:03:15.905137Z node 131 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [131:761:2755] connected; active server actors: 1 2025-04-17T18:03:16.541451Z node 132 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:03:16.541544Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:03:16.586223Z node 132 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:03:16.586318Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:03:16.589406Z node 132 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:03:16.590685Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 135 actor [132:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 135 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 135 ReadRuleGenerations: 135 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 135 Important: false } Consumers { Name: "aaa" Generation: 135 Important: true } 2025-04-17T18:03:16.591412Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [132:245:2245] 2025-04-17T18:03:16.592534Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [132:245:2245] 2025-04-17T18:03:16.594095Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [132:243:2243] 2025-04-17T18:03:16.594959Z node 132 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [132:243:2243] 2025-04-17T18:03:16.630514Z node 132 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:03:16.630590Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:03:16.631324Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [132:322:2305] 2025-04-17T18:03:16.632735Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [132:324:2307] 2025-04-17T18:03:16.637981Z node 132 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:03:16.638058Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [132:322:2305] 2025-04-17T18:03:16.638475Z node 132 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:03:16.638516Z node 132 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [132:324:2307] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 32408, MsgBus: 23996 2025-04-17T18:02:57.706249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344172572711070:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:57.706551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a26/r3tmp/tmpjQmUcV/pdisk_1.dat 2025-04-17T18:02:58.503425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:58.503543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:58.517872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:58.547490Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32408, node 1 2025-04-17T18:02:58.750730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:58.750772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:58.750784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:58.750879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23996 TClient is connected to server localhost:23996 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:59.894511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:59.928817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.144521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.437704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:00.537875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:02.228006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344194047549170:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.228109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:02.636758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.688619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.717397Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344172572711070:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:02.724206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:02.736477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.805641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.853324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.910861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:02.953321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:03.041868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198342517135:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.041947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.042037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344198342517140:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:03.046077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:03.056941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344198342517142:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:03.162727Z node 1 :TX_PROXY ERROR: Actor# [1:7494344198342517199:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:04.228611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25850, MsgBus: 11164 2025-04-17T18:03:05.628933Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344205407765705:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:05.628982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a26/r3tmp/tmp3X8kcI/pdisk_1.dat 2025-04-17T18:03:05.880099Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:05.896913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:05.896991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:05.898145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25850, node 2 2025-04-17T18:03:06.045767Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:06.045789Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:06.045799Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:06.045899Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11164 TClient is connected to server localhost:11164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:06.606078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:06.629400Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:06.640851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:06.786682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:07.066169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:07.154001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.189721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222587636651:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.189814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.237649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.269451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.299191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.326901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.354638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.427765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.483493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:09.544817Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222587637316:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.544895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.545126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344222587637321:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:09.548429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:09.563467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344222587637323:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:09.637153Z node 2 :TX_PROXY ERROR: Actor# [2:7494344222587637380:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:10.574956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.615509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:03:10.641549Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344205407765705:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:10.641603Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:10.667236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> BackupRestoreS3::TestAllPrimitiveTypes-DATE32 [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME64 >> DstCreator::ReplicationModeMismatch >> DstCreator::ColumnsSizeMismatch >> DstCreator::GlobalConsistency >> DstCreator::SameOwner >> DstCreator::Basic >> DstCreator::ExistingDst >> DstCreator::WithSyncIndex >> DstCreator::NonExistentSrc >> DstCreator::WithIntermediateDir |84.7%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink >> KqpImmediateEffects::InsertExistingKey-UseSink >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi >> KqpEffects::InsertAbort_Params_Conflict-UseSink |84.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit >> BackupRestore::TestAllPrimitiveTypes-DYNUMBER [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsert >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::ReplaceExistingKey >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertExistingKey ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllPrimitiveTypes-DYNUMBER [GOOD] Test command err: 2025-04-17T18:02:05.009834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343948524124216:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:05.010012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpuhD8LU/pdisk_1.dat 2025-04-17T18:02:06.075525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:06.132303Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:06.158586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:06.158687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:06.167262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17050, node 1 2025-04-17T18:02:07.173264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:07.173289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:07.173313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:07.173454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:09.291864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:09.766987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343965703994358:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:09.767096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:10.009798Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343948524124216:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:10.009887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:12.482769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/"Create temporary directory "/Root/~backup_20250417T180212" in databaseProcess "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250417T180212/table" }Backup table "/Root/~backup_20250417T180212/table" to "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table"Describe table "/Root/~backup_20250417T180212/table"Write scheme into "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table/permissions.pb"Read table "/Root/~backup_20250417T180212/table"Write data into "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table/data_00.csv"Drop table "/Root/~backup_20250417T180212/table"2025-04-17T18:02:13.537886Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T18:02:13.537921Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found Remove temporary directory "/Root/~backup_20250417T180212" in database2025-04-17T18:02:13.574786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-17T18:02:13.608866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343982883864535:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:13.608949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:13.704304Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:02:13.704348Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table"Read scheme from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table" to "/Root/table"2025-04-17T18:02:13.812255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-04-17T18:02:13.997593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:02:14.113894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVORzmJ/table/permissions.pb"2025-04-17T18:02:14.407503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-17T18:02:15.989133Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494343991931363257:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:15.989226Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpC7cQXq/pdisk_1.dat 2025-04-17T18:02:16.542829Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:16.589104Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:16.589187Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:16.592982Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63560, node 4 2025-04-17T18:02:16.747012Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:16.747045Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:16.747053Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-17T18:02:16.747212Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:16.928030Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:19.998440Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344009111233442:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:19.998560Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:20.021153Z node 4 :FLAT_TX_SCHEMESHARD WARN: O ... :11.799557Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup completed successfullyRestore "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/" to "/Root"Resolved db base path: "/Root"2025-04-17T18:03:11.872567Z node 31 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 31, TabletId: 72075186224037888 not found Restore folder "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/" to "/Root"Process "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/Datetime64Table"Read scheme from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/Datetime64Table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/Datetime64Table" to "/Root/Datetime64Table"2025-04-17T18:03:11.893458Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Created "/Root/Datetime64Table"Read data from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/Datetime64Table/data_00.csv"2025-04-17T18:03:12.035028Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2ephptd6qm87ycw3c14n0s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=MjRhNjNiZmItYjIzZjU1MjMtYTllYjNjNWUtMmRkMzJiNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/Datetime64Table" to "/Root/Datetime64Table"Read ACL from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpPcKMR6/Datetime64Table/permissions.pb"2025-04-17T18:03:12.062154Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T18:03:12.186503Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2epht91fwhsknq367nsx74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=Yjg0YWI3MzItZWFmMWNjNjEtM2FmYjZkYzgtOTYwMDA1NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:03:14.130558Z node 34 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7494344244080137635:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.130620Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpEF92Nq/pdisk_1.dat 2025-04-17T18:03:14.270191Z node 34 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:14.311935Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:14.312052Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:14.315367Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11794, node 34 2025-04-17T18:03:14.368774Z node 34 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:14.368809Z node 34 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:14.368816Z node 34 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:14.368964Z node 34 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:14.600627Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.058057Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [34:7494344261260007895:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.058133Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [34:7494344261260007906:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.058213Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.062677Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.095043Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [34:7494344261260007909:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:03:18.165485Z node 34 :TX_PROXY ERROR: Actor# [34:7494344261260007986:2688] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:18.195168Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.413335Z node 34 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2epqxyebspnphm8w8gxqhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=34&id=N2I2MTBlNzktYzBkNmU3YWItNzExZDdlOTAtZjk0MDkzNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:18.570729Z node 34 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2epr134a5ekkzm8cer8jtn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=34&id=N2I2MTBlNzktYzBkNmU3YWItNzExZDdlOTAtZjk0MDkzNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/"Create temporary directory "/Root/~backup_20250417T180318" in databaseProcess "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable"Copy tables: { src: "/Root/DyNumberTable", dst: "/Root/~backup_20250417T180318/DyNumberTable" }Backup table "/Root/~backup_20250417T180318/DyNumberTable" to "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable"Describe table "/Root/~backup_20250417T180318/DyNumberTable"Write scheme into "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable/scheme.pb"Describe table "/Root/DyNumberTable"Write ACL into "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable/permissions.pb"Read table "/Root/~backup_20250417T180318/DyNumberTable"Write data into "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable/data_00.csv"Drop table "/Root/~backup_20250417T180318/DyNumberTable"2025-04-17T18:03:19.130743Z node 34 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[34:7494344244080137635:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.130802Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:19.144276Z node 34 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 34, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250417T180318" in database2025-04-17T18:03:19.183151Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/" to "/Root"Resolved db base path: "/Root"2025-04-17T18:03:19.276483Z node 34 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 34, TabletId: 72075186224037888 not found Restore folder "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/" to 
"/Root"Process "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable"Read scheme from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable/scheme.pb"Restore table "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable" to "/Root/DyNumberTable"2025-04-17T18:03:19.297985Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Created "/Root/DyNumberTable"Read data from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable/data_00.csv"2025-04-17T18:03:19.539886Z node 34 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2epryz40s0c2aw01g03f04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=34&id=ZWYwNmVmNDgtYzJhOTliM2EtNmJjZDM4ZWItYWY1MzQ5OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable" to "/Root/DyNumberTable"Read ACL from "/home/runner/.ya/build/build_root/6873/000543/r3tmp/tmpVRdYHM/DyNumberTable/permissions.pb"2025-04-17T18:03:19.582958Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T18:03:19.775287Z node 34 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2eps5pbzkfkscxrpw8nrxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=34&id=N2I2MTBlNzktYzBkNmU3YWItNzExZDdlOTAtZjk0MDkzNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 19959, MsgBus: 21503 2025-04-17T18:03:08.895007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344218215156331:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:08.895063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ee/r3tmp/tmpP3ybLH/pdisk_1.dat 2025-04-17T18:03:09.244834Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19959, node 1 2025-04-17T18:03:09.311083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:09.311160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:09.314713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:09.323971Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:09.323988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:09.324008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:09.324134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21503 TClient is connected to server localhost:21503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:09.809503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:09.836995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:09.999419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:10.166795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:10.226811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:11.771168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344231100059994:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:11.771307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.087828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.152279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.175244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.198569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.227331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.296022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.363378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:12.425355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344235395027960:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.425440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.425503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344235395027965:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:12.429423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:12.440152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344235395027967:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:12.539464Z node 1 :TX_PROXY ERROR: Actor# [1:7494344235395028025:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:13.805240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:13.895236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344218215156331:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:13.895335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27346, MsgBus: 4445 2025-04-17T18:03:15.574558Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344248189659898:2282];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:15.574768Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ee/r3tmp/tmp83R3kL/pdisk_1.dat 2025-04-17T18:03:15.696236Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27346, node 2 2025-04-17T18:03:15.746995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.747080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.749591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:15.834749Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.834771Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.834778Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.834886Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4445 TClient is connected to server localhost:4445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.265378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.271151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:16.283351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.358783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.545146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.626371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.302315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344261074563291:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.302405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.351439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.381039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.416266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.448515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.486036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.531986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.590048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.668706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344261074563954:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.668812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.669178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344261074563959:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.672832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.695875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344261074563961:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:18.780683Z node 2 :TX_PROXY ERROR: Actor# [2:7494344261074564018:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.895335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.568726Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344248189659898:2282];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:20.568841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DstCreator::WithSyncIndex [GOOD] >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn >> KqpCost::IndexLookup-useSink >> KqpCost::ScanQueryRangeFullScan-SourceRead >> KqpCost::IndexLookup+useSink >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-04-17T18:03:19.260022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344266076860962:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.260100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001947/r3tmp/tmpndfeWO/pdisk_1.dat 2025-04-17T18:03:19.742024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:19.800484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.800564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.806828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25122 TServer::EnableGrpc on GrpcPort 13687, node 1 2025-04-17T18:03:20.186982Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.187008Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.187015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.187118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25122 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.702337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.719660Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:20.728871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913001034 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000761 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913001034 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-04-17T18:03:21.070996Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:21.071129Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:21.071317Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:21.071831Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.382346Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913001034, tx_id: 281474976710658 } } } 2025-04-17T18:03:22.386711Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.388798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:22.391697Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T18:03:22.391719Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2025-04-17T18:03:22.434359Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T18:03:22.435762Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002469 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } 
Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemF ... 
pliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2025-04-17T18:03:22.465241Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex 
CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002469 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002469 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002469 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002469 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> KqpCost::IndexLookupAndTake-useSink >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> KqpCost::IndexLookupJoin-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] Test command err: === Start server === Server->StartServer(false); 2025-04-17T17:57:34.665649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342785380593029:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:34.665736Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:34.717447Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342784386876546:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:34.717477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:35.149711Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:35.219801Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005e9/r3tmp/tmpmVcZqB/pdisk_1.dat 2025-04-17T17:57:35.747442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:35.768922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:35.979963Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:35.982900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:35.982987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:35.997723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:35.997796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:36.017058Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:36.017180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:36.022507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25808, node 1 2025-04-17T17:57:36.314791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0005e9/r3tmp/yandexJhEhEg.tmp 2025-04-17T17:57:36.314816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0005e9/r3tmp/yandexJhEhEg.tmp 2025-04-17T17:57:36.314974Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0005e9/r3tmp/yandexJhEhEg.tmp 2025-04-17T17:57:36.315088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:36.463297Z INFO: TTestServer started on Port 7165 GrpcPort 25808 TClient is connected to server localhost:7165 PQClient connected to localhost:25808 === TenantModeEnabled() = 0 === Init PQ - start server on port 25808 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T17:57:37.432260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:57:37.432452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:37.432622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:57:37.432901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:57:37.432930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:37.435236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:37.435352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:57:37.435484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:37.435511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:57:37.435523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-04-17T17:57:37.435533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 2025-04-17T17:57:37.443164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:37.443209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:57:37.443223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 waiting... 
2025-04-17T17:57:37.461770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:37.461802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:37.461836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-17T17:57:37.461866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-04-17T17:57:37.466466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:57:37.470565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-17T17:57:37.470584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-04-17T17:57:37.470618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-17T17:57:37.472931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-04-17T17:57:37.473063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:57:37.479115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912657523, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:37.479248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912657523 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:57:37.479278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-17T17:57:37.479579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-04-17T17:57:37.479624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-17T17:57:37.480005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:57:37.480071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:57:37.482001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:57:37.482025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:57:37.482162Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:57:37.482191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494342793970528074:2402], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-04-17T17:57:37.482226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:37.482243Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-04-17T17:57:37.482303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-17T17:57:37.482312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-17T17:57:37.482328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-17T17:57:37.482337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-17T17:57:37.482348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-04-17T17:57:37.482364Z node 1 :FLAT_TX_SCHEME ... K, TabletId: 72075186224037910, NodeId 25, Generation: 1 2025-04-17T18:03:05.405259Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [25:7494344206111196448:2736], now have 1 active actors on pipe 2025-04-17T18:03:05.405296Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-17T18:03:05.405330Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-17T18:03:05.405459Z node 25 :PERSQUEUE INFO: new Cookie test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0 generated for partition 7 topic 'rt3.dc1--account--topic100' owner test-src-id-compat2 2025-04-17T18:03:05.405565Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 7 2025-04-17T18:03:05.405619Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-17T18:03:05.406853Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744912985406 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:03:05.406983Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. 
Init response: session_id: "test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T18:03:05.407233Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write 1 messages with Id from 1 to 1 2025-04-17T18:03:05.405919Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-17T18:03:05.405936Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-17T18:03:05.406006Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-17T18:03:05.406145Z node 25 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0 2025-04-17T18:03:05.407821Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session: try to update token 2025-04-17T18:03:05.407898Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-17T18:03:05.408381Z node 25 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:03:05.408633Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-17T18:03:05.408821Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-17T18:03:05.408848Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-17T18:03:05.408949Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2025-04-17T18:03:05.409008Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:03:05.409174Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-17T18:03:05.409189Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-17T18:03:05.409238Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2025-04-17T18:03:05.409423Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2025-04-17T18:03:05.443277Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-04-17T18:03:05.444633Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 
'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1744912985444 2025-04-17T18:03:05.444854Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:03:05.444881Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- delete ---------------- 2025-04-17T18:03:05.444907Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] [x0000000007, x0000000008) 2025-04-17T18:03:05.444935Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- write ----------------- 2025-04-17T18:03:05.444966Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] m0000000007ptest-src-id-compat2 2025-04-17T18:03:05.444979Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] d0000000007_00000000000000000000_00000_0000000001_00000| 2025-04-17T18:03:05.444993Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] i0000000007 2025-04-17T18:03:05.445018Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- rename ---------------- 2025-04-17T18:03:05.445045Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] =========================== 2025-04-17T18:03:05.445126Z node 25 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:03:05.445259Z node 25 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 7 offset 0 partNo 0 count 1 size 177 2025-04-17T18:03:05.451609Z node 25 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 7 offset 0 count 1 size 177 actorID [25:7494344197521260288:2508] 2025-04-17T18:03:05.451764Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:03:05.451824Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. 
Partition: 7 2025-04-17T18:03:05.451889Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-17T18:03:05.452145Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-17T18:03:05.452181Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-17T18:03:05.452278Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2025-04-17T18:03:05.452451Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:03:05.452881Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-17T18:03:05.452918Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-17T18:03:05.452972Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-17T18:03:05.452998Z node 25 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:03:05.453568Z node 25 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037910' partition 7 offset 0 partno 0 count 1 parts 0 size 177 2025-04-17T18:03:05.453652Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 8 queued_in_partition_duration_ms: 36 } 2025-04-17T18:03:05.453642Z node 25 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1744912985406 queuesize 0 startOffset 0 2025-04-17T18:03:05.453710Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session: acknoledged message 1 2025-04-17T18:03:05.453963Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session: close. 
Timeout = 0 ms 2025-04-17T18:03:05.454049Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session will now close 2025-04-17T18:03:05.454119Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session: aborting 2025-04-17T18:03:05.454670Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:03:05.454737Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0] Write session: destroy 2025-04-17T18:03:05.455447Z node 25 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0 grpc read done: success: 0 data: 2025-04-17T18:03:05.455478Z node 25 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0 grpc read failed 2025-04-17T18:03:05.455873Z node 25 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0 2025-04-17T18:03:05.455907Z node 25 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|2bae405a-55d3ec1c-a7e06f08-77adbdd9_0 is DEAD 2025-04-17T18:03:05.456520Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:03:05.457248Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [25:7494344206111196448:2736] destroyed 2025-04-17T18:03:05.457304Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. 
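The trace above covers one complete single-message topic write: session init pins source id "test-src-id-compat2" to partition 7 and advertises the supported codecs, the single write is assigned SeqNo 1 and persisted at Offset 0, the server acks with write statistics, and the session closes gracefully. For reference, a minimal client-side sketch of the same lifecycle, assuming the public YDB C++ topic SDK (NYdb::NTopic); the endpoint, database, and include paths are placeholders, and setter names vary between SDK releases:

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

int main() {
    // Assumption: a local single-node server, as in the test harness above.
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2135")
        .SetDatabase("/Root"));
    NYdb::NTopic::TTopicClient client(driver);

    // MessageGroupId plays the role of the source id in the log; the server
    // chose partition 7 for "test-src-id-compat2" during session init.
    auto session = client.CreateSimpleBlockingWriteSession(
        NYdb::NTopic::TWriteSessionSettings()
            .Path("account/topic100")
            .MessageGroupId("test-src-id-compat2"));

    session->Write("payload");                // acked as SeqNo 1, Offset 0
    session->Close(TDuration::Seconds(10));   // graceful close, as logged
    driver.Stop(true);
}

The blocking session hides the init and ack round-trips that the log above records as separate PQ_WRITE_PROXY and PERSQUEUE events.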
2025-04-17T18:03:06.610852Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T18:03:06.610901Z node 25 :IMPORT WARN: Table profiles were not loaded
>> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD]
>> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad
>> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME64 [GOOD]
>> BackupRestoreS3::TestAllPrimitiveTypes-DYNUMBER
>> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD]
>> KqpImmediateEffects::UpsertConflictInteractiveTxAborted
>> KqpWrite::CastValuesOptional [GOOD]
>> KqpCost::OlapRange
>> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK
>> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink
>> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD]
>> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD]
>> DstCreator::ColumnTypeMismatch [GOOD]
>> KqpImmediateEffects::UpsertAfterInsert [GOOD]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1
>> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD]
>> KqpImmediateEffects::UpdateAfterUpsert [GOOD]
>> DstCreator::KeyColumnsSizeMismatch [GOOD]
>> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD]
>> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD]
>> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD]
>> DstCreator::CannotFindColumn [GOOD]
>> DstCreator::ReplicationConsistencyLevelMismatch [GOOD]
>> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD]
Test command err:
Trying to start YDB, gRPC: 7403, MsgBus: 9387
2025-04-17T18:03:14.202305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344245082361182:2081];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:14.208146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ea/r3tmp/tmpNUiFQL/pdisk_1.dat
2025-04-17T18:03:14.543711Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7403, node 1
2025-04-17T18:03:14.580489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:14.580729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:14.586784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:03:14.612200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:14.612235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:14.612250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:14.612421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9387
TClient is connected to server localhost:9387
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:15.153570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.186968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.429772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.627104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.705517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.332240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344257967264805:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.332342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.636581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.706700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.746428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.827047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.865531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.936999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.977182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.064544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262262232772:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.064600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.064776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262262232777:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.067754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.076406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344262262232779:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.137786Z node 1 :TX_PROXY ERROR: Actor# [1:7494344262262232833:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.202479Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344245082361182:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.202564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22637, MsgBus: 6047 2025-04-17T18:03:20.233075Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344269667730410:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:20.233157Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ea/r3tmp/tmpL0Areb/pdisk_1.dat 2025-04-17T18:03:20.378618Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:20.381850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:20.381932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:20.383290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22637, node 2 2025-04-17T18:03:20.458991Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.459011Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.459019Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.459130Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6047 TClient is connected to server localhost:6047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
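Each test leg above repeats the same bootstrap: start a single-node server, connect a TClient, and poll the scheme root until it answers, which the harness logs as "WaitRootIsUp 'Root' success." A hedged sketch of an equivalent readiness probe through the public C++ scheme client; the retry count and backoff here are illustrative, not the harness's actual policy:

#include <util/system/sleep.h>
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

// Poll DescribePath on the database root until the scheme root is published.
bool WaitRootIsUp(NYdb::TDriver& driver, const TString& root = "/Root") {
    NYdb::NScheme::TSchemeClient scheme(driver);
    for (int attempt = 0; attempt < 50; ++attempt) {
        auto result = scheme.DescribePath(root).GetValueSync();
        if (result.IsSuccess()) {
            return true;  // the harness then prints "WaitRootIsUp 'Root' success."
        }
        Sleep(TDuration::MilliSeconds(200));  // root not published yet, retry
    }
    return false;
}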
2025-04-17T18:03:20.940284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.958101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.031194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.186327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.299454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.398329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344282552634072:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.398445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.474346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.544785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.622819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.657978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.706678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.760801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.833824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.958872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344282552634748:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.958965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.959713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344282552634753:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.963542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:23.977027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344282552634756:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:03:24.077269Z node 2 :TX_PROXY ERROR: Actor# [2:7494344286847602108:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:25.232233Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344269667730410:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:25.232292Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> DstCreator::SamePartitionCount [GOOD]
>> DstCreator::WithAsyncIndex [GOOD]
>> DstCreator::KeyColumnNameMismatch [GOOD]
>> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD]
>> DstCreator::EmptyReplicationConfig [GOOD]
>> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD]
>> KqpImmediateEffects::ReplaceExistingKey [GOOD]
>> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 9483, MsgBus: 25244
2025-04-17T18:03:13.977493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344239341248594:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:13.977622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ec/r3tmp/tmp7No3k3/pdisk_1.dat
2025-04-17T18:03:14.357115Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 9483, node 1
2025-04-17T18:03:14.403128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:14.403327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:14.404993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:03:14.434871Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:14.434894Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:14.434902Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:14.435012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:25244
TClient is connected to server localhost:25244
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:15.012436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.025202Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:15.044726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.268323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.473962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.559532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.232224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344256521119538:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.232332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.591564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.628998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.671609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.703719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.730638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.761671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.793386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:17.881671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344256521120199:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.881753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.881994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344256521120204:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.885406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:17.898653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344256521120206:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:17.973990Z node 1 :TX_PROXY ERROR: Actor# [1:7494344256521120263:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:18.977048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.977474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344239341248594:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:18.977556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14891, MsgBus: 21475 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ec/r3tmp/tmpxyqUFU/pdisk_1.dat 2025-04-17T18:03:20.617459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:20.665192Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14891, node 2 2025-04-17T18:03:20.703063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:20.703166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:20.713354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:20.735201Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.735220Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.735229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.735328Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21475 TClient is connected to server localhost:21475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:03:21.183386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.199480Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.212902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.293796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.452629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.529649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.739693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344284735532274:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.739760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.800113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.836885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.878076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.931827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.968716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.008154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.084340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.155428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289030500234:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.155507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.155705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289030500239:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.162148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.176100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344289030500241:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:03:24.265070Z node 2 :TX_PROXY ERROR: Actor# [2:7494344289030500298:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:03:25.461474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 1262, MsgBus: 29145
2025-04-17T18:03:14.918475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344246042117267:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:03:14.930283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b6/r3tmp/tmpjcnuzW/pdisk_1.dat
2025-04-17T18:03:15.548079Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:15.557476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:03:15.557559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:03:15.560718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1262, node 1
2025-04-17T18:03:15.670539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:03:15.670558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:03:15.670660Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:03:15.670779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29145
TClient is connected to server localhost:29145
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:03:16.162057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.196574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.360154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.506146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.597460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.065437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344263221988230:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.065596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.359485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.393067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.431437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.503413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.545152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.591702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.664577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.753519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344263221988901:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.753613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.754614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344263221988906:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.758418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.771191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344263221988910:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.869696Z node 1 :TX_PROXY ERROR: Actor# [1:7494344263221988965:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.916540Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344246042117267:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.916783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:20.133202Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:03:20.142629Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:20.142777Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:20.142954Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344271811923939:2513], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7494344267516956597:2513]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7494344271811923939:2513].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:03:20.143409Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344271811923932:2513], SessionActorId: [1:7494344267516956597:2513], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344267516956597:2513]. isRollback=0 2025-04-17T18:03:20.143630Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTE3MTE5OTctNjgyM2QwYzEtZmI2ZjVmZS1iY2VhYjkxYQ==, ActorId: [1:7494344267516956597:2513], ActorState: ExecuteState, TraceId: 01js2epsj33cn9z098n4k0jvee, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344271811923933:2513] from: [1:7494344271811923932:2513] 2025-04-17T18:03:20.143706Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344271811923933:2513] TxId: 281474976710672. Ctx: { TraceId: 01js2epsj33cn9z098n4k0jvee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTE3MTE5OTctNjgyM2QwYzEtZmI2ZjVmZS1iY2VhYjkxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:20.143863Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTE3MTE5OTctNjgyM2QwYzEtZmI2ZjVmZS1iY2VhYjkxYQ==, ActorId: [1:7494344267516956597:2513], ActorState: ExecuteState, TraceId: 01js2epsj33cn9z098n4k0jvee, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 63649, MsgBus: 15214 2025-04-17T18:03:21.122697Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344275701649504:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.122746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b6/r3tmp/tmptLZGRE/pdisk_1.dat 2025-04-17T18:03:21.222509Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:21.228781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.228848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.231141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63649, node 2 2025-04-17T18:03:21.294663Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.294685Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.294693Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.294791Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15214 TClient is connected to server localhost:15214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:21.723231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.739901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
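The PRECONDITION_FAILED trace above is the standard shape of a KQP constraint violation: the datashard rejects the proposal with STATUS_CONSTRAINT_VIOLATION ("Duplicate keys have been found.", issue code 2012), KQP_COMPUTE surfaces it against `/Root/TwoShard`, and the session aborts the transaction. A hedged reproduction through the public C++ table client; the TwoShard column set is assumed from the KQP test fixtures:

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

// Insert two rows that collide on the primary key in one statement; the shard
// reports issue 2012 and KQP fails the query with PRECONDITION_FAILED.
NYdb::TStatus InsertDuplicateKeys(NYdb::NTable::TTableClient& client) {
    return client.RetryOperationSync([](NYdb::NTable::TSession session) -> NYdb::TStatus {
        return session.ExecuteDataQuery(R"(
            INSERT INTO `/Root/TwoShard` (Key, Value1, Value2)
            VALUES (1u, "One", 1), (1u, "AlsoOne", 1);
        )", NYdb::NTable::TTxControl::BeginTx().CommitTx())
            .GetValueSync();
    });
}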
2025-04-17T18:03:21.829871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.980318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.080670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.046961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288586553116:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.047063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.103850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.153299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.186032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.217623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.246596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.294390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.333299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.393664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288586553776:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.393753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.393828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288586553781:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.397058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.406408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344288586553783:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.494565Z node 2 :TX_PROXY ERROR: Actor# [2:7494344288586553839:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.922216Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344292881521491:2515], TxId: 281474976715673, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTBhNTE3M2EtZDAxZGViZDMtNGY1ODBhOTItZjcxOTM2Yw==. CustomerSuppliedId : . TraceId : 01js2epz2s6y2hvxjv30yex12e. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:03:25.922441Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344292881521493:2516], TxId: 281474976715673, task: 2. Ctx: { TraceId : 01js2epz2s6y2hvxjv30yex12e. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTBhNTE3M2EtZDAxZGViZDMtNGY1ODBhOTItZjcxOTM2Yw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494344292881521488:2503], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:25.922708Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTBhNTE3M2EtZDAxZGViZDMtNGY1ODBhOTItZjcxOTM2Yw==, ActorId: [2:7494344292881521434:2503], ActorState: ExecuteState, TraceId: 01js2epz2s6y2hvxjv30yex12e, Create QueryResponse for error on request, msg: 2025-04-17T18:03:26.123121Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344275701649504:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.158386Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-04-17T18:03:19.257477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344268958459423:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.257510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001939/r3tmp/tmp0NGtvA/pdisk_1.dat 2025-04-17T18:03:19.809667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.809744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.810640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:19.812204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27308 TServer::EnableGrpc on GrpcPort 1169, node 1 2025-04-17T18:03:20.190887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.190912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.190921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.191022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:03:20.685771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.715967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.852516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000740 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000922 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000740 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000922 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T18:03:20.900447Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.900584Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.900604Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.901414Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.402588Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913000859, tx_id: 281474976710658 } } } 2025-04-17T18:03:22.402913Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.404948Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:22.406461Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000922 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594 ... 
RN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61280 TServer::EnableGrpc on GrpcPort 10335, node 2 2025-04-17T18:03:23.591745Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:23.591763Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:23.591771Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:23.591866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61280 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:23.964692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.978536Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:23.984149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.045539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004016 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004114 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004016 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004114 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-17T18:03:24.085439Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.085554Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.085565Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.086102Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.237496Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004072, tx_id: 281474976715658 } } } 2025-04-17T18:03:26.237808Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.239223Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:26.240129Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004114 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:03:26.240305Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 10417, MsgBus: 61251 2025-04-17T18:03:15.055318Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344248569739469:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:15.056209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b4/r3tmp/tmpZ1HBS1/pdisk_1.dat 2025-04-17T18:03:15.478731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:15.510564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.510658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.512350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10417, node 1 2025-04-17T18:03:15.637732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.637753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.637758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.637852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61251 TClient is connected to server localhost:61251 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.175619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.188234Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:16.194493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.320166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.502976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.573166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.143051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344261454643136:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.143154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.444807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.477604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.512369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.549719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.628910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.664010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.709523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.804189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344261454643803:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.804271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344261454643808:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.804321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.807679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.819046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344261454643810:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.874463Z node 1 :TX_PROXY ERROR: Actor# [1:7494344261454643866:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.888138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.055404Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344248569739469:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:20.055513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13664, MsgBus: 18180 2025-04-17T18:03:21.465968Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344274782612919:2168];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.472820Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b4/r3tmp/tmpFZtzCY/pdisk_1.dat 2025-04-17T18:03:21.572329Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13664, node 2 2025-04-17T18:03:21.614127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.614248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.633773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:21.706676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.706700Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.706707Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.706805Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18180 TClient is connected to server localhost:18180 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:22.138241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.156814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.245551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:22.392292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.469491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.405212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344287667516477:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.405327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.449087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.475387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.500215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.573984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.606835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.678155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.719268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.804295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344287667517152:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.804366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.804701Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344287667517157:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.808008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.821388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344287667517159:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.882856Z node 2 :TX_PROXY ERROR: Actor# [2:7494344287667517216:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.800173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 18606, MsgBus: 26470 2025-04-17T18:03:14.581357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344246950666985:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.581981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d7/r3tmp/tmpcfKkzw/pdisk_1.dat 2025-04-17T18:03:14.994680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:14.998398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:14.998502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.001037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18606, node 1 2025-04-17T18:03:15.155133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.155151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.155160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.155270Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26470 TClient is connected to server localhost:26470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:03:15.821821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:15.869362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:16.041152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.209984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.313580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.927160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344259835570644:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.927277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.272145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.312848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.341156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.414739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.451874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.496362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.531532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.629858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344264130538611:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.629935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.630204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344264130538616:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.634855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.650258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344264130538618:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.735000Z node 1 :TX_PROXY ERROR: Actor# [1:7494344264130538675:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.573772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344246950666985:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.573858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:19.706070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19190, MsgBus: 23709 2025-04-17T18:03:21.010764Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344275657300895:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.010898Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d7/r3tmp/tmpwfMSog/pdisk_1.dat 2025-04-17T18:03:21.095312Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19190, node 2 2025-04-17T18:03:21.149391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.149464Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.152448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:21.176127Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.176148Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.176166Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.176268Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23709 TClient is connected to server localhost:23709 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:21.621107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.627187Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.640423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.722727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.965429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:22.041686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.064436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288542204537:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.064498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.102647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.138938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.185218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.230128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.294451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.367300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.437777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.544302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288542205216:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.544388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.544807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288542205221:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.548425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.566578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344288542205223:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.621066Z node 2 :TX_PROXY ERROR: Actor# [2:7494344288542205277:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.685600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.018300Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344275657300895:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.018499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2025-04-17T18:03:19.342282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344268451140940:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.420189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00195b/r3tmp/tmpbUVzpc/pdisk_1.dat 2025-04-17T18:03:19.817838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.817924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.820042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:19.867096Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17854 TServer::EnableGrpc on GrpcPort 18100, node 1 2025-04-17T18:03:20.183331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.183359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.183374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.183513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17854 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.741535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.753854Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000796 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000796 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... 
(TRUNCATED) 2025-04-17T18:03:20.758732Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.758999Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.759114Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.759925Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.465088Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-04-17T18:03:22.465151Z node 1 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2025-04-17T18:03:23.223357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344283003139907:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:23.223450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00195b/r3tmp/tmpG9l5dd/pdisk_1.dat 2025-04-17T18:03:23.430507Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:23.458286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:23.458363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:23.460787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26427 TServer::EnableGrpc on GrpcPort 32321, node 2 2025-04-17T18:03:23.686812Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:23.686834Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:23.686839Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:23.686940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:26427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:24.058449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.069282Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:24.072668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:24.180261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004107 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913004247 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004107 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913004247 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-17T18:03:24.209137Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.209273Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.209300Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.209701Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.401557Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004184, tx_id: 281474976710658 } } } 2025-04-17T18:03:26.401898Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.403240Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:26.404606Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913004247 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:03:26.404768Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 15290, MsgBus: 22958 2025-04-17T18:03:14.878984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344247740988783:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.879064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a6/r3tmp/tmpd7K1ao/pdisk_1.dat 2025-04-17T18:03:15.322538Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:15.338550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.338656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.342974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15290, node 1 2025-04-17T18:03:15.475902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.475915Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.475924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.476001Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22958 TClient is connected to server localhost:22958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.007806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.025484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:16.037707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:16.173514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.365392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:16.433280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.934265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344260625892453:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.934390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.215434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.250644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.280855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.312649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.344187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.399558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.472962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.578364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344264920860416:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.578456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.578730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344264920860421:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.581552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.590919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344264920860423:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.681375Z node 1 :TX_PROXY ERROR: Actor# [1:7494344264920860480:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.705500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.886280Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344247740988783:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.886419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18112, MsgBus: 22433 2025-04-17T18:03:21.102636Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344276254579180:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.102904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a6/r3tmp/tmpckjKBR/pdisk_1.dat 2025-04-17T18:03:21.249239Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18112, node 2 2025-04-17T18:03:21.267089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.267161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.278306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:21.306580Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.306599Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.306606Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.306706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22433 TClient is connected to server localhost:22433 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:21.847289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.862700Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.880282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.954930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.125641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.197623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.042216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289139482841:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.042304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.082790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.169533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.249174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.286044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.329133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.403188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.447906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.527859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289139483512:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.527958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.528436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289139483517:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.531168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.539065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344289139483519:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.620332Z node 2 :TX_PROXY ERROR: Actor# [2:7494344289139483576:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.607216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.102788Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344276254579180:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.102858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-04-17T18:03:19.261824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344265520770210:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.262832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001960/r3tmp/tmpQ6p2xV/pdisk_1.dat 2025-04-17T18:03:19.804150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:19.819870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.819975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.823078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63188 TServer::EnableGrpc on GrpcPort 6702, node 1 2025-04-17T18:03:20.263092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.263113Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.263121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.263231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63188 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.716159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.731288Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:20.737412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913000845 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000775 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913000845 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-17T18:03:20.855174Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.855341Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.855369Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.855987Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.619178Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913000845, tx_id: 281474976710658 } } } 2025-04-17T18:03:22.619448Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.620772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:22.621880Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T18:03:22.621891Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T18:03:22.684937Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 TClient::Ls request: /Root/Replicated 2025-04-17T18:03:22.684965Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002728 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-17T18:03:23.310877Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344284463118581:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:23.310918Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001960/r3tmp/tmpLfhUVM/pdisk_1.dat 2025-04-17T18:03:23.469980Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:23.502312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:23.502385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:23.503279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62372 TServer::EnableGrpc on GrpcPort 17599, node 2 2025-04-17T18:03:23.802727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:23.802764Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:23.802773Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:23.802868Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:24.128407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.135208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:03:24.188335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004177 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004296 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004177 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004296 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T18:03:24.267801Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.268035Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.268047Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.268621Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.505867Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004219, tx_id: 281474976715658 } } } 2025-04-17T18:03:26.506116Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.507362Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:26.508102Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004296 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:03:26.508239Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7057, MsgBus: 26117 2025-04-17T18:03:14.435460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344246399573576:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.435530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019dc/r3tmp/tmp6N5mgj/pdisk_1.dat 2025-04-17T18:03:14.853664Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:14.875274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:14.875382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:14.877717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7057, node 1 2025-04-17T18:03:14.958328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:14.958348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:14.958355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:14.958476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26117 TClient is connected to server localhost:26117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:15.748770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.762942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:15.775336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
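
Note on the DstCreator failure above: the StatusSchemeError "Cannot find column: name: value" is a destination-schema mismatch. The described /Root/Dst table carries columns "key" and "value2", so the controller's lookup for the source column "value" fails. Below is a minimal, self-contained C++ sketch of such a column-presence check; TColumn and CheckDstColumns are illustrative names, not the actual NKikimr::NReplication implementation.

// Sketch: verify every source column exists (with a matching type) in the
// destination description, producing errors in the spirit of the log above.
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct TColumn {
    std::string Name;
    std::string Type;
};

// Returns an error string, or an empty string when the destination matches.
std::string CheckDstColumns(const std::vector<TColumn>& srcColumns,
                            const std::vector<TColumn>& dstColumns) {
    for (const auto& src : srcColumns) {
        auto it = std::find_if(dstColumns.begin(), dstColumns.end(),
                               [&](const TColumn& dst) { return dst.Name == src.Name; });
        if (it == dstColumns.end()) {
            return "Cannot find column: name: " + src.Name;
        }
        if (it->Type != src.Type) {
            return "Column type mismatch: name: " + src.Name;
        }
    }
    return {};
}

int main() {
    std::vector<TColumn> src{{"key", "Uint32"}, {"value", "Utf8"}};
    std::vector<TColumn> dst{{"key", "Uint32"}, {"value2", "Utf8"}};  // as described in the log
    std::cout << CheckDstColumns(src, dst) << "\n";  // prints: Cannot find column: name: value
}
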
2025-04-17T18:03:15.927132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.115923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.189267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.807138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344259284477233:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.807233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.155689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.182930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.222506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.260154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.289678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.323505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.374393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.463018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344263579445191:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.463088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.463345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344263579445196:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.466674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.479674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344263579445198:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.545166Z node 1 :TX_PROXY ERROR: Actor# [1:7494344263579445254:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.438313Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344246399573576:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.452055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:19.606969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14236, MsgBus: 63764 2025-04-17T18:03:20.873179Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344270934214184:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:20.873238Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019dc/r3tmp/tmp8qJWJL/pdisk_1.dat 2025-04-17T18:03:21.032378Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:21.066804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.066876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14236, node 2 2025-04-17T18:03:21.068240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:21.125203Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.125222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.125229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.125319Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63764 TClient is connected to server localhost:63764 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:03:21.519690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:21.535577Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.555232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.663258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.821225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.893405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.054391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288114085143:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.054482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.114980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.186207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.230070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.278645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.346658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.416105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.485361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.576282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288114085822:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.576410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.576627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344288114085828:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.579807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.588165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344288114085830:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.682076Z node 2 :TX_PROXY ERROR: Actor# [2:7494344288114085885:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.873400Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344270934214184:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:25.873457Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:25.915322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-04-17T18:03:19.256111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344265706230815:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.256373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00196e/r3tmp/tmpe2V38f/pdisk_1.dat 2025-04-17T18:03:19.829997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.830079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.835881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:19.860723Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6099 TServer::EnableGrpc on GrpcPort 4047, node 1 2025-04-17T18:03:20.192415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.192433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.192439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.192540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6099 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.681855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:20.717846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.863634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000754 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000943 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000754 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000943 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T18:03:20.913865Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.914097Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.914140Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.914797Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.566401Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913000831, tx_id: 281474976710658 } } } 2025-04-17T18:03:22.566685Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.567860Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:22.569591Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000943 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594 ... 
968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11496 TServer::EnableGrpc on GrpcPort 3703, node 2 2025-04-17T18:03:23.850735Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:23.850757Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:23.850763Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:23.850851Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:24.204455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.208265Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:24.210402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.258783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004254 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004366 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004254 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004366 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-17T18:03:24.336292Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.336396Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.336406Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.336986Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.674485Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004289, tx_id: 281474976715658 } } } 2025-04-17T18:03:26.674750Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.676168Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:26.677131Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004366 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:03:26.677299Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 19492, MsgBus: 22321 2025-04-17T18:03:14.689091Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344244825677372:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.689130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c9/r3tmp/tmpqvJptn/pdisk_1.dat 2025-04-17T18:03:15.179047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.179180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.180792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:15.184007Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19492, node 1 2025-04-17T18:03:15.449002Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.449041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.449055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.449173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22321 TClient is connected to server localhost:22321 WaitRootIsUp 'Root'... 
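
Note on DstCreator::ReplicationConsistencyLevelMismatch above: the destination table was described with ReplicationConfig ConsistencyLevel CONSISTENCY_LEVEL_GLOBAL while the controller expected CONSISTENCY_LEVEL_ROW, hence "Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1". A minimal sketch of that validation follows, assuming the numeric mapping UNKNOWN=0, GLOBAL=1, ROW=2 (inferred from "got: 1" and the described config); the enum and check are illustrative, not the actual protobuf or controller code.

#include <cstdint>
#include <iostream>
#include <string>

// Assumed numeric mapping, inferred from the log line "got: 1" plus the
// CONSISTENCY_LEVEL_GLOBAL value in the described ReplicationConfig.
enum class EConsistencyLevel : uint32_t {
    Unknown = 0,
    Global = 1,
    Row = 2,
};

const char* Name(EConsistencyLevel level) {
    switch (level) {
        case EConsistencyLevel::Unknown: return "CONSISTENCY_LEVEL_UNKNOWN";
        case EConsistencyLevel::Global:  return "CONSISTENCY_LEVEL_GLOBAL";
        case EConsistencyLevel::Row:     return "CONSISTENCY_LEVEL_ROW";
    }
    return "?";
}

// Mirrors the log's message shape: expected printed by name, got by number.
std::string CheckConsistencyLevel(EConsistencyLevel expected, EConsistencyLevel got) {
    if (got != expected) {
        return std::string("Replication consistency level mismatch: expected: ")
             + Name(expected) + ", got: " + std::to_string(static_cast<uint32_t>(got));
    }
    return {};
}

int main() {
    std::cout << CheckConsistencyLevel(EConsistencyLevel::Row, EConsistencyLevel::Global)
              << "\n";  // matches the error text in the log above
}
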
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.021692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.051157Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:16.065733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.190688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.372812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.462968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.069403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262005548320:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.069519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.404077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.442994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.479440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.505095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.538215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.587275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.657368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.738470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262005548985:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.738547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.738828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262005548990:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.746949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.759045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344262005548992:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.835773Z node 1 :TX_PROXY ERROR: Actor# [1:7494344262005549049:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.690282Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344244825677372:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.690369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:19.896056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15770, MsgBus: 14026 2025-04-17T18:03:21.194558Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344274299802639:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.194797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c9/r3tmp/tmpJaV5IU/pdisk_1.dat 2025-04-17T18:03:21.393011Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:21.394954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.395013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.399194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15770, node 2 2025-04-17T18:03:21.510667Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.510688Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.510695Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.510796Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14026 TClient is connected to server localhost:14026 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:21.879456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.892004Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.912613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.998766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:22.204366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:22.286352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.327111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344287184706281:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.327204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.373122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.439557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.507413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.538830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.569545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.611495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.681477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.752948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344287184706948:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.753037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.753246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344287184706953:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.757015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.764855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344287184706955:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.864704Z node 2 :TX_PROXY ERROR: Actor# [2:7494344287184707012:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.879757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.194897Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344274299802639:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.194964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:26.446589Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=5; 2025-04-17T18:03:26.446774Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037922 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-17T18:03:26.446919Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037922 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-17T18:03:26.447037Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344295774642265:2514], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [2:7494344291479674642:2514]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[2:7494344295774642265:2514].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-04-17T18:03:26.447629Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344295774642206:2514], SessionActorId: [2:7494344291479674642:2514], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[2:7494344291479674642:2514]. isRollback=0 2025-04-17T18:03:26.447805Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRlNjA3NmUtZjBhYzExNmYtZWMxMGNjMzYtZWQ0NTRiM2U=, ActorId: [2:7494344291479674642:2514], ActorState: ExecuteState, TraceId: 01js2epzrm8136jy3mshfejbxz, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494344295774642259:2514] from: [2:7494344295774642206:2514] 2025-04-17T18:03:26.447862Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344295774642259:2514] TxId: 281474976715676. Ctx: { TraceId: 01js2epzrm8136jy3mshfejbxz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmRlNjA3NmUtZjBhYzExNmYtZWMxMGNjMzYtZWQ0NTRiM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-04-17T18:03:26.448018Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRlNjA3NmUtZjBhYzExNmYtZWMxMGNjMzYtZWQ0NTRiM2U=, ActorId: [2:7494344291479674642:2514], ActorState: ExecuteState, TraceId: 01js2epzrm8136jy3mshfejbxz, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::UpsertExistingKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63366, MsgBus: 25546 2025-04-17T18:03:14.383900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344244015111324:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.384774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e3/r3tmp/tmpNDUeFU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63366, node 1 2025-04-17T18:03:14.806531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:14.806734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:14.808297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:14.808832Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:14.817261Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:03:14.818628Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:03:14.894640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:14.894676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:14.894704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:14.894801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25546 TClient is connected to server localhost:25546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:03:15.501883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.518166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:15.534124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.726134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.913876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.985437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.624480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344256900014960:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.624592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.988124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.022371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.090055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.157836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.185511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.260150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.301264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.395667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344261194982931:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.395747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.395960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344261194982936:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.399263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.411570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344261194982938:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.509748Z node 1 :TX_PROXY ERROR: Actor# [1:7494344261194982997:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.382706Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344244015111324:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.382772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:19.557594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.015895Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-17T18:03:20.027319Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037922 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:20.027499Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037922 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:20.027724Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344269784918122:2513], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [1:7494344265489950628:2513]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[1:7494344269784918122:2513].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:03:20.028254Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344269784918106:2513], SessionActorId: [1:7494344265489950628:2513], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344265489950628:2513]. isRollback=0 2025-04-17T18:03:20.028495Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDEwOGFiZjYtZjdiNDRjYjQtZmEzOTliZWMtYzM1OWYyZmI=, ActorId: [1:7494344265489950628:2513], ActorState: ExecuteState, TraceId: 01js2eps8g8vvvcjpgj7gyrxg2, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344269784918116:2513] from: [1:7494344269784918106:2513] 2025-04-17T18:03:20.028580Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344269784918116:2513] TxId: 281474976710675. Ctx: { TraceId: 01js2eps8g8vvvcjpgj7gyrxg2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDEwOGFiZjYtZjdiNDRjYjQtZmEzOTliZWMtYzM1OWYyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:20.028753Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDEwOGFiZjYtZjdiNDRjYjQtZmEzOTliZWMtYzM1OWYyZmI=, ActorId: [1:7494344265489950628:2513], ActorState: ExecuteState, TraceId: 01js2eps8g8vvvcjpgj7gyrxg2, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 25824, MsgBus: 2840 2025-04-17T18:03:20.807352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344269378196756:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:20.807452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e3/r3tmp/tmphAM28Q/pdisk_1.dat 2025-04-17T18:03:20.895330Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25824, node 2 2025-04-17T18:03:20.957445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:20.957599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:20.975921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:20.994659Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.994684Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.994692Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.994812Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2840 TClient is connected to server localhost:2840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:21.435138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.443806Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.456216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:21.566086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.733303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.832918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.873853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344282263100406:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.873921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:23.946310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.031737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.110889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.144083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.183647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.225896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.268791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.370278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344286558068368:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.370358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.370803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344286558068373:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.374540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.384006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344286558068375:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.462435Z node 2 :TX_PROXY ERROR: Actor# [2:7494344286558068431:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.618968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.807791Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344269378196756:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:25.807864Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:26.454307Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344295148003589:2535], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01js2epze2aphv2y1xxmymcmpr. SessionId : ydb://session/3?node_id=2&id=ZmVmNWU3NzYtYjY5ZTFiN2ItNWQyY2Y1MGEtNWQyNDg4NTU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:03:26.454886Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344295148003590:2536], TxId: 281474976715677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZmVmNWU3NzYtYjY5ZTFiN2ItNWQyY2Y1MGEtNWQyNDg4NTU=. TraceId : 01js2epze2aphv2y1xxmymcmpr. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494344295148003586:2503], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:26.455287Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmVmNWU3NzYtYjY5ZTFiN2ItNWQyY2Y1MGEtNWQyNDg4NTU=, ActorId: [2:7494344290853036023:2503], ActorState: ExecuteState, TraceId: 01js2epze2aphv2y1xxmymcmpr, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-04-17T18:03:19.355437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344265163334614:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.356327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00192b/r3tmp/tmpGwD1fR/pdisk_1.dat 2025-04-17T18:03:19.901710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:19.908918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.909004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.916049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16200 TServer::EnableGrpc on GrpcPort 1299, node 1 2025-04-17T18:03:20.266852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.266878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.266884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.267013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.681567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:20.702945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:20.715705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913000845 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000747 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913000845 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-17T18:03:20.882097Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.882203Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.882235Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.882819Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.602512Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913000845, tx_id: 281474976710658 } } } 2025-04-17T18:03:22.602787Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.604305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-17T18:03:22.605493Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T18:03:22.605516Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T18:03:22.633735Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T18:03:22.633765Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls request: /Root/Dir/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002672 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-17T18:03:23.500142Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344285626186268:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:23.545552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00192b/r3tmp/tmpK6g6Gj/pdisk_1.dat 2025-04-17T18:03:23.615427Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:23.674686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:23.674757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:23.676403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29487 TServer::EnableGrpc on GrpcPort 15587, node 2 2025-04-17T18:03:23.894710Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:23.894732Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:23.894737Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:23.894839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:03:24.263291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.270952Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:24.273590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744913004590 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004310 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744913004590 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-04-17T18:03:24.592833Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.592958Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.592986Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.593358Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.586601Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004590, tx_id: 281474976715658 } } } 2025-04-17T18:03:26.586969Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.588811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.590172Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-17T18:03:26.590211Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-04-17T18:03:26.617931Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-04-17T18:03:26.617957Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913006655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2025-04-17T18:03:19.299347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344265894982710:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.299606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001973/r3tmp/tmp4TUvEd/pdisk_1.dat 2025-04-17T18:03:19.803209Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:19.818643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.818755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.822783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1911 TServer::EnableGrpc on GrpcPort 14791, node 1 2025-04-17T18:03:20.189786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.189832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.189843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.189964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.683552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:20.714097Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:20.723187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.727086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000845 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000740 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000845 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-17T18:03:20.895720Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.895868Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.895901Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.899682Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.641276Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913000845, tx_id: 281474976710659 } } } 2025-04-17T18:03:22.641628Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.643112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:22.643884Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-17T18:03:22.643905Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 TClient::Ls request: /Root/Replicated 2025-04-17T18:03:22.672344Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2025-04-17T18:03:22.672373Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913002714 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-17T18:03:23.485314Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344284646364824:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:23.557158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001973/r3tmp/tmp8rGelG/pdisk_1.dat 2025-04-17T18:03:23.644424Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:23.669660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:23.669743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:23.671667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19871 TServer::EnableGrpc on GrpcPort 4742, node 2 2025-04-17T18:03:23.878674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:23.878693Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:23.878699Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:23.878837Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:24.217817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.224123Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:24.234203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744913004352 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004268 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744913004352 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-17T18:03:24.333470Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.333583Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.333594Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.334156Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.701440Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004352, tx_id: 281474976715658 } } } 2025-04-17T18:03:26.701788Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.703566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.704392Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-17T18:03:26.704408Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-04-17T18:03:26.735086Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-04-17T18:03:26.735111Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744913004352 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913006774 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... 
(TRUNCATED) >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-04-17T18:03:19.254595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344266191341013:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.254640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00196c/r3tmp/tmpzPuDby/pdisk_1.dat 2025-04-17T18:03:19.897941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:19.922094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.922193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.924488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24206 TServer::EnableGrpc on GrpcPort 20805, node 1 2025-04-17T18:03:20.259871Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.259891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.259898Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.260050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.701471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.719424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:20.728191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913000859 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000761 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913000859 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-17T18:03:20.896604Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.896772Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.896796Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.897264Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.590619Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913000859, tx_id: 281474976710658 } } } 2025-04-17T18:03:22.590943Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.592805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:22.594145Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-17T18:03:22.594164Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-17T18:03:22.627117Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-17T18:03:22.627149Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913002665 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00196c/r3tmp/tmpCIviP1/pdisk_1.dat 2025-04-17T18:03:23.539691Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:23.540155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:23.564249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:23.564326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:23.571197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20845 TServer::EnableGrpc on GrpcPort 8450, node 2 2025-04-17T18:03:23.846799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:23.846821Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:23.846827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:23.846933Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:24.228136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.235060Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:24.238767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.289870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004275 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004366 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004275 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004366 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-17T18:03:24.327766Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.327876Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.327887Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.330743Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.738315Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004310, tx_id: 281474976715658 } } } 2025-04-17T18:03:26.738571Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.739927Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:26.740776Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913004366 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:03:26.740953Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20208, MsgBus: 24889 2025-04-17T18:03:14.638575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344246700746363:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.658085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d5/r3tmp/tmp4QE2tY/pdisk_1.dat 2025-04-17T18:03:15.078710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:15.079009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.079086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.082638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20208, node 1 2025-04-17T18:03:15.169638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.169657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.169665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T18:03:15.169815Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24889 TClient is connected to server localhost:24889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:15.816803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.842878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:15.868543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.003854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:16.182464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:16.272886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.926670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344259585650040:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.926793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.302714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.332287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.414942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.446058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.478677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.552758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.592839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.687115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344263880618008:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.687205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.687458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344263880618013:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.691312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.702505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344263880618015:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.761389Z node 1 :TX_PROXY ERROR: Actor# [1:7494344263880618071:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.643901Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344246700746363:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.643995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:19.880546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.332433Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-17T18:03:20.341844Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037922 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:20.342015Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037922 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:20.342238Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344272470553199:2513], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [1:7494344268175585703:2513]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[1:7494344272470553199:2513].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:03:20.342887Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344272470553183:2513], SessionActorId: [1:7494344268175585703:2513], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
<main>: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344268175585703:2513]. isRollback=0 2025-04-17T18:03:20.343155Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjY5MDZhODEtZjMyZDY2MzMtMTYyNmI1ZjktODFmZDc0MzM=, ActorId: [1:7494344268175585703:2513], ActorState: ExecuteState, TraceId: 01js2epsjpdn3nxnwtmpqzqwqj, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344272470553193:2513] from: [1:7494344272470553183:2513] 2025-04-17T18:03:20.343237Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344272470553193:2513] TxId: 281474976710675. Ctx: { TraceId: 01js2epsjpdn3nxnwtmpqzqwqj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY5MDZhODEtZjMyZDY2MzMtMTYyNmI1ZjktODFmZDc0MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
<main>: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:20.343429Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjY5MDZhODEtZjMyZDY2MzMtMTYyNmI1ZjktODFmZDc0MzM=, ActorId: [1:7494344268175585703:2513], ActorState: ExecuteState, TraceId: 01js2epsjpdn3nxnwtmpqzqwqj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20700, MsgBus: 63247 2025-04-17T18:03:21.089842Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344277489381952:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.089886Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d5/r3tmp/tmpKlThTi/pdisk_1.dat 2025-04-17T18:03:21.284798Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:21.309249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.309327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.314867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20700, node 2 2025-04-17T18:03:21.374679Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.374702Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.374709Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.374803Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63247 TClient is connected to server localhost:63247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:21.819394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.825979Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.831952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:03:21.888689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.067790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:22.145464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.348232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290374285600:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.348346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.399351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.423354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.451325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.477925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.508204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.552404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.593846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.659447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290374286256:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.659536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.659938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290374286261:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.663620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.675173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344290374286263:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.742211Z node 2 :TX_PROXY ERROR: Actor# [2:7494344290374286317:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:25.896457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.091568Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344277489381952:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.091653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:26.607341Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344298964221454:2535], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01js2epznhf5gqxfc7k7zr2jv1. SessionId : ydb://session/3?node_id=2&id=NDU1ODYyYjItY2RhYzdjZTMtMWUyZjY3OWUtYjczN2FkODI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Error: Duplicated keys found., code: 2012 }. 2025-04-17T18:03:26.607715Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344298964221455:2536], TxId: 281474976715677, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2epznhf5gqxfc7k7zr2jv1. SessionId : ydb://session/3?node_id=2&id=NDU1ODYyYjItY2RhYzdjZTMtMWUyZjY3OWUtYjczN2FkODI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494344298964221451:2503], status: PRECONDITION_FAILED, reason: {
<main>: Error: Terminate execution } 2025-04-17T18:03:26.608024Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDU1ODYyYjItY2RhYzdjZTMtMWUyZjY3OWUtYjczN2FkODI=, ActorId: [2:7494344294669253912:2503], ActorState: ExecuteState, TraceId: 01js2epznhf5gqxfc7k7zr2jv1, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-04-17T18:03:19.259996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344266395416006:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.265366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001954/r3tmp/tmpmp43Q6/pdisk_1.dat 2025-04-17T18:03:19.842846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:19.842947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:19.850500Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:19.854352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21454 TServer::EnableGrpc on GrpcPort 4123, node 1 2025-04-17T18:03:20.187546Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:20.187573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:20.187588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:20.187736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:20.779760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:03:20.793803Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:20.797949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:20.920582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000838 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000985 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913000838 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000985 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-17T18:03:20.946348Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.946472Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:20.946493Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:20.946866Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:22.598710Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913000922, tx_id: 281474976710658 } } } 2025-04-17T18:03:22.599086Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:22.600588Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:22.602479Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913000985 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIo ... 
Disconnected 2025-04-17T18:03:23.866986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:23.877130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25742 TServer::EnableGrpc on GrpcPort 13502, node 2 2025-04-17T18:03:24.189784Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:24.189813Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:24.189821Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:24.189937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:24.495001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.503000Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:24.505855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.556325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004541 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913004653 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913004541 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913004653 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-17T18:03:24.622457Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.622581Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:03:24.622593Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-17T18:03:24.624582Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-17T18:03:26.886298Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913004583, tx_id: 281474976710658 } } } 2025-04-17T18:03:26.886565Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:03:26.888166Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-17T18:03:26.889165Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913004653 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 
ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:03:26.889333Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 17180, MsgBus: 32245 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00199a/r3tmp/tmptJnnBq/pdisk_1.dat 2025-04-17T18:03:15.473862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:15.626806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.626964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.628545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:15.672005Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17180, node 1 2025-04-17T18:03:15.828980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.829002Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.829011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.829147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32245 TClient is connected to server localhost:32245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:03:16.497246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.514796Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:16.532720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.696205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:16.863001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:16.918801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.405260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262836841860:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.405345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.760121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.795640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.828606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.855508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.887632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.932740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.982919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.073165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344267131809823:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.073250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.073640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344267131809828:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.077863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:19.092603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344267131809830:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:19.159536Z node 1 :TX_PROXY ERROR: Actor# [1:7494344267131809886:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:20.371913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10995, MsgBus: 13732 2025-04-17T18:03:21.585607Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344277433062290:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.585646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00199a/r3tmp/tmpOtXc20/pdisk_1.dat 2025-04-17T18:03:21.731005Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:21.741521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.741599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.743258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10995, node 2 2025-04-17T18:03:21.814678Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.814697Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.814705Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.814809Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13732 TClient is connected to server localhost:13732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:22.220933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:22.226909Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:22.244265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.361230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.529940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.641843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.535070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290317965958:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.535142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.605200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.645382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.683261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.754437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.788693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.822871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.877047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.967151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290317966630:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.967235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.967410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290317966635:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.971280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.980080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344290317966637:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:25.036341Z node 2 :TX_PROXY ERROR: Actor# [2:7494344294612933988:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:26.175712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.586146Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344277433062290:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.586224Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:26.790003Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2025-04-17T18:03:26.790407Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 8 at tablet 72075186224037922 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:03:26.795255Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 8 at tablet 72075186224037922 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:03:26.795533Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344298907901931:2514], Table: `/Root/TestImmediateEffects` ([72057594046644480:17:1]), SessionActorId: [2:7494344298907901623:2514]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[2:7494344298907901931:2514].{
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:03:26.796115Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344298907901923:2514], SessionActorId: [2:7494344298907901623:2514], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7494344298907901623:2514]. isRollback=0 2025-04-17T18:03:26.796380Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTY1OTMxNWItYmNiOWZkYzYtNDNmYmJmMDktNTI1ZmQzZDg=, ActorId: [2:7494344298907901623:2514], ActorState: ExecuteState, TraceId: 01js2eq0362a2c348crypmpa56, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494344298907901994:2514] from: [2:7494344298907901923:2514] 2025-04-17T18:03:26.796460Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344298907901994:2514] TxId: 281474976715679. Ctx: { TraceId: 01js2eq0362a2c348crypmpa56, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1OTMxNWItYmNiOWZkYzYtNDNmYmJmMDktNTI1ZmQzZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:03:26.796624Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTY1OTMxNWItYmNiOWZkYzYtNDNmYmJmMDktNTI1ZmQzZDg=, ActorId: [2:7494344298907901623:2514], ActorState: ExecuteState, TraceId: 01js2eq0362a2c348crypmpa56, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 13296, MsgBus: 18759 2025-04-17T18:03:15.211762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344248870974257:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:15.211805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001986/r3tmp/tmpguIa0W/pdisk_1.dat 2025-04-17T18:03:15.654716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:15.661520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.661625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.664727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13296, node 1 2025-04-17T18:03:15.817825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.817850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.817866Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.818032Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18759 TClient is connected to server localhost:18759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.387270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:16.409557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.577022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.767487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:16.842307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.501674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344261755877924:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.501777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.876717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.910821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.978819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.013954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.056618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.106609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.145805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.231598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344266050845882:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.231668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.231885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344266050845887:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.240934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:19.254766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344266050845889:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:19.346492Z node 1 :TX_PROXY ERROR: Actor# [1:7494344266050845948:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:20.214360Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344248870974257:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:20.214433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:20.374091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9100, MsgBus: 8640 2025-04-17T18:03:21.624899Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344277055545171:2111];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.624990Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001986/r3tmp/tmpjt1obr/pdisk_1.dat 2025-04-17T18:03:21.757162Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9100, node 2 2025-04-17T18:03:21.800611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.800690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.810339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:21.888355Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.888374Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.888379Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.888447Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8640 TClient is connected to server localhost:8640 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:22.424090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.443729Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:22.451249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.586283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.768453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.857928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.747380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289940448775:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.747468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.816207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.851154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.920330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.988150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.024347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.065083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.102691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.208424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344294235416738:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:25.208511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:25.210451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344294235416743:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:25.216117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:25.228303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344294235416745:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:25.288112Z node 2 :TX_PROXY ERROR: Actor# [2:7494344294235416801:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:26.307421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.625485Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344277055545171:2111];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.625554Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5903, MsgBus: 25651 2025-04-17T18:03:14.392559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344245517896722:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.392670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e4/r3tmp/tmpLMJJKM/pdisk_1.dat 2025-04-17T18:03:14.850568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:14.858606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:14.858711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:14.866989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5903, node 1 2025-04-17T18:03:14.983750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:14.983766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:14.983771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:14.983894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25651 TClient is connected to server localhost:25651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:15.709377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.730815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:15.756663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:15.928857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.075708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.142032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.826073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344258402800388:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:17.826207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.158453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.190847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.215525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.243839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.274704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.349249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.386057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.482190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262697768347:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.482316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.490455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344262697768352:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.497983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.511112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344262697768354:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.567758Z node 1 :TX_PROXY ERROR: Actor# [1:7494344262697768411:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.392822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344245517896722:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.392888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:19.612529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12708, MsgBus: 28749 2025-04-17T18:03:21.069395Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344277548843244:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.070882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e4/r3tmp/tmpodiRKQ/pdisk_1.dat 2025-04-17T18:03:21.187215Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12708, node 2025-04-17T18:03:21.215169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.215251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.216036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2 2025-04-17T18:03:21.290276Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.290295Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.290303Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.290412Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28749 TClient is connected to server localhost:28749 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:21.693240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.708121Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:21.720354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.807936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:21.954885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.032647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:24.394289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290433746896:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.394366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.434369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.479267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.509358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.584213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.630783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.702101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.732082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.800262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290433747559:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.800337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.802311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344290433747564:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.806465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.818450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344290433747566:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:24.903245Z node 2 :TX_PROXY ERROR: Actor# [2:7494344290433747623:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:26.070324Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344277548843244:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.070394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:26.149347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |84.9%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |84.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20907, MsgBus: 11845 2025-04-17T18:03:14.766100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344243816754361:2101];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.767054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c8/r3tmp/tmpSy8qIg/pdisk_1.dat 2025-04-17T18:03:15.354357Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:15.357616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.357730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.363112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20907, node 1 2025-04-17T18:03:15.430720Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.430749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.430758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.430905Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11845 TClient is connected to server localhost:11845 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.057309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.074199Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:16.096886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.259968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.448736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.526577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.136558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344260996625277:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.136668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.514167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.546742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.584437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.628197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.668070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.721098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.777358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.843354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344260996625941:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.843423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.843549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344260996625946:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.846788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:18.859536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344260996625948:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:18.964408Z node 1 :TX_PROXY ERROR: Actor# [1:7494344260996626005:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.770260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344243816754361:2101];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.783367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:20.156496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.473006Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710674; 2025-04-17T18:03:20.487164Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344269586561103:2513], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7494344269586560934:2513]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7494344269586561103:2513].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:03:20.487711Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344269586561092:2513], SessionActorId: [1:7494344269586560934:2513], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344269586560934:2513]. isRollback=0 2025-04-17T18:03:20.487967Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA1MzBiMDgtNjZjNzQ3MGMtMjcyMmQ0Ny1kMmFiNjY3Ng==, ActorId: [1:7494344269586560934:2513], ActorState: ExecuteState, TraceId: 01js2epsvqapr5mbkhbwvv75nn, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344269586561093:2513] from: [1:7494344269586561092:2513] 2025-04-17T18:03:20.488064Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344269586561093:2513] TxId: 281474976710674. Ctx: { TraceId: 01js2epsvqapr5mbkhbwvv75nn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA1MzBiMDgtNjZjNzQ3MGMtMjcyMmQ0Ny1kMmFiNjY3Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:20.488293Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA1MzBiMDgtNjZjNzQ3MGMtMjcyMmQ0Ny1kMmFiNjY3Ng==, ActorId: [1:7494344269586560934:2513], ActorState: ExecuteState, TraceId: 01js2epsvqapr5mbkhbwvv75nn, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 7055, MsgBus: 5533 2025-04-17T18:03:21.617684Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344276601129167:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.617730Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c8/r3tmp/tmpHvsumr/pdisk_1.dat 2025-04-17T18:03:21.731964Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:21.756308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:21.756388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:21.757686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7055, node 2 2025-04-17T18:03:21.817752Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:21.817773Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:21.817782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:21.817887Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5533 TClient is connected to server localhost:5533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:22.331500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.339097Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:22.359315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:22.422481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:22.597942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:22.678894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.562314Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289486032812:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.562411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.619994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.651260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.687298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.728926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.772762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.807688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.882695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:24.955877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289486033479:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.955968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.956174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344289486033484:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:24.960243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:24.972991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344289486033486:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:25.068276Z node 2 :TX_PROXY ERROR: Actor# [2:7494344293781000840:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:26.229319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.617980Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344276601129167:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.618037Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:26.703940Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344298075968638:2532], TxId: 281474976715676, task: 1. Ctx: { TraceId : 01js2epzsceccfcwyw70h90v1t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjY0NWFhMDEtOGJjYzdlMWQtOTVkMjVhMjEtYjQ4NjljZjM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:03:26.704338Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344298075968639:2533], TxId: 281474976715676, task: 2. Ctx: { TraceId : 01js2epzsceccfcwyw70h90v1t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjY0NWFhMDEtOGJjYzdlMWQtOTVkMjVhMjEtYjQ4NjljZjM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494344298075968635:2503], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:26.704736Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjY0NWFhMDEtOGJjYzdlMWQtOTVkMjVhMjEtYjQ4NjljZjM=, ActorId: [2:7494344298075968430:2503], ActorState: ExecuteState, TraceId: 01js2epzsceccfcwyw70h90v1t, Create QueryResponse for error on request, msg: >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T17:57:32.756722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342778999107733:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:32.757813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:33.061406Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00061b/r3tmp/tmp9bwqzC/pdisk_1.dat 2025-04-17T17:57:33.376925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:33.381535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:33.381644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:33.387006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28908, node 1 2025-04-17T17:57:33.526751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00061b/r3tmp/yandexpGsxB7.tmp 2025-04-17T17:57:33.526785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00061b/r3tmp/yandexpGsxB7.tmp 2025-04-17T17:57:33.526974Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00061b/r3tmp/yandexpGsxB7.tmp 2025-04-17T17:57:33.527107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:33.608864Z INFO: TTestServer started on Port 2383 GrpcPort 28908 TClient is connected to server localhost:2383 PQClient connected to localhost:28908 === TenantModeEnabled() = 0 === Init PQ - start server on port 28908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:57:34.060168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:57:34.060371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:34.060784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:57:34.060968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:57:34.060994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:34.061451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:34.061520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:57:34.061628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:34.061647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:57:34.061668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T17:57:34.061681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-17T17:57:34.062112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:34.062138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:57:34.062150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T17:57:34.062526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:34.062542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:34.062572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:34.062602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 waiting... 
2025-04-17T17:57:34.069404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:57:34.069754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:34.069771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T17:57:34.069802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:34.070022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T17:57:34.070130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:57:34.071492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912654114, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:34.071601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912654114 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:57:34.071630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:34.071883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T17:57:34.071910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:34.072065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:57:34.072109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:57:34.072766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:57:34.072798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:57:34.072961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:57:34.072975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494342783294075547:2245], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T17:57:34.073029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:34.073065Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T17:57:34.073138Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:57:34.073148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:34.073184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:57:34.073205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:34.073220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T17:57:34.073236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:34.073252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T17:57:34.073262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-17T17:57:34.073292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T17:57:34.073306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-17T17:57:34.073319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-17T17:57:34.078401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-17T17:57:34.078496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-17T17:57:34.078508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-17T17:57:34.078527Z node 1 :FLAT_T ... 
, grpc_core::ClientChannel *> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26 #2 0x1a7bf9f0 in grpc_core::ClientChannel::CreateResolverLocked() /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1514:25 #3 0x1a7eee5b in TryToConnectLocked /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1764:5 #4 0x1a7eee5b in operator() /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1779:52 #5 0x1a7eee5b in __invoke<(lambda at /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1778:27) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #6 0x1a7eee5b in __call<(lambda at /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1778:27) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #7 0x1a7eee5b in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #8 0x1a7eee5b in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #9 0x1a6c514d in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #10 0x1a6c514d in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #11 0x1a6c514d in DrainQueueOwned /-S/contrib/libs/grpc/src/core/lib/gprpp/work_serializer.cc:221:5 #12 0x1a6c514d in grpc_core::WorkSerializer::WorkSerializerImpl::Run(std::__y1::function, grpc_core::DebugLocation const&) /-S/contrib/libs/grpc/src/core/lib/gprpp/work_serializer.cc:109:5 #13 0x1a6c63c8 in grpc_core::WorkSerializer::Run(std::__y1::function, grpc_core::DebugLocation const&) /-S/contrib/libs/grpc/src/core/lib/gprpp/work_serializer.cc:237:10 #14 0x1a7b0151 in grpc_core::ClientChannel::FilterBasedCallData::StartTransportStreamOpBatch(grpc_call_element*, grpc_transport_stream_op_batch*) /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:2089:32 #15 0x1a3e783e in exec_ctx_run /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:45:3 #16 0x1a3e783e in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:72:9 #17 0x1a43bfa7 in ~ExecCtx /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.h:117:5 #18 0x1a43bfa7 in grpc_call_start_batch /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:3543:3 #19 0x1b92050a in grpc::internal::CallOpSet, grpc::internal::CallNoOp<3>, grpc::internal::CallNoOp<4>, grpc::internal::CallNoOp<5>, grpc::internal::CallNoOp<6>>::ContinueFillOpsAfterInterception() /-S/contrib/libs/grpc/include/grpcpp/impl/call_op_set.h:974:9 #20 0x1b93455c in PerformOps /-S/contrib/libs/grpc/include/grpcpp/impl/call.h:67:17 #21 0x1b93455c in grpc::ClientReaderWriter::ClientReaderWriter(grpc::ChannelInterface*, grpc::internal::RpcMethod const&, grpc::ClientContext*) /-S/contrib/libs/grpc/include/grpcpp/support/sync_stream.h:565:13 #22 0x1b90c7ba in Create /-S/contrib/libs/grpc/include/grpcpp/support/sync_stream.h:439:16 #23 0x1b90c7ba in Ydb::Topic::V1::TopicService::Stub::StreamReadRaw(grpc::ClientContext*) /-B/ydb/public/api/grpc/ydb_topic_v1.grpc.pb.cc:78:10 #24 0x18cfad86 in StreamRead /-B/ydb/public/api/grpc/ydb_topic_v1.grpc.pb.h:395:148 #25 0x18cfad86 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::InitControlSession(TBasicString> const&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:836:35 #26 0x18d09989 in 
NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadNotCached::Execute_(NUnitTest::TTestContext&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1159:15 #27 0x18fdc897 in operator() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #28 0x18fdc897 in __invoke<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #29 0x18fdc897 in __call<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #30 0x18fdc897 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #31 0x18fdc897 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #32 0x19a84155 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #33 0x19a84155 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #34 0x19a84155 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #35 0x19a53ae8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #36 0x18fdb843 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #37 0x19a553b5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #38 0x19a7e6cc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #39 0x7f7b1f262d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 16 byte(s) in 1 object(s) allocated from: #0 0x192c2b8d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x1a4d7a3b in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x1a4d7a3b in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x1a4d7a3b in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x1a4d7a3b in __value_func<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), std::__y1::allocator<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33)> > /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:364:45 #5 0x1a4d7a3b in __value_func<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), 0> /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:372:60 #6 0x1a4d7a3b in function<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), void> /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:946:50 #7 0x1a4d7a3b in operator=<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), void> /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:975:3 #8 0x1a4d7a3b in grpc_core::Channel::Channel(bool, bool, TBasicString>, grpc_core::ChannelArgs const&, grpc_compression_options, grpc_core::RefCountedPtr) /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:31 #9 0x1a4d8aa9 in grpc_core::Channel::CreateWithBuilder(grpc_core::ChannelStackBuilder*) /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:153:37 #10 0x1a4d9ad7 in grpc_core::Channel::Create(char const*, grpc_core::ChannelArgs, grpc_channel_stack_type, grpc_transport*) 
/-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:230:10 #11 0x1a85ff21 in CreateChannel /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:323:10 #12 0x1a85ff21 in grpc_channel_create /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:365:14 #13 0x1b0c45c0 in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelWithInterceptors(TBasicString> const&, grpc::ChannelArguments const&, std::__y1::vector>, std::__y1::allocator>>>) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:55:13 #14 0x1b0c439b in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelImpl(TBasicString> const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:40:12 #15 0x1b0bcb54 in grpc::CreateCustomChannel(TBasicString> const&, std::__y1::shared_ptr const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/create_channel.cc:50:25 #16 0x18ffdbd6 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::Connect(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:824:23 #17 0x18cf9622 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::TDirectReadTestSetup(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:806:13 #18 0x18d098f3 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadNotCached::Execute_(NUnitTest::TTestContext&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1157:30 #19 0x18fdc897 in operator() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #20 0x18fdc897 in __invoke<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #21 0x18fdc897 in __call<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #22 0x18fdc897 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #23 0x18fdc897 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #24 0x19a84155 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #25 0x19a84155 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #26 0x19a84155 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #27 0x19a53ae8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #28 0x18fdb843 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #29 0x19a553b5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #30 0x19a7e6cc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #31 0x7f7b1f262d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 7544213 byte(s) leaked in 1622 allocation(s). 
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL] >> KqpCost::ScanScriptingRangeFullScan+SourceRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 4825, MsgBus: 22193 2025-04-17T18:03:16.209127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344252683530837:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:16.209214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001983/r3tmp/tmpbCsVMT/pdisk_1.dat 2025-04-17T18:03:16.638278Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:16.643488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:16.643580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:16.645634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4825, node 1 2025-04-17T18:03:16.735859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:16.735881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:16.735887Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:16.736003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22193 TClient is connected to server localhost:22193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:17.234913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:17.247223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:17.258616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.371341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.521362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.586674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:19.210446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344265568434467:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.210545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.607777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.642451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.680277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.764349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.834271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.904561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.988475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.079224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344269863402437:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:20.079268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:20.079455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344269863402442:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:20.082626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:20.096327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344269863402444:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:20.169567Z node 1 :TX_PROXY ERROR: Actor# [1:7494344269863402500:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:21.129742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:21.211056Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344252683530837:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:21.211386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19499, MsgBus: 14155 2025-04-17T18:03:22.547901Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344280657475353:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:22.548591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001983/r3tmp/tmpXCB6Ki/pdisk_1.dat 2025-04-17T18:03:22.668518Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19499, node 2 2025-04-17T18:03:22.715050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:22.715145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:22.718121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:22.744203Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:22.744221Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:22.744229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:22.744342Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14155 TClient is connected to server localhost:14155 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:03:23.171501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.192660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.279870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.465030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.581424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.756521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344293542379015:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:25.756613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:25.807743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.845376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.885455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.930107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.002648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.042274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.111362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.191228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344297837346974:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.191313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.191676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344297837346979:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.195588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:26.209629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344297837346981:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:26.283116Z node 2 :TX_PROXY ERROR: Actor# [2:7494344297837347038:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:27.177838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.548606Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344280657475353:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:27.548664Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 14099, MsgBus: 16989 2025-04-17T18:03:15.251321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344251340038317:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:15.251566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00198f/r3tmp/tmp84g6Wx/pdisk_1.dat 2025-04-17T18:03:15.741474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:15.743714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.743827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.747562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14099, node 1 2025-04-17T18:03:15.900831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.900854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.900866Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.900987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16989 TClient is connected to server localhost:16989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.621736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.650445Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:16.664287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.815957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.957801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:17.034384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.833614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344264224941976:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.833738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.289720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.361343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.403411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.447859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.506401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.576996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.662049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.734327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344268519909947:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.734416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.738330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344268519909952:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.745787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:19.762947Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:03:19.763143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344268519909955:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:19.868098Z node 1 :TX_PROXY ERROR: Actor# [1:7494344268519910014:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:20.251599Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344251340038317:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:20.251703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12667, MsgBus: 26436 2025-04-17T18:03:22.393036Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344281993326867:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:22.393092Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00198f/r3tmp/tmpxZJMg5/pdisk_1.dat 2025-04-17T18:03:22.550022Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:22.565039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:22.565141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:22.566829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12667, node 2 2025-04-17T18:03:22.630454Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:22.630477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:22.630486Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:22.630599Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26436 TClient is connected to server localhost:26436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:03:23.057259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.081199Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.093247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:23.150434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.309504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:23.425881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.766767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344294878230517:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:25.766846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:25.828368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.871876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.904300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.951384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.988973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.070575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.124489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.211409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344299173198478:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.211484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.211907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344299173198483:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.216490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:26.231314Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344299173198485:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:26.315932Z node 2 :TX_PROXY ERROR: Actor# [2:7494344299173198542:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:27.394388Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344281993326867:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:27.394456Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] >> KqpCost::IndexLookupJoin+StreamLookupJoin |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink >> KqpCost::OlapWriteRow >> KqpCost::ScanScriptingRangeFullScan-SourceRead |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] >> KqpCost::IndexLookup-useSink [GOOD] >> KqpCost::QuerySeviceRangeFullScan |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 4551, MsgBus: 19644 2025-04-17T18:03:23.520600Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344283699859450:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:23.520645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ae8/r3tmp/tmpHQMYO7/pdisk_1.dat 2025-04-17T18:03:24.064639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:24.109308Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:24.109391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:24.111476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4551, node 1 2025-04-17T18:03:24.390764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:24.390783Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:24.390793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:24.390888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19644 TClient is connected to server localhost:19644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:25.103927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.139969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:25.167202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.346669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.508331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.609313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.823845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344296584762884:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.823946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.412687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.453276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.528999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.576772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.618409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.653394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.697204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.746604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344300879730845:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.746683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.746893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344300879730850:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.750090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:27.760257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344300879730852:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:27.837504Z node 1 :TX_PROXY ERROR: Actor# [1:7494344300879730909:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:28.552741Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344283699859450:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:28.552899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:29.017413Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:03:29.088915Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-17T18:03:29.297590Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7494344309469665838:2505] 2025-04-17T18:03:29.297622Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7494344309469665801:2505] 2025-04-17T18:03:29.303081Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1744913009350:281474976710672 created 2025-04-17T18:03:29.303393Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-17T18:03:29.303423Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-17T18:03:29.303445Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-17T18:03:29.303835Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key sets: 1 2025-04-17T18:03:29.304009Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-17T18:03:29.304054Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-04-17T18:03:29.304274Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710673. Shard resolve complete, resolved shards: 1 2025-04-17T18:03:29.304305Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. 
Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-17T18:03:29.304339Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-04-17T18:03:29.304372Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:03:29.304425Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, Database ... tcher_actor.cpp:127;Recv TEvScanData from ShardID==[1:7494344309469665882:2050];ScanId=1;Finished=1;Lock=;BrokenLocks=; 2025-04-17T18:03:29.360711Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:524;action=got EvScanData;rows=3;finished=1;exceeded=0;scan=1;packs_to_send=0;from=[1:7494344309469665882:2050];shards remain=0;in flight scans=1;in flight shards=1;delayed_for_seconds_by_ratelimiter=0;tablet_id=72075186224037914;locks=0;broken locks=0; 2025-04-17T18:03:29.360744Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.cpp:39;event=receive_data;actor_id=[1:7494344309469665882:2050];count_chunks=1; 2025-04-17T18:03:29.360759Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:261;event=on_receive;compute_shard_id=NO_VALUE_OPTIONAL; 2025-04-17T18:03:29.360776Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:208;event=add_data_to_compute;rows=3; 2025-04-17T18:03:29.360809Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:178;event=send_data_to_compute;space=8388608;queue=1;compute_actor_id=[1:7494344309469665853:2512];rows=3; 2025-04-17T18:03:29.360827Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:162;event=stop_scanner; 2025-04-17T18:03:29.360845Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:39;event=scan_ack_on_finished;actor_id=[1:7494344309469665882:2050]; 2025-04-17T18:03:29.360859Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665856:2514]. EVLOGKQP:0/0/3/3 2025-04-17T18:03:29.360880Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:359;event=scanner_finished;tablet_id=72075186224037914;stop_shard=1; 2025-04-17T18:03:29.360903Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:93;event=stop_scanner;actor_id=[1:7494344309469665882:2050];message=;final_flag=1; 2025-04-17T18:03:29.360977Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665856:2514]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2025-04-17T18:03:29.361017Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:406;event=wait_all_scanner_finished;scans=0; 2025-04-17T18:03:29.361349Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665856:2514]. 
EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2025-04-17T18:03:29.361638Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:174 :TEvSendData: [1:7494344309469665856:2514]/[1:7494344309469665853:2512] 2025-04-17T18:03:29.361931Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665853:2512], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=. TraceId : 01js2eq2bze64fptqbr2m3y9vn. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:03:29.361960Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:204 :TEvFetcherFinished: [1:7494344309469665856:2514] 2025-04-17T18:03:29.363054Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-17T18:03:29.363095Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665854:2513], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01js2eq2bze64fptqbr2m3y9vn. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-17T18:03:29.363148Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. Finish input channelId: 1, from: [1:7494344309469665853:2512] 2025-04-17T18:03:29.363237Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665854:2513], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01js2eq2bze64fptqbr2m3y9vn. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:03:29.363969Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665854:2513], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01js2eq2bze64fptqbr2m3y9vn. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:03:29.364035Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1. Tasks execution finished 2025-04-17T18:03:29.364048Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665853:2512], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=. TraceId : 01js2eq2bze64fptqbr2m3y9vn. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:03:29.364174Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1. pass away 2025-04-17T18:03:29.364277Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710673;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:03:29.364570Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710673, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T18:03:29.364863Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7494344309469665801:2505], seqNo: 1, nRows: 1 2025-04-17T18:03:29.365030Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7494344309469665853:2512], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 11932 Tasks { TaskId: 1 CpuTimeUs: 8070 FinishTimeMs: 1744913009364 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 302 BuildCpuTimeUs: 7768 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-gkdwnlcfnq" NodeId: 1 CreateTimeMs: 1744913009309 UpdateTimeMs: 1744913009364 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:29.365087Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494344309469665853:2512] 2025-04-17T18:03:29.365136Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7494344309469665854:2513], 2025-04-17T18:03:29.368547Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7494344309469665857:2513] 2025-04-17T18:03:29.368626Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665854:2513], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01js2eq2bze64fptqbr2m3y9vn. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:03:29.368982Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-17T18:03:29.369029Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. Tasks execution finished 2025-04-17T18:03:29.369061Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344309469665854:2513], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01js2eq2bze64fptqbr2m3y9vn. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-17T18:03:29.369197Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. pass away 2025-04-17T18:03:29.369291Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710673;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:03:29.369316Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7494344309469665854:2513], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 46342 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 3301 FinishTimeMs: 1744913009368 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 649 BuildCpuTimeUs: 2652 HostName: "ghrun-gkdwnlcfnq" NodeId: 1 CreateTimeMs: 1744913009310 UpdateTimeMs: 1744913009369 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:29.369364Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494344309469665854:2513] 2025-04-17T18:03:29.369531Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:29.369620Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344309469665849:2505] TxId: 281474976710673. Ctx: { TraceId: 01js2eq2bze64fptqbr2m3y9vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4MTllZjktYzMzYTUwMDAtNTA5NDA3YjktMzRlZGFiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.058274s ReadRows: 3 ReadBytes: 96 ru: 38 rate limiter was not found force flag: 1 2025-04-17T18:03:29.369793Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710673, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 
2025-04-17T18:03:29.370510Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913009350, txId: 281474976710672] shutting down >> KqpCost::IndexLookupAndTake-useSink [GOOD] >> KqpCost::OltpWriteRow+isSink >> KqpLimits::QSReplySize+useSink |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 10950, MsgBus: 15662 2025-04-17T18:03:23.502631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344284307271001:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:23.514778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001adf/r3tmp/tmpP1bCIE/pdisk_1.dat 2025-04-17T18:03:24.092596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:24.099952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:24.100050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:24.112790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10950, node 1 2025-04-17T18:03:24.390890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:24.390926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:24.390933Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:24.391067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15662 TClient is connected to server localhost:15662 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:25.158819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:25.176859Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:25.192798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.368812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.538109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.634401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.854439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344297192174529:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.854553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.412996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.450113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.482470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.523952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.561403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.598389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.666957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.748908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344301487142499:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.748962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.749042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344301487142504:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.752303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:27.761515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344301487142506:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:27.856940Z node 1 :TX_PROXY ERROR: Actor# [1:7494344301487142565:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:28.502269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344284307271001:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:28.547376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:29.030792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 12083, MsgBus: 9002 2025-04-17T18:03:23.543780Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344284390906432:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:23.555876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ae4/r3tmp/tmpgCiG8P/pdisk_1.dat 2025-04-17T18:03:24.122930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:24.127445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:24.127536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:24.131391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12083, node 1 2025-04-17T18:03:24.390791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:24.390811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:24.390821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:24.390935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9002 TClient is connected to server localhost:9002 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:25.161935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.183220Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.201008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.361207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:25.504950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:25.581639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.911924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344297275809931:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:26.912068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.412790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.442917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.469205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.499388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.540159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.595108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.625590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.730597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344301570777895:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.730695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.731058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344301570777900:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.738648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:27.755776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344301570777902:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:27.851147Z node 1 :TX_PROXY ERROR: Actor# [1:7494344301570777960:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:28.544115Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344284390906432:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:28.544176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:29.049560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 2155, MsgBus: 7217 2025-04-17T18:03:24.496914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344289710660856:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:24.497030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ad8/r3tmp/tmpGXCOYC/pdisk_1.dat 2025-04-17T18:03:24.961625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:24.961730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:24.963152Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:24.966663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2155, node 1 2025-04-17T18:03:25.076341Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:25.076367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:25.076375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:25.076486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7217 TClient is connected to server localhost:7217 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:25.664822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.682967Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:25.702006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.871106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.054247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.138202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:27.769864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344302595564457:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.769975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.107576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.137775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.163738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.189789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.221731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.259178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.289130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.363592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344306890532416:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.363659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.363757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344306890532421:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.367165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:28.381630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344306890532423:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:28.453203Z node 1 :TX_PROXY ERROR: Actor# [1:7494344306890532477:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:29.497183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344289710660856:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:29.497239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:29.503752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 20607, MsgBus: 17164 2025-04-17T18:03:24.769826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344289340865616:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:24.776383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001acf/r3tmp/tmpZz343W/pdisk_1.dat 2025-04-17T18:03:25.160564Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20607, node 1 2025-04-17T18:03:25.204516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:25.204618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:25.206989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:25.314742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:25.314769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:25.314776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:25.314916Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17164 TClient is connected to server localhost:17164 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:25.914521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:25.941810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.145924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.298635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.377006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:27.895934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344302225769154:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:27.896090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.227965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.259082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.288999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.315280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.380055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.413067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.447245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:28.513150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344306520737112:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.513243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.513464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344306520737117:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:28.517007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:28.529348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344306520737119:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:28.625235Z node 1 :TX_PROXY ERROR: Actor# [1:7494344306520737177:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:29.720548Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344289340865616:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:29.720613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:29.744434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.811777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.837648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 >> TInterconnectTest::TestManyEvents >> TestProtocols::TestResolveProtocol >> TInterconnectTest::TestSimplePingPong >> TInterconnectTest::TestNotifyUndelivered >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> TInterconnectTest::TestConnectAndDisconnect >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> KqpCost::OlapRange [GOOD] >> TActorTracker::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 62156, MsgBus: 25430 2025-04-17T18:03:14.988680Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344244021469874:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:14.988760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b8/r3tmp/tmpFrEKy8/pdisk_1.dat 2025-04-17T18:03:15.469659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:15.469799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:15.471585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:15.496233Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62156, node 1 2025-04-17T18:03:15.656840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:15.656861Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:15.656868Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:15.656998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25430 TClient is connected to server localhost:25430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:16.231221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.254171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.276556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:16.440987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.614076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:16.690290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:18.531143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344261201340831:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.531255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:18.855246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.888518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.918105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:18.963529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.008165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.061827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.140857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:19.246580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344265496308798:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.246695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.247101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344265496308803:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:19.251757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:19.266622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344265496308805:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:19.361872Z node 1 :TX_PROXY ERROR: Actor# [1:7494344265496308861:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:19.974854Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344244021469874:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:19.974932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:20.418801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.479058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:03:20.531294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4017, MsgBus: 27845 2025-04-17T18:03:25.851688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344292169933825:2176];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:25.851735Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b8/r3tmp/tmpp5KbOs/pdisk_1.dat 2025-04-17T18:03:26.007109Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:26.014949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:26.015033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:26.016613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4017, node 2 2025-04-17T18:03:26.072907Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:26.072932Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:26.072942Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:26.073056Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27845 TClient is connected to server localhost:27845 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:26.583521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.592354Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:26.608094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:26.667988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:26.838337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:26.912495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:29.211591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344309349804649:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.211686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.274040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.312314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.344117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.376169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.404208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.439404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.469867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.549506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344309349805312:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.549578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344309349805317:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.549597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.552452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:29.561172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344309349805319:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:29.621043Z node 2 :TX_PROXY ERROR: Actor# [2:7494344309349805373:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:30.708764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.851595Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344292169933825:2176];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:30.851653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 12794, MsgBus: 62813 2025-04-17T18:03:26.617657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344298052822168:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.618709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ac7/r3tmp/tmpurP1JO/pdisk_1.dat 2025-04-17T18:03:27.008587Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12794, node 1 2025-04-17T18:03:27.059774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:27.059933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:27.061265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:27.073202Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:27.073269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:27.073278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:27.073439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62813 TClient is connected to server localhost:62813 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:27.594847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:27.625535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:27.637953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:27.782782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:03:27.930007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:28.006293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.630056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344310937725684:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.630193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.913862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.947179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.978027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.002765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.031336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.069709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.103835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.168077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344315232693642:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:30.168167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:30.168526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344315232693647:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:30.172504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:30.182566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344315232693649:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:30.244832Z node 1 :TX_PROXY ERROR: Actor# [1:7494344315232693703:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:31.212372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:31.612793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344298052822168:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:31.612889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> BackupRestoreS3::TestAllPrimitiveTypes-DYNUMBER [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes |85.0%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |85.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD] Test command err: Trying to start YDB, gRPC: 3914, MsgBus: 62786 2025-04-17T18:03:26.290652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344296603939572:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:26.290737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001acb/r3tmp/tmpfAHeFf/pdisk_1.dat 2025-04-17T18:03:26.680377Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3914, node 1 2025-04-17T18:03:26.718852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:26.718933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:26.720832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:26.773399Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:26.773439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:26.773448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:26.773554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62786 TClient is connected to server localhost:62786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:27.301502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:27.337072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:27.350592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:27.471653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:27.648904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:27.723600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:29.523953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344309488843232:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.524135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:29.928770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.962173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:29.991585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.023008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.055244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.132167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.171346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.262305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344313783811198:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:30.262412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:30.266326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344313783811203:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:30.273035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:30.285634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344313783811205:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:30.373379Z node 1 :TX_PROXY ERROR: Actor# [1:7494344313783811265:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:31.290454Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344296603939572:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:31.290518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:31.569215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:31.722202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:03:31.722430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:03:31.722746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:03:31.722880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:03:31.722975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:03:31.723091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:03:31.723180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:03:31.723294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:03:31.723431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:03:31.723597Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:03:31.723723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:03:31.723839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494344318078779059:2521];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:03:31.748673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[1:7494344318078779010:2517];tablet_id=72075186224037929;process=TTxInitS ... ormalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:03:31.883630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:03:31.883720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:03:31.883741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:03:31.884583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:03:31.884654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:03:31.884809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:03:31.884920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:03:31.885048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:03:31.885172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:03:31.885283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:03:31.885397Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:03:31.885510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:03:31.885635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:03:31.885739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:03:31.885836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7494344318078779050:2520];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:03:31.900267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:03:31.900318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:03:31.900405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:03:31.900431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:03:31.900560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:03:31.900581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:03:31.900655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:03:31.900679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:03:31.900730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:03:31.900750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:03:31.900798Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:03:31.900826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:03:31.901249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:03:31.901283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:03:31.901421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:03:31.901447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:03:31.901549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:03:31.901574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:03:31.901729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:03:31.901753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:03:31.901833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:03:31.901853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:03:31.928650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.928780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.934317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.934896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.939138Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.939344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.942936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.943628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.946445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:31.947214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:32.147654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:03:32.147654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:03:32.147985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;self_id=[1:7494344318078779065:2522];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037926;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037930;receive=72075186224037931; 2025-04-17T18:03:32.148292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys >> LocalTableWriter::SupportedTypes >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> LocalTableWriter::ConsistentWrite >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> LocalTableWriter::DataAlongWithHeartbeat >> LocalTableWriter::ApplyInCorrectOrder >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> KqpCost::AAARangeFullScan [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |85.1%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2025-04-17T18:03:34.149834Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation 2025-04-17T18:03:34.149930Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work 2025-04-17T18:03:34.150953Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) -> PendingActivation 2025-04-17T18:03:34.151007Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work 2025-04-17T18:03:34.151837Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-04-17T18:03:34.154778Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:18808 2025-04-17T18:03:34.154946Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-04-17T18:03:34.155372Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2025-04-17T18:03:34.155522Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T18:03:34.156307Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2025-04-17T18:03:34.157614Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:44236 2025-04-17T18:03:34.158026Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2025-04-17T18:03:34.162081Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 275877 ProgramStartTime: 3075123779874 Serial: 886380389 ReceiverNodeId: 6 SenderActorId: "[5:886380389:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 275877" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 275877" AcceptUUID: "Cluster for process with id: 275877" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\263\010\014I\366(\t\301\354\'\360\253\337\350.\306-\023R\2128s\235\324\360\235\004\342\367\332s\027" RequestXxhash: true RequestXdcShuffle: true 2025-04-17T18:03:34.162746Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 275877 ProgramStartTime: 3075123779874 Serial: 886380389 ReceiverNodeId: 6 SenderActorId: "[5:886380389:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 275877" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 275877" AcceptUUID: "Cluster for process with id: 275877" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\263\010\014I\366(\t\301\354\'\360\253\337\350.\306-\023R\2128s\235\324\360\235\004\342\367\332s\027" RequestXxhash: true RequestXdcShuffle: true 2025-04-17T18:03:34.162812Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T18:03:34.163229Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @99 
PendingActivation -> PendingNodeInfo 2025-04-17T18:03:34.164529Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host ::1:3893 2025-04-17T18:03:34.164595Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2025-04-17T18:03:34.164653Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-04-17T18:03:34.164716Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-04-17T18:03:34.164847Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2025-04-17T18:03:34.164916Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @220 PendingConnection -> PendingConnection 2025-04-17T18:03:34.165372Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 275877 ProgramStartTime: 3075148507124 Serial: 4212468795 SenderActorId: "[6:4212468795:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 275877" AcceptUUID: "Cluster for process with id: 275877" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-04-17T18:03:34.165897Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 275877 ProgramStartTime: 3075148507124 Serial: 4212468795 SenderActorId: "[6:4212468795:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 275877" AcceptUUID: "Cluster for process with id: 275877" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-04-17T18:03:34.165974Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-17T18:03:34.166150Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T18:03:34.166921Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\263\010\014I\366(\t\301\354\'\360\253\337\350.\306-\023R\2128s\235\324\360\235\004\342\367\332s\027" 2025-04-17T18:03:34.167025Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2025-04-17T18:03:34.167385Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-04-17T18:03:34.167448Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2025-04-17T18:03:34.167491Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 PendingConnection -> StateWork 2025-04-17T18:03:34.168560Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:23:2048] 2025-04-17T18:03:34.169540Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:886380389:0] peer: [6:4212468795:0] socket: 24 2025-04-17T18:03:34.169621Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS10 traffic start 2025-04-17T18:03:34.170433Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS11 registering socket in PollerActor 2025-04-17T18:03:34.170517Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T18:03:34.170570Z node 5 
:INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-04-17T18:03:34.170644Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T18:03:34.170757Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2025-04-17T18:03:34.170863Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:44242 2025-04-17T18:03:34.171311Z node 6 :INTERCONNECT DEBUG: Handshake [6:25:2058] [node 0] ICH02 starting incoming handshake 2025-04-17T18:03:34.177572Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS01 InputSession created 2025-04-17T18:03:34.177670Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T18:03:34.178517Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.178926Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2025-04-17T18:03:34.179520Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-04-17T18:03:34.179611Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2025-04-17T18:03:34.179665Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 PendingConnection -> StateWork 2025-04-17T18:03:34.179788Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:26:2048] 2025-04-17T18:03:34.179858Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:4212468795:0] peer: [5:886380389:0] socket: 25 2025-04-17T18:03:34.179910Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-04-17T18:03:34.179993Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-04-17T18:03:34.180042Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T18:03:34.180081Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-04-17T18:03:34.180130Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T18:03:34.180198Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS01 InputSession created 2025-04-17T18:03:34.180378Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T18:03:34.180423Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.180501Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T18:03:34.180583Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.180669Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T18:03:34.180733Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.180786Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T18:03:34.180815Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] 
ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.180868Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T18:03:34.180897Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.180937Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T18:03:34.180981Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T18:03:34.181044Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T18:03:34.181068Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T18:03:34.181104Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T18:03:34.181129Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T18:03:34.181188Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T18:03:34.181218Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-04-17T18:03:34.181329Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2025-04-17T18:03:34.181502Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS22 outgoing packet Serial# 1 Confirm# 0 DataSize# 84 InflightDataAmount# 84 2025-04-17T18:03:34.181619Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-04-17T18:03:34.182009Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T ... 
NTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T18:03:34.185802Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS07 socket disconnect 24 reason# EndOfStream 2025-04-17T18:03:34.185835Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS25 shutdown socket, reason# EndOfStream 2025-04-17T18:03:34.185916Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS15 start handshake 2025-04-17T18:03:34.186316Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:50238 2025-04-17T18:03:34.186614Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH05 connected to peer 2025-04-17T18:03:34.186768Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH07 SendExBlock ExRequest HandshakeId: "6v\241\014\203\321\260dF\370Fy\027A\024\3557\302\">\211\016\350M\023\251\365pe\030\332h" 2025-04-17T18:03:34.187071Z node 5 :INTERCONNECT DEBUG: Handshake [5:30:2058] [node 6] ICH01 starting outgoing handshake 2025-04-17T18:03:34.187315Z node 5 :INTERCONNECT DEBUG: Handshake [5:31:2059] [node 0] ICH02 starting incoming handshake 2025-04-17T18:03:34.187408Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T18:03:34.187966Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:31:2059]) from: [6:4212468795:0] for: [5:886380389:0] 2025-04-17T18:03:34.188017Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS08 incoming handshake Self# [6:4212468795:0] Peer# [5:886380389:0] Counter# 1 LastInputSerial# 1 2025-04-17T18:03:34.188089Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:31:2059]) is held 2025-04-17T18:03:34.188345Z node 5 :INTERCONNECT DEBUG: Handshake [5:30:2058] [node 6] ICH05 connected to peer 2025-04-17T18:03:34.188437Z node 5 :INTERCONNECT DEBUG: Handshake [5:30:2058] [node 6] ICH07 SendExBlock ExRequest HandshakeId: "M\007\010>\"A\000p\377\241\007|\204@\201\352\3770\204\232\021\265\034\270p\0279\005\317\361\020\375" 2025-04-17T18:03:34.188502Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:44256 2025-04-17T18:03:34.188805Z node 6 :INTERCONNECT DEBUG: Handshake [6:33:2060] [node 0] ICH02 starting incoming handshake 2025-04-17T18:03:34.189168Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:33:2060]) from: [5:886380389:0] for: [6:4212468795:0] 2025-04-17T18:03:34.189204Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS08 incoming handshake Self# [5:886380389:0] Peer# [6:4212468795:0] Counter# 1 LastInputSerial# 1 2025-04-17T18:03:34.189241Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-04-17T18:03:34.189747Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-17T18:03:34.190241Z node 5 :INTERCONNECT DEBUG: Handshake [5:30:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "M\007\010>\"A\000p\377\241\007|\204@\201\352\3770\204\232\021\265\034\270p\0279\005\317\361\020\375" 2025-04-17T18:03:34.190321Z node 5 :INTERCONNECT INFO: Handshake [5:30:2058] [node 6] ICH04 handshake succeeded 2025-04-17T18:03:34.190533Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-04-17T18:03:34.190577Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:31:2059] poison: true 2025-04-17T18:03:34.190622Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: 
[5:30:2058] poison: false 2025-04-17T18:03:34.190659Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 StateWork -> StateWork 2025-04-17T18:03:34.190712Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS09 handshake done sender: [5:30:2058] self: [5:886380389:0] peer: [6:4212468795:0] socket: 30 2025-04-17T18:03:34.190758Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS10 traffic start 2025-04-17T18:03:34.190822Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS11 registering socket in PollerActor 2025-04-17T18:03:34.190871Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T18:03:34.190913Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2025-04-17T18:03:34.190989Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T18:03:34.191042Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:44260 2025-04-17T18:03:34.191155Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS01 InputSession created 2025-04-17T18:03:34.191425Z node 6 :INTERCONNECT DEBUG: Handshake [6:36:2061] [node 0] ICH02 starting incoming handshake 2025-04-17T18:03:34.192026Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T18:03:34.192124Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.193365Z node 6 :INTERCONNECT NOTICE: Proxy [6:9:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:3893) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:33:2060] held: no 2025-04-17T18:03:34.193439Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:28:2059] poison: false 2025-04-17T18:03:34.193477Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP28 other handshake is still going on 2025-04-17T18:03:34.193670Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T18:03:34.193770Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.193848Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T18:03:34.193898Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.194268Z node 6 :INTERCONNECT INFO: Handshake [6:33:2060] [node 5] ICH04 handshake succeeded 2025-04-17T18:03:34.194517Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-04-17T18:03:34.194575Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:33:2060] poison: false 2025-04-17T18:03:34.194625Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 StateWork -> StateWork 2025-04-17T18:03:34.194672Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:33:2060] self: [6:4212468795:0] peer: [5:886380389:0] socket: 31 2025-04-17T18:03:34.194710Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-04-17T18:03:34.194805Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 
registering socket in PollerActor 2025-04-17T18:03:34.194858Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T18:03:34.194900Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2025-04-17T18:03:34.195016Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-04-17T18:03:34.195084Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2025-04-17T18:03:34.195126Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T18:03:34.195252Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS01 InputSession created 2025-04-17T18:03:34.195413Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T18:03:34.195484Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2025-04-17T18:03:34.195584Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.195611Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.195806Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T18:03:34.195850Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T18:03:34.195910Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T18:03:34.195945Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.195984Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-04-17T18:03:34.196015Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.196045Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T18:03:34.196090Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-04-17T18:03:34.196218Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T18:03:34.196252Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T18:03:34.196319Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2025-04-17T18:03:34.196410Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84 2025-04-17T18:03:34.196494Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T18:03:34.196529Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T18:03:34.196568Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-04-17T18:03:34.196603Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-04-17T18:03:34.196644Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2025-04-17T18:03:34.196730Z node 5 
:INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-17T18:03:34.196814Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 2 2025-04-17T18:03:34.196854Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2025-04-17T18:03:34.196935Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-04-17T18:03:34.196989Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 2 2025-04-17T18:03:34.197093Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS01 socket: 30 reason# 2025-04-17T18:03:34.197165Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:23:2048] VirtualId# [5:886380389:0] 2025-04-17T18:03:34.197231Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation 2025-04-17T18:03:34.197279Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS25 shutdown socket, reason# 2025-04-17T18:03:34.197385Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] >> KqpCost::QuerySeviceRangeFullScan [GOOD] |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 30490, MsgBus: 3293 2025-04-17T18:03:29.964969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344309680819214:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:29.965821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aba/r3tmp/tmpYv9HEl/pdisk_1.dat 2025-04-17T18:03:30.347372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:30.382203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:30.382370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30490, node 1 2025-04-17T18:03:30.384676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:30.446826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:30.446846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:30.446852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:30.446961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3293 TClient is connected to server localhost:3293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:30.985574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.011474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.122241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.279609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:03:31.368132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:03:32.905766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344322565722870:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:32.905890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.182429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.208405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.234760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.261123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.284916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.311449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.377652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.439659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326860690832:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.439712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.439802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326860690837:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.442853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:33.451603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344326860690839:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:33.521884Z node 1 :TX_PROXY ERROR: Actor# [1:7494344326860690893:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:34.701401Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913014712, txId: 281474976710672] shutting down >> TestProtocols::TestHTTPRequest [GOOD] |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 21282, MsgBus: 63428 2025-04-17T18:03:29.869515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344308297448771:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:29.869667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001abc/r3tmp/tmpJMAYwS/pdisk_1.dat 2025-04-17T18:03:30.239086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:30.257226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:30.257289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:30.258683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21282, node 1 2025-04-17T18:03:30.394148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:30.394172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:30.394197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:30.394315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63428 TClient is connected to server localhost:63428 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:30.987855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.020466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.173012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.318795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.387419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.899342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344321182352440:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:32.899474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.185967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.212790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.237983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.262106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.285184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.311785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.335485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.408252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344325477320396:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.408312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.408352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344325477320401:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.411007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:33.418502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344325477320403:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:33.484026Z node 1 :TX_PROXY ERROR: Actor# [1:7494344325477320458:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PONOS {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Group (-∞, +∞)","Name (-∞, +∞)"],"Reverse":false,"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":1}],"E-Rows":"No estimate","Predicate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":927,"Max":927,"Min":927},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Test","ReadRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ReadBytes":{"Count":1,"Sum":20,"Max":20,"Min":20}}],"BaseTimeMs":1744913014622,"OutputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"CpuTimeUs":{"Count":1,"Sum":883,"Max":883,"Min":883},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192},"WaitTimeUs":{"Count":1,"Sum":1035,"Max":1035,"Min":1035},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":1}}],"Node Type":"Merge","SortColumns":["Group (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"BaseTimeMs":1744913014622,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":570,"Max":570,"Min":570},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"CpuTimeUs":{"Count":1,"Sum":589,"Max":589,"Min":589},"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResultBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"OutputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1 ... y_phases { duration_us: 4566 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 4488 affected_shards: 1 } compilation { duration_us: 204105 cpu_time_us: 201078 } process_cpu_time_us: 257 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"No estimate\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node 
Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":927,\"Max\":927,\"Min\":927},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1744913014622,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":883,\"Max\":883,\"Min\":883},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1035,\"Max\":1035,\"Min\":1035},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":1}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node 
Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"BaseTimeMs\":1744913014622,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19}},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":570,\"Max\":570,\"Min\":570},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":589,\"Max\":589,\"Min\":589},\"StageDurationUs\":1000,\"ResultRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResultBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":475,\"Max\":475,\"Min\":475},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":2,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Tasks\":1}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":204105,\"CpuTimeUs\":201078},\"ProcessCpuTimeUs\":257,\"TotalDurationUs\":214212,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":649},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Reverse\":false,\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Predicate\":\"item.Amount \\u003C 
5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":0.883,\"A-Cpu\":0.883,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":0.589,\"A-Cpu\":1.472,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'559) \'(\'\"_id\" \'\"b3659909-fe8cf2e6-a4a84d23-6475a911\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'572) \'(\'\"_id\" \'\"1cb404f2-716b17a9-d52ed976-2a14c8\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 214212 total_cpu_time_us: 205823 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":9},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744913014\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"cc9ab4da-a46a515-43cfbc50-c597e321\",\"version\":\"1.0\"}" 2025-04-17T18:03:34.869948Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344308297448771:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:34.870006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> KqpCost::OltpWriteRow-isSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 5363, MsgBus: 3519 2025-04-17T18:03:30.520904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344313957473780:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:30.521624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a90/r3tmp/tmpwzJ07a/pdisk_1.dat 2025-04-17T18:03:30.900388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:30.900508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T18:03:30.901299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:30.904192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5363, node 1 2025-04-17T18:03:30.996297Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:30.996329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:30.996337Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:30.996448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3519 TClient is connected to server localhost:3519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:31.522480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.544289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:31.556011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.690927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.905520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.969011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:33.392686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326842377412:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.392813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.690508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.711796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.737190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.759410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.781772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.811292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.877796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.953919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326842378083:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.953990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.954072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326842378088:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.956811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:33.963937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344326842378090:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:34.049715Z node 1 :TX_PROXY ERROR: Actor# [1:7494344331137345442:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:35.139929Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913015174, txId: 281474976710672] shutting down |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] >> KqpCost::OltpWriteRow+isSink [GOOD] |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 22763, MsgBus: 5850 2025-04-17T18:03:30.256222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344314967699153:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:30.256400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aa2/r3tmp/tmpAywvgr/pdisk_1.dat 2025-04-17T18:03:30.617077Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:30.617277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:30.617370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:30.630949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22763, node 1 2025-04-17T18:03:30.690168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:30.690205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:30.690215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:30.690341Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5850 TClient is connected to server localhost:5850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:31.190029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.206063Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:31.226915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.355736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.522691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.589939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:33.089471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344327852602617:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.089608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.395406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.414816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.436109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.458803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.480774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.545742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.612146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.675774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344327852603287:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.675838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344327852603292:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.675870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.678798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:33.686124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344327852603294:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:33.756664Z node 1 :TX_PROXY ERROR: Actor# [1:7494344327852603348:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:34.588672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.613940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.639481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:03:35.255851Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344314967699153:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.255937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/Join1_2 1 19 /Root/Join1_1 8 136 >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> KqpExplain::ComplexJoin [GOOD] |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 23912, MsgBus: 4184 2025-04-17T18:03:30.877520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344312593726122:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:30.891649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a67/r3tmp/tmpMZiVXH/pdisk_1.dat 2025-04-17T18:03:31.261325Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23912, node 1 2025-04-17T18:03:31.313520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:31.313735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:31.318396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:31.366715Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:31.366737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:31.366750Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:31.366880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4184 TClient is connected to server localhost:4184 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:31.913752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.938827Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:31.952527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.081627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.227926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.300806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:33.571333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344325478629798:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.571437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.864190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.925470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.952515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.978716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.003651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.068056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.094581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.139006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344329773597755:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.139043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344329773597760:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.139097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.141626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:34.148435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344329773597762:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:34.245146Z node 1 :TX_PROXY ERROR: Actor# [1:7494344329773597817:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx >> KqpDataIntegrityTrails::Select |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink [GOOD] Test command err: Trying to start YDB, gRPC: 23696, MsgBus: 5891 2025-04-17T18:03:30.360045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344314004070425:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:30.360484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a94/r3tmp/tmpN2Mi1p/pdisk_1.dat 2025-04-17T18:03:30.774563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:30.778862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:30.778952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:30.781053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23696, node 1 2025-04-17T18:03:30.862750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:30.862772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:30.862784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:30.862935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5891 TClient is connected to server localhost:5891 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:31.345079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.364815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:31.384838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.528924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.663735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.733591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:33.316730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326888973884:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.316838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.575526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.600134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.625459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.650974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.678656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.704801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.734113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.778901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326888974540:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.778986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.779201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344326888974545:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.783399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:33.793816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344326888974547:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:33.849579Z node 1 :TX_PROXY ERROR: Actor# [1:7494344326888974601:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:34.840840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 query_phases { duration_us: 484 cpu_time_us: 484 } query_phases { duration_us: 2427 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 854 affected_shards: 1 } compilation { duration_us: 49792 cpu_time_us: 47078 } process_cpu_time_us: 780 total_duration_us: 53735 total_cpu_time_us: 49196 query_phases { duration_us: 523 cpu_time_us: 523 } query_phases { duration_us: 3155 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 950 affected_shards: 1 } compilation { duration_us: 55641 cpu_time_us: 53180 } process_cpu_time_us: 883 total_duration_us: 60505 total_cpu_time_us: 55536 2025-04-17T18:03:35.272297Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344335478909690:2540], TxId: 281474976710677, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2eq88vdb28xfek1h3s6epa. SessionId : ydb://session/3?node_id=1&id=Y2I3MTgwYTUtYmYyNGUxZjctNmFkMWMwZGQtNGQ2MzlkZWQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:03:35.272731Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344335478909692:2541], TxId: 281474976710677, task: 2. Ctx: { TraceId : 01js2eq88vdb28xfek1h3s6epa. SessionId : ydb://session/3?node_id=1&id=Y2I3MTgwYTUtYmYyNGUxZjctNmFkMWMwZGQtNGQ2MzlkZWQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494344335478909687:2503], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:35.273055Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I3MTgwYTUtYmYyNGUxZjctNmFkMWMwZGQtNGQ2MzlkZWQ=, ActorId: [1:7494344331183942199:2503], ActorState: ExecuteState, TraceId: 01js2eq88vdb28xfek1h3s6epa, Create QueryResponse for error on request, msg: query_phases { duration_us: 721 cpu_time_us: 721 } query_phases { duration_us: 4723 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 6720 affected_shards: 1 } query_phases { duration_us: 27817 cpu_time_us: 28435 } compilation { duration_us: 167802 cpu_time_us: 162586 } process_cpu_time_us: 1578 total_duration_us: 205275 total_cpu_time_us: 200040 2025-04-17T18:03:35.355334Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344314004070425:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.355405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 639 cpu_time_us: 639 } query_phases { duration_us: 5393 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 5570 affected_shards: 1 } query_phases { duration_us: 1409 cpu_time_us: 2019 } query_phases { duration_us: 3840 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1347 affected_shards: 1 } compilation { duration_us: 192645 cpu_time_us: 188163 } process_cpu_time_us: 1588 total_duration_us: 208170 total_cpu_time_us: 199326 query_phases { duration_us: 650 cpu_time_us: 650 } query_phases { duration_us: 8299 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 8601 affected_shards: 1 } query_phases { duration_us: 1008 cpu_time_us: 691 affected_shards: 1 } compilation { duration_us: 231428 cpu_time_us: 227026 } process_cpu_time_us: 1298 total_duration_us: 252174 total_cpu_time_us: 238266 query_phases { duration_us: 651 cpu_time_us: 651 } query_phases { duration_us: 4322 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 4585 affected_shards: 1 } query_phases { duration_us: 3775 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1166 affected_shards: 1 } compilation { duration_us: 200524 cpu_time_us: 196328 } process_cpu_time_us: 1344 total_duration_us: 215449 total_cpu_time_us: 204074 query_phases { duration_us: 591 cpu_time_us: 591 } query_phases { duration_us: 2958 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 896 affected_shards: 1 } compilation { duration_us: 87683 cpu_time_us: 82398 } process_cpu_time_us: 935 total_duration_us: 92631 total_cpu_time_us: 84820 query_phases { duration_us: 510 cpu_time_us: 510 } query_phases { duration_us: 3750 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1048 affected_shards: 1 } compilation { duration_us: 63414 cpu_time_us: 60077 } process_cpu_time_us: 894 total_duration_us: 70637 total_cpu_time_us: 62529 >> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD] >> TPersQueueTest::TestReadPartitionStatus >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest 
>> KqpCost::OltpWriteRow+isSink [GOOD] Test command err: Trying to start YDB, gRPC: 17142, MsgBus: 22133 2025-04-17T18:03:31.460094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344317307106366:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:31.460678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a5b/r3tmp/tmp77ie3D/pdisk_1.dat 2025-04-17T18:03:31.789537Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17142, node 1 2025-04-17T18:03:31.836940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:31.837037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:31.839088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:31.844432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:31.844445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:31.844448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:31.844511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22133 TClient is connected to server localhost:22133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:32.265554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.286080Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:32.293095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.416445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:03:32.551285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.620043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:34.190261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344330192010013:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.190407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.462377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.484185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.510335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.535783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.560332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.624178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.650607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.727738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344330192010683:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.727799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344330192010688:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.727809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.730795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:34.738865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344330192010690:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:34.829694Z node 1 :TX_PROXY ERROR: Actor# [1:7494344330192010746:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:35.778379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 query_phases { duration_us: 6651 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1455 affected_shards: 1 } compilation { duration_us: 42573 cpu_time_us: 40068 } process_cpu_time_us: 383 total_duration_us: 52529 total_cpu_time_us: 41906 query_phases { duration_us: 9723 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 927 affected_shards: 1 } compilation { duration_us: 45516 cpu_time_us: 43267 } process_cpu_time_us: 393 total_duration_us: 60743 total_cpu_time_us: 44587 2025-04-17T18:03:36.165528Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=5; 2025-04-17T18:03:36.175613Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037922 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:36.175793Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037922 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:03:36.176001Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344338781945866:2512], Table: `/Root/TestTable` ([72057594046644480:17:1]), SessionActorId: [1:7494344334486978373:2512]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037922, Sink=[1:7494344338781945866:2512].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:03:36.176721Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344338781945859:2512], SessionActorId: [1:7494344334486978373:2512], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494344334486978373:2512]. isRollback=0 2025-04-17T18:03:36.176968Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWU0Y2UzYTMtNWY3OTI1MTUtZmNhNmY4Y2ItZmQ1ODZjZDI=, ActorId: [1:7494344334486978373:2512], ActorState: ExecuteState, TraceId: 01js2eq99d6rnj5znymqkahwfc, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494344338781945860:2512] from: [1:7494344338781945859:2512] 2025-04-17T18:03:36.177053Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344338781945860:2512] TxId: 281474976710676. Ctx: { TraceId: 01js2eq99d6rnj5znymqkahwfc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU0Y2UzYTMtNWY3OTI1MTUtZmNhNmY4Y2ItZmQ1ODZjZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:03:36.177249Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWU0Y2UzYTMtNWY3OTI1MTUtZmNhNmY4Y2ItZmQ1ODZjZDI=, ActorId: [1:7494344334486978373:2512], ActorState: ExecuteState, TraceId: 01js2eq99d6rnj5znymqkahwfc, Create QueryResponse for error on request, msg: query_phases { duration_us: 14690 cpu_time_us: 1265 affected_shards: 1 } compilation { duration_us: 50863 cpu_time_us: 48369 } process_cpu_time_us: 585 total_duration_us: 67271 total_cpu_time_us: 50219 query_phases { duration_us: 3748 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1530 affected_shards: 1 } compilation { duration_us: 54173 cpu_time_us: 49627 } process_cpu_time_us: 537 total_duration_us: 60755 total_cpu_time_us: 51694 query_phases { duration_us: 3260 cpu_time_us: 1241 affected_shards: 1 } compilation { duration_us: 72546 cpu_time_us: 70148 } process_cpu_time_us: 512 total_duration_us: 77972 total_cpu_time_us: 71901 query_phases { duration_us: 16767 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1382 affected_shards: 1 } compilation { duration_us: 80712 cpu_time_us: 78762 } process_cpu_time_us: 485 total_duration_us: 98979 total_cpu_time_us: 80629 2025-04-17T18:03:36.460448Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344317307106366:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:36.460527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 5041 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1431 affected_shards: 1 } compilation { duration_us: 42057 cpu_time_us: 38377 } process_cpu_time_us: 558 total_duration_us: 48851 total_cpu_time_us: 40366 query_phases { duration_us: 3280 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1043 affected_shards: 1 } compilation { duration_us: 39589 cpu_time_us: 37576 } process_cpu_time_us: 326 total_duration_us: 45221 total_cpu_time_us: 38945 |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 6776, MsgBus: 19399 2025-04-17T18:03:31.703178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344317113246031:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:31.703320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a52/r3tmp/tmpf8Hfio/pdisk_1.dat 2025-04-17T18:03:32.094509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:32.124721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 6776, node 1 2025-04-17T18:03:32.124809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T18:03:32.126643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:32.174487Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:32.174513Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:32.174523Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:32.174644Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19399 TClient is connected to server localhost:19399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:32.642768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.667355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.800806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:32.969625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:33.053801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:34.647609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344329998149698:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.647723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:34.925750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.947427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.969313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.994010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:35.018958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:35.044850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:35.073284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:35.148085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344334293117655:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:35.148171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:35.148250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344334293117660:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:35.151259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:35.158904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344334293117662:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:35.233265Z node 1 :TX_PROXY ERROR: Actor# [1:7494344334293117717:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:36.245449Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-04-17T18:03:37.714260Z, after 1.469465s 2025-04-17T18:03:36.245709Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:03:36.331539Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-17T18:03:36.482525Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7494344338588085343:2503] 2025-04-17T18:03:36.482562Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7494344338588085310:2503] 2025-04-17T18:03:36.498329Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1744913016539:281474976710672 created 2025-04-17T18:03:36.498605Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-17T18:03:36.498667Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-17T18:03:36.498684Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-17T18:03:36.498942Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key sets: 1 2025-04-17T18:03:36.499138Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-17T18:03:36.499206Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-04-17T18:03:36.499416Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710673. Shard resolve complete, resolved shards: 1 2025-04-17T18:03:36.499456Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Shards nodes resolved, success: 1, failed: 0 2025-04-17T18:03:36.499500Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-04-17T18:03:36.499537Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:03:36.499610Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) (block '( (let $2 (lambda '($5) (block '( (let $6 (Member $5 '"Amount")) (return $6 (Member $5 '"Comment") (Member $5 '"Group") (Member $5 '"Name") (Coalesce (< $6 (Uint64 '"5000")) (Bool 'false))) )))) (let $3 ( ... lt. }. CA StateFunc 271646922 2025-04-17T18:03:36.511054Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-17T18:03:36.511072Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. enter pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-17T18:03:36.511143Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. exit pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 3 freeSpace: 8388548 2025-04-17T18:03:36.511194Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. returned 3 rows; processed 3 rows 2025-04-17T18:03:36.511264Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. dropping batch for read #0 2025-04-17T18:03:36.511276Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. effective maxinflight 1 sorted 1 2025-04-17T18:03:36.511287Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-17T18:03:36.511302Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1, CA Id [1:7494344338588085358:2510]. returned async data processed rows 3 left freeSpace 8388548 received rows 3 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-17T18:03:36.511629Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085358:2510], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2eq9dm1331fe24tb18pzhp. SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:03:36.511663Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085358:2510], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2eq9dm1331fe24tb18pzhp. SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. CurrentExecutionId : . DatabaseId : /Root. 
Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:03:36.511680Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085359:2511], TxId: 281474976710673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. TraceId : 01js2eq9dm1331fe24tb18pzhp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2025-04-17T18:03:36.511703Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-17T18:03:36.511727Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. Finish input channelId: 1, from: [1:7494344338588085358:2510] 2025-04-17T18:03:36.511787Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085359:2511], TxId: 281474976710673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. TraceId : 01js2eq9dm1331fe24tb18pzhp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-17T18:03:36.511927Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085358:2510], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2eq9dm1331fe24tb18pzhp. SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-17T18:03:36.511968Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085358:2510], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2eq9dm1331fe24tb18pzhp. SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:03:36.511986Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1. Tasks execution finished 2025-04-17T18:03:36.511999Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085358:2510], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2eq9dm1331fe24tb18pzhp. SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:03:36.512024Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085359:2511], TxId: 281474976710673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. TraceId : 01js2eq9dm1331fe24tb18pzhp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:03:36.512119Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 1. pass away 2025-04-17T18:03:36.512194Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Send TEvStreamData to [1:7494344338588085310:2503], seqNo: 1, nRows: 1 2025-04-17T18:03:36.512343Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710673;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:03:36.512361Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7494344338588085358:2510], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 9911 Tasks { TaskId: 1 CpuTimeUs: 1206 FinishTimeMs: 1744913016511 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 225 BuildCpuTimeUs: 981 HostName: "ghrun-gkdwnlcfnq" NodeId: 1 StartTimeMs: 1744913016511 CreateTimeMs: 1744913016500 UpdateTimeMs: 1744913016512 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:36.512435Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494344338588085358:2510] 2025-04-17T18:03:36.512487Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7494344338588085359:2511], 2025-04-17T18:03:36.512509Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710673, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T18:03:36.514366Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7494344338588085361:2511] 2025-04-17T18:03:36.514451Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085359:2511], TxId: 281474976710673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. TraceId : 01js2eq9dm1331fe24tb18pzhp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-17T18:03:36.514495Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-17T18:03:36.514506Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. Tasks execution finished 2025-04-17T18:03:36.514520Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344338588085359:2511], TxId: 281474976710673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=. TraceId : 01js2eq9dm1331fe24tb18pzhp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-04-17T18:03:36.514575Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710673, task: 2. pass away 2025-04-17T18:03:36.514634Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710673;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:03:36.514761Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7494344338588085359:2511], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 3212 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 787 FinishTimeMs: 1744913016514 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 190 BuildCpuTimeUs: 597 HostName: "ghrun-gkdwnlcfnq" NodeId: 1 CreateTimeMs: 1744913016507 UpdateTimeMs: 1744913016514 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:36.514799Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710673, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T18:03:36.514807Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494344338588085359:2511] 2025-04-17T18:03:36.514915Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:36.514962Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494344338588085353:2503] TxId: 281474976710673. Ctx: { TraceId: 01js2eq9dm1331fe24tb18pzhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2NGQxMDgtNDQ1MGFjMzktY2IzYTM5YTktYzVmM2UyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.013123s ReadRows: 1 ReadBytes: 20 ru: 8 rate limiter was not found force flag: 1 2025-04-17T18:03:36.515779Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913016539, txId: 281474976710672] shutting down >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink >> LocalTableWriter::ConsistentWrite [GOOD] >> LocalTableWriter::ApplyInCorrectOrder [GOOD] >> LocalTableWriter::SupportedTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ComplexJoin [GOOD] Test command err: 2025-04-17T18:01:06.848868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343697441673618:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:06.848908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a65/r3tmp/tmpkNV3Ia/pdisk_1.dat 2025-04-17T18:01:07.548740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:07.548826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:07.555232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:01:07.596322Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14653, node 1 2025-04-17T18:01:07.654520Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:01:07.654541Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:01:07.850978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:01:07.851004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:01:07.851013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:01:07.851116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:01:08.080898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:7048 2025-04-17T18:01:08.737733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:01:08.839213Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343705033246298:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:08.839271Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:01:09.085903Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:01:09.106336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:09.106450Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:09.124370Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:01:09.134470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:01:09.267329Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:01:09.267429Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:01:09.279159Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:01:09.331918Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332184Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332258Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332359Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332418Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332483Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332565Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332621Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.332710Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:01:09.686776Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:01:09.686853Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:01:09.759072Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:01:09.760155Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:01:09.760217Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:01:09.760422Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:01:09.760459Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:01:09.760485Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:01:09.760508Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:01:09.760535Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:01:09.760557Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:01:09.762584Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:01:09.772993Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7494343709328214068:2248] 2025-04-17T18:01:09.776728Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:01:09.855406Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:01:09.855431Z node 2 :STATISTICS NOTICE: Table _statistics updater. 
Creating table 2025-04-17T18:01:09.855497Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:01:09.879642Z node 2 :TX_PROXY ERROR: Actor# [2:7494343709328214253:2364] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-04-17T18:01:09.918978Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-04-17T18:01:09.920039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:01:09.938317Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:01:09.938368Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7494343709328214299:2327], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:01:09.959702Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7494343709328214323:2391] 2025-04-17T18:01:09.962454Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7494343709328214323:2391], schemeshard id = 72075186224037897 2025-04-17T18:01:09.990421Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:01:10.001391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897 2025-04-17T18:01:10.012718Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:01:10.012768Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-04-17T18:01:10.275848Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-04-17T18:01:10.439126Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:01:11.849088Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343697441673618:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:01:11.896497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:01:12.415542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343723211478445:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:12.415634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:01:12.689362Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.103001s 2025-04-17T18:01:12.689408Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.103090s 2025-04-17T18:01:12.718930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72075186224037897 2025-04-17T18:01:13.042285Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7494343722213116594:2353];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:01:13.042595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7494343722213116594:2353];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:01:13.042874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7494343722213116594:2353];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:01:13.042995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7494343722213116594:2353];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:01:13.043113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7494343722213116594:2353];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:01:13.043234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7494343722213116594:2353];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:01:13.043346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7494343722213116594:2353];tablet_id=72075186224037909;process ... 
=timeout;self_id=[5:7494344281861794217:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:27.419576Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"App\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 19268, MsgBus: 21106 2025-04-17T18:03:29.431263Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494344309094032300:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:29.431399Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a65/r3tmp/tmpPRpSaa/pdisk_1.dat 2025-04-17T18:03:29.541372Z node 6 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 19268, node 6 2025-04-17T18:03:29.570667Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:29.570765Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:29.573391Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:29.601740Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:29.601766Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:29.601777Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:29.601941Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21106 TClient is connected to server localhost:21106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:03:30.120568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:30.128654Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:03:30.135464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:30.212127Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:30.402422Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:30.502004Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:33.025387Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494344326273903257:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.025500Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.096183Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.134647Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.179524Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.210619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.241340Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.309515Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.342615Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.391608Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494344326273903919:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.391678Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494344326273903924:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.391692Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.394645Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:33.403514Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494344326273903926:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:33.489573Z node 6 :TX_PROXY ERROR: Actor# [6:7494344326273903983:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:34.430845Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494344309094032300:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:34.430935Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:34.467117Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:34.727013Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:03:34.758339Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> LocalTableWriter::DecimalKeys [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink >> KqpDataIntegrityTrails::Ddl |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-04-17T18:03:35.243183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344337745258796:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.243244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d2/r3tmp/tmpWeIH98/pdisk_1.dat 2025-04-17T18:03:35.687157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:35.696721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:35.696806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:35.700101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected
to server localhost:13595 TServer::EnableGrpc on GrpcPort 21962, node 1 2025-04-17T18:03:36.016426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:36.016451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:36.016457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:36.016550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:36.516893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:36.534137Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:36.539680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913016658 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-17T18:03:36.682489Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handshake: worker# [1:7494344342040226804:2357] 2025-04-17T18:03:36.682827Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:03:36.683191Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:03:36.683232Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Send handshake: worker# [1:7494344342040226804:2357] 2025-04-17T18:03:36.683601Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.688780Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-17T18:03:36.688967Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-17T18:03:36.689140Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344342040226807:2356] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T18:03:36.689203Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.689286Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344342040226807:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-17T18:03:36.701106Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344342040226807:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.701199Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.701253Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344342040226803:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink >> KqpDataIntegrityTrails::BrokenReadLock-UseSink |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-04-17T18:03:35.242762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344336283317602:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.242841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e1/r3tmp/tmpjdadQX/pdisk_1.dat 2025-04-17T18:03:35.656761Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:35.685629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:35.686244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:35.690835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4693 TServer::EnableGrpc on GrpcPort 4604, node 1 2025-04-17T18:03:36.015352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:36.015383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:36.015394Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:36.015573Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4693 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:36.520464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:36.539074Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:36.542384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913016665 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-17T18:03:36.664042Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handshake: worker# [1:7494344340578285515:2295] 2025-04-17T18:03:36.664280Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:03:36.664511Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:03:36.664542Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Send handshake: worker# [1:7494344340578285515:2295] 2025-04-17T18:03:36.664991Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.670829Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-17T18:03:36.670985Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-04-17T18:03:36.673134Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340578285611:2356] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T18:03:36.673210Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] 
Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.673323Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340578285611:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-17T18:03:36.676440Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340578285611:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.676496Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.676540Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-04-17T18:03:36.676986Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.677444Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.677883Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2025-04-17T18:03:36.677964Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-04-17T18:03:36.678097Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340578285611:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 
TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-17T18:03:36.681008Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340578285611:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.681063Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.681095Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-04-17T18:03:36.681474Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.681634Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-04-17T18:03:36.681731Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340578285611:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-17T18:03:36.683875Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340578285611:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.683919Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.683948Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2025-04-17T18:03:36.684320Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340578285608:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-04-17T18:03:35.242665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344334535274181:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.242742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f7/r3tmp/tmplHBomk/pdisk_1.dat 2025-04-17T18:03:35.685715Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:35.688414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:35.688503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:35.695749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14456 TServer::EnableGrpc on GrpcPort 3564, node 1 2025-04-17T18:03:36.011090Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:36.011113Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:36.011125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:36.011237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:36.516755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:36.534752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:36.543157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913016658 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ui... (TRUNCATED) 2025-04-17T18:03:36.678303Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handshake: worker# [1:7494344338830242086:2293] 2025-04-17T18:03:36.678627Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:03:36.678901Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:03:36.678939Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Send handshake: worker# [1:7494344338830242086:2293] 2025-04-17T18:03:36.682095Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: 
ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.682989Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 
71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-04-17T18:03:36.683438Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344338830242183:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T18:03:36.683506Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.683805Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344338830242183:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange 
Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-04-17T18:03:36.764989Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344338830242183:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.765068Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.765134Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338830242180:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2025-04-17T18:03:35.242725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344335997587898:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.242803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019cd/r3tmp/tmpV6r5qm/pdisk_1.dat 2025-04-17T18:03:35.651919Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:35.686839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:35.686934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:35.690839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25595 TServer::EnableGrpc on GrpcPort 29667, node 1 2025-04-17T18:03:36.018777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:36.018799Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:36.018806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:36.018922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25595 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:36.517732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:36.544918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913016658 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-17T18:03:36.683275Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handshake: worker# [1:7494344340292555800:2294] 2025-04-17T18:03:36.683547Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:03:36.683810Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:03:36.683859Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Send handshake: worker# [1:7494344340292555800:2294] 2025-04-17T18:03:36.684259Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.698326Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-17T18:03:36.698507Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-17T18:03:36.698667Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340292555896:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T18:03:36.698730Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.698813Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340292555896:2355] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-17T18:03:36.701549Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340292555896:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.701617Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.701696Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-17T18:03:36.702162Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.702655Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-04-17T18:03:36.702777Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-04-17T18:03:36.702895Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340292555896:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-17T18:03:36.704520Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344340292555896:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.704561Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.704596Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344340292555893:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2025-04-17T18:03:35.242726Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344334200216766:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.242806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019af/r3tmp/tmpsyMW38/pdisk_1.dat 2025-04-17T18:03:35.713033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:35.730897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:35.731022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:35.733030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11447 TServer::EnableGrpc on GrpcPort 5714, node 1 2025-04-17T18:03:36.014819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:36.014842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:36.014849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:36.014948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:36.534281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:36.551301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913016672 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 I... (TRUNCATED) 2025-04-17T18:03:36.678543Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handshake: worker# [1:7494344338495184666:2294] 2025-04-17T18:03:36.678831Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:03:36.680014Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:03:36.680052Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Send handshake: worker# [1:7494344338495184666:2294] 2025-04-17T18:03:36.680659Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-17T18:03:36.680852Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2025-04-17T18:03:36.681054Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344338495184762:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-17T18:03:36.681125Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.681231Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344338495184762:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-04-17T18:03:36.684069Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7494344338495184762:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-17T18:03:36.684162Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-17T18:03:36.684223Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7494344338495184759:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |85.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |85.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_ChildFirst >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] >> KqpDataIntegrityTrails::Select [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> DataShardTxOrder::RandomPointsAndRanges >> TxOrderInternals::OperationOrder [GOOD] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> DataShardScan::ScanFollowedByUpdate >> DataShardTxOrder::ReadWriteReorder >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardTxOrder::RandomPoints_DelayData >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty >> DataShardOutOfOrder::TestSnapshotReadPriority >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> DataShardTxOrder::ZigZag >> DataShardTxOrder::RandomPoints_DelayRS ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24701, MsgBus: 5093 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c56/r3tmp/tmpetWMZU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24701, node 1 TClient is connected to server localhost:5093 TClient is connected to server localhost:5093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
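The `>>` lines above are the scheduler stream: each names a test and, once it finishes, appends a bracketed verdict such as `[GOOD]` or `[FAIL]`; a test announced with no bracket is still queued or running. A small tally helper over that stream (my own sketch, assuming only the `>> name [VERDICT]` shape seen here):

```python
import re
from collections import Counter

# Scheduler lines as they appear above, e.g.
#   >> DataShardTxOrder::ReadWriteReorder [GOOD]
#   >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL]
TEST = re.compile(r">> (\S+)(?: \[(\w+)\])?")

def tally(stream):
    """Count verdicts; tests with no bracket yet are counted as SCHEDULED."""
    counts = Counter()
    for _name, verdict in TEST.findall(stream):
        counts[verdict or "SCHEDULED"] += 1
    return counts

print(tally(">> A::B [GOOD] >> C::D [FAIL] >> E::F"))
# Counter({'GOOD': 1, 'FAIL': 1, 'SCHEDULED': 1})
```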
>> DataShardTxOrder::ForceOnlineBetweenOnline >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-DYNUMBER [GOOD] Test command err: 2025-04-17T18:02:05.011315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343950378976106:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:05.011359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004fd/r3tmp/tmpxACarC/pdisk_1.dat 2025-04-17T18:02:06.131987Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:06.136697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:02:06.155704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:06.155797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:06.167273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15178, node 1 2025-04-17T18:02:07.173254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:02:07.173278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:02:07.173289Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:02:07.173416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:09.291589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:02:09.777276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343967558846171:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:09.777350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:10.013478Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343950378976106:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:10.013552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:02:12.443680Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950378976147:2125] Handle TEvProposeTransaction 2025-04-17T18:02:12.443720Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950378976147:2125] TxId# 281474976715658 ProcessProposeTransaction 2025-04-17T18:02:12.443776Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950378976147:2125] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7494343980443748158:2646] 2025-04-17T18:02:12.514211Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Group" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } IndexDescription { Name: "value_idx" KeyColumnNames: "Value" Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } } } } UserToken: "" DatabaseName: "" 2025-04-17T18:02:12.514264Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:02:12.514627Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:02:12.514691Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:02:12.514893Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:02:12.515064Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:02:12.515114Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-04-17T18:02:12.515279Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 HANDLE EvClientConnected 2025-04-17T18:02:12.517177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:02:12.523145Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 Status StatusAccepted 
HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-04-17T18:02:12.523209Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748158:2646] txid# 281474976715658 SEND to# [1:7494343980443748157:2353] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-04-17T18:02:12.842137Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950378976147:2125] Handle TEvNavigate describe path /Root 2025-04-17T18:02:12.842249Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748467:2896] HANDLE EvNavigateScheme /Root 2025-04-17T18:02:12.842556Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748467:2896] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:02:12.842663Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748467:2896] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root" Options { ShowPrivateTable: false } 2025-04-17T18:02:12.843500Z node 1 :TX_PROXY DEBUG: Actor# [1:7494343980443748467:2896] Handle TEvDescribeSchemeResult Forward to# [1:7494343980443748465:2369] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 65 Record# Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744912929382 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/Root:test" Kind: "test" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "ssd" Kind: "ssd" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-04-17T18:02:12.872456Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494343980443748469:2370] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/table TableId: [72057594046644480:2:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: 
[OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:02:12.872493Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494343980443748469:2370] [0] Allocate txId 2025-04-17T18:02:12.872580Z node 1 :TX_PROXY DEBUG: actor# [1:7494343950378976147:2125] Handle TEvAllocateTxId 2025-04-17T18:02:12.872695Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494343980443748469:2370] [281474976715659] TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T18:02:12.872715Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494343980443748469:2370] [281474976715659] Resolve database: name# /Root 2025-04-17T18:02:12.873071Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494343980443748469:2370] [281474976715659] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [ ... a614729 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180331Z 2025-04-17T18:03:31.742811Z node 37 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [37:7494344319456128970:2205], result# No response body. 2025-04-17T18:03:31.814121Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344319456128982:2401] [0] Resolve database: name# /Root 2025-04-17T18:03:31.818901Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344319456128982:2401] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:03:31.818941Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344319456128982:2401] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T18:03:31.837819Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344319456128982:2401] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715665 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:1905" scheme: HTTP bucket: "test_bucket" items { source_prefix: "DyNumberTable" destination_path: "/Root/DyNumberTable" } } StartTime { seconds: 1744913011 } } REQUEST: GET /test_bucket?prefix=DyNumberTable HTTP/1.1 HEADERS: Host: localhost:1905 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B0367A8E-EB56-4C37-B602-F6A5BC7EC8DF amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, 
SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=1f8731087945ca46c2b1e0edaf6fc7bc3005f315ec7677b48cac33c202e6a58e content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180331Z S3_MOCK::HttpServeList: DyNumberTable 2025-04-17T18:03:31.843419Z node 37 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [37:7494344319456128970:2205], result# ListObjectsResult { } 2025-04-17T18:03:31.843483Z node 37 :IMPORT INFO: Reply: self# [37:7494344319456128970:2205], success# 1, error# 2025-04-17T18:03:31.843597Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:03:31.843618Z node 37 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976715665, itemIdx# 0, success# 1 2025-04-17T18:03:31.843997Z node 37 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/DyNumberTable' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T18:03:31.855727Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:03:31.855879Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:03:31.855895Z node 37 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 281474976715665 2025-04-17T18:03:31.855963Z node 37 :IMPORT INFO: TImport::TTxProgress: CreateTable propose: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/DyNumberTable' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710762 2025-04-17T18:03:31.856121Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:03:31.857512Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2025-04-17T18:03:31.860458Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:03:31.860484Z node 37 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusAccepted 2025-04-17T18:03:31.860613Z node 37 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/DyNumberTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976710762 Issue: '' } 2025-04-17T18:03:31.864274Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:03:31.922068Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:03:31.922098Z node 37 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710762 2025-04-17T18:03:31.922207Z node 37 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: 
'(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/DyNumberTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T18:03:31.923899Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:03:31.923984Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:03:31.923999Z node 37 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710763, id# 281474976715665 2025-04-17T18:03:31.924047Z node 37 :IMPORT INFO: TImport::TTxProgress: Restore propose: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/DyNumberTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710763 2025-04-17T18:03:31.924681Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:03:31.925180Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRestore, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-17T18:03:31.927045Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:03:31.927073Z node 37 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710763, status# StatusAccepted 2025-04-17T18:03:31.927158Z node 37 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/DyNumberTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Subscribed WaitTxId: 281474976710763 Issue: '' } 2025-04-17T18:03:31.929230Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoComplete REQUEST: HEAD /test_bucket/DyNumberTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:1905 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B68A78EA-F4D5-4BC7-8580-58A6C5BBF1CD amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=7a8ebe7ea3b4a9a4cd3b9df0991554a046f0d3ba8a89f6fdd3f26a36220e4b65 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180331Z S3_MOCK::HttpServeRead: /test_bucket/DyNumberTable/data_00.csv / 7 REQUEST: GET /test_bucket/DyNumberTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:1905 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EEDC5665-5940-4D22-851B-B180A52EF978 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=5d26102ea3aedf6f36484b87d6c89787d142a01505fc78bae6de22405dca1470 content-type: application/xml range: bytes=0-6 user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T180331Z S3_MOCK::HttpServeRead: /test_bucket/DyNumberTable/data_00.csv / 7 2025-04-17T18:03:32.005693Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T18:03:32.005718Z node 37 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-17T18:03:32.007256Z node 37 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T18:03:32.251862Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344323751096467:2409] [0] Resolve database: name# /Root 2025-04-17T18:03:32.252354Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344323751096467:2409] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:03:32.252389Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344323751096467:2409] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T18:03:32.253012Z node 37 :TX_PROXY DEBUG: [GetImport] [37:7494344323751096467:2409] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715665 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:1905" scheme: HTTP bucket: "test_bucket" items { source_prefix: "DyNumberTable" destination_path: "/Root/DyNumberTable" } } StartTime { seconds: 1744913011 } EndTime { seconds: 1744913012 } } 2025-04-17T18:03:32.367427Z node 37 :TX_PROXY DEBUG: actor# [37:7494344297981290738:2137] Handle TEvExecuteKqpTransaction 2025-04-17T18:03:32.367470Z node 37 :TX_PROXY DEBUG: actor# [37:7494344297981290738:2137] TxId# 281474976715666 ProcessProposeKqpTransaction 2025-04-17T18:03:32.368485Z node 37 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2eq5h22tqqyjef9ektcxaa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=37&id=NGNjMzJlZGMtZTUxYjYyNzItN2QxOThmNjAtOTBmM2ZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 22884, MsgBus: 22996 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bff/r3tmp/tmpfvq7nl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22884, node 1 TClient is connected to server localhost:22996 TClient is connected to server localhost:22996 WaitRootIsUp 'Root'... 
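The mock-S3 requests in the backup/restore log above carry AWS Signature Version 4 headers: `AWS4-HMAC-SHA256`, a credential scope of `20250417/us-east-1/s3/aws4_request`, and a `SignedHeaders` list. The signature is produced by an HMAC-SHA256 chain over the date, region, service, and the literal `aws4_request`. The sketch below shows only that documented key-derivation step; the canonical-request construction is elided, so a placeholder hash stands in for it:

```python
import hashlib
import hmac

def _hmac(key: bytes, msg: str) -> bytes:
    return hmac.new(key, msg.encode(), hashlib.sha256).digest()

def sigv4_signature(secret: str, date: str, region: str, service: str,
                    string_to_sign: str) -> str:
    """Derive the SigV4 signing key and sign; date is YYYYMMDD (e.g. 20250417)."""
    key = _hmac(("AWS4" + secret).encode(), date)
    for part in (region, service, "aws4_request"):
        key = _hmac(key, part)
    return hmac.new(key, string_to_sign.encode(), hashlib.sha256).hexdigest()

# Scope matching the requests above: 20250417/us-east-1/s3/aws4_request.
# The 64 zeros stand in for the hex SHA-256 of the canonical request.
print(sigv4_signature("example-secret", "20250417", "us-east-1", "s3",
                      "AWS4-HMAC-SHA256\n20250417T180331Z\n"
                      "20250417/us-east-1/s3/aws4_request\n" + "0" * 64))
```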
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] |85.3%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 11026, MsgBus: 62323 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf0/r3tmp/tmptS0pvX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11026, node 1 TClient is connected to server localhost:62323 TClient is connected to server localhost:62323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::Ddl [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 25601, MsgBus: 13396 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c0f/r3tmp/tmptowXff/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25601, node 1 TClient is connected to server localhost:13396 TClient is connected to server localhost:13396 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |85.4%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18906, MsgBus: 12357 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bcc/r3tmp/tmpnuo5aS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18906, node 1 TClient is connected to server localhost:12357 TClient is connected to server localhost:12357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24263, MsgBus: 32155 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bbd/r3tmp/tmpZ1D6r1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24263, node 1 TClient is connected to server localhost:32155 TClient is connected to server localhost:32155 WaitRootIsUp 'Root'... 
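The `TClient::Ls response` dumps here and above print `PathDescription` in protobuf text format: `Key: value` pairs and nested `Key { ... }` blocks. A rough stdlib parser for that shape (my own sketch, not a full text-proto grammar: it keeps quoted values verbatim, collects repeated keys into lists, and on the `(TRUNCATED)` dumps simply parses up to the damage):

```python
import re

# Tokens: quoted strings, braces, or any other whitespace-free run.
TOKEN = re.compile(r'"(?:[^"\\]|\\.)*"|\{|\}|[^\s{}]+')

def _parse_block(tokens, i=0):
    """Parse `Key: value` / `Key { ... }` pairs into a dict of lists."""
    out = {}
    while i < len(tokens):
        tok = tokens[i]
        if tok == "}":
            return out, i + 1
        key = tok.rstrip(":")
        if i + 1 < len(tokens) and tokens[i + 1] == "{":
            val, i = _parse_block(tokens, i + 2)
        else:
            val, i = tokens[i + 1], i + 2
        out.setdefault(key, []).append(val)
    return out, i

def parse(text):
    return _parse_block(TOKEN.findall(text))[0]

desc = parse('Self { Name: "Root" PathId: 1 } Children { Name: ".sys" }')
print(desc["Self"][0]["Name"])  # '"Root"' (quotes are kept verbatim)
```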
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> DataShardTxOrder::ReadWriteReorder [GOOD] >> KqpLimits::QSReplySize+useSink [GOOD] >> KqpLimits::QSReplySize-useSink >> DataShardTxOrder::DelayData >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2327, MsgBus: 24667 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b96/r3tmp/tmp1qivgb/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2327, node 1 TClient is connected to server localhost:24667 TClient is connected to server localhost:24667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3887, MsgBus: 24230 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ba0/r3tmp/tmpRj9MA3/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3887, node 1 TClient is connected to server localhost:24230 TClient is connected to server localhost:24230 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 10645, MsgBus: 21932 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bab/r3tmp/tmpQ5DLHa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10645, node 1 TClient is connected to server localhost:21932 TClient is connected to server localhost:21932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
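The `DataShardTxOrder::ReadWriteReorder` trace below walks each operation through a fixed chain of execution units (`PlanQueue`, `LoadTxDetails`, ..., `CompleteOperation`) and reports a status after every unit. The toy model below reproduces that control flow only; it is not YDB's implementation, and the unit order and status names are copied from the trace itself:

```python
from enum import Enum, auto

class EStatus(Enum):
    # Status names mirror the trace below; semantics here are simplified.
    Executed = auto()
    DelayComplete = auto()

# Unit order as it appears for a planned data tx in the trace below.
UNITS = ["PlanQueue", "LoadTxDetails", "FinalizeDataTxPlan",
         "BuildAndWaitDependencies", "BuildDataTxOutRS", "StoreAndSendOutRS",
         "PrepareDataTxInRS", "LoadAndWaitInRS", "ExecuteDataTx",
         "CompleteOperation", "CompletedOperations"]

def run(op):
    """Walk an operation through the unit pipeline, logging like the trace."""
    for unit in UNITS:
        status = (EStatus.DelayComplete if unit == "CompleteOperation"
                  else EStatus.Executed)
        print(f"Trying to execute {op} on unit {unit}")
        print(f"Execution status for {op} is {status.name}")
    print(f"Execution plan for {op} has finished")

run("[1000005:12]")
```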
>> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2025-04-17T18:03:43.766764Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.787115Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.787564Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792010Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.848410Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.855295Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.945037Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.946857Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.946931Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.947053Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.947409Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.947816Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.947884Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.948195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.948232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.043031Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.077531Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.077711Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.077816Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.077866Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.077902Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.077942Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.078081Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.078132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.078441Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.078537Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.078650Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.078691Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.078746Z node 1 
:TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.078789Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.078832Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.078863Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.078905Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.079002Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.079040Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.079111Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.086192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.086248Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.086319Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.086440Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.086489Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.086556Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.086595Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.086623Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.086663Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.086691Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.086907Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.086933Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.086957Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.086993Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.087031Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.087055Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.087096Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.087122Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.087140Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.099220Z 
node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.099296Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.099379Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.099421Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.099491Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.100066Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.100127Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.100185Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.100262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.100292Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.100418Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.100468Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.100520Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.100555Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.104402Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.104466Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.104684Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.104724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.104778Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.104820Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.104853Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.104890Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.104924Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.104969Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.105006Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.105076Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.105118Z node 1 :TX_DATASHARD TRACE: Trying to execute 
[1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.105287Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.105327Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.105353Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.105377Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.105400Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.105463Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.105491Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.105529Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.105562Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.105626Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.105681Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.105713Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.105756Z node 1 :TX_DATASH ... WaitInRS 2025-04-17T18:03:45.108630Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-17T18:03:45.108650Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2025-04-17T18:03:45.108681Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2025-04-17T18:03:45.108709Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2025-04-17T18:03:45.109013Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2025-04-17T18:03:45.109067Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:45.109117Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-17T18:03:45.109142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2025-04-17T18:03:45.109217Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2025-04-17T18:03:45.109245Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2025-04-17T18:03:45.109402Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2025-04-17T18:03:45.109428Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2025-04-17T18:03:45.109461Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2025-04-17T18:03:45.109496Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit 
CompletedOperations 2025-04-17T18:03:45.109539Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-17T18:03:45.109563Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2025-04-17T18:03:45.109596Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437185 has finished 2025-04-17T18:03:45.109629Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:45.109669Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:03:45.109703Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T18:03:45.109738Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T18:03:45.109875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:234:2227], Recipient [1:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:45.109913Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:45.109967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:45.110007Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:45.110032Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:45.110059Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2025-04-17T18:03:45.110083Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2025-04-17T18:03:45.110107Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.110134Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:45.110155Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:45.110195Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:45.110825Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2025-04-17T18:03:45.110866Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.110890Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:45.110926Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-17T18:03:45.110951Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2025-04-17T18:03:45.110982Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.111003Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-17T18:03:45.111026Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:45.111045Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:45.111088Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically complete end at 9437184 2025-04-17T18:03:45.111113Z node 1 :TX_DATASHARD TRACE: Operation 
[1000005:12] is the new logically incomplete end at 9437184 2025-04-17T18:03:45.111150Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437184 2025-04-17T18:03:45.111182Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.111204Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:45.111234Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2025-04-17T18:03:45.111259Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2025-04-17T18:03:45.111352Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.111383Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2025-04-17T18:03:45.111404Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2025-04-17T18:03:45.111424Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2025-04-17T18:03:45.111445Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.111465Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2025-04-17T18:03:45.111502Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2025-04-17T18:03:45.111523Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2025-04-17T18:03:45.111551Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.111569Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2025-04-17T18:03:45.111589Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2025-04-17T18:03:45.111612Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2025-04-17T18:03:45.111633Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.111671Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2025-04-17T18:03:45.111693Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:03:45.111717Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2025-04-17T18:03:45.112079Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2025-04-17T18:03:45.112125Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:45.112172Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.112197Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:03:45.112220Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2025-04-17T18:03:45.112247Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.112396Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2025-04-17T18:03:45.112424Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2025-04-17T18:03:45.112449Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2025-04-17T18:03:45.112486Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2025-04-17T18:03:45.112516Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-17T18:03:45.112538Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2025-04-17T18:03:45.112560Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2025-04-17T18:03:45.112590Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:45.112612Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:45.112642Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:45.112668Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:45.125867Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2025-04-17T18:03:45.125950Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2025-04-17T18:03:45.126009Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:03:45.126075Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2025-04-17T18:03:45.126135Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:45.126196Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T18:03:45.126603Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2025-04-17T18:03:45.126639Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-17T18:03:45.126672Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.126697Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.126736Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:45.126765Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 2999, MsgBus: 24176 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b75/r3tmp/tmpJKI6rW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2999, node 1 TClient is connected to server localhost:24176 TClient is connected to server localhost:24176 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2025-04-17T18:03:43.766297Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.787810Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.788286Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792035Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.841040Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.845252Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.918676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.923584Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.923659Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.923751Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.927270Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.927799Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.927875Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.930206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.930253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.031122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.071622Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.071807Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.071900Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.071934Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.071969Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: 
at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.072004Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.072155Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.072207Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.072458Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.072535Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.072649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.072684Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.072737Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.072783Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.072820Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.072851Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.072893Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.072987Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.073021Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.073078Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.075634Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.075688Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.075768Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.075907Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.075944Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.076014Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.076062Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.076093Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.076140Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.076186Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.076469Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 
2025-04-17T18:03:44.076514Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.076548Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.076580Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.076639Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.076669Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.076707Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.076747Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.076773Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.088860Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.088918Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.088951Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.088987Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.089037Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.089576Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.089627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.089666Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.089731Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.089757Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.089857Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.089893Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.089952Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.089990Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.093676Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.093730Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.093926Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.093961Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.094004Z node 1 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.094040Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.094071Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.094104Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.094134Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.094191Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.094246Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.094294Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.094326Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.094457Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.094486Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.094521Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.094541Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.094563Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.094616Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.094641Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.094674Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.094703Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.094751Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.094800Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.094833Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.094874Z node 1 :TX_DATA ... 
2025-04-17T18:03:45.189711Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.189759Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.189804Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.189870Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T18:03:45.189910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.190039Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:45.190071Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.190093Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:45.190121Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:45.190157Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.190210Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.190251Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.190353Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:03:45.190386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.190495Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:45.190520Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:45.190537Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:45.190557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.190577Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.190627Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.190674Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:03:45.190697Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.190814Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.190837Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.190876Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.190920Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:03:45.190947Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.191055Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.191083Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.191126Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.191167Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:03:45.191187Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.191301Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.191336Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2025-04-17T18:03:45.191379Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-17T18:03:45.191556Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.191686Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.191709Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.191740Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.191777Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:03:45.191798Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.191895Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.191935Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.191969Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.192040Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:03:45.192075Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.192186Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.192208Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.192234Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:45.192257Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.192328Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:45.192369Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.192393Z 
node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-04-17T18:03:45.192426Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:45.192487Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:03:45.192514Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.192745Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T18:03:45.192790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:45.192824Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2025-04-17T18:03:45.192894Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:03:45.192916Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:45.192936Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-17T18:03:45.192991Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:03:45.193017Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:45.193037Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-17T18:03:45.193116Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:03:45.193146Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:45.193181Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-17T18:03:45.193245Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:03:45.193272Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:45.193300Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-17T18:03:45.193352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:03:45.193385Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-04-17T18:03:45.193407Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-17T18:03:45.193469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:03:45.193614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:45.193645Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-17T18:03:45.193707Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:03:45.193728Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:45.193763Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10325, MsgBus: 30090 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b74/r3tmp/tmptyAqRY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10325, node 1 TClient is connected to server localhost:30090 TClient is connected to server localhost:30090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
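The datashard traces above show how a planned data transaction advances through an ordered chain of execution units (PlanQueue → LoadTxDetails → FinalizeDataTxPlan → BuildAndWaitDependencies → BuildDataTxOutRS → StoreAndSendOutRS → PrepareDataTxInRS → LoadAndWaitInRS → ExecuteDataTx → CompleteOperation → CompletedOperations), with each unit reporting a status such as Executed, ExecutedNoMoreRestarts, or DelayComplete; a DelayComplete unit is finished later, inside TTxProgressTransaction::Complete, which is when the result is sent to the client. Below is a minimal C++ sketch of that status-driven pipeline — the unit names and statuses are taken from the log, but the types and loop are illustrative assumptions, not the actual NKikimr::NDataShard interfaces.

```cpp
#include <iostream>
#include <string>
#include <vector>

// Statuses as printed in the trace ("Execution status for ... is ...").
enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete };

struct TUnit {
    std::string Name;
    EStatus Status;  // what the unit reports for this operation (assumed fixed here)
};

int main() {
    // Unit order for a planned data tx, copied from the log above.
    const std::vector<TUnit> plan = {
        {"PlanQueue", EStatus::Executed},
        {"LoadTxDetails", EStatus::Executed},
        {"FinalizeDataTxPlan", EStatus::Executed},
        {"BuildAndWaitDependencies", EStatus::Executed},
        {"BuildDataTxOutRS", EStatus::Executed},
        {"StoreAndSendOutRS", EStatus::Executed},
        {"PrepareDataTxInRS", EStatus::Executed},
        {"LoadAndWaitInRS", EStatus::Executed},
        {"ExecuteDataTx", EStatus::Executed},
        {"CompleteOperation", EStatus::DelayComplete},
        {"CompletedOperations", EStatus::Executed},
    };

    std::vector<std::string> delayed;  // finished at TTxProgressTransaction::Complete
    for (const auto& unit : plan) {
        std::cout << "executing on unit " << unit.Name << "\n";
        if (unit.Status == EStatus::DelayComplete)
            delayed.push_back(unit.Name);  // result is not sent to the client yet
    }
    for (const auto& name : delayed)
        std::cout << "Complete execution on unit " << name << "\n";  // result goes out now
}
```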
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_NotSplitedTopic >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> DataShardOutOfOrder::UncommittedReadSetAck >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2025-04-17T18:03:43.766448Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.787722Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.788173Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792032Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.843762Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.848463Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.926244Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.927977Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.928071Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.928174Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.928519Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.928929Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.928995Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.930200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.930243Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.031139Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.059726Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.059941Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.060028Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.060068Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.060100Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.060135Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.060283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.060332Z node 1 :TX_DATASHARD 
TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.060569Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.060651Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.060779Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.060823Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.060887Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.060933Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.060977Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.061011Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.061059Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.061168Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.061210Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.061264Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.067413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.067498Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.067582Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.067699Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.067730Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.067799Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.067840Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.067870Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.067939Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.067981Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.068267Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.068307Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.068336Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.068371Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.068411Z node 1 :TX_DATASHARD TRACE: 
Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.068437Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.068467Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.068489Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.068505Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.080483Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.080554Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.080608Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.080672Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.080732Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.081326Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.081381Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.081424Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.081489Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.081516Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.081630Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.081667Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.081724Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.081795Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.085622Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.085686Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.085895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.085936Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.085988Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.086031Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.086064Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.086101Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 
2025-04-17T18:03:44.086136Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.086201Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.086243Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.086306Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.086346Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.086494Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.086526Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.086549Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.086571Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.086592Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.086652Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.086677Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.086768Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.086800Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.086842Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.086893Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.086929Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.086969Z node 1 :TX_D ... 
essageQuota: 9 2025-04-17T18:03:46.047389Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437186, TxId: 36, MessageQuota: 10 2025-04-17T18:03:46.047571Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437186, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-04-17T18:03:46.047706Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437184, TxId: 36, MessageQuota: 10 2025-04-17T18:03:46.047842Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437184, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-04-17T18:03:46.047936Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437185, TxId: 36, PendingAcks: 0 2025-04-17T18:03:46.047982Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437185, TxId: 36, MessageQuota: 9 2025-04-17T18:03:46.048332Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437186, TxId: 36, PendingAcks: 0 2025-04-17T18:03:46.048373Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437186, TxId: 36, MessageQuota: 9 2025-04-17T18:03:46.048640Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437184, TxId: 36, PendingAcks: 0 2025-04-17T18:03:46.048675Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437184, TxId: 36, MessageQuota: 9 2025-04-17T18:03:46.049015Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437186 2025-04-17T18:03:46.049053Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437186 2025-04-17T18:03:46.049132Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437184 2025-04-17T18:03:46.049170Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437184 2025-04-17T18:03:46.049269Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437185 2025-04-17T18:03:46.049289Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437185 2025-04-17T18:03:46.049414Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:457:2399], Recipient [1:457:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.049459Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.049528Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-17T18:03:46.049565Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:03:46.049610Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2025-04-17T18:03:46.049640Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2025-04-17T18:03:46.049673Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2025-04-17T18:03:46.049722Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-04-17T18:03:46.049752Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2025-04-17T18:03:46.049818Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2025-04-17T18:03:46.049851Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2025-04-17T18:03:46.050087Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is DelayComplete 2025-04-17T18:03:46.050124Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2025-04-17T18:03:46.050161Z node 1 :TX_DATASHARD 
TRACE: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2025-04-17T18:03:46.050644Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2025-04-17T18:03:46.050713Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-04-17T18:03:46.050742Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2025-04-17T18:03:46.050788Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437186 has finished 2025-04-17T18:03:46.050847Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.050878Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-17T18:03:46.050919Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-17T18:03:46.050966Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-17T18:03:46.051149Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:234:2227], Recipient [1:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.051188Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.051237Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:46.051268Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:03:46.051312Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2025-04-17T18:03:46.051371Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2025-04-17T18:03:46.051412Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2025-04-17T18:03:46.051452Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-04-17T18:03:46.051490Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2025-04-17T18:03:46.051515Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2025-04-17T18:03:46.051540Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2025-04-17T18:03:46.051709Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is DelayComplete 2025-04-17T18:03:46.051730Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2025-04-17T18:03:46.051748Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2025-04-17T18:03:46.051770Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2025-04-17T18:03:46.051803Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-04-17T18:03:46.051819Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2025-04-17T18:03:46.051853Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437184 has finished 2025-04-17T18:03:46.051877Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.051897Z node 1 :TX_DATASHARD TRACE: Check candidate 
unit PlanQueue at 9437184 2025-04-17T18:03:46.051914Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:46.051937Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:46.052091Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:345:2313], Recipient [1:345:2313]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.052113Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.052140Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-17T18:03:46.052162Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:03:46.052180Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2025-04-17T18:03:46.052195Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2025-04-17T18:03:46.052213Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2025-04-17T18:03:46.052256Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-04-17T18:03:46.052273Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2025-04-17T18:03:46.052294Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2025-04-17T18:03:46.052310Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2025-04-17T18:03:46.052396Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is DelayComplete 2025-04-17T18:03:46.052410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2025-04-17T18:03:46.052423Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2025-04-17T18:03:46.052437Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2025-04-17T18:03:46.052453Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-04-17T18:03:46.052478Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2025-04-17T18:03:46.052535Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437185 has finished 2025-04-17T18:03:46.052552Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.052574Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:03:46.052589Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T18:03:46.052602Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T18:03:46.068881Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:46.068945Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:46.068978Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2025-04-17T18:03:46.069078Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 5 ms 2025-04-17T18:03:46.069139Z 
node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:46.069317Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:46.069384Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:46.069412Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2025-04-17T18:03:46.069467Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 5 ms 2025-04-17T18:03:46.069508Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:46.069671Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:03:46.069762Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:03:46.069793Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2025-04-17T18:03:46.069830Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 5 ms 2025-04-17T18:03:46.069874Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 6446, MsgBus: 26009 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c0e/r3tmp/tmpbG0X9A/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6446, node 1 TClient is connected to server localhost:26009 TClient is connected to server localhost:26009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> DataShardOutOfOrder::UncommittedReads >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> DataShardTxOrder::RandomDotRanges_DelayRS >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> DataShardTxOrder::ImmediateBetweenOnline >> DataShardOutOfOrder::TestReadTableWriteConflict >> DataShardOutOfOrder::TestOutOfOrderLockLost ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 11740, MsgBus: 23498 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b8b/r3tmp/tmpWGtb3E/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11740, node 1 TClient is connected to server localhost:23498 TClient is connected to server localhost:23498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] |85.4%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2025-04-17T18:03:43.767214Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.787582Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.787942Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792002Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.836903Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.842932Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.908858Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.923628Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.923696Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.923821Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.927279Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.927773Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.927836Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.930235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.930281Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.031145Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.068056Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.068240Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.068335Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.068367Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.068402Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.068432Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.068563Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.068607Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2025-04-17T18:03:44.068848Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.068930Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.069027Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.069060Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.069103Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.069142Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.069181Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.069210Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.069246Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.069367Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.069399Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.069455Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.071871Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.071924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.072003Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.072128Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.072175Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.072242Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.072287Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.072321Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.072367Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.072409Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.072687Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.072734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.072768Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.072799Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.072850Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 
2025-04-17T18:03:44.072884Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.072919Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.072949Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.072972Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.088177Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.088256Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.088291Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.088341Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.088385Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.088883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.088928Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.088971Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.089045Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.089079Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.089181Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.089219Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.089270Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.089307Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.092216Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.092260Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.092414Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.092451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.092498Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.092532Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.092561Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.092595Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.092620Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.092647Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.092693Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.092741Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.092774Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.092882Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.092903Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.092918Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.092930Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.092944Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.092977Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.092993Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.093014Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.093065Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.093090Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.093123Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.093143Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.093171Z node 1 :TX_DATA ... 
m 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:48.711437Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:48.711476Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.711558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.711596Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.711661Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:48.711700Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:48.711723Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.711808Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.711829Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.711858Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.711897Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:48.711932Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.712022Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.712044Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.712074Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.712134Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:48.712174Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.712265Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.712288Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.712320Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.712345Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.712425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.712446Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.712473Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 
send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.712496Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.712568Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.712587Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.712635Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.712668Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.712865Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T18:03:48.712898Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.712925Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-17T18:03:48.712992Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-17T18:03:48.713023Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713042Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-17T18:03:48.713093Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-17T18:03:48.713117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713132Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-17T18:03:48.713175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T18:03:48.713189Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713202Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-17T18:03:48.713269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-17T18:03:48.713284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713296Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-17T18:03:48.713330Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 
Seqno# 41} 2025-04-17T18:03:48.713350Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713376Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-17T18:03:48.713449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T18:03:48.713464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713477Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T18:03:48.713543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T18:03:48.713564Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713577Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T18:03:48.713614Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:48.713627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713638Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-17T18:03:48.713680Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T18:03:48.713703Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713719Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-17T18:03:48.713766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T18:03:48.713778Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713791Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T18:03:48.713843Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:48.713866Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713882Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T18:03:48.713938Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:48.713953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.713965Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T18:03:48.714015Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:48.714031Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.714043Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T18:03:48.714079Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:48.714090Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.714104Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2025-04-17T18:03:43.766750Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.787783Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.788209Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792027Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.853910Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.867866Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.954885Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.956685Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.956772Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.956871Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.957241Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.957687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.957761Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.958067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.958164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.033377Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.066796Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.067006Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration 
request in state WaitScheme: missing processing params 2025-04-17T18:03:44.067125Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.067188Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.067232Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.067272Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.067433Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.067501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.067780Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.067873Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.068008Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.068049Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.068100Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.068149Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.068201Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.068239Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.068284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.068382Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.068416Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.068474Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.071066Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.071121Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.071204Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.071349Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.071398Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.071491Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.071550Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.071585Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on 
unit CheckSchemeTx 2025-04-17T18:03:44.071641Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.071688Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.071978Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.072016Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.072048Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.072081Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.072130Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.072166Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.072207Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.072241Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.072267Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.084458Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.084568Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.084601Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.084646Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.084701Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.085298Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.085354Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.085399Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.085492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.085539Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.085653Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.085690Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.085744Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.085780Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.089629Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.089694Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
9437184 2025-04-17T18:03:44.089897Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.089938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.089996Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.090039Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.090073Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.090132Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.090171Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.090238Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.090281Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.090331Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.090366Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.090505Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.090539Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.090563Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.090585Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.090609Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.090663Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.090694Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.090732Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.090781Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.090842Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.090890Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.090935Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.090979Z node 1 :TX_DATA ... 
m 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:48.847833Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:48.847854Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.847928Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.847961Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.848004Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:48.848040Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:48.848059Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.848145Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.848165Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.848206Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.848249Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:48.848270Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.848352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.848383Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.848414Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.848477Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:48.848500Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.848584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.848604Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.848635Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.848657Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.848734Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.848753Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.848778Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 
send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.848799Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.848872Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:48.848909Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T18:03:48.848953Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:48.848990Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:48.849179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T18:03:48.849210Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.849237Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-17T18:03:48.849321Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-17T18:03:48.849346Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.849367Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-17T18:03:48.849426Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-17T18:03:48.849447Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.849466Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-17T18:03:48.849555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T18:03:48.849583Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.849605Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-17T18:03:48.849702Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-17T18:03:48.849728Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.849750Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-17T18:03:48.849814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 
Seqno# 41} 2025-04-17T18:03:48.849840Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.849869Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-17T18:03:48.849957Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T18:03:48.849977Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.849996Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T18:03:48.850075Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T18:03:48.850103Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850136Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T18:03:48.850219Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:48.850240Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850261Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-17T18:03:48.850326Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T18:03:48.850346Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850372Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-17T18:03:48.850429Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T18:03:48.850450Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850470Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T18:03:48.850541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:48.850566Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850587Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T18:03:48.850674Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:48.850720Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850745Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T18:03:48.850823Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:48.850846Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850865Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T18:03:48.850910Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:48.850930Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.850952Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-04-17T18:03:43.766314Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.790930Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.791348Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792033Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.847830Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.852681Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.931539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.933245Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.933320Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.933422Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.933762Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.934226Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.934301Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.934609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.934644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.050478Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.080227Z node 1 :TX_DATASHARD INFO: Switched to work state 
WaitScheme tabletId 9437184 2025-04-17T18:03:44.080408Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.080501Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.080547Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.080595Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.080634Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.080779Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.080830Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.081090Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.081178Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.081297Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.081333Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.081377Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.081417Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.081453Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.081483Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.081518Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.081614Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.081649Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.081694Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.084088Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.084141Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.084225Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.084381Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.084422Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.084605Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.084654Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is 
ExecutedNoMoreRestarts 2025-04-17T18:03:44.084686Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.084741Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.084773Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.085051Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.085083Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.085118Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.085163Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.085217Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.085248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.085288Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.085319Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.085341Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.097092Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.097151Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.097195Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.097238Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.097279Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.097707Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.097743Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.097773Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.097830Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.097850Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.097937Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.097970Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.098007Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.098034Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.100510Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 
MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.100762Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.100952Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.100976Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.101016Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.101043Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.101068Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.101096Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.101122Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.101157Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.101182Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.101222Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.101248Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.101345Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.101365Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.101379Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.101391Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.101403Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.101438Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.101453Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.101476Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.101501Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.101529Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.101561Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.101587Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.101614Z node 1 :TX_DATA ... 
CE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:03:49.609529Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-17T18:03:49.609549Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T18:03:49.609735Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-17T18:03:49.609763Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-17T18:03:49.609824Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-17T18:03:49.609853Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-17T18:03:49.609883Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:49.609901Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-17T18:03:49.609930Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-17T18:03:49.609959Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:49.609985Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:49.610033Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:49.610068Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:49.610300Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [2:99:2134], Recipient [2:343:2310]: {TEvPlanStep step# 1000016 MediatorId# 0 TabletID 9437185} 2025-04-17T18:03:49.610332Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:49.610413Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit WaitForPlan 2025-04-17T18:03:49.610440Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.610464Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit WaitForPlan 2025-04-17T18:03:49.610508Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PlanQueue 2025-04-17T18:03:49.610623Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 45 at step 1000016 at tablet 9437185 { Transactions { TxId: 45 AckTo { RawX1: 99 RawX2: 8589936726 } } Step: 1000016 MediatorID: 0 TabletID: 9437185 } 2025-04-17T18:03:49.610651Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T18:03:49.610818Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:343:2310], Recipient [2:343:2310]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.610849Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.610881Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-17T18:03:49.610910Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:49.610932Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:03:49.610958Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437185 2025-04-17T18:03:49.610981Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-04-17T18:03:49.611005Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.611027Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-04-17T18:03:49.611051Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-04-17T18:03:49.611072Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-04-17T18:03:49.611622Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-04-17T18:03:49.611660Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.611684Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-04-17T18:03:49.611709Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-17T18:03:49.611731Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-04-17T18:03:49.611761Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.611782Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-17T18:03:49.611802Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:49.611822Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-04-17T18:03:49.611858Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-04-17T18:03:49.611882Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-04-17T18:03:49.611914Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-04-17T18:03:49.611960Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.611980Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:49.612001Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-04-17T18:03:49.612023Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-04-17T18:03:49.612059Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.612078Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-04-17T18:03:49.612121Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-04-17T18:03:49.612143Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-04-17T18:03:49.612163Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.612181Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-04-17T18:03:49.612199Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-04-17T18:03:49.612219Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-04-17T18:03:49.612242Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.612260Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-04-17T18:03:49.612279Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-04-17T18:03:49.612298Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-04-17T18:03:49.612318Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.612340Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-17T18:03:49.612375Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-17T18:03:49.612396Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-17T18:03:49.612685Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-17T18:03:49.612727Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:49.612759Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.612779Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-17T18:03:49.612800Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-17T18:03:49.612829Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T18:03:49.612984Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-17T18:03:49.613007Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-17T18:03:49.613030Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-17T18:03:49.613052Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-17T18:03:49.613094Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:49.613116Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-17T18:03:49.613138Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-17T18:03:49.613173Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:49.613206Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:03:49.613233Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T18:03:49.613256Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T18:03:49.625983Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-17T18:03:49.626045Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-17T18:03:49.626117Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:49.626160Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T18:03:49.626240Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:49.626281Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:49.626528Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-17T18:03:49.626557Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-17T18:03:49.626589Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:03:49.626621Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T18:03:49.626666Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:49.626692Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-04-17T18:03:46.263813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:46.264330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:46.264384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e09/r3tmp/tmpdmdtZA/pdisk_1.dat 2025-04-17T18:03:46.640413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.682248Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.721350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:46.721471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:46.732649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:46.814971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.854917Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.856010Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.856489Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:46.856768Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:46.904087Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:46.904841Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:46.904958Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:46.906761Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:46.906841Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:46.906911Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:46.907276Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:46.907412Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:46.907510Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:46.918279Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:46.938401Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:46.938599Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:46.938719Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:46.938752Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:46.938781Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:46.938831Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:46.939042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.939091Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.939297Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:46.939389Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:46.939508Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:46.939541Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.939581Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:46.939609Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:46.939635Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:46.939659Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:46.939687Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:46.939792Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.939822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.939865Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:46.940173Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:46.940204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:46.940273Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:46.940455Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:46.940519Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:46.940592Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:46.940628Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.940655Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:46.940695Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:46.940720Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.940948Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:46.940977Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:46.941009Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:46.941040Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.941097Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:46.941125Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:46.941163Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:46.941188Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:46.941206Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:46.942235Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:46.942273Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:46.952819Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:46.952881Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.952913Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.952961Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:46.953022Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:47.102139Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.102212Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.102256Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:47.102631Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:47.102670Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:47.102784Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:47.102843Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:47.102916Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:47.102961Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:47.106124Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:47.106192Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:47.106888Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.106924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.106972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:4 ... de 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:48.848763Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-17T18:03:48.848861Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:755:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-17T18:03:48.859943Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T18:03:48.860075Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:666:2570], Recipient [1:755:2634]: {TEvReadSet step# 3017 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-17T18:03:48.860109Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.860149Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-04-17T18:03:48.860234Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T18:03:48.860284Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:755:2634], Recipient [1:666:2570]: {TEvReadSet step# 3017 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-17T18:03:48.860299Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:48.860314Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-04-17T18:03:49.481847Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqp0ccvtg0e9qckpmzcyy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q5ODI4MjAtNjRlZTgyMjUtYjQ5ZGUxZjctNDNhZGRmZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:03:49.485676Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1105:2876], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-17T18:03:49.485898Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:03:49.485962Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-17T18:03:49.486033Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T18:03:49.486068Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:03:49.486125Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:49.486169Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:49.486236Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-17T18:03:49.486289Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T18:03:49.486315Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:49.486342Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:03:49.486365Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:03:49.486472Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:03:49.486672Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:03:49.486732Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-17T18:03:49.486785Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1105:2876], 0} after executionsCount# 1 2025-04-17T18:03:49.486844Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1105:2876], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:49.486920Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1105:2876], 0} finished in read 2025-04-17T18:03:49.487000Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T18:03:49.487026Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:03:49.487047Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:03:49.487076Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:49.487109Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-17T18:03:49.487140Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:49.487166Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-17T18:03:49.487198Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:03:49.487277Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:03:49.487558Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-17T18:03:49.487692Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1107:2877], Recipient [1:755:2634]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-17T18:03:49.487818Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-17T18:03:49.487865Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-04-17T18:03:49.487921Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T18:03:49.487946Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-04-17T18:03:49.487969Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:49.487990Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-17T18:03:49.488021Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2025-04-17T18:03:49.488046Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T18:03:49.488066Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:49.488098Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-04-17T18:03:49.488133Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-04-17T18:03:49.488215Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:03:49.488373Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-17T18:03:49.488414Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to 
v4000/18446744073709551615 2025-04-17T18:03:49.488445Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[1:1107:2877], 0} after executionsCount# 1 2025-04-17T18:03:49.488474Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1107:2877], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:49.488523Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1107:2877], 0} finished in read 2025-04-17T18:03:49.488565Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T18:03:49.488586Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-04-17T18:03:49.488606Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-04-17T18:03:49.488626Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-04-17T18:03:49.488669Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-17T18:03:49.488691Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-04-17T18:03:49.488718Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2025-04-17T18:03:49.488757Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-17T18:03:49.488808Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-17T18:03:49.488969Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:755:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-17T18:03:49.489906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1105:2876], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:49.489964Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T18:03:49.490616Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1107:2877], Recipient [1:755:2634]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:49.490671Z node 1 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> DataShardTxOrder::ZigZag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 18984, MsgBus: 4112 2025-04-17T18:03:30.475387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344314147256208:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:30.476865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a8a/r3tmp/tmpHiFihs/pdisk_1.dat 2025-04-17T18:03:30.802555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:30.823169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-17T18:03:30.823288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:30.825132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18984, node 1 2025-04-17T18:03:30.898653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:30.898680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:30.898687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:30.898785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4112 TClient is connected to server localhost:4112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:31.434524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.447894Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:03:31.453801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.592608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.765728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:31.845881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:33.424660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344327032159663:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.424789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.711800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.734133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.756398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.779307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.803579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.832027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.861769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:33.919169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344327032160323:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.919244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344327032160328:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.919258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:33.922573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:33.931707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344327032160330:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:03:34.014164Z node 1 :TX_PROXY ERROR: Actor# [1:7494344331327127682:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:34.916844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:03:35.053124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:03:35.053334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:03:35.053589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:03:35.053689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:03:35.053805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:03:35.053934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:03:35.054016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:03:35.054113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:03:35.054214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494344331327128183:2523];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:03:35.054241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:03:35.054260Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037923;self_id=[1:7494344331327128183:2523];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:03:35.054308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:03:35.054372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494344331327128161:2522];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:03:35.054423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494344331327128183:2523];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:03:35.054463Z node 1 :TX_COLUMNSHARD WARN: tablet_id= ... 9; 2025-04-17T18:03:35.208857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:03:35.208964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:03:35.208982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:03:35.209073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:03:35.209091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:03:35.209249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:03:35.209272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:03:35.209349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:03:35.209363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:03:35.209371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:03:35.209408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-04-17T18:03:35.209611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:03:35.209641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:03:35.209794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:03:35.209829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:03:35.210039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:03:35.210080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:03:35.210233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:03:35.210297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:03:35.243078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.243379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.247826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.249868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.252730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.255298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.256536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.260746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 
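A note on reading the normalizer records above: TTxInitSchema registers a fixed chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, ..., RestoreV2Chunks), and TTxUpdateSchema then drives them strictly in registration order, logging normalizer_switched before each step and normalizer_finished after it, ending with normalization_finished. The sketch below is an illustrative reconstruction of that ordered-migration pattern only; TNormalizer and TNormalizationChain are invented names, not YDB's actual classes.

```cpp
#include <cstdio>
#include <functional>
#include <string>
#include <utility>
#include <vector>

// Illustrative sketch of the normalizer chain seen in the log: each
// normalizer is registered once at init, then executed strictly in
// registration order, emitting switched/finished events as it goes.
struct TNormalizer {
    std::string Name;              // e.g. "Granules", "Chunks", "RestoreV2Chunks"
    std::function<void()> Run;     // the actual repair/migration step
};

class TNormalizationChain {
    std::vector<TNormalizer> Chain;
public:
    void Register(TNormalizer n) { // mirrors event=normalizer_register
        std::printf("normalizer_register;CLASS_NAME=%s\n", n.Name.c_str());
        Chain.push_back(std::move(n));
    }
    void RunAll() {                // mirrors TTxUpdateSchema::Execute
        for (const auto& n : Chain) {
            std::printf("normalizer_switched;CLASS_NAME=%s\n", n.Name.c_str());
            n.Run();
            std::printf("normalizer_finished;CLASS_NAME=%s\n", n.Name.c_str());
        }
        std::printf("normalization_finished\n");
    }
};
```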
2025-04-17T18:03:35.260935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.264661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:03:35.380416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:03:35.380814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:03:35.381072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;self_id=[1:7494344335622095512:2529];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037931;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037926;receive=72075186224037930; 2025-04-17T18:03:35.381377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:03:35.461886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710676;tx_id=281474976710676;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710676; 2025-04-17T18:03:35.474444Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344314147256208:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:35.474503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
query_phases { duration_us: 7736 cpu_time_us: 2028 affected_shards: 1 }
query_phases { duration_us: 15110 cpu_time_us: 272 affected_shards: 1 }
compilation { duration_us: 50228 cpu_time_us: 48080 }
process_cpu_time_us: 655
total_duration_us: 75173
total_cpu_time_us: 51035
AddressSanitizer:DEADLYSIGNAL
=================================================================
==274491==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018e0e20d bp 0x7fffaa94c420 sp 0x7fffaa94c280 T0)
==274491==The signal is caused by a READ memory access.
==274491==Hint: address points to the zero page.
2025-04-17T18:03:45.801222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T18:03:45.801265Z node 1 :IMPORT WARN: Table profiles were not loaded
    #0 0x18e0e20d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
    #1 0x18e0e20d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
    #2 0x18e0e20d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
    #3 0x18e0e20d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
    #4 0x18e0e20d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
    #5 0x18e33327 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #6 0x18e33327 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
    #7 0x18e33327 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
    #8 0x18e33327 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
    #9 0x18e33327 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
    #10 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #11 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #12 0x197855d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
    #13 0x19754f68 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
    #14 0x18e321d3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #15 0x19756835 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
    #16 0x1977fb4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
    #17 0x7efc2f6bed8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #18 0x7efc2f6bee3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #19 0x16571028 in _start (/home/runner/.ya/build/build_root/6873/001a8a/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16571028) (BuildId: 1bdc4290ca22bc21b0df7e2b25fc7bf1adbeab0b)
AddressSanitizer can not provide additional info.
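Reading the stack above: the faulting frames sit in the generated table_access(int) accessor of ydb_query_stats.pb.h, which forwards to RepeatedPtrField::Get, and Get performs no bounds check in release builds; a near-null read address is what dereferencing an element past the end of a repeated field typically looks like, and the query_phases dump above shows phases that carry no table_access entries at all. The sketch below is not the actual fix for KqpCost::OlapWriteRow; it only illustrates size()-guarded iteration over these repeated fields, assuming the Ydb::TableStats::QueryStats message this header is generated from (the helper name and output format are invented).

```cpp
#include <ydb/public/api/protos/ydb_query_stats.pb.h>
#include <util/stream/output.h>

// Hypothetical helper: walk per-phase table-access stats by size()
// instead of assuming a fixed phase or index exists. A direct
// stats.query_phases(i).table_access(0) on a phase without table_access
// entries indexes past the end, which RepeatedPtrField::Get does not
// diagnose in release builds.
void DumpTableAccess(const Ydb::TableStats::QueryStats& stats) {
    for (int i = 0; i < stats.query_phases_size(); ++i) {
        const auto& phase = stats.query_phases(i);
        for (int j = 0; j < phase.table_access_size(); ++j) {
            const auto& access = phase.table_access(j);
            Cerr << "phase " << i << ": " << access.name().c_str()
                 << " reads.rows=" << access.reads().rows()
                 << " updates.rows=" << access.updates().rows() << Endl;
        }
    }
}
```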
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==274491==ABORTING
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD]
Test command err:
2025-04-17T18:03:43.766251Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.787723Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.788176Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792031Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.852157Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.856895Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.941008Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.942524Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.942594Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.942699Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.943047Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.943448Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.943525Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.943793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.943825Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.031120Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.067138Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.067306Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.067394Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.067429Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.067468Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.067528Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.067660Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.067714Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.067984Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.068061Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.068163Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.068211Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0
active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.068254Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.068293Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.068345Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.068374Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.068408Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.068501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.068530Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.068585Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.070923Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.070974Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.071045Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.071166Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.071204Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.071268Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.071318Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.071356Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.071402Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.071433Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.071712Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.071755Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.071786Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.071814Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.071856Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.071891Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.071928Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.071954Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.071974Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 
9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.083893Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.083960Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.083996Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.084032Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.084079Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.084577Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.084632Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.084670Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.084733Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.084759Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.084863Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.084927Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.084984Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.085019Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.088407Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.088469Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.088633Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.088667Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.088711Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.088743Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.088769Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.088804Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.088834Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.088874Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.088916Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.088964Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 
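The trace above walks a single operation through named execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails, ...), each returning a status: Executed advances immediately, DelayComplete defers side effects to the transaction's Complete phase ("Complete execution for [...] on unit ..." lines), and a not-ready unit such as WaitForPlan parks the operation until its event (here TEvPlanStep) arrives. Below is a schematic model of that loop only, with invented names; it is not YDB's actual pipeline code and omits statuses like DelayCompleteNoMoreRestarts.

```cpp
#include <cstdio>
#include <vector>

// Schematic model of the unit pipeline visible in the trace.
enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete, NotReady };

struct TUnit {
    const char* Name;
    EStatus (*Execute)();  // one step of the operation in this unit
    void (*Complete)();    // side effects deferred by DelayComplete
};

// Returns the index of the unit the operation is parked on, or
// plan.size() if the plan finished. Units that returned DelayComplete
// have their Complete callbacks run later, at commit time.
size_t AdvancePlan(const std::vector<TUnit>& plan, size_t at,
                   std::vector<const TUnit*>& delayed) {
    while (at < plan.size()) {
        std::printf("Trying to execute on unit %s\n", plan[at].Name);
        EStatus st = plan[at].Execute();
        if (st == EStatus::NotReady)
            return at;                    // e.g. WaitForPlan: wait for TEvPlanStep
        if (st == EStatus::DelayComplete)
            delayed.push_back(&plan[at]); // replayed in the Complete phase
        std::printf("Advance execution plan: executing on unit %s\n", plan[at].Name);
        ++at;
    }
    return at;
}
```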
2025-04-17T18:03:44.088994Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.089121Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.089149Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.089168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.089185Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.089203Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.089249Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.089272Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.089303Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.089332Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.089362Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.089409Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.089437Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.089480Z node 1 :TX_DATA ... 1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T18:03:49.742744Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:49.742766Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:49.742840Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:49.743078Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T18:03:49.743115Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.743144Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-17T18:03:49.743206Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-17T18:03:49.743228Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.743255Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-17T18:03:49.743376Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:234:2227], Recipient [1:457:2399]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-17T18:03:49.743414Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 
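The TEvReadSet/TEvReadSetAck traffic that begins above is at-least-once delivery: the source tablet keeps a readset outstanding until it sees an ack carrying the matching Seqno, so the destination must apply each readset once and ack redeliveries idempotently (the ack_tablet_duplication and "Send delayed Ack RS Ack" records nearby show both sides of this). The toy model below captures only that bookkeeping; the struct and method names are invented, and it assumes per-source seqnos arrive in order, as they do in this trace.

```cpp
#include <cstdint>
#include <map>
#include <set>

// Toy model of readset delivery bookkeeping: the sender retries each
// {source, seqno} until acked; the receiver records the last seqno
// consumed per source so redeliveries are acked but not re-applied.
struct TReadSetTracker {
    std::map<uint64_t, std::set<uint64_t>> Unacked;  // sender side
    std::map<uint64_t, uint64_t> Consumed;           // receiver-side dedup

    void OnSend(uint64_t source, uint64_t seqno) {
        Unacked[source].insert(seqno);
    }
    // True if this delivery is new and should be applied before acking.
    bool OnReceive(uint64_t source, uint64_t seqno) {
        auto [it, fresh] = Consumed.emplace(source, seqno);
        if (!fresh && it->second >= seqno)
            return false;  // duplicate redelivery: ack again, do not apply
        it->second = seqno;
        return true;
    }
    void OnAck(uint64_t source, uint64_t seqno) {
        Unacked[source].erase(seqno);  // stop retrying this readset
    }
};
```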
2025-04-17T18:03:49.743459Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-04-17T18:03:49.743506Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-17T18:03:49.743537Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-04-17T18:03:49.743582Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T18:03:49.743649Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:457:2399], Recipient [1:457:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.743666Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.743691Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-17T18:03:49.743713Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:03:49.743740Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-17T18:03:49.743767Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-17T18:03:49.743790Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-17T18:03:49.743824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-17T18:03:49.743869Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-17T18:03:49.743900Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-17T18:03:49.744237Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-17T18:03:49.744273Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:49.744305Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-17T18:03:49.744322Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-04-17T18:03:49.744355Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-17T18:03:49.744382Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T18:03:49.744539Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-17T18:03:49.744565Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-17T18:03:49.744585Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-17T18:03:49.744602Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-17T18:03:49.744640Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-17T18:03:49.744655Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-17T18:03:49.744671Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-04-17T18:03:49.744704Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:49.744720Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-17T18:03:49.744746Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-17T18:03:49.744767Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-17T18:03:49.744961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T18:03:49.744983Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745000Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T18:03:49.745044Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T18:03:49.745056Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745069Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-17T18:03:49.745137Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T18:03:49.745160Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745180Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-17T18:03:49.745223Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:49.745243Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745267Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T18:03:49.745333Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:49.745359Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745374Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T18:03:49.745441Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:49.745461Z node 1 
:TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745481Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T18:03:49.745554Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:49.745569Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745589Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-17T18:03:49.745668Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T18:03:49.745691Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745711Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T18:03:49.745772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T18:03:49.745785Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.745801Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T18:03:49.761617Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:49.761722Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T18:03:49.761785Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:49.761846Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:49.761887Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:49.762195Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:49.762236Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:49.762273Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |85.5%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD]
Test command err:
2025-04-17T18:03:43.766540Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.786280Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.786628Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.791954Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.842485Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.847059Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.920781Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.923638Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.923715Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.923820Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.927287Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.927827Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.927905Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.930229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.930273Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.034087Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.067598Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.067788Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.067906Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.067952Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.067988Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.068026Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.068185Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.068252Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.068573Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.068672Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.068792Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.068830Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.068879Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-17T18:03:44.068920Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.068959Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.068992Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.069032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.069144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.069180Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.069250Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.071723Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.071782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.071863Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.072014Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.072063Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.072142Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.072187Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.072224Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.072274Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.072310Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.072660Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.072699Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.072748Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.072797Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.072852Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.072890Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.072929Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.072959Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.072980Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.085066Z node 1 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.085137Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.085190Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.085232Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.085283Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.085948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.086016Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.086061Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.086130Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.086155Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.086283Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.086324Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.086385Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.086421Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.090239Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.090299Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.090482Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.090516Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.090564Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.090604Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.090639Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.090682Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.090715Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.090758Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.090794Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.090850Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.090886Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit 
LoadTxDetails 2025-04-17T18:03:44.091041Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.091079Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.091100Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.091120Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.091140Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.091192Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.091217Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.091248Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.091280Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.091318Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.091367Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.091397Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.091435Z node 1 :TX_DATA ... aitInRS 2025-04-17T18:03:50.579267Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:50.579287Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-17T18:03:50.579307Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-17T18:03:50.579326Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-17T18:03:50.579698Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-17T18:03:50.579780Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:50.579830Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:50.579855Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-17T18:03:50.579878Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-17T18:03:50.579900Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T18:03:50.580097Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-17T18:03:50.580130Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-17T18:03:50.580170Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-17T18:03:50.580205Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 
2025-04-17T18:03:50.580243Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:03:50.580272Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-17T18:03:50.580298Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-17T18:03:50.580338Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:50.580380Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:03:50.580424Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T18:03:50.580472Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T18:03:50.580650Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:233:2226], Recipient [2:233:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.580688Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.580734Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:50.580761Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:50.580786Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:50.580825Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-04-17T18:03:50.580851Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-04-17T18:03:50.580876Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.580898Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:50.580920Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:50.580942Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:50.581549Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-04-17T18:03:50.581599Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.581625Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:50.581649Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-17T18:03:50.581671Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-04-17T18:03:50.581703Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.581723Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-17T18:03:50.581744Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:50.581764Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:50.581800Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-04-17T18:03:50.581826Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new 
logically incomplete end at 9437184 2025-04-17T18:03:50.581868Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-04-17T18:03:50.581901Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.581922Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:50.581943Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-04-17T18:03:50.581969Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-04-17T18:03:50.582011Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.582032Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-04-17T18:03:50.582052Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-04-17T18:03:50.582072Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-04-17T18:03:50.582093Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.582114Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-04-17T18:03:50.582132Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-04-17T18:03:50.582161Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-04-17T18:03:50.582361Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.582398Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-04-17T18:03:50.582422Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-04-17T18:03:50.582443Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-04-17T18:03:50.582467Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.582506Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-17T18:03:50.582532Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:03:50.582565Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-17T18:03:50.582861Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-17T18:03:50.582907Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:50.582941Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.582964Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:03:50.582987Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-17T18:03:50.583011Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T18:03:50.583179Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-17T18:03:50.583208Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-17T18:03:50.583234Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-17T18:03:50.583263Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-17T18:03:50.583291Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:03:50.583312Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-17T18:03:50.583346Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-17T18:03:50.583374Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:50.583422Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:50.583467Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:50.583492Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:50.596362Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-17T18:03:50.596436Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-17T18:03:50.596496Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:03:50.596537Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T18:03:50.596602Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:50.596669Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T18:03:50.596938Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-17T18:03:50.596970Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-17T18:03:50.597015Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:50.597045Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T18:03:50.597089Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:50.597119Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |85.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD]
>> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite
>> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD]
>> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD]
>> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite
>> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD]
>> DataShardOutOfOrder::UncommittedReads [GOOD]
>> Balancing::Balancing_ManyTopics_PQv1 [GOOD]
>> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD]
Test command err: 2025-04-17T18:03:46.720650Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.731986Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.732311Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:46.732480Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:46.770567Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:46.775201Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:46.847208Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:46.848550Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:46.848607Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:46.848678Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:46.848995Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:46.849310Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:46.849365Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:46.849585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:46.849612Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.910978Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:46.941362Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:46.941545Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:46.941647Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:46.941685Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:46.941720Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:46.941766Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:46.941910Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.941963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T18:03:46.942236Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:46.942321Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:46.942432Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:46.942470Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.942523Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:46.942569Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:46.942609Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:46.942640Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:46.942681Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:46.942790Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.942823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.942896Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:46.949122Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:46.949180Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:46.949263Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:46.949394Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:46.949437Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:46.949519Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:46.949567Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.949626Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:46.949682Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:46.949718Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:46.950023Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:46.950070Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:46.950104Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:46.950137Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:46.950201Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 
2025-04-17T18:03:46.950237Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:46.950277Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:46.950308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:46.950331Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:46.963764Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:46.963825Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:46.963851Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:46.963884Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:46.963926Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:46.964399Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.964453Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.964497Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:46.964570Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:46.964600Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:46.964707Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:46.964740Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.964795Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:46.964824Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:46.972899Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:46.972968Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:46.973165Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.973217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.973272Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:46.973316Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:46.973352Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:46.973391Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:46.973426Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:46.973483Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.973532Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:46.973589Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:46.973622Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:46.973775Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:46.973812Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.973838Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:46.973862Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:46.973885Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:46.973942Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.973969Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:46.974005Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:46.974043Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:46.974082Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:46.974145Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:46.974201Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:46.974255Z node 1 :TX_DATA ... 
lt to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:51.540132Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:51.540193Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:51.540214Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-17T18:03:51.540244Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:51.540263Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:51.540317Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:51.540331Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-17T18:03:51.540353Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:51.540367Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:51.540416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:51.540433Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-17T18:03:51.540453Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:51.540468Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:51.540540Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:51.540556Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T18:03:51.540598Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:51.540617Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:51.540673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:51.540693Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T18:03:51.540793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-04-17T18:03:51.540819Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.540842Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-04-17T18:03:51.540914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-04-17T18:03:51.540932Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.540946Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-04-17T18:03:51.541012Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-04-17T18:03:51.541035Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541051Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-04-17T18:03:51.541112Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-17T18:03:51.541128Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541141Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-17T18:03:51.541187Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-17T18:03:51.541205Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541220Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-17T18:03:51.541272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-17T18:03:51.541289Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541301Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-17T18:03:51.541342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-17T18:03:51.541372Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541389Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-17T18:03:51.541458Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-17T18:03:51.541477Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541491Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-17T18:03:51.541545Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T18:03:51.541560Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541573Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-04-17T18:03:51.541612Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T18:03:51.541626Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541639Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T18:03:51.541675Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:51.541688Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541704Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T18:03:51.541766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:51.541792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541808Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T18:03:51.541857Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:51.541871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541884Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T18:03:51.541930Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:51.541949Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.541964Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-17T18:03:51.542015Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T18:03:51.542040Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.542061Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T18:03:51.542124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T18:03:51.542142Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.542156Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T18:03:51.559618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:51.559676Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T18:03:51.559737Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-04-17T18:03:51.559802Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:51.559841Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:51.560120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:51.560164Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:51.560199Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152
>> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD]
>> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite
>> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD]
>> DataShardTxOrder::ImmediateBetweenOnline [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD]
Test command err: 2025-04-17T18:03:46.288706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:46.289199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:46.289251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e23/r3tmp/tmpJeSqZA/pdisk_1.dat 2025-04-17T18:03:46.639367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.649406Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-17T18:03:46.649473Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-17T18:03:46.674384Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.711567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:46.711698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:46.722830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:46.796713Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-17T18:03:46.796790Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T18:03:46.796975Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-17T18:03:46.797281Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-17T18:03:46.797336Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-04-17T18:03:46.799566Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-17T18:03:46.799647Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-17T18:03:46.799687Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-17T18:03:46.799735Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-17T18:03:46.803898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.840144Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.841045Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.841453Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: 
tablet 72075186224037888 actor [1:665:2570] 2025-04-17T18:03:46.841691Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:46.887881Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:46.888539Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:46.888625Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:46.890145Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:46.890232Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:46.890300Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:46.890621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:46.890732Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:46.890796Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:681:2570] in generation 1 2025-04-17T18:03:46.891175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:46.917614Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:46.917796Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:46.917892Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2580] 2025-04-17T18:03:46.917921Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:46.917953Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:46.917982Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:46.918164Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:665:2570], Recipient [1:665:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.918238Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.918481Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:46.918575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:46.918668Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:46.918717Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.918751Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:46.918780Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:46.918811Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:46.918837Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:46.918876Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:46.918976Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:672:2574], Recipient [1:665:2570]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.919005Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.919044Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:661:2567], serverId# [1:672:2574], sessionId# [0:0:0] 2025-04-17T18:03:46.919383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:672:2574] 2025-04-17T18:03:46.919438Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:46.919539Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:46.919751Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:46.919797Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:46.919887Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:46.919925Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.919971Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:46.920000Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:03:46.920042Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.920326Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:46.920361Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:46.920390Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:46.920418Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.920462Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:46.920494Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:46.920519Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:46.920557Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:46.920578Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:46.921191Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:46.921242Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.921274Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.921306Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec 
latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:46.921351Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:46.923223Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0 2025-04-17T18:03:46.923285Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 1000 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-17T18:03:46.923561Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:684:2581], Recipient [1:665:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:46.923612Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:47.073153Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned 2025-04-17T18:03:47.073234Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480 2025-04-17T18:03:47.073276Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for ... 000 2025-04-17T18:03:51.349613Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [1:24:2071], Recipient [1:1376:3052]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 4000} 2025-04-17T18:03:51.349659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-04-17T18:03:51.349703Z node 1 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 4000 at tablet 72075186224037888 2025-04-17T18:03:51.349762Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:51.350225Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2025-04-17T18:03:51.350329Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 1 SubscriptionId: 2 LatestStep: 4000 2025-04-17T18:03:51.350930Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 1 TimeBarrier# 4000} 2025-04-17T18:03:51.482997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01js2eqr5rd0th03tptq0x0x2b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk2Yjg5M2ItZjFlMDQyNDMtOTg2YTg1MDAtODhlNWE3NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:03:51.484847Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1439:3091], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715682 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-17T18:03:51.485092Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:03:51.485188Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-04-17T18:03:51.485284Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:03:51.485333Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:03:51.485375Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:51.485435Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:51.485505Z node 1 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-17T18:03:51.485581Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:03:51.485615Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:51.485642Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:03:51.485670Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:03:51.485806Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715682 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:03:51.486097Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715682, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:03:51.486159Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-04-17T18:03:51.486238Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1439:3091], 0} after executionsCount# 1 2025-04-17T18:03:51.486298Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1439:3091], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:51.486389Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1439:3091], 0} finished in read 2025-04-17T18:03:51.486470Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:03:51.486524Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:03:51.486555Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 
2025-04-17T18:03:51.486581Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:51.486627Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:03:51.486650Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:51.486689Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-17T18:03:51.486731Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:03:51.486827Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:03:51.487961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1439:3091], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:51.488034Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2025-04-17T18:03:51.638701Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2025-04-17T18:03:51.638801Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4500 in 0.499900s at 4.450000s 2025-04-17T18:03:51.640174Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01js2eqra64vnxss4xg6ppy2pj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTMyMjNlNTgtYjEwYjhjNDktMjAzN2EyY2QtNmYyNDlkMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:03:51.642031Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1463:3108], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-17T18:03:51.642339Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:03:51.642425Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-17T18:03:51.642523Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:03:51.642615Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:03:51.642669Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:51.642713Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:51.642768Z node 1 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-17T18:03:51.642818Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:03:51.642847Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:51.642872Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:03:51.642896Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:03:51.643040Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:03:51.643483Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715685, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:03:51.643556Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-17T18:03:51.643626Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1463:3108], 0} after executionsCount# 1 2025-04-17T18:03:51.643686Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1463:3108], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:51.643780Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1463:3108], 0} finished in read 2025-04-17T18:03:51.643880Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:03:51.643911Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:03:51.643939Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:03:51.643966Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:51.644011Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:03:51.644037Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:51.644074Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-17T18:03:51.644113Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:03:51.644247Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:03:51.644587Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:1376:3052]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715685 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-17T18:03:51.645601Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1463:3108], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:51.645686Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } }
>> BsControllerConfig::SelectAllGroups
>> BsControllerConfig::AddDriveSerial
>> BsControllerConfig::MergeIntersectingBoxes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD]
Test command err: 2025-04-17T18:03:49.910558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:49.911060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:49.911113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dcf/r3tmp/tmp3Mz3xd/pdisk_1.dat 2025-04-17T18:03:50.233267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:50.275882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:50.314944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:50.315091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:50.329096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:50.410840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:50.448325Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:50.449281Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:50.449730Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:50.449976Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:50.495568Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:50.496294Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:50.496387Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:50.498076Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:50.498148Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:50.498229Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:50.498547Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:50.498672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:50.498745Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:50.499175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:50.527503Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:50.527686Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:50.527816Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:50.527860Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:50.527894Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:50.527926Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:50.528172Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.528229Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.528495Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:50.528761Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:50.528895Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:50.528931Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:50.528969Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:50.529002Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:50.529033Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:50.529064Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:50.529104Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:50.529211Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.529242Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.529285Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:50.529657Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:50.529702Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:50.529804Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:50.530014Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:50.530078Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:50.530255Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:50.530305Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:50.530357Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:50.530394Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:50.530441Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:50.530734Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:50.530776Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:50.530807Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:50.530838Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:50.530890Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:50.530921Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:50.530962Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:50.531009Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:50.531041Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:50.531782Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:50.531828Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:50.531859Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:50.531893Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:50.531943Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:50.534054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:50.534108Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:50.682570Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.682626Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.682677Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:50.683554Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:50.683643Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:50.683770Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:50.683816Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:50.683876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:50.683916Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:50.688510Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:50.688585Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:50.688969Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.689014Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.689078Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:5 ... 7T18:03:51.642103Z node 1 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T18:03:51.642144Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:03:51.642206Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T18:03:51.642266Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... waiting for blocked commit 2025-04-17T18:03:51.695211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqrez17j7eee9yj1g314k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFhNjM0YmItODZjZmRiNmItNTNlNjUxNzAtZjE2NmE3NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:03:51.697395Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [1:961:2749], Recipient [1:666:2570]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-17T18:03:51.697445Z node 1 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-17T18:03:51.697567Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T18:03:51.697599Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T18:03:51.697658Z node 1 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-17T18:03:51.697794Z node 1 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-17T18:03:51.697863Z node 1 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-17T18:03:51.697913Z node 1 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-17T18:03:51.697983Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-04-17T18:03:51.698269Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:03:51.698305Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-04-17T18:03:51.698351Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:51.698401Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:51.698437Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-17T18:03:51.698485Z node 1 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-17T18:03:51.698536Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:03:51.698563Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:51.698584Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T18:03:51.698603Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-04-17T18:03:51.698634Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-17T18:03:51.698672Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-17T18:03:51.698770Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:6] at 
72075186224037888, row count=1 2025-04-17T18:03:51.698817Z node 1 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T18:03:51.698864Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:51.698888Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T18:03:51.698934Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T18:03:51.698964Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:03:51.698995Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-04-17T18:03:51.699018Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T18:03:51.699049Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:03:51.699079Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:51.699112Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:03:51.699131Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:51.699154Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished ... blocked commit for tablet 72075186224037888 2025-04-17T18:03:51.809500Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqrgkan3pfd4txmwrejh4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUwODI1ZjQtMjg0N2JhYzYtNWNjM2E3NzktNTc4NWNhODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:03:51.811200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:979:2773], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T18:03:51.811340Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:03:51.811383Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-17T18:03:51.811409Z node 1 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2025-04-17T18:03:51.811469Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-17T18:03:51.811541Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T18:03:51.811565Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:03:51.811588Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:51.811609Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:51.811639Z node 1 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-17T18:03:51.811663Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T18:03:51.811697Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:51.811725Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:03:51.811744Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:03:51.811841Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T18:03:51.812023Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-04-17T18:03:51.812061Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:03:51.812085Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:03:51.812106Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:51.812132Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T18:03:51.812145Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:51.812170Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T18:03:51.812197Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
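The read above traverses the datashard's execution-unit pipeline: CheckRead, BuildAndWaitDependencies, ExecuteRead, CompletedOperations, with ExecuteRead returning DelayComplete so that the reply to the client is deferred until the Complete phase of TTxReadViaPipeline (visible further down as "Complete execution for [0:7] ... on unit ExecuteRead"). A minimal C++ sketch of that two-phase driver follows; the unit names and statuses are taken from the trace, but the types and the loop are a hypothetical reconstruction, not YDB's actual classes.

    #include <cstdio>
    #include <vector>

    // Statuses observed in the trace; the enum itself is an assumption,
    // not the real YDB type.
    enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete };

    struct TUnit {
        const char* Name;
        EStatus (*Execute)();   // "Trying to execute ... on unit <Name>"
        void (*Complete)();     // runs later, in the tablet's Complete() phase
    };

    // Advance the operation unit by unit; units answering DelayComplete get
    // their side effects (e.g. sending results to the client) only after
    // the underlying local transaction commits.
    void RunPlan(const std::vector<TUnit>& units) {
        std::vector<const TUnit*> delayed;
        for (const auto& u : units) {
            std::printf("Trying to execute on unit %s\n", u.Name);
            if (u.Execute() == EStatus::DelayComplete)
                delayed.push_back(&u);
        }
        for (const TUnit* u : delayed) {
            std::printf("Complete execution on unit %s\n", u->Name);
            u->Complete();
        }
    }

    int main() {
        std::vector<TUnit> plan = {
            {"ExecuteRead", [] { return EStatus::DelayComplete; }, [] {}},
            {"CompletedOperations", [] { return EStatus::Executed; }, [] {}},
        };
        RunPlan(plan);
    }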
2025-04-17T18:03:51.885851Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2025-04-17T18:03:51.885931Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2025-04-17T18:03:52.022840Z node 1 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T18:03:52.022903Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:03:52.022969Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2025-04-17T18:03:52.023046Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:52.023170Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:03:52.023204Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:03:52.023254Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:979:2773], 0} after executionsCount# 1 2025-04-17T18:03:52.023329Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:979:2773], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:52.023473Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:979:2773], 0} finished in read 2025-04-17T18:03:52.025097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:979:2773], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:52.025144Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } >> BsControllerConfig::PDiskCreate >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2025-04-17T18:03:48.192991Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:48.207194Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:48.207680Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:48.207917Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:48.256186Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:48.261206Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:48.344621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:48.346288Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:48.346362Z node 1 :TX_DATASHARD 
DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:48.346493Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:48.346824Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:48.347269Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:48.347337Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:48.347635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:48.347672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:48.412229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:48.451317Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:48.451534Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:48.451637Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:48.451674Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:48.451711Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:48.451747Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:48.451906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.451960Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.452229Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:48.452319Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:48.452433Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:48.452472Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:48.452520Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:48.452563Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:48.452605Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:48.452636Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:48.452675Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:48.452765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.452798Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.452872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:48.455430Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 
8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:48.455502Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:48.455586Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:48.455728Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:48.455774Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:48.455853Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:48.455903Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:48.455954Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:48.456002Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:48.456037Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:48.456340Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:48.456386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:48.456423Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:48.456460Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:48.456515Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:48.456558Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:48.456597Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:48.456628Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:48.456656Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:48.469229Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:48.469301Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:48.469339Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:48.469378Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:48.469441Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:48.469998Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.470069Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.470113Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:48.470199Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 
MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:48.470238Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:48.470348Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:48.470387Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:48.470448Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:48.470491Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:48.474444Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:48.474509Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:48.474705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.474743Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.474792Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:48.474837Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:48.474878Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:48.474915Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:48.474948Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:48.475015Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:48.475069Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:48.475118Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:48.475152Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:48.475338Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:48.475377Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:48.475401Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:48.475426Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:48.475461Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:48.475521Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:48.475547Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:48.475586Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:48.475620Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:48.475663Z node 1 :TX_DATASHARD TRACE: Operation 
[1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:48.475710Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:48.475745Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:48.475789Z node 1 :TX_DATA ... lt to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.850304Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.850392Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.850420Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.850463Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.850490Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.850566Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.850586Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.850612Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.850634Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.850706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.850728Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.850755Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.850775Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.850866Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.850892Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.850950Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.850979Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.851064Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.851088Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T18:03:52.851228Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-04-17T18:03:52.851260Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.851289Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-04-17T18:03:52.851390Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-04-17T18:03:52.851418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.851456Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-04-17T18:03:52.851555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-04-17T18:03:52.851587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.851607Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-04-17T18:03:52.851692Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-17T18:03:52.851717Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.851738Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-17T18:03:52.851811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-17T18:03:52.851843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.851863Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-17T18:03:52.851933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-17T18:03:52.851955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.851974Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-17T18:03:52.852033Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-17T18:03:52.852055Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852094Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-17T18:03:52.852184Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-17T18:03:52.852214Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852234Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-17T18:03:52.852306Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 
1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T18:03:52.852329Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852347Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-17T18:03:52.852404Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T18:03:52.852425Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852445Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T18:03:52.852505Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:52.852540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852565Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T18:03:52.852651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:52.852675Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852696Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T18:03:52.852752Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:52.852773Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852792Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T18:03:52.852864Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:52.852889Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.852909Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-17T18:03:52.852979Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T18:03:52.853004Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.853037Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T18:03:52.853124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T18:03:52.853150Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.853170Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T18:03:52.867398Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.867468Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.867518Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-04-17T18:03:52.867575Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:52.867607Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.867845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:52.867896Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.867928Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2025-04-17T18:03:48.151969Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:48.166286Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:48.166742Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:48.167009Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:48.214530Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:48.219529Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:48.300665Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:48.302611Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:48.302683Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:48.302795Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:48.303159Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:48.303699Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:48.303767Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:48.304078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:48.304116Z node 1 :IMPORT WARN: Table profiles were not loaded 
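The startup records above show a shard booting: TxInitSchema, TTxInit, and TTxInitRestored run first, then the tablet switches to the WaitScheme state and skips mediator time cast registration because processing params are unknown until the scheme transaction arrives. A compact sketch of that transition, assuming hypothetical types (this is not the real TDataShard interface):

    #include <cstdio>
    #include <optional>

    enum class EState { Uninitialized, WaitScheme, Ready };

    struct TShardBoot {
        EState State = EState::Uninitialized;
        std::optional<int> ProcessingParams;  // absent until the scheme tx is applied

        void OnActivateExecutor() {
            // TxInitSchema / TTxInit / TTxInitRestored would execute here.
            State = EState::WaitScheme;
            if (!ProcessingParams)
                std::printf("not sending time cast registration in state "
                            "WaitScheme: missing processing params\n");
        }
    };

    int main() { TShardBoot shard; shard.OnActivateExecutor(); }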
2025-04-17T18:03:48.366797Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:48.404444Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:48.404648Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:48.404764Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:48.404802Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:48.404839Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:48.404876Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:48.405042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.405095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.405365Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:48.405462Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:48.405587Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:48.405625Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:48.405675Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:48.405719Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:48.405758Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:48.405790Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:48.405827Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:48.405928Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.405962Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.406033Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:48.408633Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:48.408687Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:48.408769Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:48.408905Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:48.408949Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:48.409028Z node 1 
:TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:48.409075Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:48.409109Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:48.409161Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:48.409200Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:48.409547Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:48.409596Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:48.409632Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:48.409665Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:48.409714Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:48.409770Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:48.409810Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:48.409841Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:48.409869Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:48.422212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:48.422280Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:48.422314Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:48.422353Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:48.422411Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:48.422976Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.423032Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.423076Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:48.423148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:48.423188Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:48.423307Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:48.423358Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:03:48.423406Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:48.423465Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:48.427205Z node 1 :TX_DATASHARD DEBUG: Planned 
transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:48.427263Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:48.427469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.427505Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.427565Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:48.427615Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:48.427647Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:48.427684Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-17T18:03:48.427716Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:48.427762Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:03:48.427829Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:48.427869Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:48.427902Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:48.428061Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-17T18:03:48.428097Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:03:48.428122Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:48.428146Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:48.428169Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:48.428225Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:48.428248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:48.428286Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:48.428339Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:48.428379Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-17T18:03:48.428411Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:48.428475Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-04-17T18:03:48.428516Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:03:48.428551Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1 ... 
:52.905947Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:134] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.905973Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 134] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:52.905997Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:52.906013Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.906074Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.906087Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:137] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.906105Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:52.906134Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:52.906156Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.906238Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.906267Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:140] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.906291Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.906317Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:52.906331Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.906405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.906424Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:143] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.906453Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.906481Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:52.906497Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.906555Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.906570Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:146] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.906592Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.906607Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.906673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.906688Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437186 on unit CompleteOperation 
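The tail of this test is the source shard draining its outgoing readsets: every {TEvReadSet ... Seqno# N} ack received from tablet 9437186 removes one in-flight readset at 9437184 ("Receive RS Ack at ..."). A rough sketch of that bookkeeping; the field names mirror the log records, while the struct and the container are assumptions rather than YDB's real types:

    #include <cstdint>
    #include <cstdio>
    #include <set>

    // Mirrors the {TEvReadSet step# txid# TabletSource# TabletDest#
    //              SetTabletConsumer# Flags# Seqno#} records in the log.
    struct TReadSetAck {
        uint64_t Step, TxId, Source, Dest, Consumer, Flags, Seqno;
    };

    struct TOutReadSets {
        std::set<uint64_t> InFlight;  // seqnos still awaiting an ack

        void OnAck(const TReadSetAck& ack) {
            if (InFlight.erase(ack.Seqno))
                std::printf("Receive RS Ack: txId %llu seqno %llu\n",
                            (unsigned long long)ack.TxId,
                            (unsigned long long)ack.Seqno);
        }
    };

    int main() {
        TOutReadSets out;
        out.InFlight = {49, 50};
        out.OnAck({1000005, 149, 9437184, 9437186, 9437186, 0, 49});
    }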
2025-04-17T18:03:52.906719Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.906736Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.906788Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:03:52.906802Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2025-04-17T18:03:52.906835Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:03:52.906852Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:03:52.906982Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-17T18:03:52.907013Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907034Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-17T18:03:52.907103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-17T18:03:52.907268Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907298Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-17T18:03:52.907351Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-17T18:03:52.907367Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907384Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-17T18:03:52.907412Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-17T18:03:52.907442Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907460Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-17T18:03:52.907507Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-17T18:03:52.907524Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907537Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-17T18:03:52.907567Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-17T18:03:52.907580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907638Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-17T18:03:52.907715Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-17T18:03:52.907751Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907766Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-17T18:03:52.907804Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-17T18:03:52.907817Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907832Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-17T18:03:52.907869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-17T18:03:52.907902Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.907920Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-17T18:03:52.907971Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-17T18:03:52.907996Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.908027Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-17T18:03:52.908081Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-17T18:03:52.908095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.908111Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-17T18:03:52.908140Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-17T18:03:52.908154Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.908205Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-17T18:03:52.908267Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 137 TabletSource# 
9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-17T18:03:52.908284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.908297Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-17T18:03:52.908339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-17T18:03:52.908353Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.908378Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-17T18:03:52.908421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-17T18:03:52.908439Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:52.908452Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143
>> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD]
>> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD]
>> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail
>> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD]
>> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace
>> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD]
>> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD]
>> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD]
>> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD]
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest
>> TPersQueueTest::TestReadPartitionStatus [GOOD]
>> TPersQueueTest::TxCounters
>> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD]
Test command err:
2025-04-17T18:03:46.470243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:46.470640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:46.470683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e0d/r3tmp/tmp5XZd4o/pdisk_1.dat 2025-04-17T18:03:46.807053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.843533Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.880944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:46.881071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:46.892281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:46.972686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:47.003895Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:47.004614Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:47.004933Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:47.005116Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:47.045650Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:47.046197Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:47.046277Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:47.047616Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:47.047728Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:47.047802Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:47.048139Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:47.048291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:47.048385Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:47.059052Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:47.088450Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:47.088618Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:47.088737Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:47.088775Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:47.088807Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:47.088836Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:47.089051Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.089113Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.089331Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:47.089424Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:47.089557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:47.089598Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:47.089637Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:47.089672Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:47.089717Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:47.089747Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:47.089787Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:47.089913Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.089958Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.090007Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:47.090374Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:47.090414Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:47.090572Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:47.090802Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:47.090850Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:47.090945Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:47.090990Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:47.091028Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:47.091064Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:47.091104Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:47.091346Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:47.091377Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:47.091417Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:47.091443Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:47.091500Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:47.091530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:47.091565Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:47.091591Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:47.091619Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:47.092833Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:47.092887Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:47.103475Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:47.103545Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:47.103580Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:47.103626Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:47.103678Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:47.255571Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.255626Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.255666Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:47.256067Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:47.256111Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:47.256221Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:47.256261Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:47.256300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:47.256337Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:47.260920Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:47.260990Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:47.261725Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.261767Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.261823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:4 ... because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state); 2025-04-17T18:03:53.862211Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1032:2737] TxId: 281474976715671. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-17T18:03:53.862248Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1032:2737] TxId: 281474976715671. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-04-17T18:03:53.862301Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1032:2737] TxId: 281474976715671. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:53.862858Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2eqsv10eaxzw3v3mex2ys0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzk3MWQ3Y2YtODEzY2Y5MWQtZDQxZmQ2NjYtYWEyOWI5ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:53.862894Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Ctx: { TraceId: 01js2eqsv10eaxzw3v3mex2ys0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzk3MWQ3Y2YtODEzY2Y5MWQtZDQxZmQ2NjYtYWEyOWI5ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:03:53.862929Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1144:2725] TxId: 281474976715677. Ctx: { TraceId: 01js2eqsv10eaxzw3v3mex2ys0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzk3MWQ3Y2YtODEzY2Y5MWQtZDQxZmQ2NjYtYWEyOWI5ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:03:53.862978Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1144:2725] TxId: 281474976715677. Ctx: { TraceId: 01js2eqsv10eaxzw3v3mex2ys0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzk3MWQ3Y2YtODEzY2Y5MWQtZDQxZmQ2NjYtYWEyOWI5ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:53.863044Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1144:2725] TxId: 281474976715677. Ctx: { TraceId: 01js2eqsv10eaxzw3v3mex2ys0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzk3MWQ3Y2YtODEzY2Y5MWQtZDQxZmQ2NjYtYWEyOWI5ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:53.863153Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, ActorId: [2:902:2737], ActorState: ExecuteState, TraceId: 01js2eqsv5136fcdscbrsmzmz2, Create QueryResponse for error on request, msg: 2025-04-17T18:03:53.863787Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Resolved key sets: 0 2025-04-17T18:03:53.864634Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2eqsv13ym2yewf6qfj81qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc5YjFkYTQtM2I5ZmIyZGYtODgyYWU3NjktZjQwZjk0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:53.864671Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Ctx: { TraceId: 01js2eqsv13ym2yewf6qfj81qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc5YjFkYTQtM2I5ZmIyZGYtODgyYWU3NjktZjQwZjk0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:03:53.864717Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1153:2720] TxId: 281474976715678. Ctx: { TraceId: 01js2eqsv13ym2yewf6qfj81qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc5YjFkYTQtM2I5ZmIyZGYtODgyYWU3NjktZjQwZjk0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:03:53.864774Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1153:2720] TxId: 281474976715678. Ctx: { TraceId: 01js2eqsv13ym2yewf6qfj81qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc5YjFkYTQtM2I5ZmIyZGYtODgyYWU3NjktZjQwZjk0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:53.864809Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1153:2720] TxId: 281474976715678. Ctx: { TraceId: 01js2eqsv13ym2yewf6qfj81qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc5YjFkYTQtM2I5ZmIyZGYtODgyYWU3NjktZjQwZjk0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:53.865112Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Resolved key sets: 0 2025-04-17T18:03:53.865256Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01js2eqsv16vepbbv9eax00er2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjM5NjFhMTktNmYwZGE4YmItZTEyZjc0ZGMtZWQxZGY2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:53.865286Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Ctx: { TraceId: 01js2eqsv16vepbbv9eax00er2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjM5NjFhMTktNmYwZGE4YmItZTEyZjc0ZGMtZWQxZGY2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:03:53.865320Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1155:2722] TxId: 281474976715679. Ctx: { TraceId: 01js2eqsv16vepbbv9eax00er2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjM5NjFhMTktNmYwZGE4YmItZTEyZjc0ZGMtZWQxZGY2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:03:53.865368Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1155:2722] TxId: 281474976715679. Ctx: { TraceId: 01js2eqsv16vepbbv9eax00er2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjM5NjFhMTktNmYwZGE4YmItZTEyZjc0ZGMtZWQxZGY2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:53.865403Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1155:2722] TxId: 281474976715679. Ctx: { TraceId: 01js2eqsv16vepbbv9eax00er2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjM5NjFhMTktNmYwZGE4YmItZTEyZjc0ZGMtZWQxZGY2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:53.865913Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Resolved key sets: 0 2025-04-17T18:03:53.865955Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Resolved key sets: 0 2025-04-17T18:03:53.866387Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2eqsv4aq0cgtbnmwyqg3yq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWNjZGJlYy1lZWZjNWUxMS0zOTUwN2NhNy0zM2Y5MTYwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:53.866425Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Ctx: { TraceId: 01js2eqsv4aq0cgtbnmwyqg3yq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWNjZGJlYy1lZWZjNWUxMS0zOTUwN2NhNy0zM2Y5MTYwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:03:53.866458Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1164:2733] TxId: 281474976715680. Ctx: { TraceId: 01js2eqsv4aq0cgtbnmwyqg3yq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWNjZGJlYy1lZWZjNWUxMS0zOTUwN2NhNy0zM2Y5MTYwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:03:53.866538Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1164:2733] TxId: 281474976715680. 
Ctx: { TraceId: 01js2eqsv4aq0cgtbnmwyqg3yq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWNjZGJlYy1lZWZjNWUxMS0zOTUwN2NhNy0zM2Y5MTYwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:53.866580Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1164:2733] TxId: 281474976715680. Ctx: { TraceId: 01js2eqsv4aq0cgtbnmwyqg3yq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWNjZGJlYy1lZWZjNWUxMS0zOTUwN2NhNy0zM2Y5MTYwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:53.866626Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:53.866655Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:03:53.866686Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1165:2737] TxId: 281474976715681. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:03:53.866725Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1165:2737] TxId: 281474976715681. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:53.866758Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1165:2737] TxId: 281474976715681. Ctx: { TraceId: 01js2eqsv5136fcdscbrsmzmz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI5NDJhZjktYjdhYWYxNzQtYjc3NWQwODgtYjUwMzNjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD]
>> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD]
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD]
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD]
>> TBalanceCoverageBuilderTest::TestEmpty [GOOD]
>> GenericFederatedQuery::IcebergHadoopBasicSelectAll
>> GenericFederatedQuery::IcebergHadoopTokenSelectAll
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD]
>> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD]
>> BsControllerConfig::SelectAllGroups [GOOD]
>> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD]
Test command err:
2025-04-17T18:03:46.114758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:46.115230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:46.115284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e3a/r3tmp/tmpbqOitU/pdisk_1.dat 2025-04-17T18:03:46.509163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.558447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.601624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:46.602252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:46.614878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:46.703723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.752217Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.753206Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.753629Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:46.753885Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:46.788039Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:46.788547Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:46.788617Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:46.789833Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:46.789902Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:46.789942Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:46.790246Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:46.790342Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:46.790413Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:46.801099Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:46.824955Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:46.825118Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:46.825244Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:46.825280Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:46.825312Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:46.825358Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:46.825561Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.825618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.825856Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:46.825927Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:46.826052Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:46.826097Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.826131Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:46.826162Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:46.826213Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:46.826241Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:46.826283Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:46.826395Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.826426Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.826482Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:46.826854Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:46.826893Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:46.826999Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:46.827215Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:46.827279Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:46.827369Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:46.827408Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.827453Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:46.827512Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:46.827542Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.827827Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:46.827864Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:46.827896Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:46.827926Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.827974Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:46.828013Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:46.828048Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:46.828077Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:46.828120Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:46.829559Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:46.829609Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:46.840197Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:46.840260Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.840311Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.840367Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:46.840429Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:46.990371Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.990419Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.990455Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:46.990784Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:46.990824Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:46.990931Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:46.990972Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:46.991022Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:46.991061Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:46.995246Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:46.995303Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:46.996095Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.996143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.996185Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:4 ... 058929Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:54.058984Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-04-17T18:03:54.059050Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-04-17T18:03:54.059111Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:03:54.059162Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-17T18:03:54.059204Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1184:2935] 2025-04-17T18:03:54.059226Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-04-17T18:03:54.059254Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-04-17T18:03:54.059276Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-17T18:03:54.059454Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1166:2917], Recipient [2:666:2570]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037891 ClientId: [2:1166:2917] ServerId: [2:1168:2919] } 2025-04-17T18:03:54.059483Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:03:54.059551Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1029:2825], Recipient [2:755:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-04-17T18:03:54.059589Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-04-17T18:03:54.059833Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1029:2825], Recipient [2:1029:2825]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:54.059860Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:54.059989Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1170:2921], Recipient 
[2:755:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1170:2921] ServerId: [2:1173:2924] } 2025-04-17T18:03:54.060013Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:03:54.060165Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1024:2821]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 2000} 2025-04-17T18:03:54.060191Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T18:03:54.060219Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-04-17T18:03:54.060245Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:03:54.060344Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-17T18:03:54.060376Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:54.060404Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2025-04-17T18:03:54.060429Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2025-04-17T18:03:54.060453Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2025-04-17T18:03:54.060477Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-17T18:03:54.060516Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-17T18:03:54.060550Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-17T18:03:54.060572Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:54.060592Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-04-17T18:03:54.060612Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-04-17T18:03:54.060632Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-04-17T18:03:54.060652Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-04-17T18:03:54.060676Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-17T18:03:54.060745Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1168:2919], Recipient [2:1024:2821]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:03:54.060773Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:03:54.060805Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:1166:2917], serverId# [2:1168:2919], sessionId# [0:0:0] 2025-04-17T18:03:54.060845Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1173:2924], Recipient [2:1029:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:03:54.060868Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:03:54.060891Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1170:2921], serverId# [2:1173:2924], sessionId# [0:0:0] 2025-04-17T18:03:54.061092Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient 
[2:1029:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-04-17T18:03:54.061121Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T18:03:54.061144Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-04-17T18:03:54.061166Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-17T18:03:54.061749Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1024:2821]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-17T18:03:54.061790Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T18:03:54.061817Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 2000 2025-04-17T18:03:54.061848Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-17T18:03:54.061886Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-17T18:03:54.061977Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1029:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-17T18:03:54.062001Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T18:03:54.062021Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-04-17T18:03:54.062052Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-17T18:03:54.062081Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-17T18:03:54.072928Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 ack split to schemeshard 281474976715664 2025-04-17T18:03:54.073707Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-04-17T18:03:54.101522Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:409:2404], Recipient [2:671:2572] 2025-04-17T18:03:54.101593Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2025-04-17T18:03:54.104071Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:409:2404], Recipient [2:763:2638] 2025-04-17T18:03:54.104123Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-17T18:03:54.105511Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2025-04-17T18:03:54.105577Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to 
activation from: 2025-04-17T18:03:54.105812Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:657:2564], Recipient [2:666:2570]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:03:54.106610Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-04-17T18:03:54.106658Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:03:54.106756Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:755:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:03:54.475839Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:977:2681], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937273 } TxBody: " \0008\000`\200\200\200\005j\324\006\010\001\022\225\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021y\n\000\000\002\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2025-04-17T18:03:54.475913Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:54.476007Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-17T18:03:54.476429Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-04-17T18:03:54.476900Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 2025-04-17T18:03:46.277813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:46.278423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:46.278494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e11/r3tmp/tmpl2JB5P/pdisk_1.dat 2025-04-17T18:03:46.655981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.691779Z node 1 :KQP_RESOURCE_MANAGER INFO: Updated table service config: ComputeActorsCount: 10000 ChannelBufferSize: 8388608 MkqlLightProgramMemoryLimit: 1048576 MkqlHeavyProgramMemoryLimit: 31457280 QueryMemoryLimit: 32212254720 PublishStatisticsIntervalSec: 2 MaxTotalChannelBuffersSize: 2147483648 MinChannelBufferSize: 2048 2025-04-17T18:03:46.691888Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T18:03:46.691946Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 7 2025-04-17T18:03:46.692058Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Updated table service config. 2025-04-17T18:03:46.692229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.729000Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:03:46.729864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:46.729988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:46.730269Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:03:46.741661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:46.823844Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:03:46.823908Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:03:46.824091Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:642:2550] 2025-04-17T18:03:46.931881Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:03:46.931965Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:03:46.932440Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:03:46.932533Z node 1 :TX_PROXY 
DEBUG: Actor# [1:642:2550] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:03:46.932753Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:03:46.932919Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:03:46.933014Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:03:46.934422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.934774Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:03:46.935222Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:03:46.935290Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:03:46.962061Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.963020Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.963398Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:667:2571] 2025-04-17T18:03:46.963618Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:47.000352Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:47.001085Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:47.001174Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:47.002827Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:47.002896Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:47.002949Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:47.003236Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:47.003337Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:47.003420Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:683:2571] in generation 1 2025-04-17T18:03:47.014163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:47.048853Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:47.049030Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:47.049142Z node 1 
:TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2581] 2025-04-17T18:03:47.049174Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:47.049209Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:47.049236Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:47.049427Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:667:2571], Recipient [1:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.049472Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.049704Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:47.049775Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:47.049887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:47.049923Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:47.049978Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:47.050014Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:47.050069Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:47.050099Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:47.050131Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:47.050496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:672:2573], Recipient [1:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.050524Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.050563Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:672:2573], sessionId# [0:0:0] 2025-04-17T18:03:47.050610Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:672:2573] 2025-04-17T18:03:47.050638Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:47.050722Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:47.050873Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:47.050917Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:47.051005Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:47.051054Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:47.051082Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:47.051108Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:47.051136Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:47.051395Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:47.051426Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:47.051454Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:47.051506Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:47.051549Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:47.051569Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:47.051593Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:47.051616Z node 1 :TX_DATASH ... d::TEvSchemaChangedResult 2025-04-17T18:03:54.806009Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037888 state Ready 2025-04-17T18:03:54.806057Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 --- resending captured proposals --- waiting for result 2025-04-17T18:03:54.806780Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553206, Sender [2:890:2720], Recipient [2:667:2571]: NKikimrTxDataShard.TEvKqpScan TxId: 281474976715662 ScanId: 2 LocalPathId: 2 TablePath: "/Root/table-1" SchemaVersion: 1 ColumnTags: 3 ColumnTypes: 2 Ranges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } Snapshot { Step: 2000 TxId: 281474976715661 } Generation: 1 ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC StatsMode: DQ_STATS_MODE_NONE ColumnTypeInfos { } LockNodeId: 0 2025-04-17T18:03:54.806847Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715662. Table '/Root/table-1' schema version changed at 72075186224037888 2025-04-17T18:03:54.806928Z node 2 :KQP_COMPUTE WARN: SelfId: [2:890:2720]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/table-1' scheme changed., code: 2028 , tablet id: 72075186224037888, actor_id: [2:667:2571] 2025-04-17T18:03:54.806958Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Enqueue for resolve 72075186224037888 2025-04-17T18:03:54.806993Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:359;event=scanner_finished;tablet_id=72075186224037888;stop_shard=1; 2025-04-17T18:03:54.807023Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:93;event=stop_scanner;actor_id=NO_VALUE_OPTIONAL;message=;final_flag=1; 2025-04-17T18:03:54.807089Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Sending TEvResolveKeySet update for table '/Root/table-1', range: [(Uint32 : NULL) ; ()), attempt #1 2025-04-17T18:03:54.807237Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Received TEvResolveKeySetResult update for table '/Root/table-1' 2025-04-17T18:03:54.807277Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:890:2720]. Resolve request failed for table '/Root/table-1', ErrorCount# 1 2025-04-17T18:03:54.807343Z node 2 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:167 :TEvTerminateFromFetcher: [2:890:2720]/[2:888:2718] 2025-04-17T18:03:54.807396Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:888:2718], TxId: 281474976715662, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==. TraceId : 01js2eqtk71k5yw5qyf7zs444a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-04-17T18:03:54.807513Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2025-04-17T18:03:54.807567Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T18:03:54.809164Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T18:03:54.809260Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:99;event=TEvTerminateFromCompute;sender=[2:888:2718];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T18:03:54.809305Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:289;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T18:03:54.809428Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-04-17T18:03:54.809574Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:888:2718], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 164864 Tasks { TaskId: 1 CpuTimeUs: 163793 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 8 BuildCpuTimeUs: 163785 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-gkdwnlcfnq" NodeId: 2 CreateTimeMs: 1744913034282 UpdateTimeMs: 1744913034807 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:54.809667Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:888:2718] 2025-04-17T18:03:54.809728Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-17T18:03:54.809778Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:889:2719], task: 2 2025-04-17T18:03:54.809865Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:54.809924Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==. CustomerSuppliedId : . TraceId : 01js2eqtk71k5yw5qyf7zs444a. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646735 2025-04-17T18:03:54.809975Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==. CustomerSuppliedId : . TraceId : 01js2eqtk71k5yw5qyf7zs444a. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Handle abort execution event from: [2:883:2692], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-04-17T18:03:54.810042Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-04-17T18:03:54.810107Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-17T18:03:54.812060Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-17T18:03:54.812239Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, ActorId: [2:857:2692], ActorState: ExecuteState, TraceId: 01js2eqtk71k5yw5qyf7zs444a, Create QueryResponse for error on request, msg: 2025-04-17T18:03:54.812515Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T18:03:54.812547Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-04-17T18:03:54.812905Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-17T18:03:54.813056Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-17T18:03:54.813086Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 0 ProcessProposeTransaction 2025-04-17T18:03:54.813199Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 0 reqId# [2:926:2751] SnapshotReq marker# P0 2025-04-17T18:03:54.813418Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2025-04-17T18:03:54.813553Z node 2 :TX_PROXY DEBUG: Actor# [2:928:2751] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-17T18:03:54.813672Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:54.813711Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:03:54.813754Z node 2 :KQP_EXECUTER INFO: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:03:54.813824Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:54.813866Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01js2eqtk71k5yw5qyf7zs444a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:54.814063Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:594:2519], selfId: [2:57:2104], source: [2:857:2692] 2025-04-17T18:03:54.814113Z node 2 :TX_PROXY DEBUG: Actor# [2:928:2751] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-17T18:03:54.814242Z node 2 :TX_PROXY DEBUG: Actor# [2:926:2751] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-17T18:03:54.814395Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:926:2751], Recipient [2:667:2571]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-17T18:03:54.814923Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OWE2ZjViNWItYTk0NzEyMi1mM2Y2ZGVlOS0xN2FhMzFhNg==, workerId: [2:857:2692], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 332 >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2025-04-17T18:03:53.599827Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:53.615715Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:53.622333Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T18:03:53.625156Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:53.626099Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T18:03:53.626672Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:53.626703Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:53.626863Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T18:03:53.639219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T18:03:53.639414Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T18:03:53.641611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T18:03:53.641775Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.641867Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.641937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:03:53.835308Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.135902s 2025-04-17T18:03:53.835467Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.136084s >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] >> DataShardTxOrder::DelayData [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] Test command err: 2025-04-17T18:03:46.086732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:46.087161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:46.087210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e40/r3tmp/tmpxCYQVw/pdisk_1.dat 2025-04-17T18:03:46.509138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.570454Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.609672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:46.609813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:46.621306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:46.704735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:47.023146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:47.287593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:829:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:47.287721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:838:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:47.287796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:47.293983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:03:47.448861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:843:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:03:47.511123Z node 1 :TX_PROXY ERROR: Actor# [1:903:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:48.129505Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eqm6jfpe3zk2x00xddktz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFhM2EwMzItNmNlOWZmOTQtODNjNGE3YzMtNDZiYjBlMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:48.231836Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eqn2a0x0jh2zdksjtr5d2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYzZTZlMjQtZmUzY2U1ZTYtYWFkNzk3MTQtYmU5ZDY0ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:48.845499Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2eqnarcr2rycz4rpnregyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMzZDQ3MTgtNWE4MDY5MTgtODA3MTYyNDktM2ZiYWExOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-17T18:03:49.212604Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqp0r6t8xrjj0x4dm7n51, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVhZWIxNTktZmQyMDhmMTUtODhlNTNmODQtY2ZmMGU1ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:49.297316Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqp3a3tdezm9vj34qsskf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMzZDQ3MTgtNWE4MDY5MTgtODA3MTYyNDktM2ZiYWExOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:49.387631Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2eqp5z0hshtvvx0ckjkzx8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMzZDQ3MTgtNWE4MDY5MTgtODA3MTYyNDktM2ZiYWExOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:49.458127Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmMzZDQ3MTgtNWE4MDY5MTgtODA3MTYyNDktM2ZiYWExOTQ=, ActorId: [1:966:2781], ActorState: ExecuteState, TraceId: 01js2eqp8e3yft67bpcxf4p3dj, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-17T18:03:49.469671Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2eqp8e3yft67bpcxf4p3dj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMzZDQ3MTgtNWE4MDY5MTgtODA3MTYyNDktM2ZiYWExOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:52.557806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:52.558066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:52.558262Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e40/r3tmp/tmpYFAbuy/pdisk_1.dat 2025-04-17T18:03:52.820684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:52.846597Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:52.882958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:52.883085Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:52.894413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:52.976830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:53.230277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:53.487019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:53.487104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:53.487169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:53.491868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:03:53.642662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:03:53.678134Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:53.726123Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eqt8d7tfhpzs24bf3km6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTcxMzk0ZDMtYTc4MGZhZjMtYjIzMTU3Ny02MjRiMjJiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:53.783145Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eqtgf3275q80tzjjffra6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjY0YzZlNDUtY2E1Y2E1YzItNmE1ZGU5YjktZDlmZTJhZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2025-04-17T18:03:54.268473Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2eqtqt22jeyn33eebts00y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGUxZWJiYWYtMjNiYWU3Mi1jNTI0ZDYwMC1mY2QyNjY1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-17T18:03:54.563512Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqv1bavk0jm55ss30ekhv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUwMzdlZGQtNGQ2MmU3MTAtNTk0MDI0NmUtMWQ3MGIwYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2025-04-17T18:03:54.638834Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqvac88ts8gg2gn48rg2q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUwMzdlZGQtNGQ2MmU3MTAtNTk0MDI0NmUtMWQ3MGIwYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2025-04-17T18:03:55.019510Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2eqvpx7zxqmdw7j61hq70k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGYzMGI2ODUtOTg2M2JkNzEtOWJjZjMyYjItYzI4OWQ2YjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2025-04-17T18:03:55.112412Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrt93etn0p44xa8e3fw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGUxZWJiYWYtMjNiYWU3Mi1jNTI0ZDYwMC1mY2QyNjY1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2025-04-17T18:03:55.181368Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2eqvvbc2mkjvmtrekcyx4j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGUxZWJiYWYtMjNiYWU3Mi1jNTI0ZDYwMC1mY2QyNjY1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2025-04-17T18:03:55.232749Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGUxZWJiYWYtMjNiYWU3Mi1jNTI0ZDYwMC1mY2QyNjY1Nw==, ActorId: [2:975:2779], ActorState: ExecuteState, TraceId: 01js2eqvxgfk78p8vc21wsh1h9, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: 2025-04-17T18:03:46.190335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:46.190810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:46.190880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e22/r3tmp/tmp4pjZUz/pdisk_1.dat 2025-04-17T18:03:46.573629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.615555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.658127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:46.658264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:46.669525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:46.748267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.781829Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.782699Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.783016Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:46.783186Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:46.812699Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:46.813259Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:46.813327Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:46.814596Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:46.814688Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:46.814757Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:46.815081Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:46.815182Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:46.815237Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:46.825932Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:46.856306Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:46.856486Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:46.856596Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:46.856628Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:46.856659Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:46.856691Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:46.856888Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.856943Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.857155Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:46.857227Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:46.857350Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:46.857390Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.857423Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:46.857464Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:46.857501Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:46.857529Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:46.857569Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:46.857689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.857734Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.857781Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:46.858131Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:46.858169Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:46.858307Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:46.858532Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:46.858576Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:46.858671Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:46.858709Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.858743Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:46.858772Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:46.858820Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.859069Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:46.859105Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:46.859141Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:46.859176Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.859223Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:46.859251Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:46.859279Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:46.859307Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:46.859346Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:46.860674Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:46.860720Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:46.871331Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:46.871390Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:46.871446Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:46.871494Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:46.871550Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:47.020088Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.020132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.020188Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:47.020540Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:47.020583Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:47.020683Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:47.020719Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:47.020755Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:47.020790Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:47.024750Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:47.024805Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:47.025471Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.025508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.025583Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:4 ... MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1086:2878], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 407 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 146 FinishTimeMs: 1744913035372 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 113 BuildCpuTimeUs: 33 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913035371 CreateTimeMs: 1744913035349 UpdateTimeMs: 1744913035372 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:55.374379Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1086:2878] 2025-04-17T18:03:55.374419Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1087:2879], CA [2:1091:2883], CA [2:1088:2880], CA [2:1089:2881], 2025-04-17T18:03:55.374448Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1087:2879], CA [2:1091:2883], CA [2:1088:2880], CA [2:1089:2881], 2025-04-17T18:03:55.374790Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1087:2879], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 639 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 406 FinishTimeMs: 1744913035372 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 349 BuildCpuTimeUs: 57 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913035372 CreateTimeMs: 1744913035349 UpdateTimeMs: 1744913035372 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:55.374837Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1087:2879] 2025-04-17T18:03:55.374871Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1091:2883], CA [2:1088:2880], CA [2:1089:2881], 2025-04-17T18:03:55.374898Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1091:2883], CA [2:1088:2880], CA [2:1089:2881], 2025-04-17T18:03:55.375054Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1088:2880], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 437 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 246 FinishTimeMs: 1744913035373 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 220 BuildCpuTimeUs: 26 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913035372 CreateTimeMs: 1744913035349 UpdateTimeMs: 1744913035373 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:55.375102Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1088:2880] 2025-04-17T18:03:55.375135Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1091:2883], CA [2:1089:2881], 2025-04-17T18:03:55.375157Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. 
Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1091:2883], CA [2:1089:2881], 2025-04-17T18:03:55.375227Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1089:2881], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 535 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 320 FinishTimeMs: 1744913035374 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 263 BuildCpuTimeUs: 57 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913035373 CreateTimeMs: 1744913035350 UpdateTimeMs: 1744913035374 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:55.375261Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1089:2881] 2025-04-17T18:03:55.375283Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1091:2883], 2025-04-17T18:03:55.375304Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1091:2883], 2025-04-17T18:03:55.375459Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1090:2882], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 523 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 185 FinishTimeMs: 1744913035374 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 135 BuildCpuTimeUs: 50 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 CreateTimeMs: 1744913035350 UpdateTimeMs: 1744913035374 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:55.375496Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [2:1090:2882] 2025-04-17T18:03:55.375524Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1091:2883], 2025-04-17T18:03:55.375544Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1091:2883], 2025-04-17T18:03:55.375710Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1091:2883], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 327 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 135 FinishTimeMs: 1744913035375 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 104 BuildCpuTimeUs: 31 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913035374 CreateTimeMs: 1744913035350 UpdateTimeMs: 1744913035375 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:55.375754Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1091:2883] 2025-04-17T18:03:55.375907Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:55.375963Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1079:2860] TxId: 281474976715667. Ctx: { TraceId: 01js2eqvrg360gfttg9zp7a6qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIwNTgxYzAtYjcxMjUwMjctMjc1ZjJmNDUtZjBmMGI5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.003969s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2025-04-17T18:03:45.804189Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:45.819874Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:45.820299Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:45.820513Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:45.861343Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:45.866076Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:45.931881Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:45.933549Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:45.933625Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:45.933747Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:45.934070Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:45.934510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:45.934581Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:45.934848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:45.934878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.005042Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:46.028971Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:46.029135Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:46.029238Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:46.029272Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:46.029310Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:46.029341Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:46.029455Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.029501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.029716Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 
2025-04-17T18:03:46.029800Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:46.029898Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:46.029935Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.029973Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:46.030007Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:46.030037Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:46.030062Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:46.030092Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:46.030196Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.030223Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.030267Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:46.032496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:46.032545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:46.032610Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:46.032738Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:46.032778Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:46.032852Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:46.032892Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.032924Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:46.032979Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:46.033015Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:46.033277Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:46.033311Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:46.033348Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:46.033383Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:46.033424Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:46.033454Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing 
on unit FinishPropose 2025-04-17T18:03:46.033490Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:46.033518Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:46.033538Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:46.045620Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:46.045700Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:46.045739Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:46.045773Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:46.045822Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:46.046444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.046484Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.046512Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:46.046557Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:46.046576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:46.046663Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:46.046702Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.046750Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:46.046797Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:46.049887Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:46.049933Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:46.050070Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.050093Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.050129Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:46.050154Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:46.050209Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:46.050243Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:46.050274Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:46.050308Z node 1 :TX_DATASHARD 
TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.050338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:46.050408Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:46.050441Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:46.050599Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:46.050633Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.050655Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:46.050675Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:46.050694Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:46.050742Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.050758Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:46.050782Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:46.050806Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:46.050844Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:46.050897Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:46.050930Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:46.050966Z node 1 :TX_D ... 
e 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2025-04-17T18:03:56.137119Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2025-04-17T18:03:56.137140Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2025-04-17T18:03:56.137165Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-17T18:03:56.137184Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2025-04-17T18:03:56.137204Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:03:56.137222Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2025-04-17T18:03:56.137611Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2025-04-17T18:03:56.137670Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:56.137717Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-17T18:03:56.137752Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:03:56.137777Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2025-04-17T18:03:56.137802Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.137969Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2025-04-17T18:03:56.138009Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2025-04-17T18:03:56.138045Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2025-04-17T18:03:56.138076Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2025-04-17T18:03:56.138120Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-17T18:03:56.138164Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2025-04-17T18:03:56.138214Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2025-04-17T18:03:56.138252Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:56.138283Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:56.138318Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-04-17T18:03:56.138596Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:234:2227], Recipient [1:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:56.138631Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:56.138671Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:56.138700Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:56.138730Z node 1 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2025-04-17T18:03:56.138763Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-04-17T18:03:56.138787Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.138806Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:56.138826Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:56.138858Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:56.139352Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-04-17T18:03:56.139398Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.139464Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:56.139489Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-17T18:03:56.139510Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-04-17T18:03:56.139552Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.139580Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-17T18:03:56.139601Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:56.139620Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:56.139659Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2025-04-17T18:03:56.139683Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-04-17T18:03:56.139711Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2025-04-17T18:03:56.139749Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.139763Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:56.139778Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-04-17T18:03:56.139790Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-04-17T18:03:56.139855Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.139870Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-04-17T18:03:56.139881Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-04-17T18:03:56.139893Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-04-17T18:03:56.139906Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.139919Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2025-04-17T18:03:56.139931Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-04-17T18:03:56.139941Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-04-17T18:03:56.139955Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.139965Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-04-17T18:03:56.139975Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-04-17T18:03:56.139986Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-04-17T18:03:56.140008Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.140031Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-04-17T18:03:56.140044Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:03:56.140056Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-04-17T18:03:56.140278Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-04-17T18:03:56.140321Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:03:56.140356Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:56.140376Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:03:56.140404Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-04-17T18:03:56.140453Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.140548Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-04-17T18:03:56.140562Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2025-04-17T18:03:56.140581Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-04-17T18:03:56.140621Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-04-17T18:03:56.140643Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-17T18:03:56.140665Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-04-17T18:03:56.140682Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2025-04-17T18:03:56.140704Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:56.140725Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:56.140764Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:56.140791Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has 
no ready operations at 9437184 2025-04-17T18:03:56.159191Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-04-17T18:03:56.159259Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-17T18:03:56.159322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.159353Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.159425Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.159463Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.159584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.159617Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.159635Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.159648Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> BSCReadOnlyPDisk::ReadOnlySlay >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive >> BsControllerConfig::PDiskCreate [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2025-04-17T18:03:46.037144Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.049911Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.050425Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:46.050679Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:46.101262Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:46.112490Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:46.202855Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:46.204685Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:46.204762Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:46.204877Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:46.205241Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 
2025-04-17T18:03:46.205707Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:46.205778Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:46.206117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:46.206160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:46.301836Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:46.334355Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:46.334562Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:46.334693Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:46.334731Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:46.334772Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:46.334809Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:46.334958Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.335018Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.335291Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:46.335380Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:46.335507Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:46.335544Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:46.335601Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:46.335647Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:46.335689Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:46.335722Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:46.335767Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:46.335876Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.335911Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.335978Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:46.338823Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:46.338875Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:46.338959Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:46.339093Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:46.339136Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:46.339224Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:46.339282Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.339318Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:46.339399Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:46.339441Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:46.339761Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:46.339813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:46.339854Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:46.339888Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:46.339941Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:46.339978Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:46.340019Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:46.340054Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:46.340083Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:46.352113Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:46.352178Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:46.352213Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:46.352253Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:46.352311Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:46.352882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.352935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:46.352980Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:46.353049Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:46.353082Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:46.353190Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 
on unit WaitForPlan 2025-04-17T18:03:46.353233Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.353309Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:46.353354Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:46.357546Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:46.357612Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:46.357811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.357849Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:46.357902Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:46.357944Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:46.357981Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:46.358021Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:46.358056Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:46.358103Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.358159Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:46.358225Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:46.358265Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:46.358443Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:46.358475Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:46.358500Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:46.358522Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:46.358547Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:46.358604Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:46.358632Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:46.358670Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:46.358707Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:46.358752Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:46.358811Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:46.358848Z node 1 :TX_DATASHARD TRACE: Activated 
operation [1000001:1] at 9437184 2025-04-17T18:03:46.358895Z node 1 :TX_D ... eady operations at 9437184 2025-04-17T18:03:56.797982Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.798045Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.798113Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:56.798213Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T18:03:56.798278Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.798436Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:56.798473Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.798495Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.798532Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.798581Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:03:56.798614Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.798727Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:56.798748Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:56.798768Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.798789Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.798829Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.798885Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:03:56.798913Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.799002Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:56.799020Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:56.799036Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:56.799052Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.799072Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.799116Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.799172Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:03:56.799196Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.799287Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:03:56.799306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.799324Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.799352Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.799383Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:03:56.799418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.799500Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.799592Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.799627Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.799677Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:03:56.799713Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.799816Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.799839Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.799879Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.799916Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:03:56.799936Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.800035Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.800067Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2025-04-17T18:03:56.800126Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-17T18:03:56.800209Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.800347Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.800374Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.800407Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:03:56.800443Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 
Seqno# 8} 2025-04-17T18:03:56.800468Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.800557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:56.800577Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2025-04-17T18:03:56.800605Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:03:56.800625Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:56.800799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T18:03:56.800842Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.800888Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2025-04-17T18:03:56.801104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:03:56.801137Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.801160Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2025-04-17T18:03:56.801307Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:03:56.801342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.801364Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2025-04-17T18:03:56.801538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:03:56.801582Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.801615Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2025-04-17T18:03:56.801753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:03:56.801781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.801806Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-04-17T18:03:56.801949Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:03:56.801982Z node 1 :TX_DATASHARD TRACE: StateWork, 
processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.802006Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-04-17T18:03:56.802120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:03:56.802143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.802162Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-04-17T18:03:56.802250Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:344:2312]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:03:56.802273Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:56.802293Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:201:2076] 2025-04-17T18:03:53.599798Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:53.615711Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:53.622363Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T18:03:53.625162Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:53.626133Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T18:03:53.626810Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:53.626859Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:53.627087Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T18:03:53.639215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T18:03:53.639354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T18:03:53.641640Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T18:03:53.641794Z 
node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.641884Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.641944Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:242:2066] recipient: [1:20:2067] 2025-04-17T18:03:53.658482Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:03:53.658649Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:53.670287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:53.670426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:53.670496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:53.670550Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:53.670643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:53.670684Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:53.670720Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:53.670762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:53.681431Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:53.681555Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:53.692338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:53.692517Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T18:03:53.696773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T18:03:53.696854Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T18:03:53.697074Z node 
1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T18:03:53.697983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T18:03:53.715250Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-04-17T18:03:53.715837Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-17T18:03:53.715882Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-17T18:03:53.715907Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-17T18:03:53.715929Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-17T18:03:53.715966Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-17T18:03:53.715989Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-17T18:03:53.716011Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-17T18:03:53.716032Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-17T18:03:53.716053Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-17T18:03:53.716072Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-17T18:03:53.716095Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-17T18:03:53.716132Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-17T18:03:53.716169Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-17T18:03:53.716203Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-17T18:03:53.716229Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-17T18:03:53.716262Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-17T18:03:53.716287Z node 1 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-17T18:03:53.716316Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-17T18:03:53.716343Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-17T18:03:53.716365Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-17T18:03:53.716398Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-04-17T18:03:53.716421Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-04-17T18:03:53.716444Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-17T18:03:53.716466Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-04-17T18:03:53.716485Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-04-17T18:03:53.716509Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-17T18:03:53.716529Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-04-17T18:03:53.716566Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-04-17T18:03:53.716589Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-04-17T18:03:53.716631Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:195:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:195:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:195:2076] 2025-04-17T18:03:55.886685Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:55.887633Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:55.887863Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T18:03:55.889203Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:55.889674Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T18:03:55.890255Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:55.890287Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:55.890473Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T18:03:55.899875Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T18:03:55.900038Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T18:03:55.900150Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T18:03:55.900245Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:55.900362Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:55.900440Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:242:2066] recipient: [11:20:2067] 2025-04-17T18:03:55.912219Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:03:55.912417Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:55.926701Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:55.926820Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:55.926890Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:55.926962Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:55.927090Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:55.927151Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:55.927233Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:55.927299Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:55.942686Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:55.942797Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:55.954770Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:55.954923Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T18:03:55.956199Z node 11 
:BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T18:03:55.956256Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T18:03:55.956437Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T18:03:55.956481Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T18:03:55.957276Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2025-04-17T18:03:55.957728Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-04-17T18:03:55.957779Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-04-17T18:03:55.957801Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1002 Path# /dev/disk3 2025-04-17T18:03:55.957825Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk1 2025-04-17T18:03:55.957855Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1001 Path# /dev/disk2 2025-04-17T18:03:55.957877Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1002 Path# /dev/disk3 2025-04-17T18:03:55.957900Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk1 2025-04-17T18:03:55.957924Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1001 Path# /dev/disk2 2025-04-17T18:03:55.957951Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1002 Path# /dev/disk3 2025-04-17T18:03:55.957980Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk1 2025-04-17T18:03:55.958021Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2025-04-17T18:03:55.958050Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1002 Path# /dev/disk3 2025-04-17T18:03:55.958072Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk1 2025-04-17T18:03:55.958095Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2025-04-17T18:03:55.958116Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 15:1002 Path# /dev/disk3 2025-04-17T18:03:55.958161Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2025-04-17T18:03:55.962314Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1001 Path# /dev/disk2 2025-04-17T18:03:55.962372Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1002 Path# /dev/disk3 2025-04-17T18:03:55.962408Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk1 2025-04-17T18:03:55.962433Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2025-04-17T18:03:55.962459Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1002 Path# /dev/disk3 2025-04-17T18:03:55.962487Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk1 2025-04-17T18:03:55.962514Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2025-04-17T18:03:55.962537Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2025-04-17T18:03:55.962560Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk1 2025-04-17T18:03:55.962595Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1001 Path# /dev/disk2 2025-04-17T18:03:55.962679Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2025-04-17T18:03:55.962710Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk1 2025-04-17T18:03:55.962736Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2025-04-17T18:03:55.962758Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1002 Path# /dev/disk3
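Both TEvControllerConfigRequest blocks above follow the same pattern: DefineHostConfig declares the drive layout for a class of hosts (here /dev/disk1, /dev/disk2 shared with the OS, and an SSD /dev/disk3), DefineBox binds ten hosts keyed by Fqdn/IcPort to that host config, and the controller's fitter then creates one PDisk per host-drive pair, which accounts for the thirty "Create new pdisk" notices after each request. The sketch below models only that cross product; the dataclasses merely mirror the protobuf fields visible in the log and are not the actual NKikimr types, and the IcPort-to-node mapping is an assumption taken from the first box.

# Sketch only: models the DefineHostConfig/DefineBox fitting seen in the log.
from dataclasses import dataclass

@dataclass
class Drive:
    path: str
    shared_with_os: bool = False
    type: str = "ROT"  # assumption: default drive type; the log only shows the SSD override

@dataclass
class Host:
    fqdn: str
    ic_port: int
    node_id: int

def fit_pdisks(hosts, drives, first_pdisk_id=1000):
    # One PDisk per (host, drive) pair, numbered from 1000 per node as in the log.
    for host in hosts:
        for offset, drive in enumerate(drives):
            yield f"PDiskId# {host.node_id}:{first_pdisk_id + offset} Path# {drive.path}"

drives = [Drive("/dev/disk1"),
          Drive("/dev/disk2", shared_with_os=True),
          Drive("/dev/disk3", type="SSD")]
hosts = [Host("::1", 12000 + n, node_id=n) for n in range(1, 11)]
for entry in fit_pdisks(hosts, drives):
    print("Create new pdisk", entry)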
>> TInterconnectTest::TestCrossConnect [GOOD]
>> TInterconnectTest::TestManyEventsWithReconnect
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD]
Test command err: 2025-04-17T18:03:49.162136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:49.162569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:49.162614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dec/r3tmp/tmpmiHmj6/pdisk_1.dat 2025-04-17T18:03:49.468661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.505037Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:49.546492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:49.546616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:49.558099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:49.642419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.680981Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:49.681980Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:49.682426Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:49.682672Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:49.730753Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:49.731478Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:49.731574Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:49.733182Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:49.733270Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:49.733340Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:49.733684Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:49.733806Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:49.733919Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:49.744609Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:49.780318Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:49.780536Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:49.780650Z node 1 :TX_DATASHARD DEBUG:
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:49.780691Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:49.780729Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:49.780765Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:49.781016Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.781080Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.781368Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:49.781462Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:49.781593Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:49.781642Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:49.781680Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:49.781719Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:49.781784Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:49.781820Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:49.781863Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:49.782009Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:49.782058Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:49.782111Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:49.782818Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:49.782869Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:49.782996Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:49.783225Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:49.783275Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:49.783383Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:49.783432Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:49.783493Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:49.783530Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
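The TX_DATASHARD trace around this point shows how a proposed transaction moves through the datashard's execution-unit pipeline: each unit (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, and later PlanQueue) reports a status such as Executed, DelayComplete, or "not ready", and the operation either advances to the next unit or is parked until its plan step arrives. A simplified model of that loop, using the unit names and statuses visible in this log rather than the real NKikimr scheduler:

# Rough model of the execution-unit loop traced above; not the actual datashard code.
from enum import Enum, auto

class Status(Enum):
    EXECUTED = auto()        # unit finished; advance to the next one
    DELAY_COMPLETE = auto()  # advance now, run the completion callback later
    NOT_READY = auto()       # park the operation until an external event (e.g. TEvPlanStep)

class Unit:
    def __init__(self, name, status=Status.EXECUTED):
        self.name, self.status = name, status
    def execute(self, op):
        print(f"Trying to execute {op} on unit {self.name} -> {self.status.name}")
        return self.status

def run_pipeline(op, units):
    # Advance op through the units, collecting the ones whose completion is deferred.
    deferred = []
    for unit in units:
        status = unit.execute(op)
        if status is Status.NOT_READY:
            return deferred, unit  # parked, e.g. waiting for the plan step
        if status is Status.DELAY_COMPLETE:
            deferred.append(unit)
    return deferred, None

units = [Unit("CheckSchemeTx"),
         Unit("StoreSchemeTx", Status.DELAY_COMPLETE),
         Unit("FinishPropose", Status.DELAY_COMPLETE),
         Unit("WaitForPlan", Status.NOT_READY)]
run_pipeline("0:281474976715657", units)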
2025-04-17T18:03:49.783576Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:49.783907Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:49.783949Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:49.783992Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:49.784026Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:49.784083Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:49.784115Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:49.784150Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:49.784187Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:49.784225Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:49.785707Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:49.785755Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:49.796450Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:49.796520Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:49.796558Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:49.796599Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:49.796670Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:49.948842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:49.948900Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:49.948947Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:49.949338Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:49.949381Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:49.949500Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:49.949544Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:49.949593Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:49.949629Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:49.954357Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:49.954427Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:49.955308Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.955349Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:49.955415Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:4 ... utionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2845], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 685 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 420 FinishTimeMs: 1744913036798 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 346 BuildCpuTimeUs: 74 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913036797 CreateTimeMs: 1744913036789 UpdateTimeMs: 1744913036798 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:56.799701Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2845] 2025-04-17T18:03:56.799747Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1048:2844], CA [2:1052:2848], 2025-04-17T18:03:56.799786Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1048:2844], CA [2:1052:2848], 2025-04-17T18:03:56.800323Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1048:2844], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 455 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 105 FinishTimeMs: 1744913036798 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 63 BuildCpuTimeUs: 42 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913036797 CreateTimeMs: 1744913036789 UpdateTimeMs: 1744913036799 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:56.800417Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2844] 2025-04-17T18:03:56.800477Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1052:2848], 2025-04-17T18:03:56.800509Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1052:2848], 2025-04-17T18:03:56.800940Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2846], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 482 DurationUs: 2000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 250 FinishTimeMs: 1744913036799 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 212 BuildCpuTimeUs: 38 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913036797 CreateTimeMs: 1744913036789 UpdateTimeMs: 1744913036799 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:56.801002Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2846] 2025-04-17T18:03:56.801045Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], CA [2:1051:2847], CA [2:1052:2848], 2025-04-17T18:03:56.801082Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. 
Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1051:2847], CA [2:1052:2848], 2025-04-17T18:03:56.801316Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2847], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 628 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 367 FinishTimeMs: 1744913036800 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 317 BuildCpuTimeUs: 50 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913036799 CreateTimeMs: 1744913036789 UpdateTimeMs: 1744913036800 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:56.801394Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2847] 2025-04-17T18:03:56.801438Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], CA [2:1052:2848], 2025-04-17T18:03:56.801476Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1052:2848], 2025-04-17T18:03:56.801683Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1052:2848], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 447 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 188 FinishTimeMs: 1744913036801 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 116 BuildCpuTimeUs: 72 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 CreateTimeMs: 1744913036790 UpdateTimeMs: 1744913036801 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:56.801735Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [2:1052:2848] 2025-04-17T18:03:56.801772Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], 2025-04-17T18:03:56.801803Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1053:2849], 2025-04-17T18:03:56.802040Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1053:2849], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1241 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 176 FinishTimeMs: 1744913036801 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 119 BuildCpuTimeUs: 57 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913036800 CreateTimeMs: 1744913036791 UpdateTimeMs: 1744913036801 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:56.802094Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1053:2849] 2025-04-17T18:03:56.802276Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:56.802343Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2826] TxId: 281474976715665. Ctx: { TraceId: 01js2eqx4h8yrf22fmzm8x7b3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ0MjdhNTQtOTA3MTc4NTEtYzhmYjBiYjAtY2Q4ODc5Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.004959s ReadRows: 2 ReadBytes: 16 ru: 3 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } }
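Each COMPUTE_STATE_FINISHED message above carries a per-task stats block (CpuTimeUs, InputRows/OutputRows, timestamps) followed by an executer-level resource summary. When digging through logs like this one, a small ad-hoc parser is usually enough to tabulate the tasks; the regex below is a sketch tuned to the flat "Tasks { ... }" blocks printed here, and nested sub-blocks such as "Tables { ... }" would need a real parser instead.

# Sketch: extract per-task counters from KQP_EXECUTER stats blocks like the ones above.
import re

TASK_RE = re.compile(r"Tasks \{ (.*?) \}")   # non-greedy: only handles flat blocks
FIELD_RE = re.compile(r"(\w+): (\S+)")

def task_stats(log_text):
    # Yields one {field: value} dict per "Tasks { ... }" block found in the text.
    for block in TASK_RE.findall(log_text):
        yield dict(FIELD_RE.findall(block))

sample = ("stats: { CpuTimeUs: 1241 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 "
          "CpuTimeUs: 176 InputRows: 2 OutputRows: 2 ResultRows: 2 } MaxMemoryUsage: 1048576 }")
for task in task_stats(sample):
    print(task["TaskId"], task["CpuTimeUs"], task["OutputRows"])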
>> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD]
>> DataShardOutOfOrder::UncommittedReadSetAck [GOOD]
|85.6%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|85.6%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD]
Test command err: 2025-04-17T18:03:48.722355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:48.722860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:48.722913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000def/r3tmp/tmpTdHxRZ/pdisk_1.dat 2025-04-17T18:03:49.038654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.070229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:49.108197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:49.108317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:49.119798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:49.200709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.494251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.748281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:829:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:49.748389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:838:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:49.748462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:49.753678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:03:49.916704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:843:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:03:50.012552Z node 1 :TX_PROXY ERROR: Actor# [1:903:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:50.281038Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eqpkj3xydjs412tah7myv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA5YWFmNmUtYmE3MjE0ZGItZWRlZGVmZTYtODNhMjBkY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:50.352281Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eqq4z0cpg4tqznn0hzhpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjVlMTJkYWMtOTVlYWI1OWYtNTk5OTNhNGItOWVkNDY0YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-17T18:03:51.097689Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqqtv6jnj4ntht6ym1mpj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI5Mjg0My1kMDBmNDNiMy1jNDg4NmNhNC1iZjlmNzNlMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:51.181325Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2eqqy0a8ayb5sxm6n41cxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAwMjM4OWYtMjNiYTJiODgtNjhkMTI5OS05ODgyZDlkNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-17T18:03:54.751508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:54.751760Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:54.751902Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000def/r3tmp/tmpQdVBoY/pdisk_1.dat 2025-04-17T18:03:54.991951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:55.015028Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:55.051362Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:55.051501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:55.062961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:55.144162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:55.408617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:55.662981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:55.663083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:55.663157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:55.668268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:03:55.824640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:03:55.861615Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:55.916477Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eqwcd928dynf029gkqyvg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWEyM2FiZDctZGQzMjczZjQtYWZmZGU4ZTgtNTI5Y2ViMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:55.985772Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eqwmyc35jxa9v5vprcyyj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODU3ODI2NGItMmQ1YmJhNDEtMTYzYzgxZjQtMWFiODQ4YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:56.569951Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2eqwx4dk5wyf3egvndcf9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODlkMzVlYjgtZTE5NGZkOGYtMTljMzNlZC1jNTNhMjc5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-17T18:03:56.906723Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqxj3cype2cjcdbnchfcq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjQ1NzE5YjktMzI1ZTNjOGMtMjIwMThiNjMtNzIzYzIwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:56.993307Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqxksfqzdcbt53s4p4gbk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODlkMzVlYjgtZTE5NGZkOGYtMTljMzNlZC1jNTNhMjc5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:57.065093Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2eqxp49w1e00kdw3c8av8f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODlkMzVlYjgtZTE5NGZkOGYtMTljMzNlZC1jNTNhMjc5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:57.110817Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODlkMzVlYjgtZTE5NGZkOGYtMTljMzNlZC1jNTNhMjc5Zg==, ActorId: [2:975:2779], ActorState: ExecuteState, TraceId: 01js2eqxrc191x71xd5eh3kg1c, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
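The closing KQP_SESSION warning, "tx has deferred effects, but locks are broken", is the optimistic-locking failure this test provokes: reads performed under a snapshot record locks, writes are buffered as deferred effects, and at commit time the shard validates that no concurrent transaction has invalidated those locks before applying the effects. A toy model of that validation step, illustrative only and not YDB's implementation:

# Illustrative model of optimistic lock validation; all names here are hypothetical.
class Shard:
    def __init__(self):
        self.data, self.version = {}, 0

    def read(self, key):
        # A transactional read records the shard version it observed (its "lock").
        return self.data.get(key), self.version

    def write(self, key, value):
        self.data[key] = value
        self.version += 1            # any committed write invalidates older locks

    def commit(self, locks, effects):
        # Deferred effects apply only if every recorded lock is still valid.
        if any(observed != self.version for observed in locks):
            raise RuntimeError("tx has deferred effects, but locks are broken")
        for key, value in effects:
            self.write(key, value)

shard = Shard()
shard.write("k", 1)
_, lock = shard.read("k")            # tx A reads under a snapshot
shard.write("k", 2)                  # tx B commits in between
try:
    shard.commit([lock], [("k", 3)]) # tx A's commit fails lock validation
except RuntimeError as err:
    print(err)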
>> TInterconnectTest::TestManyEventsWithReconnect [GOOD]
>> TInterconnectTest::TestEventWithPayloadSerialization
>> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD]
>> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD]
>> TInterconnectTest::TestEventWithPayloadSerialization [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD]
Test command err: 2025-04-17T18:03:53.591417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:53.592160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:53.592309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:03:53.593123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:53.593647Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:53.593691Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000da7/r3tmp/tmpdqNvSH/pdisk_1.dat 2025-04-17T18:03:53.938987Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:54.094366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:54.188235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:54.188348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:54.192063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:54.192138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:54.205091Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:03:54.205786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:54.206133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:54.471098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:54.537859Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:54.543920Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:54.544348Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-04-17T18:03:54.544566Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:54.594157Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:54.597405Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:54.597500Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:54.599054Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:54.599120Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:54.599181Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:54.599488Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:54.599594Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:54.599662Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1302:2390] in generation 1 2025-04-17T18:03:54.603352Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:54.630595Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:54.630777Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:54.630887Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1308:2406] 2025-04-17T18:03:54.630924Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:54.630956Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:54.630991Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:54.631166Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:54.631213Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:54.631502Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:54.631614Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:54.631709Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:54.631759Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:54.631810Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:54.631841Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:54.631870Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:54.631922Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:54.631960Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:54.696718Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1312:2407], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:54.696770Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:54.696811Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1266:2774], serverId# [2:1312:2407], sessionId# [0:0:0] 2025-04-17T18:03:54.697118Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:838:2465], Recipient [2:1312:2407] 2025-04-17T18:03:54.697176Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:54.697285Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:54.697456Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:54.697498Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:54.697591Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:54.697649Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:54.697689Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:54.697719Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:03:54.697761Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:54.698004Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:54.698031Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:54.698060Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:54.698088Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:54.698138Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:54.698163Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:54.698216Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:54.698263Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:54.698292Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:54.701690Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1313:2408], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:54.701752Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:54.701916Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:54.701953Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:54.701982Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:54.702029Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:54.702095Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:55.046913Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1345:2417], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:55.046958Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:55.047004Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1343:2796], serverId# [2:1345:2417], sessionId# [0:0:0] 2025-04-17T18:03:55.048241Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1057:2618], Recipient [2:1345:2417] 2025-04-17T18:03:55.048290Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:55.048413Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:55.048451Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... :2453], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:57.043184Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1716:2453], 0} finished in read 2025-04-17T18:03:57.043238Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:03:57.043267Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:03:57.043295Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:03:57.043322Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:57.043364Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:03:57.043385Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:57.043438Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-17T18:03:57.043478Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:03:57.043561Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:03:57.044598Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1716:2453], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:57.044679Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-17T18:03:57.151034Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqxqq077n4qwge29zky14, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA3YTFkYTItNWFlMDJiMTQtYTUwYmM0MjAtZGRiOGZjNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:03:57.153138Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1734:2454], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-17T18:03:57.153273Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:03:57.153325Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-17T18:03:57.153386Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T18:03:57.153415Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:03:57.153441Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:57.153468Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:57.153502Z node 2 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-17T18:03:57.153531Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T18:03:57.153549Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:57.153564Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:03:57.153580Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:03:57.153663Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:03:57.153828Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:03:57.153868Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-17T18:03:57.153903Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1734:2454], 0} after executionsCount# 1 2025-04-17T18:03:57.153939Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1734:2454], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:57.154010Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1734:2454], 0} finished in read 2025-04-17T18:03:57.154050Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T18:03:57.154067Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:03:57.154085Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 
2025-04-17T18:03:57.154101Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:57.154130Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-17T18:03:57.154146Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:57.154166Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-17T18:03:57.154209Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:03:57.154273Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:03:57.155004Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1734:2454], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:57.155045Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-17T18:03:57.288829Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2eqxveaxbyf6mtvqw8khe1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNmYzE1YzQtYTkyNDA0NTctZGYzNTUwOTItMzJmNDIyMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:57.290940Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1758:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-17T18:03:57.291194Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:03:57.291286Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-17T18:03:57.291351Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T18:03:57.291386Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:03:57.291431Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:57.291459Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:57.291497Z node 2 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-17T18:03:57.291526Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T18:03:57.291546Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:57.291579Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:03:57.291605Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:03:57.291706Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } 
LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:03:57.291935Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:03:57.291973Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-17T18:03:57.292008Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1758:2455], 0} after executionsCount# 1 2025-04-17T18:03:57.292055Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1758:2455], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:03:57.292126Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1758:2455], 0} finished in read 2025-04-17T18:03:57.292188Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T18:03:57.292206Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:03:57.292226Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:03:57.292265Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:57.292299Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-17T18:03:57.292324Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:57.292347Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-17T18:03:57.292375Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:03:57.292441Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:03:57.293152Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1758:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:03:57.293198Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T18:03:57.293768Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:240:2131], Recipient [2:1282:2390]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> KqpScanLogs::WideCombine-EnabledLogs >> KqpScanSpilling::SelfJoin >> KqpScanLogs::GraceJoin-EnabledLogs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: 2025-04-17T18:03:52.209950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:52.210835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:52.211035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:03:52.211968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:52.212569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:52.212612Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ddf/r3tmp/tmpinblhY/pdisk_1.dat 2025-04-17T18:03:52.555556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:52.725994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:52.819278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:52.819418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:52.824206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:52.824307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:52.837456Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:03:52.838261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:52.838693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:53.107364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:53.178605Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:53.182864Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:53.183441Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-04-17T18:03:53.183693Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:53.235732Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:53.239684Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:53.239798Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:53.241704Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:53.241800Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:53.241875Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:53.242261Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:53.242401Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:53.242512Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1302:2390] in generation 1 2025-04-17T18:03:53.246978Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:53.268907Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:53.269109Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:53.269237Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1308:2406] 2025-04-17T18:03:53.269277Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:53.269313Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:53.269351Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:53.269544Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:53.269593Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:53.269826Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:53.269915Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:53.269997Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:53.270037Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:53.270077Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:53.270116Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:53.270142Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:53.270173Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:53.270237Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:53.334634Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1312:2407], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:53.334684Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:53.334727Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1266:2774], serverId# [2:1312:2407], sessionId# [0:0:0] 2025-04-17T18:03:53.334954Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:838:2465], Recipient [2:1312:2407] 2025-04-17T18:03:53.335002Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:53.335116Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:53.335482Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:53.335543Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:53.335679Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:53.335762Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:53.335821Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:53.335866Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:03:53.335924Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:53.336256Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:53.336302Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:53.336337Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:53.336374Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:53.336431Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:53.336462Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:53.336493Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:53.336556Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:53.336590Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:53.340448Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1313:2408], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:53.340506Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:53.340741Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:53.340797Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:53.340832Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:53.340889Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:53.340968Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:53.690664Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1345:2417], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:53.690709Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:53.690761Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1343:2796], serverId# [2:1345:2417], sessionId# [0:0:0] 2025-04-17T18:03:53.692074Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1057:2618], Recipient [2:1345:2417] 2025-04-17T18:03:53.692117Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:53.692242Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:53.692280Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... RD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:57.230643Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976715669 2025-04-17T18:03:57.265075Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2339:2586], Recipient [2:2211:2543]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:57.265144Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:57.265187Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:2205:3335], serverId# [2:2339:2586], sessionId# [0:0:0] 2025-04-17T18:03:57.266012Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2340:2587], Recipient [2:2211:2543]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:57.266054Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:57.266333Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [2:2203:2541], serverId# [2:2340:2587], sessionId# [0:0:0] 2025-04-17T18:03:57.266427Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2083:2513], Recipient [2:2211:2543]: {TEvReadSet step# 2543 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T18:03:57.266449Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.266475Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715667 2025-04-17T18:03:57.266540Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2543 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T18:03:57.266953Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:03:57.267132Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2083:2513], Recipient [2:2211:2543]: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T18:03:57.267156Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.267177Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-04-17T18:03:57.267228Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 
SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T18:03:57.267413Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2083:2513], Recipient [2:2211:2543]: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-17T18:03:57.267433Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.267455Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-04-17T18:03:57.267481Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-17T18:03:57.267561Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2143:3288], Recipient [2:2339:2586] 2025-04-17T18:03:57.267581Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.267602Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715668 2025-04-17T18:03:57.267630Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2544 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T18:03:57.267826Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2143:3288], Recipient [2:2339:2586] 2025-04-17T18:03:57.267848Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.267869Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-04-17T18:03:57.267936Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2143:3288], Recipient [2:2339:2586] 2025-04-17T18:03:57.267952Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.267973Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-04-17T18:03:57.268173Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:03:57.268680Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T18:03:57.268792Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-17T18:03:57.268873Z node 2 :TX_DATASHARD DEBUG: Complete volatile write [2694 : 281474976715669] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2295:3353] 2025-04-17T18:03:57.269110Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:03:57.273494Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:03:57.273582Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2211:2543], Recipient [2:2083:2513]: {TEvReadSet step# 2543 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 1} 2025-04-17T18:03:57.273608Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:57.273636Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715667 2025-04-17T18:03:57.273683Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:03:57.273729Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T18:03:57.273766Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:03:57.274815Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2211:2543], Recipient [2:2083:2513]: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-17T18:03:57.274857Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.274888Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976715669 2025-04-17T18:03:57.274982Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-17T18:03:57.275036Z node 2 :TX_DATASHARD NOTICE: Outdated readset for 2694:281474976715669 at 72075186224037888 2025-04-17T18:03:57.275088Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T18:03:57.276345Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:03:57.276437Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:03:57.276471Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T18:03:57.276546Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:03:57.277203Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2211:2543], Recipient [2:2083:2513]: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2025-04-17T18:03:57.277244Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:57.277280Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 
2025-04-17T18:03:57.277532Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2211:2543], Recipient [1:2143:3288] 2025-04-17T18:03:57.277569Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:57.277606Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715668 2025-04-17T18:03:57.278079Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2211:2543], Recipient [1:2143:3288] 2025-04-17T18:03:57.278117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:03:57.278157Z node 1 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976715669 2025-04-17T18:03:57.278241Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2694 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-17T18:03:57.278278Z node 1 :TX_DATASHARD NOTICE: Outdated readset for 2694:281474976715669 at 72075186224037889 2025-04-17T18:03:57.278324Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T18:03:57.278461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2211:2543], Recipient [1:2143:3288] 2025-04-17T18:03:57.278494Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:03:57.278529Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] Test command err: 2025-04-17T18:03:50.128984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:50.129517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:50.129581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dcd/r3tmp/tmparSR9f/pdisk_1.dat 2025-04-17T18:03:50.509011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:50.545287Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:50.584208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:50.584351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:50.595959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:50.676495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:50.992191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-17T18:03:51.248769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:829:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:51.248881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:838:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:51.248957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:51.254092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:03:51.411189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:843:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:03:51.483408Z node 1 :TX_PROXY ERROR: Actor# [1:903:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:51.759691Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eqr2ee9wv12w3yv8g96ek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjlhN2NmYmEtMzNjMWFhZWItOWJkMWNhY2EtOTE4Nzg5NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:51.832292Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eqrk6bjk9vmh74ghsm921, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkyMDRkZmMtNjRiOWQ0MjItMzg1NGI1M2MtOTgzZmRlNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-17T18:03:52.209965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2eqrnnbyry7pf10en5sjw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM2MmRiZTAtNWY0ZDlkMDgtZDYwOGIyODMtY2Q4YzM0YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-17T18:03:52.291977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqs10agk31pxvywn615me, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM2MmRiZTAtNWY0ZDlkMDgtZDYwOGIyODMtY2Q4YzM0YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet 2025-04-17T18:03:52.461573Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1031:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:967:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT 2025-04-17T18:03:52.701538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqsca6x697sbav7abbbjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThkZjQ3NDgtMjMwZTliZDUtMTM0NzFhNWUtODMxZDUyYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2025-04-17T18:03:55.695321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:55.695703Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:55.695892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dcd/r3tmp/tmpvt2j1S/pdisk_1.dat 2025-04-17T18:03:55.995034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:56.025606Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:56.062159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:56.062308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:56.073710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:56.153700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:56.411065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-17T18:03:56.675862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:56.675941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:56.676010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:56.679834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:03:56.831003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:03:56.866479Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:56.929178Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eqxc1cgwxwg2w1mkzk300, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyZTIxZjAtZTMwMmUzYWQtNzMzYzBiNjctNDM2YjJlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:57.005501Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eqxmm9g79r2h1187wvvah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDFhMDgyNGItY2UzYmE4Y2MtNDBkNzBkYWMtMTdmN2QzOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-17T18:03:57.360311Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2eqxq1dadtj0y6fn2x87sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTU3MGIyNTUtZGVlYTk5OWQtZWIyZDA2ODgtNTA5YWVlMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-17T18:03:57.469552Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqy1x48b3j949pmxf7t2r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTU3MGIyNTUtZGVlYTk5OWQtZWIyZDA2ODgtNTA5YWVlMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2025-04-17T18:03:57.911069Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqye43hhwkd6vc1gznay0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQxZDljOS1kZDZiODg2OC02MDZhOGU5NS0yMWE4YmE5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } >> KqpScanSpilling::SelfJoinQueryService >> KqpScanSpilling::SpillingPragmaParseError >> KqpScanLogs::WideCombine+EnabledLogs >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> KqpScanSpilling::HandleErrorsCorrectly >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 
16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2025-04-17T18:03:51.440054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:51.440578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:51.440641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d95/r3tmp/tmpqlOp1G/pdisk_1.dat 2025-04-17T18:03:51.781314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:51.826689Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:51.865548Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:03:51.866325Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:03:51.866504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:51.866622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:51.878061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:51.956493Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:03:51.956569Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:03:51.956726Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:03:52.084970Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:03:52.085066Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:03:52.085642Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:03:52.085754Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:03:52.086066Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:03:52.086317Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:03:52.086415Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:03:52.088289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:52.088766Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:03:52.089401Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:03:52.089481Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:03:52.131424Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:52.132766Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:52.133209Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-04-17T18:03:52.133496Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:52.143848Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:52.181052Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:52.182485Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:52.182867Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-04-17T18:03:52.183049Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:52.191448Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:52.191876Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:52.191999Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:52.193580Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:52.193656Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:52.193723Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:52.194072Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:52.194261Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:52.194329Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-04-17T18:03:52.194719Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:52.194803Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:52.196002Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:03:52.196075Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-04-17T18:03:52.196126Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:03:52.196383Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:52.196491Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:52.196540Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-04-17T18:03:52.207259Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:52.243580Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:52.243754Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:52.243852Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-04-17T18:03:52.243887Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:52.243921Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:52.243968Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:52.244285Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.244342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.244417Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:52.244449Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:03:52.244501Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:52.244550Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-04-17T18:03:52.244590Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:03:52.244633Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:03:52.244669Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:03:52.244937Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.244976Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.245209Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:52.245293Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:52.245376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:52.245417Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:52.245450Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:52.245481Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-04-17T18:03:52.245513Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:52.245546Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:52.245586Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:52.245661Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:03:52.245722Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:03:52.246203Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:690:2584], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:52.246241Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:52.246286Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2569], serverId# [1:690:2584], sessionId# [0:0:0] 2025-04-17T18:03:52.246331Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... tYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-17T18:03:58.670771Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [2:976:2764] 2025-04-17T18:03:58.670802Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [2:976:2764], channels: 0 2025-04-17T18:03:58.670843Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:976:2764], 2025-04-17T18:03:58.670880Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:976:2764], 2025-04-17T18:03:58.670928Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-17T18:03:58.671816Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. 
Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:976:2764], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T18:03:58.671883Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:976:2764], 2025-04-17T18:03:58.671946Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:976:2764], 2025-04-17T18:03:58.672623Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:976:2764], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 736 Tasks { TaskId: 1 CpuTimeUs: 269 FinishTimeMs: 1744913038672 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 19 BuildCpuTimeUs: 250 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 CreateTimeMs: 1744913038671 UpdateTimeMs: 1744913038672 } MaxMemoryUsage: 1048576 } 2025-04-17T18:03:58.672724Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:976:2764] 2025-04-17T18:03:58.672798Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[2:972:2764] 2025-04-17T18:03:58.672847Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000736s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:03:58.673292Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:981:2780], Recipient [2:931:2747]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:58.673327Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:58.673362Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:980:2779], serverId# [2:981:2780], sessionId# [0:0:0] 2025-04-17T18:03:58.673454Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:979:2764], Recipient [2:931:2747]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-17T18:03:58.673472Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-17T18:03:58.673555Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:931:2747], Recipient [2:931:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T18:03:58.673592Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T18:03:58.673644Z node 2 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-17T18:03:58.673743Z node 2 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-17T18:03:58.673809Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-17T18:03:58.673849Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-17T18:03:58.673919Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckWrite 2025-04-17T18:03:58.673962Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:03:58.673987Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-04-17T18:03:58.674021Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:58.674048Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:03:58.674079Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-17T18:03:58.674130Z node 2 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-17T18:03:58.674158Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:03:58.674191Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:03:58.674212Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T18:03:58.674259Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-04-17T18:03:58.674292Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:03:58.674353Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-17T18:03:58.674434Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-04-17T18:03:58.674467Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T18:03:58.674507Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:58.674525Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T18:03:58.674549Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T18:03:58.674574Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:03:58.674644Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:58.674667Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T18:03:58.674690Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:03:58.674711Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:03:58.674761Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:03:58.674777Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:03:58.674799Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-17T18:03:58.685814Z node 2 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T18:03:58.685898Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:03:58.685956Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T18:03:58.686092Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:58.686679Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:03:58.686775Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01js2eqz9763rkhmdgzz8bgbse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1MTBlMjItYmRhOTU3NmQtNTQ3MzBiYjQtZDE2YmMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] Test command err: 2025-04-17T18:03:50.213767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:50.214329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:50.214388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dcb/r3tmp/tmpuwZIjr/pdisk_1.dat 2025-04-17T18:03:50.511138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:50.538309Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:50.575904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:50.576043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:50.587267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:50.667712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:50.705263Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:50.706338Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:50.706708Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:50.706964Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:50.754293Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:50.754813Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:50.754877Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:50.756099Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:50.756206Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:50.756258Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:50.756489Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:50.756579Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:50.756628Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:50.767342Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:50.790524Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:50.790717Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:50.790827Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:50.790866Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:50.790898Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:50.790933Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:50.791133Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.791195Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.791454Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:50.791541Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:50.791680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:50.791719Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:50.791762Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:50.791796Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:50.791829Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:50.791860Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:50.791899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:50.792018Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.792059Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.792099Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:50.792445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:50.792508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:50.792630Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:50.792829Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:50.792874Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:50.792990Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:50.793045Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:50.793083Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:50.793119Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:50.793152Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:50.793407Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:50.793444Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:50.793476Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:50.793511Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:50.793562Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:50.793592Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:50.793625Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:50.793656Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:50.793699Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:50.795080Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:50.795124Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:50.805773Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:50.805833Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:50.805867Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:50.805904Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:50.805965Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:50.957357Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.957415Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:50.957463Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:50.957866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:50.957903Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:50.958026Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:50.958070Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:50.958137Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:50.958190Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:50.963065Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:50.963136Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:50.964015Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.964072Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:50.964128Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:5 ... t Ack from dst 72075186224037891 for split OpId 281474976715664 2025-04-17T18:03:58.272430Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1040:2820], Recipient [2:1040:2820]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:58.272465Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:58.272641Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1178:2915], Recipient [2:666:2570]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037891 ClientId: [2:1178:2915] ServerId: [2:1182:2919] } 2025-04-17T18:03:58.272672Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:03:58.272813Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-04-17T18:03:58.272950Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-04-17T18:03:58.273034Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:03:58.273092Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-17T18:03:58.273139Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1196:2933] 2025-04-17T18:03:58.273164Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-04-17T18:03:58.273196Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-04-17T18:03:58.273219Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-17T18:03:58.273408Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1049:2824], Recipient [2:755:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-04-17T18:03:58.273457Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-04-17T18:03:58.273735Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1049:2824], Recipient [2:1049:2824]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:58.273767Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:58.273948Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1181:2918], Recipient [2:755:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1181:2918] ServerId: [2:1185:2922] } 2025-04-17T18:03:58.274000Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:03:58.274219Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1040:2820]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 2000} 2025-04-17T18:03:58.274254Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T18:03:58.274281Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-04-17T18:03:58.274309Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:03:58.274483Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-17T18:03:58.274517Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:58.274545Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2025-04-17T18:03:58.274572Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2025-04-17T18:03:58.274596Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2025-04-17T18:03:58.274619Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-17T18:03:58.274654Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-17T18:03:58.274794Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1182:2919], Recipient [2:1040:2820]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:03:58.274824Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:03:58.274857Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:1178:2915], serverId# [2:1182:2919], sessionId# [0:0:0] 2025-04-17T18:03:58.274891Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-17T18:03:58.274913Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:58.274934Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-04-17T18:03:58.274956Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-04-17T18:03:58.274978Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-04-17T18:03:58.274997Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-04-17T18:03:58.275024Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-17T18:03:58.275123Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1185:2922], Recipient [2:1049:2824]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:03:58.275149Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 
2025-04-17T18:03:58.275176Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1181:2918], serverId# [2:1185:2922], sessionId# [0:0:0] 2025-04-17T18:03:58.275380Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1049:2824]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-04-17T18:03:58.275426Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T18:03:58.275451Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-04-17T18:03:58.275476Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-17T18:03:58.275785Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1040:2820]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-17T18:03:58.275827Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T18:03:58.275853Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 2000 2025-04-17T18:03:58.275888Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-17T18:03:58.275930Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-17T18:03:58.276204Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1049:2824]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-17T18:03:58.276234Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T18:03:58.276261Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-04-17T18:03:58.276294Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-17T18:03:58.276334Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-17T18:03:58.287086Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 ack split to schemeshard 281474976715664 2025-04-17T18:03:58.287786Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-04-17T18:03:58.290559Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:409:2404], Recipient [2:671:2572] 2025-04-17T18:03:58.290639Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2025-04-17T18:03:58.292749Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:409:2404], Recipient [2:763:2638] 2025-04-17T18:03:58.292795Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-17T18:03:58.294210Z node 2 :TX_DATASHARD DEBUG: 
72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2025-04-17T18:03:58.294274Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:03:58.294842Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:657:2564], Recipient [2:666:2570]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:03:58.295067Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-04-17T18:03:58.295100Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:03:58.295486Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:755:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:03:58.676995Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:994:2681], Recipient [2:666:2570]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 281474976715663 TxMode: MODE_VOLATILE_PREPARE Locks { SendingShards: 72075186224037888 SendingShards: 72075186224037889 ReceivingShards: 72075186224037888 ReceivingShards: 72075186224037889 Op: Commit } 2025-04-17T18:03:58.677055Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-17T18:03:58.677156Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=281474976715663; 2025-04-17T18:03:58.677225Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-17T18:03:58.677671Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-04-17T18:03:58.678075Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> KqpScanLogs::GraceJoin+EnabledLogs |85.6%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.6%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpLimits::QSReplySize-useSink [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> KqpLimits::CancelAfterRwTx-useSink [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:201:2076] 2025-04-17T18:03:53.599847Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:53.615685Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:53.622334Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T18:03:53.625200Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:53.626083Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T18:03:53.626621Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:53.626671Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:53.626925Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T18:03:53.639220Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T18:03:53.639346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T18:03:53.641618Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T18:03:53.641760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.641886Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.641966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-04-17T18:03:53.659459Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:03:53.659578Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:53.670356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:53.670498Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:53.670580Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:53.670654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:53.670749Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:53.670796Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:53.670834Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:53.670876Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:53.681502Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:53.681628Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:53.692305Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:53.692438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T18:03:53.696772Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T18:03:53.696854Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T18:03:53.697081Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T18:03:53.698002Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T18:03:53.714297Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-17T18:03:53.727777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-17T18:03:53.728557Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:195:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:195:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:195:2076] 2025-04-17T18:03:55.865688Z node 11 
:BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:55.866588Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:55.866783Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T18:03:55.867955Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:55.868395Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T18:03:55.868879Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:55.868910Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T18:03:55.869082Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T18:03:55.877934Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T18:03:55.878071Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T18:03:55.878167Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T18:03:55.878291Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:55.878395Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:55.878472Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-04-17T18:03:55.889620Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:03:55.889757Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:55.900489Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:55.900617Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:55.900718Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:55.900813Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:55.900896Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:55.900932Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} 
Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:55.900954Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:55.900990Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:55.912849Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:55.912976Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:55.923695Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:55.923835Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T18:03:55.925084Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T18:03:55.925141Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T18:03:55.925411Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T18:03:55.925470Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T18:03:55.926084Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-17T18:03:55.927193Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# ... 
ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-17T18:03:58.013915Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-17T18:03:58.014406Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-17T18:03:58.014812Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-17T18:03:58.015208Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-17T18:03:58.015884Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-17T18:03:58.016425Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-17T18:03:58.016863Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-17T18:03:58.017326Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-17T18:03:58.017786Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-17T18:03:58.018353Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-04-17T18:03:58.018953Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-17T18:03:58.019668Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-17T18:03:58.020268Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-17T18:03:58.020854Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:194:2076] Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:217:2066] recipient: [31:194:2076] 2025-04-17T18:04:00.124599Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T18:04:00.125609Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T18:04:00.125841Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T18:04:00.127157Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-04-17T18:04:00.127594Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T18:04:00.128208Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T18:04:00.128241Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T18:04:00.128460Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T18:04:00.139412Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T18:04:00.139534Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T18:04:00.139664Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T18:04:00.139778Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:04:00.139871Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:04:00.139935Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:239:2066] recipient: [31:20:2067] 2025-04-17T18:04:00.151367Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:04:00.151572Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:04:00.162424Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:04:00.162581Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:04:00.162671Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:04:00.162753Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:04:00.162872Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:04:00.162919Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:04:00.162955Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:04:00.163005Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:04:00.174788Z node 31 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:04:00.174943Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:04:00.186331Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:04:00.186495Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T18:04:00.187806Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T18:04:00.187857Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T18:04:00.188055Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T18:04:00.188101Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T18:04:00.188690Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-04-17T18:04:00.189826Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-04-17T18:04:00.190472Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-04-17T18:04:00.191134Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-04-17T18:04:00.191754Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-04-17T18:04:00.192334Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-17T18:04:00.193006Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-17T18:04:00.193620Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-17T18:04:00.194226Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-17T18:04:00.194837Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-17T18:04:00.195576Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-17T18:04:00.196307Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-17T18:04:00.196973Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest 
Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-17T18:04:00.197610Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-17T18:04:00.198323Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-17T18:04:00.198977Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-04-17T18:04:00.199699Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-17T18:04:00.200419Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-17T18:04:00.201095Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-17T18:04:00.201902Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD]
Test command err:
2025-04-17T18:03:51.463340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:51.463854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:51.463902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d9a/r3tmp/tmp5K7YZN/pdisk_1.dat 2025-04-17T18:03:51.801110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:51.831479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:51.868519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:51.868644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:51.880009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:51.960738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:51.998041Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:51.999038Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:51.999463Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:03:51.999685Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:52.044741Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:52.045414Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:52.045507Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:52.047050Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:03:52.047156Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:03:52.047214Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:03:52.047548Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:52.047672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:52.047746Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:03:52.058482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:52.082879Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:03:52.083034Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:52.083120Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:03:52.083146Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:03:52.083169Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:03:52.083192Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:52.083334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.083375Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.083643Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:03:52.083732Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:03:52.083874Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:52.083913Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:52.083950Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:03:52.083981Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:03:52.084009Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:03:52.084038Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:03:52.084077Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:03:52.084202Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:52.084224Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:52.084254Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:03:52.084564Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:03:52.084600Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:52.084723Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:03:52.085017Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:03:52.085062Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:03:52.085155Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:03:52.085206Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:03:52.085248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:03:52.085282Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-17T18:03:52.085316Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:52.085597Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:52.085635Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:03:52.085663Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:03:52.085688Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:52.085728Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:03:52.085751Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:03:52.085774Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:03:52.085812Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:52.085838Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:52.087053Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:03:52.087097Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:03:52.097755Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:03:52.097821Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:03:52.097857Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:03:52.097896Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:03:52.097960Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:52.247133Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:52.247182Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:52.247220Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:03:52.247617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:03:52.247664Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:52.247775Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:03:52.247818Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:03:52.247862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:03:52.247895Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:03:52.252178Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:03:52.252240Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:03:52.253079Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.253119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:52.253182Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:03:5 ... Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-17T18:04:01.093622Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-17T18:04:01.093811Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715670. Shard resolve complete, resolved shards: 1 2025-04-17T18:04:01.093887Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-17T18:04:01.093969Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2025-04-17T18:04:01.094038Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:01.094097Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:04:01.094466Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Collect channels updates for task: 1 at actor [2:1277:2993] 2025-04-17T18:04:01.094547Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [2:1277:2993], channels: 1 2025-04-17T18:04:01.094619Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:04:01.094691Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1277:2993], 2025-04-17T18:04:01.094770Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1277:2993], 2025-04-17T18:04:01.094844Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-17T18:04:01.095612Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1277:2993], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T18:04:01.095684Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1277:2993], 2025-04-17T18:04:01.095763Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1277:2993], 2025-04-17T18:04:01.096047Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1279:2993], Recipient [2:1203:2949]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-17T18:04:01.096225Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:04:01.096303Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4015/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v4015/18446744073709551615 2025-04-17T18:04:01.096350Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-04-17T18:04:01.096432Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-17T18:04:01.096517Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:04:01.096562Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:04:01.096614Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:04:01.096659Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:04:01.096709Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-17T18:04:01.096752Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:04:01.096781Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:04:01.096806Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:04:01.096831Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:04:01.096936Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-17T18:04:01.097133Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1279:2993], 0} after executionsCount# 1 2025-04-17T18:04:01.097195Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1279:2993], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:04:01.097275Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1279:2993], 0} finished in read 2025-04-17T18:04:01.097345Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:04:01.097399Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:04:01.097424Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:04:01.097454Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2025-04-17T18:04:01.097500Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:04:01.097535Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:04:01.097567Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-17T18:04:01.097614Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:04:01.098597Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1279:2993], Recipient [2:1203:2949]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:04:01.098660Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T18:04:01.099331Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1277:2993], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 930 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 180 FinishTimeMs: 1744913041098 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 79 BuildCpuTimeUs: 101 HostName: "ghrun-gkdwnlcfnq" NodeId: 2 StartTimeMs: 1744913041097 CreateTimeMs: 1744913041095 UpdateTimeMs: 1744913041098 } MaxMemoryUsage: 1048576 } 2025-04-17T18:04:01.099494Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1277:2993] 2025-04-17T18:04:01.099677Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:04:01.099771Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01js2er1k82r73deh4jmecse3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyNjlhM2UtNGI1NWE1MWMtYmE0OWM1YmYtZTY5MzY5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000930s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1
{ items { uint32_value: 7 } items { uint32_value: 4 } }
>> THealthCheckTest::Issues100GroupsListing
>> THealthCheckTest::ShardsLimit999
>> THealthCheckTest::Basic
>> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus
>> THealthCheckTest::OneIssueListing
>> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus
>> THealthCheckTest::Issues100Groups100VCardListing
>> THealthCheckTest::StaticGroupIssue
>> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus
>> THealthCheckTest::StorageLimit95
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD]
Test command err:
2025-04-17T18:03:53.336434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:53.336947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:53.336996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d8e/r3tmp/tmpQDIlkB/pdisk_1.dat 2025-04-17T18:03:53.688213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:53.717348Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:53.755289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:53.755449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:53.767121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:03:53.849790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:03:54.220116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:03:54.475232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:926:2736], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:54.475352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:936:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:54.475432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:54.480194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:03:54.633746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:940:2744], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:03:54.697001Z node 1 :TX_PROXY ERROR: Actor# [1:1001:2786] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:54.949952Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eqv79eq0ha6z2p6b3c6nr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdjMWQwZjYtNWViZTg1YjUtNzkwZmVmYmEtNGMxYmU2ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:55.013474Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eqvpxa3qexz8vy6a9w2tm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg5MTQ1ZWQtNzYxNjhmYWEtMzI1NDU1NGQtMjJkNGQxODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:03:55.370416Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2eqvs23hdd8sgyknptx57a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTczNGEzOTktYWUzMDNjMzMtZDE2MDhlNjItZjU4MDk4MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-04-17T18:03:55.453440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2eqw3r4k9x9yc5h4qxfxt0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTczNGEzOTktYWUzMDNjMzMtZDE2MDhlNjItZjU4MDk4MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-04-17T18:03:55.529107Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2eqw6f8feej7s2gwav09e9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWZlZDc5ODgtZjA2NzNiNTItZjIyYzhkMC0xYWMxNjRkOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-04-17T18:03:55.539704Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2025-04-17T18:03:55.549985Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1198:2840], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1079:2840]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1198:2840]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2025-04-17T18:03:55.550554Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1191:2840], SessionActorId: [1:1079:2840], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1079:2840]. isRollback=0 2025-04-17T18:03:55.552824Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWZlZDc5ODgtZjA2NzNiNTItZjIyYzhkMC0xYWMxNjRkOQ==, ActorId: [1:1079:2840], ActorState: ExecuteState, TraceId: 01js2eqw6f8feej7s2gwav09e9, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1192:2840] from: [1:1191:2840] 2025-04-17T18:03:55.553188Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1192:2840] TxId: 281474976715665. Ctx: { TraceId: 01js2eqw6f8feej7s2gwav09e9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWZlZDc5ODgtZjA2NzNiNTItZjIyYzhkMC0xYWMxNjRkOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } 2025-04-17T18:03:55.553583Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWZlZDc5ODgtZjA2NzNiNTItZjIyYzhkMC0xYWMxNjRkOQ==, ActorId: [1:1079:2840], ActorState: ExecuteState, TraceId: 01js2eqw6f8feej7s2gwav09e9, Create QueryResponse for error on request, msg: 2025-04-17T18:03:55.554008Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1164:2842], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1081:2842]TEvDeliveryProblem was received from tablet: 72075186224037889 2025-04-17T18:03:55.554104Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1154:2842], SessionActorId: [1:1081:2842], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1081:2842]. isRollback=0 2025-04-17T18:03:55.555265Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTczNGEzOTktYWUzMDNjMzMtZDE2MDhlNjItZjU4MDk4MDQ=, ActorId: [1:1081:2842], ActorState: ExecuteState, TraceId: 01js2eqw3r4k9x9yc5h4qxfxt0, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1155:2842] from: [1:1154:2842] 2025-04-17T18:03:55.555663Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1155:2842] TxId: 281474976715664. Ctx: { TraceId: 01js2eqw3r4k9x9yc5h4qxfxt0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTczNGEzOTktYWUzMDNjMzMtZDE2MDhlNjItZjU4MDk4MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } 2025-04-17T18:03:55.556074Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTczNGEzOTktYWUzMDNjMzMtZDE2MDhlNjItZjU4MDk4MDQ=, ActorId: [1:1081:2842], ActorState: ExecuteState, TraceId: 01js2eqw3r4k9x9yc5h4qxfxt0, Create QueryResponse for error on request, msg: 2025-04-17T18:03:55.879241Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2eqwdx07m9bt169813qjz6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjliYzEzZWEtNjEwZTkyMzEtNDAzOTI0YjctMTg0ODg2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-17T18:03:59.644297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:03:59.644599Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:03:59.644817Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d8e/r3tmp/tmpgbNmtB/pdisk_1.dat 2025-04-17T18:03:59.949586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:03:59.978854Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:00.016088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:00.016204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:00.027764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:00.130703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:04:00.456743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:00.730663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:832:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:00.730748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:842:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:00.730824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:00.735293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:04:00.893081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:846:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:04:00.929632Z node 2 :TX_PROXY ERROR: Actor# [2:906:2733] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:00.993152Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2er1as97wn8er6p98en44n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk1YzQ5MTMtN2U5NWZiZTQtYzdmY2NhZGMtMmVlMDQ3ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:01.070482Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2er1kmedr3ptmzc9g2gzdq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzE4OWJjZDItMjA0NTVjNy00ODZkZmExNi1kZTgyZDBkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2025-04-17T18:04:01.667779Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:1019:2811]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-04-17T18:04:01.668090Z node 2 :KQP_COMPUTE WARN: SelfId: [2:992:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [2:968:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 2025-04-17T18:04:01.668213Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:982:2772], SessionActorId: [2:968:2772], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:968:2772]. isRollback=0 2025-04-17T18:04:01.668433Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGEyNDA1YTYtNTUwODU1ZGMtNTRiMDIxNWQtNWUwYWZlZjY=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01js2er1p44zyph4ch3en1d5k7, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:983:2772] from: [2:982:2772] 2025-04-17T18:04:01.668995Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:983:2772] TxId: 281474976715663. Ctx: { TraceId: 01js2er1p44zyph4ch3en1d5k7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGEyNDA1YTYtNTUwODU1ZGMtNTRiMDIxNWQtNWUwYWZlZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } 2025-04-17T18:04:01.669354Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGEyNDA1YTYtNTUwODU1ZGMtNTRiMDIxNWQtNWUwYWZlZjY=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01js2er1p44zyph4ch3en1d5k7, Create QueryResponse for error on request, msg:
{ items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }
>> KqpScanSpilling::SpillingPragmaParseError [GOOD]
>> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD]
>> TestYmqHttpProxy::TestSendMessage
>> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest
>> TestKinesisHttpProxy::MissingAction
>> TestKinesisHttpProxy::DifferentContentTypes
>> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD]
>> KqpScanSpilling::SelfJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD]
Test command err:
cwd: /home/runner/.ya/build/build_root/6873/0013a3/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk9
Trying to start YDB, gRPC: 3133, MsgBus: 28444
2025-04-17T18:03:59.718144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344438889337475:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:59.718229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a3/r3tmp/tmph0JQgw/pdisk_1.dat 2025-04-17T18:04:00.252157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:00.259583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:00.259686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:00.264527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3133, node 1 2025-04-17T18:04:00.369836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:00.369858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:00.369864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:00.369978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28444 TClient is connected to server localhost:28444 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:00.887470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:00.900517Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:00.908364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.067017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.221029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.296393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:02.928026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344451774241132:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:02.928184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.245074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.271798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.303432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.339414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.374779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.443416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.472628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.535451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344456069209093:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.535567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.535874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344456069209099:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.539508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:03.550773Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:04:03.551039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344456069209101:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:03.647845Z node 1 :TX_PROXY ERROR: Actor# [1:7494344456069209158:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:04.527725Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344460364176790:2516], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2025-04-17T18:04:04.528005Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzdmZTEyMDItNmVkMGNhYzYtYzNiMmVmODAtMTdmZGNlYzQ=, ActorId: [1:7494344460364176783:2512], ActorState: ExecuteState, TraceId: 01js2er4z81fnxy9nhhvjrrr9t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:04:04.713775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344438889337475:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:04.713842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 11104719852007762193 >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4073, MsgBus: 20334 2025-04-17T18:00:33.627254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343552178949749:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:33.628470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a7e/r3tmp/tmpSRSVQU/pdisk_1.dat 2025-04-17T18:00:34.288362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:34.288485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:34.290091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:00:34.308509Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4073, node 1 2025-04-17T18:00:34.426732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:34.426762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:34.426771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:34.426886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20334 TClient is connected to server localhost:20334 WaitRootIsUp 'Root'... 
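The SpillingPragmaParseError failure above is the expected outcome of that test: the query sets the "EnableSpillingNodes" setting to 'GraceJoin1', which is not a member of NYql::NDq::EEnabledSpillingNodes, so compilation stops at pre-type annotation with GENERIC_ERROR. A minimal YQL sketch of the distinction follows; the "ydb.EnableSpillingNodes" pragma spelling is an assumption inferred from the error text, which confirms only the setting name and the valid values.

-- Hypothetical pragma spelling; valid values per the error above are
-- 'None', 'GraceJoin', 'Aggregation', 'All'.
PRAGMA ydb.EnableSpillingNodes = "GraceJoin1"; -- rejected: not in the enum
PRAGMA ydb.EnableSpillingNodes = "GraceJoin";  -- accepted
SELECT 1;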
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:35.060861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:35.075076Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:35.095896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:35.360986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:35.657988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:35.792291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:37.957668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343569358820562:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:37.957750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.332473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.424983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.493683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.552559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.590227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.609818Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343552178949749:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:38.609875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:38.653578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.747355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:38.902137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343573653788530:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.902229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.903255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343573653788535:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:38.908725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:38.925718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343573653788537:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:39.030766Z node 1 :TX_PROXY ERROR: Actor# [1:7494343577948755897:3559] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:41.681896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:43.942995Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343595128626034:2559], SessionActorId: [1:7494343595128626013:2559], statusCode=PRECONDITION_FAILED. Issue=
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 . sessionActorId=[1:7494343595128626013:2559]. isRollback=0 2025-04-17T18:00:43.973460Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGNiNDEzNDctNzkzNDdiNWItZmFhNDdmM2MtODJlOWY1NTQ=, ActorId: [1:7494343595128626013:2559], ActorState: ExecuteState, TraceId: 01js2ej0ht6y4ay5203ksb3hb0, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494343595128626035:2559] from: [1:7494343595128626034:2559] 2025-04-17T18:00:43.973565Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494343595128626035:2559] TxId: 281474976710673. Ctx: { TraceId: 01js2ej0ht6y4ay5203ksb3hb0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiNDEzNDctNzkzNDdiNWItZmFhNDdmM2MtODJlOWY1NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 } 2025-04-17T18:00:43.973918Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494343595128626043:2572], TxId: 281474976710673, task: 5. Ctx: { TraceId : 01js2ej0ht6y4ay5203ksb3hb0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NGNiNDEzNDctNzkzNDdiNWItZmFhNDdmM2MtODJlOWY1NTQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494343595128626035:2559], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:00:43.974618Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGNiNDEzNDctNzkzNDdiNWItZmFhNDdmM2MtODJlOWY1NTQ=, ActorId: [1:7494343595128626013:2559], ActorState: ExecuteState, TraceId: 01js2ej0ht6y4ay5203ksb3hb0, Create QueryResponse for error on request, msg:
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 Trying to start YDB, gRPC: 20048, MsgBus: 6716 2025-04-17T18:00:44.835459Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494343601909758959:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:44.835698Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a7e/r3tmp/tmpYICN5q/pdisk_1.dat 2025-04-17T18:00:45.061174Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:45.077587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:45.077645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:45.087487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20048, node 2 2025-04-17T18:00:45.146656Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:45.146678Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025 ... node 3 :TX_PROXY ERROR: Actor# [3:7494344331667379111:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:35.283138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:36.327005Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344318782474736:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:36.327104Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:44.492164Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344370322086175:2653], SessionActorId: [3:7494344370322086155:2653], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[3:7494344370322086155:2653]. isRollback=0 2025-04-17T18:03:44.592027Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGQxMzA2NjEtNWM2ZDQ0NzYtMmYzNjU4MWMtNDZiNjdlZTA=, ActorId: [3:7494344370322086155:2653], ActorState: ExecuteState, TraceId: 01js2eqg3z8h0kwpprdcybj2w3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7494344370322086176:2653] from: [3:7494344370322086175:2653] 2025-04-17T18:03:44.592129Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344370322086176:2653] TxId: 281474976715673. Ctx: { TraceId: 01js2eqg3z8h0kwpprdcybj2w3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGQxMzA2NjEtNWM2ZDQ0NzYtMmYzNjU4MWMtNDZiNjdlZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-04-17T18:03:44.592258Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344370322086184:2667], TxId: 281474976715673, task: 5. Ctx: { TraceId : 01js2eqg3z8h0kwpprdcybj2w3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NGQxMzA2NjEtNWM2ZDQ0NzYtMmYzNjU4MWMtNDZiNjdlZTA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494344370322086176:2653], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:03:44.592594Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGQxMzA2NjEtNWM2ZDQ0NzYtMmYzNjU4MWMtNDZiNjdlZTA=, ActorId: [3:7494344370322086155:2653], ActorState: ExecuteState, TraceId: 01js2eqg3z8h0kwpprdcybj2w3, Create QueryResponse for error on request, msg:
: Error: Stream write queries aren't allowed., code: 2029 Trying to start YDB, gRPC: 28042, MsgBus: 18082 2025-04-17T18:03:45.481769Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344377324259118:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:45.482048Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a7e/r3tmp/tmpwm98Z6/pdisk_1.dat 2025-04-17T18:03:45.603043Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:45.635943Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:45.636040Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:45.637840Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28042, node 4 2025-04-17T18:03:45.686850Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:45.686877Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:45.686887Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:45.687017Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18082 TClient is connected to server localhost:18082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:46.227386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:46.240502Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:46.327424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:03:46.559735Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:03:46.633214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:48.778078Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344390209162783:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:48.778204Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:48.842496Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:03:48.877611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:03:48.909892Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:03:48.944492Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:03:48.978739Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.015054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.084821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:03:49.152525Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344394504130743:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:49.152589Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:49.152651Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344394504130748:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:49.156354Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:03:49.167014Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344394504130750:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:03:49.258909Z node 4 :TX_PROXY ERROR: Actor# [4:7494344394504130807:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:03:50.466122Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:50.532767Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344377324259118:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:50.535853Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:03:59.729400Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Yzk0MDdlMTgtNjA1NDRhOWUtNDVmZTQ5MzUtNmZmYTY5MDc=, ActorId: [4:7494344433158837834:2653], ActorState: ExecuteState, TraceId: 01js2eqyz7cywxp3q0789jt3nb, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240924 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 8242439794819203557 >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013c4/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 1397, MsgBus: 5887 2025-04-17T18:03:59.736995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344439421088203:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:59.738449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c4/r3tmp/tmp1Ir2UK/pdisk_1.dat 2025-04-17T18:04:00.222102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:00.222267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:00.222526Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:00.227769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1397, node 1 2025-04-17T18:04:00.330660Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:00.330684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:00.330691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:00.330867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5887 TClient is connected to server localhost:5887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
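The final KqpLimits::QSReplySize error above is a result-size failure rather than a crash: the query materialized 88240924 bytes (about 84 MiB) of intermediate data, exceeding the 50331648-byte (48 MiB) cap, and the message recommends writing or looking up in smaller batches. A hedged YQL sketch of key-range paging over the /Root/KeyValue table these tests use (Key is Uint64 and Value is String, per the compiled plan later in this log); $last_key and the page size are illustrative, not taken from the test:

DECLARE $last_key AS Uint64;
SELECT Key, Value
FROM KeyValue
WHERE Key > $last_key   -- resume after the last key returned by the previous page
ORDER BY Key
LIMIT 1000;             -- keep each page well under the 48 MiB materialization cap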
2025-04-17T18:04:00.853494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:00.876948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.022516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.169027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.252827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:02.750869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344452305991648:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:02.750968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.063860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.094705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.120519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.149076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.177701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.210268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.278824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.371837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344456600959606:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.375152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.376007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344456600959613:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.381839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:03.395376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344456600959615:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:03.502396Z node 1 :TX_PROXY ERROR: Actor# [1:7494344456600959671:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:04.466404Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2025-04-17T18:04:04.466603Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2er4y2ec3n8znf5j1gx76x. SessionId : ydb://session/3?node_id=1&id=ZTdkZGFmMGEtMTg4OThlNy0xNDliOTIyZS0yOGNlZjhkZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7494344460895927317:2512], task: 1 2025-04-17T18:04:04.466629Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2er4y2ec3n8znf5j1gx76x. SessionId : ydb://session/3?node_id=1&id=ZTdkZGFmMGEtMTg4OThlNy0xNDliOTIyZS0yOGNlZjhkZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set execution timeout 299.952306s 2025-04-17T18:04:04.471595Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2er4y2ec3n8znf5j1gx76x. SessionId : ydb://session/3?node_id=1&id=ZTdkZGFmMGEtMTg4OThlNy0xNDliOTIyZS0yOGNlZjhkZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\036\n\016/Root/KeyValue\020\200\202\224\204\200\200\200\200\001\030\006(\001\"\t\n\003Key\020\001 \004*\t\n\003Key\020\001 \004*\014\n\005Value\020\002 \201 0\220\200\200\200\200\200@8\001@\000H\000R\022\t\021<\036\230\224B\001h\021\320\t\000\000\001\000\020\000X\000`\000h\000h\001x\000" } } } 2025-04-17T18:04:04.471737Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2er4y2ec3n8znf5j1gx76x. SessionId : ydb://session/3?node_id=1&id=ZTdkZGFmMGEtMTg4OThlNy0xNDliOTIyZS0yOGNlZjhkZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646926 2025-04-17T18:04:04.471783Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2er4y2ec3n8znf5j1gx76x. SessionId : ydb://session/3?node_id=1&id=ZTdkZGFmMGEtMTg4OThlNy0xNDliOTIyZS0yOGNlZjhkZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Received channels info: 2025-04-17T18:04:04.471877Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2er4y2ec3n8znf5j1gx76x. SessionId : ydb://session/3?node_id=1&id=ZTdkZGFmMGEtMTg4OThlNy0xNDliOTIyZS0yOGNlZjhkZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. About to drain async output 0. 
FreeSpace: 67108864, allowedOvercommit: 0, toSend: 67108864, finished: 0 2025-04-17T18:04:04.472007Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Add data: 32 / 32 2025-04-17T18:04:04.472045Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Send data=32, closed=1, bufferActorId=[1:7494344460895927313:2512] 2025-04-17T18:04:04.472066Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2er4y2ec3n8znf5j1gx76x. SessionId : ydb://session/3?node_id=1&id=ZTdkZGFmMGEtMTg4OThlNy0xNDliOTIyZS0yOGNlZjhkZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 32 2025-04-17T18:04:04.472099Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-04-17T18:04:04.472114Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344460895927317:2512], TxId: 281474976710672, task: 1. Ctx: ... OMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577116Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577283Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577417Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577545Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894930:2565], TxId: 281474976710683, task: 3. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-17T18:04:05.577563Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894930:2565], TxId: 281474976710683, task: 3. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577593Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894930:2565], TxId: 281474976710683, task: 3. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:04:05.577600Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-17T18:04:05.577620Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 4. Finish input channelId: 4, from: [1:7494344465190894930:2565] 2025-04-17T18:04:05.577659Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577674Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894930:2565], TxId: 281474976710683, task: 3. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-17T18:04:05.577684Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894930:2565], TxId: 281474976710683, task: 3. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577706Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2025-04-17T18:04:05.577716Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2025-04-17T18:04:05.577723Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 3. Tasks execution finished 2025-04-17T18:04:05.577732Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.577738Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894930:2565], TxId: 281474976710683, task: 3. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:04:05.577808Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 3. pass away 2025-04-17T18:04:05.577906Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710683;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:04:05.578101Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.578147Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.578476Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.578630Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.578723Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.578749Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:04:05.579015Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.579041Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:04:05.579065Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.579111Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:04:05.579396Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:04:05.579434Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2025-04-17T18:04:05.579442Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 4. Tasks execution finished 2025-04-17T18:04:05.579452Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494344465190894931:2566], TxId: 281474976710683, task: 4. Ctx: { TraceId : 01js2er5kf132ntjket9b8pfk1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWE0ODQxMWMtZGE0NjAwNi05MmM0NTMwYi1kZTgxMjk3ZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:04:05.579513Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710683, task: 4. pass away 2025-04-17T18:04:05.579597Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710683;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:04:05.580447Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913045561, txId: 281474976710682] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013ad/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk6 Trying to start YDB, gRPC: 25684, MsgBus: 23445 2025-04-17T18:03:59.731346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344439809676713:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:59.731761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ad/r3tmp/tmplOABwR/pdisk_1.dat 2025-04-17T18:04:00.281549Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:00.284061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:00.284173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:00.288295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25684, node 1 2025-04-17T18:04:00.354962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:00.354989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:00.355000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:00.355146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23445 TClient is connected to server localhost:23445 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:00.849251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:00.877261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:01.006802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.164152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.224326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:02.927490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344452694580365:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:02.927618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.226315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.253625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.280021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.310455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.344980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.389589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.457432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.538773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344456989548324:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.538890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.539134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344456989548329:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.542414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:03.553295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344456989548331:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:03.650789Z node 1 :TX_PROXY ERROR: Actor# [1:7494344456989548389:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:04.731793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344439809676713:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:04.731874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (StructType '('"Key" $3) '('"Value" $5))) (let $7 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($21) (block '( (let $22 (lambda '($23) (block '( (let $24 (VariantType (TupleType $6 $6))) (let $25 (Variant $23 '0 $24)) (let $26 (Variant $23 '1 $24)) (return $25 $26) )))) (return (FromFlow (MultiMap (ToFlow $21) $22))) ))) '('('"_logical_id" '706) '('"_id" '"d2a77712-801f5598-79d071e1-11c83bb5")))) (let $8 (DqCnUnionAll (TDqOutput $7 '1))) (let $9 '('('"_logical_id" '551) '('"_id" '"35f2caec-c18421a2-50e94b2c-66a9df65") '('"_wide_channels" $6))) (let $10 (DqPhyStage '($8) (lambda '($27) (block '( (let $28 (lambda '($29) (Member $29 '"Key") (Member $29 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $27) $28))) ))) $9)) (let $11 (DqCnMap (TDqOutput $7 '0))) (let $12 (DqCnBroadcast (TDqOutput $10 '0))) (let $13 (StructType '('"t1.Key" $3) '('"t1.Value" $5) '('"t2.Key" $3) '('"t2.Value" $5))) (let $14 '('('"_logical_id" '621) '('"_id" '"2a6a744e-cec72d64-5480d560-4f6e4a") '('"_wide_channels" $13))) (let $15 (DqPhyStage '($11 $12) (lambda '($30 $31) (block '( (let $32 '('Many 'Hashed 'Compact)) (let $33 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $31) (lambda '($36 $37) (Exists $37))) (lambda '($38 $39) (IfPresent $39 (lambda '($40) (Just '($40 (AsStruct '('"Key" $38) '('"Value" $39))))) (Nothing (OptionalType (TupleType $4 $6)))))) (lambda '($41) (Nth $41 '0)) (lambda '($42) (Nth $42 '1)) $32)) (let $34 (Sort (FlatMap $33 (lambda '($43) (block '( (let $44 '('"Value")) (let $45 '('"Key" '"t1.Key" '"Value" '"t1.Value")) (let $46 '('"Key" '"t2.Key" '"Value" '"t2.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $30) (lambda '($47) (Exists (Member $47 '"Value")))) $43 'Inner $44 $44 $45 $46 '('"t1.Value") '('"t2.Value"))) )))) (Bool 'true) (lambda '($48) (Member $48 '"t1.Key")))) (let $35 (lambda '($49) (Member $49 '"t1.Key") (Member $49 '"t1.Value") (Member $49 '"t2.Key") (Member $49 '"t2.Value"))) (return (FromFlow (ExpandMap $34 $35))) ))) $14)) (let $16 (DqCnMerge (TDqOutput $15 '0) '('('0 '"Asc")))) (let $17 (DqPhyStage '($16) (lambda '($50) (FromFlow (NarrowMap (ToFlow $50) (lambda '($51 $52 $53 $54) (AsStruct '('"t1.Key" $51) '('"t1.Value" $52) '('"t2.Key" $53) '('"t2.Value" $54)))))) '('('"_logical_id" '633) '('"_id" '"370e7aca-3eecf430-88e2270e-25bee405")))) (let $18 '($7 $10 $15 $17)) (let $19 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $20 (DqCnResult (TDqOutput $17 '0) $19)) (return 
(KqpPhysicalQuery '((KqpPhysicalTx $18 '($20) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $13) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 14436186290831269665 2025-04-17T18:03:59.510703Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:03:59.512606Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12552107987875090196] 2025-04-17T18:03:59.535774Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] |85.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TestYmqHttpProxy::TestCreateQueue >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_NotSplitedTopic [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] Test command err: 2025-04-17T18:02:43.885765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344114702435965:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.885828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:02:44.141561Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018a3/r3tmp/tmplJgoxL/pdisk_1.dat 2025-04-17T18:02:44.424811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.447571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.447691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-17T18:02:44.449500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3670, node 1 2025-04-17T18:02:44.571109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0018a3/r3tmp/yandexgTcHFO.tmp 2025-04-17T18:02:44.571142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0018a3/r3tmp/yandexgTcHFO.tmp 2025-04-17T18:02:44.572134Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0018a3/r3tmp/yandexgTcHFO.tmp 2025-04-17T18:02:44.572304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.779743Z INFO: TTestServer started on Port 7221 GrpcPort 3670 TClient is connected to server localhost:7221 PQClient connected to localhost:3670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.139362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.168047Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.178092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:02:45.183563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:45.335452Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:47.147149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344131882305952:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:47.147260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:47.149678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344131882305988:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:47.153091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:02:47.168374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344131882305990:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:02:47.266205Z node 1 :TX_PROXY ERROR: Actor# [1:7494344131882306054:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.787733Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344131882306069:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819360Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjJkMmQyOTQtNWRiY2ZkNDktZGIyNGJiOGEtMmMyM2FhZg==, ActorId: [1:7494344131882305948:2335], ActorState: ExecuteState, TraceId: 01js2ensf164tm7n3hp2wwjbey, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.834148Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.195742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.238657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.342065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.439201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344114702435965:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.439459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344144767208283:2660] === CheckClustersList. 
Ok 2025-04-17T18:02:55.110658Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:55.158465Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:55.211960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344166242045024:2795], Recipient [1:7494344118997403698:2198]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:55.212085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344166242045020:2792], Recipient [1:7494344118997403698:2198]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T18:02:55.212100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:55.260843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:55.261213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:02:55.261437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-17T18:02:55.261466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-17T18:02:55.261485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-17T1 ... 
031Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [7:7494344430234930459:2241], Recipient [7:7494344430234930300:2153]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 15] Version: 2 } 2025-04-17T18:04:07.196057Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T18:04:07.196106Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-17T18:04:07.196131Z node 7 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [7:7494344473184604600:2475], Recipient [7:7494344473184604479:2475]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-17T18:04:07.196167Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-17T18:04:07.196178Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715673 2025-04-17T18:04:07.196192Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 2 2025-04-17T18:04:07.196206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2025-04-17T18:04:07.196261Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 2025-04-17T18:04:07.196281Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [7:7494344473184604450:2473] 2025-04-17T18:04:07.196312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T18:04:07.196427Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T18:04:07.196939Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-17T18:04:07.196952Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T18:04:07.196989Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-17T18:04:07.196997Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T18:04:07.197062Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:7494344473184604450:2473] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-04-17T18:04:07.197468Z node 7 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [7:7494344473184604599:2485], Recipient [7:7494344473184604479:2475]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T18:04:07.197774Z node 7 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [7:7494344473184604479:2475], Recipient [7:7494344473184604479:2475]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { 
Status: 0 StatusFlags: 1 } 2025-04-17T18:04:07.197797Z node 7 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-17T18:04:07.197819Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:04:07.197841Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-04-17T18:04:07.197860Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, State DELETING 2025-04-17T18:04:07.197883Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715673 2025-04-17T18:04:07.201735Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:7494344473184604466:2838], Recipient [7:7494344430234930300:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:07.201781Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:07.201800Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T18:04:07.202123Z node 7 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [7:7494344473184604479:2475], Recipient [7:7494344473184604479:2475]: NKikimr::TEvKeyValue::TEvCollect 2025-04-17T18:04:07.202433Z node 7 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [7:7494344473184604605:2486], Recipient [7:7494344473184604479:2475]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T18:04:07.212107Z node 7 :PQ_READ_PROXY DEBUG: new alter topic request 2025-04-17T18:04:07.214141Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:7494344473184604615:2918], Recipient [7:7494344430234930300:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:04:07.214211Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:04:07.214235Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:04:07.214284Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:7494344473184604611:2916], Recipient [7:7494344430234930300:2153]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2025-04-17T18:04:07.214312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:04:07.217084Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "Root/origin/feed" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "streamImpl" PathId: 15 TotalGroupCount: 3 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "feed" TopicPath: "/Root/origin/feed/streamImpl" YdbDatabasePath: "/Root" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 3 MaxPartitionCount: 107 ScaleThresholdSeconds: 30 PartitionStrategyType: DISABLED } } Partitions { PartitionId: 0 TabletId: 72075186224037893 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 1 } ApplyIf { PathId: 15 PathVersion: 2 } AllowAccessToPrivatePaths: true } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:04:07.217462Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: 
Root/origin/feed/streamImpl, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:07.217626Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046644480 2025-04-17T18:04:07.217852Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T18:04:07.218439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 281474976715674 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:04:07.218669Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: Root/origin/feed/streamImpl 2025-04-17T18:04:07.218719Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T18:04:07.218868Z node 7 :TX_PROXY ERROR: Actor# [7:7494344473184604611:2916] txid# 281474976715674, issues: { message: "Can`t disable auto partitioning." severity: 1 } 2025-04-17T18:04:07.219122Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:7494344473184604615:2918], Recipient [7:7494344430234930300:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:07.219161Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:07.219180Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T18:04:07.285199Z node 7 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [7:7494344473184604479:2475], Partition 0, Sender [0:0:0], Recipient [7:7494344473184604573:2481], Cookie: 0 2025-04-17T18:04:07.285291Z node 7 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [7:7494344473184604573:2481]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:07.285329Z node 7 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:07.285394Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:07.285485Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:07.285529Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:07.285570Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T18:04:07.380455Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:7494344430234930300:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:04:07.380508Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:04:07.380568Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:7494344430234930300:2153], Recipient [7:7494344430234930300:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:04:07.380591Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:04:07.385674Z node 7 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [7:7494344473184604479:2475], Partition 0, Sender [0:0:0], Recipient [7:7494344473184604573:2481], Cookie: 0 2025-04-17T18:04:07.385775Z node 7 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [7:7494344473184604573:2481]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:07.385810Z node 7 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:07.385882Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:07.385980Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:07.386018Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:07.386070Z node 7 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks >> TPersQueueTest::TxCounters [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageLimit87 >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::NoStoragePools >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::TestTabletIsDead >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit50 >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T17:57:15.658709Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494342705259014472:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:15.658761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:15.775320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494342704693168927:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T17:57:15.775485Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T17:57:16.006511Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T17:57:16.011531Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00081a/r3tmp/tmp3Cx6jn/pdisk_1.dat 2025-04-17T17:57:16.656498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T17:57:16.716870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T17:57:16.750665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:16.750771Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:16.752673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T17:57:16.752752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T17:57:16.757367Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T17:57:16.757477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T17:57:16.757882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26851, node 1 2025-04-17T17:57:16.922133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00081a/r3tmp/yandexwvawck.tmp 2025-04-17T17:57:16.922189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00081a/r3tmp/yandexwvawck.tmp 2025-04-17T17:57:16.922346Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00081a/r3tmp/yandexwvawck.tmp 2025-04-17T17:57:16.922480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T17:57:16.990845Z INFO: TTestServer started on Port 5515 GrpcPort 26851 TClient is connected to server localhost:5515 PQClient connected to localhost:26851 === TenantModeEnabled() = 0 === Init PQ - start server on port 26851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T17:57:17.392206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T17:57:17.392417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:17.392666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T17:57:17.392874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T17:57:17.392905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:17.395274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:17.395398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T17:57:17.395568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:17.395605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T17:57:17.395620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T17:57:17.395631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-17T17:57:17.398698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:17.398747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T17:57:17.398765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-17T17:57:17.401754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:17.401789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:17.401854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:17.401895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:17.406127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T17:57:17.406519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:17.406536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T17:57:17.406565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T17:57:17.408259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T17:57:17.408426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T17:57:17.411491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744912637454, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T17:57:17.411623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744912637454 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T17:57:17.411648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:17.411930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T17:57:17.411954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T17:57:17.412105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T17:57:17.412141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T17:57:17.413969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T17:57:17.413995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T17:57:17.414165Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T17:57:17.414210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494342709553982414:2393], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T17:57:17.414253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T17:57:17.414270Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T17:57:17.414336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:57:17.414345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:17.415809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T17:57:17.415826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:17.415842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T17:57:17.415861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T17:57:17.415882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T17:57:17.415890Z n ... CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:04:08.314816Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:04:08.314851Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7494344478747438627:2496] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:04:08.314886Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:04:08.316065Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-17T18:04:08.316323Z node 32 :PERSQUEUE INFO: new Cookie 123|96818db1-ee80888e-a06470b4-27d189ca_0 generated for partition 0 topic 'topic' owner 123 2025-04-17T18:04:08.316929Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|96818db1-ee80888e-a06470b4-27d189ca_0 2025-04-17T18:04:08.335482Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-17T18:04:08.335840Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2025-04-17T18:04:08.335971Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7494344478747438633:2498]: Bootstrap 2025-04-17T18:04:08.344656Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7494344478747438633:2498]: Request location 2025-04-17T18:04:08.346679Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7494344478747438642:2499] connected; active server actors: 1 2025-04-17T18:04:08.350407Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2025-04-17T18:04:08.350492Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7494344478747438633:2498]: Got location 2025-04-17T18:04:08.357125Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7494344478747438642:2499] disconnected; active server actors: 1 2025-04-17T18:04:08.357183Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7494344478747438642:2499] disconnected no session 2025-04-17T18:04:08.357241Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|96818db1-ee80888e-a06470b4-27d189ca_0 grpc read done: success: 0 data: 2025-04-17T18:04:08.357258Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|96818db1-ee80888e-a06470b4-27d189ca_0 grpc read failed 2025-04-17T18:04:08.357286Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|96818db1-ee80888e-a06470b4-27d189ca_0 grpc closed 2025-04-17T18:04:08.357304Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|96818db1-ee80888e-a06470b4-27d189ca_0 is DEAD 2025-04-17T18:04:08.360397Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:04:08.361219Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-17T18:04:08.361243Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-17T18:04:08.361828Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2025-04-17T18:04:08.361977Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:49314 2025-04-17T18:04:08.361998Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:49314 proto=topic topic=topic durationSec=0 2025-04-17T18:04:08.362013Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:04:08.362067Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2025-04-17T18:04:08.363310Z node 32 
:PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-17T18:04:08.363553Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T18:04:08.363580Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:04:08.363596Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:04:08.363618Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7494344478747438647:2501] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:04:08.363635Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-04-17T18:04:08.364272Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-17T18:04:08.364516Z node 32 :PERSQUEUE INFO: new Cookie 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 generated for partition 0 topic 'topic' owner 123 2025-04-17T18:04:08.364994Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 2025-04-17T18:04:08.368825Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:04:08.370483Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:04:08.370893Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-17T18:04:08.371271Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=YWY2MGNkMjAtMTY1MDA0NWYtY2Q3ZTcyZDEtOGQyMzE0YmI= TxId: 01js2er8jdfhz5njryff41kmea WriteId: {32, 281474976715673} 2025-04-17T18:04:08.372634Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:04:08.374782Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715673}, 100000}, State: StateInit] bootstrapping {0, {32, 281474976715673}, 100000} [32:7494344478747438659:2503] 2025-04-17T18:04:08.375893Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 
123|8b316e53-7af326a8-183f0042-7b9ffa83_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:04:08.378724Z node 32 :PERSQUEUE INFO: [topic:{0, {32, 281474976715673}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:04:08.378800Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715673}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976715673}, 100000} generation 1 [32:7494344478747438659:2503] 2025-04-17T18:04:08.379393Z node 32 :PERSQUEUE INFO: new Cookie 123|e82989a-bcec96ee-3eb339f5-9c171aec_0 generated for partition {0, {32, 281474976715673}, 100000} topic 'topic' owner 123 2025-04-17T18:04:08.382062Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.382148Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.382198Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.382230Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.389981Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.393592Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.393658Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.393689Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:04:08.470356Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 grpc read done: success: 0 data: 2025-04-17T18:04:08.470388Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 grpc read failed 2025-04-17T18:04:08.470435Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 grpc closed 2025-04-17T18:04:08.470459Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|8b316e53-7af326a8-183f0042-7b9ffa83_0 is DEAD 2025-04-17T18:04:08.471650Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:04:08.471701Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:04:08.520284Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 0 Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=200: 0
    bin=500: 0
    bin=1000: 0
    bin=2000: 3
    bin=5000: 1
    bin=10000: 0
    bin=30000: 0
    bin=60000: 0
    bin=180000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=5120: 0
    bin=10240: 2
    bin=20480: 1
    bin=51200: 0
    bin=102400: 0
    bin=204800: 0
    bin=524288: 0
    bin=1048576: 0
    bin=2097152: 0
    bin=5242880: 0
    bin=10485760: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=5: 0
    bin=10: 0
    bin=20: 0
    bin=50: 0
    bin=100: 0
    bin=500: 0
    bin=1000: 0
    bin=2500: 0
    bin=5000: 0
    bin=10000: 0
    bin=999999: 0
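
A quick cross-check on the counter dump above: every histogram accounts for exactly the four written messages (topic.write.lag_milliseconds has 3 + 1 = 4 hits, topic.write.message_size_bytes has 1 + 2 + 1 = 4, topic.write.partition_throttled_milliseconds has all 4 in bin=0), which matches topic.write.messages: 4. A minimal Python sketch that parses this name=/bin= dump and verifies that invariant; the parsing rules are inferred from the dump above, not from a documented format:

import re

def parse_counters(dump: str):
    """Parse a 'name=<counter>: <value>' / '    bin=<bound>: <count>' dump.

    Scalar counters map to int, histograms to {bucket_bound: count}.
    The format is inferred from the test log above, not a documented spec.
    """
    counters, current_hist = {}, None
    for raw in dump.splitlines():
        line = raw.strip()
        m = re.fullmatch(r"name=([\w./]+):(?: (\d+))?", line)
        if m:
            name, value = m.groups()
            if value is None:          # histogram header; bin lines follow
                counters[name] = {}
                current_hist = name
            else:                      # plain scalar counter
                counters[name] = int(value)
                current_hist = None
            continue
        b = re.fullmatch(r"bin=(\d+): (\d+)", line)
        if b and current_hist is not None:
            counters[current_hist][int(b.group(1))] = int(b.group(2))
    return counters

counters = parse_counters("""\
name=topic.write.messages: 4
name=topic.write.lag_milliseconds:
    bin=2000: 3
    bin=5000: 1
""")
assert sum(counters["topic.write.lag_milliseconds"].values()) == \
    counters["topic.write.messages"]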
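
The TPartitionChooser entries near the top of this section (TTableHelper SelectQuery / UpdateQuery / UpdateAccessTimeQuery) show the producer-to-partition mapping being persisted in `//Root/.metadata/TopicPartitionsMapping`: look up a row by (Hash, Topic, ProducerId), reuse the stored Partition and SeqNo if one exists, otherwise pick a partition and UPSERT a new row. A sketch of that control flow in Python; the YQL text is taken from the log, while run_yql() and pick_partition() are hypothetical stand-ins, not real YDB SDK calls:

# Control flow of the partition chooser as seen in the PQ_PARTITION_CHOOSER
# log above. run_yql() is a hypothetical helper that executes parameterized
# YQL and returns rows as dicts; it is not part of any real YDB client API.

SELECT_QUERY = """--!syntax_v1
DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
SELECT Partition, CreateTime, AccessTime, SeqNo
FROM `//Root/.metadata/TopicPartitionsMapping`
WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;
"""

UPSERT_QUERY = """--!syntax_v1
DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
    (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);
"""

def choose_partition(run_yql, hash_, topic, source_id, now_ms, pick_partition):
    """Return (partition, seqno) for a producer, reusing any stored mapping."""
    rows = run_yql(SELECT_QUERY, Hash=hash_, Topic=topic, SourceId=source_id)
    if rows:
        # Known producer: keep its partition so per-producer ordering holds.
        return rows[0]["Partition"], rows[0]["SeqNo"]
    partition = pick_partition()  # e.g. hash-based choice by the write proxy
    run_yql(UPSERT_QUERY, Hash=hash_, Topic=topic, SourceId=source_id,
            CreateTime=now_ms, AccessTime=now_ms,
            Partition=partition, SeqNo=0)
    return partition, 0

This matches the ReplyResult line above, where SourceId=123 resolves to Partition=0, SeqNo=0 on first use.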
>> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt]
|85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD]
>> TestKinesisHttpProxy::MissingAction [GOOD]
>> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD]
>> TestYmqHttpProxy::TestSendMessage [GOOD]
>> TestKinesisHttpProxy::DifferentContentTypes [GOOD]
|85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> BsControllerConfig::MergeIntersectingBoxes [GOOD]
>> BsControllerConfig::MoveGroups
>> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TestKinesisHttpProxy::TestRequestWithWrongRegion
>> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD]
|85.8%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TestYmqHttpProxy::TestReceiveMessage
>> TestKinesisHttpProxy::GoodRequestPutRecords
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD]
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest
|85.8%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|85.8%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log}
|85.8%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log}
>> TestYmqHttpProxy::TestCreateQueue [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithBadQueueName
>> THealthCheckTest::NoStoragePools [GOOD]
>> THealthCheckTest::NoBscResponse
>> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD]
>> TestYmqHttpProxy::TestSendMessageFifoQueue
>> KqpQueryService::DdlGroup
>> KqpQueryService::ExecuteCollectMeta
>> KqpService::Shutdown
>> KqpQueryServiceScripts::ExecuteScriptWithParameters
>> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery
>> KqpQueryService::ShowCreateTable
>> KqpQueryService::IssuesInCaseOfSuccess
>> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD]
>> GenericFederatedQuery::IcebergHadoopTokenSelectCount
>> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD]
>> GenericFederatedQuery::IcebergHadoopBasicSelectCount
>> KqpLimits::TooBigQuery+useSink [GOOD]
>> KqpLimits::TooBigQuery-useSink
>> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD]
>> THealthCheckTest::GreenStatusWhenCreatingGroup
>> THealthCheckTest::StorageLimit87 [GOOD]
>> THealthCheckTest::StorageLimit80
>> THealthCheckTest::ShardsLimit995 [GOOD]
>> THealthCheckTest::ShardsLimit905
>> THealthCheckTest::Issues100VCardListing [GOOD]
>> THealthCheckTest::Issues100GroupsMerging
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit_ChildFirst [GOOD]
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckSessionResetAfterCommit
>> THealthCheckTest::GreenStatusWhenInitPending [GOOD]
>> THealthCheckTest::IgnoreOtherGenerations
>> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD]
>> THealthCheckTest::StorageLimit50 [GOOD]
>> THealthCheckTest::SpecificServerless
>> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD]
>> THealthCheckTest::ServerlessBadTablets
>> THealthCheckTest::OnlyDiskIssueOnSpaceIssues [GOOD]
>> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD]
>> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks
>> TopicAutoscaling::MidOfRange [GOOD]
>> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD]
>> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD]
>> TestKinesisHttpProxy::TestRequestWithIAM
>> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey
>> TestYmqHttpProxy::TestReceiveMessage [GOOD]
>> TestKinesisHttpProxy::DoubleCreateStream
>> TestYmqHttpProxy::TestReceiveMessageWithAttributes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD]
Test command err:
2025-04-17T18:02:43.885084Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344114241623827:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:02:43.885450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001878/r3tmp/tmpX7x5Ch/pdisk_1.dat
2025-04-17T18:02:44.159289Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-17T18:02:44.362895Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:02:44.382526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:02:44.382642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:02:44.385268Z node 1 :HIVE WARN: HIVE#72057594037968897
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1223, node 1 2025-04-17T18:02:44.578300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001878/r3tmp/yandexXjt8SR.tmp 2025-04-17T18:02:44.578334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001878/r3tmp/yandexXjt8SR.tmp 2025-04-17T18:02:44.578530Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001878/r3tmp/yandexXjt8SR.tmp 2025-04-17T18:02:44.578667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.786319Z INFO: TTestServer started on Port 15124 GrpcPort 1223 TClient is connected to server localhost:15124 PQClient connected to localhost:1223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.133233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.149483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.177366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:02:45.185657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:45.357277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:46.868518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344127126526525:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.868529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344127126526551:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.868684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.873274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.874309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344127126526584:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.874372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.883182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344127126526554:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:02:46.976722Z node 1 :TX_PROXY ERROR: Actor# [1:7494344127126526612:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.788405Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344127126526628:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819581Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmRmMjVlZjQtNGZlN2RhM2YtYTNhODNiZGQtZDFjZjBhMQ==, ActorId: [1:7494344127126526522:2336], ActorState: ExecuteState, TraceId: 01js2ens63b0as4m7ahg5bb2gf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.833355Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.188857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.225522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.335390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.439673Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344114241623827:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.439929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344144306396140:2659] === CheckClustersList. 
Ok 2025-04-17T18:02:56.319937Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:56.335226Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:56.336396Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344170076200225:2817], Recipient [1:7494344118536591577:2206]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:56.336424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:56.336435Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:56.336462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344170076200221:2814], Recipient [1:7494344118536591577:2206]: {TEvModifySchemeTransaction txid# 281474976710674 TabletId# 72057594046644480} 2025-04-17T18:02:56.336472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:56.390988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:56.391510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:02:56.391847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the ... 
FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [6:7494344515417423779:2466] 2025-04-17T18:04:17.772308Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T18:04:17.772418Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:04:17.772432Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T18:04:17.772549Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-17T18:04:17.772567Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T18:04:17.772589Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [6:7494344515417423695:2456], Recipient [6:7494344515417423695:2456]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdWrite { Key: "tx_00000281474976715673" Value: "\010\231\247\200\200\200\200@\020\t\030\342\217\261\247\3442 \377\377\377\377\377\377\377\377\377\001@\214\220\261\247\3442H\001X\002b\370\001\022~\010\377\377\377\377\007\030\200\243\0050\200\260T8\200\200@@\200\200@h\001r\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004testr\006\n\004test\370\001\200\233\356\002\030\000\"\020autoscalit-topic(\0028\001@\001\240\001\000\252\001\000\272\001\026/Root/autoscalit-topic\322\001\000\332\001\000\342\001\000\352\001\005/Root\372\001\010\010\000\030\0010\0018\000\232\002\r\010\003\020k\030\254\002 Z(\0360\003\242\002\020\010\000\030\0010\0018\204\200\204\200\200\200\204\200\001j\000r\022\t\347:\332I\227B\001h\021`\010\000\000\006\000\000\000\202\001\004\n\002\010\000" } CmdWrite { Key: "_txinfo" Value: "\020\214\220\261\247\3442\030\231\247\200\2 2025-04-17T18:04:17.772620Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:7494344515417423779:2466] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-04-17T18:04:17.773178Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [6:7494344515417423824:2456], Recipient [6:7494344515417423695:2456]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-17T18:04:17.773188Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7494344515417423795:2843], Recipient [6:7494344472467749607:2144]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:17.773210Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:17.773231Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-17T18:04:17.773792Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [6:7494344515417423695:2456], Recipient [6:7494344515417423695:2456]: NKikimrClient.TResponse Status: 1 Cookie: 5 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-17T18:04:17.773826Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-17T18:04:17.773843Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:04:17.773865Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute 
txs with state EXECUTED 2025-04-17T18:04:17.773879Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State EXECUTED 2025-04-17T18:04:17.773897Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 State EXECUTED FrontTxId 281474976715673 2025-04-17T18:04:17.773917Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T18:04:17.773933Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState WAIT_RS_ACKS 2025-04-17T18:04:17.773948Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T18:04:17.773973Z node 6 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-04-17T18:04:17.773984Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T18:04:17.773997Z node 6 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-04-17T18:04:17.774016Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715673 to the list for deletion 2025-04-17T18:04:17.774035Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState DELETING 2025-04-17T18:04:17.774059Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715673 2025-04-17T18:04:17.774104Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:04:17.774165Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [6:7494344515417423695:2456], Recipient [6:7494344515417423695:2456]: NKikimr::TEvKeyValue::TEvCollect 2025-04-17T18:04:17.774334Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [6:7494344515417423695:2456], Recipient [6:7494344515417423695:2456]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715673" IncludeFrom: true To: "tx_00000281474976715673" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\214\220\261\247\3442\030\231\247\200\200\200\200@(\240\215\0060\214\220\261\247\34428\231\247\200\200\200\200@" StorageChannel: INLINE } 2025-04-17T18:04:17.774455Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [6:7494344515417423827:2456], Recipient [6:7494344515417423695:2456]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-17T18:04:17.774901Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [6:7494344515417423826:2470], Recipient [6:7494344515417423695:2456]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T18:04:17.775214Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [6:7494344515417423695:2456], Recipient [6:7494344515417423695:2456]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-17T18:04:17.775238Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-17T18:04:17.775252Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:04:17.775268Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-17T18:04:17.775292Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State DELETING 2025-04-17T18:04:17.775310Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715673 2025-04-17T18:04:17.775454Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender 
[6:7494344515417423695:2456], Recipient [6:7494344515417423695:2456]: NKikimr::TEvKeyValue::TEvCollect 2025-04-17T18:04:17.775565Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [6:7494344515417423830:2471], Recipient [6:7494344515417423695:2456]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-17T18:04:17.779833Z node 6 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-17T18:04:17.779914Z node 6 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "autoscalit-topic" 2025-04-17T18:04:17.780030Z node 6 :PQ_READ_PROXY DEBUG: Describe topic actor for path autoscalit-topic 2025-04-17T18:04:17.817010Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344515417423695:2456], Partition 0, Sender [0:0:0], Recipient [6:7494344515417423754:2460], Cookie: 0 2025-04-17T18:04:17.817096Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344515417423754:2460]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:17.817124Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:17.817179Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:17.817257Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:17.817290Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:17.817326Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:17.918489Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344515417423695:2456], Partition 0, Sender [0:0:0], Recipient [6:7494344515417423754:2460], Cookie: 0 2025-04-17T18:04:17.918561Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344515417423754:2460]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:17.918585Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:17.918633Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:17.918698Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:17.918725Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:17.918758Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T18:04:17.946046Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7494344472467749607:2144]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:04:17.946092Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:04:17.946162Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7494344472467749607:2144], Recipient [6:7494344472467749607:2144]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:04:17.946206Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:04:18.025846Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344515417423695:2456], Partition 0, Sender [0:0:0], Recipient [6:7494344515417423754:2460], Cookie: 0 2025-04-17T18:04:18.025944Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344515417423754:2460]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:18.025975Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:18.026033Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:18.026111Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:18.026158Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:18.026209Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0
>> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithEmptyName
>> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD]
>> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt]
>> THealthCheckTest::TestTabletIsDead [GOOD]
>> THealthCheckTest::TestBootingTabletIsNotDead
>> KqpService::SwitchCache-UseCache
>> TestYmqHttpProxy::TestSendMessageWithAttributes
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt]
>> KqpQueryService::ExecuteCollectMeta [GOOD]
>> KqpQueryService::ExecuteQuery
>> KqpQueryService::ShowCreateTable [GOOD]
>> KqpQueryService::ShowCreateTableDisable
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK
>> KqpQueryService::DdlGroup [GOOD]
>> KqpQueryService::DdlPermission
>> THealthCheckTest::SpecificServerless [GOOD]
>> THealthCheckTest::SpecificServerlessWithExclusiveNodes
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt]
>> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery [GOOD]
>> KqpQueryService::CloseSessionsWithLoad
>> THealthCheckTest::ServerlessBadTablets [GOOD]
>> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes
>> KqpQueryService::IssuesInCaseOfSuccess [GOOD]
>> KqpQueryService::MaterializeTxResults
>> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter
>> THealthCheckTest::StorageLimit80 [GOOD]
>> THealthCheckTest::StorageNoQuota
>> TestKinesisHttpProxy::TestRequestWithIAM [GOOD]
>> KqpService::Shutdown [GOOD]
>> KqpService::SessionBusyRetryOperationSync
>> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::ReadFromTimestamp_PQv1
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt]
>> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD]
>> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD]
>> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific
>> THealthCheckTest::ShardsLimit905 [GOOD]
>> THealthCheckTest::ShardsLimit800
>> TestKinesisHttpProxy::DoubleCreateStream [GOOD]
>> TPersQueueMirrorer::TestBasicRemote [GOOD]
>> TPersQueueMirrorer::ValidStartStream
>> TestKinesisHttpProxy::TestRequestNoAuthorization
>> THealthCheckTest::IgnoreOtherGenerations [GOOD]
>> THealthCheckTest::IgnoreServerlessWhenNotSpecific
>> TestKinesisHttpProxy::ListShards
>> TestKinesisHttpProxy::GoodRequestGetRecords
>> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD]
>> THealthCheckTest::Issues100GroupsMerging [GOOD]
>> THealthCheckTest::Issues100VCardMerging
>> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD]
>> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues
>> THealthCheckTest::NoBscResponse [GOOD]
>> THealthCheckTest::LayoutIncorrect
>> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD]
>> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown
>> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD]
>> TestYmqHttpProxy::TestReceiveMessageWithAttemptId
>> TestYmqHttpProxy::TestCreateQueueWithAllAttributes
>> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD]
>> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes
>> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD]
>> KqpQueryService::ExecuteQuery [GOOD]
>> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback
>> KqpQueryService::ShowCreateTableDisable [GOOD]
>> KqpQueryService::ShowCreateSysView
>> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD]
>> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown
>> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_OffsetCommit
>> TestYmqHttpProxy::TestSetQueueAttributes
>> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD]
>> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes
>> KqpQueryService::DdlPermission [GOOD]
>> KqpQueryService::DdlSecret
>> KqpQueryService::MaterializeTxResults [GOOD]
>> KqpQueryService::MixedReadQueryWithoutStreamLookup
>> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD]
>> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD]
>> THealthCheckTest::HealthCheckConfigUpdate
>> KqpService::SessionBusyRetryOperationSync [GOOD]
>> KqpService::SwitchCache+UseCache
>> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD]
>> THealthCheckTest::LayoutIncorrect [GOOD]
>> THealthCheckTest::LayoutCorrect
------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD]
Test command err:
2025-04-17T18:04:03.309504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344458225117932:2071];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:03.309648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b9/r3tmp/tmps3qY9L/pdisk_1.dat
2025-04-17T18:04:03.752117Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:04:03.767909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:04:03.768696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:04:03.779590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 32035, node 1
2025-04-17T18:04:03.954894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:03.954945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:03.954956Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:03.955094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29650
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:04.411858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:06.516544Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344469872947206:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:06.517005Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b9/r3tmp/tmpjGw2n5/pdisk_1.dat 2025-04-17T18:04:06.641913Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:06.644279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:06.644356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:06.656515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6062, node 2 2025-04-17T18:04:06.705101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:06.705133Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:06.705139Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:06.705248Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16060 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:04:06.902475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:04:15.271993Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:15.272430Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:15.272592Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:15.274095Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:15.274519Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:15.274624Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b9/r3tmp/tmpNC2JUx/pdisk_1.dat 2025-04-17T18:04:15.589965Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61977, node 3 TClient is connected to server localhost:19068 2025-04-17T18:04:16.025460Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:16.025530Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:16.025569Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:16.026020Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:23.536872Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:23.537129Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:23.537300Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:23.538201Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:23.538288Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:23.538332Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b9/r3tmp/tmp7TjXmT/pdisk_1.dat 2025-04-17T18:04:23.863488Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24268, node 5 TClient is connected to server localhost:14614 2025-04-17T18:04:24.301070Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:24.301135Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:24.301177Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:24.302008Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-17T18:04:29.311985Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T18:04:29.312321Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:04:29.312385Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b9/r3tmp/tmpxlDuoq/pdisk_1.dat
2025-04-17T18:04:29.629740Z node 7 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14021, node 7
TClient is connected to server localhost:25225
2025-04-17T18:04:30.076612Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:30.076676Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:30.076720Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:30.077185Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
>> TestKinesisHttpProxy::ListShards [GOOD]
>> BsControllerConfig::MoveGroups [GOOD]
>> THealthCheckTest::StorageNoQuota [GOOD]
>> TestKinesisHttpProxy::TestUnauthorizedPutRecords
>> TestKinesisHttpProxy::ListShardsEmptyFields
>> THealthCheckTest::ShardsLimit800 [GOOD]
>> THealthCheckTest::ShardsNoLimit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD]
Test command err:
Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2920:2116]
IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2920:2116]
Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2920:2116]
2025-04-17T18:03:53.965660Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot
2025-04-17T18:03:53.969656Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored
2025-04-17T18:03:53.969952Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started
2025-04-17T18:03:53.971436Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-17T18:03:53.971828Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig
2025-04-17T18:03:53.972278Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo
2025-04-17T18:03:53.972300Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo
2025-04-17T18:03:53.972512Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute
2025-04-17T18:03:53.979816Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete
2025-04-17T18:03:53.979946Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx
2025-04-17T18:03:53.980115Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false
2025-04-17T18:03:53.980246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute
tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.980351Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:03:53.980437Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-04-17T18:03:53.992654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:03:53.992808Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:54.003553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:03:54.003654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:54.003711Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:03:54.003777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:54.003869Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:03:54.003905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:54.003935Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:03:54.003982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:54.014675Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:03:54.014774Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:54.025628Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:03:54.025776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T18:03:54.027110Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T18:03:54.027165Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T18:03:54.027359Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} 
Console connection service started 2025-04-17T18:03:54.027415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-17T18:03:54.040543Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" 
ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T18:03:54.042038Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-17T18:03:54.042097Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-17T18:03:54.042124Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-17T18:03:54.042147Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-17T18:03:54.042171Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-17T18:03:54.042215Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-17T18:03:54.042240Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-17T18:03:54.042300Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-17T18:03:54.042329Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-17T18:03:54.042353Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-17T18:03:54.042376Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-17T18:03:54.042422Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-17T18:03:54.042449Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-17T18:03:54.042473Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-17T18:03:54.042497Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-17T18:03:54.042532Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-17T18:03:54.042558Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-17T18:03:54.042585Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-17T18:03:54.042606Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-17T18:03:54.042629Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-17T18:03:54.042651Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 
78:1000 Path# /dev/disk1 2025-04-17T18:04:23.146812Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-04-17T18:04:23.146835Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-04-17T18:04:23.146866Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-04-17T18:04:23.146899Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-04-17T18:04:23.146927Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-04-17T18:04:23.146956Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-04-17T18:04:23.146984Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-04-17T18:04:23.147011Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-04-17T18:04:23.147039Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-04-17T18:04:23.147065Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-04-17T18:04:23.147092Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-04-17T18:04:23.147119Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-04-17T18:04:23.147148Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-04-17T18:04:23.147177Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-04-17T18:04:23.147207Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-04-17T18:04:23.147233Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-04-17T18:04:23.147274Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-04-17T18:04:23.147304Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-04-17T18:04:23.147332Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-04-17T18:04:23.147362Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-04-17T18:04:23.147391Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2025-04-17T18:04:23.147420Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2025-04-17T18:04:23.147449Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2025-04-17T18:04:23.147476Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 
186:1000 Path# /dev/disk1 2025-04-17T18:04:23.147504Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2025-04-17T18:04:23.147533Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2025-04-17T18:04:23.147563Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2025-04-17T18:04:23.147592Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2025-04-17T18:04:23.147621Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2025-04-17T18:04:23.147652Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2025-04-17T18:04:23.147682Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2025-04-17T18:04:23.147709Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2025-04-17T18:04:23.147738Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2025-04-17T18:04:23.147766Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2025-04-17T18:04:23.147793Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2025-04-17T18:04:23.147823Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2025-04-17T18:04:23.147851Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2025-04-17T18:04:23.147886Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2025-04-17T18:04:23.147918Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2025-04-17T18:04:23.147946Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2025-04-17T18:04:23.147976Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2025-04-17T18:04:23.148001Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2025-04-17T18:04:23.148030Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2025-04-17T18:04:23.148056Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-04-17T18:04:23.148083Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2025-04-17T18:04:23.148111Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2025-04-17T18:04:23.148143Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-04-17T18:04:23.148171Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 
194:1000 Path# /dev/disk1 2025-04-17T18:04:23.148198Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2025-04-17T18:04:23.148233Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-04-17T18:04:23.148261Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2025-04-17T18:04:23.148289Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2025-04-17T18:04:23.148317Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-04-17T18:04:23.148349Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2025-04-17T18:04:23.148381Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2025-04-17T18:04:23.148410Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-04-17T18:04:23.148438Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2025-04-17T18:04:23.148466Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2025-04-17T18:04:23.148492Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-04-17T18:04:23.148523Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2025-04-17T18:04:23.148552Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2025-04-17T18:04:23.148578Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2025-04-17T18:04:23.148606Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2025-04-17T18:04:23.148633Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2025-04-17T18:04:23.148673Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2025-04-17T18:04:23.148702Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2025-04-17T18:04:23.148730Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2025-04-17T18:04:23.148760Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2025-04-17T18:04:23.425352Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2166} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.282858s 2025-04-17T18:04:23.425591Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:666} StateWork event processing took too much time Type# 2146435078 Duration# 0.283127s 2025-04-17T18:04:23.458723Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-17T18:04:23.544792Z node 151 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-04-17T18:04:23.560240Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-17T18:04:23.648788Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-04-17T18:04:23.664726Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-17T18:04:23.757540Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-04-17T18:04:23.774705Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl |85.8%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::StorageNoQuota [GOOD] Test command err: 2025-04-17T18:04:08.475637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.476006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.476216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.476990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.477289Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.477360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b8/r3tmp/tmpipEBzu/pdisk_1.dat 2025-04-17T18:04:08.871380Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24805, node 1 TClient is connected to server localhost:8783 2025-04-17T18:04:09.247907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.247960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.247993Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.248178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:15.884857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:498:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:15.885097Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:15.885162Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:15.886255Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2157], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:15.886534Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:15.886772Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b8/r3tmp/tmpt12TFd/pdisk_1.dat 2025-04-17T18:04:16.169538Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2982, node 3 TClient is connected to server localhost:29099 2025-04-17T18:04:16.469129Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:16.469174Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:16.469202Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:16.469674Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:23.186083Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:491:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:23.186489Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:23.186758Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:23.187491Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:486:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:23.187763Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:23.188075Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b8/r3tmp/tmpSV0Q4b/pdisk_1.dat 2025-04-17T18:04:23.537774Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28849, node 5 TClient is connected to server localhost:1408 2025-04-17T18:04:23.842307Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:23.842352Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:23.842373Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:23.842882Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:31.049929Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:31.050327Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:31.050463Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:31.051607Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:31.051919Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:31.052131Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b8/r3tmp/tmpzqLgLM/pdisk_1.dat 2025-04-17T18:04:31.337899Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29083, node 7 TClient is connected to server localhost:13959 2025-04-17T18:04:31.650016Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:31.650064Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:31.650091Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:31.650315Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] |85.8%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged >> KqpQueryService::ShowCreateSysView [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> TestYmqHttpProxy::TestListQueues >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> TestYmqHttpProxy::BillingRecordsForJsonApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] Test command err: 2025-04-17T18:04:08.989355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.989691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.989880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.990665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.990982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.991052Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cd/r3tmp/tmpDZezbD/pdisk_1.dat 2025-04-17T18:04:09.398520Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26661, node 1 TClient is connected to server localhost:6978 2025-04-17T18:04:09.785851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.785914Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.785950Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.786167Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:16.584701Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.584973Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.585078Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.585963Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.586249Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.586330Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cd/r3tmp/tmpZVyx8U/pdisk_1.dat 2025-04-17T18:04:16.892675Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28110, node 3 TClient is connected to server localhost:17286 2025-04-17T18:04:17.276533Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.276590Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.276625Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.277092Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:21.707588Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:21.707913Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:21.708061Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cd/r3tmp/tmpg4gUfj/pdisk_1.dat 2025-04-17T18:04:22.033350Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19650, node 5 TClient is connected to server localhost:27082 2025-04-17T18:04:22.451477Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:22.451533Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:22.451573Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:22.452036Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:26.923592Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:169:2215], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:26.923792Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:26.924029Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cd/r3tmp/tmpPZzSKz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19209, node 7 TClient is connected to server localhost:1113 2025-04-17T18:04:31.957507Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:31.957749Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:31.957900Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cd/r3tmp/tmpSHcvHR/pdisk_1.dat 2025-04-17T18:04:32.298737Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32362, node 8 TClient is connected to server localhost:22836 2025-04-17T18:04:32.795425Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:32.795497Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:32.795540Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:32.796202Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] >> KqpQueryServiceScripts::ParseScript ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateSysView [GOOD] Test command err: Trying to start YDB, gRPC: 8927, MsgBus: 26006 2025-04-17T18:04:16.533476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344513722761729:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:16.546242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c44/r3tmp/tmpv41rKv/pdisk_1.dat 2025-04-17T18:04:16.942487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:16.942956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:16.944571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:16.962398Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8927, node 1 2025-04-17T18:04:17.106792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.106815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.106822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.106935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26006 TClient is connected to server localhost:26006 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:17.745558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.766900Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:17.782032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:17.930235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:04:18.106363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.194945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:19.703586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344526607665357:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:19.703743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.045983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.075088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.100841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.175768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.211069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.292611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.332617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.420761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344530902633320:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.420850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.421063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344530902633325:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.425038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:20.440463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344530902633327:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:20.528958Z node 1 :TX_PROXY ERROR: Actor# [1:7494344530902633384:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:21.535133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344513722761729:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:21.536120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:21.675393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64337, MsgBus: 27918 2025-04-17T18:04:22.796343Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344538593473163:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:22.796387Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c44/r3tmp/tmpXSZIlf/pdisk_1.dat 2025-04-17T18:04:23.005087Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:23.029209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:23.029301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:23.030910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64337, node 2 2025-04-17T18:04:23.198759Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:23.198783Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:23.198790Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:23.198910Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27918 TClient is connected to server localhost:27918 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:23.644469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.665613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.749082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497 ... 56Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344555773344766:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:26.384213Z node 2 :TX_PROXY ERROR: Actor# [2:7494344555773344823:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:27.486264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:27.569302Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494344560068312539:2526], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: SHOW CREATE statement is not supported 2025-04-17T18:04:27.570435Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDhjNDUxNTYtNWFmMmE5OGEtZWJhZjQ2MC1kYjE2YmM4Mg==, ActorId: [2:7494344560068312454:2512], ActorState: ExecuteState, TraceId: 01js2ervgt83chg9dpypqk2g73, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:04:27.798274Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344538593473163:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:27.798346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2721, MsgBus: 3551 2025-04-17T18:04:28.457346Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344562449671793:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:28.457443Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c44/r3tmp/tmpAmVYVg/pdisk_1.dat 2025-04-17T18:04:28.627156Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:28.644291Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:28.644373Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:28.645688Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2721, node 3 2025-04-17T18:04:28.730916Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:28.730940Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:28.730948Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:28.731071Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3551 TClient is connected to server localhost:3551 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
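The ShowCreateSysView failures in this run come from two compile-time rejections visible in the log: the SHOW CREATE statement itself (GENERIC_ERROR, "SHOW CREATE statement is not supported") and a direct read of the backing sys view path (SCHEME_ERROR on db.[/Root/.sys/show_create]). A minimal sketch of queries that would reproduce both errors against this build follows; the exact statements and the table name are assumptions for illustration, not taken from the test source.

    -- Hypothetical YQL; the table name is illustrative, not from the test.
    SHOW CREATE TABLE `/Root/SomeTable`;      -- expected: GENERIC_ERROR, "SHOW CREATE statement is not supported"
    SELECT * FROM `/Root/.sys/show_create`;   -- expected: SCHEME_ERROR, "Cannot find table ... or you do not have access permissions"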
2025-04-17T18:04:29.233089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:29.238587Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:29.254039Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:29.345053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:29.509868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:29.588601Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:31.907364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344575334575444:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:31.907441Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:31.970827Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.011386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.045528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.073312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.107816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.181159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.254608Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.337928Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344579629543414:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.338034Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.338374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344579629543419:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.342072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:32.352937Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344579629543422:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:32.409173Z node 3 :TX_PROXY ERROR: Actor# [3:7494344579629543477:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:33.435747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:33.457419Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344562449671793:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:33.457481Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:33.504924Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494344583924511193:2526], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:04:33.505112Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGIwZmNmM2MtZTU0MGE0ZDYtNjVhYzc1YzEtMjk5Mjc2Y2U=, ActorId: [3:7494344583924511107:2512], ActorState: ExecuteState, TraceId: 01js2es19yfw3j89j5f6eezj2m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:04:33.543139Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494344583924511212:2530], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:04:33.543350Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGIwZmNmM2MtZTU0MGE0ZDYtNjVhYzc1YzEtMjk5Mjc2Y2U=, ActorId: [3:7494344583924511107:2512], ActorState: ExecuteState, TraceId: 01js2es1b72s6b3djvqah79x20, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] Test command err: 2025-04-17T18:04:08.891630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.891948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.892126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.892719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.892962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.893029Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b2/r3tmp/tmpT2vKoM/pdisk_1.dat 2025-04-17T18:04:09.279804Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16014, node 1 TClient is connected to server localhost:19216 2025-04-17T18:04:09.655339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.655395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.655423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.655594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-1" reason: "YELLOW-e9e2-1231c6b1-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-4847-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-ef3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-4847-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 12001 } 2025-04-17T18:04:16.464465Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:498:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.464778Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.464882Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.466297Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2157], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.466567Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.466780Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b2/r3tmp/tmpE3eeFH/pdisk_1.dat 2025-04-17T18:04:16.768427Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3693, node 3 TClient is connected to server localhost:24245 2025-04-17T18:04:17.199408Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.199464Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.199495Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.199919Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:21.694269Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:21.694579Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:21.694706Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b2/r3tmp/tmppzjXn8/pdisk_1.dat 2025-04-17T18:04:21.981897Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9258, node 5 TClient is connected to server localhost:9603 2025-04-17T18:04:22.379428Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:22.379486Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:22.379519Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:22.379757Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:27.035086Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:528:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:27.035434Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:27.035700Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b2/r3tmp/tmppWInc8/pdisk_1.dat 2025-04-17T18:04:27.391301Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15500, node 7 TClient is connected to server localhost:26629 2025-04-17T18:04:27.855434Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:27.855505Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:27.855550Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:27.856250Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:32.893347Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:446:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:32.893617Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:32.893744Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b2/r3tmp/tmpr63sVn/pdisk_1.dat 2025-04-17T18:04:33.241304Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14961, node 10 TClient is connected to server localhost:26211 2025-04-17T18:04:33.762839Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:33.762908Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:33.762954Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:33.764003Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> TestYmqHttpProxy::TestTagQueue >> KqpQueryService::AlterTempTable |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> KqpQueryService::TableSink_Htap+withOltpSink >> THealthCheckTest::LayoutCorrect [GOOD] >> KqpDocumentApi::RestrictWriteExplicitPrepare >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> KqpService::SessionBusy >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: 2025-04-17T18:04:08.587434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.587781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.587983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.588821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.589158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.589255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e8/r3tmp/tmptTPJjr/pdisk_1.dat 2025-04-17T18:04:08.944995Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27963, node 1 TClient is connected to server localhost:1542 2025-04-17T18:04:09.332458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.332521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.332553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.332760Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:16.595749Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.596097Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.596251Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.597568Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.597938Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.598045Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e8/r3tmp/tmpA44CBw/pdisk_1.dat 2025-04-17T18:04:16.866105Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23449, node 3 TClient is connected to server localhost:13046 2025-04-17T18:04:17.247466Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.247543Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.247594Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.248036Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-17T18:04:24.131308Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:24.131537Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:24.131693Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:24.132494Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:24.132566Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:24.132601Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e8/r3tmp/tmpLct5Xn/pdisk_1.dat 2025-04-17T18:04:24.436101Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11780, node 5 TClient is connected to server localhost:17585 2025-04-17T18:04:24.881194Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:24.881251Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:24.881284Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:24.882029Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-17T18:04:29.448870Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:29.449118Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:29.449212Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e8/r3tmp/tmpk0F1G7/pdisk_1.dat 2025-04-17T18:04:29.833580Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12116, node 7 TClient is connected to server localhost:2046 2025-04-17T18:04:30.259838Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:30.259897Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:30.259940Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:30.260988Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:35.397872Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:35.398208Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:35.398358Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e8/r3tmp/tmpEw7xcC/pdisk_1.dat 2025-04-17T18:04:35.971880Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21331, node 9 TClient is connected to server localhost:16036 2025-04-17T18:04:36.617899Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:36.617972Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:36.618027Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:36.618919Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpQueryService::Write ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: 2025-04-17T18:04:08.583714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.584043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.584196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.584829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.585087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.585142Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015f5/r3tmp/tmp3iI5mi/pdisk_1.dat 2025-04-17T18:04:08.952686Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17012, node 1 TClient is connected to server localhost:18730 2025-04-17T18:04:09.336391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.336446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.336474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.336667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:13.334062Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:446:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:13.334515Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:13.334615Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015f5/r3tmp/tmp8Yaeb0/pdisk_1.dat 2025-04-17T18:04:13.653894Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15496, node 3 TClient is connected to server localhost:3030 2025-04-17T18:04:14.010795Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:14.010854Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:14.010884Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:14.011410Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:18.142007Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:18.142347Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:18.142486Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015f5/r3tmp/tmp4XNwNM/pdisk_1.dat 2025-04-17T18:04:18.501851Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16424, node 5 TClient is connected to server localhost:21750 2025-04-17T18:04:18.923145Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:18.923212Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:18.923251Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:18.923567Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:18.999530Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:18.999677Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:19.031183Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:30.664089Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:30.664318Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:30.664404Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015f5/r3tmp/tmpZhyZAD/pdisk_1.dat 2025-04-17T18:04:31.020231Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18289, node 7 TClient is connected to server localhost:22821 2025-04-17T18:04:31.423482Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:31.423534Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:31.423560Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:31.423962Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:36.467659Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:36.467908Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:36.468021Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015f5/r3tmp/tmpklidBj/pdisk_1.dat 2025-04-17T18:04:36.784279Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8536, node 9 TClient is connected to server localhost:27278 2025-04-17T18:04:37.131197Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:37.131247Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:37.131275Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:37.131898Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 65022, MsgBus: 8891 2025-04-17T18:03:55.856477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344421691820980:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:55.856864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00182e/r3tmp/tmpwiyb88/pdisk_1.dat 2025-04-17T18:03:56.220630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:56.272266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:56.272988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:56.275874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65022, node 1 2025-04-17T18:03:56.424908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:56.424938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:56.424945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:56.425078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8891 TClient is connected to server localhost:8891 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:57.047038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:58.709596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344434576723402:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:58.709724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.171232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T18:03:59.321649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344438871690823:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.321740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.321971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344438871690828:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.325631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T18:03:59.333363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344438871690830:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:03:59.408600Z node 1 :TX_PROXY ERROR: Actor# [1:7494344438871690870:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:00.402686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:00.812626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T18:04:00.856375Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344421691820980:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:00.856439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:01.230552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:04:01.705041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:04:02.083956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:04:02.480244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:04:02.516185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.054608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.086745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.088026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.089909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } ... .383975Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344578247133672:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:31.384078Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:31.384320Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344578247133677:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:31.388489Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T18:04:31.402126Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344578247133679:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:04:31.462949Z node 4 :TX_PROXY ERROR: Actor# [4:7494344578247133745:2407] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:31.996431Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.394082Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344561067263713:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:32.394205Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:32.615132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T18:04:33.253076Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:33.891713Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:04:34.468301Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:04:35.113364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:04:35.168681Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T18:04:37.638348Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710710:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TPersQueueMirrorer::ValidStartStream [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> TestKinesisHttpProxy::TestWrongStream >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] Test command err: Trying to start YDB, gRPC: 2174, MsgBus: 16460 2025-04-17T18:04:16.509228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344512432664541:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:16.509648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5a/r3tmp/tmpCKdYnr/pdisk_1.dat 2025-04-17T18:04:16.950510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:16.950605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:16.963624Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:16.982239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2174, node 1 2025-04-17T18:04:17.107652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.107675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.107686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.107806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16460 TClient is connected to server localhost:16460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:17.745954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:17.771327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:17.792605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.965867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.132513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.204892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:19.508862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344525317568057:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:19.508952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.042866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.078634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.161071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.196922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.236514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.272042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.343296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.409179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344529612536023:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.409275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.409510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344529612536029:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.412912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:20.423238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344529612536031:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:20.504319Z node 1 :TX_PROXY ERROR: Actor# [1:7494344529612536087:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:21.506987Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344512432664541:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:21.508842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32550, MsgBus: 11542 2025-04-17T18:04:22.691935Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344535684548090:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:22.692009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5a/r3tmp/tmpOqArOk/pdisk_1.dat 2025-04-17T18:04:22.789700Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32550, node 2 2025-04-17T18:04:22.818593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:22.818671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:22.846889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:22.910921Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:22.910944Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:22.910951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:22.911068Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11542 TClient is connected to server localhost:11542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:04:23.387425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:04:23.400982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.479375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.629739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... : true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:04:29.211428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:04:29.216760Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:29.232184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:29.310681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:29.487453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:29.559118Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:04:31.868907Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344575225644012:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:31.868991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:31.915302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:31.952288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:31.986351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.021333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.051539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.124007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.181822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:32.273012Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344579520611979:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.273088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.273235Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344579520611984:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.276036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:32.284920Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344579520611986:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:32.346198Z node 3 :TX_PROXY ERROR: Actor# [3:7494344579520612043:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:33.354000Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344562340740336:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:33.355057Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:33.490118Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWE0NTk0ZDktYjY2MTZjM2YtNjU4NzQwNDgtZjQ3MDg1MmY=, ActorId: [3:7494344583815579676:2512], ActorState: ReadyState, TraceId: 01js2es1a6e73aeg6cjspsf97s, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 23250, MsgBus: 15370 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5a/r3tmp/tmpYEQFhO/pdisk_1.dat 2025-04-17T18:04:34.468285Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:34.469759Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23250, node 4 2025-04-17T18:04:34.518751Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:34.518842Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:34.521664Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:34.578308Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:34.578327Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:34.578333Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:34.578440Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15370 TClient is connected to server localhost:15370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
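[Editor's note] The recurring WARN sequence above traces one pattern: fetch of pool `default` returns NOT_FOUND, a CreateResourcePool suboperation is proposed, the creator schedules a retry ("completed, doublechecking"), and a racing creator then hits "path exist, request accepts it", which is treated as success. The sketch below shows that create-if-missing loop in the abstract; `fetch` and `create` are hypothetical stand-ins, not YDB SDK or actor calls.

```python
import time

class AlreadyExists(Exception):
    """Raised when a concurrent creator won the race."""

def ensure_default_pool(fetch, create, retries=3, delay=0.1):
    """Illustrative create-if-missing loop mirroring the records above:
    fetch -> NOT_FOUND -> create -> 'path exist' treated as success."""
    for attempt in range(retries):
        pool = fetch()
        if pool is not None:
            return pool
        try:
            create()
        except AlreadyExists:
            # Mirrors 'path exist, request accepts it': losing the race
            # is not an error, because the pool now exists either way.
            pass
        time.sleep(delay * (attempt + 1))  # back off before doublechecking
    raise TimeoutError("default pool still missing after retries")

# Tiny self-contained demo with in-memory stand-ins.
state = {"pool": None}
def fetch(): return state["pool"]
def create(): state["pool"] = "default"
print(ensure_default_pool(fetch, create))  # -> 'default'
```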
2025-04-17T18:04:35.087120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:35.093396Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:38.214942Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344604567076305:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.215054Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.215762Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344604567076317:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.220294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:04:38.237965Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344604567076319:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:04:38.300248Z node 4 :TX_PROXY ERROR: Actor# [4:7494344604567076370:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:38.386820Z node 4 :TX_PROXY ERROR: Actor# [4:7494344604567076401:2350] txid# 281474976715660, issues: { message: "Type \'TzTimestamp\' specified for column \'payload\' is not supported by storage" severity: 1 } 2025-04-17T18:04:38.387230Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2I4NjY0NTEtODZiZGJhY2ItYTZiNzI2NDgtYTI5MGZjMzQ=, ActorId: [4:7494344604567076303:2329], ActorState: ExecuteState, TraceId: 01js2es2x099tzkvvthpf2v25g, Create QueryResponse for error on request, msg: >> KqpQueryService::TableSink_OlapUpdate >> KqpQueryServiceScripts::TestPaging >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7369, MsgBus: 61297 2025-04-17T18:04:16.505586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344514048517208:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:16.505659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c79/r3tmp/tmpKLI3wx/pdisk_1.dat 2025-04-17T18:04:16.910649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:16.945561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:16.945636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:16.947005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7369, node 1 2025-04-17T18:04:17.108019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.108041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.108046Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.108120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61297 TClient is connected to server localhost:61297 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:17.760121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.784413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.951341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.080457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.144559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:19.510337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344526933420866:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:19.510438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.042941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.115856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.185819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.231753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.265202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.318341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.362282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.447148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344531228388840:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.447197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.447366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344531228388845:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.450295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:20.461078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344531228388847:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:20.526504Z node 1 :TX_PROXY ERROR: Actor# [1:7494344531228388906:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:21.505918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344514048517208:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:21.505976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:21.678750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:21.724100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:21.766579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18160, MsgBus: 7156 2025-04-17T18:04:24.526390Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344545623049843:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:24.534274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c79/r3tmp/tmpFKjE3m/pdisk_1.dat 2025-04-17T18:04:24.661522Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:24.678049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:24.678135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:24.679303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18160, node 2 2025-04-17T18:04:24.742442Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:24.742465Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:24.742471Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:24.742567Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7156 TClient is connected to server localhost:7156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:25.183931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:25.199271Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at ... 14;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.606384Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.609436Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.611344Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.616150Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.616772Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.622738Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.622985Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.628187Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.629549Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.633565Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.635640Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.639226Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.641508Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.645102Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.647186Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.654216Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.656507Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.660231Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.663893Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.666017Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.671999Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.672419Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.678965Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.680079Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.685388Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.688131Z 
node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.692366Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.694511Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.698699Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.700387Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.705037Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.706586Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.711110Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.712212Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.717253Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.718618Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.723803Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.723983Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.730441Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.732080Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.736341Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.738121Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:36.747009Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:04:37.254504Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:04:37.254604Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:04:37.254974Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7494344590938224307:2362];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899,72075186224037907;receive=72075186224037903; 2025-04-17T18:04:37.255017Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:04:37.255064Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7494344590938224307:2362];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899,72075186224037907;receive=72075186224037903; 2025-04-17T18:04:37.255394Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7494344590938224307:2362];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899;receive=72075186224037907; 2025-04-17T18:04:37.255456Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7494344590938224307:2362];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899;receive=72075186224037907; 2025-04-17T18:04:37.255800Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 26041, MsgBus: 7989 2025-04-17T18:03:55.855451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344423689379405:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:55.856259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001839/r3tmp/tmpfYGyYl/pdisk_1.dat 2025-04-17T18:03:56.224015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:56.287209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:03:56.287435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:03:56.288853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26041, node 1 2025-04-17T18:03:56.424581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:03:56.424604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:03:56.424611Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:03:56.424740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7989 TClient is connected to server localhost:7989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:03:57.047348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:03:58.740659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344436574281960:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:58.740771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.166202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T18:03:59.314530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344440869249383:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.314618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.314938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344440869249388:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:03:59.319407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T18:03:59.329250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344440869249390:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:03:59.393901Z node 1 :TX_PROXY ERROR: Actor# [1:7494344440869249430:2404] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:00.403414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:00.812624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T18:04:00.855708Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344423689379405:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:00.855779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:01.264008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:04:01.762962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:04:02.225464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:04:02.633643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:04:02.664398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.202029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.229970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.231339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-17T18:04:04.232432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_ ... 81474976715658:2, at schemeshard: 72057594046644480 2025-04-17T18:04:32.614989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344580406356773:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.615072Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.615599Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344580406356778:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:32.620118Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-17T18:04:32.630396Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344580406356780:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:04:32.701388Z node 4 :TX_PROXY ERROR: Actor# [4:7494344580406356820:2399] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:33.286028Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:33.558768Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344563226486809:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:33.559128Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:33.845128Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T18:04:34.477333Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-17T18:04:35.102226Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:04:35.727586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-17T18:04:36.345126Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:04:36.452582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.813194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> KqpQueryService::PeriodicTaskInSessionPool >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |85.9%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryService::TableSink_Olap_Replace >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> THealthCheckTest::ShardsNoLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2025-04-17T18:02:43.915075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344113311537873:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.915132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001898/r3tmp/tmpqTIogX/pdisk_1.dat 2025-04-17T18:02:44.193283Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:02:44.447489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.447611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.449475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:02:44.456906Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10729, node 1 2025-04-17T18:02:44.570898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001898/r3tmp/yandex0gedUC.tmp 2025-04-17T18:02:44.570929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001898/r3tmp/yandex0gedUC.tmp 2025-04-17T18:02:44.574535Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001898/r3tmp/yandex0gedUC.tmp 2025-04-17T18:02:44.574715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.779750Z INFO: TTestServer started on Port 21328 GrpcPort 10729 TClient is connected to server localhost:21328 PQClient connected to localhost:10729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.144280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
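[Editor's note] The GenericFederatedQuery test output above walks a three-call connector exchange: DescribeTable, then ListSplits, then ReadSplits, each echoing the same `data_source_instance`, with the test comparing the request it expected against the request the connector actually received ("CRAB Expected" vs "CRAB Actual"). Below is a compact model of that flow under stated assumptions: field names follow the protobuf text printed in the log, but the classes are illustrative stand-ins, not the actual YDB connector protobuf API.

```python
from dataclasses import dataclass

@dataclass(frozen=True)
class DataSourceInstance:
    # Field names taken from the protobuf text in the log above.
    kind: str = "ICEBERG"
    database: str = "pgdb"
    protocol: str = "NATIVE"
    use_tls: bool = False

@dataclass
class Exchange:
    source: DataSourceInstance
    table: str

    def describe_table(self):
        return {"table": self.table, "source": self.source}

    def list_splits(self):
        return [{"select": {"from": self.table, "source": self.source}}]

    def read_splits(self, splits):
        return {"splits": splits, "format": "ARROW_IPC_STREAMING"}

def check(expected, actual):
    # Mirrors the test's 'CRAB Expected / CRAB Actual' comparison: the
    # received request must equal the independently built expected one.
    assert expected == actual, f"mismatch:\n{expected}\n{actual}"

ex = Exchange(DataSourceInstance(), "example_1")
expected = {"table": "example_1",
            "source": DataSourceInstance(kind="ICEBERG", database="pgdb",
                                         protocol="NATIVE", use_tls=False)}
check(expected, ex.describe_table())
```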
2025-04-17T18:02:45.170522Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.182840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:02:45.192096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:45.347143Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:46.892248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126196440361:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.892346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126196440380:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.892399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.897847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.902293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126196440413:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.902682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.920105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344126196440382:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:02:46.976375Z node 1 :TX_PROXY ERROR: Actor# [1:7494344126196440440:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.787988Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344126196440456:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819499Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDRkOWU5MjAtZWE0MDVhODgtMmExYjliNWUtOGExOWFlYmU=, ActorId: [1:7494344126196440350:2336], ActorState: ExecuteState, TraceId: 01js2ens7124h5x9xsw2p7gvkx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.833137Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.189457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.249148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.338397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.447435Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344113311537873:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.447721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344143376309968:2659] === CheckClustersList. 
Ok 2025-04-17T18:02:55.096292Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:55.158507Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:55.211977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344164851146710:2795], Recipient [1:7494344117606505380:2197]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:55.212104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344164851146706:2792], Recipient [1:7494344117606505380:2197]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T18:02:55.212126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:55.298445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:55.298804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:02:55.299013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-17T18:02:55 ... 
Partitions# 0 2025-04-17T18:04:38.327910Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 2025-04-17T18:04:38.327996Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 sending to client partition status 2025-04-17T18:04:38.330537Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-04-17T18:04:38.330615Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 got read request: guid# 8d5161dd-ea5e5c9f-d0b834a3-e767e8e5 2025-04-17T18:04:38.334732Z :INFO: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 5 2025-04-17T18:04:38.336239Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2025-04-17T18:04:38.336376Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2025-04-17T18:04:38.336438Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 clientCommitOffset (empty maybe) clientReadOffset 5 2025-04-17T18:04:38.336483Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2025-04-17T18:04:38.336553Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1744913078080, sizeLag# 1179 2025-04-17T18:04:38.336576Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1TEvPartitionReady. 
Aval parts: 1 2025-04-17T18:04:38.336621Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 performing read request: guid# f538f6e-a728a17-daa2301d-c5168f03, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 1414, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T18:04:38.336729Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 1414 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 1 committedOffset 1 Guid f538f6e-a728a17-daa2301d-c5168f03 2025-04-17T18:04:38.337115Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T18:04:38.337158Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-17T18:04:38.337290Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 1414 endOffset 10 max time lag 0ms effective offset 5 2025-04-17T18:04:38.337324Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 5, current partition end offset: 10 2025-04-17T18:04:38.337463Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-17T18:04:38.337487Z node 9 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T18:04:38.337628Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2025-04-17T18:04:38.338471Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1744913078187 CreateTimestampMS: 1744913078184 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1744913078253 CreateTimestampMS: 1744913078184 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1744913078258 CreateTimestampMS: 1744913078184 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1744913078258 CreateTimestampMS: 1744913078185 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." 
SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1744913078259 CreateTimestampMS: 1744913078185 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551581 RealReadOffset: 9 WaitQuotaTimeMs: 0 EndOffset: 10 StartOffset: 0 } Cookie: 5 } 2025-04-17T18:04:38.338713Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2025-04-17T18:04:38.338763Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid f538f6e-a728a17-daa2301d-c5168f03 has messages 1 2025-04-17T18:04:38.338891Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 read done: guid# f538f6e-a728a17-daa2301d-c5168f03, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 795 2025-04-17T18:04:38.338929Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 response to read: guid# f538f6e-a728a17-daa2301d-c5168f03 2025-04-17T18:04:38.339094Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 Process answer. Aval parts: 0 2025-04-17T18:04:38.342625Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] Got ReadResponse, serverBytesSize = 795, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428005 2025-04-17T18:04:38.342793Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428005 2025-04-17T18:04:38.343276Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9) 2025-04-17T18:04:38.343334Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] Returning serverBytesSize = 795 to budget 2025-04-17T18:04:38.343375Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] In ContinueReadingDataImpl, ReadSizeBudget = 795, ReadSizeServerDelta = 52428005 2025-04-17T18:04:38.343621Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-17T18:04:38.343897Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 grpc read done: success# 1, data# { read_request { bytes_size: 795 } } 2025-04-17T18:04:38.344019Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 got read request: guid# 47df482b-bac7f6b1-40615563-9aaac61b 2025-04-17T18:04:38.344484Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2025-04-17T18:04:38.344565Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6) 2025-04-17T18:04:38.344605Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (7-7) 2025-04-17T18:04:38.344637Z :DEBUG: [] Take Data. Partition 0. Read: {2, 1} (8-8) 2025-04-17T18:04:38.344679Z :DEBUG: [] Take Data. Partition 0. Read: {3, 0} (9-9) 2025-04-17T18:04:38.344742Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] The application data is transferred to the client. 
Number of messages 5, size 115 bytes 2025-04-17T18:04:38.344799Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] Returning serverBytesSize = 0 to budget 2025-04-17T18:04:38.344992Z :INFO: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] Closing read session. Close timeout: 0.000000s 2025-04-17T18:04:38.345050Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:1 2025-04-17T18:04:38.345104Z :INFO: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 57 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:04:38.345227Z :NOTICE: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T18:04:38.345279Z :DEBUG: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] [] Abort session to cluster 2025-04-17T18:04:38.345798Z :NOTICE: [] [] [aed3966e-a40714bb-8c07ddd8-eac50bfb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:04:38.346598Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 grpc read done: success# 0, data# { } 2025-04-17T18:04:38.346637Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 grpc read failed 2025-04-17T18:04:38.346663Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 grpc closed 2025-04-17T18:04:38.346705Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_4821503367035747310_v1 is DEAD 2025-04-17T18:04:38.347638Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [8:7494344608650268622:2527] disconnected; active server actors: 1 2025-04-17T18:04:38.347668Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [8:7494344608650268622:2527] client user disconnected session shared/user_8_2_4821503367035747310_v1 2025-04-17T18:04:38.348077Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_8_2_4821503367035747310_v1 2025-04-17T18:04:38.348132Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [8:7494344608650268625:2530] destroyed 2025-04-17T18:04:38.348180Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_8_2_4821503367035747310_v1 2025-04-17T18:04:38.349719Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|fbf0247b-270a2c6c-adaf36cc-27e518a8_0] Write session: destroy >> KqpQueryService::TableSink_HtapInteractive-withOltpSink >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::CTASWithoutPerStatement |85.9%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpQueryService::TableSink_HtapComplex+withOltpSink >> TestYmqHttpProxy::TestListQueues [GOOD] >> KqpQueryService::FlowControllOnHugeLiteralAsTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-17T18:02:43.916816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344111402038053:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.916870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:02:44.145590Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001888/r3tmp/tmpTqq27K/pdisk_1.dat 2025-04-17T18:02:44.421166Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.448284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.448373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.456775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5296, node 1 2025-04-17T18:02:44.571977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001888/r3tmp/yandexLRl4EG.tmp 2025-04-17T18:02:44.572008Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001888/r3tmp/yandexLRl4EG.tmp 2025-04-17T18:02:44.572183Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001888/r3tmp/yandexLRl4EG.tmp 2025-04-17T18:02:44.572327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.786414Z INFO: TTestServer started on Port 5646 GrpcPort 5296 TClient is connected to server localhost:5646 PQClient connected to localhost:5296 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.233718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:02:45.255231Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.283191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:45.432077Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:46.933048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344124286940744:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.933053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344124286940759:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.933187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.936820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.947995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344124286940761:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:02:47.039205Z node 1 :TX_PROXY ERROR: Actor# [1:7494344128581908121:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.787860Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344128581908136:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819506Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzVmMWYwNzUtODM3NDY1NTMtZDBkN2I0OWItZDkxNmU0MjU=, ActorId: [1:7494344124286940727:2336], ActorState: ExecuteState, TraceId: 01js2ens8bag1v803k26epa46b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.833251Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.189087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.298652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.361350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.436777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344111402038053:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.447949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344141466810349:2658] === CheckClustersList. 
Ok 2025-04-17T18:02:55.153527Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:55.162916Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:55.212001Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344162941647079:2782], Recipient [1:7494344115697005759:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212057Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:55.212121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344162941647075:2779], Recipient [1:7494344115697005759:2196]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T18:02:55.212145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:55.266033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:55.266589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:02:55.266885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-17T18:02:55.266932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-17T18:02:55.266958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-17T18:02:55.266996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was ... 219Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:39.205234Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_6_1_8761778573990941139_v1 2025-04-17T18:04:39.205261Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [6:7494344605829051476:2794] destroyed 2025-04-17T18:04:39.205470Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_8761778573990941139_v1 2025-04-17T18:04:39.206508Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494344597239116602:3310], Recipient [6:7494344580059246331:2457]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:39.206535Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:39.206548Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:39.206578Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7494344597239116600:2730] destroyed 2025-04-17T18:04:39.206689Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [6:7494344580059246331:2457], Partition 0, Sender [6:7494344580059246331:2457], Recipient [6:7494344580059246391:2461], Cookie: 0 2025-04-17T18:04:39.206736Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [6:7494344580059246331:2457], Recipient [6:7494344580059246391:2461]: NKikimr::TEvPQ::TEvPipeDisconnected 2025-04-17T18:04:39.206760Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2025-04-17T18:04:39.206789Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T18:04:39.206816Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-04-17T18:04:39.206847Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.206920Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.206950Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.206978Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.291585Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344601534083981:2749], Partition 2, Sender [0:0:0], Recipient [6:7494344605829051382:2766], Cookie: 0 2025-04-17T18:04:39.291655Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344605829051382:2766]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.291675Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.291709Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.291771Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.291790Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.291817Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.291861Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344601534083983:2750], Partition 1, Sender [0:0:0], Recipient [6:7494344605829051386:2768], Cookie: 0 2025-04-17T18:04:39.291891Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344605829051386:2768]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.291904Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.291923Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.291952Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.291964Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.291978Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.303905Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344580059246331:2457], Partition 0, Sender [0:0:0], Recipient [6:7494344580059246391:2461], Cookie: 0 2025-04-17T18:04:39.303973Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344580059246391:2461]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.303995Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.304033Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.304092Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.304114Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.304141Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.392819Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344601534083981:2749], Partition 2, Sender [0:0:0], Recipient [6:7494344605829051382:2766], Cookie: 0 2025-04-17T18:04:39.392929Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344605829051382:2766]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.392955Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.393007Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.393088Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.393110Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.393157Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.393953Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344601534083983:2750], Partition 1, Sender [0:0:0], Recipient [6:7494344605829051386:2768], Cookie: 0 2025-04-17T18:04:39.394015Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344605829051386:2768]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.394054Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.394081Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.394136Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.394171Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.394207Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.404107Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344580059246331:2457], Partition 0, Sender [0:0:0], Recipient [6:7494344580059246391:2461], Cookie: 0 2025-04-17T18:04:39.404172Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344580059246391:2461]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.404197Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.404233Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.404295Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.404316Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.404340Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.492515Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344601534083981:2749], Partition 2, Sender [0:0:0], Recipient [6:7494344605829051382:2766], Cookie: 0 2025-04-17T18:04:39.492581Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344605829051382:2766]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.492606Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.492642Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.492699Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.492720Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.492745Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:39.497775Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344601534083983:2750], Partition 1, Sender [0:0:0], Recipient [6:7494344605829051386:2768], Cookie: 0 2025-04-17T18:04:39.497842Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344605829051386:2768]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.497865Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:39.497903Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:39.497962Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:39.497982Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:39.498006Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |85.9%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2025-04-17T18:04:08.519345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.520132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.520296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.521206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.521808Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.521861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cf/r3tmp/tmpl9f5q8/pdisk_1.dat 2025-04-17T18:04:08.898537Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2075, node 1 TClient is connected to server localhost:3516 2025-04-17T18:04:09.258823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.258863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.258884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.259291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:16.047588Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.047831Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.047924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.048828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.049071Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.049144Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cf/r3tmp/tmphGfRuz/pdisk_1.dat 2025-04-17T18:04:16.329378Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7442, node 3 TClient is connected to server localhost:23374 2025-04-17T18:04:16.601130Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:16.601183Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:16.601210Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:16.601560Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:23.805528Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:491:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:23.805909Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:23.806168Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:23.806919Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:486:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:23.807177Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:23.807493Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cf/r3tmp/tmpTexdcz/pdisk_1.dat 2025-04-17T18:04:24.128791Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25382, node 5 TClient is connected to server localhost:17239 2025-04-17T18:04:24.439726Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:24.439774Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:24.439801Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:24.440339Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:31.701327Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:31.701628Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:31.701736Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:31.702778Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:31.703073Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:31.703262Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cf/r3tmp/tmpsHrc2d/pdisk_1.dat 2025-04-17T18:04:32.015802Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1472, node 7 TClient is connected to server localhost:23651 2025-04-17T18:04:32.329229Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:32.329273Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:32.329295Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:32.329457Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:39.830875Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:706:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:39.831342Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:39.831610Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:39.832180Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:703:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:39.832429Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:39.832608Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015cf/r3tmp/tmpu5iGOn/pdisk_1.dat 2025-04-17T18:04:40.206976Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25626, node 9 TClient is connected to server localhost:18333 2025-04-17T18:04:40.604530Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:40.604584Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:40.604614Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:40.605383Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] >> TestYmqHttpProxy::TestPurgeQueue >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> TestYmqHttpProxy::TestTagQueue [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckSessionResetAfterCommit [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic >> KqpQueryServiceScripts::ExecuteScript >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting >> TestYmqHttpProxy::TestUntagQueue >> KqpService::SessionBusy [GOOD] >> KqpService::SessionBusyRetryOperation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] Test command err: Trying to start YDB, gRPC: 21288, MsgBus: 8578 2025-04-17T18:04:16.513980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344513388145171:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:16.514027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c42/r3tmp/tmpUFLDAv/pdisk_1.dat 2025-04-17T18:04:16.961979Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21288, node 1 2025-04-17T18:04:16.991269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:16.991394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:16.995117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:17.105265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.105287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.105295Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.105423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8578 TClient is connected to server localhost:8578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:17.773494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.806624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:17.815467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.953112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.149908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.217624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:19.529880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344526273048827:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:19.529985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.042818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.072291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.147558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.239620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.282277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.356180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.390247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.476071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344530568016797:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.476185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.476420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344530568016802:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.480083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:20.490469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344530568016804:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:20.589679Z node 1 :TX_PROXY ERROR: Actor# [1:7494344530568016865:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:21.514252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344513388145171:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:21.514329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:21.645026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:21.646480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:21.648349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:23.849189Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913063866, txId: 281474976710696] shutting down Trying to start YDB, gRPC: 5067, MsgBus: 5772 2025-04-17T18:04:24.598138Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344544900045964:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:24.598173Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c42/r3tmp/tmpErseIx/pdisk_1.dat 2025-04-17T18:04:24.723282Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:24.748041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:24.748115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:24.749451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5067, node 2 2025-04-17T18:04:24.814733Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:24.814756Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:24.814768Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:24.814880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5772 TClient is connected to server localhost:5772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:25.243457Z node 2 ... 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:29.264889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:29.614530Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344544900045964:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:29.614590Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:30.850376Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: e403610-dc4e1179-7f7db722-e42a48db, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=OTE3YTQ2Y2UtZWQ5M2MxZTgtMmNlZDM5Ny0xMGJiOWQ0Nw==, TxId: 2025-04-17T18:04:31.711668Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e403610-dc4e1179-7f7db722-e42a48db, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=YTU5ZjgxNDUtY2NiNTM1YWYtZTExOWUyYTctM2E3OWMwODM=, TxId: 2025-04-17T18:04:31.847574Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: e403610-dc4e1179-7f7db722-e42a48db, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-04-17T18:04:31.878348Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e403610-dc4e1179-7f7db722-e42a48db, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ZjFlODZiYzctYmI1NzBiZGMtOGUxMjI0MWEtNDM2ZTY1YmY=, TxId: 2025-04-17T18:04:31.878483Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: e403610-dc4e1179-7f7db722-e42a48db, check lease failed 2025-04-17T18:04:32.207090Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: e403610-dc4e1179-7f7db722-e42a48db, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=2&id=OWFhYTMxNDItMTI5ODFkOC01YWJjNGRjZC0zNmRhOTk0OQ==, TxId: Trying to start YDB, gRPC: 14921, MsgBus: 22690 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c42/r3tmp/tmpnYG8xW/pdisk_1.dat 2025-04-17T18:04:34.221883Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:34.296279Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:34.307735Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:34.307816Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:34.310640Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14921, node 3 2025-04-17T18:04:34.371535Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:34.371557Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:34.371564Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:34.371697Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22690 TClient is connected to server localhost:22690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:34.925059Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:34.931105Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:34.934150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:35.013151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:35.175972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:35.255934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:04:37.794884Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344603066301466:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:37.795014Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:37.857517Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:37.903967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:37.949475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:37.996877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.032238Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.107135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.189446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.277013Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344607361269435:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.277110Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.277728Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344607361269440:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.280597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:38.296307Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344607361269443:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:38.373003Z node 3 :TX_PROXY ERROR: Actor# [3:7494344607361269498:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:39.376056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:39.377671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:39.379381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.761524Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913081786, txId: 281474976715700] shutting down 2025-04-17T18:04:42.047244Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913082080, txId: 281474976715703] shutting down 2025-04-17T18:04:42.384407Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913082409, txId: 281474976715706] shutting down 2025-04-17T18:04:42.664567Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913082703, txId: 281474976715709] shutting down 2025-04-17T18:04:42.697598Z node 3 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 50fcf37e-c22e0801-538629b0-d8c0afd4, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=3&id=YjQyZTgxZmYtY2Y3NzRjOGItMjMyMmQ2NDMtYjUyNzQ3YzQ=, TxId: >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> KqpQueryService::Ddl >> KqpQueryServiceScripts::ValidateScript >> KqpQueryService::Write [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] Test command err: 2025-04-17T18:04:08.748331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.748681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.748899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.749744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.750099Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.750196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a2/r3tmp/tmpr0rNiU/pdisk_1.dat 2025-04-17T18:04:09.146090Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20455, node 1 TClient is connected to server localhost:21048 2025-04-17T18:04:09.508658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.508702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.508734Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.508928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:16.157979Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:498:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.158388Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.158503Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.160021Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2157], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.160305Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.160527Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a2/r3tmp/tmpBqOdAa/pdisk_1.dat 2025-04-17T18:04:16.456434Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11042, node 3 TClient is connected to server localhost:3938 2025-04-17T18:04:16.831302Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:16.831365Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:16.831438Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:16.831889Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:24.759769Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:24.760031Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:24.760212Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:24.761142Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:24.761228Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:24.761272Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a2/r3tmp/tmpN7z2Ki/pdisk_1.dat 2025-04-17T18:04:25.079827Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25808, node 5 TClient is connected to server localhost:23473 2025-04-17T18:04:25.507487Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:25.507540Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:25.507574Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:25.508307Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:33.215990Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:33.216470Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:33.216660Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:33.217937Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:33.218296Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:33.218543Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a2/r3tmp/tmpRjleFo/pdisk_1.dat 2025-04-17T18:04:33.596141Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14978, node 7 TClient is connected to server localhost:27271 2025-04-17T18:04:34.055256Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:34.055322Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:34.055360Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:34.055618Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:42.588557Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:277:2212], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:42.588969Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:42.589283Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:42.590475Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:42.590876Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:42.590990Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a2/r3tmp/tmpFwxUnf/pdisk_1.dat 2025-04-17T18:04:43.041074Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25215, node 9 TClient is connected to server localhost:21190 2025-04-17T18:04:43.617389Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:43.617468Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:43.617514Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:43.618053Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpService::SwitchCache-UseCache [GOOD] >> KqpService::ToDictCache+UseCache >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> KqpQueryService::TableSink_OltpLiteralUpsert >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] Test command err: 2025-04-17T18:04:08.820245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.820672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.820909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.821887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.822319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.822434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmpy5nSEi/pdisk_1.dat 2025-04-17T18:04:09.209134Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19208, node 1 TClient is connected to server localhost:2657 2025-04-17T18:04:09.606936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.607011Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.607034Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.607176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:16.665586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:498:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.665936Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.666036Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.667428Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2157], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.667707Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.667918Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmpOFBYQR/pdisk_1.dat 2025-04-17T18:04:17.038652Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23899, node 3 TClient is connected to server localhost:30230 2025-04-17T18:04:17.468760Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.468805Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.468832Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.469262Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmpOFBYQR/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmpOFBYQR/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmpOFBYQR/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } location { id: 3 host: "::1" port: 12001 } 2025-04-17T18:04:24.944928Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:24.945098Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:24.945211Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:24.945786Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:24.945836Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:24.945861Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmprpGEKf/pdisk_1.dat 2025-04-17T18:04:25.295217Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30125, node 5 TClient is connected to server localhost:14924 2025-04-17T18:04:25.781528Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:25.781596Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:25.781630Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:25.782494Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-a594-5-5-42" status: YELLOW message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmprpGEKf/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-a594-5-5-43" status: YELLOW message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: "/home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmprpGEKf/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-a594-5-5-44" status: YELLOW message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmprpGEKf/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } location { id: 5 host: "::1" port: 12001 } 2025-04-17T18:04:33.950857Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:33.951377Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:33.951566Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:33.952908Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:33.953285Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:33.953554Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmp0GsTk7/pdisk_1.dat 2025-04-17T18:04:34.333736Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14939, node 7 TClient is connected to server localhost:22846 2025-04-17T18:04:34.801486Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:34.801545Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:34.801581Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:34.802010Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:43.852297Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:706:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:43.852795Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:43.853112Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:43.853691Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:703:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:43.853935Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:43.854101Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c0/r3tmp/tmpsxQK3K/pdisk_1.dat 2025-04-17T18:04:44.282212Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20253, node 9 TClient is connected to server localhost:3531 2025-04-17T18:04:44.785334Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:44.785400Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:44.785441Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:44.786340Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TestKinesisHttpProxy::TestWrongStream2 >> TestYmqHttpProxy::TestChangeMessageVisibility >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::TableSink_OlapUpsert >> KqpQueryService::SessionFromPoolError >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged [GOOD] >> KqpQueryService::ExecuteQueryPure >> TestKinesisHttpProxy::ListShardsTimestamp >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter >> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD] >> KqpQueryService::TableSink_OlapInsert >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] >> KqpQueryService::TableSink_OltpUpsert >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid >> TestKinesisHttpProxy::ErroneousRequestGetRecords ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] Test command err: 2025-04-17T18:02:43.884189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344113586454024:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.884256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:02:44.076499Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001884/r3tmp/tmpeCrRf7/pdisk_1.dat 2025-04-17T18:02:44.398906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.401182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.401269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.406139Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11394, node 1 2025-04-17T18:02:44.574789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001884/r3tmp/yandexNAjIrn.tmp 2025-04-17T18:02:44.574812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001884/r3tmp/yandexNAjIrn.tmp 2025-04-17T18:02:44.574964Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001884/r3tmp/yandexNAjIrn.tmp 2025-04-17T18:02:44.575059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.779658Z INFO: TTestServer started on Port 10874 GrpcPort 11394 TClient is connected to server localhost:10874 PQClient connected to localhost:11394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.131504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.151046Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.165677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:02:45.183406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:02:45.360440Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T18:02:46.840891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126471356733:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.841367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.841904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126471356746:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.849751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.862566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344126471356748:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:02:46.940752Z node 1 :TX_PROXY ERROR: Actor# [1:7494344126471356812:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.788009Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344126471356829:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819680Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODZhMjdmZDEtNjdjNzExNWQtYmVlYWVmMS00ZTlkYjM1Yg==, ActorId: [1:7494344126471356731:2336], ActorState: ExecuteState, TraceId: 01js2ens5d2qe11x7mjhyt36s4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.835641Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.189687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.254774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.335324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.440291Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344113586454024:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.440579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344143651226338:2659] === CheckClustersList. 
Ok 2025-04-17T18:02:55.122599Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:55.158467Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:55.211966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344165126063079:2794], Recipient [1:7494344117881421745:2192]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:55.212090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344165126063075:2791], Recipient [1:7494344117881421745:2192]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T18:02:55.212101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:55.257297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:55.257631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:02:55.257814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-17T18:02:55.257843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-17T18:02:55.257862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04 ... 
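The "=== Init DC:" step above seeds /Root/PQ/Config/V2/Cluster with an UPSERT over (name, balancer, local, enabled, weight), and the SCHEME_ERROR earlier in the same dump is the cluster tracker reading that table before the init step has created it. For orientation only, a minimal YQL sketch of a schema compatible with that UPSERT; the column types and the primary key are assumptions inferred from the literal values in the log, not taken from the test fixture:

    -- Hypothetical schema for the cluster config table seeded by "=== Init DC:".
    -- Types and PRIMARY KEY are assumptions inferred from the UPSERT literals
    -- ("dc1", "localhost", true, true, 1000); the actual fixture may differ.
    CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
        name Utf8,      -- cluster name, e.g. "dc1"
        balancer Utf8,  -- balancer endpoint, e.g. "dc2.logbroker.yandex.net"
        local Bool,     -- true for the local data center
        enabled Bool,   -- whether the cluster participates
        weight Int64,   -- routing weight, e.g. 1000
        PRIMARY KEY (name)
    );

The "failed to list clusters" error then presumably corresponds to a plain read of these five columns (the log records only the error position :3:16, not the query text), which starts succeeding once the UPSERT has run and CheckClustersList reports Ok.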
:48.533253Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7494344651578392228:2922] disconnected; active server actors: 1 2025-04-17T18:04:48.533277Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [6:7494344651578392228:2922] client test-consumer disconnected session test-consumer_6_1_2002398163931395256_v1 2025-04-17T18:04:48.533449Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494344651578392241:3759], Recipient [6:7494344642988457236:2843]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533471Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533491Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533520Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_6_1_2002398163931395256_v1 2025-04-17T18:04:48.533557Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [6:7494344651578392239:2927] destroyed 2025-04-17T18:04:48.533602Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494344651578392240:3758], Recipient [6:7494344642988457235:2842]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533617Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533628Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533650Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_6_1_2002398163931395256_v1 2025-04-17T18:04:48.533671Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [6:7494344651578392238:2926] destroyed 2025-04-17T18:04:48.533713Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7494344651578392232:3755], Recipient [6:7494344595743815419:2458]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533726Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533739Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:04:48.533752Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_6_1_2002398163931395256_v1 2025-04-17T18:04:48.533770Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [6:7494344651578392231:2925] destroyed 2025-04-17T18:04:48.533813Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_2002398163931395256_v1 2025-04-17T18:04:48.533831Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_2002398163931395256_v1 2025-04-17T18:04:48.533848Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_2002398163931395256_v1 2025-04-17T18:04:48.539532Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642988457236:2843], Partition 1, Sender [0:0:0], Recipient [6:7494344642988457319:2851], Cookie: 0 2025-04-17T18:04:48.539610Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642988457319:2851]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.539655Z node 6 :PERSQUEUE TRACE: StateIdle, processing event 
TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.539714Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:48.539803Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:48.539836Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:48.539896Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:48.658720Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7494344552794141339:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:04:48.658762Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:04:48.658815Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7494344552794141339:2146], Recipient [6:7494344552794141339:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:04:48.658833Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:04:48.717179Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344595743815419:2458], Partition 0, Sender [0:0:0], Recipient [6:7494344595743815484:2462], Cookie: 0 2025-04-17T18:04:48.717248Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344595743815484:2462]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.717270Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.717311Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:48.717384Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:48.717406Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:48.717434Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:48.717492Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642988457235:2842], Partition 2, Sender [0:0:0], Recipient [6:7494344642988457314:2849], Cookie: 0 2025-04-17T18:04:48.717526Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642988457314:2849]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.717539Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.717561Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:48.717591Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:48.717605Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:48.717621Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:48.717672Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642988457236:2843], Partition 1, Sender [0:0:0], Recipient [6:7494344642988457319:2851], Cookie: 0 2025-04-17T18:04:48.717701Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642988457319:2851]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.717712Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.717733Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:48.717760Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:48.717774Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:48.717790Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:48.817897Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344595743815419:2458], Partition 0, Sender [0:0:0], Recipient [6:7494344595743815484:2462], Cookie: 0 2025-04-17T18:04:48.817964Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344595743815484:2462]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.817992Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.818028Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:48.818090Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:48.818111Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:48.818136Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:48.818196Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642988457235:2842], Partition 2, Sender [0:0:0], Recipient [6:7494344642988457314:2849], Cookie: 0 2025-04-17T18:04:48.818235Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642988457314:2849]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.818247Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.818273Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:48.818320Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:48.818341Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:48.818363Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:04:48.818419Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642988457236:2843], Partition 1, Sender [0:0:0], Recipient [6:7494344642988457319:2851], Cookie: 0 2025-04-17T18:04:48.818465Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642988457319:2851]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.818482Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:04:48.818511Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:04:48.818547Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:04:48.818565Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:04:48.818586Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows >> KqpQueryService::TableSink_OltpLiteralUpsert [GOOD] >> KqpQueryService::TableSink_OltpInsert >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> KqpService::SessionBusyRetryOperation [GOOD] >> KqpService::RangeCache-UseCache >> KqpQueryService::Ddl [GOOD] >> KqpQueryService::DdlColumnTable >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight >> TestYmqHttpProxy::TestSendMessageBatch >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2025-04-17T18:04:08.662996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:693:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.663376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:08.663610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.664465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:08.664836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:08.664914Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e1/r3tmp/tmpvpBJha/pdisk_1.dat 2025-04-17T18:04:08.988344Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13487, node 1 TClient is connected to server localhost:19663 2025-04-17T18:04:09.352032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.352102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.352150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.352437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:16.434968Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:360:2219], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.435370Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:16.435500Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.436661Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:775:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:16.436984Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:16.437051Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e1/r3tmp/tmpoSZnKh/pdisk_1.dat 2025-04-17T18:04:16.767735Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11508, node 3 TClient is connected to server localhost:32493 2025-04-17T18:04:20.124758Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:20.124821Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:20.124862Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:20.126077Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:20.141967Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:20.142113Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:20.176622Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T18:04:20.177327Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:20.324525Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2025-04-17T18:04:20.325373Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" reason: "YELLOW-e9e2-1231c6b1-5" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } 
database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-17T18:04:28.253556Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:577:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:28.254059Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:28.254403Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:28.254566Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:574:2162], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:28.254853Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:28.254942Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e1/r3tmp/tmp0FgR3R/pdisk_1.dat 2025-04-17T18:04:28.609298Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10231, node 6 TClient is connected to server localhost:28703 2025-04-17T18:04:32.199805Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:32.199884Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:32.199929Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:32.201004Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:32.223709Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:32.223848Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:32.253423Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-17T18:04:32.254243Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-6" reason: "YELLOW-e9e2-1231c6b1-7" reason: "YELLOW-e9e2-1231c6b1-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 6 host: "::1" port: 12001 } 2025-04-17T18:04:41.032009Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:853:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:41.032473Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:41.032524Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:04:41.033924Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:850:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:04:41.034463Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:41.034680Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/bu ... 72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2025-04-17T18:04:45.638952Z node 9 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2025-04-17T18:04:45.639023Z node 9 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 11 2025-04-17T18:04:45.639092Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 11) 2025-04-17T18:04:45.639158Z node 9 :HIVE TRACE: HIVE#72057594037968897 Node(11, (0,0,0,0)->(0,1048576,0,0)) 2025-04-17T18:04:45.639280Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-17T18:04:45.639385Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-17T18:04:45.639467Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T18:04:45.639589Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-04-17T18:04:45.639802Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 11 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:04:45.659249Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(11)::Complete 2025-04-17T18:04:45.659345Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T18:04:45.659638Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [11:1581:2365] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } TabletType: PersQueue Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 1 } SuggestedGeneration: 1 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-04-17T18:04:45.660217Z node 9 :HIVE TRACE: 
HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 11 Cookie 72075186224037888 2025-04-17T18:04:45.725562Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-17T18:04:45.725715Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [11:1581:2365] 2025-04-17T18:04:45.725810Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 11) 2025-04-17T18:04:45.725888Z node 9 :HIVE TRACE: HIVE#72057594037968897 Node(11, (0,1048576,0,0)->(0,0,0,0)) 2025-04-17T18:04:45.726034Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-04-17T18:04:45.726154Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-04-17T18:04:45.726440Z node 9 :HIVE TRACE: HIVE#72057594037968897 Node(11, (0,0,0,0)->(0,1048576,0,0)) 2025-04-17T18:04:45.726582Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-17T18:04:45.726696Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-17T18:04:45.726851Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T18:04:45.726908Z node 9 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-17T18:04:45.727183Z node 9 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2025-04-17T18:04:45.727267Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T18:04:45.727343Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T18:04:45.727401Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T18:04:45.775610Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [9:1338:2701] {EvTabletCreationResult Status: OK TabletID: 72075186224037888}} 2025-04-17T18:04:45.775704Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T18:04:49.256798Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 11: Status: 2 2025-04-17T18:04:49.256929Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(11)::Execute 2025-04-17T18:04:49.256996Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-17T18:04:49.257129Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(11)::Complete 2025-04-17T18:04:49.257901Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2025-04-17T18:04:49.258047Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 11) 2025-04-17T18:04:49.258111Z node 9 :HIVE TRACE: HIVE#72057594037968897 Node(11, (0,1048576,0,0)->(0,0,0,0)) 2025-04-17T18:04:49.262374Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-04-17T18:04:49.262514Z node 9 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 
2025-04-17T18:04:49.262608Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 11 2025-04-17T18:04:49.262710Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2025-04-17T18:04:49.262777Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-17T18:04:49.262832Z node 9 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-17T18:04:49.263503Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxKillNode(11)::Execute 2025-04-17T18:04:49.263625Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:04:49.263679Z node 9 :HIVE TRACE: Node(11) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2025-04-17T18:04:49.263744Z node 9 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 11) 2025-04-17T18:04:49.263806Z node 9 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [9:1645:2712] 2025-04-17T18:04:49.263867Z node 9 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(11): waiting 3600.000000s 2025-04-17T18:04:49.265620Z node 9 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([11:1582:2365]) [9:1645:2712] 2025-04-17T18:04:49.272726Z node 9 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([9:2042:2734]) [9:2043:2739] 2025-04-17T18:04:49.280563Z node 9 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([9:2042:2734]) [9:2043:2739] 2025-04-17T18:04:49.282979Z node 9 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([12:2015:2364]) [9:2076:2741] 2025-04-17T18:04:49.292713Z node 9 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [12:2014:2364] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-17T18:04:49.292846Z node 9 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(12)::Execute 2025-04-17T18:04:49.292979Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:49.293035Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T18:04:49.293078Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-17T18:04:49.293120Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T18:04:49.293163Z node 9 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-17T18:04:49.293263Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:49.293925Z node 9 :HIVE DEBUG: 
HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 12 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-10" reason: "YELLOW-e9e2-1231c6b1-11" reason: "YELLOW-e9e2-1231c6b1-12" reason: "YELLOW-e9e2-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-12" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 12 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 9 host: "::1" port: 12001 } >> TopicAutoscaling::PartitionSplit_OffsetCommit [GOOD] >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapOrder >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken |85.9%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> KqpQueryService::ExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryScalar >> KqpQueryService::SessionFromPoolError [GOOD] >> KqpQueryService::ReturnAndCloseSameTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_OffsetCommit [GOOD] Test command err: 2025-04-17T18:02:43.888870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344110613933986:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.889566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00188f/r3tmp/tmpelxMIp/pdisk_1.dat 2025-04-17T18:02:44.102702Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:02:44.361801Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.374621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.375299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.384322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20553, node 1 2025-04-17T18:02:44.578472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00188f/r3tmp/yandex9ckySa.tmp 2025-04-17T18:02:44.578502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00188f/r3tmp/yandex9ckySa.tmp 2025-04-17T18:02:44.578666Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00188f/r3tmp/yandex9ckySa.tmp 2025-04-17T18:02:44.578793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.790233Z INFO: TTestServer started on Port 7504 GrpcPort 20553 TClient is connected to server localhost:7504 PQClient connected to localhost:20553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:02:45.195812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.220179Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.242278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:45.380410Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:46.988464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344123498836705:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.988481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344123498836697:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.988571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.992573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.995915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344123498836742:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.996526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:47.003742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344123498836711:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:02:47.062206Z node 1 :TX_PROXY ERROR: Actor# [1:7494344127793804063:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.788012Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344127793804079:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819459Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzI1ZmQ4Yy05ZTJhY2MzNC1hNGRlMGRmYS1lODY1ODU4Yg==, ActorId: [1:7494344123498836694:2337], ActorState: ExecuteState, TraceId: 01js2ens9s08jym648abccqpjp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.833568Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.189422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.227980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.340123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.436676Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344110613933986:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.436984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344140678706294:2660] === CheckClustersList. 
Ok 2025-04-17T18:02:55.122607Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:55.158442Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:55.211996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344162153543028:2788], Recipient [1:7494344114908901707:2199]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212052Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:55.212111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344162153543024:2785], Recipient [1:7494344114908901707:2199]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T18:02:55.212128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:55.263587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:55.263954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:02:55.264175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: ... 
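The CreatePersQueueGroup transaction dumped above is what TTopicSdkTestSetup issues for the autoscaling tests: a single initial partition, PartitionStrategy with MinPartitionCount: 1, MaxPartitionCount: 100 and PartitionStrategyType: CAN_SPLIT, plus a data-streams consumer named test-consumer. A rough YQL equivalent of that configuration, for readers reproducing the setup by hand; the WITH option names here are assumptions rather than confirmed syntax, and the test itself creates the topic through the SDK scheme transaction, not through DDL:

    -- Hypothetical YQL rendering of the CreatePersQueueGroup config in this log:
    -- 1 initial partition, growth up to 100, split-only scaling (CAN_SPLIT).
    -- Option names are assumptions; the test uses the SDK, not this statement.
    CREATE TOPIC `/Root/test-topic` (
        CONSUMER `test-consumer`
    ) WITH (
        min_active_partitions = 1,
        max_active_partitions = 100,
        auto_partitioning_strategy = 'scale_up'
    );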
nected 2025-04-17T18:04:51.869314Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected
2025-04-17T18:04:51.869335Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [6:7494344663734630688:3199] destroyed
2025-04-17T18:04:51.885888Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344655144695282:2931], Partition 4, Sender [0:0:0], Recipient [6:7494344655144695373:2946], Cookie: 0
2025-04-17T18:04:51.885963Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344655144695373:2946]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.885996Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.886062Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.886165Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.886206Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:51.886237Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:51.886399Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344612195020509:2456], Partition 0, Sender [0:0:0], Recipient [6:7494344612195020572:2460], Cookie: 0
2025-04-17T18:04:51.886441Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344612195020572:2460]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.886457Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.886486Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.886526Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.887739Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:51.887801Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:51.887880Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642259792902:2807], Partition 2, Sender [0:0:0], Recipient [6:7494344642259792980:2814], Cookie: 0
2025-04-17T18:04:51.887931Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642259792980:2814]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.887952Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.887984Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.888034Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.888049Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:51.888061Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:51.888108Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344655144695263:2930], Partition 3, Sender [0:0:0], Recipient [6:7494344655144695378:2948], Cookie: 0
2025-04-17T18:04:51.888147Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344655144695378:2948]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.888158Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.888182Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.888214Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.888230Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:51.888249Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:51.888907Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642259792920:2808], Partition 1, Sender [0:0:0], Recipient [6:7494344642259792983:2816], Cookie: 0
2025-04-17T18:04:51.888955Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642259792983:2816]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.888972Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.889003Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.889055Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.889077Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:51.889238Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:51.990364Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344655144695282:2931], Partition 4, Sender [0:0:0], Recipient [6:7494344655144695373:2946], Cookie: 0
2025-04-17T18:04:51.990447Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344655144695373:2946]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.990477Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.990529Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.990602Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.998220Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:51.998312Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:51.998443Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344612195020509:2456], Partition 0, Sender [0:0:0], Recipient [6:7494344612195020572:2460], Cookie: 0
2025-04-17T18:04:51.998508Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344612195020572:2460]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.998544Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.998595Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.998671Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.998695Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:51.998715Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:51.998767Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642259792902:2807], Partition 2, Sender [0:0:0], Recipient [6:7494344642259792980:2814], Cookie: 0
2025-04-17T18:04:51.998805Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642259792980:2814]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.998819Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:51.998842Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:51.998872Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:51.998889Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:52.005018Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:52.005220Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344655144695263:2930], Partition 3, Sender [0:0:0], Recipient [6:7494344655144695378:2948], Cookie: 0
2025-04-17T18:04:52.005290Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344655144695378:2948]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:52.005329Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:52.005388Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:52.005461Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:52.005487Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:52.005515Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-04-17T18:04:52.005558Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344642259792920:2808], Partition 1, Sender [0:0:0], Recipient [6:7494344642259792983:2816], Cookie: 0
2025-04-17T18:04:52.005590Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344642259792983:2816]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:52.005604Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-04-17T18:04:52.005623Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-04-17T18:04:52.005652Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-04-17T18:04:52.005666Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-04-17T18:04:52.005681Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
>> TestKinesisHttpProxy::TestWrongStream2 [GOOD]
>> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD]
>> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid
>> KqpQueryService::SessionFromPoolSuccess
>> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD]
>> KqpQueryService::AlterCdcTopic
>> TestKinesisHttpProxy::TestWrongRequest
>> TestKinesisHttpProxy::ListShardsTimestamp [GOOD]
>> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD]
>> KqpQueryService::Explain
>> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD]
>> KqpQueryService::TempTablesDrop
>> TestKinesisHttpProxy::ListShardsToken
>> KqpQueryService::DdlSecret [GOOD]
>> KqpQueryService::DdlMixedDml
>> KqpQueryServiceScripts::ListScriptExecutions [GOOD]
>> KqpQueryServiceScripts::Tcl
>> TestYmqHttpProxy::TestChangeMessageVisibilityBatch
>> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD]
>> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD]
>> KqpQueryServiceScripts::TestAstWithCompression
>> TestKinesisHttpProxy::GoodRequestCreateStream
>> KqpQueryService::PeriodicTaskInSessionPool [GOOD]
>> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle
>> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptWithTimeout
>> KqpQueryService::TableSink_OlapUpsert [GOOD]
>> KqpQueryService::TableSink_OltpDelete
>> KqpService::SwitchCache+UseCache [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD]
>> KqpQueryService::TableSink_OltpUpsert [GOOD]
>> KqpQueryService::TableSink_OltpUpdate
>> KqpQueryService::TableSink_OlapInsert [GOOD]
>> KqpQueryService::TableSink_OlapDelete
>> DataShardTxOrder::RandomPoints_DelayRS [GOOD]
>> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD]
>> KqpLimits::OutOfSpaceYQLUpsertFail+useSink [GOOD]
>> KqpLimits::ManyPartitionsSorting
>> TestYmqHttpProxy::TestSendMessageBatch [GOOD]
>> KqpQueryService::ExecuteQueryScalar [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD]
Test command err:
Trying to start YDB, gRPC: 61738, MsgBus: 18154
2025-04-17T18:04:36.530833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344597769070838:2196];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:36.535984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2b/r3tmp/tmp8fXK2t/pdisk_1.dat
2025-04-17T18:04:37.083333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:04:37.083447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:04:37.087618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:04:37.110627Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 61738, node 1
2025-04-17T18:04:37.266680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:37.266707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:37.266714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:37.266837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18154
TClient is connected to server localhost:18154
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:04:37.892722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:37.905901Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:04:37.922804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:38.061953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:38.210245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:38.292693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:39.974682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344610653974355:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:39.974783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:40.343647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:04:40.377258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:04:40.407162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:04:40.441576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:04:40.469330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:04:40.556024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:04:40.624215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:04:40.697386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344614948942319:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:40.697494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:40.697686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344614948942324:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:40.704762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:04:40.717713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344614948942326:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:04:40.781950Z node 1 :TX_PROXY ERROR: Actor# [1:7494344614948942382:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:04:41.518285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344597769070838:2196];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:41.518353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:04:41.784282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:04:41.785618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-17T18:04:41.787985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 23926, MsgBus: 4676
2025-04-17T18:04:44.411950Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344632960699999:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:44.413396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2b/r3tmp/tmpsog1YY/pdisk_1.dat
2025-04-17T18:04:44.595590Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:04:44.623675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:04:44.623750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:04:44.625488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23926, node 2
2025-04-17T18:04:44.779711Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:44.779733Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:44.779740Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:44.779839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4676
TClient is connected to server localhost:4676
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:04:45.148462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemesha ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:04:48.226044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344650140571624:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:48.226151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:48.226191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344650140571630:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:48.232483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:04:48.243263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344650140571632:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:04:48.345027Z node 2 :TX_PROXY ERROR: Actor# [2:7494344650140571689:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:04:49.398083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-17T18:04:49.400458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-17T18:04:49.403830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
2025-04-17T18:04:49.413354Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344632960699999:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:49.413440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 28213, MsgBus: 9288
2025-04-17T18:04:52.095414Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344664694310327:2155];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2b/r3tmp/tmpYJ1TOl/pdisk_1.dat
2025-04-17T18:04:52.183921Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-17T18:04:52.267289Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:04:52.278838Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:04:52.278930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:04:52.282780Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28213, node 3
2025-04-17T18:04:52.330692Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:52.330717Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:52.330725Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:52.330852Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9288
TClient is connected to server localhost:9288
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:04:52.846715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:52.858052Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:04:52.869220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:52.952392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:53.122074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:53.248939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:55.500412Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344677579213861:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:55.500489Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:55.553968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:04:55.592512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:04:55.622172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:04:55.650645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:04:55.680871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:04:55.716364Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:04:55.747025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:04:55.808111Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344677579214516:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:55.808353Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:55.808594Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344677579214521:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:55.811884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:04:55.821113Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344677579214523:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:04:55.888945Z node 3 :TX_PROXY ERROR: Actor# [3:7494344677579214578:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:04:56.842684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:04:56.843574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-17T18:04:56.844705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-17T18:04:57.094338Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344664694310327:2155];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:57.094404Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache+UseCache [GOOD]
Test command err:
Trying to start YDB, gRPC: 4104, MsgBus: 24718
2025-04-17T18:04:16.527187Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344511542587205:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:16.527233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c66/r3tmp/tmpskKDqv/pdisk_1.dat
2025-04-17T18:04:16.930443Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:04:16.945761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:04:16.945851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:04:16.948609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 4104, node 1
2025-04-17T18:04:17.113148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:17.113185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:17.113191Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:17.113284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24718
TClient is connected to server localhost:24718
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:04:17.756310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:17.768696Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:04:17.785058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:17.927907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:18.120359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:18.197203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:19.497417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344524427490870:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:19.505985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:20.042693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:04:20.073052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:04:20.099643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:04:20.127999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:04:20.161825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:04:20.200794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:04:20.288350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:04:20.402202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344528722458833:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:20.402271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:20.402629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344528722458838:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:20.410041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:04:20.422206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344528722458841:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:04:20.517190Z node 1 :TX_PROXY ERROR: Actor# [1:7494344528722458899:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:04:21.569583Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344511542587205:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:21.576502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:04:22.686164Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344537312394915:2656] TxId: 281474976710735. Ctx: { TraceId: 01js2erp6b4nnmq0b1ge7cn2k5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZjYzM0OGMtMjQxMmI3ZTctNjNkYWM2ZGYtYjM5MWFiNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-17T18:04:22.694384Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344537312394988:2656], TxId: 281474976710735, task: 1. Ctx: { TraceId : 01js2erp6b4nnmq0b1ge7cn2k5. SessionId : ydb://session/3?node_id=1&id=Y2ZjYzM0OGMtMjQxMmI3ZTctNjNkYWM2ZGYtYjM5MWFiNzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494344537312394915:2656], status: ABORTED, reason: {
: Error: Terminate execution }
2025-04-17T18:04:22.699020Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344537312394899:2618] TxId: 281474976710729. Ctx: { TraceId: 01js2erp5sctrrsb4c6c4aypmn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhiMWY1Yi1hNjYzN2YzNC1iNTFlODNhYy02NTZhYzc2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-17T18:04:22.703020Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344537312394994:2618], TxId: 281474976710729, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzhiMWY1Yi1hNjYzN2YzNC1iNTFlODNhYy02NTZhYzc2. TraceId : 01js2erp5sctrrsb4c6c4aypmn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494344537312394899:2618], status: ABORTED, reason: {
: Error: Terminate execution }
2025-04-17T18:04:22.706839Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzhiMWY1Yi1hNjYzN2YzNC1iNTFlODNhYy02NTZhYzc2, ActorId: [1:7494344537312393980:2618], ActorState: ExecuteState, TraceId: 01js2erp5sctrrsb4c6c4aypmn, Create QueryResponse for error on request, msg:
2025-04-17T18:04:22.720649Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344537312394885:2525] TxId: 281474976710726. Ctx: { TraceId: 01js2erp436eh9nqc9ejfctr2x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IzMDlkMDctMTcxZDBhY2MtOWM4ZjE4ODQtOWQ0ODcyZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-17T18:04:22.721016Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2IzMDlkMDctMTcxZDBhY2MtOWM4ZjE4ODQtOWQ0ODcyZjk=, ActorId: [1:7494344533017426567:2525], ActorState: ExecuteState, TraceId: 01js2erp436eh9nqc9ejfctr2x, Create QueryResponse for error on request, msg:
2025-04-17T18:04:22.721839Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344537312394912:2570] TxId: 281474976710732. Ctx: { TraceId: 01js2erp5hd9v4tjpvwwpt0qs2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJjYjVjN2QtY2E5Y2VhMmEtZmVhMTg2MjctMTY1Nzg0YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-17T18:04:22.723875Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344537312394914:2517] TxId: 281474976710734. Ctx: { TraceId: 01js2erp42ean197k6dtxghp0m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY4YzI2YTctZjBjNTM4Yy03NDJmMDdiZi02ZmVmNWJjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-17T18:04:22.724409Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344537312395017:2517], TxI ... orState: ExecuteState, TraceId: 01js2erymqedxh1xzebj2w9wnb, Reply query error, msg: Pending previous query completion proxyRequestId: 52
2025-04-17T18:04:30.745843Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzlmOTBlMjYtM2Q2MDRkOTYtMTJlMzU1MWYtZDQxOTMxOTk=, ActorId: [2:7494344573597432455:2587], ActorState: ExecuteState, TraceId: 01js2erymqedxh1xzebj2w9wnb, Reply query error, msg: Pending previous query completion proxyRequestId: 53
2025-04-17T18:04:30.763549Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzlmOTBlMjYtM2Q2MDRkOTYtMTJlMzU1MWYtZDQxOTMxOTk=, ActorId: [2:7494344573597432455:2587], ActorState: ExecuteState, TraceId: 01js2erymqedxh1xzebj2w9wnb, Reply query error, msg: Pending previous query completion proxyRequestId: 54
2025-04-17T18:04:30.764065Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzlmOTBlMjYtM2Q2MDRkOTYtMTJlMzU1MWYtZDQxOTMxOTk=, ActorId: [2:7494344573597432455:2587], ActorState: ExecuteState, TraceId: 01js2erymqedxh1xzebj2w9wnb, Reply query error, msg: Pending previous query completion proxyRequestId: 55
2025-04-17T18:04:30.894938Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU3OTAyZWQtYjJmYWZiMTktYzU1MmMyZmMtMjlmOWNlMTc=, ActorId: [2:7494344573597432505:2598], ActorState: ExecuteState, TraceId: 01js2erysebe93zar9bfh03grg, Reply query error, msg: Pending previous query completion proxyRequestId: 58
2025-04-17T18:04:30.903677Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU3OTAyZWQtYjJmYWZiMTktYzU1MmMyZmMtMjlmOWNlMTc=, ActorId: [2:7494344573597432505:2598], ActorState: ExecuteState, TraceId: 01js2erysebe93zar9bfh03grg, Reply query error, msg: Pending previous query completion proxyRequestId: 59
2025-04-17T18:04:30.904440Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU3OTAyZWQtYjJmYWZiMTktYzU1MmMyZmMtMjlmOWNlMTc=, ActorId: [2:7494344573597432505:2598], ActorState: ExecuteState, TraceId: 01js2erysebe93zar9bfh03grg, Reply query error, msg: Pending previous query completion proxyRequestId: 60
2025-04-17T18:04:30.986635Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Q4MmZkMzUtNDg2NjU1ZTktYzdkMjgzZGItYTQ3MmVmNTM=, ActorId: [2:7494344573597432556:2608], ActorState: ExecuteState, TraceId: 01js2erywa65y3x3ezhbahv8ms, Reply query error, msg: Pending previous query completion proxyRequestId: 63
2025-04-17T18:04:30.987837Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Q4MmZkMzUtNDg2NjU1ZTktYzdkMjgzZGItYTQ3MmVmNTM=, ActorId: [2:7494344573597432556:2608], ActorState: ExecuteState, TraceId: 01js2erywa65y3x3ezhbahv8ms, Reply query error, msg: Pending previous query completion proxyRequestId: 64
2025-04-17T18:04:31.098687Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGZiZjYyOTMtODgzODAxOWEtYjdmNzFmY2MtZjg4ZDkxMjA=, ActorId: [2:7494344577892399877:2617], ActorState: ExecuteState, TraceId: 01js2eryztf8k8ktm19bk27xt0, Reply query error, msg: Pending previous query completion proxyRequestId: 67
Trying to start YDB, gRPC: 20686, MsgBus: 23891
2025-04-17T18:04:32.080862Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344579457414728:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:32.080896Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c66/r3tmp/tmpGFP1BY/pdisk_1.dat
2025-04-17T18:04:32.204233Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:04:32.232676Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:04:32.232771Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:04:32.234772Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20686, node 3
2025-04-17T18:04:32.285478Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:32.285504Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:32.285512Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:32.285648Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23891
TClient is connected to server localhost:23891
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:04:32.783630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:32.791007Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:04:32.801610Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:32.876540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:33.055759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:33.143556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:35.614347Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344592342318377:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:35.614463Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:35.718080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:04:35.772340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:04:35.818125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:04:35.862235Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:04:35.908166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:04:35.963647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:04:36.018098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:04:36.125306Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344596637286343:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:36.125417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:36.125641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344596637286348:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:36.129891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:04:36.145043Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344596637286350:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:04:36.220678Z node 3 :TX_PROXY ERROR: Actor# [3:7494344596637286407:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:04:37.082295Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344579457414728:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:37.082380Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:04:37.380945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-17T18:04:47.203758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T18:04:47.203802Z node 3 :IMPORT WARN: Table profiles were not loaded
took: 20.811385s
took: 20.812644s
took: 20.818929s
took: 20.821207s
took: 20.850340s
took: 20.856472s
took: 20.857172s
took: 20.858687s
took: 20.864259s
took: 20.844051s
>> KqpQueryServiceScripts::ExecuteMultiScript [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptPg
>> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD]
>> KqpQueryService::TempTablesDrop [GOOD]
>> KqpQueryService::Tcl
>> KqpQueryService::TableSink_OltpInsert [GOOD]
>> KqpQueryService::TableSink_OltpInteractive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD]
Test command err:
2025-04-17T18:03:43.766794Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot
2025-04-17T18:03:43.787990Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored
2025-04-17T18:03:43.788397Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151]
2025-04-17T18:03:43.792286Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:03:43.848650Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-17T18:03:43.853714Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:03:43.919352Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T18:03:43.923609Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-17T18:03:43.923688Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-17T18:03:43.923790Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-17T18:03:43.927271Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T18:03:43.927750Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T18:03:43.927824Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2
2025-04-17T18:03:43.930207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:03:43.930249Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:03:44.031597Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T18:03:44.073056Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-04-17T18:03:44.073237Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T18:03:44.073356Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213]
2025-04-17T18:03:44.073408Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-17T18:03:44.073448Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-17T18:03:44.073483Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-17T18:03:44.073760Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-17T18:03:44.073820Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-17T18:03:44.074097Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-17T18:03:44.074246Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-17T18:03:44.074366Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-17T18:03:44.074406Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:03:44.074455Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-17T18:03:44.074500Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-17T18:03:44.074548Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-17T18:03:44.074580Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-17T18:03:44.074622Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-17T18:03:44.074730Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T18:03:44.074794Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T18:03:44.074854Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0]
2025-04-17T18:03:44.077349Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { }
2025-04-17T18:03:44.077406Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-17T18:03:44.077482Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-17T18:03:44.077638Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx
2025-04-17T18:03:44.077686Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0
2025-04-17T18:03:44.077755Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184
2025-04-17T18:03:44.077804Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-17T18:03:44.077838Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx
2025-04-17T18:03:44.077894Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx
2025-04-17T18:03:44.077939Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx
2025-04-17T18:03:44.078217Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts
2025-04-17T18:03:44.078254Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx
2025-04-17T18:03:44.078286Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose
2025-04-17T18:03:44.078344Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose
2025-04-17T18:03:44.078399Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete
2025-04-17T18:03:44.078436Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose
2025-04-17T18:03:44.078475Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan
2025-04-17T18:03:44.078512Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan
2025-04-17T18:03:44.078539Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan
2025-04-17T18:03:44.090389Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-17T18:03:44.090450Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx
2025-04-17T18:03:44.090491Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose
2025-04-17T18:03:44.090530Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED
2025-04-17T18:03:44.090587Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme
2025-04-17T18:03:44.091103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-17T18:03:44.091141Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-17T18:03:44.091178Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0]
2025-04-17T18:03:44.091236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184}
2025-04-17T18:03:44.091259Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-17T18:03:44.091363Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan
2025-04-17T18:03:44.091399Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-17T18:03:44.091440Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan
2025-04-17T18:03:44.091468Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue
2025-04-17T18:03:44.099446Z node 1
:TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.099516Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.099753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.099790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.099833Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.099869Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.099899Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.099934Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.099966Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.100004Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.100038Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.100093Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.100130Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.100286Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.100314Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.100334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.100353Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.100374Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.100424Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.100448Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.100478Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.100517Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.100566Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.100608Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.100637Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.100675Z node 1 :TX_DATA ... 
ropose latency: 1 ms 2025-04-17T18:04:59.534072Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.534351Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.534390Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.534433Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.534468Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.534625Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.534655Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.534700Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.534733Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.534887Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.534920Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.534962Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.534994Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.535174Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.535205Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.535247Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.535282Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.535467Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.535500Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.535540Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.535573Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.535697Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.535730Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.535772Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.535803Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.535964Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.535995Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on 
unit CompleteOperation 2025-04-17T18:04:59.536038Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.536070Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.536247Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.536278Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.536319Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.536351Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.536493Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.536525Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.536566Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.536597Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.536751Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.536786Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.536828Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.536859Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.536985Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.537017Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.537057Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.537088Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.537219Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.537249Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.537287Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.537319Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:04:59.537438Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:04:59.537470Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-17T18:04:59.537511Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:04:59.537548Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-17T18:04:59.537776Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-17T18:04:59.537821Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.537862Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-17T18:04:59.538027Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:04:59.538061Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.538094Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-17T18:04:59.538399Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:04:59.538441Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.538474Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-17T18:04:59.538605Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:04:59.538638Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.538669Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-17T18:04:59.538729Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:04:59.538758Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.538789Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-17T18:04:59.538919Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:04:59.538962Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.538994Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-17T18:04:59.539071Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:04:59.539105Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.539136Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-17T18:04:59.539221Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:04:59.539256Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.539287Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-17T18:04:59.539374Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-17T18:04:59.539409Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:04:59.539452Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 30 27 17 27 19 29 27 23 22 28 22 28 - 15 23 28 9 26 15 22 11 15 15 - 11 - 11 - 11 11 - - actual 30 27 17 27 19 29 27 23 22 28 22 28 - 15 23 28 9 26 15 22 11 15 15 - 11 - 11 - 11 11 - - interm 5 5 5 4 3 - 1 2 2 4 3 - - 0 - - - - - - 4 - - - - - 4 - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] Test command err: Trying to start YDB, gRPC: 21430, MsgBus: 30254 2025-04-17T18:04:37.345705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344601705294691:2259];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:37.348094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2a/r3tmp/tmprOZks4/pdisk_1.dat 2025-04-17T18:04:38.033698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:38.049103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:38.049250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:38.053799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21430, node 1 2025-04-17T18:04:38.172029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:38.172051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:38.172057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:38.172167Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30254 TClient is connected to server localhost:30254 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:38.793673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:38.808195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:40.700113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344614590197023:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:40.700244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:40.706454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344614590197035:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:40.711385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:04:40.724368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344614590197037:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:04:40.823911Z node 1 :TX_PROXY ERROR: Actor# [1:7494344614590197088:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:41.158751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-17T18:04:41.335664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.399818Z node 1 :TX_PROXY ERROR: Actor# [1:7494344618885164631:2484] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:41.410255Z node 1 :TX_PROXY ERROR: Actor# [1:7494344618885164638:2489] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODZkYWRhNDAtYjhiN2U5YjQtMzhjZWM1ODMtZDAzZWRhYjE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:41.447565Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:04:41.452424Z node 1 :TX_PROXY ERROR: Actor# [1:7494344618885164698:2536] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:41.454291Z node 1 :TX_PROXY ERROR: Actor# [1:7494344618885164705:2541] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODZkYWRhNDAtYjhiN2U5YjQtMzhjZWM1ODMtZDAzZWRhYjE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:41.456614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.787869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.918052Z node 1 :TX_PROXY ERROR: Actor# [1:7494344618885164886:2647] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:41.920094Z node 1 :TX_PROXY ERROR: Actor# [1:7494344618885164893:2652] txid# 281474976710675, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODZkYWRhNDAtYjhiN2U5YjQtMzhjZWM1ODMtZDAzZWRhYjE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, 
state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:41.962778Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344618885164933:2407], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:04:41.966408Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDEyMjg3YzktYWUzNDAzNTUtZTVhMjU5N2MtOTlmNDE2Mg==, ActorId: [1:7494344618885164931:2406], ActorState: ExecuteState, TraceId: 01js2es9jtc496atrcycdejxx8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:04:41.970254Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:04:42.039896Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344623180132253:2413], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:04:42.041148Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2YxZDRkYzktMTNiZTVjZjYtZWExNzYxNzgtYzk5NTE1N2M=, ActorId: [1:7494344623180132251:2412], ActorState: ExecuteState, TraceId: 01js2es9na24ms2p1sv9krc30y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 13330, MsgBus: 25426 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2a/r3tmp/tmpdh0JNJ/pdisk_1.dat 2025-04-17T18:04:42.877863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:04:42.973421Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:42.973512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:42.975201Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:42.987622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13330, node 2 2025-04-17T18:04:43.110723Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:43.110745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:43.110751Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:43.110853Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25426 TClient is connected to server localhost:25426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTx ... 
at schemeshard: 72057594046644480 2025-04-17T18:04:51.097877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.170231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.212982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.244916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.290645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.330857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.417532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344664276954477:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:51.417623Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:51.417826Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344664276954482:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:51.421957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:51.438091Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344664276954484:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:51.528713Z node 3 :TX_PROXY ERROR: Actor# [3:7494344664276954541:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:52.477867Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344647097083004:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:52.478271Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:52.684572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.782334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.888768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.374269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.585634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.684709Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.823681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-17T18:04:54.039265Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-17T18:04:54.183454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30470, MsgBus: 6080 2025-04-17T18:04:55.156811Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344679705077853:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:55.156876Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2a/r3tmp/tmpyjemLL/pdisk_1.dat 2025-04-17T18:04:55.310736Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:55.326134Z node 4 :HIVE 
WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:55.326237Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:55.329639Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30470, node 4 2025-04-17T18:04:55.392079Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:55.392107Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:55.392114Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:55.392243Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6080 TClient is connected to server localhost:6080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:55.884050Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.891128Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:58.598991Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344692589980392:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.599093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.599517Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344692589980408:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.603526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:04:58.615980Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344692589980410:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:04:58.713290Z node 4 :TX_PROXY ERROR: Actor# [4:7494344692589980461:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:58.838443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-17T18:04:59.100783Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494344696884947910:2355], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:04:59.102384Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YmI4OGIyOTctNzNlN2FjODAtYTNjY2UyYjEtNTU3YTIxNmI=, ActorId: [4:7494344696884947908:2354], ActorState: ExecuteState, TraceId: 01js2est9t8e1jkcf9beeebtf5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:04:59.134056Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.184169Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::SeveralCTAS+UseSink >> KqpQueryService::Explain [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] >> KqpQueryService::AlterCdcTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: Trying to start YDB, gRPC: 62406, MsgBus: 3927 2025-04-17T18:04:34.545323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344591384443063:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:34.545676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c36/r3tmp/tmpIFm34d/pdisk_1.dat 2025-04-17T18:04:34.939607Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62406, node 1 2025-04-17T18:04:34.990772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:34.990934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:35.010366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:35.063365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:35.063391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:35.063396Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:35.063567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3927 TClient is connected to server localhost:3927 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:35.694970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:35.709778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:35.721747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:35.956423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:36.129786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:36.230257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:37.985565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344604269346598:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:37.985680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.404120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.475219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.548394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.585949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.614633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.649686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.692098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:38.750592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344608564314560:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.750677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.750880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344608564314565:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:38.756444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:38.765641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344608564314568:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:38.860922Z node 1 :TX_PROXY ERROR: Actor# [1:7494344608564314625:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:39.544912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344591384443063:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:39.554369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:39.797743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24880, MsgBus: 9935 2025-04-17T18:04:48.842918Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344648981758339:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:48.846239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c36/r3tmp/tmpMf6sSy/pdisk_1.dat 2025-04-17T18:04:49.001992Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:49.016492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:49.016573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:49.018308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24880, node 2 2025-04-17T18:04:49.067816Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:49.067836Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:49.067843Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:49.067967Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9935 TClient is connected to server localhost:9935 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:49.646847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:49.676149Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:49.685710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.7775 ... you don't have access permissions } 2025-04-17T18:04:52.146938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.193693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.222898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.246946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.274716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.311859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.354749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.390871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.481859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344666161629903:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.481944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.482171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344666161629908:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.486405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:52.501365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344666161629910:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:52.598903Z node 2 :TX_PROXY ERROR: Actor# [2:7494344666161629967:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19072, MsgBus: 4102 2025-04-17T18:04:54.634584Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344673844484760:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:54.643480Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c36/r3tmp/tmpRFdUqY/pdisk_1.dat 2025-04-17T18:04:54.758823Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19072, node 3 2025-04-17T18:04:54.794701Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:54.794788Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:54.811709Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:54.853613Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:54.853652Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:54.853663Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:54.853775Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4102 TClient is connected to server localhost:4102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:55.405549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.412262Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
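
The recurring KQP_WORKLOAD_SERVICE warnings above are part of each test's bootstrap rather than failures: the default resource pool under /Root/.metadata/workload_manager/pools/default does not exist yet on a fresh database, several actors race to create it, the loser sees "path exist, request accepts it", and a scheduled retry re-reads the path ("completed, doublechecking"). A minimal Python sketch of that create-then-verify pattern follows; the names are illustrative, not YDB's internal API:

    # Hypothetical sketch of the create-then-verify ("doublechecking") flow
    # for the default resource pool; names are illustrative, not YDB's API.
    class AlreadyExists(Exception):
        pass

    _pools: set[str] = set()

    def create_pool(path: str) -> None:
        if path in _pools:
            raise AlreadyExists(path)  # "path exist, request accepts it"
        _pools.add(path)

    def ensure_default_pool(
        path: str = "/Root/.metadata/workload_manager/pools/default",
    ) -> None:
        try:
            create_pool(path)          # the ESchemeOpCreateResourcePool proposal
        except AlreadyExists:
            pass                       # a concurrent creator won the race; benign
        assert path in _pools          # retry step: "completed, doublechecking"

    ensure_default_pool()

Because the creation is idempotent in this sense, the NOT_FOUND warnings and the single TX_PROXY "path exist" error are expected noise during the first queries against a fresh database.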
2025-04-17T18:04:55.422894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:55.502758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.672291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.784460Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:57.923350Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344686729388257:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.923435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.974461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.011018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.095988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.139453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.185891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.226415Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.298347Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.371640Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344691024356217:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.371724Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.371925Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344691024356222:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.375178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:58.389005Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344691024356224:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:58.488826Z node 3 :TX_PROXY ERROR: Actor# [3:7494344691024356281:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:59.631421Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344673844484760:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:59.631492Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TestKinesisHttpProxy::TestWrongRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-04-17T18:04:05.963262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344465485664558:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:05.964297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004e6/r3tmp/tmp5frwBh/pdisk_1.dat 2025-04-17T18:04:06.432801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:06.432903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:06.435269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:06.488200Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3324, node 1 2025-04-17T18:04:06.604478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:06.604511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:06.604522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:06.604663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
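
Each test case in this suite boots a fresh single-node YDB, waits for the root to come up (the WaitRootIsUp / TClient::Ls exchange above), and only then creates its tables; the "propose itself is undo unsafe" WARN lines appear to be the schemeshard noting that a proposed suboperation cannot be undone, not an error. A hedged sketch of an equivalent client-side readiness check using the YDB Python SDK; the gRPC port comes from the log, while the host, timeout, and the SDK being a faithful stand-in for the C++ TClient harness are assumptions:

    # Sketch only: the test harness does this in C++ via TClient; host and
    # timeout here are assumptions, the port is the one logged above.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:3324", database="/Root")
    driver.wait(timeout=15)  # roughly what WaitRootIsUp 'Root' verifies
    print("root is up")
    driver.stop()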
2025-04-17T18:04:07.098426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.118390Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:30407 2025-04-17T18:04:07.315995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.319609Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:04:07.321244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.342761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.484902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.532431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-17T18:04:07.537261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.573395Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-17T18:04:07.578648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.612166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.645825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.677745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:04:07.709633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.744981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:07.820927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.102643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344482665535113:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:09.102784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344482665535125:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:09.102790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:09.106615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T18:04:09.117090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344482665535127:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T18:04:09.202117Z node 1 :TX_PROXY ERROR: Actor# [1:7494344482665535178:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:09.679131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2er9gc286zw5kk46h431mb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY2ZWI2YWUtYzk2ZmVkNGYtZGY0NjI4MTItNDZhNmM2MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:09.757129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.787700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:09.826810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:04:09.856756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.885500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.914583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.944979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.969113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.993617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
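
The SendMessageBatch exchange further below exercises the YMQ HTTP proxy's per-entry validation on a FIFO queue: two entries carry a MessageGroupId and succeed (each gets a SequenceNumber), while the third omits it and comes back as a per-entry 400 MissingParameter error inside an otherwise 200 response. A sketch of the same call through boto3 against the proxy; the endpoint, queue URL, and credentials are placeholders, not values from the test source:

    import boto3

    sqs = boto3.client(
        "sqs",
        endpoint_url="http://localhost:8771",  # placeholder proxy address
        region_name="ru-central1",
        aws_access_key_id="fake",
        aws_secret_access_key="fake",
    )
    resp = sqs.send_message_batch(
        QueueUrl="http://localhost:8771/cloud4/000000000000000101v0/ExampleQueueName.fifo",  # placeholder
        Entries=[
            {"Id": "Id-0", "MessageBody": "a", "MessageGroupId": "g1",
             "MessageAttributes": {"k": {"DataType": "String", "StringValue": "v"}}},
            {"Id": "Id-1", "MessageBody": "b", "MessageGroupId": "g1"},
            {"Id": "Id-2", "MessageBody": "c"},  # no MessageGroupId -> per-entry error
        ],
    )
    # resp["Successful"] holds Id-0/Id-1 with SequenceNumbers;
    # resp["Failed"] holds Id-2 with Code == "MissingParameter".

Note that the batch as a whole is not rejected: SQS-style batch APIs report success and failure per entry, which is exactly what the trace below shows.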
2025-04-17T18:04:10.042151Z node 1 :HTTP INFO: Listening on http://127.0.0.1:27666 2025-04-17T18:04:10.962662Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344465485664558:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:10.962728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:11.043855Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T18:04:11.043882Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T18:04:11.043957Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T18:04:11.045090Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T18:04:11.063481Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T18:04:11.063518Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T18:04:11.063628Z node 1 :SQS INFO: Request SQS users list 2025-04-17T18:04:11.063649Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T18:04:11.068474Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-17T18:04:11.068500Z node 1 :SQ ... 612a9" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "126e8d87-599de00f-e777770d-8f59a181" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ccbdb321-7329617b-12fe60cf-e5902e08" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } } 2025-04-17T18:05:00.429691Z node 7 :SQS TRACE: Request [899381d5-253f936e-92ef765-d5c612a9] Sending sqs response: { SendMessageBatch { RequestId: "899381d5-253f936e-92ef765-d5c612a9" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "126e8d87-599de00f-e777770d-8f59a181" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ccbdb321-7329617b-12fe60cf-e5902e08" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "899381d5-253f936e-92ef765-d5c612a9" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-04-17T18:05:00.429902Z node 7 :SQS DEBUG: Request SendMessageBatch working duration: 190ms 2025-04-17T18:05:00.430024Z node 7 :SQS TRACE: HandleSqsResponse SendMessageBatch { RequestId: "899381d5-253f936e-92ef765-d5c612a9" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "126e8d87-599de00f-e777770d-8f59a181" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ccbdb321-7329617b-12fe60cf-e5902e08" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "899381d5-253f936e-92ef765-d5c612a9" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-04-17T18:05:00.430134Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7494344701062384658:2501]: SendMessageBatch { RequestId: "899381d5-253f936e-92ef765-d5c612a9" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "126e8d87-599de00f-e777770d-8f59a181" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ccbdb321-7329617b-12fe60cf-e5902e08" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "899381d5-253f936e-92ef765-d5c612a9" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-04-17T18:05:00.430229Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7494344701062384669:3633]. Found: 1 2025-04-17T18:05:00.430447Z node 7 :SQS TRACE: Request [899381d5-253f936e-92ef765-d5c612a9] HandleResponse: { SendMessageBatch { RequestId: "899381d5-253f936e-92ef765-d5c612a9" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "126e8d87-599de00f-e777770d-8f59a181" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ccbdb321-7329617b-12fe60cf-e5902e08" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "899381d5-253f936e-92ef765-d5c612a9" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2025-04-17T18:05:00.430577Z node 7 :SQS DEBUG: Request [899381d5-253f936e-92ef765-d5c612a9] Sending reply from proxy actor: { SendMessageBatch { RequestId: "899381d5-253f936e-92ef765-d5c612a9" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "126e8d87-599de00f-e777770d-8f59a181" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ccbdb321-7329617b-12fe60cf-e5902e08" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "899381d5-253f936e-92ef765-d5c612a9" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-04-17T18:05:00.430895Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [899381d5-253f936e-92ef765-d5c612a9] Got succesfult GRPC response. 2025-04-17T18:05:00.431090Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [899381d5-253f936e-92ef765-d5c612a9] reply ok 2025-04-17T18:05:00.431208Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [899381d5-253f936e-92ef765-d5c612a9] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 643 SourceAddress: d8f2:6300:6050:0:c0f2:6300:6050:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2025-04-17T18:05:00.431344Z node 7 :HTTP DEBUG: (#40,[::1]:46606) <- (200 ) 2025-04-17T18:05:00.431406Z node 7 :HTTP DEBUG: (#40,[::1]:46606) connection closed 2025-04-17T18:05:00.433880Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2025-04-17T18:05:00.433910Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 4ms 2025-04-17T18:05:00.433954Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2025-04-17T18:05:00.433966Z node 7 :SQS DEBUG: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-04-17T18:05:00.434057Z node 7 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). Mode: COMPILE_AND_EXEC 2025-04-17T18:05:00.434122Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2025-04-17T18:05:00.434389Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? 
\006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> KqpQueryServiceScripts::ExecuteScriptStatsProfile >> KqpQueryService::ShowCreateTableNotSuccess >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2025-04-17T18:04:09.002359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344483219457614:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:09.002502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000478/r3tmp/tmppFwYri/pdisk_1.dat 2025-04-17T18:04:09.323142Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8962, node 1 2025-04-17T18:04:09.396930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:09.397033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:09.400574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:09.429124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:09.429147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:09.429156Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:09.429275Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:09.684494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.706348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:24097 2025-04-17T18:04:09.887513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.897096Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:04:09.903689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.912095Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T18:04:09.919678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:10.023451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:10.060746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:10.101652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:10.128238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:10.158787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:10.189523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:10.225302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:10.257321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:10.287360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:11.876830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344491809393650:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:11.876831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344491809393645:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:11.876942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:11.881081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T18:04:11.899670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344491809393659:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T18:04:11.957670Z node 1 :TX_PROXY ERROR: Actor# [1:7494344491809393710:2914] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:12.279226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2erc720bgnbzsevgkyq45v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU1NWRlMDAtNDYxMWRlYWUtYTcxN2MwYmItOTNkNzM0MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:12.349180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:12.421429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:12.465646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:12.534211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:12.567091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:12.599836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:12.629155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:04:12.658120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... waiting... 
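
Later in this test (the 500 responses below), the suite fires many TagQueue/UntagQueue calls at the same queue in parallel; the proxy serializes tag mutations per queue and rejects the losers with InternalFailure: "Untag queue query failed, conflicting query in parallel", leaving retries to the caller. A sketch of driving that contention from a client, using boto3's untag_queue with a placeholder endpoint and queue URL:

    import boto3
    from botocore.exceptions import ClientError
    from concurrent.futures import ThreadPoolExecutor

    sqs = boto3.client(
        "sqs",
        endpoint_url="http://localhost:8771",  # placeholder
        region_name="ru-central1",
        aws_access_key_id="fake",
        aws_secret_access_key="fake",
    )
    queue_url = "http://localhost:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo"  # placeholder

    def untag(key: str):
        try:
            sqs.untag_queue(QueueUrl=queue_url, TagKeys=[key])
            return key, "ok"
        except ClientError as err:
            return key, err.response["Error"]["Code"]  # e.g. "InternalFailure"

    # Parallel calls against one queue reproduce the conflict seen below.
    with ThreadPoolExecutor(max_workers=8) as pool:
        print(dict(pool.map(untag, [f"k{i}" for i in range(40)])))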
2025-04-17T18:04:12.725685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:04:12.842827Z node 1 :HTTP INFO: Listening on http://127.0.0.1:12497 2025-04-17T18:04:13.844376Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T18:04:13.844464Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: false EnableDeadLetterQueues: true } 2025-04-17T18:04:13.845097Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T18:04:13.845887Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T18:04:13.850396Z node 1 :HTTP INFO: Listening on http://[::]:21028 2025-04-17T18:04:13.863005Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T18:04:13.863038Z node 1 :SQS INFO: Request SQS users list 2025-04-17T18:04:13.863066Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T18:04:13.863068Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T18:04:13.867002Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-04-17T18:04:13.867019Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-17T18:04:13.867248Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromName (Parameter 'FROM_NAME (DataType 'Utf8String))) (let batchSize (Parameter 'BATCH_SIZE (DataType 'Uint64))) (let settingsTable '/Root/SQS/.Settings) ... ] } 0 2025-04-17T18:05:01.018627Z node 7 :HTTP DEBUG: (#119,[::1]:50534) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: 76b69f9d-a3b9eabf-4ad40c38-e48d2e4 x-amz-crc32: 308364558 Content-Type: application/x-amz-json-1.1 Content-Length: 96 {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.018689Z node 7 :HTTP DEBUG: (#119,[::1]:50534) connection closed 2025-04-17T18:05:01.018701Z node 7 :HTTP DEBUG: (#135,[::1]:50616) <- (500 InternalFailure) 2025-04-17T18:05:01.018757Z node 7 :HTTP DEBUG: (#135,[::1]:50616) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.UntagQueue X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 95 { "QueueUrl":"http://ghrun-gkdwnlcfnq.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo", "TagKeys": [ "k23" ] } 0 2025-04-17T18:05:01.018793Z node 7 :HTTP DEBUG: (#135,[::1]:50616) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: b182ae0-b3207c80-31ea5b82-4c99aeb2 x-amz-crc32: 308364558 Content-Type: application/x-amz-json-1.1 Content-Length: 96 {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.018865Z node 7 :HTTP DEBUG: (#135,[::1]:50616) connection closed Http output full {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.019002Z node 7 :HTTP DEBUG: (#98,[::1]:50702) <- (500 InternalFailure) 2025-04-17T18:05:01.019097Z node 7 :HTTP DEBUG: (#98,[::1]:50702) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.UntagQueue X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close 
Transfer-Encoding: chunked 95 { "QueueUrl":"http://ghrun-gkdwnlcfnq.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo", "TagKeys": [ "k39" ] } 0 2025-04-17T18:05:01.019150Z node 7 :HTTP DEBUG: (#98,[::1]:50702) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: 27cbb5b-d972c05f-d5a321bc-e1c78365 x-amz-crc32: 308364558 Content-Type: application/x-amz-json-1.1 Content-Length: 96 {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.019243Z node 7 :HTTP DEBUG: (#98,[::1]:50702) connection closed Http output full {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.019690Z node 7 :HTTP DEBUG: (#87,[::1]:50686) <- (500 InternalFailure) 2025-04-17T18:05:01.019731Z node 7 :HTTP DEBUG: (#87,[::1]:50686) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.UntagQueue X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 95 { "QueueUrl":"http://ghrun-gkdwnlcfnq.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo", "TagKeys": [ "k32" ] } 0 2025-04-17T18:05:01.019752Z node 7 :HTTP DEBUG: (#87,[::1]:50686) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: b28baec6-aa1830aa-82d0870e-1f0b2c9e x-amz-crc32: 308364558 Content-Type: application/x-amz-json-1.1 Content-Length: 96 {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.019789Z node 7 :HTTP DEBUG: (#87,[::1]:50686) connection closed 2025-04-17T18:05:01.019915Z node 7 :HTTP_PROXY DEBUG: http request [UntagQueue] requestId [90f460e0-a65784d2-2b03732e-10932746] Not retrying GRPC response. Code: 500, Error: InternalFailure 2025-04-17T18:05:01.019934Z node 7 :HTTP_PROXY INFO: http request [UntagQueue] requestId [90f460e0-a65784d2-2b03732e-10932746] reply with status: STATUS_UNDEFINED message: Untag queue query failed, conflicting query in parallel 2025-04-17T18:05:01.020008Z node 7 :HTTP_PROXY DEBUG: http request [UntagQueue] requestId [90f460e0-a65784d2-2b03732e-10932746] Send metering event. 
HttpStatusCode: 500 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 565 ResponseSizeInBytes: 288 SourceAddress: 1836:2300:6050:0:36:2300:6050:0 ResourceId: 000000000000000301v0 Action: UntagQueue Http output full {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} Http output full {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} Http output full {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.020471Z node 7 :HTTP DEBUG: (#120,[::1]:50712) <- (500 InternalFailure) 2025-04-17T18:05:01.020543Z node 7 :HTTP DEBUG: (#120,[::1]:50712) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.UntagQueue X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 95 { "QueueUrl":"http://ghrun-gkdwnlcfnq.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo", "TagKeys": [ "k31" ] } 0 2025-04-17T18:05:01.020589Z node 7 :HTTP DEBUG: (#120,[::1]:50712) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: 90f460e0-a65784d2-2b03732e-10932746 x-amz-crc32: 308364558 Content-Type: application/x-amz-json-1.1 Content-Length: 96 {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-17T18:05:01.020653Z node 7 :HTTP DEBUG: (#120,[::1]:50712) connection closed 2025-04-17T18:05:01.023613Z node 7 :HTTP DEBUG: (#37,[::1]:50722) incoming connection opened 2025-04-17T18:05:01.023677Z node 7 :HTTP DEBUG: (#37,[::1]:50722) -> (POST /Root) 2025-04-17T18:05:01.023840Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b840:801:6050:0:a040:801:6050:0] request [ListQueueTags] url [/Root] database [/Root] requestId: 6d1d0794-666659da-a354d914-982aacfd 2025-04-17T18:05:01.024208Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [6d1d0794-666659da-a354d914-982aacfd] got new request from [b840:801:6050:0:a040:801:6050:0] 2025-04-17T18:05:01.024875Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [6d1d0794-666659da-a354d914-982aacfd] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-17T18:05:01.024913Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [6d1d0794-666659da-a354d914-982aacfd] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-17T18:05:01.026260Z node 7 :SQS DEBUG: Got new request in YMQ proxy. 
FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 6d1d0794-666659da-a354d914-982aacfd 2025-04-17T18:05:01.026384Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2025-04-17T18:05:01.026402Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Request proxy started 2025-04-17T18:05:01.026495Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2025-04-17T18:05:01.026572Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Get configuration duration: 0ms 2025-04-17T18:05:01.026668Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Send get leader node request to sqs service for cloud4/000000000000000301v0 2025-04-17T18:05:01.026692Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Leader node for queue [cloud4/000000000000000301v0] is 7 2025-04-17T18:05:01.026725Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Got leader node for queue response. Node id: 7. Status: 0 2025-04-17T18:05:01.026994Z node 7 :SQS TRACE: Request [6d1d0794-666659da-a354d914-982aacfd] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "6d1d0794-666659da-a354d914-982aacfd" 2025-04-17T18:05:01.027236Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "6d1d0794-666659da-a354d914-982aacfd" 2025-04-17T18:05:01.027340Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Request started. Actor: [7:7494344703273648450:5467] 2025-04-17T18:05:01.027407Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7494344703273648450:5467] 2025-04-17T18:05:01.027424Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2025-04-17T18:05:01.027457Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Get configuration duration: 0ms 2025-04-17T18:05:01.027473Z node 7 :SQS TRACE: Request [6d1d0794-666659da-a354d914-982aacfd] Got configuration. Root url: http://ghrun-gkdwnlcfnq.auto.internal:8771, Shards: 1, Fail: 0 2025-04-17T18:05:01.027490Z node 7 :SQS TRACE: Request [6d1d0794-666659da-a354d914-982aacfd] DoRoutine 2025-04-17T18:05:01.027576Z node 7 :SQS TRACE: Request [6d1d0794-666659da-a354d914-982aacfd] SendReplyAndDie from action actor { ListQueueTags { RequestId: "6d1d0794-666659da-a354d914-982aacfd" } } 2025-04-17T18:05:01.027667Z node 7 :SQS TRACE: Request [6d1d0794-666659da-a354d914-982aacfd] Sending sqs response: { ListQueueTags { RequestId: "6d1d0794-666659da-a354d914-982aacfd" } RequestId: "6d1d0794-666659da-a354d914-982aacfd" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-04-17T18:05:01.027762Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7494344703273648450:5467]. 
Found: 1 2025-04-17T18:05:01.027774Z node 7 :SQS TRACE: HandleSqsResponse ListQueueTags { RequestId: "6d1d0794-666659da-a354d914-982aacfd" } RequestId: "6d1d0794-666659da-a354d914-982aacfd" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-04-17T18:05:01.027821Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7494344703273648449:2764]: ListQueueTags { RequestId: "6d1d0794-666659da-a354d914-982aacfd" } RequestId: "6d1d0794-666659da-a354d914-982aacfd" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-04-17T18:05:01.027955Z node 7 :SQS TRACE: Request [6d1d0794-666659da-a354d914-982aacfd] HandleResponse: { ListQueueTags { RequestId: "6d1d0794-666659da-a354d914-982aacfd" } RequestId: "6d1d0794-666659da-a354d914-982aacfd" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-04-17T18:05:01.028012Z node 7 :SQS DEBUG: Request [6d1d0794-666659da-a354d914-982aacfd] Sending reply from proxy actor: { ListQueueTags { RequestId: "6d1d0794-666659da-a354d914-982aacfd" } RequestId: "6d1d0794-666659da-a354d914-982aacfd" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-04-17T18:05:01.028248Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [6d1d0794-666659da-a354d914-982aacfd] Got succesfult GRPC response. 2025-04-17T18:05:01.028308Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [6d1d0794-666659da-a354d914-982aacfd] reply ok 2025-04-17T18:05:01.028397Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [6d1d0794-666659da-a354d914-982aacfd] Send metering event. HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 179 SourceAddress: b840:801:6050:0:a040:801:6050:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-04-17T18:05:01.028489Z node 7 :HTTP DEBUG: (#37,[::1]:50722) <- (200 ) 2025-04-17T18:05:01.028589Z node 7 :HTTP DEBUG: (#37,[::1]:50722) connection closed Http output full {} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: Trying to start YDB, gRPC: 31553, MsgBus: 7011 2025-04-17T18:04:43.382802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344629066365208:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:43.383993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf7/r3tmp/tmp5Fhj5W/pdisk_1.dat 2025-04-17T18:04:43.878824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:43.922382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:43.922512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:43.932114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31553, node 1 2025-04-17T18:04:43.998625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:43.998655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:43.998662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-04-17T18:04:43.998801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7011 TClient is connected to server localhost:7011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:44.600866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:44.618046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:44.809988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:44.998227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.082093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:46.897776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344641951268892:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:46.897879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.270980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.308017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.342385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.369743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.436918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.485131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.526753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.601186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344646246236856:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.601286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.601608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344646246236861:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.604904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:47.616851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344646246236863:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:47.704808Z node 1 :TX_PROXY ERROR: Actor# [1:7494344646246236921:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:48.386350Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344629066365208:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:48.386427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16693, MsgBus: 63435 2025-04-17T18:04:50.392010Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344657676617766:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:50.392057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf7/r3tmp/tmpx5Epvk/pdisk_1.dat 2025-04-17T18:04:50.564502Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:50.565824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:50.565901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:50.567277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16693, node 2 2025-04-17T18:04:50.603733Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:50.603769Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:50.603783Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:50.603884Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63435 TClient is connected to server localhost:63435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:04:51.032514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:51.051093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.157142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:51.297232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:51.384598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.631381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.671427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.714908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.787441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:53.878677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344670561522086:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:53.878741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:53.878937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344670561522091:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:53.882918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:53.899582Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T18:04:53.900384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344670561522093:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:53.989191Z node 2 :TX_PROXY ERROR: Actor# [2:7494344670561522149:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:54.966193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 10 2025-04-17T18:04:55.393014Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344657676617766:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:55.393078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19657, MsgBus: 1169 2025-04-17T18:04:56.316896Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344681951993377:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:56.316944Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf7/r3tmp/tmpxFJIWs/pdisk_1.dat 2025-04-17T18:04:56.468275Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:56.470456Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:56.470526Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:56.472160Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19657, node 3 2025-04-17T18:04:56.519230Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:56.519250Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:56.519256Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:56.519368Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1169 TClient is connected to server localhost:1169 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:56.927407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.938949Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:56.960467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:57.039604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:57.236983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:57.314501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:59.304641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344694836897020:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.336199Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.363146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.436989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.515719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.554815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.632622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.680313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.764823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.873614Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344694836897695:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.873722Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.873946Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344694836897700:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.877421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:59.888466Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344694836897702:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:59.988247Z node 3 :TX_PROXY ERROR: Actor# [3:7494344694836897757:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:01.318449Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344681951993377:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:01.318554Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery >> KqpQueryService::StreamExecuteQueryPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 14714, MsgBus: 4027 2025-04-17T18:04:39.508314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344612273442886:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:39.516381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c18/r3tmp/tmpaGdgen/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14714, node 1 2025-04-17T18:04:39.923648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:39.923766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:39.926300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:39.929153Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:04:39.931093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:39.956159Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:04:40.074738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:40.074760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:40.074766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:40.074863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4027 TClient is connected to server localhost:4027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:40.707232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:40.726839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:40.731354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:40.882278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:41.054371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:41.142308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.812053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344625158346389:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.812161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:43.214632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.277883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.314056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.351479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.386269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.468506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.521779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.598608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344629453314351:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:43.598722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:43.606319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344629453314356:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:43.612754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:43.627011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344629453314358:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:43.686306Z node 1 :TX_PROXY ERROR: Actor# [1:7494344629453314414:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:44.498292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344612273442886:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:44.505785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20340, MsgBus: 20568 2025-04-17T18:04:45.800562Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344637953963888:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:45.801295Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c18/r3tmp/tmpEdEipV/pdisk_1.dat 2025-04-17T18:04:45.989228Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20340, node 2 2025-04-17T18:04:46.083815Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:46.083839Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:46.083846Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:46.083951Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:04:46.087589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:46.087685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:46.089381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20568 TClient is connected to server localhost:20568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:04:46.542568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:46.554403Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:46.563238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:46.654935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation p ... uboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.377561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344655133835322:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.377642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.377858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344655133835327:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.383329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:49.403346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344655133835329:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:49.457012Z node 2 :TX_PROXY ERROR: Actor# [2:7494344655133835386:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:50.404901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:50.406033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:50.407990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:50.802717Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344637953963888:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:50.802771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4399, MsgBus: 18429 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c18/r3tmp/tmpITO6KC/pdisk_1.dat 2025-04-17T18:04:53.940645Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344669345503744:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:53.940738Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:04:54.013814Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:54.015230Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:54.015302Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4399, node 3 2025-04-17T18:04:54.083031Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:54.182645Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:54.182666Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:54.182673Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:54.182779Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18429 TClient is connected to server localhost:18429 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:54.603241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:54.618005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:54.680606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:54.832875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:54.912803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:57.117836Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344686525374539:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.117921Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.166602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.200172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.272411Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.307412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.342065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.379467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.456115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.541308Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344686525375206:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.541427Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.541621Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344686525375212:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.545243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:57.555372Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344686525375214:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:57.644389Z node 3 :TX_PROXY ERROR: Actor# [3:7494344686525375268:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:58.652363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.658013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.659672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.792549Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344669345503744:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:58.792600Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:01.197243Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913101232, txId: 281474976710701] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD] Test command err: Trying to start YDB, gRPC: 18452, MsgBus: 3817 2025-04-17T18:04:38.440072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344604413864008:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:38.440380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c1f/r3tmp/tmpAoNSLX/pdisk_1.dat 2025-04-17T18:04:38.798221Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18452, node 1 2025-04-17T18:04:38.834632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:38.834753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:38.836201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:38.933582Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:38.933614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:38.933621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:38.933736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3817 TClient is connected to server localhost:3817 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:39.522689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:39.543607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:39.550453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:39.690911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:39.862942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:39.940984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:41.542088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344617298767651:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:41.542204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:41.836701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.879793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.919044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.954722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.026128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.073049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.117824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.213656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344621593735608:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.213727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.213843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344621593735613:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.217847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:42.229116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344621593735615:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:42.298698Z node 1 :TX_PROXY ERROR: Actor# [1:7494344621593735673:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:43.415525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:43.439969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344604413864008:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:43.440055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:43.585841Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344625888703376:2523], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-17T18:04:43.586114Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzk2MDAzMjMtM2U0MWRhNmItNmFkZjE2NjktZDY0ODE0YTg=, ActorId: [1:7494344625888703304:2512], ActorState: ExecuteState, TraceId: 01js2esb3e0tj9gc0qhh49npx2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 8685, MsgBus: 4103 2025-04-17T18:04:44.562730Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344631341158540:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:44.564323Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c1f/r3tmp/tmpu4MINl/pdisk_1.dat 2025-04-17T18:04:44.769443Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:44.782021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:44.782351Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:44.785053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8685, node 2 2025-04-17T18:04:44.840194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:44.840215Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:44.840224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:44.840363Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4103 TClient is connected to server localhost:4103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPa ... aseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:54.235715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:04:54.244811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344674410235146:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:04:54.304059Z node 3 :TX_PROXY ERROR: Actor# [3:7494344674410235197:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:54.327341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-17T18:04:54.531425Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494344674410235347:2355], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:65: Error: Failed to convert type: Struct<'id':Int32,'val':Null> to Struct<'id':Int32,'val':Int32>
:2:65: Error: Failed to convert 'val': Null to Int32
:2:65: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:04:54.532902Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGE5YThlZGItY2ZmMzg3NDgtMjhhNTc0ZDYtMjBhMDhlOTU=, ActorId: [3:7494344674410235345:2354], ActorState: ExecuteState, TraceId: 01js2esnvnedcjarw667wc2sqe, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:04:54.558312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24314, MsgBus: 18472 2025-04-17T18:04:55.581189Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344678409588793:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:55.610895Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c1f/r3tmp/tmpLoGTy1/pdisk_1.dat 2025-04-17T18:04:55.702268Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24314, node 4 2025-04-17T18:04:55.723205Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:55.723281Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:55.728654Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:55.770851Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:55.770875Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:55.770884Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:55.771002Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18472 TClient is connected to server localhost:18472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:56.248152Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:56.269384Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.348654Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.542227Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.661818Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:58.998312Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344691294492357:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.998414Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.062586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.142367Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.218886Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.256288Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.293786Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.353134Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.428504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.583241Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344695589460331:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.583361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.583665Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344695589460336:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.587963Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:59.607063Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344695589460338:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:59.662093Z node 4 :TX_PROXY ERROR: Actor# [4:7494344695589460396:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:00.572561Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344678409588793:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:00.572636Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:00.735671Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:00.911606Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037922:1][4:7494344699884428235:2535] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:19:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-17T18:05:01.085685Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.197680Z node 4 :TX_PROXY ERROR: Actor# [4:7494344704179395718:4036] txid# 281474976715675, issues: { message: "Cannot change partition count. Use split/merge instead" severity: 1 } 2025-04-17T18:05:01.198152Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjAwZjU3OWQtNzdjOWY4MzgtZGEyYThjYjUtMjkyYWNkNjA=, ActorId: [4:7494344704179395627:2551], ActorState: ExecuteState, TraceId: 01js2eswbq09e0th5h89etv4v7, Create QueryResponse for error on request, msg: Query failed, status: BAD_REQUEST:
: Error: Cannot change partition count. Use split/merge instead, code: 2017 >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager >> KqpQueryService::CreateTempTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2025-04-17T18:04:05.969400Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344462665719369:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:05.969447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000593/r3tmp/tmpKZkUUU/pdisk_1.dat 2025-04-17T18:04:06.407293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:06.415548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:06.416395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:06.424321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10081, node 1 2025-04-17T18:04:06.610809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:06.610845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:06.610858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:06.611008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:07.099008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:1893 2025-04-17T18:04:07.315664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
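[editor note] The `Cannot change partition count. Use split/merge instead` failure (code 2017) logged just above is the PersQueue layer rejecting a direct rewrite of a topic's partition count; the supported direction is to grow partitions, e.g. via the autoscaling floor. A hedged sketch of that path (topic name, count, and the exact ALTER TOPIC option are assumptions from YDB topic docs, not from this test):

```python
# Sketch: raise the active-partition floor instead of rewriting the count.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
pool.retry_operation_sync(lambda s: s.execute_scheme(
    "ALTER TOPIC `/Root/topic1` SET (MIN_ACTIVE_PARTITIONS = 5);"))
```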
2025-04-17T18:04:07.321240Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:04:07.323214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.338857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.488056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.525138Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-17T18:04:07.528230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.607156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.645429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.682085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.714355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.750427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.786562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.815916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:08.919030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344475550622705:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:08.919034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344475550622716:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:08.919129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:08.924251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T18:04:08.935004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344475550622719:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T18:04:09.015174Z node 1 :TX_PROXY ERROR: Actor# [1:7494344479845590068:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:09.678859Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2er9ah9yqd33t6cd4zcv8s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDllODJkMmQtMjRiMmQxZjYtMjI2ZDgzYWQtOWViNzMyYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:09.760130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.799702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.832136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.856827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.882118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.913155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.942618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.969410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:10.003728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
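[editor note] The recurring sequence above — `Resource pool default not found` (NOT_FOUND), then `Scheduled retry ... doublechecking`, then `path exist, request accepts it` — is a benign startup race: the first query triggers auto-creation of the workload manager's default pool, and concurrent creators find the path already present. A hedged sketch that probes the pool object once a query has run (endpoint is an assumption; the metadata path is the one printed in these logs):

```python
# Sketch: check for the auto-created default resource pool entry.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed
driver.wait(timeout=5)
entry = driver.scheme_client.describe_path(
    "/Root/.metadata/workload_manager/pools/default")
print(entry.name, entry.type)  # present after the first query on the database
```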
2025-04-17T18:04:10.054866Z node 1 :HTTP INFO: Listening on http://127.0.0.1:23265 2025-04-17T18:04:10.969863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344462665719369:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:10.969924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:11.056539Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T18:04:11.056646Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T18:04:11.057939Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T18:04:11.058253Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T18:04:11.063418Z node 1 :HTTP INFO: Listening on http://[::]:63786 2025-04-17T18:04:11.076254Z node 1 :SQS INFO: Request SQS users list 2025-04-17T18:04:11.076284Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T18:04:11.076335Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T18:04:11.076369Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T18:04:11.111138Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-04-17T18:04:11.111591Z node 1 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromQueue (Parameter 'FROM_QUEUE (DataType 'Utf8String))) (let batchSi ... SER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 18ms 2025-04-17T18:05:01.580954Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T18:05:01.580991Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-17T18:05:01.581082Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 20ms 2025-04-17T18:05:01.581360Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data 
Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T18:05:01.582940Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T18:05:01.582973Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 20ms 2025-04-17T18:05:01.583425Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" 
Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T18:05:01.583458Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-17T18:05:01.583614Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 21ms 2025-04-17T18:05:01.584061Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-17T18:05:01.762357Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7494344706925854083:2444]: Pool not found 2025-04-17T18:05:01.762597Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-17T18:05:02.004441Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7494344706925854067:2439]: Pool not found 2025-04-17T18:05:02.005186Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-17T18:05:02.008897Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7494344711220821499:2463], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-17T18:05:02.008972Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494344711220821498:2462], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.009091Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.337930Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7494344711220821495:2460]: Pool not found 2025-04-17T18:05:02.338324Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete 2025-04-17T18:05:02.516737Z node 8 :HTTP DEBUG: (#37,[::1]:47476) incoming connection opened 2025-04-17T18:05:02.516816Z node 8 :HTTP DEBUG: (#37,[::1]:47476) -> (POST /) 2025-04-17T18:05:02.516998Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [b88d:4c00:6050:0:a08d:4c00:6050:0] request [CreateStream] url [/] database [] requestId: 99e9ee96-ee2caa18-78f55287-183a1199 2025-04-17T18:05:02.517666Z node 8 :HTTP_PROXY WARN: http request [CreateStream] requestId [99e9ee96-ee2caa18-78f55287-183a1199] got new request with incorrect json from [b88d:4c00:6050:0:a08d:4c00:6050:0] database '' 2025-04-17T18:05:02.517893Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [99e9ee96-ee2caa18-78f55287-183a1199] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2025-04-17T18:05:02.518233Z node 8 :HTTP DEBUG: (#37,[::1]:47476) <- (400 InvalidArgumentException) 2025-04-17T18:05:02.518278Z node 8 :HTTP DEBUG: (#37,[::1]:47476) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 0 2025-04-17T18:05:02.518308Z node 8 :HTTP DEBUG: (#37,[::1]:47476) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 99e9ee96-ee2caa18-78f55287-183a1199 x-amz-crc32: 3053902336 Content-Type: application/x-amz-json-1.1 Content-Length: 135 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 2025-04-17T18:05:02.518413Z node 8 :HTTP DEBUG: (#37,[::1]:47476) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> KqpQueryService::DdlMixedDml [GOOD] >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> KqpQueryService::ExecuteQueryExplicitTxTLI >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpService::CloseSessionsWithLoad >> KqpLimits::ManyPartitionsSortingLimit >> KqpQueryService::StreamExecuteQuery >> KqpQueryService::DdlUser >> KqpQueryServiceScripts::Tcl [GOOD] >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::TableSink_DisableSink >> KqpService::ToDictCache+UseCache [GOOD] >> KqpService::ToDictCache-UseCache >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2025-04-17T18:04:05.958315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344465937935162:2061];send_to=[0:7307199536658146131:7762515]; 
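[editor note] The 400 exchange above shows the HTTP proxy's strict JSON-to-proto validation: a key that maps to no field of the Kinesis CreateStream request is rejected with InvalidArgumentException (`json_proto_conversion.h:400`). A stdlib-only sketch of the same call; the body and headers mirror the logged request, while the proxy URL is an assumption (each test prints its own `Listening on http://127.0.0.1:<port>` line):

```python
# Sketch of the rejected CreateStream call from the log above.
import json
import urllib.error
import urllib.request

req = urllib.request.Request(
    "http://127.0.0.1:23265/",  # assumed proxy address, see "Listening on" lines
    data=json.dumps({"ShardCount": 5, "StreamName": "testtopic",
                     "WrongStreamName": "WrongStreamName"}).encode(),
    headers={"X-Amz-Target": "kinesisApi.CreateStream",
             "Content-Type": "application/json"})
try:
    urllib.request.urlopen(req)
except urllib.error.HTTPError as err:
    # 400 {"__type":"InvalidArgumentException","message":"...Unexpected json
    # key: WrongStreamName"}
    print(err.code, err.read().decode())
```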
2025-04-17T18:04:05.958412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005d4/r3tmp/tmpP4xjdq/pdisk_1.dat 2025-04-17T18:04:06.429475Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:06.432484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:06.432561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:06.434243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17169, node 1 2025-04-17T18:04:06.610823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:06.610848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:06.610856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:06.610989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:07.100473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.123282Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:18112 2025-04-17T18:04:07.306395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.312163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:04:07.314330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:07.329336Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T18:04:07.338751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.458878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.502202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.552320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.584890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.613349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.652052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.689248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.719092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.750170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.020536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344483117805798:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:09.020588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344483117805806:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:09.020621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:09.024349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T18:04:09.033638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344483117805812:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T18:04:09.089944Z node 1 :TX_PROXY ERROR: Actor# [1:7494344483117805863:2919] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:09.678860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2er9dt85py6k6za0jsj5dr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVhN2I4ZDItNjI1MGM1OC02YzRkZDA2Zi1iNmZmYTg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:09.756255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.785229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.817897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.845368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.870935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.893606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.919383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.945958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.972600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:10.028087Z node 1 :HTTP INFO: Listening on http://127.0.0.1:4213 2025-04-17T18:04:10.957578Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344465937935162:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:10.957665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:11.030439Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T18:04:11.031483Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T18:04:11.031718Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T18:04:11.033924Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T18:04:11.055263Z node 1 :SQS INFO: Request SQS users list 2025-04-17T18:04:11.055290Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T18:04:11.057923Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T18:04:11.058009Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T18:04:11.059296Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-04-17T18:04:11.060524Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-17T18:04:11.060766Z node 1 :SQS TRACE: Request [] Query(idx ... 5:03.807804Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976715690 State EXECUTED FrontTxId 281474976715690 2025-04-17T18:05:03.807817Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TPersQueue::SendEvReadSetAckToSenders 2025-04-17T18:05:03.807829Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976715690, NewState WAIT_RS_ACKS 2025-04-17T18:05:03.807841Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976715690 moved from EXECUTED to WAIT_RS_ACKS 2025-04-17T18:05:03.807860Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976715690] PredicateAcks: 0/0 2025-04-17T18:05:03.807867Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-17T18:05:03.807876Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Registered with mediator time cast 2025-04-17T18:05:03.807877Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976715690] PredicateAcks: 0/0 2025-04-17T18:05:03.807889Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] add an TxId 281474976715690 to the list for deletion 2025-04-17T18:05:03.807903Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976715690, NewState DELETING 2025-04-17T18:05:03.807920Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] delete key for TxId 281474976715690 2025-04-17T18:05:03.807954Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-17T18:05:03.807966Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:05:03.807982Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Try execute txs with state DELETING 2025-04-17T18:05:03.807996Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] TxId 281474976715690, State DELETING 2025-04-17T18:05:03.808009Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] delete TxId 281474976715690 2025-04-17T18:05:03.808050Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2025-04-17T18:05:03.808678Z node 8 :HTTP_PROXY 
INFO: http request [CreateStream] requestId [4c3b7408-430ff78a-b4e950ab-fa7bb156] reply ok 2025-04-17T18:05:03.808922Z node 8 :HTTP DEBUG: (#37,[::1]:56586) <- (200 ) 2025-04-17T18:05:03.809037Z node 8 :HTTP DEBUG: (#37,[::1]:56586) connection closed Http output full {} 200 {} 2025-04-17T18:05:03.809213Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2025-04-17T18:05:03.809257Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2025-04-17T18:05:03.809664Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-17T18:05:03.809681Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Try execute txs with state DELETING 2025-04-17T18:05:03.809696Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976715690, State DELETING 2025-04-17T18:05:03.809710Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] delete TxId 281474976715690 2025-04-17T18:05:03.810140Z node 8 :HTTP DEBUG: (#37,[::1]:56588) incoming connection opened 2025-04-17T18:05:03.810215Z node 8 :HTTP DEBUG: (#37,[::1]:56588) -> (POST /Root) 2025-04-17T18:05:03.810323Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [588f:c800:6050:0:408f:c800:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 2c1fe27a-62cd7ff-3a6973e5-9a7f1b79 E0000 00:00:1744913103.811392 305473 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T18:05:03.810726Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [2c1fe27a-62cd7ff-3a6973e5-9a7f1b79] got new request from [588f:c800:6050:0:408f:c800:6050:0] database '/Root' stream 'teststream' 2025-04-17T18:05:03.811248Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [2c1fe27a-62cd7ff-3a6973e5-9a7f1b79] [auth] Authorized successfully 2025-04-17T18:05:03.811326Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [2c1fe27a-62cd7ff-3a6973e5-9a7f1b79] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-17T18:05:03.813893Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7494344715511377296:2522], now have 1 active actors on pipe 2025-04-17T18:05:03.813947Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7494344715511377297:2523], now have 1 active actors on pipe 2025-04-17T18:05:03.814878Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [2c1fe27a-62cd7ff-3a6973e5-9a7f1b79] reply ok 2025-04-17T18:05:03.815247Z node 8 :HTTP DEBUG: (#37,[::1]:56588) <- (200 ) 2025-04-17T18:05:03.815336Z node 8 :HTTP DEBUG: (#37,[::1]:56588) connection closed 2025-04-17T18:05:03.815486Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7494344715511377296:2522] destroyed 2025-04-17T18:05:03.815515Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7494344715511377297:2523] destroyed Http output full {"NextToken":"CMb3s6fkMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 
{"NextToken":"CMb3s6fkMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-17T18:05:03.816890Z node 8 :HTTP DEBUG: (#37,[::1]:56594) incoming connection opened 2025-04-17T18:05:03.817000Z node 8 :HTTP DEBUG: (#37,[::1]:56594) -> (POST /Root) 2025-04-17T18:05:03.817129Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [f8c5:b500:6050:0:e0c5:b500:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 1b0866a-23a358dd-bd2b35e5-ac7e82ec 2025-04-17T18:05:03.817657Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [1b0866a-23a358dd-bd2b35e5-ac7e82ec] got new request from [f8c5:b500:6050:0:e0c5:b500:6050:0] database '/Root' stream 'teststream' 2025-04-17T18:05:03.818164Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [1b0866a-23a358dd-bd2b35e5-ac7e82ec] [auth] Authorized successfully 2025-04-17T18:05:03.818237Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [1b0866a-23a358dd-bd2b35e5-ac7e82ec] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1744913103.818320 305474 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T18:05:03.819514Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7494344715511377308:2527], now have 1 active actors on pipe 2025-04-17T18:05:03.819520Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7494344715511377309:2528], now have 1 active actors on pipe 2025-04-17T18:05:03.820169Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [1b0866a-23a358dd-bd2b35e5-ac7e82ec] reply ok 2025-04-17T18:05:03.820234Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7494344715511377309:2528] destroyed 2025-04-17T18:05:03.820255Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7494344715511377308:2527] destroyed 2025-04-17T18:05:03.820341Z node 8 :HTTP DEBUG: (#37,[::1]:56594) <- (200 ) 2025-04-17T18:05:03.820428Z node 8 :HTTP DEBUG: (#37,[::1]:56594) connection closed Http output full {"NextToken":"CMv3s6fkMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CMv3s6fkMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-17T18:05:03.822310Z node 8 :HTTP DEBUG: (#37,[::1]:56598) incoming connection opened 
2025-04-17T18:05:03.822393Z node 8 :HTTP DEBUG: (#37,[::1]:56598) -> (POST /Root) 2025-04-17T18:05:03.822534Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [180c:b300:6050:0:c:b300:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 816b0bf9-48134740-8acef12d-b5460119 2025-04-17T18:05:03.823011Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [816b0bf9-48134740-8acef12d-b5460119] got new request from [180c:b300:6050:0:c:b300:6050:0] database '/Root' stream 'teststream' 2025-04-17T18:05:03.823462Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [816b0bf9-48134740-8acef12d-b5460119] [auth] Authorized successfully 2025-04-17T18:05:03.823512Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [816b0bf9-48134740-8acef12d-b5460119] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1744913103.823577 305474 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-17T18:05:03.824432Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7494344715511377320:2532], now have 1 active actors on pipe 2025-04-17T18:05:03.824477Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7494344715511377321:2533], now have 1 active actors on pipe 2025-04-17T18:05:03.824926Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7494344715511377321:2533] destroyed 2025-04-17T18:05:03.824950Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7494344715511377320:2532] destroyed 2025-04-17T18:05:03.825068Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [816b0bf9-48134740-8acef12d-b5460119] reply ok 2025-04-17T18:05:03.825205Z node 8 :HTTP DEBUG: (#37,[::1]:56598) <- (200 ) 2025-04-17T18:05:03.825290Z node 8 :HTTP DEBUG: (#37,[::1]:56598) connection closed Http output full {"NextToken":"CND3s6fkMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CND3s6fkMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: Trying to start YDB, gRPC: 16506, MsgBus: 1185 2025-04-17T18:04:16.529948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344514115352004:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:16.530055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001c4e/r3tmp/tmpuqpCLG/pdisk_1.dat 2025-04-17T18:04:16.930392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:16.946438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:16.946521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:16.962923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16506, node 1 2025-04-17T18:04:17.110766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.110790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.110813Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.110929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1185 TClient is connected to server localhost:1185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:17.776360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.800205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.936014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.097817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:18.167468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:19.497443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344527000255509:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:19.497799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.042775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.115081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.149402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.215044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.285401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.325505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.357312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.419231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344531295223475:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.419323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.419567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344531295223480:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.423559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:20.435105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344531295223482:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {<main>
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:20.528907Z node 1 :TX_PROXY ERROR: Actor# [1:7494344531295223542:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:21.545998Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344514115352004:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:21.554166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:21.715858Z node 1 :TX_PROXY ERROR: Actor# [1:7494344535590191203:3805] txid# 281474976710673, issues: { message: "Group already exists" severity: 1 } 2025-04-17T18:04:21.725366Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTMxZTIxOGUtODU5ZGVjMGQtMjJjZjA4MjEtZjcxYTJhN2U=, ActorId: [1:7494344535590191197:2520], ActorState: ExecuteState, TraceId: 01js2ernt7e6p92r302crmsgrs, Create QueryResponse for error on request, msg: 2025-04-17T18:04:21.803560Z node 1 :TX_PROXY ERROR: Actor# [1:7494344535590191269:3848] txid# 281474976710677, issues: { message: "Group not found" severity: 1 } 2025-04-17T18:04:21.803803Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjkzNmQwZjUtMjIyMTYwYjMtZjQwNzM4ZTUtNDNmMzM4OGI=, ActorId: [1:7494344535590191263:2529], ActorState: ExecuteState, TraceId: 01js2ernwz1y1xxbczkdg1z3tn, Create QueryResponse for error on request, msg: 2025-04-17T18:04:21.896677Z node 1 :TX_PROXY ERROR: Actor# [1:7494344535590191328:3877] txid# 281474976710682, issues: { message: "Group already exists" severity: 1 } 2025-04-17T18:04:21.896937Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Q3MDIwZjYtZjU2NGI5MjMtNzhmNjJiNGYtNGM3OGJmOTY=, ActorId: [1:7494344535590191322:2541], ActorState: ExecuteState, TraceId: 01js2ernzt102kezt7snkmp3q8, Create QueryResponse for error on request, msg: 2025-04-17T18:04:22.005007Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158706:3929] txid# 281474976710687, issues: { message: "Role \"user1\" is already a member of role \"group1\"" issue_code: 2 severity: 3 } 2025-04-17T18:04:22.059693Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158720:3936] txid# 281474976710688, issues: { message: "Member account not found" severity: 1 } 2025-04-17T18:04:22.059936Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzdjNzY3OGQtNTQ3NzUwODAtY2Q4NjE0OTktMzMzNjMwNA==, ActorId: [1:7494344539885158714:2556], ActorState: ExecuteState, TraceId: 01js2erp3y3j64ge54wyecertd, Create QueryResponse for error on request, msg: 2025-04-17T18:04:22.098367Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158750:3950] txid# 281474976710691, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-17T18:04:22.116637Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158785:3978] txid# 281474976710692, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-17T18:04:22.148784Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158812:3993] txid# 281474976710694, issues: { message: "Member account not found" severity: 1 } 2025-04-17T18:04:22.149095Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=MjhjN2IxNjQtOTZiMDRlZTQtOTdlNWRlZjYtMTE5ZThhMDA=, ActorId: [1:7494344539885158794:2568], ActorState: ExecuteState, TraceId: 01js2erp7e5ejjmaegg1sk2873, Create QueryResponse for error on request, msg: 2025-04-17T18:04:22.179980Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158829:4001] txid# 281474976710696, issues: { message: "Role \"user1\" is already a member of role \"group1\"" issue_code: 2 severity: 3 } 2025-04-17T18:04:22.226900Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158869:4022] txid# 281474976710699, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-17T18:04:22.252480Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158891:4034] txid# 281474976710701, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-17T18:04:22.271970Z node 1 :TX_PROXY ERROR: Actor# [1:7494344539885158905:4041] txid# 281474976710702, issues: { message: "Role \"user2\" is not a member of role \"group1\"" issue_code: 3 sever ... ecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:04:54.611977Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344676406079083:4193], TxId: 281474976715877, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZWYyMDdjMTAtNDU2NDZkYTktNzk0ZGZkNGMtNDhjMGYxN2E=. CustomerSuppliedId : . TraceId : 01js2esnm0atarsdg6fdkpm8e1. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494344676406079079:4070], status: PRECONDITION_FAILED, reason: {<main>
: Error: Terminate execution } 2025-04-17T18:04:54.612256Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWYyMDdjMTAtNDU2NDZkYTktNzk0ZGZkNGMtNDhjMGYxN2E=, ActorId: [3:7494344672111111424:4070], ActorState: ExecuteState, TraceId: 01js2esnm0atarsdg6fdkpm8e1, Create QueryResponse for error on request, msg: 2025-04-17T18:04:54.613924Z node 3 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01js2esn7q8h59v67ktst9pb7w" } } } } ;request=session_id: "ydb://session/3?node_id=3&id=ZWYyMDdjMTAtNDU2NDZkYTktNzk0ZGZkNGMtNDhjMGYxN2E=" tx_control { tx_id: "01js2esn7q8h59v67ktst9pb7w" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2025-04-17T18:04:54.615204Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmRjZGZiMTItMTBkOWRlN2ItNTRiMGQ0NzItNzZkMGQwYzE=, ActorId: [3:7494344672111111416:4065], ActorState: ExecuteState, TraceId: 01js2esmtj7p60s8q1774256ya, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 11591, MsgBus: 23886 2025-04-17T18:04:57.193172Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344689634107240:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:57.193267Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c4e/r3tmp/tmpKqj1Dp/pdisk_1.dat 2025-04-17T18:04:57.364234Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:57.369724Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:57.369801Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:57.371498Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11591, node 4 2025-04-17T18:04:57.447414Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:57.447439Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:57.447448Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:57.447581Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23886 TClient is connected to server localhost:23886 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:58.054734Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:58.062916Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:58.076727Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:58.232546Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:58.414392Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.561907Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:01.440839Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344706813978207:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.440947Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.517494Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.561259Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.606011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.656409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.715420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.779435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.827322Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:02.193472Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344689634107240:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:02.193647Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:02.214681Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344711108946182:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.214804Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.216700Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344711108946187:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.222126Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:02.242391Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344711108946189:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:02.333165Z node 4 :TX_PROXY ERROR: Actor# [4:7494344711108946252:3573] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:03.782657Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494344715403913886:2520], status: GENERIC_ERROR, issues:
<main>: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2025-04-17T18:05:03.785080Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjE3OGI5NTItZjA2NTE1MTQtNDNhMWU1MjctZjk5YjRkMQ==, ActorId: [4:7494344715403913879:2516], ActorState: ExecuteState, TraceId: 01js2esyvz1mz1vm927613ekw8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpDocumentApi::RestrictWrite >> KqpService::RangeCache-UseCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2025-04-17T18:03:48.074223Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:48.087435Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:48.087870Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:48.088091Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:48.134642Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:48.141125Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:48.205106Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:48.206585Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:48.206657Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:48.206742Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:48.207045Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:48.207361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:48.207421Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:48.207701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:48.207733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:48.274431Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:48.307523Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:48.307736Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:48.307845Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:48.307880Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:48.307919Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:48.307958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:48.308097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.308154Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
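The KqpQueryService::DdlMixedDml output above ends in compile error code 2009: KQP rejects a single query text that mixes scheme (DDL) and data (DML) statements and asks for "separate queries for different types of operations". A minimal client-side sketch of that split, assuming the YDB Python SDK with a placeholder endpoint, database, and table (all illustrative, not taken from this test run):

import ydb  # assumes the YDB Python SDK; endpoint/table names below are hypothetical

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

# Scheme operation issued as its own request...
pool.retry_operation_sync(
    lambda session: session.execute_scheme(
        "CREATE TABLE demo (id Uint64, value Utf8, PRIMARY KEY (id));"
    )
)

# ...and the data operation as a separate transactional query, so no single
# request mixes operation types (avoiding the code 2009 error above).
pool.retry_operation_sync(
    lambda session: session.transaction(ydb.SerializableReadWrite()).execute(
        "UPSERT INTO demo (id, value) VALUES (1u, 'one');", commit_tx=True
    )
)

pool.stop()
driver.stop()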
2025-04-17T18:03:48.308454Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:48.308536Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:48.308645Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:48.308686Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:48.308735Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:48.308777Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:48.308818Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:48.308849Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:48.308892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:48.308995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.309044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.309118Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:48.311531Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:48.311579Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:48.311658Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:48.311793Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:48.311835Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:48.311916Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:48.311963Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:48.311998Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:48.312048Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:48.312095Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:48.312358Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:48.312398Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:48.312439Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:48.312470Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:48.312511Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 
2025-04-17T18:03:48.312548Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:48.312586Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:48.312617Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:48.312643Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:48.324432Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:48.324489Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:48.324515Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:48.324553Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:48.324601Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:48.324999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.325039Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:48.325073Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:48.325123Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:48.325140Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:48.325235Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:48.325266Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:48.325309Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:48.325340Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:48.327902Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:48.327954Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:48.328102Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.328131Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:48.328172Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:48.328201Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:48.328246Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:48.328296Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:48.328325Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:48.328359Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:48.328425Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:48.328477Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:48.328514Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:48.328644Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:48.328670Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:48.328687Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:48.328701Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:48.328716Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:48.328756Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:48.328772Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:48.328799Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:48.328840Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:48.328872Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:48.328906Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:48.328935Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:48.328973Z node 1 :TX_DATA ... 
ms, propose latency: 1 ms 2025-04-17T18:05:04.630449Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.630652Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.630684Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.630728Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.630760Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.630920Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.630949Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.631005Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.631036Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.631169Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.631199Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.631241Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.631272Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.631393Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.631422Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.631462Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.631493Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.631700Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.631730Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.631769Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.631801Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.632002Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.632031Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.632071Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.632100Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.632302Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.632332Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 
on unit CompleteOperation 2025-04-17T18:05:04.632371Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.632405Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.632556Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.632585Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.632624Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.632655Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.632781Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.632809Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.632849Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.632880Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.633042Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.633072Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.633114Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.633144Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.633274Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.633302Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.633342Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.633373Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.633544Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.633573Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.633612Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.633643Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:04.633814Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:04.633845Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-17T18:05:04.633885Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:04.633916Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
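The TX_DATASHARD trace above walks each operation through a fixed chain of named execution units (PlanQueue, LoadTxDetails, BuildAndWaitDependencies, ..., CompleteOperation); a unit either finishes inline (status Executed) or defers its side effects to the commit phase (DelayComplete), which is what the repeated "Complete execution for ... on unit CompleteOperation" lines correspond to. A toy Python sketch of that pipeline pattern, as an illustration only and not the YDB implementation:

from enum import Enum, auto

class Status(Enum):
    EXECUTED = auto()        # unit finished its work inline
    DELAY_COMPLETE = auto()  # unit defers side effects to the commit phase

class Unit:
    name = "Unit"
    def execute(self, op):   # runs while the transaction executes
        return Status.EXECUTED
    def complete(self, op):  # runs only for units that returned DELAY_COMPLETE
        pass

class PlanQueue(Unit):
    name = "PlanQueue"

class CompleteOperation(Unit):
    name = "CompleteOperation"
    def execute(self, op):
        return Status.DELAY_COMPLETE   # result is sent back during commit
    def complete(self, op):
        print(f"Complete execution for {op} on unit {self.name}")

def run(op, units):
    deferred = []
    for unit in units:
        print(f"Trying to execute {op} on unit {unit.name}")
        if unit.execute(op) is Status.DELAY_COMPLETE:
            deferred.append(unit)
    for unit in deferred:              # commit phase
        unit.complete(op)

run("[1000004:37]", [PlanQueue(), CompleteOperation()])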
2025-04-17T18:05:04.634331Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-17T18:05:04.634386Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.634427Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-17T18:05:04.634570Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:05:04.634619Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.634650Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-17T18:05:04.634734Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:05:04.634765Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.634793Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-17T18:05:04.634874Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:05:04.634905Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.634934Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-17T18:05:04.635030Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:05:04.635062Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.635088Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-17T18:05:04.635169Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:05:04.635198Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.635227Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-17T18:05:04.635325Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:05:04.635356Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.635385Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-17T18:05:04.635443Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:05:04.635473Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.635501Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-17T18:05:04.635585Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:235:2228], Recipient [32:344:2311]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-17T18:05:04.635613Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:04.635641Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 30 20 30 25 28 28 24 28 31 31 28 16 30 24 31 5 25 1 24 23 1 25 31 1 25 25 - - - - - - actual 30 20 30 25 28 28 24 28 31 31 28 16 30 24 31 5 25 1 24 23 1 25 31 1 25 25 - - - - - - interm 6 3 3 - 5 5 1 3 6 5 5 4 - - - 5 6 1 6 - 1 1 - 1 - 1 - - - - - - >> KqpQueryService::SeveralCTAS+UseSink [GOOD] >> KqpQueryService::SeveralCTAS-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::Tcl [GOOD] Test command err: Trying to start YDB, gRPC: 12966, MsgBus: 1344 2025-04-17T18:04:36.003558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344593761604374:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:36.003655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2f/r3tmp/tmpGlrpr9/pdisk_1.dat 2025-04-17T18:04:36.588150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:36.588245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:36.590697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:36.617890Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12966, node 1 2025-04-17T18:04:36.742736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:36.742764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:36.742771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:36.742893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1344 TClient is connected to server localhost:1344 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:37.325197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:37.348244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:37.365770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:37.525209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:37.723909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:37.818052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:39.494149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344610941475187:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:39.494281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:39.804316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:39.842589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:39.881457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:39.915012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:39.937563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:39.979014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:40.037525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:40.125551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344615236443149:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:40.125637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:40.125884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344615236443154:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:40.130214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:40.146842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344615236443156:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {<main>
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:40.203351Z node 1 :TX_PROXY ERROR: Actor# [1:7494344615236443212:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:40.999831Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344593761604374:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:40.999923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12314, MsgBus: 25737 2025-04-17T18:04:42.035201Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344622324425547:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:42.038274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2f/r3tmp/tmpbnXubp/pdisk_1.dat 2025-04-17T18:04:42.140530Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12314, node 2 2025-04-17T18:04:42.195244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:42.195339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:42.199199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:42.283217Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:42.283244Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:42.283252Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:42.283375Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25737 TClient is connected to server localhost:25737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
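The "Scheduled retry for error: ... doublechecking" warnings followed by TX_PROXY "path exist, request accepts it" errors above reflect a benign race: several actors try to create .metadata/workload_manager/pools/default concurrently, and the losers verify that the pool now exists instead of failing. A generic sketch of that create-if-absent pattern, with illustrative names only and not the YDB code:

import time

def ensure_exists(create, exists, attempts=3, backoff=0.1):
    # create() raises if the path already exists; exists() re-checks ("doublechecking").
    for attempt in range(attempts):
        try:
            create()
            return True
        except RuntimeError:
            if exists():               # a concurrent creator won the race: treat as success
                return True
            time.sleep(backoff * (2 ** attempt))
    return exists()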
2025-04-17T18:04:42.779329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.788332Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:42.809941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.891099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... ... de 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344635209329877:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {<main>
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:45.924573Z node 2 :TX_PROXY ERROR: Actor# [2:7494344635209329934:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:47.025554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.026946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.028928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.036087Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344622324425547:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:47.036160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3690, MsgBus: 4575 2025-04-17T18:04:57.383026Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344688084444072:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:57.383102Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2f/r3tmp/tmpmcGg1Q/pdisk_1.dat 2025-04-17T18:04:57.510724Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:57.537159Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:57.537268Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:57.539581Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3690, node 3 2025-04-17T18:04:57.614716Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:57.614742Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:57.614750Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:57.614880Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4575 TClient is connected to server localhost:4575 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:58.187647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:58.198400Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:58.220254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:58.305374Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:58.480938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.569251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:01.106577Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344705264315018:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.106658Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.169079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.246933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.287309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.367216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.407842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.462376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.515370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.590550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344705264315688:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.590693Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.591999Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344705264315693:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.598081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:01.631311Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344705264315695:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:01.685887Z node 3 :TX_PROXY ERROR: Actor# [3:7494344705264315751:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:02.383519Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344688084444072:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:02.383602Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:02.979491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:02.983308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:02.984931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:03.328714Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494344713854250990:2538], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2025-04-17T18:05:03.330644Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Mzg2MzE2NmYtZmRhZjkyZTAtZjk5OTc5NzItNmFiOTBkMWM=, ActorId: [3:7494344713854250988:2537], ActorState: ExecuteState, TraceId: 01js2esy3vcsqphjhh3b2g53b5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:05:04.665728Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494344718149218930:2731], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2025-04-17T18:05:04.666513Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTIyODc1MDctYzYzZTMwYzgtNzgyOGViZDMtZTg0Mjg0Zg==, ActorId: [3:7494344718149218928:2730], ActorState: ExecuteState, TraceId: 01js2eszqp8jmzvdvwwq42vvc9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-04-17T18:04:08.315356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344479651630225:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:08.315420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004b0/r3tmp/tmpT6qHOy/pdisk_1.dat 2025-04-17T18:04:08.666003Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27874, node 1 2025-04-17T18:04:08.732619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:08.733721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:08.735833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:08.770058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:08.770098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:08.770111Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:08.770270Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:09.003098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2957 2025-04-17T18:04:09.183273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
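Note: the two compile failures above (status GENERIC_ERROR, issue codes 1030 and 2008) show KQP rejecting explicit COMMIT and ROLLBACK statements embedded in the query text: in YDB, transaction boundaries are driven by the request's transaction-control settings or by the client SDK, not by SQL keywords. A minimal sketch of the client-side equivalent, assuming the synchronous YDB Python SDK; the endpoint, database, and table name are illustrative placeholders, not values from this log:

    import ydb

    # Placeholder connection parameters (not taken from the log).
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def txn(session):
        # Transaction control lives in the SDK call, never in the query body:
        # embedding COMMIT/ROLLBACK in the text fails with code 2008 as above.
        tx = session.transaction(ydb.SerializableReadWrite())
        tx.execute("UPSERT INTO test_table (id) VALUES (1);")  # hypothetical table
        tx.commit()  # explicit commit; tx.rollback() is the counterpart

    pool.retry_operation_sync(txn)
    pool.stop()
    driver.stop()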
2025-04-17T18:04:09.191970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.212247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.364248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:09.415283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:09.495847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.533903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:09.568222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:09.608973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.638128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.669633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.700312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:11.128226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344492536533565:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:11.128355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:11.128364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344492536533577:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:11.131845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T18:04:11.145792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344492536533579:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T18:04:11.236595Z node 1 :TX_PROXY ERROR: Actor# [1:7494344492536533630:2916] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:11.605694Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2erbfp9c1fkd75194kxxkr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQ0YzNmYWItNzM0YWFlYTAtNWRhNjc3OTEtZWQyNDM0OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:11.669076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:11.715161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:04:11.750077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:11.793124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:11.823902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:11.859242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:11.898036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:04:11.924752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:11.954828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
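Note: the NOT_FOUND / "doublechecking" / "path exist, request accepts it" sequence above is the workload service lazily provisioning the default resource pool: the fetcher legitimately misses, a creator actor submits ESchemeOpCreateResourcePool, and a creation attempt that loses the race surfaces a "path exist" issue that is accepted as success. A generic sketch of that create-if-absent shape; ensure_default_pool, pool_exists, and create_pool are hypothetical helpers for illustration only, not YDB APIs:

    import time

    def ensure_default_pool(pool_exists, create_pool, retries=5):
        # NOT_FOUND -> create -> treat 'already exists' as success, with a
        # scheduled retry on transient failure (as TPoolCreatorActor does).
        for attempt in range(retries):
            if pool_exists():
                return True
            try:
                create_pool()
                return True
            except FileExistsError:   # stand-in for the 'path exist' issue
                return True           # another creator won the race: still success
            except TimeoutError:
                time.sleep(0.1 * 2 ** attempt)  # back off before the next try
        return False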
2025-04-17T18:04:12.072086Z node 1 :HTTP INFO: Listening on http://127.0.0.1:23236 2025-04-17T18:04:13.092646Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T18:04:13.092790Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T18:04:13.093943Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T18:04:13.094260Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T18:04:13.094885Z node 1 :HTTP INFO: Listening on http://[::]:18501 2025-04-17T18:04:13.139226Z node 1 :SQS INFO: Request SQS users list 2025-04-17T18:04:13.139261Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T18:04:13.142507Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T18:04:13.142545Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T18:04:13.143421Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-17T18:04:13.143760Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromName (Parameter 'FROM_NAME (DataType 'Utf8String))) (let batchSize (Parameter 'BATCH_SIZE (DataType 'Uint64))) (let settingsTable '/Root/SQS/.Settings) (let range '( '('Account fromUser (Void)) '('Name fromName (Void)))) (let settingsSelect '( 'Account 'Name 'Value)) (let settingsResult (SelectRange settingsTable range settingsSelect '('('"ItemsLimit" batchSize)))) (let settings (Member settingsResult 'List)) (let truncated (Coalesce (Member settingsResult 'Truncated) (Bool 'false))) (return (Extend (AsList (SetResult 'settings settings)) (AsList (SetResult 'truncated truncated)) )) ) 2025-04-17T18:04:13.145424Z node 1 ... oud4/000000000000000101v0/1]. ActiveMessageRequests: 1 2025-04-17T18:05:04.693877Z node 7 :SQS DEBUG: Request [16ccdd39-78c440ca-d02cf309-df8a67f7] Sending execute request for query(idx=CHANGE_VISIBILITY_ID) to queue leader 2025-04-17T18:05:04.693911Z node 7 :SQS DEBUG: Request [16ccdd39-78c440ca-d02cf309-df8a67f7] Executing compiled query(idx=CHANGE_VISIBILITY_ID) 2025-04-17T18:05:04.693995Z node 7 :SQS DEBUG: Request [16ccdd39-78c440ca-d02cf309-df8a67f7] Starting executor actor for query(idx=CHANGE_VISIBILITY_ID). 
Mode: COMPILE_AND_EXEC 2025-04-17T18:05:04.694103Z node 7 :SQS TRACE: Request [16ccdd39-78c440ca-d02cf309-df8a67f7] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 1, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 5923258363543965525, "NOW": 1744913104693, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1744913104569, "Offset": 1, "NewVisibilityDeadline": 1744913105693}, {"LockTimestamp": 1744913104603, "Offset": 2, "NewVisibilityDeadline": 1744913106693}]} 2025-04-17T18:05:04.694575Z node 7 :SQS TRACE: Request [16ccdd39-78c440ca-d02cf309-df8a67f7] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t \000\001\205\000\000\000\000\001\030\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230> KqpQueryService::TableSink_OltpInteractive [GOOD] >> KqpQueryService::Tcl [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] Test command err: Trying to start YDB, gRPC: 16731, MsgBus: 19408 2025-04-17T18:04:41.211023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344621397993915:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:41.211098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c09/r3tmp/tmpINBwHY/pdisk_1.dat 2025-04-17T18:04:41.678703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:41.684534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:41.684645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:41.687730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16731, node 1 2025-04-17T18:04:41.775273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-04-17T18:04:41.775293Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:41.775302Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:41.775446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19408 TClient is connected to server localhost:19408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:42.441223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.484815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.717328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.895624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.985911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:44.679392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344634282897571:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:44.679512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.087526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.119230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.204272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.251832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.326995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.372325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.412631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.515895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344638577865540:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.515957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.516294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344638577865545:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.519533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:45.530479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344638577865547:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:45.633728Z node 1 :TX_PROXY ERROR: Actor# [1:7494344638577865605:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:46.214345Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344621397993915:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:46.214418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:46.704862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:46.706237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:46.707893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.984711Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-17T18:04:48.984926Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344651462769002:2802] TxId: 281474976710703. Ctx: { TraceId: 01js2esg6y82d08xsfa5pdtx8g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ3YTU3MTMtOTJhMDQwMWYtMjRlMzUyYTUtNDZiNDExZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:04:48.985099Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWQ3YTU3MTMtOTJhMDQwMWYtMjRlMzUyYTUtNDZiNDExZmQ=, ActorId: [1:7494344651462768976:2802], ActorState: ExecuteState, TraceId: 01js2esg6y82d08xsfa5pdtx8g, Create QueryResponse for error on request, msg: 2025-04-17T18:04:48.985504Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913089024, txId: 281474976710702] shutting down 2025-04-17T18:04:48.986048Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344651462769008:2807], TxId: 281474976710703, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NWQ3YTU3MTMtOTJhMDQwMWYtMjRlMzUyYTUtNDZiNDExZmQ=. CustomerSuppliedId : . TraceId : 01js2esg6y82d08xsfa5pdtx8g. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494344651462769002:2802], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:04:49.223665Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-17T18:04:49.224586Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913089262, txId: 281474976710706] shutting down 2025-04-17T18:04:49.458536Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-17T18:04:49.458716Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344655757736434:2835] TxId: 281474976710710. Ctx: { TraceId: 01js2esgpvax1kss9550871hvk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY2ZDQyNDctNTI0ZTNkZWUtZmNjZGIzZTYtYmYwZmZhYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:04:49.458853Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTY2ZDQyNDctNTI0ZTNkZWUtZmNjZGIzZTYtYmYwZmZhYmY=, ActorId: [1:7494344655757736409:2835], ActorState: ExecuteState, TraceId: 01js2esgpvax1kss9550871hvk, Create QueryResponse for error on request, msg: 2025-04-17T18:04:49.459177Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913089493, txId: 281474976710709] shutting down 2025-04-17T18:04:49.459406Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344655757736439:2840], TxId: 281474976710710, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2esgpvax1kss9550871hvk. SessionId : ydb://session/3?node_id=1&id=ZTY2ZDQyNDctNTI0ZTNkZWUtZmNjZGIzZTYtYmYwZmZhYmY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494344655757736434:2835], status: ABORTED, reason: {
: Error: Terminate execution } Tr ... {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:54.240562Z node 2 :TX_PROXY ERROR: Actor# [2:7494344675920843137:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:55.190094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:55.191473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:55.193423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:55.443897Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344658740971609:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:55.443993Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:57.501772Z node 2 :RPC_REQUEST WARN: Client lost 2025-04-17T18:04:57.502035Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344688805746744:2880] TxId: 281474976715711. Ctx: { TraceId: 01js2esrhx82n9rvg9686aqxmp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWUxNDE2ZjMtNzA1MjAwNjQtMjhjNDMyMmEtZTM5NjY1NTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:04:57.503064Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344688805746749:2886], TxId: 281474976715711, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWUxNDE2ZjMtNzA1MjAwNjQtMjhjNDMyMmEtZTM5NjY1NTE=. CustomerSuppliedId : . TraceId : 01js2esrhx82n9rvg9686aqxmp. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494344688805746744:2880], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:04:57.503798Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWUxNDE2ZjMtNzA1MjAwNjQtMjhjNDMyMmEtZTM5NjY1NTE=, ActorId: [2:7494344688805746717:2880], ActorState: ExecuteState, TraceId: 01js2esrhx82n9rvg9686aqxmp, Create QueryResponse for error on request, msg: 2025-04-17T18:04:57.504257Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913097529, txId: 281474976715710] shutting down Trying to start YDB, gRPC: 14819, MsgBus: 4159 2025-04-17T18:04:58.320278Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344693708362828:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:58.320322Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c09/r3tmp/tmp8lL2IS/pdisk_1.dat 2025-04-17T18:04:58.547127Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14819, node 3 2025-04-17T18:04:58.583605Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:58.583688Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:58.585436Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:58.639968Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:58.639989Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:58.639997Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:58.640121Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4159 TClient is connected to server localhost:4159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:59.095001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
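Note: the ABORTED chains above originate on the client side: RPC_REQUEST logs "Client lost" when the caller drops the request, the executer aborts with the same issue, compute actors receive "Terminate execution", and the snapshot is discarded. Client code can trigger this by bounding or cancelling its calls; a sketch under the assumption of the Python SDK's request settings (the timeout values are illustrative):

    import ydb

    # with_timeout bounds how long the client waits; with_cancel_after asks
    # the server to cancel the operation itself. Either can surface in server
    # logs as an aborted execution like the "Client lost" entries above.
    settings = (
        ydb.BaseRequestSettings()
        .with_timeout(3)       # seconds the client is willing to wait
        .with_cancel_after(2)  # seconds before server-side cancellation
    )
    # Usage (session obtained elsewhere):
    #   session.transaction().execute("SELECT 1;", commit_tx=True, settings=settings)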
2025-04-17T18:04:59.101580Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:59.107197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:59.193478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:59.368503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:59.448018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:01.799455Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344706593266493:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.799532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:01.857023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.900026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.960902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.998207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:02.039462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:02.106569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:02.150957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:02.232818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344710888234453:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.232917Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.233118Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344710888234458:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:02.240955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:02.258672Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344710888234460:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:02.318431Z node 3 :TX_PROXY ERROR: Actor# [3:7494344710888234517:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:03.322286Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344693708362828:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:03.322366Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:03.406776Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:03.408021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:03.408947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> KqpQueryService::ReadManyRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-04-17T18:04:05.959780Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344466134608124:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:05.960277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00050d/r3tmp/tmpuRjpF5/pdisk_1.dat 2025-04-17T18:04:06.416172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:06.421104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:06.421215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:06.426155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15129, node 1 2025-04-17T18:04:06.608915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:06.608935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:06.608942Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:06.609047Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7911 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:07.072000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:7911 2025-04-17T18:04:07.333325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.338814Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:04:07.340586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.354751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.469122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.512758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.554954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:07.590229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:07.617238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.682880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:07.751710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.822157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:07.868810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:08.958556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344479019511475:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:08.958656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344479019511470:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:08.958980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:08.962589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-17T18:04:08.971705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344479019511484:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-17T18:04:09.067690Z node 1 :TX_PROXY ERROR: Actor# [1:7494344483314478833:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:09.680852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2er9bw62kxsgqq8kcwzn56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA1YWY3ZDQtOGJiOGZkYWMtMzYzZDI4YTgtY2Y3OGZkOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:04:09.756488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.787706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.812916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.842048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.868883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.904130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.929544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.958972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:09.993160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
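Note: the SQS service bootstrap that follows compiles declarative MiniKQL programs such as GET_QUEUES_LIST_ID: a SelectRange over a system table, bounded by a BATCH_SIZE parameter via 'ItemsLimit', returning both the rows and a Truncated flag. That flag implies the usual resume-from-last-key pagination loop; a generic sketch, where fetch_page is a hypothetical wrapper around one execution of such a program:

    def list_all(fetch_page, start_key, batch_size=100):
        # fetch_page(from_key, batch_size) -> (rows, truncated): reads at
        # most batch_size rows; truncated mirrors the Coalesce(...Truncated)
        # result in the compiled program.
        key = start_key
        while True:
            rows, truncated = fetch_page(key, batch_size)
            yield from rows
            if not truncated:
                return
            key = rows[-1].key  # resume after the last row of this batch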
2025-04-17T18:04:10.040305Z node 1 :HTTP INFO: Listening on http://127.0.0.1:63852 2025-04-17T18:04:10.958284Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344466134608124:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:10.959303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:11.048169Z node 1 :SQS INFO: Start SQS service actor 2025-04-17T18:04:11.048211Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-17T18:04:11.066585Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-17T18:04:11.068071Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-17T18:04:11.070416Z node 1 :HTTP INFO: Listening on http://[::]:19005 2025-04-17T18:04:11.089440Z node 1 :SQS INFO: Request SQS users list 2025-04-17T18:04:11.089475Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-17T18:04:11.089541Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-17T18:04:11.089565Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-17T18:04:11.094554Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-04-17T18:04:11.095023Z node 1 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromQueue (Parameter 'FROM_QUEUE (DataType 'Utf8String))) (let batchSize (Parameter 'BATCH_SIZE (DataType 'Uint64))) (let queuesTable '/Root/SQS/.Queues) (let range '( '('Account fromUser (Void)) ... ed" retryable:0 2025-04-17T18:05:05.843931Z node 8 :TICKET_PARSER DEBUG: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2025-04-17T18:05:05.844019Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [a2317941-4e32b62f-23b398bc-cb46ba9f] [auth] Authorized successfully 2025-04-17T18:05:05.844179Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a2317941-4e32b62f-23b398bc-cb46ba9f] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-17T18:05:05.846244Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7494344723535605028:2527], now have 1 active actors on pipe 2025-04-17T18:05:05.846321Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7494344723535605024:2523], now have 1 active actors on pipe 2025-04-17T18:05:05.846369Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7494344723535605026:2525], now have 1 active actors on pipe 2025-04-17T18:05:05.846408Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7494344723535605025:2524], now have 1 active actors on pipe 2025-04-17T18:05:05.846445Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7494344723535605027:2526], now have 1 active actors on pipe 2025-04-17T18:05:05.847476Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7494344723535605026:2525] destroyed 2025-04-17T18:05:05.847501Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7494344723535605027:2526] destroyed 2025-04-17T18:05:05.847518Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7494344723535605028:2527] destroyed 2025-04-17T18:05:05.847536Z node 8 :PERSQUEUE DEBUG: 
[PQ: 72075186224037907] server disconnected, pipe [8:7494344723535605024:2523] destroyed 2025-04-17T18:05:05.847554Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7494344723535605025:2524] destroyed 2025-04-17T18:05:05.848290Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a2317941-4e32b62f-23b398bc-cb46ba9f] reply ok 2025-04-17T18:05:05.848524Z node 8 :HTTP DEBUG: (#37,[::1]:58166) <- (200 ) 2025-04-17T18:05:05.848626Z node 8 :HTTP DEBUG: (#37,[::1]:58166) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744913106,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744913106,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-04-17T18:05:05.854367Z node 8 :HTTP DEBUG: (#37,[::1]:58180) incoming connection opened 2025-04-17T18:05:05.854433Z node 8 :HTTP DEBUG: (#37,[::1]:58180) -> (POST /Root) 2025-04-17T18:05:05.854585Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [9846:2501:6050:0:8046:2501:6050:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: d52296ac-5f981e0-9eb08306-158dfff6 2025-04-17T18:05:05.854973Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [d52296ac-5f981e0-9eb08306-158dfff6] got new request from [9846:2501:6050:0:8046:2501:6050:0] database '/Root' stream 'testtopic' 2025-04-17T18:05:05.858267Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [d52296ac-5f981e0-9eb08306-158dfff6] [auth] Authorized successfully 2025-04-17T18:05:05.858420Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [d52296ac-5f981e0-9eb08306-158dfff6] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-17T18:05:05.860256Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [d52296ac-5f981e0-9eb08306-158dfff6] reply ok 2025-04-17T18:05:05.860596Z node 8 :HTTP DEBUG: (#37,[::1]:58180) <- (200 ) 2025-04-17T18:05:05.860703Z node 8 :HTTP DEBUG: (#37,[::1]:58180) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1744913.105,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1744913.105,"StreamName":"testtopic"}} 2025-04-17T18:05:05.861731Z node 8 :HTTP DEBUG: (#40,[::1]:58190) 
incoming connection opened 2025-04-17T18:05:05.861798Z node 8 :HTTP DEBUG: (#40,[::1]:58190) -> (POST /Root) 2025-04-17T18:05:05.861939Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [d8ac:100:6050:0:c0ac:100:6050:0] request [DescribeStream] url [/Root] database [/Root] requestId: a86e7616-d9b97644-bb9a3508-46e64b98 2025-04-17T18:05:05.862276Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a86e7616-d9b97644-bb9a3508-46e64b98] got new request from [d8ac:100:6050:0:c0ac:100:6050:0] database '/Root' stream 'testtopic' 2025-04-17T18:05:05.862715Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [a86e7616-d9b97644-bb9a3508-46e64b98] [auth] Authorized successfully 2025-04-17T18:05:05.862802Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a86e7616-d9b97644-bb9a3508-46e64b98] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-17T18:05:05.863779Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7494344723535605051:2535], now have 1 active actors on pipe 2025-04-17T18:05:05.863837Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7494344723535605053:2537], now have 1 active actors on pipe 2025-04-17T18:05:05.863891Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7494344723535605054:2538], now have 1 active actors on pipe 2025-04-17T18:05:05.863891Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7494344723535605052:2536], now have 1 active actors on pipe 2025-04-17T18:05:05.863927Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7494344723535605055:2539], now have 1 active actors on pipe 2025-04-17T18:05:05.865145Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7494344723535605051:2535] destroyed 2025-04-17T18:05:05.865176Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7494344723535605052:2536] destroyed 2025-04-17T18:05:05.865197Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7494344723535605053:2537] destroyed 2025-04-17T18:05:05.865216Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7494344723535605054:2538] destroyed 2025-04-17T18:05:05.865238Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7494344723535605055:2539] destroyed 2025-04-17T18:05:05.865315Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a86e7616-d9b97644-bb9a3508-46e64b98] reply ok 2025-04-17T18:05:05.865982Z node 8 :HTTP DEBUG: (#40,[::1]:58190) <- (200 ) 2025-04-17T18:05:05.866082Z node 8 :HTTP DEBUG: (#40,[::1]:58190) connection closed Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744913106,"StorageLimitMb":0,"StreamName":"testtopic"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 23672, MsgBus: 4734 2025-04-17T18:04:39.187622Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344609078122163:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:39.187660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c1a/r3tmp/tmphheamV/pdisk_1.dat 2025-04-17T18:04:39.588900Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:39.636180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:39.636408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 23672, node 1 2025-04-17T18:04:39.639130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:39.679266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:39.679289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:39.679295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:39.679410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4734 TClient is connected to server localhost:4734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:40.237926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:40.268663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:40.414466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:40.568124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:40.633806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:42.267980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344621963025813:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.268075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.626523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.658459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.692873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.722279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.753864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.807064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.856251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:42.951749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344621963026470:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.951835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.952193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344621963026475:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:42.956449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:42.968164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344621963026477:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:43.033789Z node 1 :TX_PROXY ERROR: Actor# [1:7494344626257993830:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:44.017152Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2025-04-17T18:04:44.017249Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-04-17T18:04:44.018720Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-04-17T18:04:44.019283Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-04-17T18:04:44.019325Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-04-17T18:04:44.019723Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-04-17T18:04:44.021713Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2025-04-17T18:04:44.034601Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2025-04-17T18:04:44.034687Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNkNDllYWItZGY2MGFmNS0zZmE1ZTUzOC1iYzI4YmZiZg==, ActorId: [1:7494344626257994160:2512], ActorState: ExecuteState, TraceId: 01js2esbkf5q3dj6jq0g1xer07, Reply query error, msg: Pending previous query completion proxyRequestId: 15 2025-04-17T18:04:44.190381Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344609078122163:2064];send_to=[0:7307199536658146131:7762515]; 
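Note: the run of KQP_SESSION warnings above ("Reply query error, msg: Pending previous query completion") is the server-side symptom of firing a new query on a session that is still executing the previous one -- a YDB session processes one query at a time, so the extra requests (proxyRequestId 7 through 15 here) are rejected. A sketch of the usual client-side fix, assuming the ydb Python SDK (the gRPC port is taken from the log; endpoint details are otherwise placeholders): let a session pool hand each concurrent query its own session.

```python
# Avoid "Pending previous query completion" by giving each concurrent query
# its own session via a pool, instead of sharing one session across threads.
import concurrent.futures
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:23672", database="/Root")
driver.wait(timeout=15)
pool = ydb.SessionPool(driver, size=10)

def run_query(i: int):
    # retry_operation_sync checks out a free session per call, so parallel
    # calls never stack queries on the same session.
    return pool.retry_operation_sync(
        lambda session: session.transaction().execute(
            f"SELECT {i} AS idx;", commit_tx=True
        )
    )

with concurrent.futures.ThreadPoolExecutor(max_workers=10) as ex:
    results = list(ex.map(run_query, range(10)))
```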
2025-04-17T18:04:44.190463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20328, MsgBus: 11275 2025-04-17T18:04:45.121409Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344637914341419:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:45.121493Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c1a/r3tmp/tmpJncr1y/pdisk_1.dat 2025-04-17T18:04:45.263062Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:45.275459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:45.275544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) Volat ... TZkYzgtZTU4MzBlMWYtNWFiM2ViM2EtNjUzMmYwYzg=, ActorId: [2:7494344659389180919:2575], ActorState: ExecuteState, TraceId: 01js2esj5b8x7gwsv5g98txxs7, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-04-17T18:04:50.844320Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzJmNzUyODItNDQ1MjliZGEtYWI5ZGFlZjgtYzRiYjRjMTQ=, ActorId: [2:7494344659389180971:2587], ActorState: ExecuteState, TraceId: 01js2esj8v5tq16dcpwa5j7wmv, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-04-17T18:04:50.844387Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzJmNzUyODItNDQ1MjliZGEtYWI5ZGFlZjgtYzRiYjRjMTQ=, ActorId: [2:7494344659389180971:2587], ActorState: ExecuteState, TraceId: 01js2esj8v5tq16dcpwa5j7wmv, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-04-17T18:04:50.844904Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzJmNzUyODItNDQ1MjliZGEtYWI5ZGFlZjgtYzRiYjRjMTQ=, ActorId: [2:7494344659389180971:2587], ActorState: ExecuteState, TraceId: 01js2esj8v5tq16dcpwa5j7wmv, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-04-17T18:04:50.848083Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzJmNzUyODItNDQ1MjliZGEtYWI5ZGFlZjgtYzRiYjRjMTQ=, ActorId: [2:7494344659389180971:2587], ActorState: ExecuteState, TraceId: 01js2esj8v5tq16dcpwa5j7wmv, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-04-17T18:04:50.973661Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGZhNzNiNDYtNTBlOTAzMWQtMWIzODMzZWItNjc1YzNlOGE=, ActorId: [2:7494344659389181024:2598], ActorState: ExecuteState, TraceId: 01js2esjcw7yj7e90fhghs4029, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-04-17T18:04:50.973738Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGZhNzNiNDYtNTBlOTAzMWQtMWIzODMzZWItNjc1YzNlOGE=, ActorId: [2:7494344659389181024:2598], ActorState: ExecuteState, TraceId: 01js2esjcw7yj7e90fhghs4029, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-04-17T18:04:50.973771Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGZhNzNiNDYtNTBlOTAzMWQtMWIzODMzZWItNjc1YzNlOGE=, ActorId: [2:7494344659389181024:2598], ActorState: ExecuteState, TraceId: 01js2esjcw7yj7e90fhghs4029, Reply query error, msg: Pending previous query completion 
proxyRequestId: 60 2025-04-17T18:04:51.135704Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0MjM2OTQtNDg4NjE0MjEtZjg2OGMwZjctNjNiZjZiMWI=, ActorId: [2:7494344663684148354:2607], ActorState: ExecuteState, TraceId: 01js2esjhyfsdzeq510tv4k6wq, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-04-17T18:04:51.135782Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTI0MjM2OTQtNDg4NjE0MjEtZjg2OGMwZjctNjNiZjZiMWI=, ActorId: [2:7494344663684148354:2607], ActorState: ExecuteState, TraceId: 01js2esjhyfsdzeq510tv4k6wq, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-04-17T18:04:51.267578Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2E0ZDBiNGYtN2Q4NWIyZGMtNzU3YzE2NzEtMTczYmQ5ZDA=, ActorId: [2:7494344663684148409:2619], ActorState: ExecuteState, TraceId: 01js2esjnv2dfjqeweppr1tq04, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 7594, MsgBus: 7978 2025-04-17T18:04:52.489190Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344667307339214:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:52.489264Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c1a/r3tmp/tmpRys2VX/pdisk_1.dat 2025-04-17T18:04:52.622029Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:52.631209Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:52.631299Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:52.633406Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7594, node 3 2025-04-17T18:04:52.771297Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:52.771323Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:52.771330Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:52.771464Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7978 TClient is connected to server localhost:7978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
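Note: the "WaitRootIsUp 'Root'..." / "WaitRootIsUp 'Root' success." bracket around each TClient::Ls exchange is the harness polling the scheme shard until /Root is describable. A rough equivalent using the SDK's scheme client is sketched below; describe_path and the retry shape are assumptions about the ydb Python SDK, not the harness's actual TClient code.

```python
# Poll the scheme service until the root path is describable, approximating
# the test harness's WaitRootIsUp loop.
import time
import ydb

def wait_root_is_up(driver: ydb.Driver, path: str = "/Root", timeout_s: float = 30.0) -> None:
    deadline = time.monotonic() + timeout_s
    while True:
        try:
            entry = driver.scheme_client.describe_path(path)
            if entry is not None:
                return  # scheme shard answered: root exists
        except ydb.issues.Error:
            pass  # not up yet; keep polling
        if time.monotonic() > deadline:
            raise TimeoutError(f"{path} did not come up within {timeout_s}s")
        time.sleep(0.5)
```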
2025-04-17T18:04:53.344405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:53.355213Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:53.373308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:53.479304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:53.690905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:53.784120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.911242Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344680192242876:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:55.911327Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:55.970034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.042746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.079235Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.109335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.134765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.168694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.202545Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.316082Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344684487210837:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:56.316187Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:56.316347Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344684487210842:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:56.320282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:56.330466Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344684487210844:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:56.419846Z node 3 :TX_PROXY ERROR: Actor# [3:7494344684487210901:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:57.495188Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344667307339214:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:57.495264Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 8.320991s took: 8.369375s took: 8.390578s took: 8.388219s took: 8.391724s took: 8.393822s took: 8.395233s took: 8.398065s took: 8.406387s took: 8.352655s >> KqpQueryService::ExecuteQueryWithWorkloadManager ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 26221, MsgBus: 3160 2025-04-17T18:04:47.247696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344644828014918:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:47.258441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bda/r3tmp/tmp41Ue0h/pdisk_1.dat 2025-04-17T18:04:47.689967Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:47.707007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:47.707128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:47.708498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26221, node 1 2025-04-17T18:04:47.780554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:47.780578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:47.780588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:47.780705Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3160 TClient is connected to server localhost:3160 WaitRootIsUp 'Root'... 
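Note: verdict markers of the form ">> Suite::Test [GOOD]" are interleaved with per-test stderr and "------- [TM] {...}" block separators throughout this output. The sketch below pulls the verdicts out of such a log; the verdict tokens other than GOOD are assumptions about what the harness can emit.

```python
# Extract test verdicts from the interleaved log above. Names like
# "KqpService::RangeCache-UseCache" contain ':', '-', '_' and '+'.
import re
import sys

VERDICT_RE = re.compile(r">> (?P<test>[\w:+.-]+) \[(?P<verdict>GOOD|FAIL|TIMEOUT|SKIPPED)\]")

def verdicts(log_text: str):
    for m in VERDICT_RE.finditer(log_text):
        yield m.group("test"), m.group("verdict")

if __name__ == "__main__":
    # Bare ">> Suite::Test" announcements without a bracketed verdict are
    # queued tests and are intentionally not matched.
    for test, verdict in verdicts(sys.stdin.read()):
        print(f"{verdict:8} {test}")
```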
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:48.405791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:48.426504Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:50.555936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344657712917391:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:50.556028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:50.769271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:50.892695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344657712917495:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:50.892762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:50.893027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344657712917500:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:50.897076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:04:50.907982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344657712917502:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:04:50.974152Z node 1 :TX_PROXY ERROR: Actor# [1:7494344657712917553:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24212, MsgBus: 12175 2025-04-17T18:04:52.128958Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344668020737201:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bda/r3tmp/tmpXsZsJn/pdisk_1.dat 2025-04-17T18:04:52.185584Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:04:52.243057Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:52.273394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:52.273477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:52.274617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24212, node 2 2025-04-17T18:04:52.342729Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:52.342753Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:52.342760Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:52.342877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12175 TClient is connected to server localhost:12175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:52.741742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.206205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344680905639585:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:55.206292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:55.232460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:55.294077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344680905639686:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:55.294172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:55.294603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344680905639691:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:55.298830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:04:55.309002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344680905639693:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:04:55.370474Z node 2 :TX_PROXY ERROR: Actor# [2:7494344680905639744:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:55.712476Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-17T18:04:55.723580Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:04:55.723746Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:04:55.723974Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344680905639830:2366], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [2:7494344680905639814:2366]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[2:7494344680905639830:2366].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:04:55.724589Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344680905639823:2366], SessionActorId: [2:7494344680905639814:2366], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7494344680905639814:2366]. isRollback=0 2025-04-17T18:04:55.724829Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWMzMTJjYjktZGExMDViODYtN2RhZjMwNDUtNDQ2MjMzMGQ=, ActorId: [2:7494344680905639814:2366], ActorState: ExecuteState, TraceId: 01js2espytfycqayq618zj2b6e, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7494344680905639824:2366] from: [2:7494344680905639823:2366] 2025-04-17T18:04:55.724915Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494344680905639824:2366] TxId: 281474976715663. Ctx: { TraceId: 01js2espytfycqayq618zj2b6e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWMzMTJjYjktZGExMDViODYtN2RhZjMwNDUtNDQ2MjMzMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:04:55.725083Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWMzMTJjYjktZGExMDViODYtN2RhZjMwNDUtNDQ2MjMzMGQ=, ActorId: [2:7494344680905639814:2366], ActorState: ExecuteState, TraceId: 01js2espytfycqayq618zj2b6e, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:04:57.050293Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344668020737201:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:57.050361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 15995, MsgBus: 12956 2025-04-17T18:05:01.473777Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344706886514748:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:01.473883Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bda/r3tmp/tmpRf9lYU/pdisk_1.dat 2025-04-17T18:05:01.685926Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:01.707477Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:01.707566Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:01.708726Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15995, node 3 2025-04-17T18:05:01.816403Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:01.816431Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:01.816439Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:01.816559Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12956 TClient is connected to server localhost:12956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
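Note: the PRECONDITION_FAILED above is the generic surface of a datashard STATUS_CONSTRAINT_VIOLATION -- the write shipped two rows with the same primary key, so the shard rejected it with issue code 2012 ("Duplicate keys have been found.") and KQP rolled the transaction back. A minimal repro sketch follows, assuming the ydb Python SDK; the table layout and the exception class name are assumptions, while the error text matches the log.

```python
import ydb

def provoke_duplicate_keys(pool: ydb.SessionPool) -> None:
    def txn(session):
        # Illustrative schema; the test's real /Root/DataShard layout may differ.
        session.execute_scheme(
            "CREATE TABLE DataShard (Key Uint64, Value Utf8, PRIMARY KEY (Key));"
        )
        # Both rows carry Key = 1ul, so the shard answers
        # STATUS_CONSTRAINT_VIOLATION and KQP surfaces PRECONDITION_FAILED.
        session.transaction().execute(
            'INSERT INTO DataShard (Key, Value) VALUES (1ul, "a"u), (1ul, "b"u);',
            commit_tx=True,
        )
    try:
        pool.retry_operation_sync(txn)
    except ydb.issues.PreconditionFailed as err:
        print("expected:", err)  # "Duplicate keys have been found., code: 2012"
```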
2025-04-17T18:05:02.278130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.789764Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344719771417255:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:04.789849Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:04.805452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:05.005607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:05:05.248849Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344724066385931:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.248919Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.264293Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344724066385935:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.264379Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.264779Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344724066385940:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.268517Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:05:05.285197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344724066385942:2449], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:05:05.350079Z node 3 :TX_PROXY ERROR: Actor# [3:7494344724066386009:3240] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:06.474306Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344706886514748:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:06.474406Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::Followers >> KqpQueryService::ExecuteQueryPg >> KqpQueryService::ExecStats >> KqpQueryService::ShowCreateTableNotSuccess [GOOD] >> KqpQueryService::ShowCreateTableOnView |86.0%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-04-17T18:03:45.472706Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:45.492042Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:45.492475Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:45.492730Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:45.541722Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:45.546782Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:45.638631Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:45.640245Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:45.640324Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:45.640433Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:45.640823Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:45.641245Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:45.641336Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:45.641647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:45.641686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:45.718952Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:45.758386Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:45.758576Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:45.758669Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:45.758719Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at 
tablet: 9437184 2025-04-17T18:03:45.758752Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:45.758784Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.758920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:45.758975Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:45.759248Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:45.759333Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:45.759436Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:45.759484Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:45.759532Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:45.759574Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:45.759611Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:45.759642Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:45.759683Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:45.759783Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:45.759832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:45.759897Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:45.762595Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:45.762648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:45.762721Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:45.762854Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:45.763079Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:45.763158Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:45.763202Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:45.763237Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:45.763285Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:45.763330Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:45.763636Z 
node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:45.763697Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:45.763729Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:45.763759Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:45.763803Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:45.763839Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:45.763874Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:45.763903Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:45.763925Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:45.782287Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:45.782347Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:45.782376Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:45.782427Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:45.782467Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:45.782885Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:45.782919Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:45.782949Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:45.782993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:45.783012Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:45.783094Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:45.783124Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:45.783169Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:45.783193Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:45.791157Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:45.791213Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:45.791398Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:45.791423Z node 1 :TX_DATASHARD TRACE: 
StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:45.791456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:45.791502Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:45.791524Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:45.791551Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:45.791574Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:45.791617Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:45.791650Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:45.791688Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:45.791714Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:45.791995Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:45.792024Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:45.792039Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:45.792052Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:45.792064Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:45.792120Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:45.792150Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:45.792187Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:45.792244Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:45.792289Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:45.792356Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:45.792394Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:45.792445Z node 1 :TX_DATA ... 
4-17T18:05:07.284739Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.284888Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.284959Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.285005Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.285038Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.285238Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.285271Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.285316Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.285351Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.285508Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.285539Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.285583Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.285616Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.285810Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.285843Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.285887Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.285920Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.286388Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.286443Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.286493Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.286534Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.286705Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.286738Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.286782Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.286817Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.286961Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.287005Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 
2025-04-17T18:05:07.287051Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.287087Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.287247Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.287280Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.287325Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.287359Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.287544Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.287577Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.287620Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.287654Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.287863Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.287896Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.287943Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.287979Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.288125Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.288158Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.288201Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.288243Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.288446Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.288480Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.288525Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.288565Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.288804Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.288840Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.288885Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.288922Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.289080Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:07.289114Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-17T18:05:07.289159Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:07.289198Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:07.289498Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:05:07.289552Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.289597Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-17T18:05:07.289750Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:05:07.289786Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.289820Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-17T18:05:07.289919Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:05:07.289952Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.289986Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-17T18:05:07.290071Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:05:07.290104Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.290136Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-17T18:05:07.290260Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:05:07.290298Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.290330Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-17T18:05:07.290396Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:05:07.290427Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.290455Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 
dest 9437184 consumer 9437184 txId 10 2025-04-17T18:05:07.290554Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:05:07.290588Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.290619Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-17T18:05:07.290706Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-17T18:05:07.290742Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:07.290773Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 31 23 31 29 21 29 24 28 28 27 30 29 30 30 30 29 29 26 29 30 17 30 27 19 27 27 30 19 30 30 - - actual 31 23 31 29 21 29 24 28 28 27 30 29 30 30 30 29 29 26 29 30 17 30 27 19 27 27 30 19 30 30 - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] |86.0%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] >> KqpQueryService::TableSink_OltpDelete [GOOD] >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] >> KqpQueryService::TableSink_OltpUpdate [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter >> KqpQueryService::CreateTempTable [GOOD] >> KqpQueryService::CreateAndDropTopic >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2025-04-17T18:03:46.791812Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:46.805772Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:46.806241Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:46.806495Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:46.853719Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:46.858504Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:46.941221Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:46.943014Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:46.943091Z node 1 :TX_DATASHARD DEBUG: 
LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:46.943193Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:46.943572Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:46.943997Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:46.944063Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:46.944390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:46.944429Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:47.009107Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:47.045225Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:47.045438Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:47.045549Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:47.045589Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:47.045625Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:47.045662Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:47.045807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.045865Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.046136Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:47.046263Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:47.046377Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:47.046417Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:47.046467Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:47.046511Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:47.046553Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:47.046588Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:47.046635Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:47.046742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.046776Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.046838Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:47.049490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 
8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:47.049548Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:47.049630Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:47.049767Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:47.049816Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:47.049898Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:47.049957Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:47.049999Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:47.050049Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:47.050100Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:47.050399Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:47.050450Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:47.050493Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:47.050528Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:47.050576Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:47.050617Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:47.050658Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:47.050693Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:47.050723Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:47.062768Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:47.062842Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:47.062873Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:47.062912Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:47.062967Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:47.063529Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.063587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:47.063639Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:47.063707Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 
MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:47.063737Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:47.063867Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:47.063913Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:47.063989Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:47.064030Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:47.067926Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:47.067991Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:47.068194Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.068235Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:47.068291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:47.068333Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:47.068368Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:47.068411Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:47.068447Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:47.068491Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:47.068553Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:47.068604Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:47.068640Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:47.068804Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:47.068837Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:47.068859Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:47.068880Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:47.068902Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:47.068958Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:47.068985Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:47.069048Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:47.069087Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:47.069129Z node 1 :TX_DATASHARD TRACE: Operation 
[1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:47.069190Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:47.069224Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:47.069274Z node 1 :TX_DATA ... y: 1 ms 2025-04-17T18:05:08.771282Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.771497Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.771530Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.771577Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.771612Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.771803Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.771834Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.771882Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.771923Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.772078Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.772113Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.772160Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.772198Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.772337Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.772372Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.772423Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.772458Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.772657Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.772693Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.772737Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.772776Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.772991Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.773029Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.773071Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 
2025-04-17T18:05:08.773107Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.773270Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.773301Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.773344Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.773377Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.773538Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.773570Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.773612Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.773644Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.773814Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.773848Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.773889Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.773922Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.774090Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.774124Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.774167Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.774223Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.774390Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.774423Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.774466Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.774498Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.774695Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.774727Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.774769Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.774802Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.774935Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.774982Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 
2025-04-17T18:05:08.775021Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.775054Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.775184Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:08.775214Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-17T18:05:08.775263Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:05:08.775295Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:08.775569Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-17T18:05:08.775711Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.775753Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-17T18:05:08.775887Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-17T18:05:08.775924Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.775953Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-17T18:05:08.776037Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-17T18:05:08.776068Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.776097Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-17T18:05:08.776180Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-17T18:05:08.776213Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.776245Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-17T18:05:08.776329Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:05:08.776363Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.776393Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-17T18:05:08.776478Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient 
[32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:05:08.776508Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.776539Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-17T18:05:08.776639Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:05:08.776672Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.776707Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-17T18:05:08.776764Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-17T18:05:08.776796Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:08.776825Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 31 27 26 23 30 29 25 28 27 29 29 24 29 29 29 23 27 24 10 27 8 27 29 - 6 - 27 - - - - - actual 31 27 26 23 30 29 25 28 27 29 29 24 29 29 29 23 27 24 10 27 8 27 29 - 6 - 27 - - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 12359, MsgBus: 16025 2025-04-17T18:04:44.490703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344630612454661:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:44.491132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001be9/r3tmp/tmpzgjvHs/pdisk_1.dat 2025-04-17T18:04:45.021384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:45.029105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:45.033828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:45.057953Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12359, node 1 2025-04-17T18:04:45.194815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:45.194850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:45.194859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:45.195000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16025 TClient is connected to server localhost:16025 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:45.797164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:45.818641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:45.829671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:45.980956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:46.167721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:46.268211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.896075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344643497358176:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.896198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:48.209221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.245980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.276341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.311126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.347708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.395401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.479048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.579943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344647792326142:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:48.580027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:48.580266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344647792326147:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:48.583263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:48.592387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344647792326149:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:48.651334Z node 1 :TX_PROXY ERROR: Actor# [1:7494344647792326205:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:49.472309Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344630612454661:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:49.481123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:49.590495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.593170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.596935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.802274Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913091838, txId: 281474976710699] shutting down Trying to start YDB, gRPC: 30831, MsgBus: 11537 2025-04-17T18:04:52.992750Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344667495397081:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001be9/r3tmp/tmp57kFtI/pdisk_1.dat 2025-04-17T18:04:53.082249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:04:53.160120Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30831, node 2 2025-04-17T18:04:53.206538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:53.206622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:53.212590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:53.264843Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:53.264864Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:53.264871Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:53.264973Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11537 TClient is connected to server localhost:11537 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:53.750391 ... pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:56.544798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344684675268566:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:56.547767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:56.559652Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344684675268568:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:56.642089Z node 2 :TX_PROXY ERROR: Actor# [2:7494344684675268622:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:57.590781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.592079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.593116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.137098Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344667495397081:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:58.142522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:59.877597Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913099909, txId: 281474976715702] shutting down 2025-04-17T18:05:00.120673Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913100161, txId: 281474976715705] shutting down Trying to start YDB, gRPC: 62148, MsgBus: 17971 2025-04-17T18:05:01.066952Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344706000590835:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:01.067004Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001be9/r3tmp/tmpW8Hh8q/pdisk_1.dat 2025-04-17T18:05:01.210708Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:01.212924Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:01.212997Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:01.214539Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62148, node 3 2025-04-17T18:05:01.296692Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:01.296716Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:01.296726Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:01.296853Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17971 TClient is connected to server localhost:17971 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:05:01.835188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:05:01.842267Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:01.857215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:01.939325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:02.149734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:02.241870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.639103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344718885494511:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:04.639199Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:04.696213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:04.760091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:04.796331Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:04.864549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:04.901131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:04.947850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:05.023526Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:05.106816Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344723180462472:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.106894Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.107108Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344723180462477:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:05.110480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:05.130924Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344723180462479:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:05.201295Z node 3 :TX_PROXY ERROR: Actor# [3:7494344723180462538:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:06.069334Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344706000590835:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:06.069403Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:06.217333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.220226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.223346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.615460Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913108652, txId: 281474976715709] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: Trying to start YDB, gRPC: 20099, MsgBus: 22455 2025-04-17T18:04:42.096735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344623436307741:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:42.128022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bfe/r3tmp/tmplK7yJh/pdisk_1.dat 2025-04-17T18:04:42.531571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:42.544535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:42.544647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:42.547766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20099, node 1 2025-04-17T18:04:42.821554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:42.821576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:42.821582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:42.821686Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22455 TClient is connected to server localhost:22455 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:43.553585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:43.563676Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:45.692857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344636321210154:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.693003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.982748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:46.173289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:46.173492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:46.173707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:46.173779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:46.173868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:46.173953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:46.174027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:46.175301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:46.175485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:46.175599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:46.175712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:46.175805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494344640616177588:2335];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-17T18:04:46.186776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:46.186841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:46.187035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:46.187198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:46.187349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:46.187472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:46.187605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:46.187729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:46.187858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:46.187996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:46.188290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:46.188437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344640616177614:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:46.218145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640616177590:2336];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:46.218352Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7494344640616177590:2336];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:46.218712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640616177590:2336];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:46.218838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640616177590:2336];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:46.218935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640616177590:2336];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:46.219041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640616177590:2336];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:46.219179Z node 1 :TX_COLU ... MediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.274622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7494344666689517014:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.274627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7494344666689517016:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.274817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7494344662394549699:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.274838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7494344662394549697:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.275208Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7494344662394549699:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.275245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7494344662394549697:2335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.278714Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7494344666689517012:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.279013Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[2:7494344666689517012:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.279174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7494344666689517003:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.279448Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7494344666689517003:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.287303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7494344666689517123:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.287866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7494344662394549703:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.288372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7494344662394549701:2337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.288726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7494344662394549701:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.289595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7494344666689517123:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.290454Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7494344662394549703:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.308082Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=766424ce-1bb611f0-aabd4b99-895ce9f7;tablet_id=72075186224037890;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.308539Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.344469Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344649509647043:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:53.353139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:53.363137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=7665973c-1bb611f0-9af15daa-cfc27778;tablet_id=72075186224037894;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:04:53.363467Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 64511, MsgBus: 15658 2025-04-17T18:04:59.309400Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344697330242065:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:59.309444Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bfe/r3tmp/tmpTaZ7nR/pdisk_1.dat 2025-04-17T18:04:59.504332Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:59.520151Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:59.520234Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:59.522602Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64511, node 3 2025-04-17T18:04:59.634695Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:59.634721Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:59.634751Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:59.634877Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15658 TClient is connected to server localhost:15658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:00.235339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:00.266364Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:03.024943Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344714510111913:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:03.025030Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:03.043446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:03.139182Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344714510112016:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:03.139259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:03.139464Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344714510112021:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:03.143080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:03.154353Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344714510112023:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:03.231894Z node 3 :TX_PROXY ERROR: Actor# [3:7494344714510112074:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } WAIT_INDEXATION: 0 2025-04-17T18:05:04.310287Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344697330242065:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:04.310369Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlTx >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 28955, MsgBus: 30993 2025-04-17T18:04:43.435274Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344629396100535:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:43.446540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bee/r3tmp/tmpQ9VJ99/pdisk_1.dat 2025-04-17T18:04:43.888224Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:43.907227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:43.907338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:43.909641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28955, node 1 2025-04-17T18:04:44.040785Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:44.040807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:44.040813Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:44.040933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30993 TClient is connected to server localhost:30993 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:44.770641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:44.786024Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:46.854084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344642281003047:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:46.854210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.142115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.324746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:47.325026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:47.325318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:47.325484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:47.325626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:47.325741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:47.325858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:47.325982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:47.326090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:47.326253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:47.326391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:47.326510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344646575970515:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-17T18:04:47.326720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:47.326763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:47.326911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:47.327034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:47.327170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:47.327272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:47.327385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:47.327489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:47.327605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:47.327717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:47.327828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:47.327908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344646575970509:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:47.367903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344646575970492:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:47.367970Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7494344646575970492:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:47.368171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344646575970492:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:47.368287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344646575970492:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:47.368393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344646575970492:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:47.368509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344646575970492:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:47.368611Z node 1 :TX_COLU ... d=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:08.055290Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:08.055395Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:08.055511Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:08.055630Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:08.055752Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:08.055884Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:08.056003Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344731396057187:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:08.067840Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:08.067916Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:08.068059Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:08.068102Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:08.068771Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:08.068819Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:08.068941Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:08.068981Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:08.069049Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:08.069083Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:08.069130Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:08.069164Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:08.069880Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:08.069934Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:08.070167Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:08.070228Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:08.070391Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:08.070434Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:08.070661Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:08.070694Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:08.070853Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:08.070885Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:08.100041Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.100056Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.107061Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.107481Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.113252Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.113777Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.119915Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.120277Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.125857Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.127368Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:08.133354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-04-17T18:05:08.244121Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344735691024814:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:08.244240Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:08.244785Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344735691024819:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:08.249393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-04-17T18:05:08.261153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344735691024821:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:05:08.336144Z node 3 :TX_PROXY ERROR: Actor# [3:7494344735691024872:2659] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:08.501413Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:05:08.715837Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715667; 2025-04-17T18:05:08.837720Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344714216187091:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:08.837793Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::SeveralCTAS-UseSink [GOOD] >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryService::StreamExecuteCollectMeta >> KqpQueryService::TableSink_OlapDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [GOOD] Test command err: 2025-04-17T18:02:43.883905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344112080831412:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.883969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018a1/r3tmp/tmpXJ2aiV/pdisk_1.dat 2025-04-17T18:02:44.130826Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:02:44.381406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.386993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.387092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.391128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21591, node 1 2025-04-17T18:02:44.571848Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/0018a1/r3tmp/yandexf8U4PL.tmp 2025-04-17T18:02:44.571875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/0018a1/r3tmp/yandexf8U4PL.tmp 2025-04-17T18:02:44.572076Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/0018a1/r3tmp/yandexf8U4PL.tmp 2025-04-17T18:02:44.572224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.781655Z INFO: TTestServer started on Port 18608 GrpcPort 21591 TClient is connected to 
server localhost:18608 PQClient connected to localhost:21591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.140728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.154884Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.177807Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:02:45.201078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:02:45.346782Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:46.979590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344124965734123:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.979596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344124965734129:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.979745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.984011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480
2025-04-17T18:02:46.985776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344124965734169:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.985848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:02:46.998862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344124965734137:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking }
2025-04-17T18:02:47.068035Z node 1 :TX_PROXY ERROR: Actor# [1:7494344129260701489:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:02:47.787896Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344129260701505:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819548Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDRjZDJiZGUtYmJhYWE0Yi0yYzkzNTg1OS1lMmNmMDBjZQ==, ActorId: [1:7494344124965734120:2337], ActorState: ExecuteState, TraceId: 01js2ens9y60prv338zj3wc7b9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.833351Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.193979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.235902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.337426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.444523Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344112080831412:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.447577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344142145603721:2659] === CheckClustersList. 
Ok 2025-04-17T18:02:55.089784Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:55.158483Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:55.211990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344163620440456:2788], Recipient [1:7494344116375799139:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:55.212142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344163620440452:2785], Recipient [1:7494344116375799139:2196]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T18:02:55.212168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:55.292815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:55.293138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:02:55.293330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to t ... 
44Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_17402243464329503561_v1 2025-04-17T18:05:09.608561Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_1_17402243464329503561_v1 2025-04-17T18:05:09.618593Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344709206267977:2865], Partition 2, Sender [0:0:0], Recipient [6:7494344709206268053:2872], Cookie: 0 2025-04-17T18:05:09.618690Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344709206268053:2872]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.618717Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.618768Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.618852Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.618898Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.618931Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.706335Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344674846528005:2463], Partition 0, Sender [0:0:0], Recipient [6:7494344674846528065:2467], Cookie: 0 2025-04-17T18:05:09.706422Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344674846528065:2467]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.706449Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.706498Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.706567Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.706591Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.706624Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.708082Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344709206267979:2866], Partition 1, Sender [0:0:0], Recipient [6:7494344709206268056:2874], Cookie: 0 2025-04-17T18:05:09.708155Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344709206268056:2874]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.708173Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.708212Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.708287Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.708316Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.708364Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.718862Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344709206267977:2865], Partition 2, Sender [0:0:0], Recipient [6:7494344709206268053:2872], Cookie: 0 2025-04-17T18:05:09.718938Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344709206268053:2872]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.718969Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.719016Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.719075Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.719110Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.719147Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.807797Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344674846528005:2463], Partition 0, Sender [0:0:0], Recipient [6:7494344674846528065:2467], Cookie: 0 2025-04-17T18:05:09.807883Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344674846528065:2467]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.807922Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.807972Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.808047Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.808096Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.808129Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.808421Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344709206267979:2866], Partition 1, Sender [0:0:0], Recipient [6:7494344709206268056:2874], Cookie: 0 2025-04-17T18:05:09.808469Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344709206268056:2874]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.808495Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.808527Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.808570Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.808588Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.808624Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.819195Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344709206267977:2865], Partition 2, Sender [0:0:0], Recipient [6:7494344709206268053:2872], Cookie: 0 2025-04-17T18:05:09.819299Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344709206268053:2872]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.819326Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.819377Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.819457Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.819482Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.819519Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.908674Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344674846528005:2463], Partition 0, Sender [0:0:0], Recipient [6:7494344674846528065:2467], Cookie: 0 2025-04-17T18:05:09.908781Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344674846528065:2467]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.908807Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.908858Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.908936Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.908963Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.908980Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344709206267979:2866], Partition 1, Sender [0:0:0], Recipient [6:7494344709206268056:2874], Cookie: 0 2025-04-17T18:05:09.908999Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.909048Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344709206268056:2874]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.909066Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.909100Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.909145Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.909163Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.909196Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:09.919868Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344709206267977:2865], Partition 2, Sender [0:0:0], Recipient [6:7494344709206268053:2872], Cookie: 0 2025-04-17T18:05:09.919952Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344709206268053:2872]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.919976Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:09.920022Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:09.920092Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:09.920116Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:09.920149Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> KqpQueryService::TableSink_DisableSink [GOOD] >> KqpDocumentApi::RestrictWrite [GOOD] >> KqpDocumentApi::AllowRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19376, MsgBus: 6018 2025-04-17T18:04:55.305432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344678978197972:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:55.306422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bbb/r3tmp/tmpuMwB5E/pdisk_1.dat 2025-04-17T18:04:55.724886Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:55.736786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:55.736893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19376, node 1 2025-04-17T18:04:55.739200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:55.786763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:55.786789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:55.786799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:55.786912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6018 TClient is connected to server localhost:6018 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:56.280531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.336955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.468833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.609403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.687742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:58.283524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344691863101503:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:58.283608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:58.584659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:04:58.652033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:04:58.719154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:04:58.746863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:04:58.817922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:04:58.899826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:04:58.969720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:04:59.063650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344696158069480:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:59.063716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:59.063940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344696158069485:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:59.067527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:04:59.083683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480
2025-04-17T18:04:59.084397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344696158069487:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:59.154021Z node 1 :TX_PROXY ERROR: Actor# [1:7494344696158069541:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:00.302279Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344678978197972:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:00.302348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20221, MsgBus: 8257 2025-04-17T18:05:02.011221Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344707963310439:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:02.011277Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bbb/r3tmp/tmpLqR6u3/pdisk_1.dat 2025-04-17T18:05:02.215112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:02.215188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:02.216702Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:02.217786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20221, node 2 2025-04-17T18:05:02.354666Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:02.354690Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:02.354697Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:02.354792Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8257 TClient is connected to server localhost:8257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:05:02.792311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:02.813919Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:05.034572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344720848212963:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:05.034656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:05.034869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344720848212991:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:05.037964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T18:05:05.048939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344720848212993:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:05:05.107580Z node 2 :TX_PROXY ERROR: Actor# [2:7494344720848213044:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:05.172039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-17T18:05:05.334958Z node 2 :TX_PROXY ERROR: Actor# [2:7494344720848213327:2503] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:05.348249Z node 2 :TX_PROXY ERROR: Actor# [2:7494344720848213334:2508] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/MTFhYjEzNDEtZmIzYWU2MDMtOTExYTM0MGItNzkxZDBkM2M=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:05.350661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:05.491970Z node 2 :TX_PROXY ERROR: Actor# [2:7494344720848213522:2622] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:05.494154Z node 2 :TX_PROXY ERROR: Actor# [2:7494344720848213529:2627] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/MTFhYjEzNDEtZmIzYWU2MDMtOTExYTM0MGItNzkxZDBkM2M=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:05.496660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9889, MsgBus: 13860 2025-04-17T18:05:06.789952Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344727109955459:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:06.790003Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bbb/r3tmp/tmpyN41NT/pdisk_1.dat 2025-04-17T18:05:06.913631Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9889, node 3 2025-04-17T18:05:06.945309Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:06.945416Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:06.967989Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:07.006883Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:07.006912Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:07.006921Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:07.007053Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13860 TClient is connected to server localhost:13860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:05:07.475837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.083911Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344744289825273:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:10.084007Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:10.086316Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344744289825300:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:10.092335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T18:05:10.105608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344744289825302:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:05:10.197755Z node 3 :TX_PROXY ERROR: Actor# [3:7494344744289825353:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.274889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-17T18:05:10.438304Z node 3 :TX_PROXY ERROR: Actor# [3:7494344744289825635:2502] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.440061Z node 3 :TX_PROXY ERROR: Actor# [3:7494344744289825642:2507] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZTQ3OGJkMjMtMzc4MDI4YzgtZmFmZDVmNWItMmEzZWUwMGE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.442772Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.600992Z node 3 :TX_PROXY ERROR: Actor# [3:7494344744289825832:2622] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.604287Z node 3 :TX_PROXY ERROR: Actor# [3:7494344744289825839:2627] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZTQ3OGJkMjMtMzc4MDI4YzgtZmFmZDVmNWItMmEzZWUwMGE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.606765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges >> KqpSystemView::FailResolve >> KqpSystemView::NodesRange2 >> KqpSystemView::QueryStatsSimple >> KqpSysColV1::StreamSelectRowAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: Trying to start YDB, gRPC: 12364, MsgBus: 62471 2025-04-17T18:04:42.551825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344623067618850:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:42.551867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf9/r3tmp/tmpbrccos/pdisk_1.dat 2025-04-17T18:04:43.221340Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-04-17T18:04:43.238827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:43.238947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:43.246591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12364, node 1 2025-04-17T18:04:43.354758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:43.354784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:43.354791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:43.354893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62471 TClient is connected to server localhost:62471 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:44.031935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:44.070897Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:46.068458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344640247488637:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:46.068553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:46.357191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:46.555002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:46.555236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:46.555587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:46.555713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:46.555760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:46.555806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:46.555843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:46.555977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:46.556021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:46.556111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:46.556137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:46.556252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:46.556270Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:46.556384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:46.556418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:46.556536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:46.556565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:46.556665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:46.556701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:46.556780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:46.556832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344640247488801:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:46.556905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:46.557037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:46.557135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494344640247488822:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:46.611440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344640247488818:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:46.611499Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494344640247488818:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:46.611694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344640247488818:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:46.611806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344640247488818:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:46.611927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344640247488818:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:46.612016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494344640247488818:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:46.612112Z node 1 :TX_COLU ... 6224037890;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.764569Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7494344712932752973:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.764641Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7494344712932752921:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.764689Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7494344712932752921:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.764786Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7494344712932753051:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.764823Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7494344712932753051:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.764916Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7494344712932752971:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.764945Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7494344712932752971:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765025Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037894;self_id=[3:7494344712932752948:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765046Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7494344712932752948:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765192Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7494344712932752930:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765214Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7494344712932752930:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765304Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7494344712932752937:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765347Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7494344712932752937:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765444Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7494344712932752932:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765470Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7494344712932752932:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765557Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7494344712932752919:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.765599Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7494344712932752919:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-17T18:05:05.875168Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672; 2025-04-17T18:05:05.882326Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715672;commit_lock_id=281474976715671;fline=manager.cpp:94;broken_lock_id=281474976715669; 2025-04-17T18:05:05.882609Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.016253Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[3:7494344712932752928:2337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.016614Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344712932752919:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.016838Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[3:7494344712932752937:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.017006Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7494344712932752930:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.020857Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7494344712932752928:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.020876Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344712932752919:2335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.021421Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[3:7494344712932752937:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.021979Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7494344712932752930:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.022014Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7494344712932752932:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.022453Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7494344712932753051:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.022538Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7494344712932752932:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.024048Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7494344712932752971:2342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.024315Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7494344712932753051:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.024527Z node 3 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037890;self_id=[3:7494344712932752973:2343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.025629Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7494344712932752921:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.027147Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7494344712932752971:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.027339Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7494344712932752973:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.027590Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7494344712932752921:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.031687Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7494344712932752948:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.034033Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7494344712932752948:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.054320Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=7dfdec56-1bb611f0-b548e782-e9d84262;tablet_id=72075186224037890;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.054781Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.056013Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=7dfd2e92-1bb611f0-9b894fa2-77e0648a;tablet_id=72075186224037897;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:05:06.056236Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSysColV1::StreamSelectRowById >> KqpQueryService::ReadManyRanges [GOOD] >> KqpQueryService::ReadManyShardsRange >> KqpSystemView::Sessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: Trying to start YDB, gRPC: 28073, MsgBus: 20176 2025-04-17T18:04:38.139800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344606956146555:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:38.140005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c22/r3tmp/tmpTtTNvB/pdisk_1.dat 2025-04-17T18:04:38.532943Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28073, node 1 2025-04-17T18:04:38.578328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:38.578449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:38.583261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:38.690703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:38.690723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:38.690729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:38.690828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20176 TClient is connected to server localhost:20176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:39.261367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:39.298227Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:41.247393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344619841048967:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:41.247530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:41.528608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:41.694688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:41.694688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:41.694882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:41.694903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:41.695163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:41.695235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:41.695292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:41.695317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:41.695425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:41.695546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:41.695689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:41.695796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:41.695886Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:41.695989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:41.696088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:41.696189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:41.696303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:41.696408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494344619841049101:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:41.696955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:41.697200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:41.697346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:41.697494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:41.697645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:41.697781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344619841049153:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:41.730304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494344619841049119:2337];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:41.730361Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494344619841049119:2337];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:41.730523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494344619841049119:2337];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:41.730627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494344619841049119:2337];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:41.730730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494344619841049119:2337];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:41.730825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494344619841049119:2337];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:41.730953Z node 1 :TX_COLU ... 86224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:10.553968Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:10.554066Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:10.554092Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:10.569862Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:10.569933Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:10.570045Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:10.570100Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:10.570418Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:10.570460Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:10.570578Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 
2025-04-17T18:05:10.570615Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:10.570686Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:10.570713Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:10.570758Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:10.570788Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:10.571593Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:10.571648Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:10.571897Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:10.571953Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:10.572118Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:10.572159Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:10.572425Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:10.572469Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:10.572608Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:10.572643Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:10.603234Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.603426Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.609313Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.609942Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.613726Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.615774Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.621621Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.628213Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.634638Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.639013Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.641042Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.644090Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.646507Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.650051Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.652167Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.656998Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:10.671447Z 
node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344744495367323:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:10.671559Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:10.671808Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344744495367328:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:10.676319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:10.687886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344744495367330:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:10.767582Z node 3 :TX_PROXY ERROR: Actor# [3:7494344744495367381:2713] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.872512Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344744495367407:2436] TxId: 281474976715661. Ctx: { TraceId: 01js2et5mcbm9gxz9twq7s9f86, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGYwZGJhZGEtZGVmZTUyZjEtZmI5NTRkMTktNjcxZWQ0ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2025-04-17T18:05:10.872756Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGYwZGJhZGEtZGVmZTUyZjEtZmI5NTRkMTktNjcxZWQ0ZWY=, ActorId: [3:7494344744495367321:2436], ActorState: ExecuteState, TraceId: 01js2et5mcbm9gxz9twq7s9f86, Create QueryResponse for error on request, msg: 2025-04-17T18:05:10.960908Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344723020529487:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:10.960973Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter >> KqpSystemView::PartitionStatsRange2 >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpQueryService::ShowCreateTableOnView [GOOD] >> KqpQueryService::ShowCreateView >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> KqpQueryService::TableSink_BadTransactions >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryPgTableSelect >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::ExecStatsPlan >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpSysColV0::UpdateAndDelete |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> KqpSysColV1::SelectRowAsterisk >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] Test command err: Trying to start YDB, gRPC: 6041, MsgBus: 28245 2025-04-17T18:04:56.667868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344683345590627:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:56.668725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb7/r3tmp/tmpLxtuM6/pdisk_1.dat 2025-04-17T18:04:57.099993Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:57.103533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-17T18:04:57.103633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:57.105829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6041, node 1 2025-04-17T18:04:57.226817Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:57.226848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:57.226855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:57.226979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28245 TClient is connected to server localhost:28245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:57.794896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:59.779523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344696230493171:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.779647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.786326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344696230493183:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:59.793669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:04:59.805646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344696230493185:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:04:59.902143Z node 1 :TX_PROXY ERROR: Actor# [1:7494344696230493236:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:00.210992Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T18:05:00.231708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-04-17T18:05:00.500314Z node 1 :TX_PROXY ERROR: Actor# [1:7494344700525460772:2470] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:00.512640Z node 1 :TX_PROXY ERROR: Actor# [1:7494344700525460779:2475] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/N2QzNTBlNTUtM2E4MmMzMDAtODU2ODM3MTctYzllMDViYjY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:00.543861Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:05:00.555414Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344700525460831:2366], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:00.556637Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2QzNTBlNTUtM2E4MmMzMDAtODU2ODM3MTctYzllMDViYjY=, ActorId: [1:7494344696230493142:2327], ActorState: ExecuteState, TraceId: 01js2esvqz8jkq3p7jd6q9w27h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:05:00.589801Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344700525460843:2373], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:00.590374Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2I3N2ZmNjQtNTYwNmY0YmItZGE5OWJhNjUtNGE5ZjQ5Zg==, ActorId: [1:7494344700525460839:2370], ActorState: ExecuteState, TraceId: 01js2esvs2ej3zt4n1rhecjv5f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 32453, MsgBus: 14835 2025-04-17T18:05:01.523164Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344706557944591:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:01.523981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb7/r3tmp/tmpFSQYoT/pdisk_1.dat 2025-04-17T18:05:01.772977Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:01.789254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:01.789339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:01.792112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32453, node 2 2025-04-17T18:05:01.906758Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:01.906787Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:01.906797Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:01.906974Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14835 TClient is connected to server localhost:14835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:02.427481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:02.446132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:02.559029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:02.722396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:02.811364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.898275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344719442848270:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:04.898345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:04. ... OLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715669;
2025-04-17T18:05:13.368125Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715669;
2025-04-17T18:05:13.798000Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.798132Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.798223Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.798370Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.798568Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.798728Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.798896Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.799102Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.799195Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=23;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799251Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=24;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799309Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799366Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=26;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799419Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=27;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799478Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=28;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799533Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=29;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799541Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.799583Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=30;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799637Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=31;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799687Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=32;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799714Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.799735Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=33;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799791Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=34;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037910;receive=72075186224037915;
2025-04-17T18:05:13.799883Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.800046Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.800331Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
2025-04-17T18:05:13.801858Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=36;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.801919Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=37;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.801983Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=38;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802042Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=39;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802093Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=40;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802210Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=41;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802274Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=42;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802353Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=43;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802415Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=44;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802467Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=45;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802518Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=46;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.802566Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7494344749883324241:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=47;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037909;
2025-04-17T18:05:13.803259Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715672;
>> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD]
>> KqpQueryService::ClosedSessionRemovedFromPool [GOOD]
>> KqpQueryService::CloseConnection
>> KqpSysColV1::StreamInnerJoinSelect
>> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD]
>> KqpQueryService::ReadDatashardAndColumnshard
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD]
Test command err: Trying to start YDB, gRPC: 25540, MsgBus: 1587
2025-04-17T18:00:29.845038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343538947047152:2138];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:00:29.879963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a8f/r3tmp/tmpDARfKK/pdisk_1.dat
2025-04-17T18:00:30.887427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:00:31.251040Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:00:31.309006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:00:31.309113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:00:31.320715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25540, node 1
2025-04-17T18:00:31.555183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:00:31.555196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:00:31.555216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:00:31.555306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1587
TClient is connected to server localhost:1587
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:00:32.263523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:32.322572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:32.514417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:32.691956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:32.817092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:33.795187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343556126918958:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:00:33.795402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:00:34.136975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:00:34.244556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:00:34.353356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:00:34.417848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:00:34.484327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:00:34.554885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:00:34.632671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:00:34.730705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343560421887408:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:00:34.730772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:00:34.734712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343560421887413:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:00:34.738712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:00:34.846401Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343538947047152:2138];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:00:34.846467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:00:34.850173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343560421887416:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:00:34.933895Z node 1 :TX_PROXY ERROR: Actor# [1:7494343560421887518:4954] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:00:36.551106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:00:46.192604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T18:00:46.192628Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:00:51.710968Z node 1 :TX_DATASHARD ERROR: CPU usage 67.5086 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037922 table: [/Root/LargeTable]
2025-04-17T18:01:52.523582Z node 1 :TX_DATASHARD ERROR: CPU usage 111.9 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037922 table: [/Root/LargeTable]
2025-04-17T18:02:23.121964Z node 1 :TX_DATASHARD ERROR: CPU usage 96.5533 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037924 table: [/Root/LargeTable]
2025-04-17T18:02:30.236123Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923028694304}: tablet 72075186224037924 could not find a group for channel 0 pool /Root:test
2025-04-17T18:02:30.236187Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923028694304}: tablet 72075186224037924 could not find a group for channel 1 pool /Root:test
2025-04-17T18:02:30.236337Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923028694304}: tablet 72075186224037924 wasn't changed
2025-04-17T18:02:30.236371Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923028694304}: tablet 72075186224037924 skipped channel 0
2025-04-17T18:02:30.236427Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923028694304}: tablet 72075186224037924 skipped channel 1
2025-04-17T18:02:31.170976Z node 1 :BS_SKELETON WARN: PDiskId# 1 VDISK[82000000:_:0:0:0]: (2181038080) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01
2025-04-17T18:02:31.371050Z node 1 :BS_SKELETON WARN: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01
2025-04-17T18:02:32.174027Z node 1 :BS_SKELETON ERROR: PDiskId# 1 VDISK[82000000:_:0:0:0]: (2181038080) TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01
2025-04-17T18:02:32.337028Z node 1 :BS_PROXY_PUT ERROR: [f66fec142f0c9933] Result# TEvPutResult {Id# [72075186224037924:1:36:1:69771:8388608:0] Status# ERROR StatusFlags# { Valid Cyan LightYellow Yellow LightOrange PreOrange Orange } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037924:1:36:1:69771:8388608:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "out of space", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0.051282} GroupId# 2181038080 Marker# BPP12
2025-04-17T18:02:32.365999Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037924.1.36, eph 81}
put [72075186224037924:1:36:1:69771:8388608:0] result ERROR flags { Valid Cyan LightYellow Yellow LightOrange PreOrange Orange } left 8388608b 2025-04-17T18:02:32.366088Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037924.1.36, eph 81} end=0, 141 blobs 0r (max 1620), put Spent{time=19.56s,wait=1.713s,interrupts=45} 2025-04-17T18:02:32.366225Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037924:1:107} Compact 17 on TGenComp ... nt\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"DurationUs\":{\"Count\":1,\"Sum\":167000,\"Max\":167000,\"Min\":167000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"BaseTimeMs\":1744913104165,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":37,\"Max\":37,\"Min\":37},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":172,\"Max\":172,\"Min\":172},\"ActiveMessageMs\":{\"Count\":1,\"Max\":172,\"Min\":6},\"FirstMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"Bytes\":{\"Count\":1,\"Sum\":7793,\"Max\":7793,\"Min\":7793},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":166000,\"Max\":166000,\"Min\":166000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":172,\"Max\":172,\"Min\":172},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":170,\"Max\":170,\"Min\":170},\"FirstMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"ActiveMessageMs\":{\"Count\":1,\"Max\":172,\"Min\":6},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":166000,\"Max\":166000,\"Min\":166000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":159930,\"Max\":159930,\"Min\":159930},\"WaitPeriods\":{\"Count\":1,\"Sum\":35,\"Max\":35,\"Min\":35},\"WaitMessageMs\":{\"Count\":1,\"Max\":170,\"Min\":2}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":12698,\"Max\":12698,\"Min\":12698},\"StageDurationUs\":167000,\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResultBytes\":{\"Count\":1,\"Sum\":7793,\"Max\":7793,\"Min\":7793},\"OutputBytes\":{\"Count\":1,\"Sum\":7793,\"Max\":7793,\"Min\":7793},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":43,\"Max\":43,\"Min\":43},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":171,\"Max\":171,\"Min\":171},\"ActiveMessageMs\":{\"Count\":1,\"Max\":171,\"Min\":6},\"FirstMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":165000,\"Max\":165000,\"Min\":165000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":170,\"Max\":170,\"Min\":170},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":170,\"Max\":170,\"Min\":170},\"FirstMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"ActiveMessageMs\":{\"Count\":1,\"Max\":170,\"Min\":6},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":164000,\"Max\":164000,\"Min\":164000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":39230,\"Max\":39230,\"Min\":39230},\"WaitPeriods\":{\"Count\":1,\"Sum\":37,\"Max\":37,\"Min\":37},\"WaitMessageMs\":{\"Count\":1,\"Max\":170,\"Min\"
:2}}}],\"UpdateTimeMs\":173,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":147756,\"CpuTimeUs\":144201},\"ProcessCpuTimeUs\":501,\"TotalDurationUs\":444597,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":101566},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Reverse\":false,\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'367) \'(\'\"_id\" \'\"c4f5e240-5020c816-27ad8876-12b30712\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'379) \'(\'\"_id\" \'\"38923e9-1663b94a-c20a241a-9f719156\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 444597 total_cpu_time_us: 339869 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":2},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744913104\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"99432d42-ffb1472e-e4eca6ef-f89eb8a3\",\"version\":\"1.0\"}" Trying to start YDB, gRPC: 16820, MsgBus: 21171 2025-04-17T18:05:05.480703Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344724468239918:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:05.480766Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a8f/r3tmp/tmpATNQQD/pdisk_1.dat 2025-04-17T18:05:05.591599Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:05.622653Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:05.622757Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:05.624209Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 4 2025-04-17T18:05:05.694733Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:05.694755Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:05.694761Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:05.694893Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21171 TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:05:06.259626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:06.264074Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:05:06.276900Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:09.183527Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344741648114429:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:09.183684Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:09.184187Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344741648114456:2638], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:09.189071Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:05:09.205062Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344741648114458:2639], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-17T18:05:09.279858Z node 4 :TX_PROXY ERROR: Actor# [4:7494344741648114529:5623] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpQueryService::DdlTx [GOOD]
>> KqpQueryService::DdlWithExplicitTransaction
>> KqpQueryService::ExecuteQueryInteractiveTx [GOOD]
>> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery
>> KqpQueryService::StreamExecuteCollectMeta [GOOD]
>> KqpQueryService::ShowCreateViewOnTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 65486, MsgBus: 11061
2025-04-17T18:04:49.990546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344655841588685:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:49.995075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bc8/r3tmp/tmpPWu7UM/pdisk_1.dat
2025-04-17T18:04:50.434979Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:04:50.468171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:04:50.468312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 65486, node 1
2025-04-17T18:04:50.473337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:04:50.570729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:04:50.570749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:04:50.570761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:04:50.570879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11061
TClient is connected to server localhost:11061
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:04:51.180532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:04:51.203106Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:04:53.057998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344673021458534:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:53.058103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:53.403628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:04:53.552435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344673021458637:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:53.552508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:53.558398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344673021458642:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:04:53.562130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-17T18:04:53.572787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344673021458644:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-17T18:04:53.631231Z node 1 :TX_PROXY ERROR: Actor# [1:7494344673021458695:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:04:54.125281Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344677316426089:2375], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029
2025-04-17T18:04:54.125491Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTdkNzY2ZjQtZGZjYzM5MzktMTE4NWJlNDItNDUzODE5YzQ=, ActorId: [1:7494344677316426087:2374], ActorState: ExecuteState, TraceId: 01js2esne62m1a07yxjkqfnejc, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
WAIT_INDEXATION: 0
2025-04-17T18:04:54.987270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344655841588685:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:54.990459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
Trying to start YDB, gRPC: 6936, MsgBus: 20072
2025-04-17T18:04:59.969500Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344696560876657:2068];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:04:59.969558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bc8/r3tmp/tmplEcPTa/pdisk_1.dat
2025-04-17T18:05:00.115068Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:00.119368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:05:00.119442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:05:00.124758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6936, node 2
2025-04-17T18:05:00.214759Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:05:00.214785Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:05:00.214791Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:05:00.214887Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20072
TClient is connected to server localhost:20072
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:05:00.725949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:00.734659Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:05:03.370334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344713740746485:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:03.370431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:03.384160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:05:03.444974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344713740746587:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:03.445115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:03.446690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344713740746592:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:03.453728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:05:03.467555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344713740746594:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-17T18:05:03.526496Z node 2 :TX_PROXY ERROR: Actor# [2:7494344713740746645:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
WAIT_INDEXATION: 0
2025-04-17T18:05:04.969692Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344696560876657:2068];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:05:04.969770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
Trying to start YDB, gRPC: 21059, MsgBus: 4900
2025-04-17T18:05:10.245667Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344745488937392:2062];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:05:10.245759Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bc8/r3tmp/tmpQPlusl/pdisk_1.dat
2025-04-17T18:05:10.355878Z node 3 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21059, node 3
2025-04-17T18:05:10.387923Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:05:10.387994Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:05:10.390839Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:05:10.426708Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:05:10.426733Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:05:10.426740Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:05:10.426848Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4900
TClient is connected to server localhost:4900
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-04-17T18:05:10.898243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T18:05:13.343746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344758373839925:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:13.343821Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:13.383775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:05:13.600783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-04-17T18:05:13.837901Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344758373841276:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:13.838002Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:13.838287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344758373841281:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:13.842228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-04-17T18:05:13.855073Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344758373841283:2445], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-04-17T18:05:13.946323Z node 3 :TX_PROXY ERROR: Actor# [3:7494344758373841350:3231] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:05:15.246258Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344745488937392:2062];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:05:15.246325Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpQueryService::ReadManyShardsRange [GOOD]
>> KqpQueryService::ReadManyRangesAndPoints
>> KqpDocumentApi::AllowRead [GOOD]
>> KqpDocumentApi::RestrictAlter
>> KqpSystemView::FailNavigate
>> KqpQueryService::Followers [GOOD]
>> KqpQueryService::FlowControllOnHugeRealTable-LongRow
|86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest
>> KqpQueryService::ExecuteQueryPgTableSelect [GOOD]
>> KqpQueryService::ExecuteQueryMultiScalar
>> KqpQueryServiceScripts::ExplainScript [GOOD]
>> KqpQueryServiceScripts::ForgetScriptExecution
>> KqpQueryService::TableSink_OlapOrder [GOOD]
>> KqpQueryService::TableSink_OlapRWQueries
>> KqpSysColV0::InnerJoinSelectAsterisk
>> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD]
>> KqpQueryServiceScripts::InvalidFetchToken
>> KqpSystemView::FailResolve [GOOD]
>> KqpSysColV1::StreamSelectRowById [GOOD]
>> KqpQueryService::ExecStatsPlan [GOOD]
>> KqpQueryService::ExecStatsAst
>> KqpSysColV1::StreamSelectRowAsterisk [GOOD]
>> KqpSystemView::PartitionStatsParametricRanges [GOOD]
>> KqpQueryService::ShowCreateView [GOOD]
>> KqpQueryServiceScripts::TestTruncatedByRows [GOOD]
>> KqpQueryServiceScripts::TestTruncatedBySize
>> KqpSystemView::PartitionStatsRange2 [GOOD]
>> KqpSystemView::PartitionStatsFollower
>> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD]
>> KqpQueryService::TableSink_OltpOrder
>> KqpQueryService::TableSink_BadTransactions [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD]
Test command err: Trying to start YDB, gRPC: 26175, MsgBus: 24373
2025-04-17T18:05:13.410049Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344754916798341:2194];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:05:13.410945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bad/r3tmp/tmpUHUUK7/pdisk_1.dat
2025-04-17T18:05:13.913279Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:13.921826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:05:13.921919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:05:13.924084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26175, node 1
2025-04-17T18:05:14.058910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:05:14.058944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:05:14.058950Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:05:14.059053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24373
TClient is connected to server localhost:24373
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:05:14.895529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-17T18:05:14.926163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:05:15.177054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:15.333745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:15.446130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:16.793847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344767801701862:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:16.793939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:17.432229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:05:17.482282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:05:17.513152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:05:17.541280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:05:17.571428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:05:17.625147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:05:17.699802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:05:17.782514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344772096669823:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:17.782622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:17.783034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344772096669828:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:17.788695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:05:17.800720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344772096669830:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:17.859842Z node 1 :TX_PROXY ERROR: Actor# [1:7494344772096669885:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:18.409004Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344754916798341:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.409065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.239163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.473790Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7494344780686604893:3849], for# user0@builtin, access# SelectRow 2025-04-17T18:05:19.474046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-17T18:05:19.484326Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmQ3YTk1MzgtNzYxMTdmZjAtNWViMzdlMjAtYWI4YWMxNTA=, ActorId: [1:7494344780686604847:2518], ActorState: ExecuteState, TraceId: 01js2ete1s3sqwnkpmxc1hgcyj, Create QueryResponse for error on request, msg: 2025-04-17T18:05:19.485218Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913119471, txId: 281474976710673] shutting down 2025-04-17T18:05:19.486072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2ete1s3sqwnkpmxc1hgcyj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ3YTk1MzgtNzYxMTdmZjAtNWViMzdlMjAtYWI4YWMxNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> KqpQueryService::CreateAndAlterTopic [GOOD] >> KqpQueryService::CreateOrDropTopicOverTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 28971, MsgBus: 18111 2025-04-17T18:05:13.400310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344757117534387:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.402655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bae/r3tmp/tmpJY4UBv/pdisk_1.dat 2025-04-17T18:05:13.872141Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:13.877642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 28971, node 1 2025-04-17T18:05:13.882354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:13.887920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:13.890684Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:05:13.890876Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:05:14.058086Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.058118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.058125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.058307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18111 TClient is connected to server localhost:18111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:14.931318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:14.978037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:15.157272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.330983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.399177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.840788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344770002437967:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.840914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.437123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.513325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.577918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.616347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.667924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.712796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.763533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.868292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344774297405937:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.868363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.868441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344774297405942:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.872823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:17.888827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344774297405944:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:17.993860Z node 1 :TX_PROXY ERROR: Actor# [1:7494344774297406001:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:18.399905Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344757117534387:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.399965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.594429Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913119614, txId: 281474976710672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 7591, MsgBus: 30992 2025-04-17T18:05:13.393990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344757559270834:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.394057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bc0/r3tmp/tmpjGFepb/pdisk_1.dat 2025-04-17T18:05:13.889520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:13.889662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7591, node 1 2025-04-17T18:05:13.894472Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:05:13.894517Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:05:13.895299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:13.897099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:14.059758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.059779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.059790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.059893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30992 TClient is connected to server localhost:30992 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:14.858949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:14.931974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.125030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.361624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.460685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:17.028271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344774739141784:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.028440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.432314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.494983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.577654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.624472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.669554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.731134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.781035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.844812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344774739142449:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.844893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.845378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344774739142454:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.849392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:17.860921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344774739142456:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:17.942160Z node 1 :TX_PROXY ERROR: Actor# [1:7494344774739142511:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:18.398365Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344757559270834:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.398431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.541487Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913119537, txId: 281474976710672] shutting down >> KqpSysColV1::SelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 63521, MsgBus: 19647 2025-04-17T18:05:13.397116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344758908419798:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.399552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bca/r3tmp/tmp77y8P3/pdisk_1.dat 2025-04-17T18:05:13.858044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:13.880844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:13.880966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:13.883582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63521, node 1 2025-04-17T18:05:14.056563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.056597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.056610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.056733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19647 TClient is connected to server localhost:19647 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:14.961282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:14.981670Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:14.991479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.164762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.356764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.460170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.801352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344771793323246:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.801440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.435572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.469950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.502209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.532464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.567058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.626376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.705227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.799701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344776088291214:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.799765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344776088291220:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.800059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.809772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:17.820354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344776088291222:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:17.887103Z node 1 :TX_PROXY ERROR: Actor# [1:7494344776088291277:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:18.396441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344758908419798:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.396498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.880710Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913119852, txId: 281474976710672] shutting down >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter [GOOD] >> KqpSysColV1::SelectRowAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateView [GOOD] Test command err: Trying to start YDB, gRPC: 11628, MsgBus: 13411 2025-04-17T18:05:02.976823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344710897147360:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:02.977323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ba5/r3tmp/tmpisGJyR/pdisk_1.dat 2025-04-17T18:05:03.391091Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11628, node 1 2025-04-17T18:05:03.442363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:03.442510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:03.445359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:03.484133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:03.484154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:03.484160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:03.484271Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13411 TClient is connected to server localhost:13411 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:04.048496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.065079Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:04.080364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.272680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.437494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.514252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.294747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344728077018301:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.294868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.692014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.720829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.789244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.820720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.851972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.891397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.961340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.012427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344732371986259:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.012516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.012788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344732371986264:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.015756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:07.025764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344732371986266:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:07.085201Z node 1 :TX_PROXY ERROR: Actor# [1:7494344732371986321:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:07.976492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344710897147360:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:07.976561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:08.143159Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344736666953961:2519], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/test_show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:08.144453Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzdiOGZmZTctZDY5MzgwODUtNDFlMjUwODctY2JjOTE5NWU=, ActorId: [1:7494344736666953952:2513], ActorState: ExecuteState, TraceId: 01js2et34q61rdwg35djt0w7sr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:05:08.160293Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344736666953974:2522], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:08.161633Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzdiOGZmZTctZDY5MzgwODUtNDFlMjUwODctY2JjOTE5NWU=, ActorId: [1:7494344736666953952:2513], ActorState: ExecuteState, TraceId: 01js2et35n23s7y016qckjgcqn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 11032, MsgBus: 22845 2025-04-17T18:05:09.030222Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344738626287854:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:09.030285Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ba5/r3tmp/tmphPjsqk/pdisk_1.dat 2025-04-17T18:05:09.137591Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11032, node 2 2025-04-17T18:05:09.178521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:09.178598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:09.181262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:09.187063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:09.187082Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:09.187089Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:09.187187Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22845 TClient is connected to server localhost:22845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeE ... 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.109618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.150075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.217628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.278083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344751511192180:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.278262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.278318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344751511192185:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.281137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:12.289275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344751511192187:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:12.351481Z node 2 :TX_PROXY ERROR: Actor# [2:7494344751511192241:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:13.465453Z node 2 :SYSTEM_VIEWS ERROR: Scan error, actor: [2:7494344755806159947:2526], owner: [2:7494344755806159943:2524], scan id: 0, table id: [1:0:0:show_create], error: Path type mismatch, expected: Table, found: View 2025-04-17T18:05:13.466273Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494344755806159944:2525], TxId: 281474976715673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZmE0OGUzOTgtZDA4ZTI5YzgtZmRiMmJmZWItMzBiOGY2NWQ=. TraceId : 01js2et88v610m2jh73tqwzbts. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494344755806159940:2512], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-17T18:05:13.466558Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmE0OGUzOTgtZDA4ZTI5YzgtZmRiMmJmZWItMzBiOGY2NWQ=, ActorId: [2:7494344755806159874:2512], ActorState: ExecuteState, TraceId: 01js2et88v610m2jh73tqwzbts, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29988, MsgBus: 6466 2025-04-17T18:05:14.298065Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344761251841008:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:14.302170Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ba5/r3tmp/tmpQODVg9/pdisk_1.dat 2025-04-17T18:05:14.509376Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:14.510500Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.510577Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:14.513719Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29988, node 3 2025-04-17T18:05:14.617722Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.617747Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.617754Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.617852Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6466 TClient is connected to server localhost:6466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:15.185991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.195191Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:15.203403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:15.300488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.503117Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.598353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:17.843204Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344774136744653:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.843284Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.901319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.977838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.049737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.114587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.145936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.200028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.271778Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.380301Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344778431712622:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.380379Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.380670Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344778431712627:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.384692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:18.401589Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344778431712629:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:18.492429Z node 3 :TX_PROXY ERROR: Actor# [3:7494344778431712688:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:19.298439Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344761251841008:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:19.298523Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 13767, MsgBus: 15041 2025-04-17T18:05:14.136552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344761399426298:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:14.136589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b95/r3tmp/tmpHRuVvY/pdisk_1.dat 2025-04-17T18:05:14.571865Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:14.579120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.579212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:14.590502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13767, node 1 2025-04-17T18:05:14.679212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.679236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.679248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.679354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15041 TClient is connected to server localhost:15041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:05:15.242905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.271198Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:15.292556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.463643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.622392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:15.715178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.620999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344774284329980:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.621133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.976485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.020170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.068617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.116622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.180665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.249408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.328209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.447130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344778579297949:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.447212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.447638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344778579297954:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.451565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:18.463881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344778579297956:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:18.543542Z node 1 :TX_PROXY ERROR: Actor# [1:7494344778579298011:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:19.138546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344761399426298:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:19.138616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.945113Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913119936, txId: 281474976710672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_BadTransactions [GOOD] Test command err: Trying to start YDB, gRPC: 21674, MsgBus: 4350 2025-04-17T18:05:03.605430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344712365151688:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:03.606134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b9a/r3tmp/tmpBzgR2r/pdisk_1.dat 2025-04-17T18:05:03.952206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:03.994047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:03.994153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21674, node 1 2025-04-17T18:05:03.996568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:04.054666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:04.054686Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:04.054692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:04.054790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4350 TClient is connected to server localhost:4350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:04.630022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.656600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.797754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.961708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:05.031211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.695830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344725250055202:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.695918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.979771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.013629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.040371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.066990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.097200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.132912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.168960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.257158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344729545023158:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.257244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.257412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344729545023163:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.260688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:07.269644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344729545023165:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:07.365298Z node 1 :TX_PROXY ERROR: Actor# [1:7494344729545023222:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:08.584173Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344712365151688:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:08.584238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3137, MsgBus: 11505 2025-04-17T18:05:09.216384Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344739459728583:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:09.216430Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b9a/r3tmp/tmp2O0aDm/pdisk_1.dat 2025-04-17T18:05:09.337182Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3137, node 2 2025-04-17T18:05:09.375060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:09.375136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:09.377079Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:09.458656Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:09.458676Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:09.458683Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:09.458776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11505 TClient is connected to server localhost:11505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:05:09.883324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.889150Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:09.893022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.966140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.106870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... LASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:18.375608Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:18.375634Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:18.375722Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:18.375741Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:18.405019Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.405319Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.410843Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.410920Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.416371Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.416449Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.421746Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.423612Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.428986Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.430971Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:18.436362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.538731Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344779080370870:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.538816Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.539192Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344779080370875:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.542696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:05:18.554008Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344779080370877:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:05:18.633376Z node 3 :TX_PROXY ERROR: Actor# [3:7494344779080370930:2686] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:18.802013Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T18:05:18.802860Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7494344779080370375:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037897;receive=72075186224037894; 2025-04-17T18:05:18.802963Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7494344779080370375:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037897;receive=72075186224037894; 2025-04-17T18:05:18.803393Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T18:05:18.803760Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T18:05:18.804532Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7494344779080370375:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893;receive=72075186224037897; 2025-04-17T18:05:18.804590Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7494344779080370375:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893;receive=72075186224037897; 2025-04-17T18:05:18.805057Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-04-17T18:05:19.197779Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzljYWVkYjEtODk1ZTk0ZDktODkxMDFiNjQtYWQzMzJlYmM=, ActorId: [3:7494344779080371060:2489], ActorState: ExecuteState, TraceId: 01js2etdpkb0g54eqff8qt3anx, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-17T18:05:19.336327Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTUwN2E1ZTItODc4ODU5OTQtMTYxMDIyOTYtYjU3MjEyNGE=, ActorId: [3:7494344783375338378:2496], ActorState: ExecuteState, TraceId: 01js2etdzjefznjce90rwzz176, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 
2025-04-17T18:05:19.530287Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344761900500551:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:19.530369Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.553473Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzNmZWEzNGEtY2M3MDkwNGItNDg3NTkxODItNzI5MmEyMDU=, ActorId: [3:7494344783375338397:2503], ActorState: ExecuteState, TraceId: 01js2ete3m4d7a6xwqb63x4knv, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-17T18:05:19.869838Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTM2ZmE4M2ItMWQ0YTgyMmEtMTg2ZGFlZmItMWJlODJhYQ==, ActorId: [3:7494344783375338420:2513], ActorState: ExecuteState, TraceId: 01js2eteadc4sc3nqrakjzn7zz, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-17T18:05:20.022044Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzEzZmJmNDItMjdhZGEyZjMtMjYxMjA1NTQtOGQ5Yzc3MTg=, ActorId: [3:7494344783375338441:2521], ActorState: ExecuteState, TraceId: 01js2etem77p697hpewxnks9z3, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-17T18:05:20.180296Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715671; 2025-04-17T18:05:20.180694Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715671; 2025-04-17T18:05:20.181103Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715671; 2025-04-17T18:05:20.182273Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7494344774785403071:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889,72075186224037894;receive=72075186224037897; 2025-04-17T18:05:20.182331Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7494344774785403071:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=26;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889,72075186224037894;receive=72075186224037897; 2025-04-17T18:05:20.182465Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7494344774785403071:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=28;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889;receive=72075186224037894; 2025-04-17T18:05:20.182520Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037893;self_id=[3:7494344774785403071:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=29;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889;receive=72075186224037894; 2025-04-17T18:05:20.182919Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715671; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 9749, MsgBus: 9739 2025-04-17T18:05:15.115641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344764121599134:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:15.123144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b80/r3tmp/tmpkYaW1o/pdisk_1.dat 2025-04-17T18:05:15.691702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:15.695471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:15.695598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:15.698721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9749, node 1 2025-04-17T18:05:15.803712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:15.803743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:15.803751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:15.803899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9739 TClient is connected to server localhost:9739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:16.401676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:16.427781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.605078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.777953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.865223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:18.663325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344777006502794:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.663436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.032565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.090192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.167133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.200318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.235650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.284013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.321103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.416962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344781301470757:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.417046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.417470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344781301470762:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.422038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:19.437549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344781301470764:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:19.504060Z node 1 :TX_PROXY ERROR: Actor# [1:7494344781301470821:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:20.117120Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344764121599134:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:20.117188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 13081, MsgBus: 6774 2025-04-17T18:05:15.700977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344766624968513:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:15.701929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6a/r3tmp/tmpN1me34/pdisk_1.dat 2025-04-17T18:05:16.192754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:16.196082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:16.196186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:16.198984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13081, node 1 2025-04-17T18:05:16.278907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:16.278953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:16.278978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:16.279086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6774 TClient is connected to server localhost:6774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:05:16.945132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.982711Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:16.998450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:17.152748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:17.354025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:17.445860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:19.107674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344783804839323:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.107846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.440980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.480003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.512270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.554607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.584861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.618470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.659671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.759925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344783804839991:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.760008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.760285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344783804839996:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.763685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:19.775597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344783804839998:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:19.864918Z node 1 :TX_PROXY ERROR: Actor# [1:7494344783804840055:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:20.687118Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344766624968513:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:20.687190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] >> KqpQueryService::ReadManyRangesAndPoints [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter [GOOD] Test command err: 2025-04-17T18:02:43.884065Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344113982551800:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:43.884121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:02:44.080774Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001877/r3tmp/tmpwwo313/pdisk_1.dat 2025-04-17T18:02:44.390634Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:02:44.395422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:02:44.395518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:02:44.398517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27251, node 1 2025-04-17T18:02:44.572400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001877/r3tmp/yandexbvDp2x.tmp 2025-04-17T18:02:44.572426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001877/r3tmp/yandexbvDp2x.tmp 2025-04-17T18:02:44.572574Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001877/r3tmp/yandexbvDp2x.tmp 2025-04-17T18:02:44.572663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:02:44.786395Z INFO: TTestServer started on Port 6310 GrpcPort 27251 TClient is connected to server localhost:6310 PQClient connected to localhost:27251 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:02:45.146480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.175215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:02:45.184021Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:02:45.197629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:02:45.362603Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T18:02:46.914350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126867454490:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.914357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126867454503:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.914475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.921924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:02:46.926289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344126867454557:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.926502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:02:46.932810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344126867454519:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:02:47.002951Z node 1 :TX_PROXY ERROR: Actor# [1:7494344126867454577:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:02:47.788046Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344131162421889:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:02:47.819547Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmRiZDQ4YjEtY2UzY2ZhMGEtMjgwMDUyNzItZGUyYzUxYQ==, ActorId: [1:7494344126867454487:2336], ActorState: ExecuteState, TraceId: 01js2ens7g8atdp1q21qeapkb2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:02:47.834323Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:02:48.189361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.237114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:02:48.342686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:02:49.443368Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344113982551800:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:02:49.446753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494344144047324104:2658] === CheckClustersList. 
Ok 2025-04-17T18:02:55.151143Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-17T18:02:55.161746Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-17T18:02:55.212007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344165522160839:2787], Recipient [1:7494344118277519511:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:02:55.212098Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:02:55.212145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344165522160835:2784], Recipient [1:7494344118277519511:2190]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-17T18:02:55.212161Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:02:55.254361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:02:55.254748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:02:55.255161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the ... :20.813285Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.813330Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T18:05:20.813828Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344700227038728:2460], Partition 0, Sender [0:0:0], Recipient [6:7494344700227038792:2466], Cookie: 0 2025-04-17T18:05:20.813890Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344700227038792:2466]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.813896Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344743176713276:2843], Partition 1, Sender [0:0:0], Recipient [6:7494344743176713356:2851], Cookie: 0 2025-04-17T18:05:20.813922Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.813960Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.813968Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344743176713356:2851]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.813996Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.814009Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.814029Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.814047Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.814054Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:20.814115Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.814145Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.814202Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T18:05:20.816391Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344743176713275:2842], Partition 2, Sender [0:0:0], Recipient [6:7494344743176713351:2849], Cookie: 0 2025-04-17T18:05:20.816412Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344768946517664:2980], Partition 3, Sender [0:0:0], Recipient [6:7494344768946517754:2989], Cookie: 0 2025-04-17T18:05:20.816472Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344768946517754:2989]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.816475Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344743176713351:2849]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.816493Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.816498Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.816531Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.816535Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.816588Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.816589Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.816611Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.816615Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.816639Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:20.816642Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T18:05:20.823037Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494344652982397322:2139]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T18:05:20.823095Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T18:05:20.823120Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:05:20.823134Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T18:05:20.823212Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-17T18:05:20.823575Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7494344652982397322:2139]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-17T18:05:20.823604Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-17T18:05:20.823615Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T18:05:20.913660Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344768946517660:2979], Partition 4, Sender [0:0:0], Recipient [6:7494344768946517752:2987], Cookie: 0 2025-04-17T18:05:20.913761Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344768946517752:2987]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.913793Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.913854Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.913940Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.913980Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.914021Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:20.914162Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344700227038728:2460], Partition 0, Sender [0:0:0], Recipient [6:7494344700227038792:2466], Cookie: 0 2025-04-17T18:05:20.914234Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344700227038792:2466]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.914252Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.914279Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.914320Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.914340Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.914362Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-17T18:05:20.914410Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344743176713276:2843], Partition 1, Sender [0:0:0], Recipient [6:7494344743176713356:2851], Cookie: 0 2025-04-17T18:05:20.914456Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344743176713356:2851]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.914475Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.914499Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.914541Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.914560Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.914597Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:20.916669Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344743176713275:2842], Partition 2, Sender [0:0:0], Recipient [6:7494344743176713351:2849], Cookie: 0 2025-04-17T18:05:20.916733Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344743176713351:2849]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.916755Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.916756Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7494344768946517664:2980], Partition 3, Sender [0:0:0], Recipient [6:7494344768946517754:2989], Cookie: 0 2025-04-17T18:05:20.916791Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.916823Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7494344768946517754:2989]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.916841Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.916851Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-17T18:05:20.916866Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.916884Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-17T18:05:20.916890Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-17T18:05:20.916937Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-17T18:05:20.916957Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-17T18:05:20.916979Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> KqpQueryService::DdlWithExplicitTransaction [GOOD] >> KqpQueryService::Ddl_Dml >> KqpQueryService::ShowCreateViewOnTable [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 6032, MsgBus: 11196 2025-04-17T18:05:14.944045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344760401248650:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:14.944104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b84/r3tmp/tmp0HNClG/pdisk_1.dat 2025-04-17T18:05:15.477311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:15.477394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:15.480433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:15.514969Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6032, node 1 2025-04-17T18:05:15.606691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:15.606710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:15.606728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:15.606843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11196 TClient is connected to server localhost:11196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:16.188579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:16.215798Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:16.226999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.371818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.537040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:16.612723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.450616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344777581119533:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.450722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:18.828821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.861764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.895183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.939583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.984718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.021428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.080675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.161209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344781876087489:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.161277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.161491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344781876087494:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.164219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:19.172270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344781876087496:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:19.262044Z node 1 :TX_PROXY ERROR: Actor# [1:7494344781876087553:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:19.938464Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344760401248650:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:19.938528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::FailNavigate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyRangesAndPoints [GOOD] Test command err: Trying to start YDB, gRPC: 21740, MsgBus: 64045 2025-04-17T18:05:08.037018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344735446518810:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:08.042975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b5f/r3tmp/tmp0xLcIV/pdisk_1.dat 2025-04-17T18:05:08.417493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:08.459659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:08.459770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21740, node 1 2025-04-17T18:05:08.461395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:08.508560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:08.508583Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:08.508589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:08.508716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64045 TClient is connected to server localhost:64045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:09.026671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.044366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:11.096872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344748331421345:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.096991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.349733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:11.567593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344748331421918:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.567674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.567815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344748331421923:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.571165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:11.580189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344748331421925:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:05:11.660267Z node 1 :TX_PROXY ERROR: Actor# [1:7494344748331421978:2694] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17219, MsgBus: 6417 2025-04-17T18:05:13.406803Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344756570565252:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.406861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b5f/r3tmp/tmp9Lv2Dt/pdisk_1.dat 2025-04-17T18:05:13.576121Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:13.591111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:13.591185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:13.592221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17219, node 2 2025-04-17T18:05:13.636612Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:13.636645Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:13.636657Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:13.636817Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6417 TClient is connected to server localhost:6417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:14.052282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.748021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344769455467784:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.748192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.759145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.848868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344769455468004:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.848951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.849366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344769455468009:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.853238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:16.866366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344769455468011:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:16.928984Z node 2 :TX_PROXY ERROR: Actor# [2:7494344769455468062:2467] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9297, MsgBus: 12963 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b5f/r3tmp/tmp4qys3G/pdisk_1.dat 2025-04-17T18:05:18.385224Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:18.480741Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:18.481170Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:18.481231Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:18.490445Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9297, node 3 2025-04-17T18:05:18.624327Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:18.624347Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:18.624355Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:18.624467Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12963 TClient is connected to server localhost:12963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:19.226909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:19.233462Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:21.877470Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344789122203617:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.877566Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.901130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.062508Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344793417171374:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.062596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.062837Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344793417171379:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.067002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:22.082002Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344793417171381:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:22.139738Z node 3 :TX_PROXY ERROR: Actor# [3:7494344793417171432:2619] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop >> KqpSystemView::Sessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 1112, MsgBus: 12771 2025-04-17T18:05:16.916702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344771207002789:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:16.942287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b2e/r3tmp/tmpWxAFKT/pdisk_1.dat 2025-04-17T18:05:17.478196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:17.478303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:17.481578Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:17.483815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1112, node 1 2025-04-17T18:05:17.612680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:17.612701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:17.612713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:17.612817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12771 TClient is connected to server localhost:12771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:18.248221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:18.264790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:18.277763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:18.478240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.648309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:18.733972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.523824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344788386873682:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:20.523943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:20.815284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:20.840671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:20.873450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:20.909257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:20.961559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.020355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.097995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.206504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344792681841651:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.206632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.207056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344792681841656:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.210720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:21.220362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344792681841659:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:21.291582Z node 1 :TX_PROXY ERROR: Actor# [1:7494344792681841714:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:21.910002Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344771207002789:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:21.910067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:22.988942Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913123009, txId: 281474976710672] shutting down >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> KqpQueryService::ExecuteRetryQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateViewOnTable [GOOD] Test command err: Trying to start YDB, gRPC: 17870, MsgBus: 21492 2025-04-17T18:05:05.607825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344724003047719:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:05.608310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6f/r3tmp/tmpmtex5X/pdisk_1.dat 2025-04-17T18:05:06.103941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:06.104066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:06.106275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:06.129647Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17870, node 1 2025-04-17T18:05:06.246311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:06.246337Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:06.246354Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:06.246491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21492 TClient is connected to server localhost:21492 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:06.788093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.807328Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:06.815202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.985075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:07.134739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.205605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:08.988301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344736887951183:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.988417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.373301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.405688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.437805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.480408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.511011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.543096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.590942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.651852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344741182919138:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.651934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.652116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344741182919143:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.655588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:09.666375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344741182919145:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:09.749457Z node 1 :TX_PROXY ERROR: Actor# [1:7494344741182919201:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.611724Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344724003047719:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:10.611793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13245, MsgBus: 11104 2025-04-17T18:05:11.782052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344747457435640:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:11.782208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6f/r3tmp/tmp7lwKLt/pdisk_1.dat 2025-04-17T18:05:11.914729Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13245, node 2 2025-04-17T18:05:11.949607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:11.949684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:11.951348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:11.958016Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:11.958036Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:11.958044Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:11.958137Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11104 TClient is connected to server localhost:11104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:05:12.388003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:12.405023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:12.489423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:12.625147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:15.179105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:15.248448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:15.341541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344764637307259:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:15.341631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:15.342014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344764637307264:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:15.345954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:15.361620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344764637307267:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:15.463116Z node 2 :TX_PROXY ERROR: Actor# [2:7494344764637307323:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:16.782469Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344747457435640:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:16.782573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29877, MsgBus: 1811 2025-04-17T18:05:17.718480Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344772940276031:2146];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6f/r3tmp/tmpWd03Y5/pdisk_1.dat 2025-04-17T18:05:17.815439Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:17.858927Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:17.872785Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:17.872858Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:17.877419Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29877, node 3 2025-04-17T18:05:17.962739Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:17.962759Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:17.962775Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:17.962927Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1811 TClient is connected to server localhost:1811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:05:18.495465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:18.510218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:18.586602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:05:18.803149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:18.877492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.374899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344790120146880:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.374991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.411004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.507891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.554976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.597683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.639472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.678048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.789903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.871405Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344790120147551:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.871526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.872926Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344790120147556:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.876504Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:21.888262Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344790120147558:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:21.957344Z node 3 :TX_PROXY ERROR: Actor# [3:7494344790120147613:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:22.673735Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344772940276031:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:22.673814Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:23.105586Z node 3 :SYSTEM_VIEWS ERROR: Scan error, actor: [3:7494344798710082578:2524], owner: [3:7494344798710082575:2522], scan id: 0, table id: [1:0:0:show_create], error: Path type mismatch, expected: View, found: Table 2025-04-17T18:05:23.106437Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344798710082576:2523], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01js2ethnp4aakrb5dnfc5b6t3. SessionId : ydb://session/3?node_id=3&id=YzQ2OTg5NTctZWU2NDczNGEtZmNhNTQ1OTAtOGViMDE1NWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7494344798710082572:2513], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-17T18:05:23.106881Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzQ2OTg5NTctZWU2NDczNGEtZmNhNTQ1OTAtOGViMDE1NWY=, ActorId: [3:7494344794415115240:2513], ActorState: ExecuteState, TraceId: 01js2ethnp4aakrb5dnfc5b6t3, Create QueryResponse for error on request, msg: >> KqpQueryService::TableSink_OlapRWQueries [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] Test command err: Trying to start YDB, gRPC: 14094, MsgBus: 28545 2025-04-17T18:04:41.503397Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344619660865813:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:41.503459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c08/r3tmp/tmpVUuGLJ/pdisk_1.dat 2025-04-17T18:04:41.998941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:42.001424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:42.001566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:42.009782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14094, node 1 2025-04-17T18:04:42.246659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:42.246682Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:42.246688Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:42.246796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28545 TClient is connected to server localhost:28545 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:42.908176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:42.938668Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:42.949539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:43.105186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:43.288458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:43.343623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:45.140867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344636840736576:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.140974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.500618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.530590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.561006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.592277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.649112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.725349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.799843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:45.880119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344636840737245:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.880183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.880351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344636840737250:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:45.886756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:45.898595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344636840737252:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:45.985890Z node 1 :TX_PROXY ERROR: Actor# [1:7494344636840737309:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:46.487401Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344619660865813:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:46.487506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:56.998462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:04:56.998488Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 65404, MsgBus: 4146 2025-04-17T18:04:58.779861Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344693266990902:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:58.779936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c08/r3tmp/tmp66dW4N/pdisk_1.dat 2025-04-17T18:04:58.983035Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:59.004137Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:59.004240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:59.005943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65404, node 2 2025-04-17T18:04:59.073967Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:59.074002Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:59.074009Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:59.074124Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4146 TClient is connected to server localhost:4146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:59.662703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:59.673372Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:59.696764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:59.818861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Opera ... 7;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:21.353224Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:21.353376Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:21.353421Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:21.353459Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:21.353495Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:21.353584Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:21.353612Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:21.353649Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:21.353702Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:21.353747Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:21.353788Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:21.353945Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:21.353972Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:21.354001Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:21.354008Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:21.354128Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:21.354131Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:21.354152Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:21.354156Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:21.355143Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:21.355176Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:21.355253Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:21.355278Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:21.355432Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:21.355459Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:21.355537Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:21.355569Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:21.355621Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:21.355644Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:21.355682Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:21.355708Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:21.356188Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:21.356240Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:21.356422Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:21.356453Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:21.356601Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:21.356629Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:21.356774Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:21.356801Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:21.356981Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:21.357005Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:21.406623Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.406820Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.413026Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.413523Z 
node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.419702Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.420810Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.428632Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.430695Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.435567Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.436552Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; 2025-04-17T18:05:21.618515Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710664; 2025-04-17T18:05:22.115487Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344767804636864:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:22.119228Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 64989, MsgBus: 64842 2025-04-17T18:05:18.619726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344779743073583:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.620060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b28/r3tmp/tmpgadVhU/pdisk_1.dat 2025-04-17T18:05:19.071834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:19.071988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:19.074068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:19.085902Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 64989, node 1 2025-04-17T18:05:19.214792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:19.214820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:19.214834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:19.214960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64842 TClient is connected to server localhost:64842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:19.817108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:19.831276Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:19.855562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.868328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.030989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.218658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.311763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:22.084586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344796922944433:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.084720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.417779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.464325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.496340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.523785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.555483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.593981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.648354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.720181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344796922945091:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.720270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.720886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344796922945096:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.725111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480 2025-04-17T18:05:22.736494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344796922945099:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-04-17T18:05:22.823104Z node 1 :TX_PROXY ERROR: Actor# [1:7494344796922945155:3559] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:23.627793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344779743073583:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:23.627858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:23.876909Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7494344801217912799:3804], for# user0@builtin, access# DescribeSchema 2025-04-17T18:05:23.876938Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7494344801217912799:3804], for# user0@builtin, access# DescribeSchema 2025-04-17T18:05:23.887105Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344801217912795:2518], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:23.887498Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjlkN2E3NWEtMzNkMTdkOTUtMzYwZDM0MC04Nzg3ZGNlZQ==, ActorId: [1:7494344801217912788:2514], ActorState: ExecuteState, TraceId: 01js2etjfta20kjy573ac2vxcw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 3487, MsgBus: 2578 2025-04-17T18:05:13.614996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344756171109533:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.615064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b97/r3tmp/tmp1hqNGn/pdisk_1.dat 2025-04-17T18:05:14.071407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:14.093945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.094025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3487, node 1 2025-04-17T18:05:14.110073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:14.254946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.254977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.254988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.255142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2578 TClient is connected to server localhost:2578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:14.887498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:14.938425Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:14.954846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:14.965313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.159623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.349283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.456818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:17.043196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344773350980467:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.043309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.436776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.492364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.532451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.574396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.624474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.722859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.776484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.843170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344773350981133:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.843250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.843411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344773350981138:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.846976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480 2025-04-17T18:05:17.862979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344773350981140:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-04-17T18:05:17.938452Z node 1 :TX_PROXY ERROR: Actor# [1:7494344773350981195:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:18.615057Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344756171109533:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.615118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 ydb-cpp-sdk/dev 2025-04-17T18:05:24.106453Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913124098, txId: 281474976710685] shutting down >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TSchemeShardSubDomainTest::DeclareDefineAndDelete |86.2%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::CreateDropSolomon >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSubDomainTest::CreateDropNbs >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::Redefine >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> KqpService::ToDictCache-UseCache [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] >> KqpQueryService::ExecStatsAst [GOOD] >> KqpQueryService::DmlNoTx >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly Test command err: Trying to start YDB, gRPC: 14369, MsgBus: 6636 2025-04-17T18:04:40.889168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344613188785111:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:40.889290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c10/r3tmp/tmp1XdUiy/pdisk_1.dat 2025-04-17T18:04:41.354593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:41.354692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:41.359416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:41.383942Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14369, node 1 2025-04-17T18:04:41.478816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:41.478854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:04:41.478865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:41.479003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6636 TClient is connected to server localhost:6636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:42.219489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:44.419910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344630368654965:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:44.420001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:44.720266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:44.925956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:44.926164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:44.926434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:44.926557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:44.926650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:44.926747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:44.926833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:44.926927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:44.927018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:44.927107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:44.927220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:44.927329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494344630368655112:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-17T18:04:44.960696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:44.960752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:44.961003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:44.961124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:44.961232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:44.961317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:44.961402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:44.961493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:44.961585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:44.961713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:44.961847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:44.961940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494344630368655121:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:44.972410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494344630368655162:2341];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:44.972553Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494344630368655162:2341];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:44.972774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494344630368655162:2341];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:44.972896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494344630368655162:2341];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:44.973011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494344630368655162:2341];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:44.973108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494344630368655162:2341];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:44.973236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494344630368655162:2341];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;eve ... 6224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:23.427272Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:23.427349Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:23.427657Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:23.427799Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:23.427940Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:23.428081Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:23.428212Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:23.428333Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:23.428458Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:23.428577Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:23.428691Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:23.428801Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:23.432560Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:23.432615Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:23.432709Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:23.432735Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:23.432932Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:23.432965Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:23.433056Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:23.433097Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:23.433162Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:23.433197Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:23.433243Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:23.433272Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:23.433944Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:23.433996Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:23.434189Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:23.434218Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:23.434378Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:23.434419Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:23.434614Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:23.434658Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:23.434787Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:23.434815Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:23.453733Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:23.456544Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:23.460527Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:05:23.483452Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344801621377477:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.483541Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.483638Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344801621377482:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.487806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:23.500330Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344801621377484:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:23.598558Z node 3 :TX_PROXY ERROR: Actor# [3:7494344801621377537:2437] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:24.510431Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344784441507459:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:24.511411Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:24.626997Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-04-17T18:05:24.627210Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-04-17T18:05:24.627276Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7494344801621377373:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037888;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889;receive=72075186224037890; 2025-04-17T18:05:24.627963Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] Test command err: Trying to start YDB, gRPC: 19973, MsgBus: 21983 2025-04-17T18:05:08.939309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344735325289620:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:08.939394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b36/r3tmp/tmpl0YaP9/pdisk_1.dat 2025-04-17T18:05:09.344510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:09.398450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:09.398579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:09.400415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19973, node 1 2025-04-17T18:05:09.439892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:09.439915Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:09.439921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:09.440043Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:21983 TClient is connected to server localhost:21983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:09.942049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.975583Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:09.985218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.116222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.285160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.373160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:11.994170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344748210193287:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.994558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.298337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.329719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.357534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.428701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.457859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.488781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.519980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.608563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344752505161249:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.608673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.608767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344752505161254:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.612896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:12.625043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344752505161256:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:12.707268Z node 1 :TX_PROXY ERROR: Actor# [1:7494344752505161314:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:13.941260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344735325289620:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.941350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1282, MsgBus: 8461 2025-04-17T18:05:14.551392Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344762487141635:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:14.551423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b36/r3tmp/tmpLUFatT/pdisk_1.dat 2025-04-17T18:05:14.681244Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1282, node 2 2025-04-17T18:05:14.723196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.723275Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:14.730827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:14.758652Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.758672Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.758682Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.758775Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8461 TClient is connected to server localhost:8461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:05:15.292346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.298785Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:17.747260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344775372044177:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.747340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.769790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:17.875349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344775372044283:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.875456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.879719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344775372044288:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.885848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:17.897916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344775372044290:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:17.968638Z node 2 :TX_PROXY ERROR: Actor# [2:7494344775372044341:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14360, MsgBus: 62420 2025-04-17T18:05:19.021937Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344777483000575:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:19.022073Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b36/r3tmp/tmpIuDJKH/pdisk_1.dat 2025-04-17T18:05:19.220338Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:19.234691Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:19.234800Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:19.237091Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14360, node 3 2025-04-17T18:05:19.298695Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:19.298721Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:19.298733Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:19.298851Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62420 TClient is connected to server localhost:62420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:19.791630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:19.798759Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:19.803825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.909228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.093570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.225885Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:22.513318Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344794662871501:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.513409Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.578698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.617063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.655749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.698300Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.734074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.811393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.860311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.955984Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344794662872169:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.956079Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.956326Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344794662872174:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.959697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:22.972374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344794662872176:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:23.049040Z node 3 :TX_PROXY ERROR: Actor# [3:7494344798957839529:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:24.022426Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344777483000575:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:24.022494Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable |86.2%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::SchemeQuotas >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] >> KqpQueryService::CloseConnection [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 3065, MsgBus: 30234 2025-04-17T18:05:20.115380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344787533796409:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:20.115443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1c/r3tmp/tmpBzEFo5/pdisk_1.dat 2025-04-17T18:05:20.625278Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:20.629457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:20.629553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:20.631072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3065, node 1 2025-04-17T18:05:20.734371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:20.734392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:20.734400Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:20.734527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30234 TClient is connected to server localhost:30234 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:21.383776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.410410Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:21.425144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.585327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.741485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.827809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:23.593536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344800418700046:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.593639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.925793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.963074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.991175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.018845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.056465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.090784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.139386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.224621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344804713668010:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.224681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.224813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344804713668015:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.228158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:24.238036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344804713668017:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:24.313302Z node 1 :TX_PROXY ERROR: Actor# [1:7494344804713668073:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:25.112844Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344787533796409:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:25.112900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 28533, MsgBus: 14869 2025-04-17T18:04:22.104135Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344539445081756:2164];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:22.104491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c37/r3tmp/tmpL2i8jT/pdisk_1.dat 2025-04-17T18:04:22.455293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:22.475868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:22.475942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:22.477495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28533, node 1 2025-04-17T18:04:22.590044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:22.590088Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:22.590100Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:22.590297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14869 TClient is connected to server localhost:14869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:04:23.163189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.191488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:23.200292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.401316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.576445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:23.662019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:25.135515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344552329985315:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:25.135612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:25.455355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:25.496736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:25.571346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:25.608703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:25.685628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:25.730051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:25.811575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:25.906433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344552329985994:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:25.906494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:25.906552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344552329985999:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:25.910464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:25.919802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344552329986001:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:25.984222Z node 1 :TX_PROXY ERROR: Actor# [1:7494344552329986055:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:27.063896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:27.104060Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344539445081756:2164];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:27.104345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:37.442730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:04:37.442761Z node 1 :IMPORT WARN: Table profiles were not loaded took: 18.667492s took: 18.669561s took: 18.674306s took: 18.675602s took: 18.684710s took: 18.687642s took: 18.688489s took: 18.693976s took: 18.699460s took: 18.723600s Trying to start YDB, gRPC: 16399, MsgBus: 20934 2025-04-17T18:04:46.878321Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344641579774404:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:46.919189Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c37/r3tmp/tmpbNQHSy/pdisk_1.dat 2025-04-17T18:04:47.073481Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:47.096541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:47.096633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:47.099136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16399, node 2 2025-04-17T18:04:47.182797Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:47.182823Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:47.182831Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:47.182963Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20934 TClient is connected to server localhost:20934 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:04:47.715578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.722740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Noti ... ok: 0.841467s took: 0.844858s took: 0.834935s took: 0.835338s took: 0.835925s took: 0.836311s took: 0.882121s took: 0.882132s took: 0.891355s took: 0.892056s took: 0.885296s took: 0.885825s took: 0.886715s took: 0.920996s took: 0.783589s took: 0.783992s took: 0.784757s took: 0.787642s took: 0.897188s took: 0.897134s took: 0.899307s took: 0.901044s took: 0.893777s took: 0.895675s took: 0.897703s took: 0.899455s took: 0.891200s took: 0.892041s took: 0.896679s took: 0.897163s 2025-04-17T18:05:02.009347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:05:02.009380Z node 2 :IMPORT WARN: Table profiles were not loaded took: 0.878544s took: 0.881161s took: 0.882186s took: 0.883823s took: 0.955503s took: 0.959535s took: 0.962545s took: 0.964575s took: 0.968706s took: 0.978792s took: 0.979769s took: 0.980731s took: 0.855527s took: 0.859607s took: 0.859966s took: 0.867287s Trying to start YDB, gRPC: 13318, MsgBus: 14805 2025-04-17T18:05:06.051417Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344724813776432:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:06.051544Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c37/r3tmp/tmpSn0ChA/pdisk_1.dat 2025-04-17T18:05:06.195070Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:06.209591Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:06.209691Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:06.211473Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13318, node 3 2025-04-17T18:05:06.305080Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:06.305098Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:06.305107Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-17T18:05:06.305245Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14805 TClient is connected to server localhost:14805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:06.856212Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.867434Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:09.639958Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344737698678985:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.639959Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344737698678983:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.640039Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344737698678974:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.640323Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.640777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344737698679001:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.640777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344737698679007:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.640824Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344737698679008:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.640827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.645532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:09.650443Z node 3 :TX_PROXY ERROR: Actor# [3:7494344737698679034:2318] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:05:09.650580Z node 3 :TX_PROXY ERROR: Actor# [3:7494344737698678998:2307] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:05:09.650711Z node 3 :TX_PROXY ERROR: Actor# [3:7494344737698679033:2317] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:05:09.663298Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344737698679013:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:09.663370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344737698678997:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:09.663372Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344737698678994:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:09.663398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344737698679014:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:09.737311Z node 3 :TX_PROXY ERROR: Actor# [3:7494344737698679084:2360] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:09.743536Z node 3 :TX_PROXY ERROR: Actor# [3:7494344737698679103:2369] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:09.751919Z node 3 :TX_PROXY ERROR: Actor# [3:7494344737698679111:2375] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:09.766483Z node 3 :TX_PROXY ERROR: Actor# [3:7494344737698679119:2381] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } took: 3.788979s took: 3.788449s took: 3.789617s took: 3.838131s 2025-04-17T18:05:11.049032Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344724813776432:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:11.049099Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 0.887482s took: 0.888583s took: 0.891870s took: 0.892075s took: 0.901659s took: 0.910041s took: 0.916992s took: 0.960645s took: 0.886805s took: 0.891272s took: 0.897745s took: 0.898003s took: 0.897141s took: 0.898321s took: 0.899130s took: 0.899001s took: 0.935865s took: 1.040284s took: 1.042313s took: 1.047264s took: 0.946987s took: 0.948015s took: 0.948040s took: 0.951191s took: 1.058845s took: 1.066422s took: 1.067481s took: 1.071672s took: 1.042116s took: 1.042679s took: 1.050978s took: 1.055465s took: 0.989660s took: 0.991335s took: 0.992223s took: 0.992441s took: 1.040158s took: 1.043194s took: 1.045111s took: 1.054566s 2025-04-17T18:05:21.186253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:05:21.186276Z node 3 :IMPORT WARN: Table profiles were not loaded took: 0.942042s took: 0.948669s took: 0.948865s took: 0.949723s took: 1.005474s took: 1.007749s took: 1.008797s took: 1.010194s took: 0.926767s took: 0.927846s took: 0.929243s took: 1.077167s took: 0.938682s took: 0.939495s took: 0.939716s took: 0.941152s took: 0.990859s took: 0.991239s took: 0.991637s took: 0.992680s >> TSchemeShardSubDomainTest::Redefine [GOOD] >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.021635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.021729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.021766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.021800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.022739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.022779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.022871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.022956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.024189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.109939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.110007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:27.131831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.132230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.132388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.140424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.140612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.141820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.142939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.150119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.155838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.155904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.157071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.157147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.157198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.157320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.171000Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.334022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.334397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.334619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.334865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.334924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.337686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.337819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.337970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.338031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.338082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.338123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.339942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.339999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.340049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.341645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.341690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.341731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.341786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.345703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.347450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.347625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.348529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.348660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.348717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.349157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.349208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.349362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.349469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.351353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.351397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.351569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.351609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.351973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.352017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.352117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.352149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.352214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.352251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T18:05:27.352287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.352324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.352359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.352396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.352453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.352493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.352538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.354464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.354568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.354606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:27.497920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.497959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.498055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:27.498196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.498229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T18:05:27.498263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:05:27.498516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.498558Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-17T18:05:27.498626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:27.498662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:27.498708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:27.498742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:27.498779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T18:05:27.498823Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:27.498867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:05:27.498898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:05:27.499056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T18:05:27.499092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T18:05:27.499122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:05:27.499152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:05:27.499742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:27.499820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:27.499858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:05:27.499895Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:05:27.499935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.500516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:27.500601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:27.500629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:05:27.500655Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:27.500683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:27.500750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T18:05:27.502381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.502430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.502454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.503550Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:27.503650Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:27.504557Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T18:05:27.507138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.507445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-17T18:05:27.508895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:27.509236Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:27.509450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:27.509650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-04-17T18:05:27.510340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:27.510501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-17T18:05:27.511258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.511314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:27.511430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:27.511920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.511975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:27.512038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.526545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:27.526629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:27.527199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:27.527231Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:27.527292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:27.527324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:27.535050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:27.535156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:05:27.535455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:05:27.535493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:05:27.535897Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:05:27.535991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:27.536028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:519:2473] TestWaitNotification: OK eventTxId 102 2025-04-17T18:05:27.536529Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:27.536696Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 176us result status StatusPathDoesNotExist 2025-04-17T18:05:27.536871Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.022009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.022130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.022168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.022221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.023247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.023287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.023369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.023449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.024948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.111864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.111921Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:27.128619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.129049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.129216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.140193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.140389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.141790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.142850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.154772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.156055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.156111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.158357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.158419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.158473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.158627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.166687Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.327952Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.328219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.328457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.328723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.328781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.330957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.331098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.331322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.331401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.331441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.331477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.333520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.333577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.333619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.335886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.335938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.335984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.336049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.339884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.341964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.342200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.343288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.343408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.343463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.343764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.343822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.343987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.344109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.345964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.345998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.346216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.346264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.346675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.346721Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.346807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.346833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.346894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.346921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.346959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.346988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.347012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.347038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.347080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-04-17T18:05:27.347109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.347135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.348598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.348675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.348700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :27.384945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:27.385810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.385858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.385905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:27.385978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-17T18:05:27.386120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.387624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T18:05:27.387713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-17T18:05:27.387973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.388048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.388088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:27.388349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T18:05:27.388402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:27.388562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-17T18:05:27.388634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:27.388681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:27.390495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.390537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.390666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:27.390765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.390807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:05:27.390892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:05:27.391150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.391191Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:27.391315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:27.391361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:27.391400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:27.391434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:27.391469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:27.391508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:27.391546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:27.391577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:27.391637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:27.391673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:27.391739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:05:27.391790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:27.392382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:27.392458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:27.392504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:27.392547Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:27.392593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.393159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:27.393255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:27.393290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:27.393317Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:27.393345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:27.393423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:27.396271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:27.396369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:05:27.399215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.399352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.399484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944
2025-04-17T18:05:27.401409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944
2025-04-17T18:05:27.401558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 101
2025-04-17T18:05:27.401856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-17T18:05:27.401913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
TestWaitNotification wait txId: 102
2025-04-17T18:05:27.402016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-17T18:05:27.402038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-17T18:05:27.402555Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-17T18:05:27.402679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-17T18:05:27.402721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2302]
2025-04-17T18:05:27.402864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-17T18:05:27.402980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T18:05:27.403008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:311:2302]
TestWaitNotification: OK eventTxId 101
TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T18:05:27.021668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:05:27.021761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:27.021801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:05:27.021844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:05:27.022793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:05:27.022858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:05:27.022929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:27.023041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:05:27.024199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:05:27.127605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:05:27.127654Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:27.146895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:05:27.146996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T18:05:27.147162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T18:05:27.153449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T18:05:27.153713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T18:05:27.154391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.154659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T18:05:27.159998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.161180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:05:27.161237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.161349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T18:05:27.161390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:05:27.161430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T18:05:27.161653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.167823Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062]
2025-04-17T18:05:27.329798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:05:27.330014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.330259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T18:05:27.330494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T18:05:27.330555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.332559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.332683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T18:05:27.332845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.332908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T18:05:27.332956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T18:05:27.332992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T18:05:27.334789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.334859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T18:05:27.334892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T18:05:27.336482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.336526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.336567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.336618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.340271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T18:05:27.341936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T18:05:27.342144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T18:05:27.343183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.343312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:05:27.343376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.343692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T18:05:27.343743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.343900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:05:27.343995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T18:05:27.346233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:05:27.346288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:05:27.346481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.346519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T18:05:27.346873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.346920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T18:05:27.347005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:05:27.347051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.347104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:05:27.347134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.347170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T18:05:27.347210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.347247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T18:05:27.347275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T18:05:27.347336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:05:27.347379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T18:05:27.347408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T18:05:27.349124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T18:05:27.349228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T18:05:27.349258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... txid 100:0 3 -> 128
2025-04-17T18:05:27.395740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-04-17T18:05:27.395845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-04-17T18:05:27.397158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.397201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.397236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944
2025-04-17T18:05:27.397273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1
2025-04-17T18:05:27.397391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T18:05:27.398902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816
2025-04-17T18:05:27.399019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 100 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002
2025-04-17T18:05:27.399362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.399476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:05:27.399540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-04-17T18:05:27.399742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240
2025-04-17T18:05:27.399796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-04-17T18:05:27.399929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:05:27.399980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-17T18:05:27.400020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 100
2025-04-17T18:05:27.402050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:05:27.402085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:05:27.402219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-17T18:05:27.402315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.402369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 100, path id: 1
2025-04-17T18:05:27.402411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 100, path id: 2
2025-04-17T18:05:27.402715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.402754Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState
2025-04-17T18:05:27.402937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-04-17T18:05:27.402975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-04-17T18:05:27.403009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-04-17T18:05:27.403036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-04-17T18:05:27.403121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false
2025-04-17T18:05:27.403164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-04-17T18:05:27.403195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0
2025-04-17T18:05:27.403233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0
2025-04-17T18:05:27.403290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-17T18:05:27.403325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0
2025-04-17T18:05:27.403358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-04-17T18:05:27.403385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-04-17T18:05:27.403972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-04-17T18:05:27.404101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-04-17T18:05:27.404135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100
2025-04-17T18:05:27.404170Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-04-17T18:05:27.404205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:05:27.404774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-04-17T18:05:27.404842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-04-17T18:05:27.404882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100
2025-04-17T18:05:27.404909Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-04-17T18:05:27.404936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-17T18:05:27.404990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0
2025-04-17T18:05:27.407606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-04-17T18:05:27.408560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
TestModificationResult got TxId: 100, wait until txId: 100
TestModificationResults wait txId: 101
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 100
2025-04-17T18:05:27.408800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-04-17T18:05:27.408843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
TestWaitNotification wait txId: 101
2025-04-17T18:05:27.408935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-17T18:05:27.408968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-04-17T18:05:27.409347Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-04-17T18:05:27.409444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-04-17T18:05:27.409493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:312:2303]
2025-04-17T18:05:27.409614Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-17T18:05:27.409728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-17T18:05:27.409761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:312:2303]
TestWaitNotification: OK eventTxId 100
TestWaitNotification: OK eventTxId 101
2025-04-17T18:05:27.410133Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T18:05:27.410306Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 172us result status StatusSuccess
2025-04-17T18:05:27.410690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T18:05:27.165072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:05:27.165150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:27.165190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:05:27.165219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:05:27.165278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:05:27.165303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:05:27.165357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:27.165447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:05:27.165750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:05:27.255639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:05:27.255690Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:27.271638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:05:27.272069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T18:05:27.272304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T18:05:27.280871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T18:05:27.281103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T18:05:27.281772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.281997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T18:05:27.284632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.285939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:05:27.286001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.286215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T18:05:27.286267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:05:27.286311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T18:05:27.286458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.292803Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T18:05:27.421608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:05:27.421771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.421917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T18:05:27.422066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T18:05:27.422099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.423702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.423826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T18:05:27.423990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.424048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T18:05:27.424080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T18:05:27.424109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T18:05:27.425629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.425674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T18:05:27.425703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T18:05:27.427455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.427522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.427563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.427615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.435889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T18:05:27.437520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T18:05:27.437689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T18:05:27.438576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.438674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:05:27.438720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.438992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T18:05:27.439039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.439179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:05:27.439266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T18:05:27.440933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:05:27.440971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:05:27.441111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.441146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T18:05:27.441500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.441543Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T18:05:27.441620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:05:27.441651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.441711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:05:27.441740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.441769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T18:05:27.441801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.441832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T18:05:27.441861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T18:05:27.441910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:05:27.441942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T18:05:27.441969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T18:05:27.443503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T18:05:27.443593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T18:05:27.443625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-17T18:05:27.443656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-17T18:05:27.443690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:05:27.443773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-17T18:05:27.446310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-17T18:05:27.446737Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
TestModificationResults wait txId: 100
2025-04-17T18:05:27.447271Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap
2025-04-17T18:05:27.463398Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265])
2025-04-17T18:05:27.465812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:05:27.466045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.466116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.466755Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-17T18:05:27.469124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:05:27.469253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0
2025-04-17T18:05:27.469732Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
TestModificationResult got TxId: 100, wait until txId: 100
TestWaitNotification wait txId: 100
2025-04-17T18:05:27.469994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-04-17T18:05:27.470026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
2025-04-17T18:05:27.470444Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-04-17T18:05:27.470515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-04-17T18:05:27.470548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:284:2275]
TestWaitNotification: OK eventTxId 100
2025-04-17T18:05:27.470961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T18:05:27.471107Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 152us result status StatusPathDoesNotExist
2025-04-17T18:05:27.471273Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140]
2025-04-17T18:05:27.021683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:05:27.021784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:27.021830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:05:27.021871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:05:27.022767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:05:27.022815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:05:27.022909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:27.023037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:05:27.024207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:05:27.114817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:05:27.114883Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:27.137041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:05:27.137176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T18:05:27.137344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T18:05:27.148612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T18:05:27.148998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T18:05:27.149800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.150111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T18:05:27.153983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.155949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:05:27.156010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.156141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T18:05:27.156184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:05:27.156336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T18:05:27.157182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.162420Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062]
2025-04-17T18:05:27.275184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:05:27.276795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.279790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T18:05:27.281137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T18:05:27.281230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.286660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.286832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T18:05:27.287038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.287180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T18:05:27.287219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T18:05:27.287251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T18:05:27.293698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.293777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T18:05:27.293813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T18:05:27.296018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.296068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.296111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.296166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.300688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T18:05:27.303228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T18:05:27.303436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T18:05:27.305582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.305708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:05:27.305775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.310307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T18:05:27.310403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:05:27.310600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:05:27.310706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T18:05:27.315771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:05:27.315817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:05:27.315966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:05:27.316003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T18:05:27.316358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:05:27.316405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T18:05:27.316495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:05:27.316526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.316578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:05:27.316612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.316653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T18:05:27.316689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:05:27.316726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T18:05:27.316785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T18:05:27.316850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:05:27.316887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T18:05:27.316919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T18:05:27.318812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T18:05:27.318949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-17T18:05:27.318980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... athId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:05:27.387912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-04-17T18:05:27.387939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-04-17T18:05:27.387966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:05:27.388705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:05:27.388789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:05:27.388817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-04-17T18:05:27.388842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-17T18:05:27.388879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8
2025-04-17T18:05:27.388937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1
2025-04-17T18:05:27.388964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:277:2268]
2025-04-17T18:05:27.393110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944
2025-04-17T18:05:27.393183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944
2025-04-17T18:05:27.393212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944
2025-04-17T18:05:27.393235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944
2025-04-17T18:05:27.393256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-04-17T18:05:27.393298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944
2025-04-17T18:05:27.394485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T18:05:27.394761Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5
2025-04-17T18:05:27.394897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T18:05:27.394986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T18:05:27.395014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:278:2269]
2025-04-17T18:05:27.395083Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1
2025-04-17T18:05:27.395246Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3
2025-04-17T18:05:27.395355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944
2025-04-17T18:05:27.395669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7
2025-04-17T18:05:27.395928Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6
2025-04-17T18:05:27.396058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-04-17T18:05:27.396241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
2025-04-17T18:05:27.396526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-04-17T18:05:27.396651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-04-17T18:05:27.396836Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2
2025-04-17T18:05:27.396920Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4
2025-04-17T18:05:27.397065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944
2025-04-17T18:05:27.397207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-17T18:05:27.397522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-04-17T18:05:27.397652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-17T18:05:27.397850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944
2025-04-17T18:05:27.397977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-17T18:05:27.398428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T18:05:27.398476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-17T18:05:27.398689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:27.399109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.399160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:27.399226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.403456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-17T18:05:27.407545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:27.407607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:27.407951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T18:05:27.408046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:27.409695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:27.409955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:27.410053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-17T18:05:27.410698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:27.410867Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 152us result status StatusPathDoesNotExist 2025-04-17T18:05:27.411039Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:27.411515Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:27.411685Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 143us result status StatusSuccess 2025-04-17T18:05:27.412260Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.021680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.021774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.021816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.021858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.024091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.024148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.024236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.024324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.024656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.125071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.125123Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-17T18:05:27.141545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.141940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.142109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.150876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.151069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.151766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.151959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.154424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.155874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.155930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.157070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.157128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.157178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.157310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.164173Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.320200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.320421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.320632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.320837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.320896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.323506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.323601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.323635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.325530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.325586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.325623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.327371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.327421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.327465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.327524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.331553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.333389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.333605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.334600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.334735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.334804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.335084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.335135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.335297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.335423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.337414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.337461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.337672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.337717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.338099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.338144Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.338265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.338304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.338365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.338399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.338459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.338506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.338546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.338577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.338637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.338673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.338706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.347198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.347342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.347383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
sReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:27.584286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T18:05:27.584319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:27.584356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:05:27.584381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:05:27.584522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T18:05:27.584555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-17T18:05:27.584582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:05:27.584609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:05:27.585294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:27.585361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:27.585397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:05:27.585435Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:05:27.585466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.586038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:27.586123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:27.586153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:05:27.586456Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:27.586485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:27.586546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T18:05:27.588095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.588139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.588160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.589485Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:27.589661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:05:27.589917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.590241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-17T18:05:27.592156Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T18:05:27.592381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:27.592601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:27.592913Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-17T18:05:27.593746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:27.593899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-17T18:05:27.595499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.595544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:27.595733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:27.596510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:05:27.596611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.596646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:27.596717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.597245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:27.597288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:27.600775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:27.600816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:27.600881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:27.600907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:27.601070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:27.601158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T18:05:27.601442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T18:05:27.601484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T18:05:27.601888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T18:05:27.601973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:05:27.602004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:571:2525] TestWaitNotification: OK eventTxId 104 2025-04-17T18:05:27.602641Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:27.602864Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 195us result status StatusPathDoesNotExist 2025-04-17T18:05:27.603023Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:27.603509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:27.603641Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 132us result status StatusSuccess 2025-04-17T18:05:27.603953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.022005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.022083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.022129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.022160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.023170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.023214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.023281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.023375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.026085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-17T18:05:27.111400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.111455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:27.127866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.127979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.128149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.135948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.137053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.141817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.143045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.167291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.168769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.168833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.168960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.169006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.169056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.169247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.178253Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.312917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.313152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.313369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.313582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.313628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.319251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.319395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.319580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.319642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.319699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.319742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.321527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.321582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.321621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.323459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.323607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.327703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.329417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.329623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.330611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.330730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.330793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.331100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.331156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.331326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-17T18:05:27.331424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.333371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.333417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.333604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.333644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.333979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.334033Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.334172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.334223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.334274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.334322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.334363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.334403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.334440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.334470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.334531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.334572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.334604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.341952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.342083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.342120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-04-17T18:05:27.775085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:27.775128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:05:27.775158Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:05:27.775184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:27.775941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:27.776022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:27.776047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:05:27.776070Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:27.776106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:27.776192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T18:05:27.776945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.776989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.777040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.777069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.777713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:27.777915Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-17T18:05:27.778857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.779159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:27.780359Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T18:05:27.780788Z node 1 :HIVE INFO: 
[72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:27.780908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:27.781124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2025-04-17T18:05:27.781418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:27.781559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:27.782762Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-17T18:05:27.783029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:27.783223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-04-17T18:05:27.784091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.784158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:27.784216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-17T18:05:27.785054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.785107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:27.785240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:27.785846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:27.785938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:27.788405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:27.788456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:27.788532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:27.788568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:27.788622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 
2025-04-17T18:05:27.788646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:27.790620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:27.790680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:27.790909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:27.791087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:27.791144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:27.791184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:27.791312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.792903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:05:27.793149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:05:27.793189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:05:27.793629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:05:27.793744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:27.793780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:544:2499] TestWaitNotification: OK eventTxId 102 2025-04-17T18:05:27.810045Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:27.810286Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 234us result status StatusPathDoesNotExist 2025-04-17T18:05:27.810469Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:27.811151Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:27.811354Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 172us result status StatusPathDoesNotExist 2025-04-17T18:05:27.811480Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.084273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.084345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.084381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.084410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.084466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.084494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.084553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.084623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.084924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-17T18:05:27.162896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.162941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:27.180598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.180958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.181095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.190954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.191110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.191648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.191897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.195190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.196399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.196455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.196631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.196690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.196727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.196847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.202519Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.320123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.320359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.320561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.320768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.320849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323163Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.323325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.323410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.323442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.323472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.326646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.326723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.326766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.328739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.328793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.328838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.328894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.341766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.343777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.343962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.345049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.345157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.345209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.345483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.345547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.345707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-17T18:05:27.345897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.347794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.347838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.348021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.348061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.348444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.348489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.348603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.348639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.348694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.348726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.348761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.348802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.348833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.348864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.348921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.348953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.348985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.350782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.350923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.350965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
17T18:05:27.995042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106 2025-04-17T18:05:27.995059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:05:27.995082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:05:27.996732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:27.996786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:27.996802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-17T18:05:27.996828Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:27.996846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:27.996898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-17T18:05:27.997259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.997326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.997348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.997390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.997421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:27.998313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T18:05:27.998881Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-17T18:05:27.999149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-17T18:05:27.999482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:28.003438Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:28.003758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 
ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.003991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:28.005923Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-04-17T18:05:28.010545Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:28.011489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:28.011743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2025-04-17T18:05:28.013449Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-17T18:05:28.013785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:28.013977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-17T18:05:28.015118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:28.015286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:28.016730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:28.016769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:28.016822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:28.017219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T18:05:28.018756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-17T18:05:28.019056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:28.019102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:28.019238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:28.019911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 
2025-04-17T18:05:28.019958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-17T18:05:28.020050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:28.020070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:28.022101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:28.022138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:28.022275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:28.022297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:28.022348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:28.022380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:28.024438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:28.024539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:28.024578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:28.024640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:28.024840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:28.026271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T18:05:28.026594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T18:05:28.026632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T18:05:28.027137Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T18:05:28.027215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T18:05:28.027245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:910:2810] TestWaitNotification: OK eventTxId 106 2025-04-17T18:05:28.027903Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.028047Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 163us result status StatusSuccess 2025-04-17T18:05:28.028397Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.585021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.585102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.585147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.585182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.585241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.585272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.585329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.585410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.585716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.670779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.670827Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T18:05:27.685724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.686109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.686329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.693900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.694101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.694811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.695058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.697549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.698907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.698967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.699178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.699228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.699282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.699402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.705317Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.837415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.837612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.837779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.837940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.837984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.839918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.840054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.840202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.840277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.840314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.840366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.841979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.842033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.842067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.843738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.843792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.843832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.843887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.846638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.848095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.848275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.849049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.849149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.849191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.849395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.849435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.849581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.849666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.851510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.851553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.851699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.851732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.852071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.852108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.852180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.852205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.852248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.852275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.852314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.852344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.852369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.852398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.852448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.852476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.852504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.853768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.853859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.853887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Seconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:28.029954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:28.029984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:28.030011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:28.030046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:28.030087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:28.030138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:28.031147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:28.031482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:28.055998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:28.057220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:28.057406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:28.057490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:28.057518Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:28.057798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:28.058598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:28.058663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:28.058738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.059455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.059728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T18:05:28.059950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.060026Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:05:28.060179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.060341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.060456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:28.060492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:28.060522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:28.060560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:28.060664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.060786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.060971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-17T18:05:28.061341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.061460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.062560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.062674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.062927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.066347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.066547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.066731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.070337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.070563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.070832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.071020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.071075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.071137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.098983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:28.099057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.099337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:28.099389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:28.099433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:28.099611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-04-17T18:05:28.158696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:28.158749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:458:2408] sender: [1:518:2058] recipient: [1:15:2062] 2025-04-17T18:05:28.159386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:28.159455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:28.159490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:516:2453] TestWaitNotification: OK eventTxId 100 2025-04-17T18:05:28.159868Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.160002Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 147us result status StatusSuccess 2025-04-17T18:05:28.160296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:28.160613Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.160716Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 112us result status StatusSuccess 2025-04-17T18:05:28.160949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.062143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.066331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.066384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.066419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.066481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.066509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.066593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.066673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.067010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.140115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-17T18:05:27.140170Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:27.158226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.158614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.158784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.194655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.194918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.195587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.195805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.211136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.212607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.212668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.212863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.212917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.212965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.213083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.226312Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.365059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.365289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.365491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.365690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.365739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.367558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.367667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-17T18:05:27.367793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.368010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.368043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.368072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.369909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.369961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.369992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.371507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.371550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.371583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.371628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.374982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.376640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.376838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.377749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.377874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.377932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.378218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.378281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.378468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.378573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.380697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.380735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.380875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.380919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.381276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.381321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.381420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.381452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.381507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.381539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.381583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.381624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.381657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.381688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.381746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.381780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.381810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.383487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.383587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.383618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:39 2025-04-17T18:05:28.259967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:39 tabletId 72075186233409584 2025-04-17T18:05:28.260280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-17T18:05:28.260306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-17T18:05:28.260427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-17T18:05:28.260465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-17T18:05:28.260533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-17T18:05:28.260552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-17T18:05:28.260697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-17T18:05:28.260725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-17T18:05:28.260792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-17T18:05:28.260811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-17T18:05:28.260897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-17T18:05:28.260920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-17T18:05:28.260961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-17T18:05:28.260979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-17T18:05:28.261028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:28.261046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:28.261085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-17T18:05:28.261114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-17T18:05:28.261239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2025-04-17T18:05:28.261263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2025-04-17T18:05:28.266892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2025-04-17T18:05:28.266933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2025-04-17T18:05:28.270860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-17T18:05:28.270905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-17T18:05:28.271001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-17T18:05:28.271021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2025-04-17T18:05:28.271128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2025-04-17T18:05:28.271152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:25 tabletId 72075186233409570 2025-04-17T18:05:28.271214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:20 2025-04-17T18:05:28.271235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2025-04-17T18:05:28.271318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2025-04-17T18:05:28.271343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2025-04-17T18:05:28.271403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2025-04-17T18:05:28.271421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2025-04-17T18:05:28.271486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:28.271504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:28.279661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2025-04-17T18:05:28.279711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2025-04-17T18:05:28.279800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-17T18:05:28.279834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-17T18:05:28.279927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T18:05:28.279946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-17T18:05:28.280015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-17T18:05:28.280038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-17T18:05:28.280095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-17T18:05:28.280113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-17T18:05:28.280338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-17T18:05:28.280362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-17T18:05:28.280728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-17T18:05:28.280761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-17T18:05:28.280892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-17T18:05:28.280925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-17T18:05:28.281132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-17T18:05:28.281154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-04-17T18:05:28.281237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-17T18:05:28.281258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-17T18:05:28.281289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 
2025-04-17T18:05:28.281310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:28.281357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-04-17T18:05:28.281400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-17T18:05:28.281531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:28.281605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:28.281711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:28.281747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:28.281825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:28.285295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:05:28.285550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T18:05:28.285600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T18:05:28.286059Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T18:05:28.286163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:05:28.286233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2061:3662] TestWaitNotification: OK eventTxId 103 2025-04-17T18:05:28.286783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.287006Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 241us result status StatusPathDoesNotExist 2025-04-17T18:05:28.287167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:28.287711Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.287862Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 151us result status StatusPathDoesNotExist 2025-04-17T18:05:28.287977Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:05:28.222035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:28.222116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:28.222153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:28.222209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:28.222251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:28.222277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:28.222338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:28.222447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:28.222762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:28.309545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:28.309592Z node 1 :IMPORT WARN: Table profiles
were not loaded 2025-04-17T18:05:28.323564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:28.323666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:28.323876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:28.330524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:28.330851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:28.331450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.331710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:28.336386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.337654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:28.337716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.337842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:28.337886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:28.337923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:28.338106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.344846Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:05:28.481197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:28.481443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.481681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:28.481923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:28.481988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.488305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.488459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:28.488651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.488716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:28.488778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:28.488816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:28.491540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.491606Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:28.491645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:28.495957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.496022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.496069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:28.496135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.500345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:28.502970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:28.503201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:28.504310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.504468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:28.504544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:28.504852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:28.504923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:28.505107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:28.505231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:28.507689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:28.507746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:28.507964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.508017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:28.508390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.508442Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:28.508537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:28.508574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.508632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:28.508675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.508725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:28.508775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.508817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:28.508850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:28.508938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:28.508978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:28.509013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:28.511214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:28.511348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:28.511398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T18:05:28.573794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T18:05:28.574195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.574324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:28.574373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:28.574624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T18:05:28.574677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:28.574849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:28.574908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:28.574955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T18:05:28.577673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:28.577737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:28.577893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:28.578036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.578084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T18:05:28.578167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T18:05:28.578494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.578537Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T18:05:28.578660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T18:05:28.578712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:28.578755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#100:0 progress is 1/1 2025-04-17T18:05:28.578787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:28.578858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T18:05:28.578912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:28.578953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T18:05:28.578984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T18:05:28.579058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:28.579098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-17T18:05:28.579131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:05:28.579178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:28.579971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:28.580056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:28.580086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:28.580128Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:28.580168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:28.581588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:28.581676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:28.581710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:28.581738Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:28.581768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:28.581837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-17T18:05:28.587102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:28.588202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-17T18:05:28.588518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:28.588561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-17T18:05:28.588693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:28.588717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:05:28.589218Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:28.589335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:28.589385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:312:2303] 2025-04-17T18:05:28.589527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:28.589624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:28.589649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:312:2303] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:28.590068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.590274Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 239us result status StatusSuccess 2025-04-17T18:05:28.590745Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-04-17T18:05:28.591229Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.591409Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 184us result status StatusPathDoesNotExist 2025-04-17T18:05:28.591552Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.700525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.700621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.700656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.700689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.700731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.700757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.700804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.700905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.701211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.774248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.774302Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:27.788498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.788594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.788765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.794440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.794741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.795362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.796133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.800202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.801496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.801552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.801667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.801717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.801750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.801927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.808037Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.934731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.934969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.935187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.935425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.935515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.947123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.947281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.947475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.947544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.947587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.947637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.961420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.961495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.961542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.970719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.970782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.970824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.970905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.974668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.979504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.979815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.980984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.981133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.981203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.981525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.981604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.981794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.981910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.984338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.984398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.984555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.984604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.984981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.985042Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.985138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.985175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.985233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.985267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.985303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.985368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.985418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.985449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.985511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.985549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.985581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.987584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.987706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.987748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rsion: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:28.418549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:28.418576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:28.418650Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:05:28.418677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:05:28.418734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T18:05:28.419367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1278 } } 2025-04-17T18:05:28.419409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-17T18:05:28.419532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1278 } } 2025-04-17T18:05:28.419637Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1278 } } 2025-04-17T18:05:28.420326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 624 RawX2: 4294969829 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:05:28.420369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-17T18:05:28.420503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 624 RawX2: 4294969829 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:05:28.420560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:05:28.420679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 624 
RawX2: 4294969829 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:05:28.420753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.420788Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.420837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-17T18:05:28.420877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:05:28.423124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:28.427385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:28.427539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.427673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.427991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.428034Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:28.428132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:28.428166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:28.428216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:28.428252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:28.428290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T18:05:28.428358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:276:2267] message: TxId: 101 2025-04-17T18:05:28.428410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:28.428450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:28.428494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:28.428609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:28.430503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:28.430544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:277:2268] TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:28.431088Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-04-17T18:05:28.431311Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 215us result status StatusSuccess 2025-04-17T18:05:28.431728Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:28.432358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.432549Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 185us result status StatusSuccess 2025-04-17T18:05:28.432937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD]
Test command err:
Trying to start YDB, gRPC: 22653, MsgBus: 24334 2025-04-17T18:05:04.891162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344719020600223:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:04.891565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b89/r3tmp/tmpRH5tA0/pdisk_1.dat 2025-04-17T18:05:05.348631Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22653, node 1 2025-04-17T18:05:05.413136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:05.413436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:05.425434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:05.450656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:05.450689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:05.450696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:05.450811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24334 TClient is connected to server localhost:24334 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:06.029513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.066123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.015840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344736200469927:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.015940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.016161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344736200469939:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.019437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:08.028854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344736200469941:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:08.123101Z node 1 :TX_PROXY ERROR: Actor# [1:7494344736200469992:2345] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:08.427659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:1, at schemeshard: 72057594046644480 2025-04-17T18:05:08.698306Z node 1 :TX_PROXY ERROR: Actor# [1:7494344736200470227:2470] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZmMzODBmZC05M2JmZWYxNC00MWZlNjk5Ny1mMjRkZWQ3Mg==\', error: path is temporary (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:08.708082Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmMzODBmZC05M2JmZWYxNC00MWZlNjk5Ny1mMjRkZWQ3Mg==, ActorId: [1:7494344731905502610:2330], ActorState: ExecuteState, TraceId: 01js2et3p2f2y6g1szq2j20ed2, Create QueryResponse for error on request, msg: 2025-04-17T18:05:08.735799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.736383Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344736200470255:2367], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:08.736591Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2MxZWIyOTUtOTdmYTZhYzUtNTY0ZjRjYzMtNzU4YWY1NmE=, ActorId: [1:7494344736200470251:2366], ActorState: ExecuteState, TraceId: 01js2et3qgck4c0k6f4nfhfbr1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:05:08.740390Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:05:08.743690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.884259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344719020600223:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:09.884336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23528, MsgBus: 27577 2025-04-17T18:05:10.454131Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344745315351426:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:10.455160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b89/r3tmp/tmp3AnMKA/pdisk_1.dat 2025-04-17T18:05:10.580625Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:10.591334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:10.591405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:10.594042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23528, node 2 2025-04-17T18:05:10.646786Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:10.646805Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:10.646811Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:10.646920Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27577 TClient is connected to server localhost:27577 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:11.068391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:11.074924Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:13.320991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344758200253928:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:13.321085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:13.321758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344758200253944:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:13.327867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:05:13.338625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344758200253946:2339], DatabaseId: /Root, PoolId: defa ... or;event=timeout;self_id=[4:7494344792754821050:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:26.568925Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:26.854512Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:26.963662Z node 4 :TX_PROXY ERROR: Actor# [4:7494344814229660496:3862] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-04-17T18:05:26.963777Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715673, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-04-17T18:05:26.963922Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2FlMDA2MmQtNjNhYzZmYTYtM2EwMWQ0OGMtMzk0NzYwYjU=, ActorId: [4:7494344814229660464:2530], ActorState: ExecuteState, TraceId: 01js2etngz1s58eaz5mz6ckmzg, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913125648, tx_id: 281474976715669 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122981, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122687, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125445, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125529, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125284, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125319, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125361, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122813, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125396, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913126929, tx_id: 281474976715672 } } Scheme entry: { name: TuplePrimaryDescending, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125606, tx_id: 281474976715668 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122505, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-17T18:05:27.014549Z node 4 :TX_PROXY ERROR: Actor# [4:7494344818524627812:3873] txid# 281474976715675, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-04-17T18:05:27.015221Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715675, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-04-17T18:05:27.015397Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2FlMDA2MmQtNjNhYzZmYTYtM2EwMWQ0OGMtMzk0NzYwYjU=, ActorId: [4:7494344814229660464:2530], ActorState: ExecuteState, TraceId: 01js2etnjq4qrgaxebfhmjkdzk, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913125648, tx_id: 281474976715669 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122981, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122687, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125445, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125529, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125284, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125319, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125361, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122813, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125396, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913126929, tx_id: 281474976715672 } } Scheme entry: { name: TuplePrimaryDescending, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125606, tx_id: 281474976715668 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122505, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-17T18:05:27.055068Z node 4 :TX_PROXY ERROR: Actor# [4:7494344818524627832:3884] txid# 281474976715677, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:27.056017Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715677, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-17T18:05:27.056159Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2FlMDA2MmQtNjNhYzZmYTYtM2EwMWQ0OGMtMzk0NzYwYjU=, ActorId: [4:7494344814229660464:2530], ActorState: ExecuteState, TraceId: 01js2etnkyewv5dnwnxapbkhb1, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-17T18:05:27.099844Z node 4 :TX_PROXY ERROR: Actor# [4:7494344818524627848:3892] txid# 281474976715679, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:27.100270Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715679, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-17T18:05:27.100424Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2FlMDA2MmQtNjNhYzZmYTYtM2EwMWQ0OGMtMzk0NzYwYjU=, ActorId: [4:7494344814229660464:2530], ActorState: ExecuteState, TraceId: 01js2etnmyc9yp01khn1ffnn5w, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges) Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913125648, tx_id: 281474976715669 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122981, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122687, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125445, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125529, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125284, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125319, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125361, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122813, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125396, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913126929, tx_id: 281474976715672 } } Scheme entry: { name: TuplePrimaryDescending, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913125606, tx_id: 281474976715668 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913122505, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-17T18:05:27.145672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.021689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.021795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.021839Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.021876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.023172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.023228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.023319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.023401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.025146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.116783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.116832Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:27.133296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.133650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.134499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.142048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.142233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.142916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.143103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.148362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.156623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.156695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.158376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.158455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.158510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.158803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.166634Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.290298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.290539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.290770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.291010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.291059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.293076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.293203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.293386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.293455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.293492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.293538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.296129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.296200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.296238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.299364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.299436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.299477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.299532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.303472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.305506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.305713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-04-17T18:05:27.306774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.306912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.306961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.307246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.307303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.307591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.307746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.310612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.310658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.310892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.310933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.311307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.311351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.311441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.311473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.311528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.311561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.311602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.311645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.311678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.311707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.311766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.311803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.311834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-04-17T18:05:27.321811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.321951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.321987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 341723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:28.341854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:28.341985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.342021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-17T18:05:28.342065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-17T18:05:28.342361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.342409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-04-17T18:05:28.342519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:28.342579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:28.342621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:28.342653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:28.342692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T18:05:28.342728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:28.342765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:05:28.342797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:05:28.342978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:28.343020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-17T18:05:28.343065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:05:28.343097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:05:28.343837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:28.343920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:28.343954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:05:28.343997Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:05:28.344037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:28.345049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:28.345133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:28.345161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:05:28.345189Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:28.345216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:28.345288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T18:05:28.347384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:28.347440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:28.348411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:05:28.348833Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:28.349059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.349314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:28.349517Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2025-04-17T18:05:28.351209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:28.351428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:28.351938Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:28.351983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:28.352116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2025-04-17T18:05:28.353024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:28.353069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:28.353136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:28.353522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:05:28.355870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:28.355927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:28.356445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:28.356495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:28.357099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:28.357198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T18:05:28.357592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T18:05:28.357631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T18:05:28.358127Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T18:05:28.358234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:05:28.358274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2096:3701] TestWaitNotification: OK eventTxId 104 2025-04-17T18:05:28.366649Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.366881Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 251us result status StatusPathDoesNotExist 2025-04-17T18:05:28.367086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:28.367802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:28.367971Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 162us result status StatusPathDoesNotExist 2025-04-17T18:05:28.368129Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CloseConnection [GOOD] Test command err: Trying to start YDB, gRPC: 26751, MsgBus: 7241 2025-04-17T18:04:16.522960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344513889817296:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:16.523342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c50/r3tmp/tmpwA6ZgI/pdisk_1.dat 2025-04-17T18:04:16.937199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:16.971093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:16.971187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:16.973810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26751, node 1 2025-04-17T18:04:17.110704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.110723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.110728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.110845Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7241 TClient is connected to server localhost:7241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:17.758303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.796680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.936926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:18.143895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:04:18.211732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:19.767543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344526774720955:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:19.767640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.172526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.249880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.282404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.320181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.350731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.379173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.445259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:20.528599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344531069688924:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.528667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.528749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344531069688929:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:20.531855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:20.542260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344531069688931:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:20.601579Z node 1 :TX_PROXY ERROR: Actor# [1:7494344531069688987:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:21.522018Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344513889817296:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:21.522093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28189, MsgBus: 10153 2025-04-17T18:04:23.584500Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344543121780333:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:23.584573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c50/r3tmp/tmp0zdkqA/pdisk_1.dat 2025-04-17T18:04:23.697332Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28189, node 2 2025-04-17T18:04:23.735426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:23.735500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:23.755566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:23.826843Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:23.826879Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:23.826885Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:23.826999Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10153 TClient is connected to server localhost:10153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:04:24.227041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:24.234144Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:24.248242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:24.333741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:24.488160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... dle abort execution event from: [4:7494344805256422650:2554], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.388719Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:24.389155Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422661:2557], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01js2etk0h9fx2znxfxtc7e0h6. SessionId : ydb://session/3?node_id=4&id=YmQ0MDlhYzAtN2EwODJlODMtZDcyMmQyNDQtYTE5OWQxY2M=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494344805256422650:2554], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.389229Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422660:2556], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YmQ0MDlhYzAtN2EwODJlODMtZDcyMmQyNDQtYTE5OWQxY2M=. CustomerSuppliedId : . TraceId : 01js2etk0h9fx2znxfxtc7e0h6. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422650:2554], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.389495Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422663:2558], TxId: 281474976715672, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YmQ0MDlhYzAtN2EwODJlODMtZDcyMmQyNDQtYTE5OWQxY2M=. TraceId : 01js2etk0h9fx2znxfxtc7e0h6. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422650:2554], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.389805Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YmQ0MDlhYzAtN2EwODJlODMtZDcyMmQyNDQtYTE5OWQxY2M=, ActorId: [4:7494344805256422648:2554], ActorState: ExecuteState, TraceId: 01js2etk0h9fx2znxfxtc7e0h6, Create QueryResponse for error on request, msg: 2025-04-17T18:05:24.416710Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344805256422686:2562] TxId: 281474976715674. Ctx: { TraceId: 01js2etk177yv81n7cnbhsmy2a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NWQzNjAxNC1jY2YyMGE3NS1kNzE1MTUxMy0yNzlkMzFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:05:24.416858Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422694:2566], TxId: 281474976715674, task: 3. Ctx: { TraceId : 01js2etk177yv81n7cnbhsmy2a. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NWQzNjAxNC1jY2YyMGE3NS1kNzE1MTUxMy0yNzlkMzFkZg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422686:2562], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.416891Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422692:2564], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWQzNjAxNC1jY2YyMGE3NS1kNzE1MTUxMy0yNzlkMzFkZg==. TraceId : 01js2etk177yv81n7cnbhsmy2a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422686:2562], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.417488Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422695:2567], TxId: 281474976715674, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWQzNjAxNC1jY2YyMGE3NS1kNzE1MTUxMy0yNzlkMzFkZg==. CustomerSuppliedId : . TraceId : 01js2etk177yv81n7cnbhsmy2a. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422686:2562], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.417512Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422696:2568], TxId: 281474976715674, task: 5. Ctx: { TraceId : 01js2etk177yv81n7cnbhsmy2a. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NWQzNjAxNC1jY2YyMGE3NS1kNzE1MTUxMy0yNzlkMzFkZg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422686:2562], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.417765Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:24.417779Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422693:2565], TxId: 281474976715674, task: 2. Ctx: { TraceId : 01js2etk177yv81n7cnbhsmy2a. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NWQzNjAxNC1jY2YyMGE3NS1kNzE1MTUxMy0yNzlkMzFkZg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494344805256422686:2562], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.418362Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NWQzNjAxNC1jY2YyMGE3NS1kNzE1MTUxMy0yNzlkMzFkZg==, ActorId: [4:7494344805256422683:2562], ActorState: ExecuteState, TraceId: 01js2etk177yv81n7cnbhsmy2a, Create QueryResponse for error on request, msg: 2025-04-17T18:05:24.465746Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:24.466400Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344805256422770:2578] TxId: 281474976715677. Ctx: { TraceId: 01js2etk2zarsr3t9m1fqzkyqv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQ3OTJkNTktM2FkYjAxNDEtZDdiYTUyYzEtOGQxMjgwMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:05:24.466535Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422779:2582], TxId: 281474976715677, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01js2etk2zarsr3t9m1fqzkyqv. SessionId : ydb://session/3?node_id=4&id=ZDQ3OTJkNTktM2FkYjAxNDEtZDdiYTUyYzEtOGQxMjgwMmQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422770:2578], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.495790Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:24.540642Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344805256422770:2578] TxId: 281474976715677. Ctx: { TraceId: 01js2etk2zarsr3t9m1fqzkyqv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQ3OTJkNTktM2FkYjAxNDEtZDdiYTUyYzEtOGQxMjgwMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-04-17T18:05:24.541003Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344805256422781:2584], TxId: 281474976715677, task: 5. Ctx: { TraceId : 01js2etk2zarsr3t9m1fqzkyqv. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZDQ3OTJkNTktM2FkYjAxNDEtZDdiYTUyYzEtOGQxMjgwMmQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344805256422770:2578], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:24.541441Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDQ3OTJkNTktM2FkYjAxNDEtZDdiYTUyYzEtOGQxMjgwMmQ=, ActorId: [4:7494344805256422767:2578], ActorState: ExecuteState, TraceId: 01js2etk2zarsr3t9m1fqzkyqv, Create QueryResponse for error on request, msg: 2025-04-17T18:05:25.181333Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:25.181706Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344809551390926:2789] TxId: 281474976715704. Ctx: { TraceId: 01js2etkst09mehams3ejj60t7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OGRhMjJkNmEtMmUwZjk1MjgtZjUxZGU2MTEtYjA3M2RhYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:05:25.182042Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGRhMjJkNmEtMmUwZjk1MjgtZjUxZGU2MTEtYjA3M2RhYmU=, ActorId: [4:7494344809551390923:2789], ActorState: ExecuteState, TraceId: 01js2etkst09mehams3ejj60t7, Create QueryResponse for error on request, msg: 2025-04-17T18:05:25.402248Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:25.402846Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344809551391121:2839] TxId: 281474976715712. Ctx: { TraceId: 01js2etkz9c5zft0ap5hzxn49m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Yjc1NTY4MTgtNGZmMjA0ZS0xMjRhZTdkMy1mZDYzMGQyMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:05:25.423247Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Yjc1NTY4MTgtNGZmMjA0ZS0xMjRhZTdkMy1mZDYzMGQyMw==, ActorId: [4:7494344809551391118:2839], ActorState: ExecuteState, TraceId: 01js2etkz9c5zft0ap5hzxn49m, Create QueryResponse for error on request, msg: 2025-04-17T18:05:25.611239Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:25.611897Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344809551391336:2895] TxId: 281474976715720. Ctx: { TraceId: 01js2etm5gdwa2emeexx6h85k9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZGE4ZjBjMGUtZDY5NzJjZmUtMjFhYTIyYjYtODc1NTY4ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:05:25.624953Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344809551391345:2900], TxId: 281474976715720, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01js2etm5gdwa2emeexx6h85k9. SessionId : ydb://session/3?node_id=4&id=ZGE4ZjBjMGUtZDY5NzJjZmUtMjFhYTIyYjYtODc1NTY4ZDc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344809551391336:2895], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:25.625619Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGE4ZjBjMGUtZDY5NzJjZmUtMjFhYTIyYjYtODc1NTY4ZDc=, ActorId: [4:7494344809551391333:2895], ActorState: ExecuteState, TraceId: 01js2etm5gdwa2emeexx6h85k9, Create QueryResponse for error on request, msg: 2025-04-17T18:05:25.891486Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:05:25.892107Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344809551391591:2960] TxId: 281474976715729. Ctx: { TraceId: 01js2etmdzb3rrjybv233kd4xv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MTg5ZWQxOGEtN2MwYjVkYTQtYTM4YzAzODMtMmE0MDgxODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:05:25.892223Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344809551391601:2966], TxId: 281474976715729, task: 5. Ctx: { TraceId : 01js2etmdzb3rrjybv233kd4xv. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTg5ZWQxOGEtN2MwYjVkYTQtYTM4YzAzODMtMmE0MDgxODQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494344809551391591:2960], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:25.897814Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344809551391598:2963], TxId: 281474976715729, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MTg5ZWQxOGEtN2MwYjVkYTQtYTM4YzAzODMtMmE0MDgxODQ=. TraceId : 01js2etmdzb3rrjybv233kd4xv. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494344809551391591:2960], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:05:25.899545Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTg5ZWQxOGEtN2MwYjVkYTQtYTM4YzAzODMtMmE0MDgxODQ=, ActorId: [4:7494344809551391588:2960], ActorState: ExecuteState, TraceId: 01js2etmdzb3rrjybv233kd4xv, Create QueryResponse for error on request, msg: >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> KqpSystemView::NodesRange2 [GOOD] >> TSchemeShardSubDomainTest::DeleteAndRestart >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.021659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.021777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.021824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.021874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.022796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.022851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.022931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.023034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.024374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.110973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.111025Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:27.128518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.128910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.129048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.138943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.139121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.141801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-04-17T18:05:27.143088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.149325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.155913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.155993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.157086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.157151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.157204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.157335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.165942Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.295899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.296162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.296391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.296627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.296683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.298896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.299039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.299224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.299286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.299349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.299399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.301288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.301350Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.301387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.303224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.303274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.303322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.303391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.307233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.309128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.309325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.310345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.310471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.310527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.310836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.310912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.311085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.311167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.313246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.313302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.313508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.313570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.313945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.313992Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.314089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.314136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.314198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.314232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.314275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.314314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.314353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.314384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.314447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.314500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.314534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.316400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.316538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.316580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
46678944 2025-04-17T18:05:29.261541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 436 RawX2: 8589936991 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-17T18:05:29.261593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:29.261635Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.261676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-17T18:05:29.261712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T18:05:29.261739Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2025-04-17T18:05:29.263813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.264300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.264352Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2025-04-17T18:05:29.264409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:05:29.264467Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2025-04-17T18:05:29.264558Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-17T18:05:29.264596Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 2025-04-17T18:05:29.266581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.266638Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-17T18:05:29.266748Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:29.266788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:29.266845Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:29.266881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:29.266922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-17T18:05:29.267040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:639:2561] message: TxId: 106 2025-04-17T18:05:29.267095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:29.267137Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 106:0 2025-04-17T18:05:29.267175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-17T18:05:29.267316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T18:05:29.267363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:29.269332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T18:05:29.269392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:816:2714] TestWaitNotification: OK eventTxId 106 2025-04-17T18:05:29.270154Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:29.270425Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 311us result status StatusSuccess 2025-04-17T18:05:29.270849Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:29.271640Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:29.271848Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 226us result status StatusSuccess 2025-04-17T18:05:29.272210Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:29.272905Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:29.273071Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 216us result status StatusSuccess 2025-04-17T18:05:29.273488Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 
SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> TSchemeShardSubDomainTest::DeclareAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 20442, MsgBus: 15942 2025-04-17T18:05:03.583050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344714003152574:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:03.584324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b9e/r3tmp/tmpTcE8Ay/pdisk_1.dat 2025-04-17T18:05:04.089445Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:04.090465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:04.090581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:04.096347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20442, node 1 2025-04-17T18:05:04.194716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:04.194746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:04.194753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:04.194866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15942 TClient is connected to server localhost:15942 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:04.783283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.807503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.981682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:05.195284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:05.278074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.874872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344726888056186:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.875021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.260906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.302215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.341117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.368986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.396038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.428410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.495217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.573691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344731183024153:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.573752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.573905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344731183024158:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.576810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:07.585238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344731183024160:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:07.677546Z node 1 :TX_PROXY ERROR: Actor# [1:7494344731183024218:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:08.582870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.583445Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344714003152574:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:08.583513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:08.586575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.587836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.345391Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.374786Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.402192Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.425915Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.446907Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.463835Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.480925Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.539290Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.564518Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.586246Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.615058Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.634720Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-17T18:05:09.653562Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa7b5723-e44778f5-445b326c-9c1e4edd, reply PRECONDITION_FAILED, issues: {
: Erro ... default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:15.362554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:15.376125Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T18:05:15.377597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344764382251147:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:15.434226Z node 2 :TX_PROXY ERROR: Actor# [2:7494344764382251202:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:16.503311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.504756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.506016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.592553Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344747202379510:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:16.592641Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:18.973300Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 5cc66331-b6f06da6-945049c2-43fa8bfc, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ODBlZGQzZjEtNThiYzA3YWItNTE3ODVkYTctNjk0OGFjMDg=, TxId: 2025-04-17T18:05:19.002352Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 5cc66331-b6f06da6-945049c2-43fa8bfc, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NjdlY2Y0MjEtNDVjY2U1OWItMmE4ZjA1MmQtZjY1YjNkY2E=, TxId: 2025-04-17T18:05:19.017869Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 5cc66331-b6f06da6-945049c2-43fa8bfc, reply NOT_FOUND, issues: {
: Error: No such execution } Trying to start YDB, gRPC: 14778, MsgBus: 13232 2025-04-17T18:05:20.367493Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344784867557013:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b9e/r3tmp/tmpRllw3I/pdisk_1.dat 2025-04-17T18:05:20.389144Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:20.496781Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:20.510852Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:20.510955Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:20.512451Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14778, node 3 2025-04-17T18:05:20.566685Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:20.566710Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:20.566720Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:20.566838Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13232 TClient is connected to server localhost:13232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:21.076116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.089692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.171419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.353270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:21.440660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:23.894316Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344797752460521:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.894424Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.956805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.024575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.061183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.125084Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.156833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.199572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.238819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.330850Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344802047428484:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.330936Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.331209Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344802047428489:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.334690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:24.347573Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344802047428491:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:24.414563Z node 3 :TX_PROXY ERROR: Actor# [3:7494344802047428548:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:25.300607Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344784867557013:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:25.300667Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:25.404110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.405861Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.409423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryService::ExecuteQueryMultiResult [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> TSchemeShardSubDomainTest::SimultaneousDeclare >> KqpDocumentApi::RestrictDrop [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> TSchemeShardSubDomainTest::SimultaneousDefine >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::RedefineErrors >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> KqpQueryService::ExecuteRetryQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:28.002390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:28.002470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:28.002506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-04-17T18:05:28.002536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:28.002591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:28.002621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:28.002671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:28.002741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:28.003051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:28.096945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:28.097004Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:28.113133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:28.113506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:28.113673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:28.121705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:28.121913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:28.122525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.122710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:28.125738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.127060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:28.127115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.127301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:28.127350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:28.127397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:28.127511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.133431Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:28.255733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:28.255959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.256193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:28.256399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:28.256446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.258604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:28.258752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:28.258937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.259011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:28.259051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:28.259080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:28.260901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.260952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:28.260996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:28.270197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.270254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.270297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:28.270364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.274014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:28.279379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:28.279603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:28.280509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-17T18:05:28.280625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:28.280676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:28.280921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:28.280959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:28.281100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:28.281196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:28.283515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:28.283560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:28.283732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:28.283770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:28.284110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:28.284162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:28.284255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:28.284296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.284367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:28.284399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.284432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:28.284468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:28.284498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:28.284531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:28.284591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:28.284627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:28.284657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:28.286444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:28.286554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:28.286651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... bletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.297548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.297653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944 2025-04-17T18:05:30.298031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2025-04-17T18:05:30.298104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-04-17T18:05:30.298158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2025-04-17T18:05:30.298264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-04-17T18:05:30.298303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2 2025-04-17T18:05:30.298865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.298933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.299049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2025-04-17T18:05:30.299108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-04-17T18:05:30.301197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2025-04-17T18:05:30.301358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-17T18:05:30.301606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.301649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:30.301864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 
2025-04-17T18:05:30.301954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.301992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1036:2898], at schemeshard: 72057594046678944, txId: 137, path id: 2 2025-04-17T18:05:30.302043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1036:2898], at schemeshard: 72057594046678944, txId: 137, path id: 10 2025-04-17T18:05:30.302481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.302540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-04-17T18:05:30.302775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-17T18:05:30.303563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-17T18:05:30.303671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-17T18:05:30.303711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-17T18:05:30.303749Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2025-04-17T18:05:30.303793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2025-04-17T18:05:30.304928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-17T18:05:30.304997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-17T18:05:30.305021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-17T18:05:30.305060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2025-04-17T18:05:30.305091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-17T18:05:30.305155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-04-17T18:05:30.307221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 
268697601 2025-04-17T18:05:30.307345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-04-17T18:05:30.307388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-17T18:05:30.308041Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-17T18:05:30.308281Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2025-04-17T18:05:30.308428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-17T18:05:30.308467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-17T18:05:30.308634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-17T18:05:30.308705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-17T18:05:30.308813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-17T18:05:30.308904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3 2025-04-17T18:05:30.309784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-17T18:05:30.312192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-17T18:05:30.312363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.312496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.312542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2025-04-17T18:05:30.312615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-04-17T18:05:30.312974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 984 RawX2: 4294970153 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 
\010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2025-04-17T18:05:30.316636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2025-04-17T18:05:30.316783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.098066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.098157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.098286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.098328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.098411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.098447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.098521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.098604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.098984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.174168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.174313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.193274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.193744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.193941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.203240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.203496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.204266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.204523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.209176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.210594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.210748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.211079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.211147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.211201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.211717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.221030Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.373215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.373478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.373708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.373944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.374002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.376301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.376448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:30.376628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.376693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.376734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.376769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.378612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.378667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.378707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.381514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.381560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.381608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.381671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.385426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.387333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.387573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.388509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.388627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.388686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.388961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.389013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-17T18:05:30.389170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.389292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.391376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.391419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.391605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.391651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.392005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.392061Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.392176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.392211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.392274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.392310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.392342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.392400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.392441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.392471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.392534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.392576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.392627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.394413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.394521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.394585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.623365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2025-04-17T18:05:30.623558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2025-04-17T18:05:30.623626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-17T18:05:30.623662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-04-17T18:05:30.623701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-17T18:05:30.626171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.626349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.626389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.626443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:30.626486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-17T18:05:30.626611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.630350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T18:05:30.630511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-17T18:05:30.630869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.630990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.631062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:30.631450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T18:05:30.631505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:30.631658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.631752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-17T18:05:30.631806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:30.633649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.633705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.633850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:30.633947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.634002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:05:30.634052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:05:30.634351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.634400Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:30.634500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:30.634534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:30.634589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:30.634638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:30.634681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:30.634741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:30.634786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:30.634832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:30.635060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-17T18:05:30.635112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-04-17T18:05:30.635147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:05:30.635178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:30.635876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2025-04-17T18:05:30.635974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:30.636040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:30.636079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:30.636114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.636950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:30.637028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:30.637056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:30.637083Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:30.637109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-17T18:05:30.637185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-17T18:05:30.637225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:567:2476] 2025-04-17T18:05:30.642194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:30.642642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:30.642722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:30.642753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:568:2477] TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:30.643270Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:30.643500Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 209us result status StatusSuccess 2025-04-17T18:05:30.643943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.040819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.040880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.040911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.040941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.040990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.041013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.041060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.041121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.041358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.128269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.128318Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.145838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-17T18:05:30.146269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.146440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.153980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.154192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.154759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.154957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.158159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.159602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.159675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.159962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.160044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.160101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.160261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.170987Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.293544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.293785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.293983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.294233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.294289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.299181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.299317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:30.299520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.299582Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.299636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.299670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.303310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.303367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.303402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.307349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.307390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.307420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.307466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.310112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.314238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.314456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.315294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.315414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.315460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.315773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.315826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.315965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.316060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.319308Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.319341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.319511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.319544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.319845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.319884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.319966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.319991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.320028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.320049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.320073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.320113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.320147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.320164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.320211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.320233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.320253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.321713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.321818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.321849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-17T18:05:30.656612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T18:05:30.657550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:30.657628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:30.657655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:05:30.657681Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:05:30.657725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:05:30.657788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T18:05:30.659088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1354 } } 2025-04-17T18:05:30.659142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-17T18:05:30.659274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1354 } } 2025-04-17T18:05:30.659371Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1354 } } 2025-04-17T18:05:30.660538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 504 RawX2: 4294969752 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:05:30.660590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-17T18:05:30.660727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 504 RawX2: 4294969752 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:05:30.660786Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
2025-04-17T18:05:30.660887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 504 RawX2: 4294969752 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:05:30.660956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.660993Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.661035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T18:05:30.661075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T18:05:30.664387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:30.664525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:30.664596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.664664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.664886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.664928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:05:30.665059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:30.665097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:30.665133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:30.665165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:30.665216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:05:30.665290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:306:2297] message: TxId: 102 2025-04-17T18:05:30.665336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:30.665377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:05:30.665431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:05:30.665596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:30.667155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:30.667186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:532:2476] TestWaitNotification: OK eventTxId 102 2025-04-17T18:05:30.667535Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:30.667686Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 160us result status StatusSuccess 2025-04-17T18:05:30.668070Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.668710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:30.668897Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 188us result status StatusSuccess 2025-04-17T18:05:30.669200Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiResult [GOOD] Test command err: Trying to start YDB, gRPC: 5796, MsgBus: 8692 2025-04-17T18:05:05.434734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344720813498324:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:05.435019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b7b/r3tmp/tmphmH7cr/pdisk_1.dat 2025-04-17T18:05:05.835649Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5796, node 1 2025-04-17T18:05:05.870554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:05.870657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:05.877864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:05.932107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:05.932126Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:05.932136Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:05.932258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8692 TClient is connected to server localhost:8692 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:06.444000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.462700Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:06.481076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:06.616893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.759936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.825737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:08.374912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344733698401964:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.375030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.681324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.710549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.737558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.764408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.790735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.861362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.901447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.984352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344733698402634:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.984450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.984651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344733698402639:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.988945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:08.999184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344733698402641:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:09.078560Z node 1 :TX_PROXY ERROR: Actor# [1:7494344737993369994:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.353897Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2025-04-17T18:05:10.354356Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2025-04-17T18:05:10.357169Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344742288337727:2512], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7494344742288337622:2512]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7494344742288337727:2512].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:05:10.357736Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494344742288337713:2512], SessionActorId: [1:7494344742288337622:2512], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494344742288337622:2512]. isRollback=0 2025-04-17T18:05:10.357961Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTFhN2M2OTktM2FlMzk0NmQtZjM1YWM3Y2ItYWQwZWZhYTk=, ActorId: [1:7494344742288337622:2512], ActorState: ExecuteState, TraceId: 01js2et59z522se6ktangp61a3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494344742288337714:2512] from: [1:7494344742288337713:2512] 2025-04-17T18:05:10.358048Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494344742288337714:2512] TxId: 281474976710673. Ctx: { TraceId: 01js2et59z522se6ktangp61a3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFhN2M2OTktM2FlMzk0NmQtZjM1YWM3Y2ItYWQwZWZhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:05:10.358296Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTFhN2M2OTktM2FlMzk0NmQtZjM1YWM3Y2ItYWQwZWZhYTk=, ActorId: [1:7494344742288337622:2512], ActorState: ExecuteState, TraceId: 01js2et59z522se6ktangp61a3, Create QueryResponse for error on request, msg: 2025-04-17T18:05:10.435268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344720813498324:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:10.435355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19369, MsgBus: 7780 2025-04-17T18:05:11.208051Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344746275872641:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:11.208106Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b7b/r3tmp/tmpEhtGUi/pdisk_1.dat 2025-04-17T18:05:11.284264Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19369, node 2 2025-04-17T18:05:11.341247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:11.341354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:11.343315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:11.356506Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:11.356531Z nod ... 
lf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.035280Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.068721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.146838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.223379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.300115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.352857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.433674Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344791989669553:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.433805Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.433914Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344791989669558:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:21.438424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:21.448903Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344791989669560:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:21.545556Z node 3 :TX_PROXY ERROR: Actor# [3:7494344791989669618:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:22.372227Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344774809798002:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:22.372316Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7815, MsgBus: 5220 2025-04-17T18:05:24.171091Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344802530422682:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:24.171591Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b7b/r3tmp/tmpoioZ7W/pdisk_1.dat 2025-04-17T18:05:24.276516Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:24.305919Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:24.306020Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:24.307197Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7815, node 4 2025-04-17T18:05:24.359594Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:24.359618Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:24.359628Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:24.359749Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5220 TClient is connected to server localhost:5220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:05:24.825283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.832088Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:24.840788Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:24.896120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:25.060996Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:25.158395Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:27.668914Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344815415326195:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:27.668995Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:27.729350Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:27.782094Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:27.833547Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:27.878403Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:27.920717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:27.985406Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.030760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.168734Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344819710294157:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.168835Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.168883Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344819710294162:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.179054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:28.200023Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344819710294164:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:28.286634Z node 4 :TX_PROXY ERROR: Actor# [4:7494344819710294222:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:29.170227Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344802530422682:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:29.170326Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 21175, MsgBus: 21695 2025-04-17T18:05:13.811895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344756503798825:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.811957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:13.862275Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344757714583498:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.862319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:13.870763Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344758924931955:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.870835Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:13.914751Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344754801928772:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.914803Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:13.934114Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494344755765590911:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.935694Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001baf/r3tmp/tmpMp5E6k/pdisk_1.dat 2025-04-17T18:05:14.967896Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:15.174550Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:15.206240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:15.225094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:15.314257Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:15.970712Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:16.219639Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:16.223837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:16.249467Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:16.253238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:16.266472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:16.266598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:16.267050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:16.267112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:16.267209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:16.267244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:16.268178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:16.268218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:16.272156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:16.272232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:16.282665Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:05:16.282707Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T18:05:16.282858Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:16.284254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:16.284429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:16.286515Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:05:16.290446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:16.301332Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-17T18:05:16.304400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21175, node 1 2025-04-17T18:05:16.718599Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:16.718628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:16.718674Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:16.719827Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21695 TClient is connected to server localhost:21695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:18.409921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:18.487031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:18.821062Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344756503798825:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.821137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:18.869262Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344757714583498:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.869372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:18.871036Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344758924931955:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.871103Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:18.918296Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344754801928772:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.918354Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:18.965326Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494344755765590911:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.965400Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.420413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.093725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.483622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:23.967689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344799453473783:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.967800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.415733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.514421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.577900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.659181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.749773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.958604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.085562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.179592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344808043409178:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:25.179673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:25.179909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344808043409183:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:25.184087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:25.206504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344808043409185:2409], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:25.293700Z node 1 :TX_PROXY ERROR: Actor# [1:7494344808043409266:4203] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:26.790058Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913126776, txId: 281474976710672] shutting down 2025-04-17T18:05:27.061286Z node 3 :BS_PROXY_PUT ERROR: [29b5775bab0d24e7] Result# TEvPutResult {Id# [72075186224037911:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037911:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-17T18:05:27.065075Z node 2 :BS_PROXY_PUT ERROR: [d7148af2d550e6de] Result# TEvPutResult {Id# [72075186224037899:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037899:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-17T18:05:27.074968Z node 5 :BS_PROXY_PUT ERROR: [62c6dcc6c53dc89c] Result# TEvPutResult {Id# [72075186224037897:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037897:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-17T18:05:27.064990Z node 4 :BS_PROXY_PUT ERROR: [c35dd8a4f361b81a] Result# TEvPutResult {Id# [72075186224037915:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037915:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.691155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-04-17T18:05:30.691237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.691273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.691306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.691373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.691401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.691460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.691534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.691840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.773870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.773914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.789583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.789992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.790157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.808116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.808327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.808909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.809114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.813843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.815128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.815184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.815375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.815416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.815450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.815559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.823066Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.961371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.961598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.961813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.962026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.962079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.964175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.964293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:30.964444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.964504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.964535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.964583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.966359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.966424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.966456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.967962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.968007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.968040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.968092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.971489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.973289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.973503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.974427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.974539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.974593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.974871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.974919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.975058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.975160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.977182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.977224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.977403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.977442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.977821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.977864Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.977951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.977982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.978036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.978080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.978125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.978163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.978215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.978246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.978319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.978356Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.978384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.980181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.980281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.980316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 44, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.033093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:31.033122Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:31.033150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:31.033223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T18:05:31.035410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T18:05:31.035530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-17T18:05:31.036002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.036097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.036157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:05:31.036237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.036280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:31.036380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-17T18:05:31.036512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.036565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.037511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.038497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:31.039470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.039500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.039620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:31.039707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.039737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:05:31.039763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:05:31.039951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.039991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-17T18:05:31.040039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:31.040062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.040090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:31.040125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.040151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:31.040175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.040199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:31.040223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:31.040293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:31.040321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:31.040343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:05:31.040363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:05:31.040911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.040992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.041020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:31.041067Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:05:31.041100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.041552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.041641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.041669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:31.041694Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:31.041719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.041791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:31.042172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.042236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.042321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:31.042547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.042588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.042653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.045725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.045819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.047283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:31.047361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:05:31.047537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:31.047571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:05:31.047960Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:31.048046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.048077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:338:2329] TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:31.048555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.048757Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 205us result status StatusPathDoesNotExist 2025-04-17T18:05:31.048948Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.592420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.592504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.592546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.592581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.592663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.592696Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.592757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.592835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.593172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.658722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.658764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.673391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.673718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.673853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.685250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.685487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.686147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.686374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.689147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.690597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.690666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.690902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.690958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.691003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.691142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.698193Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.838294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.838545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.838783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.839033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.839080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.842805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.843154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:30.843350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.843412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.843446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.843496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.846470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.846525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.846559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.849383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.849429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.849479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.849552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.853022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.854794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.854999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.855911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.856040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-17T18:05:30.856109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.856390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.856440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.856579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.856681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.858794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.858867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.859038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.859077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.859466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.859508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.859592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.859623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.859678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.859708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.859739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.859777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.859818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.859853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.859925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.859961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.859992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.861746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.861846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046678944, cookie: 1 2025-04-17T18:05:30.861880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... lPathId: 2] was 5 Forgetting tablet 72075186233409548 2025-04-17T18:05:31.023070Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:31.023252Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-17T18:05:31.023356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-17T18:05:31.023472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-04-17T18:05:31.024640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:31.024742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:31.024909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:31.025049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-17T18:05:31.026490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.026527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.026632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:31.027157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-17T18:05:31.027223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-17T18:05:31.027587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-17T18:05:31.030341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-17T18:05:31.030405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:31.030419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:31.030460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:31.031000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.031042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath 
for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.031107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.031708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:31.031742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:31.032924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T18:05:31.032948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-17T18:05:31.033041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-17T18:05:31.033085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:31.033100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:31.033158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:31.033199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:31.033272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:31.034527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-04-17T18:05:31.034764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:31.034799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-17T18:05:31.034908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:31.034929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-17T18:05:31.034988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:05:31.035022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:05:31.035462Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:31.035573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.035604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:636:2538] 2025-04-17T18:05:31.035751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:31.035829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.035848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 101: satisfy waiter [1:636:2538] 2025-04-17T18:05:31.035890Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:05:31.036008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.036030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:636:2538] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-17T18:05:31.036438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.036597Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 188us result status StatusPathDoesNotExist 2025-04-17T18:05:31.036758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:31.037207Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.037341Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 136us result status StatusPathDoesNotExist 2025-04-17T18:05:31.037454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:31.037867Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.038053Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot" took 144us result status StatusSuccess 2025-04-17T18:05:31.038475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.535332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.535410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.535466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.535498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.535554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.535598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.535663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.535738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.535978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.617025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-04-17T18:05:30.617079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.632309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.632715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.632896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.641409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.641617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.642227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.642437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.645939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.647267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.647310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.647455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.647499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.647532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.647639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.652958Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.768048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.768281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.768491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.768763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.768817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.775479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.775637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-04-17T18:05:30.775857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.775925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.775961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.775992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.778005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.778053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.778085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.779833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.779882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.779918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.779969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.783214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.785981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.786161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.787112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.787231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.787283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.787550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.787601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.787761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.787855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.789882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.789922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.790115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.790155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.790545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.790589Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.790678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.790707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.790760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.790790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.790850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.790888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.790923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.790948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.791010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.791043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.791071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.792765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.792866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.792901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
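The "Change state for txid 1:0" records above walk the schemeshard operation through its state machine: 2 -> 3 after TCreateParts, 3 -> 128 after NSubDomainState::TConfigureParts, and 128 -> 240 once TPropose handles TEvOperationPlan, after which TDone reports the part operation as 1/1 done. A minimal Python sketch, not part of the test output, for recovering those transitions from a raw dump like this one; the regex assumes only the exact wording visible above:

    import re
    import sys

    # Minimal sketch: extract schemeshard state transitions from a ya test log.
    # Assumes only the "Change state for txid <part> <from> -> <to>" wording
    # visible in the output above; everything else here is illustrative.
    TRANSITION = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

    def transitions(stream):
        # Several log records share one physical line in this dump, so scan
        # each line for every match instead of anchoring at the line start.
        for line in stream:
            for op, src, dst in TRANSITION.findall(line):
                yield op, int(src), int(dst)

    if __name__ == "__main__":
        for op, src, dst in transitions(sys.stdin):
            print(f"{op}: {src} -> {dst}")

Run over this section it would print, for example, "1:0: 2 -> 3", "1:0: 3 -> 128", "1:0: 128 -> 240".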
678944 is [1:549:2483] sender: [1:610:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.075586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:31.075697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.075741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:608:2529] TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:31.076216Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.076371Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 198us result status StatusPathDoesNotExist 2025-04-17T18:05:31.076479Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:31.077439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:549:2483] sender: [1:614:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:549:2483] sender: [1:617:2058] recipient: [1:616:2534] Leader for TabletID 72057594046678944 is [1:549:2483] sender: [1:618:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:619:2535] sender: [1:620:2058] recipient: [1:616:2534] 2025-04-17T18:05:31.115522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.115679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.115732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.115775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.115830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.115873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.115938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.116009Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.116337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.135524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.136877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.137041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.137270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.137313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.137579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.138417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.138535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.138605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.139062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.139167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:05:31.139334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.139425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.139559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.139654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.139765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.139961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.140227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.140346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.140716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.140792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.141199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.141345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.141445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.141748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, 
read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.141839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.141969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.142238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.142404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.142462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.142551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.150534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.150617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.150727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.150779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.150853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.150959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:619:2535] sender: [1:676:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.183581Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.183795Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 240us result status StatusPathDoesNotExist 2025-04-17T18:05:31.183975Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:31.184604Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.184857Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 197us result status StatusSuccess 2025-04-17T18:05:31.185195Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.473805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.473893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.473932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.473966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.474028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.474062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.474124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.474263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-17T18:05:30.474593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.563333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.563393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.577541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.577923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.578066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.588394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.588618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.589341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.589550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.594001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.595491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.595562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.595797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.595855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.595920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.596061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.602967Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.748329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.748533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.748725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.748912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.748964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.756887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-17T18:05:30.757051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:30.757253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.757322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.757357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.757391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.761198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.761268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.761307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.763306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.763360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.763409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.763480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.767310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.769437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.769639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.770719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.770883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.770943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.771257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.771328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.771486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.771603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.776190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.776245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.776465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.776518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.776919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.776979Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.777109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.777143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.777201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.777237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.777276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.777318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.777360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.777398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.777482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.777529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.777564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.779494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.779601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.779638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409546 2025-04-17T18:05:31.194388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:31.194533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2025-04-17T18:05:31.195161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-17T18:05:31.195307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:31.195711Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:31.196103Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2025-04-17T18:05:31.197121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:31.197268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-17T18:05:31.197907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:31.198272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:31.198373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:31.198503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.199572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.199625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.199738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:31.201854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-17T18:05:31.201901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-17T18:05:31.202034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-17T18:05:31.202057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-17T18:05:31.202243Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-04-17T18:05:31.202365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:31.202560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.202669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.202728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.205176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:31.205216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:31.205295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:31.205317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:31.205481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T18:05:31.205503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-17T18:05:31.205686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:31.205712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:31.205832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:31.205864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:31.206802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:31.207888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-17T18:05:31.208106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:31.208145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-17T18:05:31.208223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:05:31.208245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:05:31.208590Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:31.208710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.208750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:723:2609] 2025-04-17T18:05:31.208878Z node 1 
:FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:05:31.208940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.208958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:723:2609] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-17T18:05:31.209354Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.209533Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 208us result status StatusPathDoesNotExist 2025-04-17T18:05:31.209682Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:31.210039Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.210227Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 152us result status StatusPathDoesNotExist 2025-04-17T18:05:31.210371Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:31.210759Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.210903Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 149us result status StatusSuccess 2025-04-17T18:05:31.211206Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 17062, MsgBus: 22062 2025-04-17T18:05:06.723163Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344725694951568:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:06.723241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6d/r3tmp/tmpEZQaGu/pdisk_1.dat 2025-04-17T18:05:07.225207Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:07.231327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:07.231435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:07.233232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17062, node 1 2025-04-17T18:05:07.325185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:07.325207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:07.325213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:07.325334Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22062 TClient is connected to server localhost:22062 WaitRootIsUp 'Root'... 
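Both TTxDescribeScheme calls above resolve "/MyRoot/USER_0" to StatusPathDoesNotExist after the subdomain drop, with "/MyRoot" reported as the last existing prefix, while "/MyRoot" itself still describes as StatusSuccess. A minimal Python sketch, assuming only the 'describe path ... took ... result status ...' wording visible above, for tabulating those outcomes:

    import re
    import sys

    # Minimal sketch: tabulate TTxDescribeScheme outcomes from a dump like
    # this one. The pattern assumes only the wording visible above, e.g.
    #   describe path "/MyRoot/USER_0" took 198us result status StatusPathDoesNotExist
    DESCRIBE = re.compile(r'describe path "([^"]+)" took (\S+) result status (Status\w+)')

    for line in sys.stdin:
        for path, took, status in DESCRIBE.findall(line):
            print(f"{status:28} {took:>8} {path}")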
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:07.841566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:07.865800Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:07.871516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:08.033388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:08.193747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:08.250981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.770699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344738579855071:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.770807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:10.086460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.116064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.145553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.170398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.199154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.268636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.316277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:10.396595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344742874823034:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:10.396683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:10.396924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344742874823039:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:10.400188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:10.411331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344742874823041:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:10.469994Z node 1 :TX_PROXY ERROR: Actor# [1:7494344742874823095:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:11.379041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:11.439747Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344747169790795:2522], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-17T18:05:11.440172Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjgzMDc5NzUtMjBiMmNiZGEtYmE2MmU3Ni04NGU4NzE0Yg==, ActorId: [1:7494344747169790723:2512], ActorState: ExecuteState, TraceId: 01js2et6bqb08n1rn7f8s06ang, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 64774, MsgBus: 18393 2025-04-17T18:05:12.337292Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344751689693577:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:12.337383Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6d/r3tmp/tmpFJUeUf/pdisk_1.dat 2025-04-17T18:05:12.464517Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64774, node 2 2025-04-17T18:05:12.481732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:12.481808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:12.482727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:12.530684Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:12.530704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:12.530711Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:12.530831Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18393 TClient is connected to server localhost:18393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:12.931440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opI ... 
eration part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.309052Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.355162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.449708Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344794590117027:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.449803Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.450029Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344794590117032:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.453321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:22.469216Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:05:22.470140Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344794590117034:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:22.570851Z node 3 :TX_PROXY ERROR: Actor# [3:7494344794590117094:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:23.602383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-17T18:05:23.705919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18277, MsgBus: 63100 2025-04-17T18:05:24.740093Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344803830232328:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:24.740142Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6d/r3tmp/tmpt7HBSp/pdisk_1.dat 2025-04-17T18:05:24.851791Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:24.879144Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:24.879227Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:24.881584Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18277, node 4 2025-04-17T18:05:24.928668Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:24.928695Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:24.928711Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:24.928837Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63100 TClient is connected to server localhost:63100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:25.424026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:25.429409Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:25.442254Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
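The GENERIC_ERROR compile failures above carry positioned YQL issues (":2:25: Error: ...", ":2:61: Error: ..."), ending in the Document API guard, code 2008, which rejects modifying /Root/DocumentApiTest from a YQL query. A minimal Python sketch for collecting such positioned issues; it assumes only that continuation lines carry ":<row>:<col>: Error: <message>" as seen above:

    import re
    import sys

    # Minimal sketch: collect positioned YQL issue lines such as
    #   :2:25: Error: At function: KiWriteTable!
    # Assumes only the shape visible in the output above; on this dump a
    # match simply runs to the end of the physical line it sits on.
    ISSUE = re.compile(r":(\d+):(\d+): Error: (.+)")

    for line in sys.stdin:
        for row, col, msg in ISSUE.findall(line):
            print(f"{row}:{col} {msg}")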
2025-04-17T18:05:25.520524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:25.675429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:25.758324Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:28.366508Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344821010103272:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.366604Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.436755Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.474752Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.507851Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.541204Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.581636Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.618903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.691244Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.883757Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344821010103941:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.883847Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.884237Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344821010103946:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.887659Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:28.904351Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344821010103948:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:28.956522Z node 4 :TX_PROXY ERROR: Actor# [4:7494344821010104006:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:29.741726Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344803830232328:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:29.741821Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:30.002557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
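The repeated "Resource pool default not found or you don't have access permissions" warnings, followed by the TX_PROXY "path exist, request accepts it" error and a successful retry, are the workload service lazily creating /Root/.metadata/workload_manager/pools/default on first use and racing a concurrent creator; during test startup this is benign noise. For comparison, a pool can also be declared explicitly up front — a hedged sketch, where the pool name and the WITH parameters are illustrative assumptions rather than values taken from this run:

    -- Illustrative only; parameter names and values are assumptions.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );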
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:29.288600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:29.288701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:29.288744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:29.288798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:29.288845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:29.288876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:29.288946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:29.289034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:29.289388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:29.371969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:29.372017Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:29.388744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:29.389274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:29.389459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:29.398723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:29.398980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:29.399639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:29.399865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:29.410571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:29.412111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:29.412192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-17T18:05:29.412378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:29.412438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:29.412480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:29.412632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.419643Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:29.532726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:29.532975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.533196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:29.533411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:29.533463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.536008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:29.536152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:29.536377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.536439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:29.536478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:29.536541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:29.538763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.538855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:29.538903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:29.540646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.540693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.540738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:29.540805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.544581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:29.546373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:29.546582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:29.547631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:29.547754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:29.547811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:29.548088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:29.548143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:29.548307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:29.548403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:29.550287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:29.550330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:29.550514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:29.550560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:29.550964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.551017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:29.551111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:29.551174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.551216Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:29.551247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.551280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:29.551315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.551357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:29.551386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:29.551443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:29.551488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:29.551526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:29.553271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:29.553405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:29.553441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 8:05:31.305242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.305398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.305451Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:05:31.305576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-04-17T18:05:31.305738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 5000004 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.307305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-04-17T18:05:31.307419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2025-04-17T18:05:31.307750Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.307864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: 
Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.307921Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-04-17T18:05:31.308087Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2025-04-17T18:05:31.308237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.308297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:05:31.308906Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=108;fline=tx_controller.cpp:214;event=finished_tx;tx_id=108; FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-17T18:05:31.310389Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.310430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.310600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-17T18:05:31.310727Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.310764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:336:2312], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-04-17T18:05:31.310806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:336:2312], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-04-17T18:05:31.311346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.311399Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:05:31.311461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-04-17T18:05:31.311940Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-17T18:05:31.312055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-17T18:05:31.312096Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-17T18:05:31.312138Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-17T18:05:31.312176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 3 2025-04-17T18:05:31.312779Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-17T18:05:31.312843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-17T18:05:31.312866Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-17T18:05:31.312893Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-04-17T18:05:31.312920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T18:05:31.312975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-04-17T18:05:31.315345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-04-17T18:05:31.316679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-17T18:05:31.316962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-17T18:05:31.328942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-04-17T18:05:31.329002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-04-17T18:05:31.329154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-04-17T18:05:31.329209Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 108 2025-04-17T18:05:31.331024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.331172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.331211Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-17T18:05:31.331370Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-17T18:05:31.331411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-17T18:05:31.331450Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-17T18:05:31.331484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-17T18:05:31.331523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-04-17T18:05:31.331596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:491:2439] message: TxId: 108 2025-04-17T18:05:31.331651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 
2025-04-17T18:05:31.331694Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-17T18:05:31.331727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-17T18:05:31.331846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:05:31.334000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.334047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:874:2786] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-04-17T18:05:31.337331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.337538Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.337886Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:31.340971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.341136Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. 
Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-04-17T18:05:31.341516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-04-17T18:05:31.341565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-04-17T18:05:31.342062Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-04-17T18:05:31.342144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.342207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:909:2821] TestWaitNotification: OK eventTxId 109 >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.189785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.189872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.190007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.190039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.190088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.190114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.190170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.190267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.190552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.264799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.265013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.279057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.279487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.279836Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.287965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.288178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.288862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.289066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.292160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.293537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.293602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.293806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.293860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.293904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.294035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.301155Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.427680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.427924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.428145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.428440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.428503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.430472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.430641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.430847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.430932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-17T18:05:31.430970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.431022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.432723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.432779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.432819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.434433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.434481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.434521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.434580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.443796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.445716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.445936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.447011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.447129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.447186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.447471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.447524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.447687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.447783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.449779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.449821Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.450004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.450045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.450407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.450452Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.450541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.450574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.450626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.450670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.450712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.450749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.450786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.450831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.450887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.450926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.450955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.452696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.452801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.452839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:31.497349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:31.497379Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T18:05:31.497407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:31.497504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-17T18:05:31.499962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.500020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.500067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-17T18:05:31.501870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:31.502196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:31.503200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.503252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.503306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:31.503357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-17T18:05:31.503489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.505211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T18:05:31.505332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T18:05:31.505688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.505790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.505838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
100:0, at tablet# 72057594046678944 2025-04-17T18:05:31.506091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T18:05:31.506153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:31.506342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.506405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.506457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T18:05:31.508270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.508307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.508452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:31.508575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.508612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T18:05:31.508664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T18:05:31.509002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.509046Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T18:05:31.509140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T18:05:31.509175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:31.509215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T18:05:31.509261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:31.509300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T18:05:31.509340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:31.509387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T18:05:31.509431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T18:05:31.509497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:31.509544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-17T18:05:31.509595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 
2025-04-17T18:05:31.509627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:31.510348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:31.510430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:31.510465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:31.510506Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:31.510544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.511123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:31.511239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:31.511272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:31.511307Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:31.511338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.511403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-17T18:05:31.511445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:275:2266] 2025-04-17T18:05:31.514840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:31.515272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:31.515358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.515388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:276:2267] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-04-17T18:05:31.515895Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.516113Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 201us result status 
StatusSuccess 2025-04-17T18:05:31.516487Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.491510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.491599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.491659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.491698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.491762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.491798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.491872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.491956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.492250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-17T18:05:31.578580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.578640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.600191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.600663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.600838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.609672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.609942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.610690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.610903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.613432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.614713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.614773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.615000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.615044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.615085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.615213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.623067Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.722197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.722449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.722651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.723008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.723067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.727097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.727233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.727408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.727543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.727577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.727616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.729234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.729272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.729301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.730833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.730870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.730901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.730944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.733821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.735466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.735625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.736436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.736519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.736563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.736779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.736828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.736935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-17T18:05:31.737016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.738580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.738624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.738799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.738868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.739217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.739268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.739356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.739394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.739449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.739484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.739520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.739560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.739609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.739645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.739718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.739781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.739826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.741676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.741797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.741848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:05:31.828038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.828076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:31.828220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-17T18:05:31.828444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.828520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.829579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.829896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.830877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.830918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.831036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:31.831152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.831183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:05:31.831258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:31.831592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.831641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-17T18:05:31.831726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:31.831759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.831797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:31.831830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.831864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:31.831905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.831936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:31.831967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:31.832038Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:31.832076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:31.832124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:05:31.832159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:05:31.832771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.832863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.832902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:31.832940Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:05:31.832985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.833517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.833588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.833627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:31.833656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:31.833727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.833811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:31.834289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.834333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.834419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:31.834578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.834630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.834688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.837409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.837621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.838947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:31.839017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:05:31.839239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:31.839277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:05:31.839683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:31.839795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.839845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2328] TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:31.840319Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.840509Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 202us result status StatusPathDoesNotExist 2025-04-17T18:05:31.840690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:31.841264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.841431Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 173us result status StatusSuccess 2025-04-17T18:05:31.841824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteRetryQuery [GOOD] Test command err: Trying to start YDB, gRPC: 15729, MsgBus: 13438 2025-04-17T18:05:08.386694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344735601404520:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:08.386994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b54/r3tmp/tmpyK1OMF/pdisk_1.dat 2025-04-17T18:05:08.708157Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15729, node 1 2025-04-17T18:05:08.766090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:08.766216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:08.769468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:08.819093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:08.819122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:08.819135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:08.819274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13438 TClient is connected to server localhost:13438 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:09.394550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.447344Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:09.461360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:09.603593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.757051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.837933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:11.551211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344748486308042:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.551333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.871947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:11.900909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:11.931407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:11.964832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:11.997501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.032790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.064673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.135770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344752781275996:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.135858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.136047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344752781276001:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.138929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:12.148791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344752781276003:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:12.229476Z node 1 :TX_PROXY ERROR: Actor# [1:7494344752781276057:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:13.384918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344757076243738:2525], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:13.384977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344757076243740:2527], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:13.384986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:13.385052Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494344757076243739:2526], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDhhZDI0M2YtNjc0N2NkZmEtYzY3MDg1M2YtMTJlYTFlMWI=, Failed to fetch pool info NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:13.385130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494344757076243739:2526], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDhhZDI0M2YtNjc0N2NkZmEtYzY3MDg1M2YtMTJlYTFlMWI=, Failed to resolve pool, NOT_FOUND, issues: {
<main>: Error: Failed to resolve pool id another_pool_id subissue: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-17T18:05:13.385218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7494344757076243736:2524]: {
<main>: Error: Failed to resolve pool id another_pool_id subissue: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-17T18:05:13.385306Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhhZDI0M2YtNjc0N2NkZmEtYzY3MDg1M2YtMTJlYTFlMWI=, ActorId: [1:7494344757076243736:2524], ActorState: ExecuteState, TraceId: 01js2et896d77a6ea3nhg81bj4, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-17T18:05:13.385342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344735601404520:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.385449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:13.385451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7494344757076243736:2524]: Pool another_pool_id not found Trying to start YDB, gRPC: 63931, MsgBus: 64630 2025-04-17T18:05:14.189543Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344759258620619:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:14.189591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b54/r3tmp/tmpo4n4tm/pdisk_1.dat 2025-04-17T18:05:14.293616Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63931, node 2 2025-04-17T18:05:14.340213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.340286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Discon ... ool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:17.898627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:17.911742Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:05:17.913568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344772143524961:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:17.980238Z node 2 :TX_PROXY ERROR: Actor# [2:7494344772143525018:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:19.081105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.089844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480 2025-04-17T18:05:19.190400Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344759258620619:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:19.190459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:19.627493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:05:20.108446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:1, at schemeshard: 72057594046644480 2025-04-17T18:05:20.694778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.201081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-17T18:05:21.645829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.274422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710711:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.096334s: status = SUCCESS, issues = 2025-04-17T18:05:24.387957Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTQ5OGM2NTYtNmNhOWY5ZDktYmYyZWU3My1mZmQzNzRjNw==, ActorId: [2:7494344802208297722:2809], ActorState: ExecuteState, TraceId: 01js2etk10by1mx24rwjjvsx4z, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool Trying to start YDB, gRPC: 29461, MsgBus: 24993 2025-04-17T18:05:25.306527Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344808836181655:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:25.306609Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath #
/home/runner/.ya/build/build_root/6873/001b54/r3tmp/tmpMqKNqn/pdisk_1.dat 2025-04-17T18:05:25.453214Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:25.474563Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:25.474660Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:25.476371Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29461, node 3 2025-04-17T18:05:25.532926Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:25.532949Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:25.532957Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:25.533081Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24993 TClient is connected to server localhost:24993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:26.137462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:26.153559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:26.234474Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:26.451213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:26.543898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:28.865744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344821721085329:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.865851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:28.928998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:28.967043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:29.004419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:29.045591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:29.092930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:29.128929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:29.164782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:29.225724Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344826016053284:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:29.225816Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:29.226252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344826016053289:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:29.230804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:29.245260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344826016053291:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:29.321709Z node 3 :TX_PROXY ERROR: Actor# [3:7494344826016053346:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:30.307000Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344808836181655:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:30.307088Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.569548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.569629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.569663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.569694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.569768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.569796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.569854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.569934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.570253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.649260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.649308Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.665605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.666009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.666360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.676103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.676316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T18:05:30.677008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.677226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.680006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.681339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.681393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.681590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.681641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.681686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.681825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.688163Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.844669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.844931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.845167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.845427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.845510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.854547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.854689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:30.854902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.854975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.855009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.855042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.857501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.857569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.857608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.859513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.859564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.859602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.859669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.870121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.872232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.872489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.873544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.873667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.873730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.874016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.874075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.874251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.874357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.876529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.876581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.876767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T18:05:30.876824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.877223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.877268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.877383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.877421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.877484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.877516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.877552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.877594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.877628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.877666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.877727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.877763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.877797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.892150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.892305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.892389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
31.522753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.522885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.522955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-04-17T18:05:31.523052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-17T18:05:31.523400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-17T18:05:31.523529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409550 TxId: 104 Status: OK 2025-04-17T18:05:31.523610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409550 TxId: 104 Status: OK 2025-04-17T18:05:31.523669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-17T18:05:31.523714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-17T18:05:31.523986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-17T18:05:31.524076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2025-04-17T18:05:31.524132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2025-04-17T18:05:31.524161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-17T18:05:31.524190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T18:05:31.528807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.529198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 104 2025-04-17T18:05:31.623495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-17T18:05:31.623626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-17T18:05:31.623703Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-17T18:05:31.623763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.623797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-17T18:05:31.623834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:05:31.625522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-17T18:05:31.625634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-17T18:05:31.625696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-17T18:05:31.625746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.625773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-17T18:05:31.625918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T18:05:31.626038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.626102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:05:31.634356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.635069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.635399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.635464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.635602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:31.635790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.635829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:336:2312], at schemeshard: 
72057594046678944, txId: 104, path id: 1 2025-04-17T18:05:31.635864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:336:2312], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T18:05:31.636303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.636362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T18:05:31.636490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:31.636539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:31.636593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:31.636624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:31.636662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T18:05:31.636715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:31.636764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:05:31.636796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:05:31.636969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-17T18:05:31.637012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-17T18:05:31.637050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:05:31.637078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T18:05:31.637854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:31.638018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:31.638063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:05:31.638101Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:05:31.638142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:05:31.639051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:31.639123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:05:31.639149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at 
schemeshard: 72057594046678944, txId: 104 2025-04-17T18:05:31.639196Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T18:05:31.639228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:05:31.639288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-17T18:05:31.650134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:05:31.650463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
TestModificationResult got TxId: 104, wait until txId: 104
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.501763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.501893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.501943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.501982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.502057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.502096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.502159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.502264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.502556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.581004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.581048Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.595034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.595370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.595541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.612349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.612536Z node 1
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.613066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.613233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.616047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.617389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.617452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.617655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.617719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.617782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.617897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.624078Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.749155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.749330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.749486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.749644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.749678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.751303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.751407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.751520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.751562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.751587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.751624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2025-04-17T18:05:31.752897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.752945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.752983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.754215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.754262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.754297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.754341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.757646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.759098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.759270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.760058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.760146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.760210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.760426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.760466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.760584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.760658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.762297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.762339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.762515Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.762554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.762928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.762976Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.763068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.763096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.763146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.763186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.763222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.763254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.763284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.763309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.763361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.763397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.763443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.765146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.765245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.765278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxId: 101, partId: 0, tablet: 72075186233409548 2025-04-17T18:05:31.899992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T18:05:31.900191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2025-04-17T18:05:31.900251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-17T18:05:31.900291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2025-04-17T18:05:31.903107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-17T18:05:31.903230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2025-04-17T18:05:31.903279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-17T18:05:31.903316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2025-04-17T18:05:31.904420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.906854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2025-04-17T18:05:31.906965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2025-04-17T18:05:31.907038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-17T18:05:31.907080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-17T18:05:31.907110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-17T18:05:31.909607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.910849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.910983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.911017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.911054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:31.911122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.911257Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.912849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T18:05:31.912952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-17T18:05:31.913296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.913450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.913497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:31.913772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T18:05:31.913819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:31.913969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:31.914046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:31.915936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.915977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:31.916142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.916195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:05:31.916482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.916524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:31.916611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:31.916640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.916673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:31.916699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2025-04-17T18:05:31.916729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:31.916798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:31.916835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:31.916865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:31.917005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T18:05:31.917043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-04-17T18:05:31.917087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-04-17T18:05:31.917575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.917676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:31.917710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:31.917746Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T18:05:31.917776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:31.917870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-17T18:05:31.917905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:306:2297] 2025-04-17T18:05:31.920700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.920783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:31.920809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:31.921358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:31.921521Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 196us result status StatusSuccess 2025-04-17T18:05:31.921833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> KqpQueryServiceScripts::TestTruncatedBySize [GOOD]
>> KqpQueryService::DmlNoTx [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.121740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.121799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.121823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.121875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.121940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.121968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.122026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.122241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.122536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.197470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.197510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.211468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.211856Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.212029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.224010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.224197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.224844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.225010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.227631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.228874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.228929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.229122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.229171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.229208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.229328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.235234Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.354409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.354621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.354840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.355045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.355108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.357060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.357181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.357351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.357415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.357451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.357483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.359190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.359248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.359284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.360674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.360715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.360753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.360809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.364334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.368948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.369209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.370264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.370413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.370476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.370758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.370808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.370989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.371091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.373431Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.373473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.373664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.373704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.374079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.374129Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.374262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.374299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.374354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.374386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.374430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.374471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.374514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.374543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.374608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.374644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.374713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.384529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.384720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.384763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, at schemeshard: 72057594046678944, message: Source { RawX1: 605 RawX2: 4294969839 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:05:32.043938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2025-04-17T18:05:32.044027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 605 RawX2: 4294969839 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:05:32.044068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:05:32.044118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 605 RawX2: 4294969839 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:05:32.044163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.044188Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.044223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T18:05:32.044263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-17T18:05:32.044574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 607 RawX2: 4294969840 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:05:32.044595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2025-04-17T18:05:32.044664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 607 RawX2: 4294969840 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:05:32.044685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:05:32.044745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 607 RawX2: 4294969840 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:05:32.044784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.044798Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-17T18:05:32.044813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at 
schemeshard: 72057594046678944 2025-04-17T18:05:32.044829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:2 129 -> 240 2025-04-17T18:05:32.047378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T18:05:32.047437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T18:05:32.049867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T18:05:32.049966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.050091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-17T18:05:32.050146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T18:05:32.050251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.050406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.050447Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2025-04-17T18:05:32.050560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-04-17T18:05:32.050592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-04-17T18:05:32.050627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-04-17T18:05:32.050656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-04-17T18:05:32.050713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2025-04-17T18:05:32.051116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-17T18:05:32.051336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-17T18:05:32.051374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2025-04-17T18:05:32.051439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-04-17T18:05:32.051461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-17T18:05:32.051494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-04-17T18:05:32.051523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-17T18:05:32.051545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2025-04-17T18:05:32.051598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:481:2429] message: TxId: 107 2025-04-17T18:05:32.051637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-17T18:05:32.051675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-17T18:05:32.051705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-17T18:05:32.051823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T18:05:32.051862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2025-04-17T18:05:32.051895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2025-04-17T18:05:32.051927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:05:32.051965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2025-04-17T18:05:32.052002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2025-04-17T18:05:32.052053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T18:05:32.053916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.053946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:535:2483] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-17T18:05:32.056838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:32.057277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2025-04-17T18:05:32.057347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-17T18:05:32.057377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-17T18:05:32.059139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.059296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7
TestModificationResult got TxId: 108, wait until txId: 108
TestWaitNotification wait txId: 108
2025-04-17T18:05:32.059664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-17T18:05:32.059696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-17T18:05:32.060128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-17T18:05:32.060225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.060271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:731:2645]
TestWaitNotification: OK eventTxId 108
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.533373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.533470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.533511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.533549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.533606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.533636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.533694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.533776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.534081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.618083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.618127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.638388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.638722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.638902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.648115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.648327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.648960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.649151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.651869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.653132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.653206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.653400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.653458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.653507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.653634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.660140Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.790905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.791153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.791371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.791583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.791640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.795125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.795278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.795456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.795519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.795557Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.795590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.797660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.797728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.797766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.799675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.799735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.799776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.799833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.803320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.805216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.805425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.806444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.806564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.806631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.806913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.806964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.807124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.807253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.809283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.809329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.809510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.809557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.809996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.810053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.810409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.810453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.810511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.810548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.810611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.810653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.810690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.810718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.810782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.810843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.810881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.817560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.817685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.817716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:31.986473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.987505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:31.987696Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-17T18:05:31.988423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-17T18:05:31.988691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T18:05:31.989288Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:31.989564Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T18:05:31.990244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.990438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409546 2025-04-17T18:05:31.990865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:31.991001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:31.992058Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2025-04-17T18:05:31.993592Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:31.993770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-17T18:05:31.993916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:31.994143Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-17T18:05:31.994351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:31.994543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 2025-04-17T18:05:31.997076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:31.997250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:31.997836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.997892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.998048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-17T18:05:31.999551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:31.999607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:31.999672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:32.001193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-17T18:05:32.001239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-17T18:05:32.002018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-17T18:05:32.002641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:32.002666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:32.002744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:32.002769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:32.005322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T18:05:32.005359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-17T18:05:32.005489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-17T18:05:32.005539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:32.005570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:32.005627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:32.005662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:32.005761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:32.005837Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-17T18:05:32.006092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:32.006127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-17T18:05:32.006238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:32.006261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:05:32.006653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:32.006759Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:32.006802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.006845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:623:2523] 2025-04-17T18:05:32.006954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.006973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:623:2523] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:32.007343Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.007506Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 182us result status StatusPathDoesNotExist 2025-04-17T18:05:32.007681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:32.008035Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.008232Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 157us result status StatusSuccess 2025-04-17T18:05:32.008587Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateAndWait >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.731564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.731643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.731681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.731715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.731782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.731813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.731870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.731949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-17T18:05:31.732238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.819384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.819443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.835889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.836294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.836460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.845234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.845515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.846157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.846396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.849162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.850630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.850687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.850910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.850961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.850999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.851107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.859054Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.989995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.990341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.990574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.990826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.990882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.992905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-17T18:05:31.993054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.993222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.993287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.993325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.993358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.995292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.995350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.995390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.997305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.997364Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.997403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.997467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.001096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:32.003535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:32.003735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:32.004695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.004825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.004883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.005178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:32.005234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.005396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:32.005548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:32.007755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:32.007801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:32.007981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.008023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:32.008436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.008484Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:32.008580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:32.008615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.008673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:32.008707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.008764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:32.008805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.008842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:32.008875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:32.008940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:32.008975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:32.009008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:32.010770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:32.010903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:32.010940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:569:2523] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-17T18:05:32.233982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:32.234233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.234443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:32.234663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:32.234712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.237006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:32.237144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-17T18:05:32.237338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.237394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:32.237436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2025-04-17T18:05:32.237473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2025-04-17T18:05:32.240008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.240070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:32.240108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2025-04-17T18:05:32.244016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.244068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.244112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-04-17T18:05:32.244159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-04-17T18:05:32.244336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:32.246015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-04-17T18:05:32.246141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2025-04-17T18:05:32.246521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.246627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.246676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-17T18:05:32.246971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2025-04-17T18:05:32.247029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-17T18:05:32.247227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:32.247319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2025-04-17T18:05:32.249211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:32.249256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:32.249441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.249482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 2 2025-04-17T18:05:32.249560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.249604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-17T18:05:32.249716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-17T18:05:32.249757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-17T18:05:32.249794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-17T18:05:32.249851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-17T18:05:32.249890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 
2025-04-17T18:05:32.249937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-17T18:05:32.249977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-17T18:05:32.250008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-17T18:05:32.250076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T18:05:32.250128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-04-17T18:05:32.250165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2025-04-17T18:05:32.250939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-17T18:05:32.251041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-17T18:05:32.251081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-04-17T18:05:32.251119Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-17T18:05:32.251158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:32.251238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-04-17T18:05:32.254885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-17T18:05:32.255235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-17T18:05:32.255319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-17T18:05:32.255837Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-17T18:05:32.255934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.255972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:594:2548] TestWaitNotification: OK eventTxId 108 2025-04-17T18:05:32.256616Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.256827Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 221us result status StatusSuccess 2025-04-17T18:05:32.257187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.021750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.021830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.021871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.021905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.023540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.023596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.023680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.023782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.024228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.120246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.120298Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T18:05:27.140665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.141063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.141244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.150855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.151062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.151772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.151985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.154502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.155982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.156045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.157406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.157497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.157573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.157758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.166906Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.295872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.296138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.296360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.296595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.296663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.299245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.299373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.299555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.299621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.299662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.299696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.302479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.302541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.302583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.304566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.304637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.304681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.304736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.308797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.310692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.310917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.311881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.312017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.312068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.312363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.312419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.312581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.312685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.314659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.314702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.314911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.314958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.315327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.315372Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.315480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.315527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.315587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.315621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.315657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.315697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.315736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.315765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.315824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.315866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.315902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.317689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.317794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.317831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
ed, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 526 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:05:32.269365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-17T18:05:32.269498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 526 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:05:32.269567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-17T18:05:32.270024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:32.270096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-17T18:05:32.270148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:05:32.270204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-17T18:05:32.270299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-17T18:05:32.270415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-17T18:05:32.270569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-17T18:05:32.270641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-17T18:05:32.272761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:32.274068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:32.274340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T18:05:32.274404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T18:05:32.274582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-17T18:05:32.274762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T18:05:32.274822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-17T18:05:32.274870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-17T18:05:32.274935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, 
at schemeshard: 72075186233409546 2025-04-17T18:05:32.275001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-17T18:05:32.275092Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:32.275130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-17T18:05:32.275189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-17T18:05:32.276155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:32.276256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:32.276291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-17T18:05:32.276350Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-17T18:05:32.276389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-17T18:05:32.277556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:32.277631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:32.277657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-17T18:05:32.277686Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:32.277719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-17T18:05:32.277782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T18:05:32.281252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:32.281305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-17T18:05:32.281727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-17T18:05:32.281943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:32.281998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:32.282041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 
progress is 1/1 2025-04-17T18:05:32.282082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:32.282125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-17T18:05:32.282578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:554:2493] message: TxId: 104 2025-04-17T18:05:32.282624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:32.282669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:05:32.282702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:05:32.282797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-17T18:05:32.283270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T18:05:32.283307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T18:05:32.283870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-17T18:05:32.285506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-17T18:05:32.286458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T18:05:32.286509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-17T18:05:32.286599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.286658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:756:2673] 2025-04-17T18:05:32.287540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-17T18:05:32.288610Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-17T18:05:32.288817Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 217us result status StatusSuccess 2025-04-17T18:05:32.289273Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
>> KqpQueryService::Ddl_Dml [GOOD]
>> TSchemeShardSubDomainTest::LS
>> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD]
>> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD]
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain
>> TSchemeShardSubDomainTest::CreateWithoutPlanResolution
>> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD]
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T18:05:32.539025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:05:32.539108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:32.539144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:05:32.539193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:05:32.539247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:05:32.539373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:05:32.539441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:32.539515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0,
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:32.539802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:32.612624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:32.612676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:32.628102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:32.628518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:32.628689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:32.636590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:32.636814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:32.637464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.637670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:32.640466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.641845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:32.641901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.642110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:32.642157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:32.642218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:32.642346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.653792Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:32.784407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:32.784631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.784835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:32.785044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:32.785102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.787281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.787409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:32.787559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.787618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:32.787650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:32.787702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:32.789494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.789542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:32.789578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:32.791203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.791251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.791289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.791335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.794969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:32.796716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:32.796873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:32.797691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.797773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.797811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.797987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:32.798020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.798150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:32.798324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:32.799889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:32.799925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:32.800098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.800146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:32.800520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.800564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:32.800657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:32.800690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.800740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:32.800768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.800800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:32.800832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.800863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:32.800888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:32.800971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:32.801013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:32.801035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:32.802379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:32.802471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:32.802508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
O: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:05:32.864269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:32.864311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:32.864408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-17T18:05:32.864512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:32.864559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:32.865294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:32.865453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:32.866446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:32.866483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:32.866544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:32.866646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.866667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:05:32.866691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:32.866892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.866929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-17T18:05:32.866979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:32.867011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:32.867042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:32.867078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:32.867115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:32.867150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:32.867174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:32.867196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:32.867238Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:32.867265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:32.867289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:05:32.867315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:05:32.867766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:32.867828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:32.867859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:32.867887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:05:32.867914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:32.868304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:32.868360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:32.868379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:32.868407Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:32.868426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:32.868478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:32.868795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:32.868862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:32.868933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:32.869050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:32.869109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:32.869164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:32.871765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:32.871944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:32.874306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:32.874392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:05:32.874620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:32.874656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:05:32.875068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:32.875147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.875180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2328] TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:32.875657Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.875864Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 172us result status StatusPathDoesNotExist 2025-04-17T18:05:32.876044Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:32.876576Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.876745Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess 2025-04-17T18:05:32.877151Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::Restart

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T18:05:32.268551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:05:32.268613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:32.268652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:05:32.268679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:05:32.268725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:05:32.268749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:05:32.268789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:05:32.268859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:05:32.269104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:05:32.341559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:05:32.341609Z node 1
:IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:32.354196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:32.354572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:32.354749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:32.363416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:32.363629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:32.364355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.364547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:32.366665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.367884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:32.367930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.368096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:32.368136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:32.368167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:32.368274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.375650Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:32.501621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:32.501905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.502129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:32.502388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:32.502440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.504942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.505123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:32.505306Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.505367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:32.505404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:32.505445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:32.507461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.507516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:32.507553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:32.509177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.509232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.509276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.509339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.512808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:32.514632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:32.514842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:32.515870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:32.516013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.516070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.516340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:32.516395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:32.516549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:32.516646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:32.518609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:32.518650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:32.518838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:32.518878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:32.519258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.519304Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:32.519400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:32.519435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.519489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:32.519523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.519560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:32.519599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:32.519632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:32.519661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:32.519724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:32.519763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:32.519799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:32.521688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:32.521803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:32.521833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:32.964829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.965228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.965507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.965549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:05:32.965649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:32.965694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:32.965751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:32.965780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:32.965814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:05:32.965926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:627:2560] message: TxId: 102 2025-04-17T18:05:32.966025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:32.966071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:05:32.966101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:05:32.966248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:32.967953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:32.968002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:628:2561] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2025-04-17T18:05:32.971149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:32.971377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:05:32.971515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T18:05:32.973590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-04-17T18:05:32.973796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-04-17T18:05:32.974448Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.974627Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 180us result status StatusSuccess 2025-04-17T18:05:32.974975Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.975561Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.975746Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 169us result status StatusSuccess 2025-04-17T18:05:32.976130Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 
1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.976890Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.977089Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 155us result status StatusSuccess 2025-04-17T18:05:32.977395Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:32.978046Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:32.978231Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 185us result status StatusSuccess 2025-04-17T18:05:32.978543Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { 
Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.698600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.698664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.698689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.698727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.698760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.698778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.698838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.698897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.699146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.774408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.774453Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:31.791002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-17T18:05:31.791346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.791502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.800355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.800529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.801126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.801314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.803829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.805167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.805237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.805418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.805470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.805509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.805624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.811775Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.936810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.937040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.937248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.937464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.937516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.939674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.939813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.940011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.940069Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.940100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.940168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.941955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.942029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.942069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.943722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.943765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.943803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.943898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.947679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.949394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.949593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.950577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.950701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.950757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.951023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.951074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.951233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.951361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.953231Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.953279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.953448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.953488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.953883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.953949Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.954033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.954082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.954124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.954152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.954202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.954240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.954275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.954302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.954355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.954407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.954446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.956187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.956284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.956317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-04-17T18:05:33.117390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:33.117415Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:33.117440Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:05:33.117465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:33.118263Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:33.118331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:33.118353Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:33.118376Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:33.118410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:33.118466Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:05:33.118746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:33.118785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:33.118822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:33.118845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:33.120313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:05:33.120846Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:33.121834Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T18:05:33.122312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.122567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-17T18:05:33.123661Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:33.126238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:33.126619Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:33.126968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:33.127123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-04-17T18:05:33.127429Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-17T18:05:33.127525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:33.127644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-04-17T18:05:33.128967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:33.129027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:33.129090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:33.129377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:33.129426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:33.129525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409549 2025-04-17T18:05:33.130197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:05:33.130285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:05:33.134867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:33.134919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:33.135209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:33.135236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:33.135828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:2 2025-04-17T18:05:33.135861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:33.136344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:33.136396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:33.136582Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:33.136686Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:33.136744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:33.136782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:33.136853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:33.138206Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:05:33.138480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:05:33.138521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:05:33.138959Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:05:33.139048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:05:33.139081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:653:2605] TestWaitNotification: OK eventTxId 105 2025-04-17T18:05:33.139568Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:33.139762Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 219us result status StatusPathDoesNotExist 2025-04-17T18:05:33.139918Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:33.140467Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:33.140617Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 158us result status StatusPathDoesNotExist 2025-04-17T18:05:33.140731Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAdd >> TSchemeShardSubDomainTest::SetSchemeLimits >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> DataShardTxOrder::RandomPointsAndRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DmlNoTx [GOOD] Test command err: Trying to start YDB, gRPC: 21605, MsgBus: 2521 2025-04-17T18:05:09.046938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344739256607654:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:09.047027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b27/r3tmp/tmpOloDyM/pdisk_1.dat 2025-04-17T18:05:09.454877Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21605, node 1 2025-04-17T18:05:09.460704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:09.460829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:09.462739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:09.496694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:09.496714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:09.496739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:09.496875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2521 TClient is connected to server localhost:2521 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:09.966457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:09.983292Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:09.993427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.140818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.299813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.384175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:11.984423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344747846544005:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:11.984543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.371830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.440117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.472041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.499085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.566731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.602882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.675573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:12.744576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344752141511977:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.744638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.744904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344752141511982:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.748470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:12.758139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344752141511984:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:12.833172Z node 1 :TX_PROXY ERROR: Actor# [1:7494344752141512039:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:14.047148Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344739256607654:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:14.047254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62908, MsgBus: 29887 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b27/r3tmp/tmprg0xEa/pdisk_1.dat 2025-04-17T18:05:14.894088Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:14.922674Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:14.933468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.933539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:14.938216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62908, node 2 2025-04-17T18:05:14.976463Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:14.976481Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:14.976487Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:14.976580Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29887 TClient is connected to server localhost:29887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:15.491048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:15.498992Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:15.520166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.589519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:15.753130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 720575 ... ] ActorId: [3:7494344803729253796:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.500849Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.501047Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344803729253801:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.503962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:24.513164Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344803729253803:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:24.579827Z node 3 :TX_PROXY ERROR: Actor# [3:7494344803729253857:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:25.525627Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344786549382182:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:25.525703Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:25.740695Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494344808024221511:2520], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: test_ast_column 2025-04-17T18:05:25.742467Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MWU1ZDc2MjItODM2ZDE5ZWQtYTI1NzIxNTQtODNiYjc0ZmU=, ActorId: [3:7494344808024221509:2519], ActorState: ExecuteState, TraceId: 01js2etm995gdn0z0h1z9pejda, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:05:25.826723Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344808024221547:2524], TxId: 281474976715673, task: 1. Ctx: { TraceId : 01js2etmbn3p986xrq6jerdeqf. SessionId : ydb://session/3?node_id=3&id=Y2M4YjU3ZjgtYWI1NzZjNDEtZjRhZDkxNjEtZDFlMzA4ZDk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional }. 2025-04-17T18:05:25.828353Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2M4YjU3ZjgtYWI1NzZjNDEtZjRhZDkxNjEtZDFlMzA4ZDk=, ActorId: [3:7494344808024221540:2524], ActorState: ExecuteState, TraceId: 01js2etmbn3p986xrq6jerdeqf, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8730, MsgBus: 22826 2025-04-17T18:05:26.796301Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494344813755381133:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:26.796357Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b27/r3tmp/tmpyrbQmE/pdisk_1.dat 2025-04-17T18:05:26.897030Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8730, node 4 2025-04-17T18:05:26.946737Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:26.946820Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:26.948923Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:26.986688Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:26.986710Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:26.986716Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:26.986820Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22826 TClient is connected to server localhost:22826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:27.459359Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:27.468022Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:27.480810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:27.569831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:27.820935Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:27.895781Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:30.233742Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344830935252070:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.233821Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.290265Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.326346Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.375260Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.410918Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.480712Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.524729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.595639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.656024Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344830935252740:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.656105Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.656247Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344830935252745:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.659565Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:30.669770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344830935252747:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:30.765044Z node 4 :TX_PROXY ERROR: Actor# [4:7494344830935252805:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:31.810865Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344813755381133:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:31.821200Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] Test command err: Trying to start YDB, gRPC: 7003, MsgBus: 29068 2025-04-17T18:04:45.710265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344634582157310:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:45.710319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bdb/r3tmp/tmpWUJ0QO/pdisk_1.dat 2025-04-17T18:04:46.208677Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:46.231351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:46.231451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:46.233593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7003, node 1 2025-04-17T18:04:46.458759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:46.458784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:46.458801Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:46.458920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29068 TClient is connected to server localhost:29068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:04:47.065448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:47.084158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:47.104619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:47.280433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:47.446208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:04:47.528914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:49.192731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344651762028257:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.192841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.556057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.584510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.612069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.659865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.693475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.729347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.786216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.886341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344651762028922:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.886422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.886605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344651762028927:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.891579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:49.908825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344651762028929:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:50.011065Z node 1 :TX_PROXY ERROR: Actor# [1:7494344656056996283:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:50.710323Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344634582157310:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:50.710405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61602, MsgBus: 8966 2025-04-17T18:04:52.003364Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344661147006100:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:52.003767Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bdb/r3tmp/tmpBoUQvi/pdisk_1.dat 2025-04-17T18:04:52.145771Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:52.172241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:52.172325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:52.176185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61602, node 2 2025-04-17T18:04:52.244138Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:52.244166Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:52.244177Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:52.244286Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8966 TClient is connected to server localhost:8966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:04:52.655676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:52.663274Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:52.672106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:52.744156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T1 ... idx:1802|>,<|idx:1803|>,<|idx:1804|>,<|idx:1805|>,<|idx:1806|>,<|idx:1807|>,<|idx:1808|>,<|idx:1809|>,<|idx:1810|>,<|idx:1811|>,<|idx:1812|>,<|idx:1813|>,<|idx:1814|>,<|idx:1815|>,<|idx:1816|>,<|idx:1817|>,<|idx:1818|>,<|idx:1819|>,<|idx:1820|>,<|idx:1821|>,<|idx:1822|>,<|idx:1823|>,<|idx:1824|>,<|idx:1825|>,<|idx:1826|>,<|idx:1827|>,<|idx:1828|>,<|idx:1829|>,<|idx:1830|>,<|idx:1831|>,<|idx:1832|>,<|idx:1833|>,<|idx:1834|>,<|idx:1835|>,<|idx:1836|>,<|idx:1837|>,<|idx:1838|>,<|idx:1839|>,<|idx:1840|>,<|idx:1841|>,<|idx:1842|>,<|idx:1843|>,<|idx:1844|>,<|idx:1845|>,<|idx:1846|>,<|idx:1847|>,<|idx:1848|>,<|idx:1849|>,<|idx:1850|>,<|idx:1851|>,<|idx:1852|>,<|idx:1853|>,<|idx:1854|>,<|idx:1855|>,<|idx:1856|>,<|idx:1857|>,<|idx:1858|>,<|idx:1859|>,<|idx:1860|>,<|idx:1861|>,<|idx:1862|>,<|idx:1863|>,<|idx:1864|>,<|idx:1865|>,<|idx:1866|>,<|idx:1867|>,<|idx:1868|>,<|idx:1869|>,<|idx:1870|>,<|idx:1871|>,<|idx:1872|>,<|idx:1873|>,<|idx:1874|>,<|idx:1875|>,<|idx:1876|>,<|idx:1877|>,<|idx:1878|>,<|idx:1879|>,<|idx:1880|>,<|idx:1881|>,<|idx:1882|>,<|idx:1883|>,<|idx:1884|>,<|idx:1885|>,<|idx:1886|>,<|idx:1887|>,<|idx:1888|>,<|idx:1889|>,<|idx:1890|>,<|idx:1891|>,<|idx:1892|>,<|idx:1893|>,<|idx:1894|>,<|idx:1895|>,<|idx:1896|>,<|idx:1897|>,<|idx:1898|>,<|idx:1899|>,<|idx:1900|>,<|idx:1901|>,<|idx:1902|>,<|idx:1903|>,<|idx:1904|>,<|idx:1905|>,<|idx:1906|>,<|idx:1907|>,<|idx:1908|>,<|idx:1909|>,<|idx:1910|>,<|idx:1911|>,<|idx:1912|>,<|idx:1913|>,<|idx:1914|>,<|idx:1915|>,<|idx:1916|>,<|idx:1917|>,<|idx:1918|>,<|idx:1919|>,<|idx:1920|>,<|idx:1921|>,<|idx:1922|>,<|idx:1923|>,<|idx:1924|>,<|idx:1925|>,<|idx:1926|>,<|idx:1927|>,<|idx:1928|>,<|idx:1929|>,<|idx:1930|>,<|idx:1931|>,<|idx:1932|>,<|idx:1933|>,<|idx:1934|>,<|idx:1935|>,<|idx:1936|>,<|idx:1937|>,<|idx:1938|>,<|idx:1939|>,<|idx:1940|>,<|idx:1941|>,<|idx:1942|>,<|idx:1943|>,<|idx:1944|>,<|idx:1945|>,<|idx:1946|>,<|idx:1947|>,<|idx:1948|>,<|idx:1949|>,<|idx:1950|>,<|idx:1951|>,<|idx:1952|>,<|idx:1953|>,<|idx:1954|>,<|idx:1955|>,<|idx:1956|>,<|idx:1957|>,<|idx:1958|>,<|idx:1959|>,<|idx:1960|>,<|idx:1961|>,<|idx:1962|>,<|idx:1963|>,<|idx:1964|>,<|idx:1965|>,<|idx:1966|>,<|idx:1967|>,<|idx:1968|>,<|idx:1969|>,<|idx:1970|>,<|idx:1971|>,<|idx:1972|>,<|idx:1973|>,<|idx:1974|>,<|idx:1975|>,<|idx:1976|>,<|idx:1977|>,<|idx:1978|>,<|idx:1979|>,<|idx:1980|>,<|idx:1981|>,<|idx:1982|>,<|idx:1983|>,<|idx:1984|>,<|idx:1985|>,<|idx:1986|>,<|idx:1987|>,<|idx:1988|>,<|idx:1989|>,<|idx:1990|>,<|idx:1991|>,<|idx:1992|>,<|idx:1993|>,<|idx:1994|>,<|idx:1995|>,<|idx:1996|>,<|idx:1997|>,<|idx:1998|>,<|idx:1999|>]);", parameters: 0b 2025-04-17T18:05:19.547883Z node 2 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913119523, txId: 281474976711185] shutting down Trying to start YDB, gRPC: 5387, MsgBus: 5916 2025-04-17T18:05:20.838239Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344785948434804:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bdb/r3tmp/tmpM0AoBP/pdisk_1.dat 2025-04-17T18:05:20.881573Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:21.001044Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:21.026363Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:21.026483Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:21.029197Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5387, node 3 2025-04-17T18:05:21.134206Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:21.134229Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:21.134242Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:21.134384Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5916 TClient is connected to server localhost:5916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:21.751547Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.761263Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:21.784458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:21.884449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:22.141680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:22.231055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:24.916624Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344803128305559:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.916742Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.969189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.017790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.055186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.094431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.127160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.203412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.238937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:25.308513Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344807423273520:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:25.308582Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:25.308786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344807423273525:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:25.312796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:25.323957Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344807423273527:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:25.404738Z node 3 :TX_PROXY ERROR: Actor# [3:7494344807423273584:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:25.826093Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344785948434804:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:25.826160Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:26.955375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:26.956694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:26.957570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.944769Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913131983, txId: 281474976710759] shutting down >> TSchemeShardSubDomainTest::RmDir >> TSchemeShardSubDomainTest::LS [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: Trying to start YDB, gRPC: 2305, MsgBus: 24903 2025-04-17T18:05:05.727899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344720611165585:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:05.728569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6e/r3tmp/tmpI6JSfG/pdisk_1.dat 2025-04-17T18:05:06.190053Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:06.210665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:06.210786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:06.212792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2305, node 1 2025-04-17T18:05:06.278669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:06.278699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:06.278704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:06.278809Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24903 TClient is connected to server localhost:24903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:06.834883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.863707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:07.020552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:07.196849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:07.268247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:08.912647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344733496069211:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.912751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.303393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.333842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.373761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.405403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.438801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.488251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.563157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.616801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344737791037177:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.616882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.617042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344737791037182:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.621015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:09.632377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344737791037184:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:09.714588Z node 1 :TX_PROXY ERROR: Actor# [1:7494344737791037239:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.723498Z node 1 :TX_PROXY ERROR: Actor# [1:7494344742086004895:3797] txid# 281474976710673, issues: { message: "User already exists" severity: 1 } 2025-04-17T18:05:10.727807Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344720611165585:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:10.727875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:10.733440Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzUwZjljYTgtMWRkM2MwMC1kMDkzYzFkMC00YzUwZmRmNQ==, ActorId: [1:7494344742086004889:2518], ActorState: ExecuteState, TraceId: 01js2et5nq8h1rx4jck372a6s2, Create QueryResponse for error on request, msg: 2025-04-17T18:05:10.814043Z node 1 :TX_PROXY ERROR: Actor# [1:7494344742086004962:3840] txid# 281474976710677, issues: { message: "User not found" severity: 1 } 2025-04-17T18:05:10.814327Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWZjOWViYTItOWY4NGJjMjAtYzMyODMyOTMtYTI3ZGY0MWE=, ActorId: [1:7494344742086004956:2528], ActorState: ExecuteState, TraceId: 01js2et5rg8hm548dhn7f53z54, Create QueryResponse for error on request, msg: 2025-04-17T18:05:10.837082Z node 1 :TX_PROXY ERROR: Actor# [1:7494344742086004978:3847] txid# 281474976710679, issues: { message: "User not found" severity: 1 } 2025-04-17T18:05:10.837909Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODI0MzcxYWItM2RiMGI1ODYtMzQ3MjRlNzktMjQxMGY2MzA=, ActorId: [1:7494344742086004972:2531], ActorState: ExecuteState, TraceId: 01js2et5s773q1tzpcb080k1jh, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5706, MsgBus: 23932 2025-04-17T18:05:11.650629Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344746298823983:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:11.652146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b6e/r3tmp/tmpCtWrWh/pdisk_1.dat 2025-04-17T18:05:11.732850Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5706, node 2 2025-04-17T18:05:11.777497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:11.777586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:11.779234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:11.817646Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:11.817670Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:05:11.817679Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:11.817782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23932 TClient is connected to server localhost:23932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 720575940 ... 6224037922 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:05:29.299660Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344825609978754:2527], Table: `/Root/TestDdlDml2` ([72057594046644480:17:1]), SessionActorId: [4:7494344825609978704:2527]Got LOCKS BROKEN for table `/Root/TestDdlDml2`. ShardID=72075186224037922, Sink=[4:7494344825609978754:2527].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:05:29.300170Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494344825609978747:2527], SessionActorId: [4:7494344825609978704:2527], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestDdlDml2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7494344825609978704:2527]. isRollback=0 2025-04-17T18:05:29.300412Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjIyOGJkZDktYTgwYzQ3NzYtMjlhOTVjY2YtMWMxZjEzNDg=, ActorId: [4:7494344825609978704:2527], ActorState: ExecuteState, TraceId: 01js2etqmj4rz4pzhpmkstbfrm, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7494344825609978804:2527] from: [4:7494344825609978747:2527] 2025-04-17T18:05:29.300495Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494344825609978804:2527] TxId: 281474976710676. Ctx: { TraceId: 01js2etqmj4rz4pzhpmkstbfrm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjIyOGJkZDktYTgwYzQ3NzYtMjlhOTVjY2YtMWMxZjEzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestDdlDml2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:05:29.300642Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjIyOGJkZDktYTgwYzQ3NzYtMjlhOTVjY2YtMWMxZjEzNDg=, ActorId: [4:7494344825609978704:2527], ActorState: ExecuteState, TraceId: 01js2etqmj4rz4pzhpmkstbfrm, Create QueryResponse for error on request, msg: 2025-04-17T18:05:29.379093Z node 4 :KQP_COMPILE_SERVICE WARN: queryId in recompile request and queryId in cache are different, queryId in request: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, \"1\");\n SELECT * FROM TestDdlDml2;\n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, \"2\");\n SELECT * FROM TestDdlDml2;\n CREATE TABLE TestDdlDml33 (\n Key Uint64,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }}, queryId in cache: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1, Value2) VALUES (1, \"1\", \"1\");\n SELECT * FROM TestDdlDml2;\n ALTER TABLE TestDdlDml2 DROP COLUMN Value2;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-17T18:05:29.542633Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:05:29.782625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.046293Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494344829904946541:2606], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:11:17: Error: At function: KiReadTable!
:11:17: Error: Cannot find table 'db.[/Root/TestDdlDml5]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:30.046552Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTNlZTdlZjQtODA1YzczMmYtNzQyNmNkMjMtNTBmOTIxNWM=, ActorId: [4:7494344825609979068:2583], ActorState: ExecuteState, TraceId: 01js2etr8e6mb447hd0fzebywa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:05:30.113089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.252857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.640164Z node 4 :TX_PROXY ERROR: Actor# [4:7494344829904946850:4343] txid# 281474976710697, issues: { message: "Check failed: path: \'/Root/TestDdl1\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:30.640299Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710697, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-17T18:05:30.640420Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ODA0ZDhhZDYtNzA3MDNjMzQtN2U4NTdkNTEtZTY4ZGMyNzA=, ActorId: [4:7494344829904946835:2660], ActorState: ExecuteState, TraceId: 01js2ets3xbw287wmc7b0zrx9d, Create QueryResponse for error on request, msg: 2025-04-17T18:05:30.667827Z node 4 :TX_PROXY ERROR: Actor# [4:7494344829904946876:4356] txid# 281474976710699, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 21], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:30.668054Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710699, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 21], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-17T18:05:30.668214Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGE3YTE2MmItYzM4NmFlMWQtYzhhNjQ2ZDktYTkyYjgwOTg=, ActorId: [4:7494344829904946862:2667], ActorState: ExecuteState, TraceId: 01js2ets4qf6jmpd8rvm0zjkfq, Create QueryResponse for error on request, msg: 2025-04-17T18:05:31.013962Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.128589Z node 4 :TX_PROXY ERROR: Actor# [4:7494344834199914357:4458] txid# 281474976710704, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 21], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:31.128664Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710704, ProxyStatus: 
ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 21], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-17T18:05:31.128815Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjllZDQxNTgtYWQxN2M5YmUtZDU1ZmUwZjgtMzE0ZjYwYjk=, ActorId: [4:7494344829904946953:2693], ActorState: ExecuteState, TraceId: 01js2etsd2asra886rp1s2aqd4, Create QueryResponse for error on request, msg: 2025-04-17T18:05:31.251644Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494344834199914412:2729], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:05:31.251844Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTcxNjNiOTQtZDE4YmExMTctNzQxMzBiNWMtZmIwZTllN2Q=, ActorId: [4:7494344834199914409:2727], ActorState: ExecuteState, TraceId: 01js2etsq32pdzdsvsgt9kk1z3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:05:31.413521Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.946826Z node 4 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [4:7494344834199914652:2779], owner: [4:7494344817020042907:2396], statement id: 1 2025-04-17T18:05:31.947232Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGNmN2NmNDYtMmU3ZTFkZmYtZDc2ZmFlZWUtOTg3NmIwYmM=, ActorId: [4:7494344834199914650:2778], ActorState: ExecuteState, TraceId: 01js2ettd1e5t1xvr9z1jva7k8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:05:32.138435Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494344838494881995:2795], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:05:32.138719Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YmFkNzg1N2UtNWQ5ZDdlYmQtYWQ0MTVmNDAtOTQ0NmU3NTY=, ActorId: [4:7494344838494881977:2789], ActorState: ExecuteState, TraceId: 01js2ettgnd76jqz8mxe11pfyw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:05:32.201354Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-04-17T18:05:32.305740Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7494344838494882112:2817], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:05:32.306034Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjFkNGZjOTEtZTc0ZTYwOTMtMmViNTMxNjEtYzA1ODVkNDk=, ActorId: [4:7494344838494882022:2804], ActorState: ExecuteState, TraceId: 01js2ettm9badc8ap9r8zb60kk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:33.525134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:33.525219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.525254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:33.525285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:33.525337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:33.525363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:33.525415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.525484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:33.525758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:33.613504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:33.613554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:33.631830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:33.632200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:33.632350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:33.639268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:33.639460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:33.640085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.640257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:33.642722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-04-17T18:05:33.643895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.643944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.644114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:33.644232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.644275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:33.644389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.650265Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:33.778916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:33.779142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.779332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:33.779571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:33.779618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.781582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.781779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:33.781940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.781997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:33.782030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:33.782064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:33.783749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.783797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:33.783830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:33.785297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T18:05:33.785339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.785372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.785436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.789293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:33.791726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:33.791907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:33.792799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.792925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.792983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.793248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:33.793300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.793447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:33.793545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:33.796059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.796105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.796271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.796312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:33.796656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.796710Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-04-17T18:05:33.796802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:33.796841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.796895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:33.796924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.796961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:33.797000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.797031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:33.797058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:33.797116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:33.797150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:33.797183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:33.798990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:33.799095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:33.799127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.855410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:33.855644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T18:05:33.855687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:33.855826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:33.855869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:33.855924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:33.857472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.857507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:33.857612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:33.857695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.857729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:05:33.857775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:33.858147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.858206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:33.858279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:33.858302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.858329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:33.858352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.858378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:33.858405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.858430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:33.858452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:33.858497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:33.858523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:33.858549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T18:05:33.858617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:05:33.859076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.859158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.859185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:33.859211Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:05:33.859237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:33.859785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.859867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.859893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:33.859918Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:05:33.859941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:33.859999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:33.862490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:33.862573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-17T18:05:33.862765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:33.862814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-17T18:05:33.862928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:33.862953Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:05:33.863386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:33.863657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:33.863712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:33.863741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:331:2322] 2025-04-17T18:05:33.863888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:33.863912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:33.864316Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:33.864497Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 197us result status StatusSuccess 2025-04-17T18:05:33.864844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.865292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:33.865438Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 156us result status StatusSuccess 2025-04-17T18:05:33.865716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:33.471350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:33.471427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.471463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:33.471496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:33.471563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:33.471593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:33.471652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.471729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:33.472199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:33.549062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:33.549107Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T18:05:33.562257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:33.562682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:33.562848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:33.569625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:33.569828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:33.570475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.570654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:33.573049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.574276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.574330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.574530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:33.574577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.574615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:33.574719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.582123Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:33.697587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:33.697823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.698049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:33.698288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:33.698342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.700533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.700738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:33.700922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.700984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:33.701019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:33.701050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:33.702876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.702948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:33.702985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:33.705843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.705887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.705936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.705998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.709261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:33.711070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:33.711262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:33.712107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.712275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.712326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.712572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:33.712617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.712753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:33.712866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:33.715049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.715119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.715282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.715321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:33.715674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.715718Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:33.715803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:33.715833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.715887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:33.715917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.715953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:33.715986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.716015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:33.716040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:33.716100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:33.716149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:33.716178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:33.717807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:33.717915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:33.717948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:33.775651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.775686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:33.775828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:33.775909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.775939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:05:33.776003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-17T18:05:33.776309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.776352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:33.776464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:33.776509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.776545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:33.776573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.776609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:33.776645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.776678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:33.776706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:33.776768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:33.776806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:33.776988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T18:05:33.777035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:05:33.777621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.777687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.777723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, 
txId: 101 2025-04-17T18:05:33.777765Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:05:33.777847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:33.778512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.778584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.778608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:33.778637Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:05:33.778662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:33.778722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:33.782460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:33.782788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:05:33.783002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:33.783040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:05:33.783463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:33.783565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:33.783611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:335:2326] TestWaitNotification: OK eventTxId 101 2025-04-17T18:05:33.784084Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:33.784255Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 183us result status StatusSuccess 2025-04-17T18:05:33.784680Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.785128Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:33.785278Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 126us result status StatusSuccess 2025-04-17T18:05:33.785510Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.786024Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:33.786169Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 104us result status StatusSuccess 2025-04-17T18:05:33.786375Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:33.606201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:33.606287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.606324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:33.606358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:33.606424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:33.606455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:33.606514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.606591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:33.606898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:33.677508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:33.677588Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-17T18:05:33.693132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:33.693562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:33.693750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:33.702476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:33.702711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:33.703372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.703560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:33.706292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.707712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.707765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.707969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:33.708017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.708053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:33.708160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.715955Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:33.843392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:33.843567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.843724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:33.843922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:33.843979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.846056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.846207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:33.846389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.846469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:33.846515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:33.846553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:33.848253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.848293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:33.848318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:33.849404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.849438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.849465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.849504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.859301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:33.864210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:33.864433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:33.865396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.865536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.865594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.865863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:33.865909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:33.866046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:33.866156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:33.868205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.868253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.868410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.868449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:33.868840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.868890Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:33.868983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:33.869016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.869068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:33.869119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.869152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:33.869194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:33.869224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:33.869254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:33.869314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:33.869350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:33.869380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:33.871162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:33.871248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:33.871275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T18:05:33.922677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-17T18:05:33.922940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.922974Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:33.923022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.923127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:33.923758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.923844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.923882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:33.923930Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T18:05:33.923969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:33.924814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.924897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.924936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:33.924959Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T18:05:33.924982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:33.925041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T18:05:33.933645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-17T18:05:33.933811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at 
step: 5000003 2025-04-17T18:05:33.934279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.934416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.934488Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:05:33.934626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T18:05:33.934807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:33.934884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:33.935922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:33.937331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:33.940397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.940436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:33.940567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:33.940651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.940760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:05:33.940806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-17T18:05:33.941054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.941091Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:33.941185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:33.941218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.941255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:33.941282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.941323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:33.941371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:33.941404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2025-04-17T18:05:33.941446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:33.941525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:33.941580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:33.941632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T18:05:33.941674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:05:33.942281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.942363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.942391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:33.942430Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:05:33.942479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:33.942954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.943052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:33.943099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:33.943125Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:05:33.943150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:33.943220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:33.946747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:33.946864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:05:33.950085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:33.951088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.951221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2025-04-17T18:05:33.951369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2025-04-17T18:05:33.953479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:33.953714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] Test command err: Trying to start YDB, gRPC: 6643, MsgBus: 12422 2025-04-17T18:05:11.339307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:05:11.339806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:11.339862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b45/r3tmp/tmpmF7lme/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6643, node 1 2025-04-17T18:05:11.801347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:11.805508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:11.805577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:11.805613Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:11.806092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:05:11.844016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:11.844174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:11.855642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12422 TClient is connected to server localhost:12422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:12.178423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:12.264976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:12.629512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:13.014367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:13.314032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:14.145495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1813:3409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:14.145708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:14.173449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:14.382355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:14.668858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:14.977759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:15.256907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:15.634662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:15.943427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.308544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2561:3980], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.308663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.309028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2566:3985], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:16.313834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:16.468506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2568:3987], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:16.536456Z node 1 :TX_PROXY ERROR: Actor# [1:2635:4035] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10327, MsgBus: 7655 2025-04-17T18:05:18.683718Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344777884357141:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:18.684239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b45/r3tmp/tmpQs7rHm/pdisk_1.dat 2025-04-17T18:05:18.924868Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:18.937505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:18.937599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:18.939209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10327, node 2 2025-04-17T18:05:19.022052Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:19.022077Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:19.022088Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:19.022222Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7655 TClient is connected to server localhost:7655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:19.597151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:19.616980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:19.708161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:19.872780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:19.954031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:22.151290Z node 2 :KQP_ ... ndo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.302714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.334926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.412894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.459040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.531749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.604063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344795064228761:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.604172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.604426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344795064228766:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.609069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:22.619526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344795064228768:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:22.696988Z node 2 :TX_PROXY ERROR: Actor# [2:7494344795064228825:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:23.685770Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344777884357141:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:23.698037Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:23.767445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 120 Trying to start YDB, gRPC: 1074, MsgBus: 10513 2025-04-17T18:05:27.844190Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344816592786230:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:27.844231Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b45/r3tmp/tmp2NRY7w/pdisk_1.dat 2025-04-17T18:05:28.017926Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:28.018919Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:28.019000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:28.030565Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1074, node 3 2025-04-17T18:05:28.094732Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:28.094756Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:28.094762Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:28.094906Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10513 TClient is connected to server localhost:10513 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:28.640931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:28.650398Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:05:28.655519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:28.743847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:28.916288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:28.992407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:31.218260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344833772657172:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:31.218329Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:31.274065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.346243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.375740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.405941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.434034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.467458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.507184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.594495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344833772657835:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:31.594599Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:31.594925Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344833772657840:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:31.598351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:31.614048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344833772657842:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:31.714853Z node 3 :TX_PROXY ERROR: Actor# [3:7494344833772657900:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:32.844661Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344816592786230:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:32.844747Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::Restart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.549889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.549981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.550020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.550058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.550121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.550152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.550248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.550334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.550658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:30.631394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.631436Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.647473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.647842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.648024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.658625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-17T18:05:30.658811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.659440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.659617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.662700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.663984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.664044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.664229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.664277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.664330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.664449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.671339Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:30.794052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:30.794308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.794521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:30.794744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:30.794799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.796927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.797073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:30.797246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.797310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:30.797348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:30.797384Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:30.802326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.802392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:30.802431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:30.805074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.805134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.805178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.805239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.815248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:30.817201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:30.817399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:30.818195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.818297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:30.818343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.818578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:30.818623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:30.818738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:30.818833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:30.820490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.820535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-17T18:05:30.820700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.820730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:30.821043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.821077Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:30.821146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.821169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.821212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:30.821235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.821264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:30.821292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:30.821318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:30.821340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:30.821416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:30.821441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:30.821464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:30.822844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.822942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:30.822979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
69699 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-17T18:05:34.099880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-17T18:05:34.099982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 4294969699 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-17T18:05:34.100019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-17T18:05:34.100433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.100492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-17T18:05:34.100545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:05:34.100573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-17T18:05:34.100647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-17T18:05:34.100752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-17T18:05:34.100874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:34.100941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:34.103568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.104064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.104259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.104311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:34.104512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:34.104643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.104673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T18:05:34.104713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-17T18:05:34.105033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.105099Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:05:34.105193Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.105225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T18:05:34.105264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-17T18:05:34.106218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:34.106322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:34.106364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:05:34.106402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-17T18:05:34.106444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:34.107415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:34.107505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:34.107538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:05:34.107579Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:05:34.107607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:05:34.107660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T18:05:34.110149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.110224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.110574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:34.110759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:05:34.110808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:34.110843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:05:34.110881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:34.110914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T18:05:34.110974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:409:2376] message: TxId: 103 2025-04-17T18:05:34.111007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:34.111041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:05:34.111074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:05:34.111160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:34.111728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.111786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:34.112064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:34.112849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:34.113973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.114017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-17T18:05:34.114089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.114133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:740:2675] 2025-04-17T18:05:34.114862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-17T18:05:34.116224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.116453Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 227us result status StatusSuccess 2025-04-17T18:05:34.116909Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 
1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:33.771955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:33.772035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.772079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:33.772114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:33.772183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:33.772218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:33.772283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.772367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:33.772692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:33.855193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:33.855247Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:33.871056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:33.871516Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:33.871682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:33.880042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:33.880266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:33.880931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.881134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:33.884099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.885478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.885539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.885772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:33.885829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.885870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:33.886013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.893167Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.032140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.032390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.032611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.032841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.032946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.035161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.035293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.035469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.035536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.035574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.035612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.037905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.037987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.038032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.039579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.039624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.039669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.039723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.044056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.045942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.046147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.047246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.047377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.047439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.047725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.047782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.047947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.048076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.050314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T18:05:34.050358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.050571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.050614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.051148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.051209Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.051320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.051371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.051433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.051466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.051504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.051544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.051581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.051613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.051674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.051713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.051751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.053612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.053721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.053761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
trongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T18:05:34.174811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T18:05:34.175109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.175219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.175259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:34.175569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T18:05:34.175620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:34.175762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.175813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:34.175861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:34.177580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.177626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.177750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:34.177847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.177935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T18:05:34.178001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T18:05:34.178312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.178352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T18:05:34.178451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T18:05:34.178485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:34.178525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#100:0 progress is 1/1 2025-04-17T18:05:34.178558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:34.178596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T18:05:34.178638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:34.178678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T18:05:34.178721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T18:05:34.178911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T18:05:34.178957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-17T18:05:34.178990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:05:34.179028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:34.179674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.179751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.179791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:34.179843Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:34.179887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.180464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.180526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.180550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:34.180584Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:34.180632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:34.180695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-17T18:05:34.183870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:34.183955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-17T18:05:34.184154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:34.184219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-17T18:05:34.184605Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:34.184686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.184721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:453:2407] TestWaitNotification: OK eventTxId 100 2025-04-17T18:05:34.185127Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.185295Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 182us result status StatusSuccess 2025-04-17T18:05:34.185698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.186606Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.186750Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 558us result status StatusSuccess 2025-04-17T18:05:34.187083Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:30.863980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:30.864056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.864086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:30.864118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:30.864173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:30.864200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:30.864254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:30.864329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:30.864639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-17T18:05:30.944115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:30.944160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:30.959209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:30.959571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:30.959761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:30.967456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:30.967651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:30.968215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:30.968429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:30.970909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.972273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:30.972329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:30.972527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:30.972577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:30.972615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:30.972736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:30.978793Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.090605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.090942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.091167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.091379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.091432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.093641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.093807Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.093984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.094091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.094130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.094160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.095805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.095856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.095885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.097504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.097546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.097584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.097638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.101091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.102691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.102876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.103557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.103663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.103702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.103876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.103909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.104018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-17T18:05:31.104095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.105598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.105638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.105770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.105804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.106089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.106123Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.106195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.106224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.106271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.106301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.106333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.106365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.106393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.106422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.106465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.106490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.106511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.107952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.108051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.108087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
5730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2025-04-17T18:05:34.035752Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2025-04-17T18:05:34.035772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2025-04-17T18:05:34.036519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-17T18:05:34.036573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-17T18:05:34.036593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2025-04-17T18:05:34.036611Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:34.036631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:34.036684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2025-04-17T18:05:34.037698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:34.037734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:34.037753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:34.037771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:34.037789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:34.038695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-17T18:05:34.039644Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:34.040497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.040749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:34.040953Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 2025-04-17T18:05:34.041884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 
72057594046678944 2025-04-17T18:05:34.042060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409556 2025-04-17T18:05:34.043059Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 2025-04-17T18:05:34.043553Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:05:34.044980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-17T18:05:34.045166Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 2025-04-17T18:05:34.045645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-04-17T18:05:34.045816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 Forgetting tablet 72075186233409555 2025-04-17T18:05:34.046418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:34.046571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2025-04-17T18:05:34.048101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-04-17T18:05:34.048258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-04-17T18:05:34.048632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-17T18:05:34.048754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:34.048794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-17T18:05:34.048850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:34.049030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:34.049067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:34.049165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:34.051140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:1 2025-04-17T18:05:34.051182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:34.051250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-17T18:05:34.051285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-04-17T18:05:34.051369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2025-04-17T18:05:34.051389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-04-17T18:05:34.053274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:34.053306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:34.053399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-17T18:05:34.053433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-04-17T18:05:34.053527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:34.053620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:34.053710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:34.053745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:34.053801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.055053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-04-17T18:05:34.055792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-04-17T18:05:34.055828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2025-04-17T18:05:34.056732Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-04-17T18:05:34.056810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.056843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2282:4054] TestWaitNotification: OK eventTxId 139 2025-04-17T18:05:34.058152Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.058351Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 228us result status StatusSuccess 2025-04-17T18:05:34.058648Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:33.951841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:33.951920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.951957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:33.951987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:33.952032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:33.952051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:33.952091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.952153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:33.952417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.014074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.014122Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T18:05:34.028542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.028930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.029085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.036794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.037000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.037623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.037815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.040400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.041683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.041731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.041911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.041956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.041995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.042116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.049140Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.198531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.198783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.199043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.199287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.199354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.203002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.203170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.203368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.203431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.203467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.203503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.205563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.205618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.205655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.207453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.207505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.207571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.207632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.218283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.220721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.220924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.221947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.222083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.222144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.222499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.222559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.222720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.222855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.224958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.225008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.225157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.225186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.225531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.225581Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.225682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.225719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.225797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.225831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.225869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.225914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.225948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.225974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.226039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.226074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.226099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.227479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.227584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.227615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:05:34.227672Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:05:34.227710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.227798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:05:34.230072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:05:34.230566Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-17T18:05:34.231237Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T18:05:34.249420Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T18:05:34.252067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.252342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.252419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.253198Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:05:34.258060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.258229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-04-17T18:05:34.258780Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-17T18:05:34.259083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:34.259123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-17T18:05:34.259575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:34.259653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.259688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:284:2275] TestWaitNotification: OK eventTxId 100 2025-04-17T18:05:34.260172Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.260344Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 186us result status StatusPathDoesNotExist 2025-04-17T18:05:34.260517Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: 
"Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> TSchemeShardSubDomainTest::RmDir [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:05:33.835675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:33.835736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.835763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:33.835786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:33.835840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:33.835860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:33.835896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:33.835976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:33.836181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:33.919737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:33.919777Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:33.936719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:33.936827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:33.937007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:33.945200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:33.945435Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:33.946039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:33.946384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:33.963324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.964706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:33.964772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:33.964888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:33.964939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:33.964984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:33.965170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:33.975036Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.134981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.135222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.135457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.135694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.135753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.138230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.138365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.138568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.138637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.138677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.138710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2025-04-17T18:05:34.140948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.141001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.141040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.142959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.143006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.143058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.143116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.147091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.148962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.149190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.150218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.150353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.150423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.150741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.150833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.151013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.151113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.153184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.153234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.153416Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.153460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.153844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.153895Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.154001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.154036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.154091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.154126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.154168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.154227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.154265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.154300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.154375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.154424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.154458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.156379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.156489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.156528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cookie: 103 2025-04-17T18:05:34.627963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:34.628064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.628191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.628401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.628435Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T18:05:34.628536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:05:34.628575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:34.628660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:05:34.628679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:34.628703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T18:05:34.628744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:495:2451] message: TxId: 103 2025-04-17T18:05:34.628767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:34.628788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:05:34.628803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:05:34.628899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:05:34.630512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.630555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:496:2452] TestWaitNotification: OK eventTxId 103 2025-04-17T18:05:34.631042Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.631245Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 197us result status StatusSuccess 2025-04-17T18:05:34.631700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.632330Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.632691Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 367us result status StatusSuccess 2025-04-17T18:05:34.633060Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.633674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.633821Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 148us result status StatusSuccess 2025-04-17T18:05:34.634126Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.634953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.635133Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 195us result status StatusSuccess 2025-04-17T18:05:34.635359Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:34.192246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.192331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.192373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.192405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.192469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.192497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.192556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.192632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.192950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.277559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.277617Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:34.293070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-17T18:05:34.293532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.293709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.302143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.302367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.303025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.303247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.306044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.307549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.307608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.307804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.307853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.307897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.308019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.314893Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.444152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.444412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.444635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.444924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.444982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.447363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.447565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.447759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.447823Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.447859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.447894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.449605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.449660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.449696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.451443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.451490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.451531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.451586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.455309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.457052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.457236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.458194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.458325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.458383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.458656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.458712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.458903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.459017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.461691Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.461752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.461946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.461990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.462397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.462450Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.462545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.462580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.462639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.462687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.462721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.462770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.462830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.462864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.462928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.462968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.463000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.464764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.464876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.464913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4046678944 2025-04-17T18:05:34.590137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:459:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:462:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:463:2058] recipient: [1:461:2413] Leader for TabletID 72057594046678944 is [1:464:2414] sender: [1:465:2058] recipient: [1:461:2413] 2025-04-17T18:05:34.628929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.629053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.629102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.629155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.629193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.629223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.629278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.629351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.629675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.646958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.648232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.648432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.648561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.648606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:34.648736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.649413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:34.649511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:34.649600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.649679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.649948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T18:05:34.650300Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.650387Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:05:34.650568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.650669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.650781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:34.650832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:34.650861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:34.650892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:34.651012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.651082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.651308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-17T18:05:34.651674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.651846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.652232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.652301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.652512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.652609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.652696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.652862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.652961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.653128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.653371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.653538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.653589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.653632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 
2025-04-17T18:05:34.665819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.665922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.666249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.666311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.666351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.666576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:464:2414] sender: [1:522:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.719709Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.719916Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusSuccess 2025-04-17T18:05:34.720346Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.720858Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.721036Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 154us result status StatusSuccess 2025-04-17T18:05:34.721352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:34.285859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.285935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.285972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.286011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.286061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.286088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.286138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.286227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.286526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.365735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.365774Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-04-17T18:05:34.378651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.379012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.379192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.388034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.388221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.388990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.389150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.391731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.392708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.392762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.392936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.392974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.393005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.393085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.399253Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.544957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.545174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.545360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.545618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.545684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.551366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.551508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.551722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.551818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.551882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.551922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.554244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.554315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.554355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.556120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.556176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.556214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.556276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.566400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.568410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.568616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.569481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.569618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.569685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.569977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.570037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.570214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.570314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.572253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.572293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.572454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.572497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.572787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.572834Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.572919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.572944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.572997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.573051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.573103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.573140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.573172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.573198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.573249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.573278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.573306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.575041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.575155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.575184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T18:05:34.787167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T18:05:34.787778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.787905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.787958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:34.788355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T18:05:34.788414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:34.788578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.788645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:34.788696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T18:05:34.790620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.790669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.790817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:34.790910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.790943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:336:2312], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T18:05:34.790996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:336:2312], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T18:05:34.791059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.791112Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T18:05:34.791213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T18:05:34.791249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:34.791286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#100:0 progress is 1/1 2025-04-17T18:05:34.791321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:34.791377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T18:05:34.791427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:34.791481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T18:05:34.791517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T18:05:34.791678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:34.791722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-17T18:05:34.791753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:05:34.791786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:34.792734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.792816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.792880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:34.792939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:34.792983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.793665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.793758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:34.793800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:34.793829Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:34.793880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:34.793947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-17T18:05:34.797587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:34.797946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-17T18:05:34.798248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-17T18:05:34.798302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-17T18:05:34.798724Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-17T18:05:34.798857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.798907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:482:2430] TestWaitNotification: OK eventTxId 100 2025-04-17T18:05:34.799407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.799754Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 329us result status StatusSuccess 2025-04-17T18:05:34.800132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.800599Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.800811Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 205us result status StatusSuccess 2025-04-17T18:05:34.801159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:34.404529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.404600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.404632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.404658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.404706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.404730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.404786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.404854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.405091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.485263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.485311Z node 1 :IMPORT WARN: Table profiles were
not loaded 2025-04-17T18:05:34.500903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.501340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.501513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.511188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.511396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.512070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.512263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.514704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.515782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.515825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.515973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.516009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.516045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.516133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.526460Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.681765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.681998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.682231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.682449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.682497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.684561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.684715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.684886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.684953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.684986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.685022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.686861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.686926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.686963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.688602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.688647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.688692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.688745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.692690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.694575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.694780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.695775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.695896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.695954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.696235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.696291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.696440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.696546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.698590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.698649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.698824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.698862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.699248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.699297Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.699395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.699427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.699486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.699533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.699579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.699616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.699648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.699678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.699758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.699796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.699826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.701498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.701608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.701642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-17T18:05:34.734771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.734911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.734962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:34.735135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T18:05:34.735190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-17T18:05:34.735337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.735391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:34.735444Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:34.736454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:34.736714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:34.737867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.737908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.738059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:34.738135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.738166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:05:34.738227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:05:34.738553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.738596Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:05:34.738690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:34.738726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:34.738762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:34.738807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:34.738845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:34.738884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:34.738918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:34.738949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:05:34.739026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:34.739068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:05:34.739101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:05:34.739142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:34.739714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:34.739800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:34.739835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:34.739874Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:34.739916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.740644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:34.740718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:34.740744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:34.740769Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:34.740793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:34.740853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:34.743293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:05:34.744238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:05:34.747314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.747530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-04-17T18:05:34.747575Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-04-17T18:05:34.747716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.747779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.749855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.750010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-17T18:05:34.750292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:05:34.750331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-17T18:05:34.750446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:05:34.750505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:05:34.750968Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:05:34.751071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.751105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:307:2298] 2025-04-17T18:05:34.751236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:05:34.751304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.751324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:34.236548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.236625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.236660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.236692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.236749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.236777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.236847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.236923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval#
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.237211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.318756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.318817Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:34.333726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.334140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.334334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.341383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.341530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.341961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.342103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.344074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.345058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.345097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.345232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.345266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.345295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.345375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.356094Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.478059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.478285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.478483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.478694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.478745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.480959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.481080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.481254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.481328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.481366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.481399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.483389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.483436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.483470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.485038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.485079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.485116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.485166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.488721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.490377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.490563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.491528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.491650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.491704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.491939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.491982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-17T18:05:34.492141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.492235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.494018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.494056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.494249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.494289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.494639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.494693Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.494777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.494834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.494889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.494919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.494951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.494993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.495032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.495055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.495110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.495143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.495173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.496906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.497004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.497050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6316545 cookie: 0:102 msg type: 269090816 2025-04-17T18:05:34.885188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004 2025-04-17T18:05:34.885539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.885658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.885707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-17T18:05:34.886041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-17T18:05:34.886117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-17T18:05:34.886301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.886362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-17T18:05:34.886406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:05:34.888522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.888572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.888734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:34.888838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.888872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T18:05:34.888912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-17T18:05:34.889171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.889216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:05:34.889317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:34.889359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:34.889397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:05:34.889429Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:34.889468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T18:05:34.889522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:05:34.889563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:05:34.889593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:05:34.889803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-04-17T18:05:34.889845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T18:05:34.889877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-17T18:05:34.889907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:05:34.890694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:34.890806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:34.890844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:05:34.890883Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T18:05:34.890919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.891582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:34.891642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:05:34.891671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:05:34.891699Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:05:34.891720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-17T18:05:34.891773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T18:05:34.894922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:05:34.895261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:05:34.895543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:05:34.895584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:05:34.896011Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:05:34.896100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:05:34.896134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:981:2805] TestWaitNotification: OK eventTxId 102 2025-04-17T18:05:34.896688Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.896902Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 236us result status StatusSuccess 2025-04-17T18:05:34.897249Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.897819Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:34.898026Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 208us result status StatusSuccess 2025-04-17T18:05:34.898411Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:34.693661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.693731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.693761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.693785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.693842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.693881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.693922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.693984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.694272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.760999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.761039Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:34.772553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.772915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.773065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.779616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.779760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.780311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.780526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.782765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.783869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.783919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.784084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.784138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.784177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.784294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.789726Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.901444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.901653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.901822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.902018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.902060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.904165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.904276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.904444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.904539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.904572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.904596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.906225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.906284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.906338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.907859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.907894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.907927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.907973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.911264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.912986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.913164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.914150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.914293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.914348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.914599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.914665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.914824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.914927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.916972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.917030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.917227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.917272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.917671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.917730Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.917822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.917879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.917944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.917989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.918033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.918071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.918110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.918140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.918224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.918272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.918311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.920256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.920380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.920419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:35.134486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-17T18:05:35.134605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:35.135774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-17T18:05:35.135868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-17T18:05:35.136128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:35.136201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:35.136234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:35.136515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-17T18:05:35.136566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-17T18:05:35.136718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:35.136773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-17T18:05:35.136812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-17T18:05:35.138091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:35.138130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:35.138275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:35.138380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:35.138417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-17T18:05:35.138474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-17T18:05:35.138745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.138782Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-17T18:05:35.138900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T18:05:35.138923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:35.138969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-17T18:05:35.139004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:35.139039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-17T18:05:35.139071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-17T18:05:35.139099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-17T18:05:35.139123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-17T18:05:35.139315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-17T18:05:35.139346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-17T18:05:35.139370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:05:35.139406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:05:35.139935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:35.140009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:35.140041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:35.140076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:05:35.140106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:35.140660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:35.140710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-17T18:05:35.140729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-17T18:05:35.140752Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:05:35.140779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-17T18:05:35.140833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-17T18:05:35.140862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:569:2479] 2025-04-17T18:05:35.143764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:35.146888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-17T18:05:35.147003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-17T18:05:35.147037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:570:2480] TestWaitNotification: OK eventTxId 100 2025-04-17T18:05:35.147629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:35.147879Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 263us result status StatusSuccess 2025-04-17T18:05:35.148360Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:05:35.150491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-04-17T18:05:35.150608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.150703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T18:05:35.153036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:35.153195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:31.339049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:31.339133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.339172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:31.339222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:31.339294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:31.339331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:31.339393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:31.339473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:31.339777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:31.422228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:31.422282Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-04-17T18:05:31.443024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:31.443374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:31.443527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:31.451323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:31.451525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:31.452154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.452341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:31.455175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.456202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.456249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.456455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:31.456510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.456551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:31.456675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.462535Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:31.593013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:31.593208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.593382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:31.593587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:31.593626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.595549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.595693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:31.595871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.595933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:31.595976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:31.596011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:31.598204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.598267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:31.598307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:31.600187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.600233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.600293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.600353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.604215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:31.606213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:31.606405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:31.607444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:31.607569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:31.607625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.607957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:31.608011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:31.608167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:31.608453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:31.610478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:31.610524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:31.610719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:31.610763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:31.611192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:31.611245Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:31.611340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.611377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.611438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:31.611474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.611519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:31.611559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:31.611595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:31.611626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:31.611686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:31.611725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:31.611756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:31.613510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.613615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:31.613649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
sage: Source { RawX1: 526 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:05:35.015733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-17T18:05:35.015825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 526 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:05:35.015864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-17T18:05:35.017134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:35.017194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-17T18:05:35.017244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:05:35.017290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-17T18:05:35.017365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-17T18:05:35.017453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-17T18:05:35.017549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-17T18:05:35.017601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-17T18:05:35.018348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:35.019078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:35.019994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T18:05:35.020037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T18:05:35.020160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-17T18:05:35.020261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T18:05:35.020285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-17T18:05:35.020315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-17T18:05:35.020532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:35.020567Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-17T18:05:35.020631Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:35.020660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-17T18:05:35.020706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-17T18:05:35.021808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:35.021899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:35.021933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-17T18:05:35.021968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-17T18:05:35.022003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-17T18:05:35.022877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:35.022948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:35.022977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-17T18:05:35.023005Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:35.023030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-17T18:05:35.023093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T18:05:35.025578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:35.025621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-17T18:05:35.026227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-17T18:05:35.026413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:35.026446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:35.026493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:35.026537Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:35.026573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-17T18:05:35.026637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:554:2493] message: TxId: 104 2025-04-17T18:05:35.026680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:35.026715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:05:35.026765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:05:35.026873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-17T18:05:35.027375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T18:05:35.027410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T18:05:35.027736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-17T18:05:35.028924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-17T18:05:35.029992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T18:05:35.030051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-17T18:05:35.030145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:05:35.030698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:826:2744] 2025-04-17T18:05:35.031303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-17T18:05:35.032745Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-17T18:05:35.032894Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 163us result status StatusSuccess 2025-04-17T18:05:35.033171Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] Test command err: Trying to start YDB, gRPC: 25828, MsgBus: 16236 2025-04-17T18:05:04.421552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344719962172013:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:04.423024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b90/r3tmp/tmp5AclbZ/pdisk_1.dat 2025-04-17T18:05:04.847904Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25828, node 1 2025-04-17T18:05:04.872822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:04.872946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:04.874762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:04.926765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:04.926798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:04.926805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:04.926925Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:16236 TClient is connected to server localhost:16236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:05.404471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:05.441982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:05.576077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:05.725333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:05.804853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:07.509332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344732847075591:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.509434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:07.791799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.821114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.848351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.878563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.911044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.949001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.986252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.060976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344737142043543:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.061058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.061124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344737142043548:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.064103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:08.073746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344737142043550:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:08.131341Z node 1 :TX_PROXY ERROR: Actor# [1:7494344737142043604:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:09.034430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.035672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.037273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.418322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344719962172013:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:09.418504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:10.900121Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913110941, txId: 281474976710697] shutting down 2025-04-17T18:05:10.923657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344745731979576:2768], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:10.923696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344745731979574:2766], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:10.923764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:10.923781Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494344745731979575:2767], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=YmQzMzU0MDUtNjU5MTgzYTMtNzZjM2MwNC1jMWExN2I0NA==, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-17T18:05:10.923846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494344745731979575:2767], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=YmQzMzU0MDUtNjU5MTgzYTMtNzZjM2MwNC1jMWExN2I0NA==, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-17T18:05:10.923946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7494344745731979572:2765]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-17T18:05:10.924040Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmQzMzU0MDUtNjU5MTgzYTMtNzZjM2MwNC1jMWExN2I0NA==, ActorId: [1:7494344745731979572:2765], ActorState: ExecuteState, TraceId: 01js2et5vybkdhf5snekpyxp40, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-17T18:05:10.924226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7494344745731979572:2765]: Pool another_pool_id not found Trying to start YDB, gRPC: 29270, MsgBus: 3133 2025-04-17T18:05:11.645813Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344746330843992:2067];send_to=[0:7307199536658146131:77 ... , issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:15.324355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:15.330334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344763510715614:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:15.334494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:15.351578Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T18:05:15.351823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344763510715616:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:15.453980Z node 2 :TX_PROXY ERROR: Actor# [2:7494344763510715672:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:16.613361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.615690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.617154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:16.648702Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344746330843992:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:16.648795Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8068, MsgBus: 12735 2025-04-17T18:05:19.192555Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344780930836499:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:19.192638Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b90/r3tmp/tmp54z6AX/pdisk_1.dat 2025-04-17T18:05:19.386094Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:19.421677Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:19.421782Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:19.425102Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8068, node 3 2025-04-17T18:05:19.534077Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:19.534102Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:19.534110Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:19.534273Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12735 TClient is connected to server localhost:12735 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:20.166098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.179369Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:20.198092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.273374Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:20.458157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:05:20.544238Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.971579Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344793815740150:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:22.971710Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.025657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.060834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.129754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.169260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.240412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.288767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.361467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:23.432967Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344798110708117:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.433063Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.433121Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494344798110708122:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:23.437013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:23.451835Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494344798110708124:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:23.545041Z node 3 :TX_PROXY ERROR: Actor# [3:7494344798110708181:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:24.193190Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344780930836499:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:24.193267Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:24.629444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.631738Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.633318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:34.383034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:05:34.383084Z node 3 :IMPORT WARN: Table profiles were not loaded >> KqpSystemView::PartitionStatsFollower [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:35.387656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:35.387745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:35.387784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:35.387820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:35.387887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:35.387921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:35.387989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:35.388075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:35.388385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:35.472061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:35.472115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:35.485796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:35.486167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:35.486347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:35.493120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:35.493271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:35.493733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:35.493885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:35.496266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:35.497246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:35.497288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:35.497589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:35.497622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:35.497648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:35.497774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.502761Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:35.589049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:35.589256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.589430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:35.589613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:35.589664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.591902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:35.592016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:35.592192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.592250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:35.592277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:35.592301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:35.593970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.594015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:35.594043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:35.595502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.595536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.595563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:35.595604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:35.598117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:35.599804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:35.599987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:35.600662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:35.600747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:35.600801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:35.601010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:35.601045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:35.601175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:35.601248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:35.602783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:35.602832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:35.602957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:35.602993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:35.603279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:35.603314Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:35.603388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:35.603410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:35.603463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:35.603490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:35.603525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:35.603563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:35.603587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:35.603606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:35.603665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:35.603687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:35.603708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:35.604905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:35.604990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:35.605018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3409550 2025-04-17T18:05:35.980297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-17T18:05:35.980479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409552 2025-04-17T18:05:35.981443Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-17T18:05:35.981608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:35.981643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T18:05:35.981697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:35.981733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:35.981777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T18:05:35.982027Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-17T18:05:35.983414Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-17T18:05:35.983773Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2025-04-17T18:05:35.985335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409548 2025-04-17T18:05:35.985818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:35.985975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-17T18:05:35.986689Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-17T18:05:35.986839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:35.986943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409551 2025-04-17T18:05:35.987546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 
2025-04-17T18:05:35.987654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-17T18:05:35.988629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:35.988737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:05:35.989138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:35.989254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:35.990143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:35.990884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:35.990940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:35.991028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:35.991859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-17T18:05:35.991910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-17T18:05:35.991994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-17T18:05:35.992008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-17T18:05:35.992039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:35.992084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:35.992108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:35.992145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:35.994235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:05:35.994275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:05:35.994330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:35.994342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-17T18:05:35.994408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-17T18:05:35.994422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close 
pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-17T18:05:35.994457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:05:35.994470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:05:35.994856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:35.994883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:35.995503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:35.995895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:05:35.996188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T18:05:35.996227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T18:05:35.996642Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T18:05:35.996736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:05:35.996770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:786:2676] TestWaitNotification: OK eventTxId 103 2025-04-17T18:05:35.997196Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:35.997386Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 208us result status StatusPathDoesNotExist 2025-04-17T18:05:35.997535Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:05:35.998007Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:35.998195Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 163us result status StatusSuccess 2025-04-17T18:05:35.998507Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-04-17T18:03:43.766422Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.788146Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.788534Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792047Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.849867Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.857957Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.946721Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.948607Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.948694Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.948797Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.949149Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.949560Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.949694Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.949999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:03:43.950039Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.031206Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.059954Z node 1 :TX_DATASHARD INFO: Switched to work state 
WaitScheme tabletId 9437184 2025-04-17T18:03:44.060169Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.060283Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.060320Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.060358Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.060394Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.060562Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.060617Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.060875Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.060962Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.061107Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.061147Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.061203Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.061248Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.061300Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.061333Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.061375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.061492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.061531Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.061595Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.064179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.064244Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.064353Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.064532Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.064581Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.064657Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.064716Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is 
ExecutedNoMoreRestarts 2025-04-17T18:03:44.064755Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.064807Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.064845Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.065132Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.065185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.065242Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.065275Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.065337Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.065373Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.065418Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.065453Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.065482Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.077693Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.077753Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.077814Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.077855Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.077913Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.078493Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.078540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.078585Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.078654Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.078683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.078809Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.078856Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.078923Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.078960Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.082753Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 
MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.082813Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.082998Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.083037Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.083086Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.083123Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.083159Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.083197Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.083236Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.083294Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.083342Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.083386Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.083422Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.083607Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.083661Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.083689Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.083713Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.083737Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.083792Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.083868Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.083919Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.083951Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.083994Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.084041Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.084074Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.084126Z node 1 :TX_D ... 
kips: 0, InvisibleRowSkips: 0} 2025-04-17T18:05:31.639885Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2025-04-17T18:05:31.639922Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2025-04-17T18:05:31.639951Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2025-04-17T18:05:31.639973Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2025-04-17T18:05:31.640179Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is DelayComplete 2025-04-17T18:05:31.640201Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2025-04-17T18:05:31.640224Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2025-04-17T18:05:31.640249Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2025-04-17T18:05:31.640275Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is Executed 2025-04-17T18:05:31.640295Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2025-04-17T18:05:31.640320Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437185 has finished 2025-04-17T18:05:31.640345Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:05:31.640366Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:05:31.640393Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T18:05:31.640422Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T18:05:31.659631Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:05:31.659697Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437186 on unit CompleteOperation 2025-04-17T18:05:31.659765Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437186 at tablet 9437186 send result to client [4:99:2134], exec latency: 3 ms, propose latency: 5 ms 2025-04-17T18:05:31.659834Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-17T18:05:31.659872Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:05:31.660124Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-17T18:05:31.660163Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:05:31.660192Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit StoreAndSendOutRS 2025-04-17T18:05:31.660225Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437186 from 9437186 to 9437184 txId 403 2025-04-17T18:05:31.660278Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:05:31.660309Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2025-04-17T18:05:31.660354Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:05:31.660404Z node 4 :TX_DATASHARD DEBUG: Send delayed 
Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-04-17T18:05:31.660436Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:05:31.661092Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:455:2397], Recipient [4:346:2313]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-17T18:05:31.661141Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:31.661176Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 402 2025-04-17T18:05:31.661497Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:455:2397], Recipient [4:234:2227]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-17T18:05:31.661539Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:05:31.661574Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437184 source 9437186 dest 9437184 producer 9437186 txId 403 2025-04-17T18:05:31.661641Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-17T18:05:31.661688Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437186 to=9437184origin=9437186 2025-04-17T18:05:31.661753Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-17T18:05:31.661833Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:455:2397], Recipient [4:346:2313]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-04-17T18:05:31.661863Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:31.661894Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 2025-04-17T18:05:31.662395Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:234:2227], Recipient [4:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:05:31.662443Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:05:31.662496Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:05:31.662531Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:05:31.662566Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2025-04-17T18:05:31.662594Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2025-04-17T18:05:31.662628Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-17T18:05:31.662660Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2025-04-17T18:05:31.662693Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:05:31.662722Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] 
at 9437184 on unit ExecuteDataTx 2025-04-17T18:05:31.664501Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2025-04-17T18:05:31.664565Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 2, NSelectRange: 2, NUpdateRow: 6, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 72, SelectRangeBytes: 576, UpdateRowBytes: 45, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:05:31.664626Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:05:31.664658Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:05:31.664689Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2025-04-17T18:05:31.664719Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2025-04-17T18:05:31.664972Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is DelayComplete 2025-04-17T18:05:31.665001Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2025-04-17T18:05:31.665031Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2025-04-17T18:05:31.665061Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2025-04-17T18:05:31.665095Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-17T18:05:31.665116Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2025-04-17T18:05:31.665146Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2025-04-17T18:05:31.665179Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:05:31.665207Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:05:31.665239Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:05:31.665268Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:05:31.688326Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:05:31.688400Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2025-04-17T18:05:31.688470Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:99:2134], exec latency: 3 ms, propose latency: 5 ms 2025-04-17T18:05:31.688544Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-04-17T18:05:31.688583Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T18:05:31.690470Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:346:2313], Recipient [4:234:2227]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-04-17T18:05:31.690571Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:31.690615Z node 4 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 2025-04-17T18:05:31.710446Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:05:31.710510Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2025-04-17T18:05:31.710581Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:99:2134], exec latency: 3 ms, propose latency: 5 ms 2025-04-17T18:05:31.710651Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-04-17T18:05:31.710694Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:05:31.711083Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:234:2227], Recipient [4:455:2397]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-04-17T18:05:31.711128Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:05:31.711166Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |86.6%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.6%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TGRpcStreamingTest::SimpleEcho >> TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::WritesDoneFromClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 20804, MsgBus: 6833 2025-04-17T18:05:20.999503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344785657318676:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:20.999802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b16/r3tmp/tmph9ZFx2/pdisk_1.dat 2025-04-17T18:05:21.512038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:21.537562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:21.537663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:21.540042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20804, node 1 2025-04-17T18:05:21.730695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:21.730717Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:21.730722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:21.730812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6833 TClient is connected to server localhost:6833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:22.377226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:22.391836Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:05:22.542265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:22.542316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:22.542430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7494344789952286401:2190], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:05:22.542446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:05:23.542349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:23.542382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:23.542440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7494344789952286401:2190], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:05:23.542449Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:05:24.000838Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494344785657318648:2068], interval end# 2025-04-17T18:05:24.000000Z, event interval end# 2025-04-17T18:05:24.000000Z 2025-04-17T18:05:24.000864Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494344785657318648:2068], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T18:05:24.003781Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494344789952286009:2094], interval end# 2025-04-17T18:05:24.000000Z, event interval end# 2025-04-17T18:05:24.000000Z 2025-04-17T18:05:24.003830Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494344789952286009:2094], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T18:05:24.287366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344802837188531:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.287570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:24.523011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344802837188556:2312], Recipient [1:7494344789952286401:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:05:24.523044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:05:24.523064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:05:24.523107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7494344802837188552:2309], Recipient [1:7494344789952286401:2190]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:05:24.523120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:05:24.615069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:05:24.615464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.615601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2025-04-17T18:05:24.616012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:05:24.616054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T18:05:24.616069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:05:24.616179Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 2025-04-17T18:05:24.616199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:05:24.616213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2
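The CreateTable transaction proposed above (Key Uint64, Value String, FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false }) is the scheme-level form of a YQL CREATE TABLE with read replicas. A minimal sketch of the equivalent DDL, written as the query string a test would execute; the assumption here is that ANY_AZ:3 in the documented READ_REPLICAS_SETTINGS option is what compiles down to this follower group:

// Hedged sketch: ANY_AZ:3 should correspond to FollowerCount: 3 with
// RequireAllDataCenters: false in the proposal logged above.
const char* CreateFollowersTableQuery = R"(
    CREATE TABLE `/Root/Followers` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (READ_REPLICAS_SETTINGS = "ANY_AZ:3");
)";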
2025-04-17T18:05:24.616783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 2] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2025-04-17T18:05:24.616955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:05:24.616989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:24.617064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T18:05:24.617096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:05:24.617123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:05:24.617506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:24.617529Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:24.617573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7494344789952286401:2190], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:05:24.617597Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeas ... 
0 2025-04-17T18:05:34.661735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: DataSize: 800 RowCount: 4 IndexSize: 0 InMemSize: 800 LastAccessTime: 1744913125087 LastUpdateTime: 1744913124953 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:05:34.661795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099973s, queue# 1 2025-04-17T18:05:34.680784Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7494344802837188626:2338]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:05:34.680822Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:05:34.684893Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7494344802837188628:2339]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:05:34.684932Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:05:34.685014Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037888 FollowerId 3, TableInfos size = 1 2025-04-17T18:05:34.685087Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 3, tableId 2 2025-04-17T18:05:34.685157Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7494344802837188629:2340]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:05:34.685174Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:05:34.685505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7494344845786861898:2527], Recipient [1:7494344789952286401:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:05:34.685511Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877760, Sender [1:7494344845786861897:2412], Recipient [1:7494344802837188628:2339]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [1:7494344845786861898:2527] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:05:34.685535Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:05:34.685537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:05:34.685555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:05:34.685756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7494344802837188628:2339], Recipient [1:7494344789952286401:2190]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { } ShardState: 3 NodeId: 1 StartTime: 1744913124664 TableOwnerId: 72057594046644480 FollowerId: 3 2025-04-17T18:05:34.685780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:05:34.685810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 3 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Readonly' dataSize 0 rowCount 0 cpuUsage 0 2025-04-17T18:05:34.685886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 3 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:05:34.762211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:05:34.762268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:05:34.762300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-17T18:05:34.762383Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-17T18:05:34.762410Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-17T18:05:34.762524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 800 row count 4 2025-04-17T18:05:34.762616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0, RowCount 4, DataSize 800 2025-04-17T18:05:34.762642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-04-17T18:05:34.762735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-17T18:05:34.762823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-04-17T18:05:34.762863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 0 row count 0 2025-04-17T18:05:34.762899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=3, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:05:34.762906Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 3 2025-04-17T18:05:34.762970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T18:05:34.763105Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:05:34.763129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:05:34.763141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T18:05:34.763855Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1744913124625 AccessTime: 1744913125087 UpdateTime: 1744913124953 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:05:34.763971Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 3 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1744913124664 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 3 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:05:35.630022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:35.630071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:05:35.630121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7494344789952286401:2190], Recipient [1:7494344789952286401:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:05:35.630149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
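TEvSendPartitionStats above carries one row for the leader (followerId 0, with the real RowCount/DataSize) and one for the follower (followerId 3, which only served RangeReads), which is why the partition_stats scan in the records below reports "row count: 2". A sketch of the kind of query the test issues against the system view; the column list here is abridged and partly assumed rather than copied from the view's schema:

const char* PartitionStatsQuery = R"(
    SELECT Path, TabletId, FollowerId, RowCount, DataSize, RangeReadRows
    FROM `/Root/.sys/partition_stats`;
)";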
SELECT from partition_stats, attempt 2 2025-04-17T18:05:36.000664Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494344789952286009:2094], interval end# 2025-04-17T18:05:36.000000Z, event interval end# 2025-04-17T18:05:36.000000Z 2025-04-17T18:05:36.000664Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7494344785657318648:2068], interval end# 2025-04-17T18:05:36.000000Z, event interval end# 2025-04-17T18:05:36.000000Z 2025-04-17T18:05:36.000692Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494344785657318648:2068], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-04-17T18:05:36.000703Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7494344789952286009:2094], query logs count# 1, processor ids count# 1, processor id to database count# 0 2025-04-17T18:05:36.129333Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494344854376796515:2423], owner: [1:7494344854376796512:2421], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T18:05:36.129735Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494344854376796515:2423], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T18:05:36.129944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7494344854376796515:2423], Recipient [1:7494344789952286401:2190]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2025-04-17T18:05:36.129972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2025-04-17T18:05:36.130091Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494344854376796515:2423], row count: 2, finished: 1 2025-04-17T18:05:36.130140Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494344854376796515:2423], owner: [1:7494344854376796512:2421], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T18:05:36.133166Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7494344789952286009:2094], database# /Root, query hash# 14960494650040056739, cpu time# 263373 |86.6%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.6%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 18942, MsgBus: 14793 2025-04-17T18:04:53.383978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344672897543326:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:53.384020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bc7/r3tmp/tmpbhkTgU/pdisk_1.dat 2025-04-17T18:04:53.809174Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18942, node 1 2025-04-17T18:04:53.829394Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:04:53.829454Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:04:53.830087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:53.830167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:53.831003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:53.895912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:53.895933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:53.895951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:53.896069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14793 TClient is connected to server localhost:14793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:54.460322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:54.479128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:54.497112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:54.621242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:54.765677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:54.833196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:56.532295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344685782446973:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:56.532402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:56.815305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.842824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.873320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.902606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:56.933204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.004109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.082766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.166460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344690077414936:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.166554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.166699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344690077414943:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.174035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:57.186912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344690077414945:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:57.253908Z node 1 :TX_PROXY ERROR: Actor# [1:7494344690077415001:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14751, MsgBus: 13345 2025-04-17T18:04:58.848692Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344691015795519:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:58.848769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bc7/r3tmp/tmp33GNdt/pdisk_1.dat 2025-04-17T18:04:58.980397Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14751, node 2 2025-04-17T18:04:59.007852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:59.007935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:59.009508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:04:59.042694Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:59.042721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:59.042731Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:59.042838Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13345 TClient is connected to server localhost:13345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:59.515092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
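The pool-creation cycle traced above repeats in every test bootstrap: the workload service fails to fetch the `default` pool (NOT_FOUND), TPoolCreatorActor creates it, the "completed, doublechecking" retry re-verifies it, and a racing session may then see "path exist, request accepts it". In DDL terms the actor is effectively running something like the following; the WITH options shown are illustrative assumptions, not the settings the actor actually applies:

const char* CreateDefaultPoolQuery = R"(
    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = -1, -- assumed: unlimited
        QUEUE_SIZE = -1              -- assumed: unbounded queue
    );
)";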
2025-04-17T18:04:59.534891Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:04:59.548490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:04:59.626893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:04:59.781664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waitin ... ration part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:20.092662Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:22.837544Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913122869, txId: 281474976715700] shutting down 2025-04-17T18:05:23.149405Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913123184, txId: 281474976715703] shutting down 2025-04-17T18:05:23.473714Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913123506, txId: 281474976715706] shutting down 2025-04-17T18:05:23.773103Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913123814, txId: 281474976715709] shutting down 2025-04-17T18:05:23.816422Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: d12b6b2-89746a78-899c4111-5ac887c, State: Get results info, Finish with NOT_FOUND, Issues: {
<main>: Error: Results are expired }, SessionId: ydb://session/3?node_id=4&id=YjZiMGY1NS0xNzVlYmE4OS04YTQxYWM0Yi0zYWJmYzNmNQ==, TxId: Trying to start YDB, gRPC: 14399, MsgBus: 27666 2025-04-17T18:05:25.820588Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494344807199623181:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:25.821356Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bc7/r3tmp/tmpgYQpUr/pdisk_1.dat 2025-04-17T18:05:25.965572Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:25.994766Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:25.994878Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:25.998137Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14399, node 5 2025-04-17T18:05:26.146943Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:26.146976Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:26.146987Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:26.147146Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27666 TClient is connected to server localhost:27666 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:26.726933Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:26.745685Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:26.818960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
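KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter exercises two distinct expirations, both visible in this log: once the results TTL passes, fetching result sets fails with "Results are expired" (above), while once the execution record is forgotten, every actor replies "No such execution" or "Script execution not found" (further below). A toy model of that distinction, not YDB code:

#include <string>

// Toy model of the NOT_FOUND outcomes in the trace: result sets expire
// independently of the script execution record itself.
std::string LookupScriptExecution(bool executionForgotten, bool resultsExpired) {
    if (executionForgotten) return "NOT_FOUND: No such execution";
    if (resultsExpired)     return "NOT_FOUND: Results are expired";
    return "SUCCESS";
}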
2025-04-17T18:05:26.993921Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:27.083506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:29.952875Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494344824379494126:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:29.952980Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.021167Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.051588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.089244Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.126294Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.161319Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.200581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.287035Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:30.355114Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494344828674462087:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.355217Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.355421Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494344828674462092:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.359395Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:30.371229Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494344828674462094:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:30.459759Z node 5 :TX_PROXY ERROR: Actor# [5:7494344828674462152:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:30.820877Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494344807199623181:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:30.827614Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:31.648887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.650528Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.652688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:33.585803Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 748c81d-71af5b6e-c3cac12-566a4fe8, Finish with NOT_FOUND, Issues: {
<main>: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=YWFkMmMzMDgtMzFkNzlhNTMtZTQ5ZDEyOWItYzc2OTQwMzE=, TxId: 2025-04-17T18:05:34.673986Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 748c81d-71af5b6e-c3cac12-566a4fe8, Finish with NOT_FOUND, Issues: {
<main>: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=ZTU0ZThmODQtOGM4ZmQ4Ny1jYmQ0OTc2LTJjYTM2NzJk, TxId: 2025-04-17T18:05:34.831046Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 748c81d-71af5b6e-c3cac12-566a4fe8, reply NOT_FOUND, issues: {
<main>: Error: No such execution } 2025-04-17T18:05:34.856156Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 748c81d-71af5b6e-c3cac12-566a4fe8, Finish with NOT_FOUND, Issues: {
<main>: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=MzlkOTY0MzktMjk5ZDkzYy05NjQ2ZmMxNS0yMzY4NTM4OA==, TxId: 2025-04-17T18:05:34.856310Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: 748c81d-71af5b6e-c3cac12-566a4fe8, check lease failed 2025-04-17T18:05:35.205802Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 748c81d-71af5b6e-c3cac12-566a4fe8, State: Get results info, Finish with NOT_FOUND, Issues: {
<main>: Error: Script execution not found }, SessionId: ydb://session/3?node_id=5&id=NjRiZGM1M2YtZTA1MWVmYjctYzM4YTA4YzEtZTY2ZjdlMjE=, TxId: |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::UserLogin >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction >> TColumnShardTestSchema::RebootOneColdTier >> TColumnShardTestSchema::RebootExportAfterFail >> TColumnShardTestSchema::EnableColdTiersAfterTtl >> TColumnShardTestSchema::ExportWithLostAnswer >> TColumnShardTestSchema::Drop >> TColumnShardTestSchema::ColdTiersWithStat >> TColumnShardTestSchema::RebootHotTiersWithStat >> TColumnShardTestSchema::RebootColdTiers >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn >> TColumnShardTestSchema::HotTiersTtl >> TColumnShardTestSchema::HotTiersWithStat >> TColumnShardTestSchema::RebootForgetAfterFail >> TColumnShardTestSchema::ColdCompactionSmoke >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:34.056657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.056764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.056803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.056841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.056900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.056932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.056989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s,
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.057078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.057377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:34.148001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.148052Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:34.165158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.165508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.165679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.173203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.173402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.174071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.174279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.176773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.178097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.178154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.178366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.178415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.178454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.178610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.184969Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.306255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.306503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.306713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.306948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.307002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.308918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.309047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.309208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.309276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.309321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.309359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.310999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.311042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.311069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.312372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.312415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.312449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.312493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.314981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.316233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.316403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.317188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.317282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.317330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.317602Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.317657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.317778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:34.317867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.319424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.319461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.319668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.319714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.320023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.320066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.320145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.320171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.320214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.320247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.320276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.320306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.320336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.320370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.320413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.320763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.320791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.321988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.322070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.322098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
hemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 4294969699 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-17T18:05:38.824453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-17T18:05:38.824650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 4294969699 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-17T18:05:38.824722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-17T18:05:38.825228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:38.825288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-17T18:05:38.825334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:05:38.825368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-17T18:05:38.825443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-17T18:05:38.825610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-17T18:05:38.825725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:38.825785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:38.828269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:38.829428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:38.829639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:38.829682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:38.829867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:38.830043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:38.830098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T18:05:38.830153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-17T18:05:38.830598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2025-04-17T18:05:38.830651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:05:38.830756Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:38.830810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T18:05:38.830863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-17T18:05:38.831677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:38.831849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:38.831889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:05:38.831926Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-17T18:05:38.831964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:38.833017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:38.833122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:38.833160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:05:38.833222Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:05:38.833253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:05:38.833314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-17T18:05:38.836352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:38.836405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:38.836776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:05:38.836971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:05:38.837027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:38.837068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 
2025-04-17T18:05:38.837101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:38.837137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T18:05:38.837207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:409:2376] message: TxId: 103 2025-04-17T18:05:38.837249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:38.837286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:05:38.837316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:05:38.837423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:38.837832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:38.837865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:38.838552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:38.838670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:38.840018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:38.840081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-17T18:05:38.840160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:05:38.840195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:670:2604] 2025-04-17T18:05:38.840928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-17T18:05:38.841849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:38.842089Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 236us result status StatusSuccess 2025-04-17T18:05:38.842571Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> TGRpcStreamingTest::SimpleEcho [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> TColumnShardTestSchema::ExportAfterFail >> TColumnShardTestSchema::DropWriteRace >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> TColumnShardTestSchema::CreateTable >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> 
TWebLoginService::AuditLogAdminLoginSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-04-17T18:05:37.613135Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344861142578207:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:37.613277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dad/r3tmp/tmpG0ADQL/pdisk_1.dat 2025-04-17T18:05:37.917922Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:37.978342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:37.978668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:37.980479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:37.995414Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] stream accepted Name# Session ok# true peer# ipv6:[::1]:45082 2025-04-17T18:05:37.995696Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] facade attach Name# Session actor# [1:7494344861142578736:2256] peer# ipv6:[::1]:45082 2025-04-17T18:05:37.995719Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] facade read Name# Session peer# ipv6:[::1]:45082 2025-04-17T18:05:37.995834Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] read finished Name# Session ok# true data# peer# ipv6:[::1]:45082 2025-04-17T18:05:37.995891Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2025-04-17T18:05:37.995920Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] facade write Name# Session data# peer# ipv6:[::1]:45082 2025-04-17T18:05:37.996141Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] facade finish Name# Session peer# ipv6:[::1]:45082 grpc status# (0) message# 2025-04-17T18:05:37.996142Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] write finished Name# Session ok# true peer# ipv6:[::1]:45082 2025-04-17T18:05:37.996412Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] stream done notification Name# Session ok# true peer# ipv6:[::1]:45082 2025-04-17T18:05:37.996510Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] stream finished Name# Session ok# true peer# ipv6:[::1]:45082 grpc status# (0) message# 2025-04-17T18:05:37.996612Z node 1 :GRPC_SERVER DEBUG: [0x51f000027680] deregistering request Name# Session peer# ipv6:[::1]:45082 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-04-17T18:05:37.613110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344860001845897:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:37.613157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000daa/r3tmp/tmpDUltSf/pdisk_1.dat 2025-04-17T18:05:37.947132Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:37.995493Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session 
ok# true peer# ipv6:[::1]:55980 2025-04-17T18:05:37.995780Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7494344860001846418:2253] peer# ipv6:[::1]:55980 2025-04-17T18:05:37.995857Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:55980 2025-04-17T18:05:37.996113Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:55980 grpc status# (0) message# 2025-04-17T18:05:37.996158Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:55980 2025-04-17T18:05:37.996193Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-17T18:05:37.996412Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:55980 2025-04-17T18:05:37.996544Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:55980 2025-04-17T18:05:37.996634Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-17T18:05:37.996650Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:55980 grpc status# (0) message# 2025-04-17T18:05:37.996764Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:55980 (finish done) 2025-04-17T18:05:38.014107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:38.014215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:38.015807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-04-17T18:05:37.613168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344858359267729:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:37.613229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d9c/r3tmp/tmpBYJVlA/pdisk_1.dat 2025-04-17T18:05:37.922858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:37.973857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:37.974780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:37.977900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:37.995427Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream accepted Name# Session ok# true peer# ipv6:[::1]:40432 2025-04-17T18:05:37.995723Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade attach Name# Session actor# [1:7494344858359268251:2256] peer# ipv6:[::1]:40432 2025-04-17T18:05:37.995763Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade read Name# Session peer# ipv6:[::1]:40432 2025-04-17T18:05:37.995855Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] read finished Name# Session ok# false data# peer# ipv6:[::1]:40432 2025-04-17T18:05:37.995892Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success 
= 0 2025-04-17T18:05:37.995927Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade finish Name# Session peer# ipv6:[::1]:40432 grpc status# (9) message# Everything is A-OK 2025-04-17T18:05:37.996222Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream done notification Name# Session ok# true peer# ipv6:[::1]:40432 2025-04-17T18:05:37.996269Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream finished Name# Session ok# true peer# ipv6:[::1]:40432 grpc status# (9) message# Everything is A-OK 2025-04-17T18:05:37.996271Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-17T18:05:37.996293Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] deregistering request Name# Session peer# ipv6:[::1]:40432 (finish done) >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TWebLoginService::AuditLogCreateModifyUser >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> TColumnShardTestSchema::RebootHotTiersRevCompression |86.7%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.7%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> TColumnShardTestSchema::DropWriteRace [GOOD] >> TColumnShardTestSchema::HotTiers >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TColumnShardTestSchema::OneColdTier >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> TWebLoginService::AuditLogCreateModifyUser [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:29.210805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:29.210890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:29.210919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:29.210950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:29.211018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:29.211045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:29.211091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:29.211164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:29.211478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:29.302340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:29.302409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:29.323193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:29.323730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:29.323932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:29.333756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:29.333982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2025-04-17T18:05:29.334840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:29.335068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:29.338047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:29.339440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:29.339501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:29.339723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:29.339786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:29.339831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:29.339971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.347016Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:29.481647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:29.481903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.482156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:29.482403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:29.482457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.489538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:29.489695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:29.489898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.489990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:29.490030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:29.490066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:29.492439Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.492508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:29.492562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:29.494819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.494892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.494949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:29.495065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.499120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:29.501302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:29.501517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:29.502582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:29.502723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:29.502784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:29.503090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:29.503149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:29.503308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:29.503423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:29.505701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:29.505751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:29.505946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:29.506011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:29.506437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:29.506486Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:29.506587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:29.506634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.506710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:29.506748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.506787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:29.506843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:29.506881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:29.507031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:29.507103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:29.507143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:29.507177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:29.509037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:29.509210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:29.509257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
teStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:41.100054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:483:2441] sender: [1:769:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:483:2441] sender: [1:772:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:483:2441] sender: [1:773:2058] recipient: [1:771:2691] Leader for TabletID 72057594046678944 is [1:774:2692] sender: [1:775:2058] recipient: [1:771:2691] 2025-04-17T18:05:41.141928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:41.142031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:41.142077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:41.142114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:41.142157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:41.142207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:41.142339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:41.142415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:41.142746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:41.160023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:41.161372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:41.161569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:41.161789Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-17T18:05:41.161825Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:41.162085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:41.162828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:41.162940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:05:41.162981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:41.163056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.163123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.163538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-04-17T18:05:41.163668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T18:05:41.163734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-17T18:05:41.163822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:05:41.164053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:41.164219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.164333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:41.164386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:41.164416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:41.164439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:41.164574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:41.164789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.165040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-17T18:05:41.165371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.165492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.165913Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.165989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.166248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.166335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.166421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.166609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.166703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.166886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.167127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.167306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.167357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.167434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.174723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:41.174786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:41.174968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:41.175017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:41.175044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:41.175150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:774:2692] sender: [1:828:2058] recipient: [1:15:2062] 2025-04-17T18:05:41.207294Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:41.207529Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 209us result status StatusSuccess 2025-04-17T18:05:41.207986Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 
SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2025-04-17T18:05:40.605406Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.693691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.716490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.716757Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.724257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.724514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.724750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.724905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.725013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.725132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.725245Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.725379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.725507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.725605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.725701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.725837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.752511Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.752677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.752727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.752880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.753027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.753121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.753178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.753278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.753338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.753375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.753402Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.753561Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.753615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.753649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.753678Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.753774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.753828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.753897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.753928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.753994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.754036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.754070Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.754122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.754161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.754209Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.754559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-17T18:05:40.754636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-17T18:05:40.754704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:05:40.754790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-04-17T18:05:40.754941Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.755022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.755055Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.755236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.755290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.755317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.755471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.755514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:40.755544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:40.755752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:40.755801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:40.755831Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:05:40.755968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:40.756005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:40.756046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
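The TTxUpdateSchema pass above applies each registered normalizer in a fixed sequence: normalizer_init, a scan that reports "N chunks found", normalizer_finished, then normalizer_switched to the next class, ending with normalization_finished. A minimal C++ sketch of that loop, purely illustrative — the type and event names below are stand-ins, not YDB's actual normalizer interface:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for a schema normalizer; not YDB's real class.
    struct TNormalizer {
        std::string Name;                   // e.g. "Granules", "Chunks", "TablesCleaner"
        std::function<size_t()> FindChunks; // scan step; returns chunks needing repair
    };

    // Mirrors the event sequence in the log: normalizer_init -> "<N> chunks found"
    // -> normalizer_finished -> normalizer_switched, then normalization_finished.
    void RunChain(const std::vector<TNormalizer>& chain) {
        for (size_t i = 0; i < chain.size(); ++i) {
            std::cout << "event=normalizer_init;type=" << chain[i].Name << ";\n";
            std::cout << "normalizer=" << chain[i].Name << ";message="
                      << chain[i].FindChunks() << " chunks found;\n";
            std::cout << "event=normalizer_finished;description=CLASS_NAME="
                      << chain[i].Name << ";\n";
            if (i + 1 < chain.size()) {
                std::cout << "event=normalizer_switched;description=CLASS_NAME="
                          << chain[i + 1].Name << ";\n";
            }
        }
        std::cout << "event=normalization_finished;\n";
    }

    int main() {
        RunChain({{"Granules", [] { return size_t(0); }},
                  {"Chunks", [] { return size_t(0); }},
                  {"TablesCleaner", [] { return size_t(0); }}});
    }

On an empty tablet every scan returns 0 chunks, which is why each normalizer in the log finishes immediately and the chain runs straight through.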
pp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-17T18:05:41.015550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2025-04-17T18:05:41.015603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-17T18:05:41.015662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-04-17T18:05:41.015712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=18; 2025-04-17T18:05:41.015764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=24; 2025-04-17T18:05:41.015798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3977; 2025-04-17T18:05:41.015912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:05:41.015965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:05:41.016041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:05:41.016307Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:05:41.016365Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-17T18:05:41.016590Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:05:41.016772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:05:41.016846Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:05:41.016901Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
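Each record above is a run of ';'-separated key=value fields (tablet_id, fline, event, the PRECHARGE/EXECUTE *LoadingTime counters, and so on). When mining these logs for stage timings, a small self-contained parser is enough; this is an editorial sketch, not a utility that ships with the test suite:

    #include <iostream>
    #include <map>
    #include <sstream>
    #include <string>

    // Split a log record into its ';'-separated key=value fields.
    std::map<std::string, std::string> ParseFields(const std::string& line) {
        std::map<std::string, std::string> fields;
        std::istringstream in(line);
        std::string token;
        while (std::getline(in, token, ';')) {        // fields are ';'-separated
            auto eq = token.find('=');
            if (eq != std::string::npos)
                fields[token.substr(0, eq)] = token.substr(eq + 1);
        }
        return fields;
    }

    int main() {
        auto f = ParseFields(
            "tablet_id=9437184;event=initialize_shard;"
            "fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3977");
        std::cout << f["EXECUTE:composite_initLoadingTime"] << "\n";  // prints 3977
    }

Applied across the composite_init records above, this recovers per-stage load timings (sharing_sessions, in_flight_reads, tiers_manager) and the 3977-microsecond total.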
2025-04-17T18:05:41.016929Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:05:41.016975Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:05:41.017015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-17T18:05:41.285264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004797312;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-04-17T18:05:41.297322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142464;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-04-17T18:05:41.297386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142464;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-17T18:05:41.297440Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1744913141659;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142464;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2025-04-17T18:05:41.297776Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-17T18:05:41.297916Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1744913141659 at tablet 9437184, mediator 0 2025-04-17T18:05:41.297970Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-17T18:05:41.298291Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-17T18:05:41.302719Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T18:05:41.302834Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-17T18:05:41.302875Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-17T18:05:41.307317Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-17T18:05:41.307499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:214;event=finished_tx;tx_id=101; 2025-04-17T18:05:41.331000Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-17T18:05:41.331179Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2025-04-17T18:05:41.345128Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2025-04-17T18:05:41.346892Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-17T18:05:41.347160Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T18:05:41.358948Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T18:05:41.359080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:05:41.385890Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124576;op_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881220032;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-04-17T18:05:41.385956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124576;op_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881220032;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-17T18:05:41.385995Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124576;op_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1744913141667;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881220032;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2025-04-17T18:05:41.386347Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-17T18:05:41.386461Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 
1744913141667 at tablet 9437184, mediator 0 2025-04-17T18:05:41.386505Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-04-17T18:05:41.386729Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184 2025-04-17T18:05:41.386808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=tx_controller.cpp:214;event=finished_tx;tx_id=103; 2025-04-17T18:05:41.398481Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-04-17T18:05:41.398634Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:05:41.398901Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1744913141668 at tablet 9437184, mediator 0 2025-04-17T18:05:41.398977Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] execute at tablet 9437184 2025-04-17T18:05:41.399327Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:83;progress_tx_id=102;lock_id=1;broken=0; 2025-04-17T18:05:41.399481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;commit_tx_id=102;commit_lock_id=1;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 }; 2025-04-17T18:05:41.399599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:214;event=finished_tx;tx_id=102; 2025-04-17T18:05:41.411356Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] complete at tablet 9437184 2025-04-17T18:05:41.411453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=102;lock_id=1;broken=0; 2025-04-17T18:05:41.411543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true >> TColumnShardTestSchema::CreateTable [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 3194, MsgBus: 18050 2025-04-17T18:05:02.942445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344711608675315:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:02.944634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb2/r3tmp/tmpM4GfUx/pdisk_1.dat 2025-04-17T18:05:03.405319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:03.409642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:03.409734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:03.412375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 3194, node 1 2025-04-17T18:05:03.521199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:03.521229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:03.521254Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:03.521380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18050 TClient is connected to server localhost:18050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:04.058188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.075436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.212246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.400474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:04.485882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.214101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344728788546042:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.214220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.541712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.572903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.605978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.639242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.670076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.713393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.788064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:06.879225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344728788546711:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.879290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.879503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344728788546716:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:06.883112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:06.895373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344728788546718:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:06.989961Z node 1 :TX_PROXY ERROR: Actor# [1:7494344728788546775:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:07.943300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344711608675315:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:07.943418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:07.964587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.965924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:07.968873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9097, MsgBus: 15778 2025-04-17T18:05:10.285968Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344743779587757:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:10.286026Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb2/r3tmp/tmpMRFAeG/pdisk_1.dat 2025-04-17T18:05:10.420008Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9097, node 2 2025-04-17T18:05:10.443965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:10.444037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:10.450420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:10.486210Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:10.486232Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:10.486239Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:10.486339Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15778 TClient is connected to server localhost:15778 WaitRootIsUp 'Root'... 
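The TPoolCreatorActor lines above show a create-then-doublecheck pattern: the CREATE races with concurrent default-pool creation, may see "Transaction ... completed, doublechecking" or a "path exist" TX_PROXY error, and simply retries until the pool is observable. A hedged sketch of that retry loop — the callback names and backoff are assumptions for illustration, not the actor's actual code:

    #include <chrono>
    #include <functional>
    #include <thread>

    // Illustrative only: ensure a resource pool exists, tolerating the
    // "path exist" races the TPoolCreatorActor log lines suggest.
    bool EnsurePool(const std::function<bool()>& CheckPoolExists,  // assumed probe
                    const std::function<void()>& CreatePool,       // assumed create
                    int maxAttempts = 5) {
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (CheckPoolExists()) {
                return true;             // a concurrent creator may have won the race
            }
            CreatePool();                // may fail with "path exist"; that is fine
            // back off before doublechecking, like the scheduled retry in the log
            std::this_thread::sleep_for(
                std::chrono::milliseconds(100 * (1 << attempt)));
        }
        return CheckPoolExists();
    }

    int main() {
        bool created = false;
        EnsurePool([&] { return created; }, [&] { created = true; });
    }

This is why the "path exist, request accepts it" message is logged at severity 1 and the test proceeds: the race outcome is expected, not an error.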
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:10.832997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:10.843923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, su ... 025-04-17T18:05:30.130656Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494344829081712761:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.130669Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:30.133652Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:30.146387Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494344829081712763:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:30.220285Z node 4 :TX_PROXY ERROR: Actor# [4:7494344829081712818:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:30.645981Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494344807606873838:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:30.646033Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:31.426803Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.428245Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.429305Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:31.758371Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTQyOGQyLWU3N2JmZDNjLWQ2ODVlM2I1LWU3ZGMwOTJl, ActorId: [4:7494344833376680770:2539], ActorState: ExecuteState, TraceId: 01js2etswv2j41aw3mnayk859r, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 28051, MsgBus: 28321 2025-04-17T18:05:34.112032Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494344847746780383:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:34.112154Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb2/r3tmp/tmp7EryAo/pdisk_1.dat 2025-04-17T18:05:34.259000Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:34.259104Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:34.259265Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:34.274701Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28051, node 5 2025-04-17T18:05:34.332254Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:34.332283Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:34.332295Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:34.332459Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28321 TClient is connected to server localhost:28321 WaitRootIsUp 'Root'... 
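WaitRootIsUp 'Root' is a readiness poll: the client repeats an Ls-style probe of the root path until the response reports SUCCESS, which is exactly the exchange the next lines show. A generic sketch of such a poll, with the probe stubbed out as an assumption (not the real TClient::Ls):

    #include <chrono>
    #include <functional>
    #include <string>
    #include <thread>

    // Generic readiness poll in the spirit of WaitRootIsUp: repeat a status
    // probe until it reports SUCCESS or we give up. The callback is a stand-in.
    bool WaitRootIsUp(const std::function<std::string(const std::string&)>& LsStatus,
                      const std::string& root = "Root",
                      int maxAttempts = 60) {
        for (int i = 0; i < maxAttempts; ++i) {
            if (LsStatus(root) == "SUCCESS") {
                return true;   // corresponds to "WaitRootIsUp 'Root' success."
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(500));
        }
        return false;
    }

    int main() {
        WaitRootIsUp([](const std::string&) { return std::string("SUCCESS"); });
    }
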
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:34.924305Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:34.939874Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:35.020833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:35.175835Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:35.249976Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:37.285070Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494344860631684031:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:37.285203Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:37.334389Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:37.359781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:37.384911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:37.408649Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:37.433871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:37.460607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:37.491350Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:37.543093Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494344860631684692:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:37.543182Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:37.543187Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494344860631684697:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:37.546523Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:37.555258Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494344860631684699:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:05:37.639925Z node 5 :TX_PROXY ERROR: Actor# [5:7494344860631684755:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:38.631466Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:05:38.633685Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:05:38.635180Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:05:38.943899Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YmU0M2U2NjYtNjNiYWZlNGQtMWJiZWVlY2UtZTBiZmEwYmE=, ActorId: [5:7494344864926652688:2535], ActorState: ExecuteState, TraceId: 01js2ev0y39jzdy015dnzdme0b, Create QueryResponse for error on request, msg: 2025-04-17T18:05:39.154148Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494344847746780383:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:39.154617Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T18:05:39.378529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:39.378628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.378667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:39.378699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:39.379713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:39.379759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:39.379825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.379927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:39.381174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:39.466739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:39.466803Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:39.480262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:39.480439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:39.480616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:39.488744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:39.488922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:39.489492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.490137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:39.492926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.500059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:39.500100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.500172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:39.500802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.507022Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-04-17T18:05:39.640898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:39.641062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.641214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:39.641383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:39.641422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.646720Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.646875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:39.647157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.647223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:39.647258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:39.647313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:39.648727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.648781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:39.648812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:39.650370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.650419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.650469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.650528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.654089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:39.655981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:39.656149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:39.657012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.657131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:39.657182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.657443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:39.657497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.657689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:39.657818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:39.659808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.659846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.660000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.660042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:39.660355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.660404Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:39.660506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.660581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.660626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.660656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.660691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:39.660759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.660795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:39.660825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:39.660879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:39.660918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:39.660948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:39.662718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.662834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.662887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7157Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:41.737241Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:41.737308Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:41.737363Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:41.737409Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:41.737634Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.744508Z node 4 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [4:124:2150] sender: [4:241:2058] recipient: [4:15:2062] 2025-04-17T18:05:41.755519Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:41.755731Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.755940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:41.756153Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:41.756207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.758390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:41.758546Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:41.758710Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.758785Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:41.758829Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:41.758871Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:41.760600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.760663Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:41.760704Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:41.762259Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T18:05:41.762303Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.762350Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:41.762402Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:41.762539Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:41.763937Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:41.764116Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:41.764963Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:41.765089Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:41.765142Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:41.765400Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:41.765463Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:41.765643Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:41.765717Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:41.767383Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:41.767455Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:41.767627Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:41.767674Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:41.767977Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.768029Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:41.768152Z 
node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:41.768194Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:41.768243Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:41.768281Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:41.768323Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:41.768367Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:41.768408Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:41.768443Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:41.768510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:41.768558Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:41.768595Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:41.769139Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:41.769257Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:41.769307Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:05:41.769355Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:05:41.769399Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:41.769493Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:05:41.772110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:05:41.772570Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:41.773876Z node 4 :TX_PROXY DEBUG: actor# [4:271:2262] Bootstrap 2025-04-17T18:05:41.794507Z node 4 :TX_PROXY DEBUG: actor# [4:271:2262] Become StateWork (SchemeCache [4:279:2270]) 2025-04-17T18:05:41.794774Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-17T18:05:41.795111Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3992, port: 3992 2025-04-17T18:05:41.795204Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:05:41.798816Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:3992. 
Invalid credentials 2025-04-17T18:05:41.799279Z node 4 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2025-04-17T18:05:41.799802Z node 4 :TX_PROXY DEBUG: actor# [4:271:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:05:41.801910Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-04-17T18:05:41.758498Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-17T18:05:41.799061Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:3992. Invalid credentials, login_user=user1@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-04-17T18:05:41.799061Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:3992. Invalid credentials, login_user=user1@ldap, sanitized_token={none} >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogCreateModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:39.378534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:39.378654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.378701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:39.378776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:39.379724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:39.379787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:39.379883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.379968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:39.381194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:39.472232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:39.472287Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:39.488352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:39.488730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:39.488908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:39.497093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:39.497263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:39.497941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.498135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:39.500991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.502270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.502339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.502518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:39.502580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.502637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:39.502776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.508949Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:39.664412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:39.664647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.664862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:39.665080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:39.665132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.667300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.667435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:39.667596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.667666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:39.667703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:39.667753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:39.669555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.669611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:39.669647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:39.671306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.671351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.671392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.671477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.676580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:39.678641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:39.678859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:39.679824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.679958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:39.680018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.680297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:39.680350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-17T18:05:39.680541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:39.680626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:39.682823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.682872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.683036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.683076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:39.683445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.683493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:39.683594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.683640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.683686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.683759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.683804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:39.683846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.683879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:39.683918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:39.683980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:39.684018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:39.684050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:39.685885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.685991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.686028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
oDone TxId: 105 ready parts: 1/1 2025-04-17T18:05:41.840056Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:05:41.840091Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:05:41.840155Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:41.840220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-17T18:05:41.840258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:05:41.840297Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:05:41.840332Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-17T18:05:41.840372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-04-17T18:05:41.842279Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusSuccess TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:41.842373Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-04-17T18:05:41.842564Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:41.842609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:41.842766Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:41.842809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-17T18:05:41.843267Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:41.843361Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:41.843396Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:41.843437Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-17T18:05:41.843479Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:41.843564Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:05:41.845032Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 AUDIT LOG buffer(6): 2025-04-17T18:05:41.758657Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, 
database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-17T18:05:41.800830Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-04-17T18:05:41.815726Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-04-17T18:05:41.823585Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2025-04-17T18:05:41.830706Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2025-04-17T18:05:41.839783Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] AUDIT LOG checked line: 2025-04-17T18:05:41.839783Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-04-17T18:05:41.847893Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" Password: "password1" CanLogin: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:41.852954Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:41.853073Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:41.853114Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:41.853159Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:41.853196Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:41.853259Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:41.853315Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-17T18:05:41.853354Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:41.853392Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T18:05:41.853430Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 
2025-04-17T18:05:41.853472Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-17T18:05:41.855715Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:41.855805Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-04-17T18:05:41.855987Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:41.856041Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:41.856204Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:41.856246Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:208:2210], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-17T18:05:41.856678Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:41.856757Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:41.856791Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-17T18:05:41.856825Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T18:05:41.856859Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:41.856947Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-17T18:05:41.858528Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 AUDIT LOG buffer(7): 2025-04-17T18:05:41.758657Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-17T18:05:41.800830Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-04-17T18:05:41.815726Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-04-17T18:05:41.823585Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, 
login_user=user1, login_user_change=[blocking] 2025-04-17T18:05:41.830706Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2025-04-17T18:05:41.839783Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-04-17T18:05:41.852837Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] AUDIT LOG checked line: 2025-04-17T18:05:41.852837Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2025-04-17T18:05:40.867273Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.942606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.960863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.961063Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.966851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.967032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.967249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.967365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.967439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.967543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.967612Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.967706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.967800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.967864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.967938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.968000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.987666Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.987865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.987907Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.988056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.988177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.988249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.988276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.988332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.988388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.988418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.988456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.988600Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.988645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.988671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.988691Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.988745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.988794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.988824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.988843Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.988907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.988934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.988959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.988995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.989017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.989033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.989297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=28; 2025-04-17T18:05:40.989362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-17T18:05:40.989424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-17T18:05:40.989483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-17T18:05:40.989587Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.989653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.989676Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.989850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.989893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.989913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.990006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.990032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:40.990056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:40.990212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:40.990243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:40.990272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:05:40.990355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:40.990384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:40.990430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
} Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-17T18:05:42.022557Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=118;this=88923004879296;method=TTxController::StartProposeOnExecute;tx_info=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;fline=schema.h:36;event=sync_schema; 2025-04-17T18:05:42.034969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004879296;op_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881225024;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-04-17T18:05:42.035047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004879296;op_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881225024;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-17T18:05:42.035091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004879296;op_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=1744913141971;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881225024;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2025-04-17T18:05:42.035489Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-17T18:05:42.035631Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1744913141971 at tablet 9437184, mediator 0 2025-04-17T18:05:42.035698Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2025-04-17T18:05:42.035981Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2025-04-17T18:05:42.036059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tables_manager.cpp:245;method=RegisterTable;path_id=19; 2025-04-17T18:05:42.036109Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine.h:144;event=RegisterTable;path_id=19; 
2025-04-17T18:05:42.036499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=19; 2025-04-17T18:05:42.036649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tx_controller.cpp:214;event=finished_tx;tx_id=118; 2025-04-17T18:05:42.048806Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2025-04-17T18:05:42.048940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-17T18:05:42.050396Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004882656;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1744913141974;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-17T18:05:42.062964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1744913141974;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004882656;op_tx=119:TX_KIND_SCHEMA;min=1744913141974;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-17T18:05:42.063041Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1744913141974;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004882656;op_tx=119:TX_KIND_SCHEMA;min=1744913141974;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 
DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-17T18:05:42.064159Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004884448;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1744913141975;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-17T18:05:42.076068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1744913141975;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004884448;op_tx=120:TX_KIND_SCHEMA;min=1744913141975;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-17T18:05:42.076142Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1744913141975;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004884448;op_tx=120:TX_KIND_SCHEMA;min=1744913141975;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-17T18:05:42.077484Z 
node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004886240;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1744913141977;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-17T18:05:42.089390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1744913141977;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004886240;op_tx=121:TX_KIND_SCHEMA;min=1744913141977;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-17T18:05:42.089447Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1744913141977;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004886240;op_tx=121:TX_KIND_SCHEMA;min=1744913141977;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> KqpQueryService::TableSink_OltpOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:39.378586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:39.378682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.378721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:39.378782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:39.379724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:39.379766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:39.379838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.379925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-17T18:05:39.381133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:39.467870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:39.467928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:39.482790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:39.483150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:39.483288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:39.490923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:39.491072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:39.491628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.491798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:39.494335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.500677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:39.500773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.500820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:39.500938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.507248Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:39.644777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:39.645023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.645232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:39.645445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:39.645504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.647555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-17T18:05:39.647674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:39.647828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.647883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:39.647938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:39.647982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:39.649825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.649873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:39.649906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:39.651629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.651670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.651706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.651767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.660571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:39.662690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:39.662946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:39.663911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.664052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:39.664116Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.664437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:39.664505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.664681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:39.664770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:39.667279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.667325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.667497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.667547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:39.667904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.667951Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:39.668049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.668095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.668144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.668177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.668212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:39.668252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.668293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:39.668325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:39.668410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:39.668448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:39.668481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:39.670442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.670566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.670606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
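The dumps in this section are flattened, but each record keeps the same prefix, for example `2025-04-17T18:05:42.089447Z node 1 :TX_COLUMNSHARD ERROR:`. A minimal sketch for splitting such a dump back into records and filtering by severity, assuming only that prefix format; the helper names are hypothetical and not part of ya or YDB:

import re

# Record prefix as it appears in the dumps above, e.g.:
#   2025-04-17T18:05:42.089447Z node 1 :TX_COLUMNSHARD ERROR: <message>
# Untagged runner text (e.g. FAKE_COORDINATOR lines) stays attached to the
# preceding record's message, which is fine for a rough triage pass.
RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node\s+(?P<node>\d+)\s+:(?P<component>[A-Z_]+)\s+"
    r"(?P<level>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT):\s*"
)

def split_records(flat_log: str):
    """Yield (ts, node, component, level, message) tuples from a flattened dump.

    Each message runs from the end of one prefix to the start of the next.
    """
    matches = list(RECORD.finditer(flat_log))
    for i, m in enumerate(matches):
        end = matches[i + 1].start() if i + 1 < len(matches) else len(flat_log)
        yield m["ts"], m["node"], m["component"], m["level"], flat_log[m.end():end].strip()

def errors_only(flat_log: str):
    """Keep only ERROR records."""
    return [r for r in split_records(flat_log) if r[3] == "ERROR"]

Applied to the TX_COLUMNSHARD chunk at the top of this section, this would surface the two "Invalid schema" rejections for key column k0 while dropping the scheme_shard_tablet_not_initialized WARN between them.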
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:42.348746Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.348896Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.348947Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-17T18:05:42.349015Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-17T18:05:42.349674Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.349787Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.349843Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:42.349892Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:05:42.349943Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:42.350404Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.350480Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.350512Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:42.350542Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-17T18:05:42.350572Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:42.350639Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:05:42.353258Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:05:42.353527Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-04-17T18:05:42.354026Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-17T18:05:42.354256Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 252us result status StatusSuccess 2025-04-17T18:05:42.354615Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-17T18:05:42.357831Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:42.358036Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.358074Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.358132Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.358170Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:42.358413Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:42.358528Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:42.358573Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.358620Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:42.358661Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.358730Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:42.358810Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-17T18:05:42.358858Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.358905Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T18:05:42.358942Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-17T18:05:42.358983Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-17T18:05:42.361405Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:42.361523Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-04-17T18:05:42.361732Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.361778Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.361968Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.362018Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-17T18:05:42.362577Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:42.362688Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:42.362737Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-17T18:05:42.362802Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T18:05:42.362851Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:42.362963Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-17T18:05:42.364899Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-17T18:05:42.365523Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:42.365714Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 222us result status StatusSuccess 2025-04-17T18:05:42.366122Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest
>> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:27.021647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:27.021728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.021766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:27.021808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:27.022819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:27.022879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:27.022956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:27.023060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:27.024201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:27.125864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:27.125908Z node 1 :IMPORT WARN: Table profiles
were not loaded 2025-04-17T18:05:27.137931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:27.138261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:27.138362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:27.151787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:27.151966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:27.152599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.152775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:27.158861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.160210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.160258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.160433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:27.160476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.160516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:27.160632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.166345Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:27.301385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:27.301627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.301858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:27.302107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:27.302160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.304197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.304333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:27.304528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.304585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:27.304639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:27.304671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:27.306526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.306601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:27.306638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:27.308438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.308496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.308539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.308613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.311593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:27.312852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:27.312983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:27.313856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:27.313967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:27.314021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.314324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:27.314376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:27.314521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:27.314628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:27.316587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:27.316631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:27.316798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:27.316840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:27.317210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:27.317268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:27.317362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.317392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.317447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:27.317479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.317510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:27.317545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:27.317584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:27.317610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:27.317663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:27.317693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:27.317721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:27.319520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.319643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:27.319684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
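The TopicDiskSpaceQuotas output in this suite prints text-format describe results with `DiskSpaceUsage { ... Topics { ... } }` and, near the end, `DatabaseQuotas { data_size_hard_quota: ... }`. A sketch for pulling those numbers out of such a dump, under the assumption (not stated by the log itself) that `AccountSize` inside the `Topics` block is the accounted usage to compare against the hard quota:

import re

def topic_usage_vs_quota(describe_text: str):
    """Extract topic usage and the hard quota from a DescribeScheme text dump.

    Assumes the text-format fields shown in this run:
      Topics { ReserveSize: N AccountSize: N DataSize: N UsedReserveSize: N }
      DatabaseQuotas { data_size_hard_quota: N }
    Returns (used, hard_quota, over_quota) or None if either block is absent.
    """
    topics = re.search(r"Topics\s*\{([^}]*)\}", describe_text)
    quota = re.search(r"data_size_hard_quota:\s*(\d+)", describe_text)
    if not topics or not quota:
        return None
    fields = dict(re.findall(r"(\w+):\s*(\d+)", topics.group(1)))
    used = int(fields.get("AccountSize", "0"))
    hard = int(quota.group(1))
    return used, hard, used >= hard

For the /MyRoot/USER_1 description near the end of this test's output (AccountSize: 0, data_size_hard_quota: 1) this returns (0, 1, False).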
Id 72075186233409548 2025-04-17T18:05:41.235807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-17T18:05:41.235856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-17T18:05:41.237091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:41.237214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 0 RawX2: 0 } } Step: 3150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:41.237268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3150, at schemeshard: 72057594046678944 2025-04-17T18:05:41.237436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:41.237517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-17T18:05:41.237703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:41.237766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:41.239474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:41.239516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:05:41.239978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:41.240023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:41.240148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:41.240229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:41.240342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:41.240368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T18:05:41.240398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T18:05:41.240419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-17T18:05:41.240762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:05:41.240794Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T18:05:41.240875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:05:41.240900Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:41.240937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:05:41.240960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:41.240988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-17T18:05:41.241022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:05:41.241070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:05:41.241099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:05:41.241194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:05:41.241234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-17T18:05:41.241266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-04-17T18:05:41.241288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-17T18:05:41.242051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:41.242145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:41.242171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:05:41.242231Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:05:41.242263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:41.243001Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-04-17T18:05:41.243156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:05:41.243188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:41.243235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:05:41.243392Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-04-17T18:05:41.243980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:41.244039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:05:41.244058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:05:41.244079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-04-17T18:05:41.244107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:05:41.244163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T18:05:41.244712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-17T18:05:41.245030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-17T18:05:41.247639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:41.247748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:05:41.248065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:05:41.249222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-17T18:05:41.249296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:05:41.249645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T18:05:41.249678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T18:05:41.250067Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T18:05:41.250142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:05:41.250171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:782:2696] TestWaitNotification: OK eventTxId 103 2025-04-17T18:05:41.721949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:41.722230Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 295us result status StatusSuccess 2025-04-17T18:05:41.722654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest
>> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:39.378533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:39.378691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.378735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:39.378796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:39.379732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:39.379778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:39.379849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.379931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:39.381136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:39.468427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:39.468490Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:39.483318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:39.483636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:39.483773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:39.491947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:39.492089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:39.492723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.492892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:39.495273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.500707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:39.500770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.500822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:39.500949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.511067Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:39.633573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:39.633798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.634016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:39.634243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:39.634312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.636653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.636779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:39.636944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.637087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:39.637122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:39.637172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:39.638978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.639035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:39.639082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:39.640927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.640971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.641011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.641072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.645795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:39.647513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:39.648130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:39.649114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.649221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:39.649273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.650675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:39.650735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.650950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:39.651034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:39.653490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.653534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.653683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.653723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:39.654110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.654159Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:39.654273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.654318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.654353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.654399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.654447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:39.654482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.654511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:39.654544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:39.654597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:39.654635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:39.654682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:39.656354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.656449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.656486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
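Between stderr dumps the runner emits one-line verdicts such as `>> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD]` and suite headers starting with `------- [TM]`. A small sketch that tallies those verdicts across a log like this one, assuming only the `>> Name [VERDICT]` marker; a `>>` line without a bracketed verdict (a test that has merely started, like RebootEnableColdTiersAfterTtl earlier in this section) is simply skipped:

import re
from collections import Counter

# Matches '>> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD]'
VERDICT = re.compile(r">>\s+(?P<test>[\w:./+-]+)\s+\[(?P<verdict>[A-Z]+)\]")

def tally_verdicts(log_text: str) -> Counter:
    """Count how many tests ended in each verdict (GOOD, FAIL, ...)."""
    return Counter(m["verdict"] for m in VERDICT.finditer(log_text))

def failed_tests(log_text: str) -> list[str]:
    """List test names whose verdict was anything other than GOOD."""
    return [m["test"] for m in VERDICT.finditer(log_text) if m["verdict"] != "GOOD"]

On the portion of the run shown in this section, every bracketed verdict is [GOOD], so tally_verdicts would report only that bucket and failed_tests would be empty.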
tablet# 72057594046678944 2025-04-17T18:05:42.267733Z node 5 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:42.267795Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:42.267983Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:42.268067Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:42.270025Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.270078Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.270287Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.270345Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:42.270711Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:42.270833Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:42.270957Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:42.271002Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:42.271055Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:42.271094Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:42.271144Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:42.271187Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:42.271232Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:42.271277Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:42.271349Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:42.271400Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:42.271443Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:42.272237Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:42.272352Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:42.272396Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2025-04-17T18:05:42.272457Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:05:42.272500Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:42.272602Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:05:42.276698Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:05:42.277221Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:42.277863Z node 5 :TX_PROXY DEBUG: actor# [5:272:2263] Bootstrap 2025-04-17T18:05:42.298952Z node 5 :TX_PROXY DEBUG: actor# [5:272:2263] Become StateWork (SchemeCache [5:277:2268]) 2025-04-17T18:05:42.299174Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:42.299359Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 211us result status StatusSuccess 2025-04-17T18:05:42.299738Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:42.300377Z node 5 :TX_PROXY DEBUG: actor# [5:272:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:05:42.303399Z node 5 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944 2025-04-17T18:05:42.304112Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.304168Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-17T18:05:42.427153Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 2025-04-17T18:05:42.427301Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.427351Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.427558Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.427608Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-17T18:05:42.428140Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2025-04-17T18:05:42.428559Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:42.428737Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 198us result status StatusSuccess 2025-04-17T18:05:42.429169Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvDmuYeFkodwut1MTDXmi\nrwKfwJ/2DBxVO8s/OhwdVk/k3u2dFxJwI0SP62Oyqu7GeDu8RN7r40LHgTVDQnWz\nUoJGyDmJImqjPmnK3qsHXZzXD4khOFUely4EYekfNlqQbDRsCgEg/1RYxTvu4O7S\nhs2slQnH5mnpoUCW9vdivbI4HHhlnhIKk69ItvuTzVWsxUTWhGnEw8y0xOYjtRot\nPBcROpFSKIRLkdzZz1lGrNdl92u+R0UagZlPen9HMISbYnrNUUP+NGRjOWtfr7tH\nLHfRUV1myckPgaFOIDiIoAh4yuyxvkadzGUeqxssF0JqBBwt0GJD0MkZ5MnZUVCb\nQwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744999542424 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:39.378537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:39.378668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.378723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:39.378796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:39.379740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:39.379782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:39.379873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.379967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:39.381171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:39.467694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:39.467760Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:39.487747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:39.488201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:39.488390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:39.497793Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:39.497952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:39.498665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.498879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:39.502034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.503239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.503318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.503479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:39.503523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.503583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:39.503697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.511181Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:39.641030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:39.641222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.641406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:39.641648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:39.641734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.643876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.644007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:39.644171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.644253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:39.644291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-04-17T18:05:39.644338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:39.645948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.645999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:39.646033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:39.647734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.647775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.647812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.647881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.657591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:39.659528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:39.659717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:39.660617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.660726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:39.660785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.661044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:39.661098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.661275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:39.661366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:39.663180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.663252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.663416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.663453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:39.663773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.663811Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:39.663888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.663932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.663966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.663993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.664068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:39.664110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.664141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:39.664170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:39.664224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:39.664265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:39.664299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:39.665948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.666069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.666114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:42.823503Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.823627Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.823688Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-17T18:05:42.823728Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-17T18:05:42.824219Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.824298Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.824332Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:42.824514Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:05:42.824565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:42.824956Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.825010Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.825031Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:42.825051Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-17T18:05:42.825071Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:42.825115Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:05:42.827242Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:05:42.827442Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-04-17T18:05:42.827779Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-17T18:05:42.827914Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 158us result status StatusSuccess 2025-04-17T18:05:42.828166Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-17T18:05:42.830388Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:42.830546Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.830584Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.830630Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.830660Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:42.830840Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:42.830925Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:42.830957Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.830992Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:42.831019Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.831064Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:42.831111Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-17T18:05:42.831147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.831180Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T18:05:42.831212Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-17T18:05:42.831253Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-17T18:05:42.833563Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:42.833700Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-04-17T18:05:42.833979Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.834047Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.834266Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.834325Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-17T18:05:42.834898Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:42.835012Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:42.835055Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-17T18:05:42.835106Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T18:05:42.835173Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:42.835279Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-17T18:05:42.837125Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-17T18:05:42.837738Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:42.837934Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 228us result status StatusSuccess 2025-04-17T18:05:42.838387Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:39.378535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:39.378620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.378655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:39.378707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:39.379709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:39.379746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:39.379812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.379885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:39.381137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:39.466719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:39.466798Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T18:05:39.481194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:39.481535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:39.481688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:39.494543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:39.494677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:39.495176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.495314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:39.498058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.500735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:39.500786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.500837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:39.500958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.508493Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:39.628458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:39.629816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.631486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:39.632583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:39.632638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.639762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.639876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:39.640019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.640078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:39.640115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:39.640155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:39.641718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.641764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:39.641791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:39.643153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.643188Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.643220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.643271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.645709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:39.647183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:39.648104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:39.649209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.649347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:39.649403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.650644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:39.650728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.650970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:39.651059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:39.653514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.653562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.653801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.653851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:39.654233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.654283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:39.654387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.654437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.654483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.654514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.654551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:39.654594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.654634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:39.654671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:39.654744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:39.654804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:39.654839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:39.662689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.662852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.662898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:05:42.871321Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:05:42.871361Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:05:42.871414Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:05:42.871456Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:05:42.871527Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-17T18:05:42.871598Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:05:42.871641Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-17T18:05:42.871689Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:05:42.871727Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:05:42.871768Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:05:42.871825Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-04-17T18:05:42.871865Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-04-17T18:05:42.871902Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-04-17T18:05:42.874111Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusSuccess TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:42.874354Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Dir1/DirSub1, set owner:user2 2025-04-17T18:05:42.874576Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.874629Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:05:42.874793Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:05:42.874912Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.874963Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-17T18:05:42.875012Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-17T18:05:42.875626Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.875741Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.875787Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:42.875831Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-04-17T18:05:42.875881Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:05:42.876324Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.876393Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:05:42.876420Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:05:42.876449Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-17T18:05:42.876477Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:05:42.876548Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:05:42.878588Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:05:42.879935Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-04-17T18:05:42.882842Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "user1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:42.883247Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:42.883361Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:42.883403Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.883451Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-17T18:05:42.883496Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.883559Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:42.883628Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is 
published: false 2025-04-17T18:05:42.883675Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-17T18:05:42.883717Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-17T18:05:42.883761Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-17T18:05:42.883802Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-04-17T18:05:42.885930Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:42.886042Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2025-04-17T18:05:42.886280Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.886332Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.886528Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.886578Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-17T18:05:42.887130Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:42.887247Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-17T18:05:42.887289Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-17T18:05:42.887340Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T18:05:42.887389Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:42.887495Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-17T18:05:42.889174Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-17T18:05:42.889731Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:42.889926Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 222us result status StatusSuccess 2025-04-17T18:05:42.890283Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self 
{ Name: "DirSub1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user2" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:42.890808Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.890919Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 >> TColumnShardTestSchema::RebootColdTiersWithStat |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpOrder [GOOD] Test command err: Trying to start YDB, gRPC: 2594, MsgBus: 7922 2025-04-17T18:05:10.052561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344742925188807:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:10.058267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1b/r3tmp/tmpruMJx9/pdisk_1.dat 2025-04-17T18:05:10.460704Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:10.467008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:10.467288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:10.471533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2594, node 1 2025-04-17T18:05:10.557466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:10.557486Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:10.557494Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:10.557584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7922 TClient is connected to server localhost:7922 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:11.113100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:12.971833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344751515124058:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:12.971937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:13.243556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:13.373342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344755810091508:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:13.373411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:13.373575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344755810091513:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:13.377401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:13.394598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344755810091515:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:05:13.464436Z node 1 :TX_PROXY ERROR: Actor# [1:7494344755810091566:2432] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:15.061615Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344742925188807:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:15.061695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14836, MsgBus: 18096 2025-04-17T18:05:16.326575Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344768192014760:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:16.326626Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1b/r3tmp/tmpUZMXBt/pdisk_1.dat 2025-04-17T18:05:16.510635Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14836, node 2 2025-04-17T18:05:16.605163Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:16.605191Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:16.605200Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:16.605327Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:05:16.642310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:16.642391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:16.643861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18096 TClient is connected to server localhost:18096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
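The KQP_WORKLOAD_SERVICE warnings above ("Resource pool default not found or you don't have access permissions", the "Transaction ... completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" error) describe a benign startup race rather than a failure: the first queries arrive before the workload manager has materialized `.metadata/workload_manager/pools/default`, TPoolCreatorActor then creates the pool, and a concurrent creation attempt finds the path already present. A minimal YQL sketch of the kind of DDL involved; the pool name and settings are illustrative assumptions, not values taken from this log:

    -- Illustrative only: the workload manager auto-creates a pool named
    -- "default"; this shows the equivalent user-facing DDL for a custom pool.
    CREATE RESOURCE POOL sample_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed limits, not from the test
        QUEUE_SIZE = 100
    );

If two sessions race to create the same pool, the loser gets the "path exist, request accepts it" status seen above, which the service treats as success.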
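Further down, the KqpQueryService::TableSink_OltpOrder output repeats one pattern for tx_id 97 through 101: the write sink reports STATUS_CONSTRAINT_VIOLATION, KQP_COMPUTE surfaces it as PRECONDITION_FAILED with "Constraint violated. Table: `/Root/DataShard`." and subissue "Duplicate keys have been found., code: 2012", and the session answers the request with an error. This is the expected INSERT semantics on a primary-key collision. A minimal YQL sketch that reproduces this class of error; the table name matches the test, but the column layout is an assumption, since the schema is not shown in the log:

    CREATE TABLE `/Root/DataShard` (
        Key Uint64 NOT NULL,
        Value Utf8,
        PRIMARY KEY (Key)
    );

    -- The second INSERT fails with PRECONDITION_FAILED / "Duplicate keys
    -- have been found" (code 2012) because the key already exists.
    INSERT INTO `/Root/DataShard` (Key, Value) VALUES (1, "first");
    INSERT INTO `/Root/DataShard` (Key, Value) VALUES (1, "second");

UPSERT INTO (or REPLACE INTO) overwrites an existing row instead of failing; INSERT is what exercises the constraint path seen in this test.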
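Near the end of this section, the TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock output walks the lockout state machine: four TTxLogin attempts end in "Invalid password", the next two are rejected with "User user1 login denied: too many failed password attempts", and once the lockout window has passed (18:05:42 to 18:05:46) a further wrong password is again reported as "Invalid password" rather than denied, and a correct one returns a token plus its sanitized form. The user itself is ordinary schemeshard DDL; a sketch, with the password value assumed rather than taken from the log:

    -- Hypothetical setup for the scenario; the lockout threshold and
    -- unlock duration come from server-side auth configuration, not YQL.
    CREATE USER user1 PASSWORD 'password1';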
2025-04-17T18:05:16.985306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.453417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344781076917279:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.453545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.472989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:05:19.562482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344781076917383:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.562551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.562748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344781076917388:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:19.565797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:05:19.576491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344781076917390:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:05:19.648883Z node 2 :TX_PROXY ERROR: Actor# [2:7494344781076917441:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17414, MsgBus: 8835 2025-04-17T18:05:20.905888Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344786079309372:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:20.905931Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1b/r3tmp/tmpU7qKsd/pdisk_1.d ... e been found., code: 2012 . sessionActorId=[3:7494344803259179312:2340]. isRollback=0 2025-04-17T18:05:36.934169Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, ActorId: [3:7494344803259179312:2340], ActorState: ExecuteState, TraceId: 01js2etz7h00mrmy3wszqn2wt4, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7494344854798790252:2340] from: [3:7494344854798790251:2340] 2025-04-17T18:05:36.934237Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344854798790252:2340] TxId: 281474976715755. Ctx: { TraceId: 01js2etz7h00mrmy3wszqn2wt4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:05:36.934379Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, ActorId: [3:7494344803259179312:2340], ActorState: ExecuteState, TraceId: 01js2etz7h00mrmy3wszqn2wt4, Create QueryResponse for error on request, msg: 2025-04-17T18:05:37.049147Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=97; 2025-04-17T18:05:37.049410Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757593:2365], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7494344803259179434:2365]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7494344859093757593:2365].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:05:37.049485Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757584:2365], SessionActorId: [3:7494344803259179434:2365], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7494344803259179434:2365]. isRollback=0 2025-04-17T18:05:37.049651Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, ActorId: [3:7494344803259179434:2365], ActorState: ExecuteState, TraceId: 01js2etzaz1kytxh84yxv0qckm, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7494344859093757585:2365] from: [3:7494344859093757584:2365] 2025-04-17T18:05:37.049711Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344859093757585:2365] TxId: 281474976715756. Ctx: { TraceId: 01js2etzaz1kytxh84yxv0qckm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:05:37.049864Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, ActorId: [3:7494344803259179434:2365], ActorState: ExecuteState, TraceId: 01js2etzaz1kytxh84yxv0qckm, Create QueryResponse for error on request, msg: 2025-04-17T18:05:37.198496Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=98; 2025-04-17T18:05:37.198708Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757628:2340], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7494344803259179312:2340]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7494344859093757628:2340].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:05:37.198761Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757618:2340], SessionActorId: [3:7494344803259179312:2340], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7494344803259179312:2340]. isRollback=0 2025-04-17T18:05:37.198897Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, ActorId: [3:7494344803259179312:2340], ActorState: ExecuteState, TraceId: 01js2etzen2gw3xjs181te00f5, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7494344859093757619:2340] from: [3:7494344859093757618:2340] 2025-04-17T18:05:37.198939Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344859093757619:2340] TxId: 281474976715757. Ctx: { TraceId: 01js2etzen2gw3xjs181te00f5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:05:37.199053Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, ActorId: [3:7494344803259179312:2340], ActorState: ExecuteState, TraceId: 01js2etzen2gw3xjs181te00f5, Create QueryResponse for error on request, msg: 2025-04-17T18:05:37.304010Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=99; 2025-04-17T18:05:37.304208Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757662:2365], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7494344803259179434:2365]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7494344859093757662:2365].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:05:37.304253Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757652:2365], SessionActorId: [3:7494344803259179434:2365], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7494344803259179434:2365]. isRollback=0 2025-04-17T18:05:37.304366Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, ActorId: [3:7494344803259179434:2365], ActorState: ExecuteState, TraceId: 01js2etzk92ftd3hybp34q64yn, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7494344859093757653:2365] from: [3:7494344859093757652:2365] 2025-04-17T18:05:37.304411Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344859093757653:2365] TxId: 281474976715758. Ctx: { TraceId: 01js2etzk92ftd3hybp34q64yn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:05:37.304527Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, ActorId: [3:7494344803259179434:2365], ActorState: ExecuteState, TraceId: 01js2etzk92ftd3hybp34q64yn, Create QueryResponse for error on request, msg: 2025-04-17T18:05:37.402756Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=100; 2025-04-17T18:05:37.402969Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757695:2340], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7494344803259179312:2340]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7494344859093757695:2340].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:05:37.403017Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757685:2340], SessionActorId: [3:7494344803259179312:2340], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7494344803259179312:2340]. isRollback=0 2025-04-17T18:05:37.403132Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, ActorId: [3:7494344803259179312:2340], ActorState: ExecuteState, TraceId: 01js2etzp8eq7gpw2s8p0pwgcm, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7494344859093757686:2340] from: [3:7494344859093757685:2340] 2025-04-17T18:05:37.403176Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344859093757686:2340] TxId: 281474976715759. Ctx: { TraceId: 01js2etzp8eq7gpw2s8p0pwgcm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:05:37.403307Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWYwMjVkNjItNDgxNTQ3ZTItM2VjNjU5YzktYTVmOGU4MmU=, ActorId: [3:7494344803259179312:2340], ActorState: ExecuteState, TraceId: 01js2etzp8eq7gpw2s8p0pwgcm, Create QueryResponse for error on request, msg: 2025-04-17T18:05:37.502738Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=101; 2025-04-17T18:05:37.502975Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757732:2365], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7494344803259179434:2365]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7494344859093757732:2365].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:05:37.503023Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494344859093757722:2365], SessionActorId: [3:7494344803259179434:2365], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7494344803259179434:2365]. isRollback=0 2025-04-17T18:05:37.503129Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, ActorId: [3:7494344803259179434:2365], ActorState: ExecuteState, TraceId: 01js2etzsd5e9sdpsxzcvn1vmp, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7494344859093757723:2365] from: [3:7494344859093757722:2365] 2025-04-17T18:05:37.503172Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494344859093757723:2365] TxId: 281474976715760. Ctx: { TraceId: 01js2etzsd5e9sdpsxzcvn1vmp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:05:37.503288Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTYwZThkYTYtYTNhYTFlMzAtMWMxYWU3NGYtNGQyODY1OTI=, ActorId: [3:7494344803259179434:2365], ActorState: ExecuteState, TraceId: 01js2etzsd5e9sdpsxzcvn1vmp, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn >> TColumnShardTestSchema::RebootHotTiers >> TColumnShardTestSchema::RebootHotTiersTtlWithStat >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> TColumnShardTestSchema::RebootDrop >> TColumnShardTestSchema::RebootHotTiersTtl >> TColumnShardTestSchema::RebootExportWithLostAnswer >> TColumnShardTestSchema::HotTiersRevCompression |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:39.378527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:39.378613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.378647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:39.378711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:39.379671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:39.379708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:39.379790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:39.379878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:39.381179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:05:39.455483Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:39.455541Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:39.469192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:39.470369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:39.470564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:39.481276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:39.481439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:39.486418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.486729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:39.496061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.499943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.500701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:39.500757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.500808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:39.500958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.507134Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:39.649954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:39.650191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.650410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:39.650655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:39.650720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.652707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.652854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:39.653021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.653080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:39.653117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:39.653166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:39.655363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.655416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:39.655454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:39.657072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.657119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.657171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.657243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.661158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:39.662897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:39.663078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:39.663957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:39.664070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:39.664124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.664382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:39.664432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:39.664642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:39.664765Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:39.666587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:39.666630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:39.666812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:39.666856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:39.667225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:39.667268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:39.667365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.667415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.667453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:39.667490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.667524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:39.667560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:39.667596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:39.667631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:39.667687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:39.667722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:39.667755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:39.669485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.669581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:39.669620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:05:42.535213Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:42.535276Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:42.535339Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:05:42.535383Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:05:42.535421Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:05:42.535459Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-17T18:05:42.535498Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-17T18:05:42.537718Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:42.537846Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-17T18:05:42.538051Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.538118Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.538318Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.538369Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:05:42.538892Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:42.538995Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:05:42.539037Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:05:42.539078Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-17T18:05:42.539119Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:05:42.539228Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:05:42.540966Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-17T18:05:42.541388Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.541432Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-17T18:05:42.579452Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-17T18:05:42.579593Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:42.579639Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:42.579850Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:42.579900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:209:2211], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-17T18:05:42.580402Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-04-17T18:05:42.580727Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.587071Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-17T18:05:42.587413Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.593773Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-17T18:05:42.594127Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.603743Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-17T18:05:42.604223Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.604359Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-04-17T18:05:42.604737Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:42.604838Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-04-17T18:05:42.605262Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:42.605474Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 240us result status StatusSuccess 2025-04-17T18:05:42.605909Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription 
{ SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0eOEMvljIkG56fNz9NFe\n7aDjTvHNrTguJTSFR8C7DLkXJzoJlRLgmf7VW9V90hLB0PL2spZtdOkolR0hGz/N\nMFSg6mDNBzuWCrzKR+mG6L3mKbz3HTPKVuvcxo7Qym1Pg7l+MXtvtdOAX8hFK+Dp\nsKJsC8KFX1frODbhrR5VZ205k7R/vsBXmQc9cXwmaFJC3pf7pfje2ooHA10/BFjH\niwQxaHaC7oMmIlTaEmv71hFaw4H83RgFY6dNnV0gmoh907ECdJC94634yTBT4a1V\n7j81Up5qK1mKrz1KGhbfmYxehnNmlK6RlJYDyionzHjoHUQ9SpqORS2+OQovlrFR\njwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744999542572 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:46.610157Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:46.625523Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-17T18:05:46.626081Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-17T18:05:46.638377Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU2MzQ2LCJpYXQiOjE3NDQ5MTMxNDYsInN1YiI6InVzZXIxIn0.D-5-98kMaWzlIdyzGXZcYZMpArXKRFNBcuGLAteKjF46T169KQO5gOcCEXUH_3SVTLmxQU5Y6ti2gHU5t2o-pHxOp8dN1G49DYgNIF1-dvaodxGwxExMhcbsip4MWdPT5S6kmI8790-SsQFxn5zMsrGLxKBFuBTW_LeH95dP_p_u_uJ3AjGlQ8QhyvJmKOxGf1gTHIPCRlOGVjn_igEfh_x1tOZtt3L7MrrpV-M6z_lmxUHamVdBHty7pInhKV4jXXNJMQ9_bKWue-YsPDPyciXWEGxB6GuVpdtjrnBnz1aTO_9V4p1K3EXlq2C59dkdzik676rBnjiHzMw-ZZYbYQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0OTU2MzQ2LCJpYXQiOjE3NDQ5MTMxNDYsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-17T18:05:46.638983Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:05:46.639204Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 243us result status StatusSuccess 2025-04-17T18:05:46.639663Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription 
{ SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0eOEMvljIkG56fNz9NFe\n7aDjTvHNrTguJTSFR8C7DLkXJzoJlRLgmf7VW9V90hLB0PL2spZtdOkolR0hGz/N\nMFSg6mDNBzuWCrzKR+mG6L3mKbz3HTPKVuvcxo7Qym1Pg7l+MXtvtdOAX8hFK+Dp\nsKJsC8KFX1frODbhrR5VZ205k7R/vsBXmQc9cXwmaFJC3pf7pfje2ooHA10/BFjH\niwQxaHaC7oMmIlTaEmv71hFaw4H83RgFY6dNnV0gmoh907ECdJC94634yTBT4a1V\n7j81Up5qK1mKrz1KGhbfmYxehnNmlK6RlJYDyionzHjoHUQ9SpqORS2+OQovlrFR\njwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744999542572 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpService::CloseSessionsWithLoad [GOOD] >> KqpService::PatternCache ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:05:34.046557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:05:34.046647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.046706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:05:34.046742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:05:34.046784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:05:34.046834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:05:34.046899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:05:34.046978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:05:34.047291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-17T18:05:34.126601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:05:34.126653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:34.147092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:05:34.147513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:05:34.147685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:05:34.164290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:05:34.164523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:05:34.165273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.165426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:05:34.168253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.169407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.169450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.169578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:05:34.169614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.169662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:05:34.169749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.176190Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:05:34.297848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:05:34.298108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.298340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:05:34.298557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:05:34.298617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.300906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.301040Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:05:34.301221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.301276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:05:34.301321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:05:34.301361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:05:34.303098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.303146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:05:34.303179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:05:34.304771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.304819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.304858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.304909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.308003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:05:34.309385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:05:34.309551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:05:34.310301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:05:34.310380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:05:34.310421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.310788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:05:34.310846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:05:34.310958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-17T18:05:34.311022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:05:34.312764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:05:34.312820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:05:34.312938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:05:34.312963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:05:34.313262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:05:34.313304Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:05:34.313370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.313390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.313416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:05:34.313437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.313489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:05:34.313517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:05:34.313541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:05:34.313570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:05:34.313614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:05:34.313649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:05:34.313671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:05:34.315439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.315517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:05:34.315539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
: Source { RawX1: 526 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:05:46.663701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-17T18:05:46.663872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 526 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:05:46.663926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-17T18:05:46.666116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:46.666214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-17T18:05:46.666267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:05:46.666308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-17T18:05:46.666408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-17T18:05:46.666535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-17T18:05:46.666667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-17T18:05:46.666746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-17T18:05:46.669155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:46.669432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:46.670862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T18:05:46.670923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T18:05:46.671082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-17T18:05:46.671254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T18:05:46.671309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-17T18:05:46.671351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-17T18:05:46.671923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:46.671979Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-17T18:05:46.672062Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:46.672096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-17T18:05:46.672136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-17T18:05:46.672912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:46.673008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:46.673035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-17T18:05:46.673071Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-04-17T18:05:46.673112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-17T18:05:46.674116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:46.674228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-17T18:05:46.674262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-17T18:05:46.674290Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:05:46.674321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-17T18:05:46.674382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T18:05:46.677725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-17T18:05:46.677792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-17T18:05:46.678173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-17T18:05:46.678402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:46.678461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:46.678508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:05:46.678541Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:46.678582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-17T18:05:46.678650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:554:2493] message: TxId: 104 2025-04-17T18:05:46.678699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:05:46.678773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:05:46.678810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:05:46.678914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-17T18:05:46.679582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-17T18:05:46.679612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-17T18:05:46.680895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-17T18:05:46.680984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-17T18:05:46.683427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-17T18:05:46.683481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-17T18:05:46.683569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:05:46.683609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1430:3339] 2025-04-17T18:05:46.684366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-17T18:05:46.688376Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-17T18:05:46.688603Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 264us result status StatusSuccess 2025-04-17T18:05:46.689766Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
|86.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log}
|86.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log}
>> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD]
>> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|86.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log}
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|86.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD]
Test command err:
2025-04-17T18:05:43.384095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T18:05:43.384670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:05:43.384729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00190f/r3tmp/tmpIdoVDo/pdisk_1.dat
2025-04-17T18:05:43.778536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T18:05:43.823147Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:43.873152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:05:43.873283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:05:43.885062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:05:43.965138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T18:05:44.282306Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 }
2025-04-17T18:05:44.282488Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable"
2025-04-17T18:05:44.406210Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.123266s, errors=0
2025-04-17T18:05:44.406365Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2
2025-04-17T18:05:47.589854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T18:05:47.590074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:05:47.590267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00190f/r3tmp/tmpO0X15Q/pdisk_1.dat
2025-04-17T18:05:47.839509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T18:05:47.868134Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:05:47.905195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:05:47.905330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:05:47.916833Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:05:48.012476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T18:05:48.287958Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 }
2025-04-17T18:05:48.288117Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable"
2025-04-17T18:05:48.388143Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.099561s, errors=0
2025-04-17T18:05:48.388250Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2
>> TColumnShardTestSchema::Drop [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD]
Test command err:
2025-04-17T18:05:39.219080Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-17T18:05:39.386079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:05:39.412430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-17T18:05:39.413289Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-17T18:05:39.425707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:05:39.425947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:05:39.426215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:39.426334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:39.426445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:39.426571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:39.426677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:39.426841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:39.426978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:39.427077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.427180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:39.427318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:39.459266Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:39.459445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:39.459505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:39.459715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:39.461392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:39.461492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:39.461540Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:39.461700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:39.461775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:39.461815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:39.461845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:39.462009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:39.462087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:39.462125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:39.462158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:39.462413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:39.462482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:39.462543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:39.462576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:39.462646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:39.462695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:39.462749Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:39.462819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:39.462883Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:39.462919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:39.463368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-17T18:05:39.463461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:05:39.463580Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=72; 2025-04-17T18:05:39.463679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-04-17T18:05:39.463838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:39.463908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:39.463947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:39.464202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:39.464258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.464301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.464484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:39.464532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:39.464575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:39.464777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:39.464830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:39.464865Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:05:39.465025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:39.465072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:39.465118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:122:2872:0]; 2025-04-17T18:05:49.611946Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:123:2872:0]; 2025-04-17T18:05:49.611977Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:124:2872:0]; 2025-04-17T18:05:49.612018Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:125:2872:0]; 2025-04-17T18:05:49.612065Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:126:2872:0]; 2025-04-17T18:05:49.612102Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:127:2872:0]; 2025-04-17T18:05:49.612139Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:128:2872:0]; 2025-04-17T18:05:49.612171Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:129:2872:0]; 2025-04-17T18:05:49.612202Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:130:2872:0]; 2025-04-17T18:05:49.612233Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:131:2864:0]; 2025-04-17T18:05:49.612261Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:132:2872:0]; 2025-04-17T18:05:49.612289Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:133:2872:0]; 2025-04-17T18:05:49.612320Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:134:2864:0]; 2025-04-17T18:05:49.612351Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:135:2864:0]; 2025-04-17T18:05:49.612381Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:136:2864:0]; 2025-04-17T18:05:49.612408Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:137:2856:0]; 2025-04-17T18:05:49.612434Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:138:2864:0]; 2025-04-17T18:05:49.612461Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:139:2864:0]; 2025-04-17T18:05:49.612493Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:140:2856:0]; 2025-04-17T18:05:49.612540Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:141:2856:0]; 2025-04-17T18:05:49.612588Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:142:2800:0]; 2025-04-17T18:05:49.612631Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:143:2752:0]; 2025-04-17T18:05:49.612676Z 
node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:144:2792:0]; 2025-04-17T18:05:49.612723Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:145:2792:0]; 2025-04-17T18:05:49.612828Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:146:2792:0]; 2025-04-17T18:05:49.612875Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:147:2784:0]; 2025-04-17T18:05:49.612924Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:148:2784:0]; 2025-04-17T18:05:49.612975Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:149:2784:0]; 2025-04-17T18:05:49.613021Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:150:2784:0]; 2025-04-17T18:05:49.613072Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:151:2784:0]; 2025-04-17T18:05:49.613120Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:152:2776:0]; 2025-04-17T18:05:49.613166Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:153:2768:0]; 2025-04-17T18:05:49.613208Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:154:9448:0]; 2025-04-17T18:05:49.617437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-17T18:05:49.617709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-17T18:05:49.731237Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:05:49.731935Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[22] (CS::GENERAL) apply at tablet 9437184 2025-04-17T18:05:49.755691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=973ce744-1bb611f0-b2ad3100-ff399a8f;fline=with_appended.cpp:24;event=skip_inserted_data;reason=table_removed;path_id=1; 2025-04-17T18:05:49.758374Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 464 2025-04-17T18:05:49.762011Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=10308;raw_bytes=8378;count=1;records=100} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5601076;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=5605344;raw_bytes=7864506;count=2;records=80000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:05:49.778304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:05:49.778517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:05:49.778666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:05:49.778749Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:654:2671] finished for tablet 9437184 2025-04-17T18:05:49.779251Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:644:2661];stats={"p":[{"events":["f_bootstrap"],"t":0.009},{"events":["l_bootstrap","f_ProduceResults"],"t":0.012},{"events":["f_ack","f_processing"],"t":0.171},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.172}],"full":{"a":1744913149606647,"name":"_full_task","f":1744913149606647,"d_finished":0,"c":0,"l":1744913149778818,"d":172171},"events":[{"name":"bootstrap","f":1744913149616220,"d_finished":3053,"c":1,"l":1744913149619273,"d":3053},{"a":1744913149778239,"name":"ack","f":1744913149778239,"d_finished":0,"c":0,"l":1744913149778818,"d":579},{"a":1744913149778172,"name":"processing","f":1744913149778172,"d_finished":0,"c":0,"l":1744913149778818,"d":646},{"name":"ProduceResults","f":1744913149619243,"d_finished":369,"c":2,"l":1744913149778712,"d":369},{"a":1744913149778716,"name":"Finish","f":1744913149778716,"d_finished":0,"c":0,"l":1744913149778818,"d":102}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:05:49.779340Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:644:2661];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:05:49.779758Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:644:2661];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.009},{"events":["l_bootstrap","f_ProduceResults"],"t":0.012},{"events":["f_ack","f_processing"],"t":0.171},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.172}],"full":{"a":1744913149606647,"name":"_full_task","f":1744913149606647,"d_finished":0,"c":0,"l":1744913149779388,"d":172741},"events":[{"name":"bootstrap","f":1744913149616220,"d_finished":3053,"c":1,"l":1744913149619273,"d":3053},{"a":1744913149778239,"name":"ack","f":1744913149778239,"d_finished":0,"c":0,"l":1744913149779388,"d":1149},{"a":1744913149778172,"name":"processing","f":1744913149778172,"d_finished":0,"c":0,"l":1744913149779388,"d":1216},{"name":"ProduceResults","f":1744913149619243,"d_finished":369,"c":2,"l":1744913149778712,"d":369},{"a":1744913149778716,"name":"Finish","f":1744913149778716,"d_finished":0,"c":0,"l":1744913149779388,"d":672}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:05:49.779847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:05:49.605712Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:05:49.779896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:05:49.780011Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:654:2671];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |86.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestDisconnectWhileAttaching
>> TProxyActorTest::TestAttachSession
>> TProxyActorTest::TestAttachSession [GOOD]
>> TProxyActorTest::TestDisconnectWhileAttaching [GOOD]
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestAttachSession [GOOD]
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD]
Test command err:
... waiting for blocked registrations
... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR
... waiting for blocked registrations (done)
2025-04-17T18:05:56.631537Z node 1 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 2
... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestCreateSemaphore
>> TProxyActorTest::TestCreateSemaphore [GOOD]
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestCreateSemaphore [GOOD]
|86.9%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|86.9%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TColumnShardTestSchema::RebootDrop [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootDrop [GOOD]
Test command err:
2025-04-17T18:05:45.280335Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-17T18:05:45.374007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:05:45.397573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-17T18:05:45.397864Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-17T18:05:45.405494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:05:45.405738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:05:45.405992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:05:45.406138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:05:45.406282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:05:45.406437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:05:45.406587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:05:45.406772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:05:45.406939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:05:45.407081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:05:45.407217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:05:45.407370Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:45.433677Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:45.433871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:45.433920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:45.434107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.434294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:45.434402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:45.434445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:45.434545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:45.434612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:45.434662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:45.434702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:45.434902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.434964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:45.435001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:45.435031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:45.435114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:45.435162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:45.435228Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:45.435274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:45.435341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:45.435376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:45.435422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:45.435476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:45.435516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:45.435546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:45.435986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-17T18:05:45.436069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-17T18:05:45.436142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:05:45.436218Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T18:05:45.436361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:45.436432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:45.436484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:45.436680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:45.436733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.436762Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.436920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:45.436959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:45.436998Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:45.437263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:45.437307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:45.437352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:05:45.437485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:45.437530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:45.437574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... _id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:00.298841Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
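The TTxUpdateSchema trace above runs the registered normalizers strictly in registration order (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks), logging normalizer_init, a per-normalizer "chunks found" message, and normalizer_finished before switching to the next one. A minimal sketch of that sequential chain, with illustrative stand-in types rather than the real ydb/core/tx/columnshard classes:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Stand-in for one repair pass run at tablet init; the real normalizers
    // are full classes, this sketch only models the sequencing and logging.
    struct TNormalizer {
        std::string Name;            // e.g. "Granules", "Chunks", "RestoreV2Chunks"
        std::function<size_t()> Run; // returns the number of chunks it touched
    };

    void RunNormalizationChain(const std::vector<TNormalizer>& chain) {
        for (const auto& n : chain) {
            std::cout << "normalizer_init;type=" << n.Name << "\n";
            size_t fixed = n.Run(); // each normalizer completes before the next starts
            std::cout << "normalizer=" << n.Name << ";message=" << fixed
                      << " chunks found\n";
            std::cout << "normalizer_finished;description=CLASS_NAME=" << n.Name << "\n";
        }
        std::cout << "normalization_finished\n";
    }

    int main() {
        RunNormalizationChain({
            {"Granules", [] { return 0u; }}, // 0 chunks, matching the trace
            {"Chunks",   [] { return 0u; }},
        });
    }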
2025-04-17T18:06:00.298874Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:00.298941Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:00.299017Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:00.299088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-17T18:06:00.299155Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912846633;tx_id=18446744073709551615;;current_snapshot_ts=1744913146442; 2025-04-17T18:06:00.299210Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:00.299258Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:00.299301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:00.299369Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-17T18:06:00.299410Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:00.299830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=7;path_id=1; 2025-04-17T18:06:00.300406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T18:06:00.471883Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913146633:max} readable: {1744913146633:max} at tablet 9437184 2025-04-17T18:06:00.472045Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:00.474258Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913146633:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 9 } } } ; 2025-04-17T18:06:00.474351Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913146633:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 9 } } } ; 2025-04-17T18:06:00.474988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913146633:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"saved_at","id":9}]},"o":"9","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"9","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:00.475139Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913146633:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:00.476275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:947:2948];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913146633:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1007:3000];trace_detailed=; 2025-04-17T18:06:00.477492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-17T18:06:00.477764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-17T18:06:00.478244Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:00.478387Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:06:00.478501Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:06:00.478546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1007:3000] finished for tablet 9437184 2025-04-17T18:06:00.478965Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1000:2994];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1744913160476209,"name":"_full_task","f":1744913160476209,"d_finished":0,"c":0,"l":1744913160478606,"d":2397},"events":[{"name":"bootstrap","f":1744913160476454,"d_finished":1456,"c":1,"l":1744913160477910,"d":1456},{"a":1744913160478217,"name":"ack","f":1744913160478217,"d_finished":0,"c":0,"l":1744913160478606,"d":389},{"a":1744913160478195,"name":"processing","f":1744913160478195,"d_finished":0,"c":0,"l":1744913160478606,"d":411},{"name":"ProduceResults","f":1744913160477891,"d_finished":269,"c":2,"l":1744913160478526,"d":269},{"a":1744913160478528,"name":"Finish","f":1744913160478528,"d_finished":0,"c":0,"l":1744913160478606,"d":78}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:06:00.479047Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1000:2994];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:00.479401Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1000:2994];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1744913160476209,"name":"_full_task","f":1744913160476209,"d_finished":0,"c":0,"l":1744913160479088,"d":2879},"events":[{"name":"bootstrap","f":1744913160476454,"d_finished":1456,"c":1,"l":1744913160477910,"d":1456},{"a":1744913160478217,"name":"ack","f":1744913160478217,"d_finished":0,"c":0,"l":1744913160479088,"d":871},{"a":1744913160478195,"name":"processing","f":1744913160478195,"d_finished":0,"c":0,"l":1744913160479088,"d":893},{"name":"ProduceResults","f":1744913160477891,"d_finished":269,"c":2,"l":1744913160478526,"d":269},{"a":1744913160478528,"name":"Finish","f":1744913160478528,"d_finished":0,"c":0,"l":1744913160479088,"d":560}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:06:00.479470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:00.475101Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:00.479517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:00.479606Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1007:3000];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpSystemView::QueryStatsSimple [GOOD] >> S3SettingsConversion::StyleDeduction [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 27132, MsgBus: 62798 2025-04-17T18:05:13.689947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344757864246933:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:13.690003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:05:14.171828Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494344756932002747:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:14.172264Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b99/r3tmp/tmpr03bTK/pdisk_1.dat 2025-04-17T18:05:14.349483Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:14.770257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:05:14.775599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.775709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:14.784926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.785024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:14.785190Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:14.785236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:14.792280Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:05:14.792336Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:05:14.792482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:14.796456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:14.796677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:05:14.837468Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27132, node 1 2025-04-17T18:05:14.960039Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:05:14.965327Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:05:15.113252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:15.113277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:15.113286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:15.113400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62798 TClient is connected to server localhost:62798 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:16.360448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.402212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:16.874477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:17.435181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:17.769417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:05:18.690743Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344757864246933:2075];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:05:18.690812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:05:18.878276Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494344756932002747:2221];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:05:18.878350Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:05:19.824627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344783634052747:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:19.824765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:20.171538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:05:20.237836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:05:20.341285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:05:20.437316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:05:20.519144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:05:20.610906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:05:20.697588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:05:20.846336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344787929020909:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:20.846527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:20.847310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344787929020915:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:05:20.851479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:05:20.885788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344787929020918:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:20.950695Z node 1 :TX_PROXY ERROR: Actor# [1:7494344787929020995:4279] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:22.720861Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913122698, txId: 281474976710674] shutting down 2025-04-17T18:05:23.167225Z node 3 :BS_PROXY_PUT ERROR: [0f2f09948599d1ba] Result# TEvPutResult {Id# [72075186224037889:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:22:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-17T18:05:23.164693Z node 2 :BS_PROXY_PUT ERROR: [243115f23e3c6d3e] Result# TEvPutResult {Id# [72075186224037888:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:22:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YD ... State# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037919:1:21:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 13 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YDB, gRPC: 14687, MsgBus: 21069 2025-04-17T18:06:02.079086Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7494344965273420268:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:02.079177Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:06:02.096210Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7494344965223244086:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:02.096265Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b99/r3tmp/tmpoVAduo/pdisk_1.dat 2025-04-17T18:06:02.226905Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:02.311596Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:02.360682Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:02.360794Z node 16 
:HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:02.361561Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:02.361653Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:02.362245Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:02.362320Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:02.369117Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2025-04-17T18:06:02.369162Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2025-04-17T18:06:02.369270Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:02.370105Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:02.370291Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14687, node 16 2025-04-17T18:06:02.440499Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:02.440531Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:02.440542Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:02.440737Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21069 TClient is connected to server localhost:21069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:03.093203Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:03.129793Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:03.243532Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:03.383644Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:06:03.464304Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:06:07.079880Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7494344965273420268:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:06:07.079968Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:06:07.096707Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7494344965223244086:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:06:07.096815Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:06:07.435930Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494344986748258795:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:06:07.436062Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:06:07.500128Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:06:07.678801Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:06:07.733581Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:06:07.790012Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:06:07.839473Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:06:07.894429Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:06:07.985771Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:06:08.076907Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494344991043226934:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:06:08.077038Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:06:08.077250Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494344991043226939:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:06:08.083202Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:06:08.119148Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7494344991043226941:2411], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:06:08.182675Z node 16 :TX_PROXY ERROR: Actor# [16:7494344991043227021:4260] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:06:10.258403Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913170246, txId: 281474976715674] shutting down
2025-04-17T18:06:10.533446Z node 18 :BS_PROXY_PUT ERROR: [2bf5710dbd88de5e] Result# TEvPutResult {Id# [72075186224037892:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037892:1:21:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-04-17T18:06:10.536812Z node 17 :BS_PROXY_PUT ERROR: [65094c309bdbaf07] Result# TEvPutResult {Id# [72075186224037895:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:21:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> ColumnShardTiers::TTLUsage
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> ColumnShardTiers::DSConfigsStub
>> S3SettingsConversion::Basic [GOOD]
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> S3SettingsConversion::Basic [GOOD]
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> S3SettingsConversion::FoldersStyleDeduction [GOOD]
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> S3SettingsConversion::FoldersStyleDeduction [GOOD]
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> TColumnShardTestSchema::ExportAfterFail [GOOD]
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913740.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913740.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912540.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
2025-04-17T18:05:42.423094Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-17T18:05:42.507348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:05:42.526686Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-17T18:05:42.527034Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-17T18:05:42.535452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:05:42.535723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:05:42.535972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:05:42.536103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:05:42.536236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:05:42.536381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:05:42.536519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:05:42.536633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:05:42.536749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:05:42.536871Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.536980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:42.537088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:42.566079Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:42.566259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:42.566341Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:42.566524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.566721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:42.566831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:42.566877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:42.566996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:42.567072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:42.567122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:42.567162Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:42.567453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.567521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:42.567564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:42.567598Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:42.567713Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:42.567780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:42.567828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:42.567861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:42.567941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:42.567980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:42.568012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:42.568068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:42.568113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:42.568147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:42.568551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-17T18:05:42.568635Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T18:05:42.568733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-04-17T18:05:42.568827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-17T18:05:42.568997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:42.569058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:42.569093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:42.569331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T18:05:42.569400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.569438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.569589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:42.569633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:42.569665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:42.569874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... OLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.538708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:06:21.538731Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:06:21.538806Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:21.538876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.538903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:06:21.538960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-04-17T18:06:21.539003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-04-17T18:06:21.539094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1264:3273];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1265:3274]->[1:1264:3273] 2025-04-17T18:06:21.539166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.539243Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.539326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.539423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:21.539484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.539542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.539567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1265:3274] finished for tablet 9437184 2025-04-17T18:06:21.539934Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1264:3273];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["l_task_result"],"t":0.738},{"events":["f_ack"],"t":0.739},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.743}],"full":{"a":1744913180795712,"name":"_full_task","f":1744913180795712,"d_finished":0,"c":0,"l":1744913181539616,"d":743904},"events":[{"name":"bootstrap","f":1744913180795842,"d_finished":7059,"c":1,"l":1744913180802901,"d":7059},{"a":1744913181539410,"name":"ack","f":1744913181534865,"d_finished":4143,"c":7,"l":1744913181539350,"d":4349},{"a":1744913181539402,"name":"processing","f":1744913180804641,"d_finished":376549,"c":56,"l":1744913181539352,"d":376763},{"name":"ProduceResults","f":1744913180798545,"d_finished":12269,"c":65,"l":1744913181539556,"d":12269},{"a":1744913181539557,"name":"Finish","f":1744913181539557,"d_finished":0,"c":0,"l":1744913181539616,"d":59},{"name":"task_result","f":1744913180804662,"d_finished":371435,"c":49,"l":1744913181534679,"d":371435}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:21.539981Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1264:3273];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:21.540330Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1264:3273];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["l_task_result"],"t":0.738},{"events":["f_ack"],"t":0.739},{"events":["l_ProduceResults","f_Finish"],"t":0.743},{"events":["l_ack","l_processing","l_Finish"],"t":0.744}],"full":{"a":1744913180795712,"name":"_full_task","f":1744913180795712,"d_finished":0,"c":0,"l":1744913181540010,"d":744298},"events":[{"name":"bootstrap","f":1744913180795842,"d_finished":7059,"c":1,"l":1744913180802901,"d":7059},{"a":1744913181539410,"name":"ack","f":1744913181534865,"d_finished":4143,"c":7,"l":1744913181539350,"d":4743},{"a":1744913181539402,"name":"processing","f":1744913180804641,"d_finished":376549,"c":56,"l":1744913181539352,"d":377157},{"name":"ProduceResults","f":1744913180798545,"d_finished":12269,"c":65,"l":1744913181539556,"d":12269},{"a":1744913181539557,"name":"Finish","f":1744913181539557,"d_finished":0,"c":0,"l":1744913181540010,"d":453},{"name":"task_result","f":1744913180804662,"d_finished":371435,"c":49,"l":1744913181534679,"d":371435}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1265:3274]->[1:1264:3273] 2025-04-17T18:06:21.540389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:20.795330Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-04-17T18:06:21.540426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:21.540603Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1265:3274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> S3SettingsConversion::FoldersStrictStyle [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::WaitCAsStateOnAbort >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144913738.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124913738.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912538.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124912538.000000s;Name=;Codec=}; 2025-04-17T18:05:40.845436Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.931785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.946811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.947107Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.953498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.953664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.953827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.953935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-17T18:05:40.954022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.954111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.954215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.954288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.954362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.954437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.954514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.954577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.971826Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.971934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.971987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.972144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.972314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.972389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.972421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.972489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.972531Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.972564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.972586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.972725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.972776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.972804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.972826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.972886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.972924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.972955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.972975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.973027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.973053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.973072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.973104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.973142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.973170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.973454Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-04-17T18:05:40.973524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-17T18:05:40.973590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-17T18:05:40.973667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-17T18:05:40.973783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.973823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.973849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.974040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.974082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.974108Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.974249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.974283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:40.974304Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:40.974436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
186232898,"d_finished":0,"c":0,"l":1744913186233362,"d":464},{"name":"task_result","f":1744913185825122,"d_finished":157914,"c":28,"l":1744913186230638,"d":157914}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1278:3285]->[1:1277:3284] 2025-04-17T18:06:26.233744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:25.818710Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-17T18:06:26.233770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:26.233954Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:26.235290Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-17T18:06:26.235505Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913177095:max} readable: {1744913177095:max} at tablet 9437184 2025-04-17T18:06:26.235605Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:26.235716Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177095:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:26.235758Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177095:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:26.236236Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177095:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:26.236347Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177095:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:26.236771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177095:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1294:3301];trace_detailed=; 2025-04-17T18:06:26.237076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:26.237253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:26.237375Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.237480Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.237709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:26.237790Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-17T18:06:26.237882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.237918Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1294:3301] finished for tablet 9437184 2025-04-17T18:06:26.238242Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1293:3300];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913186236719,"name":"_full_task","f":1744913186236719,"d_finished":0,"c":0,"l":1744913186237966,"d":1247},"events":[{"name":"bootstrap","f":1744913186236867,"d_finished":643,"c":1,"l":1744913186237510,"d":643},{"a":1744913186237691,"name":"ack","f":1744913186237691,"d_finished":0,"c":0,"l":1744913186237966,"d":275},{"a":1744913186237678,"name":"processing","f":1744913186237678,"d_finished":0,"c":0,"l":1744913186237966,"d":288},{"name":"ProduceResults","f":1744913186237320,"d_finished":365,"c":2,"l":1744913186237906,"d":365},{"a":1744913186237907,"name":"Finish","f":1744913186237907,"d_finished":0,"c":0,"l":1744913186237966,"d":59}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.238313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1293:3300];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:26.238724Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1293:3300];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913186236719,"name":"_full_task","f":1744913186236719,"d_finished":0,"c":0,"l":1744913186238362,"d":1643},"events":[{"name":"bootstrap","f":1744913186236867,"d_finished":643,"c":1,"l":1744913186237510,"d":643},{"a":1744913186237691,"name":"ack","f":1744913186237691,"d_finished":0,"c":0,"l":1744913186238362,"d":671},{"a":1744913186237678,"name":"processing","f":1744913186237678,"d_finished":0,"c":0,"l":1744913186238362,"d":684},{"name":"ProduceResults","f":1744913186237320,"d_finished":365,"c":2,"l":1744913186237906,"d":365},{"a":1744913186237907,"name":"Finish","f":1744913186237907,"d_finished":0,"c":0,"l":1744913186238362,"d":455}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1294:3301]->[1:1293:3300] 2025-04-17T18:06:26.238835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:26.236315Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:26.238883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:26.238996Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913739.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144913739.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913739.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124913739.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912539.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124912539.000000s;Name=;Codec=}; 2025-04-17T18:05:40.797352Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.899878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.918521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.918805Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.925498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.925676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.925860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.925942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.926025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.926143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.926294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.926409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.926534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.926671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2025-04-17T18:05:40.926802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.926910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.947249Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.947390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.947487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.947669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.947859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.947945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.947989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.948092Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.948156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.948206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.948238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.948401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.948465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.948508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.948539Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.948626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.948680Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.948725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.948754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.948827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.948870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.948905Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.948954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.949006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.949040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.949484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=80; 2025-04-17T18:05:40.949577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-17T18:05:40.949646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-17T18:05:40.949739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-04-17T18:05:40.949959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.950022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.950060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.950289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.950357Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.950401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.950560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.950607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:40.950638Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:40.950847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 0,"c":0,"l":1744913186279982,"d":567},{"name":"task_result","f":1744913185819154,"d_finished":176708,"c":28,"l":1744913186276330,"d":176708}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1278:3285]->[1:1277:3284] 2025-04-17T18:06:26.280445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:25.811755Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-17T18:06:26.280515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:26.280767Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:26.282609Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-17T18:06:26.282890Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913177062:max} readable: {1744913177062:max} at tablet 9437184 2025-04-17T18:06:26.283014Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:26.283175Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177062:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:26.283241Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177062:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:26.283708Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177062:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:26.283805Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177062:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:26.284310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177062:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1294:3301];trace_detailed=; 2025-04-17T18:06:26.284756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:26.284985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:26.285162Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.285292Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.285586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:26.285700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.285859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.285921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1294:3301] finished for tablet 9437184 2025-04-17T18:06:26.286378Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1293:3300];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913186284244,"name":"_full_task","f":1744913186284244,"d_finished":0,"c":0,"l":1744913186285990,"d":1746},"events":[{"name":"bootstrap","f":1744913186284460,"d_finished":866,"c":1,"l":1744913186285326,"d":866},{"a":1744913186285562,"name":"ack","f":1744913186285562,"d_finished":0,"c":0,"l":1744913186285990,"d":428},{"a":1744913186285541,"name":"processing","f":1744913186285541,"d_finished":0,"c":0,"l":1744913186285990,"d":449},{"name":"ProduceResults","f":1744913186285080,"d_finished":523,"c":2,"l":1744913186285897,"d":523},{"a":1744913186285901,"name":"Finish","f":1744913186285901,"d_finished":0,"c":0,"l":1744913186285990,"d":89}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:26.286472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1293:3300];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:26.286883Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1293:3300];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913186284244,"name":"_full_task","f":1744913186284244,"d_finished":0,"c":0,"l":1744913186286517,"d":2273},"events":[{"name":"bootstrap","f":1744913186284460,"d_finished":866,"c":1,"l":1744913186285326,"d":866},{"a":1744913186285562,"name":"ack","f":1744913186285562,"d_finished":0,"c":0,"l":1744913186286517,"d":955},{"a":1744913186285541,"name":"processing","f":1744913186285541,"d_finished":0,"c":0,"l":1744913186286517,"d":976},{"name":"ProduceResults","f":1744913186285080,"d_finished":523,"c":2,"l":1744913186285897,"d":523},{"a":1744913186285901,"name":"Finish","f":1744913186285901,"d_finished":0,"c":0,"l":1744913186286517,"d":616}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1294:3301]->[1:1293:3300] 2025-04-17T18:06:26.286976Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:26.283776Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:26.287022Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:26.287136Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> ColumnShardTiers::TieringUsage |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpBatchUpdate::SimpleOnePartition |87.2%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] >> KqpBatchDelete::HasTxControl >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913741.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912541.000000s;Name=;Codec=}; 2025-04-17T18:05:41.727709Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:41.816220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:41.834287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:41.834533Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:41.841579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:41.841792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:41.842062Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:41.842169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:41.842285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:41.842376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:41.842454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:41.842532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:41.842599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:41.842669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:41.842733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:41.842808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:41.862315Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:41.862426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:41.862507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:41.862650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:41.862808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:41.862871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:41.862903Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:41.862972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:41.863016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:41.863050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:41.863069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:41.863196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:41.863243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:41.863273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:41.863294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:41.863353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:41.863388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:41.863422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:41.863440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:41.863488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:41.863511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:41.863528Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:41.863567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:41.863594Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:41.863620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:41.863915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-04-17T18:05:41.863986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T18:05:41.864065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-04-17T18:05:41.864128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-17T18:05:41.864282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:41.864338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:41.864374Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:41.864620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:41.864691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:41.864728Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:41.864886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
EST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:06:30.373228Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912877465;tx_id=18446744073709551615;;current_snapshot_ts=1744913141801; 2025-04-17T18:06:30.373280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:30.373348Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:30.373399Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:30.373516Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:30.373756Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913177465:max} readable: {1744913177465:max} at tablet 9437184 2025-04-17T18:06:30.373885Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:30.374050Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177465:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:30.374121Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177465:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:30.374653Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177465:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:30.374764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177465:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:30.375282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913177465:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1999:4008];trace_detailed=; 2025-04-17T18:06:30.375721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:30.375966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:30.376145Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:30.376288Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:30.376667Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:30.376785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:30.376922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:30.376970Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1999:4008] finished for tablet 9437184 2025-04-17T18:06:30.377459Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1998:4007];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913190375216,"name":"_full_task","f":1744913190375216,"d_finished":0,"c":0,"l":1744913190377038,"d":1822},"events":[{"name":"bootstrap","f":1744913190375426,"d_finished":902,"c":1,"l":1744913190376328,"d":902},{"a":1744913190376639,"name":"ack","f":1744913190376639,"d_finished":0,"c":0,"l":1744913190377038,"d":399},{"a":1744913190376618,"name":"processing","f":1744913190376618,"d_finished":0,"c":0,"l":1744913190377038,"d":420},{"name":"ProduceResults","f":1744913190376062,"d_finished":519,"c":2,"l":1744913190376953,"d":519},{"a":1744913190376956,"name":"Finish","f":1744913190376956,"d_finished":0,"c":0,"l":1744913190377038,"d":82}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:30.377551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1998:4007];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:30.378006Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1998:4007];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913190375216,"name":"_full_task","f":1744913190375216,"d_finished":0,"c":0,"l":1744913190377603,"d":2387},"events":[{"name":"bootstrap","f":1744913190375426,"d_finished":902,"c":1,"l":1744913190376328,"d":902},{"a":1744913190376639,"name":"ack","f":1744913190376639,"d_finished":0,"c":0,"l":1744913190377603,"d":964},{"a":1744913190376618,"name":"processing","f":1744913190376618,"d_finished":0,"c":0,"l":1744913190377603,"d":985},{"name":"ProduceResults","f":1744913190376062,"d_finished":519,"c":2,"l":1744913190376953,"d":519},{"a":1744913190376956,"name":"Finish","f":1744913190376956,"d_finished":0,"c":0,"l":1744913190377603,"d":647}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1999:4008]->[1:1998:4007] 2025-04-17T18:06:30.378108Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:30.374728Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:30.378155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:30.378294Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913741.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913741.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912541.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124912541.000000s;Name=;Codec=}; 2025-04-17T18:05:43.444593Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:43.527415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:43.541716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:43.541950Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:43.547955Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:43.548088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:43.548248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:43.548319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:43.548389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:43.548456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:43.548521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:43.548588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:43.548651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:43.548741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:43.548841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:43.548933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:43.569839Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:43.569975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:43.570026Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:43.570199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:43.570355Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:43.570437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:43.570479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:43.570563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:43.570615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:43.570660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:43.570687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:43.570879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:43.570938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:43.570973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:43.570999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:43.571073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:43.571117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:43.571157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:43.571181Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:43.571243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:43.571276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:43.571301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-17T18:05:43.571344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:43.571382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:43.571410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:43.571759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-04-17T18:05:43.571836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-17T18:05:43.571899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T18:05:43.572006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=59; 2025-04-17T18:05:43.572160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:43.572210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:43.572243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:43.572433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:43.572484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:43.572515Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:43.572639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:43.572671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:43.572703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:43.572869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
191030530,"d_finished":0,"c":0,"l":1744913191030920,"d":390},{"name":"task_result","f":1744913190603085,"d_finished":167693,"c":28,"l":1744913191028054,"d":167693}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1278:3285]->[1:1277:3284] 2025-04-17T18:06:31.031202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:30.594520Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-17T18:06:31.031240Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:31.031420Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1278:3285];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:31.032902Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-17T18:06:31.033090Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913179693:max} readable: {1744913179693:max} at tablet 9437184 2025-04-17T18:06:31.033178Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:31.033294Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913179693:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:31.033335Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913179693:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:31.033642Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913179693:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:31.033744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913179693:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:31.034223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913179693:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1294:3301];trace_detailed=; 2025-04-17T18:06:31.034604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:31.034770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:31.034891Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:31.034998Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:31.035256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:31.035329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-17T18:06:31.035436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:31.035493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1294:3301] finished for tablet 9437184 2025-04-17T18:06:31.035788Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1293:3300];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913191034140,"name":"_full_task","f":1744913191034140,"d_finished":0,"c":0,"l":1744913191035544,"d":1404},"events":[{"name":"bootstrap","f":1744913191034351,"d_finished":674,"c":1,"l":1744913191035025,"d":674},{"a":1744913191035240,"name":"ack","f":1744913191035240,"d_finished":0,"c":0,"l":1744913191035544,"d":304},{"a":1744913191035228,"name":"processing","f":1744913191035228,"d_finished":0,"c":0,"l":1744913191035544,"d":316},{"name":"ProduceResults","f":1744913191034836,"d_finished":377,"c":2,"l":1744913191035468,"d":377},{"a":1744913191035472,"name":"Finish","f":1744913191035472,"d_finished":0,"c":0,"l":1744913191035544,"d":72}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:31.035852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1293:3300];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:31.036126Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1293:3300];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913191034140,"name":"_full_task","f":1744913191034140,"d_finished":0,"c":0,"l":1744913191035887,"d":1747},"events":[{"name":"bootstrap","f":1744913191034351,"d_finished":674,"c":1,"l":1744913191035025,"d":674},{"a":1744913191035240,"name":"ack","f":1744913191035240,"d_finished":0,"c":0,"l":1744913191035887,"d":647},{"a":1744913191035228,"name":"processing","f":1744913191035228,"d_finished":0,"c":0,"l":1744913191035887,"d":659},{"name":"ProduceResults","f":1744913191034836,"d_finished":377,"c":2,"l":1744913191035468,"d":377},{"a":1744913191035472,"name":"Finish","f":1744913191035472,"d_finished":0,"c":0,"l":1744913191035887,"d":415}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1294:3301]->[1:1293:3300] 2025-04-17T18:06:31.036204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:31.033710Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:31.036252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:31.036350Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1294:3301];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912538.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-17T18:05:40.435077Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.509029Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.528986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.529265Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.536501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.536681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.536949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.537054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.537178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.537299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.537406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.537514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.537613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.537724Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.537853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.537948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.556385Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.556491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.556527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.556665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.556788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.556847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.556875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.556946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.556983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.557016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.557037Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.557141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.557174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.557196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.557213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.557261Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.557292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.557317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.557338Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.557379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.557402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.557419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.557445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.557474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.557493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.557758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-17T18:05:40.557842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-17T18:05:40.557898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=22; 2025-04-17T18:05:40.557962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-17T18:05:40.558078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.558114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.558136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.558294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T18:05:40.558333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.558354Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.558439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.558462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:40.558478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:40.558590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 17T18:06:31.129676Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=230; 2025-04-17T18:06:31.129703Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=22152; 2025-04-17T18:06:31.133919Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4162; 2025-04-17T18:06:31.137775Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3053; 2025-04-17T18:06:31.137843Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=3857; 2025-04-17T18:06:31.137945Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=58; 2025-04-17T18:06:31.138020Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=44; 2025-04-17T18:06:31.138113Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=61; 2025-04-17T18:06:31.138201Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=58; 2025-04-17T18:06:31.142974Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4724; 2025-04-17T18:06:31.149221Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6167; 2025-04-17T18:06:31.149320Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=28; 2025-04-17T18:06:31.149382Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2025-04-17T18:06:31.149431Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-17T18:06:31.149468Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-04-17T18:06:31.149499Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-04-17T18:06:31.149553Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=27; 2025-04-17T18:06:31.149600Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-17T18:06:31.149690Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-04-17T18:06:31.149736Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-17T18:06:31.149800Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-17T18:06:31.149888Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2025-04-17T18:06:31.150110Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=181; 2025-04-17T18:06:31.150148Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=47809; 2025-04-17T18:06:31.150316Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=10402524;raw_bytes=16084646;count=7;records=160000} at tablet 9437184 2025-04-17T18:06:31.150437Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:31.150490Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:31.150549Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:31.158475Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:31.158680Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:31.158737Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:31.158804Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:31.158859Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912869182;tx_id=18446744073709551615;;current_snapshot_ts=1744913141584; 2025-04-17T18:06:31.158889Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:31.158926Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:31.158955Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:31.159021Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:31.159537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:31.159603Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1966:3867];tablet_id=9437184;parent=[1:1928:3836];fline=manager.cpp:82;event=ask_data;request=request_id=108;1={portions_count=18};; 2025-04-17T18:06:31.160166Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:31.160428Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:31.160455Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:06:31.160480Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:31.160518Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:31.160571Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:31.160616Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:31.160672Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912869182;tx_id=18446744073709551615;;current_snapshot_ts=1744913141584; 2025-04-17T18:06:31.160712Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:31.160753Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:31.160782Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:31.160853Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:31.162026Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=18;path_id=1; 2025-04-17T18:06:31.163511Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1928:3836];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> KqpBatchUpdate::Large_3 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] >> KqpBatchDelete::Large_3 |87.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144913738.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124913738.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912538.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124912538.000000s;Name=;Codec=}; 2025-04-17T18:05:40.519981Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.616838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.639700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.639982Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.648071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.648283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.648517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.648634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.648759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.648877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.648989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.649107Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.649215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.649331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.649435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.649530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.677455Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.677604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.677654Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.677867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.678016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.678088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.678129Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.678234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.678306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.678349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.678376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.678545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.678607Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.678645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.678673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.678766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.678815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.678859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.678887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.678948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.678982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.679013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.679056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.679097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.679130Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.679512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-17T18:05:40.679595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-17T18:05:40.679665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T18:05:40.679803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-17T18:05:40.679974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.680025Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.680058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.680251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.680303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.680335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.680474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.680511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:40.680541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:40.680699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
finishLoadingTime=14; 2025-04-17T18:06:33.084069Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=274; 2025-04-17T18:06:33.084114Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=29329; 2025-04-17T18:06:33.090724Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6527; 2025-04-17T18:06:33.098002Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6148; 2025-04-17T18:06:33.098114Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7295; 2025-04-17T18:06:33.098315Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=102; 2025-04-17T18:06:33.098460Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=85; 2025-04-17T18:06:33.098604Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=93; 2025-04-17T18:06:33.098713Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=64; 2025-04-17T18:06:33.107108Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8320; 2025-04-17T18:06:33.117144Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9924; 2025-04-17T18:06:33.117283Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-04-17T18:06:33.117371Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=31; 2025-04-17T18:06:33.117425Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-17T18:06:33.117479Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-17T18:06:33.117533Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-04-17T18:06:33.117618Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-04-17T18:06:33.117674Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-04-17T18:06:33.117777Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2025-04-17T18:06:33.117832Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-17T18:06:33.117904Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2025-04-17T18:06:33.117998Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2025-04-17T18:06:33.118287Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=248; 2025-04-17T18:06:33.118333Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=71467; 2025-04-17T18:06:33.118515Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:06:33.118621Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:33.118681Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:33.118747Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:33.129113Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:33.129264Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:33.129331Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:33.129415Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:33.129484Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912878278;tx_id=18446744073709551615;;current_snapshot_ts=1744913141680; 
2025-04-17T18:06:33.129539Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:33.129595Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:33.129640Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:33.129742Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:33.130454Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:33.130523Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1985:3885];tablet_id=9437184;parent=[1:1947:3854];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-04-17T18:06:33.131579Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:33.132393Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:33.132429Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:06:33.132456Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:33.132502Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:33.132560Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:33.132649Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:33.132719Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912878278;tx_id=18446744073709551615;;current_snapshot_ts=1744913141680; 2025-04-17T18:06:33.132768Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:33.132818Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:33.132862Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:33.132960Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:33.133541Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=11;path_id=1; 2025-04-17T18:06:33.134991Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> KqpBatchUpdate::ManyPartitions_2 >> KqpBatchDelete::MultiStatement |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 1915, MsgBus: 22343 2025-04-17T18:06:31.601693Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345093212548431:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:31.601816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d02/r3tmp/tmpcu6vmu/pdisk_1.dat 2025-04-17T18:06:31.858276Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1915, node 1 2025-04-17T18:06:31.916927Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:31.916953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:31.916963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:31.917116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:31.953585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:31.953769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-17T18:06:31.955673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22343 TClient is connected to server localhost:22343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:32.386534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:32.410341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:32.512235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:32.646677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:32.704034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:34.113160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345106097452110:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:34.113322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:34.420366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:34.448553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:34.476856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:34.505288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:34.533156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:34.591134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:34.622536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:34.668051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345106097452769:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:34.668160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345106097452775:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:34.668168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:34.671939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:34.681461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345106097452777:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:34.760698Z node 1 :TX_PROXY ERROR: Actor# [1:7494345106097452831:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:36.006698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjUwZDE2ZGYtNmM1NzkyZC0yMWFhMDU0NS0yYmFlNGJmNA==, ActorId: [1:7494345110392420462:2512], ActorState: ExecuteState, TraceId: 01js2ewrky24y9j2qjx3vnbz0r, Create QueryResponse for error on request, msg: BATCH operation can be executed only in NoTx mode. >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> KqpBatchDelete::SimplePartitions >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913744.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913744.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912544.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124912544.000000s;Name=;Codec=}; 2025-04-17T18:05:46.469092Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:46.543456Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:46.559709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:46.559965Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:46.566441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:46.566636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:46.566877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:46.566978Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:46.567081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:46.567191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:46.567289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:46.567385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:46.567471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:46.567570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:46.567665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:46.567749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:46.588406Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:46.588506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:46.588542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:46.588651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:46.588785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:46.588842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:46.588869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:46.588922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-17T18:05:46.588967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:46.588990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:46.589011Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:46.589106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:46.589138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:46.589158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:46.589173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:46.589221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:46.589250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:46.589273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:46.589288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:46.589329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:46.589350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:46.589364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:46.589389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:46.589414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:46.589432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:46.589722Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-17T18:05:46.589776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=22; 2025-04-17T18:05:46.589827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-04-17T18:05:46.589879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-04-17T18:05:46.589978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:46.590010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:46.590034Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:46.590163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:46.590209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:46.590227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:46.590319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:46.590347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:46.590364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:46.590463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
HARGE:finishLoadingTime=8; 2025-04-17T18:06:37.404309Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=147; 2025-04-17T18:06:37.404336Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=24319; 2025-04-17T18:06:37.407522Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3135; 2025-04-17T18:06:37.411272Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3093; 2025-04-17T18:06:37.411343Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=3759; 2025-04-17T18:06:37.411448Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=59; 2025-04-17T18:06:37.411522Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=41; 2025-04-17T18:06:37.411608Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=57; 2025-04-17T18:06:37.411675Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=40; 2025-04-17T18:06:37.415610Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3892; 2025-04-17T18:06:37.421082Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5404; 2025-04-17T18:06:37.421182Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=28; 2025-04-17T18:06:37.421234Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-17T18:06:37.421269Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-17T18:06:37.421304Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-17T18:06:37.421336Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-17T18:06:37.421390Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=28; 2025-04-17T18:06:37.421424Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-17T18:06:37.421494Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=41; 2025-04-17T18:06:37.421528Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-17T18:06:37.421577Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2025-04-17T18:06:37.421644Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=33; 2025-04-17T18:06:37.421822Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=154; 2025-04-17T18:06:37.421850Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=47127; 2025-04-17T18:06:37.421960Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:06:37.422036Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:37.422072Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:37.422116Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:37.428464Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:37.428566Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:37.428616Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:37.428671Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:37.428714Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912884216;tx_id=18446744073709551615;;current_snapshot_ts=1744913147618; 
2025-04-17T18:06:37.428743Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:37.428775Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:37.428800Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:37.428860Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:37.429288Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:37.429339Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1985:3885];tablet_id=9437184;parent=[1:1947:3854];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-04-17T18:06:37.429924Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:37.430149Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:37.430194Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:06:37.430217Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:37.430259Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:37.430311Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:37.430362Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:37.430431Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912884216;tx_id=18446744073709551615;;current_snapshot_ts=1744913147618; 2025-04-17T18:06:37.430470Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:37.430509Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:37.430545Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:37.430617Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:37.431704Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=11;path_id=1; 2025-04-17T18:06:37.432519Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> KqpBatchDelete::Large_1 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913749.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913749.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913749.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913749.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913749.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913749.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144913749.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913749.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912549.000000s;Name=;Codec=}; 2025-04-17T18:05:49.872112Z node 1 
:BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:49.968211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:49.985571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:49.985857Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:49.992619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:49.992787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:49.992966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:49.993080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:49.993160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:49.993242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:49.993329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:49.993400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:49.993468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:49.993571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:49.993641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:49.993700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:50.012115Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:50.012268Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:50.012373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:50.012534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:50.012661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:50.012742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:50.012777Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:50.012870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:50.012922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:50.012968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:50.012993Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:50.013106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:50.013151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:50.013189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:50.013219Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:50.013287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:50.013354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:50.013395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:50.013416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:50.013481Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:50.013509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:50.013529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:50.013562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:50.013600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:50.013637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:50.014021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-17T18:05:50.014102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-17T18:05:50.014161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-17T18:05:50.014251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T18:05:50.014377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:50.014451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:50.014487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:50.014642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:50.014703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:50.014729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:50.014846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
EST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:06:37.603023Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912885610;tx_id=18446744073709551615;;current_snapshot_ts=1744913149946; 2025-04-17T18:06:37.603083Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:37.603148Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:37.603201Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:37.603319Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:37.603579Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913185610:max} readable: {1744913185610:max} at tablet 9437184 2025-04-17T18:06:37.603710Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:37.603886Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913185610:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:37.603953Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913185610:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:37.604456Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913185610:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:37.604564Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913185610:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:37.605101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913185610:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1997:4006];trace_detailed=; 2025-04-17T18:06:37.605524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:37.605774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:37.605957Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:37.606103Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:37.606570Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:37.606697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:37.606838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:37.606889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1997:4006] finished for tablet 9437184 2025-04-17T18:06:37.607362Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1996:4005];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913197605029,"name":"_full_task","f":1744913197605029,"d_finished":0,"c":0,"l":1744913197606956,"d":1927},"events":[{"name":"bootstrap","f":1744913197605223,"d_finished":921,"c":1,"l":1744913197606144,"d":921},{"a":1744913197606541,"name":"ack","f":1744913197606541,"d_finished":0,"c":0,"l":1744913197606956,"d":415},{"a":1744913197606518,"name":"processing","f":1744913197606518,"d_finished":0,"c":0,"l":1744913197606956,"d":438},{"name":"ProduceResults","f":1744913197605870,"d_finished":539,"c":2,"l":1744913197606870,"d":539},{"a":1744913197606873,"name":"Finish","f":1744913197606873,"d_finished":0,"c":0,"l":1744913197606956,"d":83}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:37.607449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1996:4005];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:37.607907Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1996:4005];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913197605029,"name":"_full_task","f":1744913197605029,"d_finished":0,"c":0,"l":1744913197607501,"d":2472},"events":[{"name":"bootstrap","f":1744913197605223,"d_finished":921,"c":1,"l":1744913197606144,"d":921},{"a":1744913197606541,"name":"ack","f":1744913197606541,"d_finished":0,"c":0,"l":1744913197607501,"d":960},{"a":1744913197606518,"name":"processing","f":1744913197606518,"d_finished":0,"c":0,"l":1744913197607501,"d":983},{"name":"ProduceResults","f":1744913197605870,"d_finished":539,"c":2,"l":1744913197606870,"d":539},{"a":1744913197606873,"name":"Finish","f":1744913197606873,"d_finished":0,"c":0,"l":1744913197607501,"d":628}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1997:4006]->[1:1996:4005] 2025-04-17T18:06:37.608007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:37.604531Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:37.608057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:37.608178Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1997:4006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913745.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913745.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912545.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124912545.000000s;Name=;Codec=}; 2025-04-17T18:05:47.078737Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:47.156417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:47.171612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:47.171836Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:47.177723Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:47.177864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:47.178028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:47.178094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:47.178168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:47.178271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:47.178342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:47.178408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:47.178470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:47.178538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:47.178610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:47.178693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:47.196676Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:47.196799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:47.196842Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:47.196967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:47.197090Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:47.197160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:47.197188Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:47.197255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:47.197297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:47.197323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:47.197341Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:47.197458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:47.197503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:47.197527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:47.197545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:47.197602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:47.197633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:47.197656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:47.197673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:47.197717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:47.197739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:47.197757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
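
The TTxUpdateSchema pass above walks a fixed normalizer chain: each CLASS_NAME is registered, reports normalizer_init with a seq_id, then normalizer_finished before the switch to the next one. A small sketch (illustrative, not ydb code) that audits such a run by matching only the event= and description= fields printed in these entries, flagging any normalizer that initialized but never finished:

import re
from typing import Iterable

INIT_RE = re.compile(r"event=normalizer_init;last=\d+;seq_id=\d+;type=(\w+)")
DONE_RE = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(\w+)")

def unfinished_normalizers(lines: Iterable[str]) -> set:
    """Return normalizers that reported init but no matching finished event."""
    started, finished = set(), set()
    for line in lines:
        if m := INIT_RE.search(line):
            started.add(m.group(1))
        if m := DONE_RE.search(line):
            finished.add(m.group(1))
    return started - finished

log = [
    "... event=normalizer_init;last=0;seq_id=2;type=Chunks;",
    "... event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;",
    "... event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;",
]
print(unfinished_normalizers(log))  # prints {'TablesCleaner'}
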
2025-04-17T18:05:47.197785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:47.197810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:47.197830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:47.198134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-04-17T18:05:47.198221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-17T18:05:47.198301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-17T18:05:47.198356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=25; 2025-04-17T18:05:47.198465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:47.198501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:47.198526Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:47.198675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:47.198705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:47.198746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:47.198840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:47.198866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:47.198884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:47.198993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
E:finishLoadingTime=14; 2025-04-17T18:06:38.059542Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=255; 2025-04-17T18:06:38.059581Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=26785; 2025-04-17T18:06:38.065500Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=5845; 2025-04-17T18:06:38.071996Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5476; 2025-04-17T18:06:38.072095Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6507; 2025-04-17T18:06:38.072260Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=88; 2025-04-17T18:06:38.072374Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=64; 2025-04-17T18:06:38.072499Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=82; 2025-04-17T18:06:38.072604Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=63; 2025-04-17T18:06:38.080369Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7702; 2025-04-17T18:06:38.090769Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10300; 2025-04-17T18:06:38.090894Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-04-17T18:06:38.090970Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2025-04-17T18:06:38.091017Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-17T18:06:38.091064Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-17T18:06:38.091114Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-17T18:06:38.091192Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-17T18:06:38.091246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-17T18:06:38.091340Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2025-04-17T18:06:38.091391Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-04-17T18:06:38.091459Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-04-17T18:06:38.091541Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2025-04-17T18:06:38.091778Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=198; 2025-04-17T18:06:38.091815Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=65982; 2025-04-17T18:06:38.091967Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:06:38.092059Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:38.092107Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:38.092168Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:38.101601Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:38.101744Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:38.101803Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:38.101872Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:38.101930Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912884828;tx_id=18446744073709551615;;current_snapshot_ts=1744913148230; 
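
The composite_init block above reports a PRECHARGE/EXECUTE LoadingTime pair per stage (insert_table, tx_controller, storages_manager, and so on) plus an overall EXECUTE:composite_initLoadingTime. The counters are plausibly microseconds, though the log itself does not state the unit. A sketch that ranks stages by their EXECUTE cost, assuming only the field shape shown above:

import re

STAGE_RE = re.compile(r"EXECUTE:(\w+)LoadingTime=(\d+)")

def stage_times(text: str) -> dict:
    """Map stage name -> EXECUTE LoadingTime; later entries win, as in the log."""
    times = {}
    for stage, value in STAGE_RE.findall(text):
        times[stage] = int(value)
    return times

block = ("EXECUTE:insert_tableLoadingTime=6507; "
         "EXECUTE:storages_managerLoadingTime=10300; "
         "EXECUTE:composite_initLoadingTime=65982;")
for stage, t in sorted(stage_times(block).items(), key=lambda kv: -kv[1]):
    print(stage, t)  # composite_init first, then storages_manager, insert_table
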
2025-04-17T18:06:38.101968Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:38.102012Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:38.102048Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:38.102137Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:38.102723Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:38.102799Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1985:3885];tablet_id=9437184;parent=[1:1947:3854];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-04-17T18:06:38.103563Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:38.103989Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:38.104019Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:06:38.104041Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:38.104081Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:38.104132Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:38.104182Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-17T18:06:38.104239Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912884828;tx_id=18446744073709551615;;current_snapshot_ts=1744913148230; 2025-04-17T18:06:38.104279Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:38.104337Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:38.104385Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:38.104458Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:38.105486Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=11;path_id=1; 2025-04-17T18:06:38.106663Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1947:3854];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912540.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912540.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912540.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124912540.000000s;Name=;Codec=}; 2025-04-17T18:05:40.497966Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.584701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.608686Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.608972Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.616593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.616754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.616939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.617037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.617123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.617185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.617258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.617330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.617404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.617478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.617567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.617630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.650257Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.651003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.651378Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
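
Each test's command output opens with WaitEmptyAfter/Tiers/TTL descriptor lines like the ones above, where every clause carries Column, EvictAfter (in seconds), Name, and Codec. A sketch that decodes those clauses; the field names come straight from the log, while the parser itself is only an illustration:

import re

TIER_RE = re.compile(
    r"\{Column=(?P<column>\w+);EvictAfter=(?P<evict>[\d.]+)s;"
    r"Name=(?P<name>[^;}]*);Codec=(?P<codec>[^;}]*)\}"
)

def parse_tiers(descriptor: str) -> list:
    """Return one dict per tier/TTL clause found in a descriptor line."""
    return [m.groupdict() for m in TIER_RE.finditer(descriptor)]

line = ("WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913740.000000s;"
        "Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913740.000000s;"
        "Name=tier1;Codec=zstd};};TTL={Column=timestamp;"
        "EvictAfter=144913740.000000s;Name=;Codec=};")
for tier in parse_tiers(line):
    # The unnamed clause is the table-level TTL; empty Codec means the default.
    print(tier["name"] or "<ttl>", tier["evict"], tier["codec"] or "<default>")
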
2025-04-17T18:05:40.652192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.653077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.653591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.653732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.654121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.654287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.654345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.654400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.654815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.654997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.655049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.655132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.655398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.655519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.655585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.655623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.655719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.655765Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.655794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.655856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.655911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.655969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.657959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=269; 2025-04-17T18:05:40.658302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=208; 2025-04-17T18:05:40.658522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-17T18:05:40.658846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=184; 2025-04-17T18:05:40.659299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.659375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.659427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.659747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.659842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.659896Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.660173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.660220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cp ... 
UG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-17T18:06:38.170721Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912888751;tx_id=18446744073709551615;;current_snapshot_ts=1744913141654; 2025-04-17T18:06:38.170764Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:38.170817Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:38.170861Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:38.170958Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:38.171149Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913188751:max} readable: {1744913188751:max} at tablet 9437184 2025-04-17T18:06:38.171268Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:38.171415Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913188751:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:38.171477Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913188751:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:38.171868Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913188751:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:38.171950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913188751:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:38.172358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913188751:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1386:3391];trace_detailed=; 2025-04-17T18:06:38.172724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:38.172917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:38.173080Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:38.173183Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:38.173491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:38.173596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:38.173703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:38.173733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1386:3391] finished for tablet 9437184 2025-04-17T18:06:38.174091Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1385:3390];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913198172304,"name":"_full_task","f":1744913198172304,"d_finished":0,"c":0,"l":1744913198173779,"d":1475},"events":[{"name":"bootstrap","f":1744913198172483,"d_finished":724,"c":1,"l":1744913198173207,"d":724},{"a":1744913198173466,"name":"ack","f":1744913198173466,"d_finished":0,"c":0,"l":1744913198173779,"d":313},{"a":1744913198173447,"name":"processing","f":1744913198173447,"d_finished":0,"c":0,"l":1744913198173779,"d":332},{"name":"ProduceResults","f":1744913198173004,"d_finished":400,"c":2,"l":1744913198173719,"d":400},{"a":1744913198173721,"name":"Finish","f":1744913198173721,"d_finished":0,"c":0,"l":1744913198173779,"d":58}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:38.174162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1385:3390];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:38.174578Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1385:3390];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913198172304,"name":"_full_task","f":1744913198172304,"d_finished":0,"c":0,"l":1744913198174221,"d":1917},"events":[{"name":"bootstrap","f":1744913198172483,"d_finished":724,"c":1,"l":1744913198173207,"d":724},{"a":1744913198173466,"name":"ack","f":1744913198173466,"d_finished":0,"c":0,"l":1744913198174221,"d":755},{"a":1744913198173447,"name":"processing","f":1744913198173447,"d_finished":0,"c":0,"l":1744913198174221,"d":774},{"name":"ProduceResults","f":1744913198173004,"d_finished":400,"c":2,"l":1744913198173719,"d":400},{"a":1744913198173721,"name":"Finish","f":1744913198173721,"d_finished":0,"c":0,"l":1744913198174221,"d":500}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1386:3391]->[1:1385:3390] 2025-04-17T18:06:38.174678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:38.171920Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:38.174731Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:38.174841Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1386:3391];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> KqpBatchDelete::MultiStatement [GOOD] >> ReadLoad::ShouldReadIterate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 8667, MsgBus: 12621 2025-04-17T18:06:35.060699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345109978093661:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:35.060771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cdc/r3tmp/tmpv54VnE/pdisk_1.dat 2025-04-17T18:06:35.321947Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8667, node 1 2025-04-17T18:06:35.383675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:35.383704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:35.383717Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:35.383834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:35.389357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:35.389456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:35.391088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12621 TClient is connected to server localhost:12621 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:35.805554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:35.819531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:35.935646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:36.077771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:36.134859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:37.357528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345118568030026:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.357612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.585781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.611039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.634669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.657490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.680233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.745675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.774752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.816718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345118568030687:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.816783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.816878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345118568030692:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.820057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:37.827903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345118568030694:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:37.930006Z node 1 :TX_PROXY ERROR: Actor# [1:7494345118568030749:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:38.838860Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345122862998389:2517], status: GENERIC_ERROR, issues:
:4:32: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T18:06:38.839176Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmMzMTgyYTctODNlYWJmOTYtYWViMzk2MC00NTRlNTg0Mw==, ActorId: [1:7494345122862998380:2511], ActorState: ExecuteState, TraceId: 01js2ewvp96ndjhffty7ceq4md, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:06:38.856494Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345122862998396:2519], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T18:06:38.856718Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmMzMTgyYTctODNlYWJmOTYtYWViMzk2MC00NTRlNTg0Mw==, ActorId: [1:7494345122862998380:2511], ActorState: ExecuteState, TraceId: 01js2ewvqwfqs4cgrsw4k0rqgn, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:06:38.870635Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345122862998400:2521], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T18:06:38.870846Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmMzMTgyYTctODNlYWJmOTYtYWViMzk2MC00NTRlNTg0Mw==, ActorId: [1:7494345122862998380:2511], ActorState: ExecuteState, TraceId: 01js2ewvrc9p19vf12c7eqjnc2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:06:38.889176Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345122862998404:2523], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T18:06:38.890370Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmMzMTgyYTctODNlYWJmOTYtYWViMzk2MC00NTRlNTg0Mw==, ActorId: [1:7494345122862998380:2511], ActorState: ExecuteState, TraceId: 01js2ewvrvbej7xje5fd62bg0g, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:06:38.901541Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345122862998408:2525], status: GENERIC_ERROR, issues:
:3:29: Error: BATCH can't be used with multiple writes or reads. 2025-04-17T18:06:38.901745Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmMzMTgyYTctODNlYWJmOTYtYWViMzk2MC00NTRlNTg0Mw==, ActorId: [1:7494345122862998380:2511], ActorState: ExecuteState, TraceId: 01js2ewvseea0rq3g8r79r52ye, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> UpsertLoad::ShouldCreateTable |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpQueryService::DdlColumnTable [GOOD] >> KqpQueryService::DdlCache >> KqpJoinOrder::CanonizedJoinOrderTPCH6 >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |87.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913747.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913747.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913747.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913747.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144913747.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912547.000000s;Name=;Codec=}; 2025-04-17T18:05:47.801457Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:47.903539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:47.928451Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:47.928774Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:47.937413Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:47.937658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:47.937910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:47.938021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:47.938158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:47.938323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:47.938453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:47.938589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:47.938725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:47.939088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:47.939254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:47.939371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:47.968790Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:47.968978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:47.969093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:47.969291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:47.969467Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:47.969558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:47.969607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:47.969719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:47.969804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:47.969861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:47.969897Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:47.970114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:47.970227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:47.970279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:47.970317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:47.970419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:47.970483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:47.970535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:47.970567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:47.970640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:47.970681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:47.970735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-17T18:05:47.970789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:47.970843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:47.970877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:47.971321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-17T18:05:47.971444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=64; 2025-04-17T18:05:47.971533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-17T18:05:47.971620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T18:05:47.971790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:47.971852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:47.971890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:47.972153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:47.972213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:47.972253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:47.972442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
a.cpp:29;EXECUTE:finishLoadingTime=545; 2025-04-17T18:06:42.954866Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=47483; 2025-04-17T18:06:42.967599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12652; 2025-04-17T18:06:42.980974Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12344; 2025-04-17T18:06:42.981089Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13400; 2025-04-17T18:06:42.981257Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=102; 2025-04-17T18:06:42.981376Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=66; 2025-04-17T18:06:42.981515Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=90; 2025-04-17T18:06:42.981629Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=69; 2025-04-17T18:06:42.996664Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14958; 2025-04-17T18:06:43.015281Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18504; 2025-04-17T18:06:43.015416Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2025-04-17T18:06:43.015489Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2025-04-17T18:06:43.015534Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-17T18:06:43.015583Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-17T18:06:43.015632Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-17T18:06:43.015709Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-17T18:06:43.015762Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-17T18:06:43.015858Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2025-04-17T18:06:43.015910Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-17T18:06:43.015980Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-17T18:06:43.016078Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-04-17T18:06:43.016392Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=270; 2025-04-17T18:06:43.016437Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=116405; 2025-04-17T18:06:43.016584Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:06:43.016687Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:43.016743Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:43.016807Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:43.035667Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:43.035825Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:43.035888Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:43.035963Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:06:43.036030Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912884959;tx_id=18446744073709551615;;current_snapshot_ts=1744913147874; 2025-04-17T18:06:43.036073Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:43.036123Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:43.036160Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:43.036257Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:43.037035Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:43.037146Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2587:4489];tablet_id=9437184;parent=[1:2537:4446];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};; 2025-04-17T18:06:43.038581Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:43.039069Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:43.039112Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:06:43.039139Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:43.039180Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:43.039241Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:43.039296Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:06:43.039362Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912884959;tx_id=18446744073709551615;;current_snapshot_ts=1744913147874; 2025-04-17T18:06:43.039403Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:43.039451Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:43.039490Z node 1 :TX_COLUMNSHARD 
DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:43.039581Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:43.040869Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=22;path_id=1; 2025-04-17T18:06:43.041922Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2537:4446];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-04-17T18:06:17.828566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:17.828966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:17.829010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ac7/r3tmp/tmpBtH9xd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18313, node 1 TClient is connected to server localhost:15850 2025-04-17T18:06:18.311946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:18.349089Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:18.356269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:18.356319Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:18.356344Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:18.356666Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:18.391194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:18.391289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:18.402616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:18.534637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-17T18:06:18.622656Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:06:18.623750Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:06:18.623977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:745:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:06:18.646236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:745:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:06:18.646581Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-17T18:06:18.655660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:18.655938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:18.656261Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:06:18.656390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:06:18.656506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:06:18.656625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:06:18.656735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:06:18.656873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:06:18.656997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:06:18.657112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:06:18.657215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:06:18.657367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:06:18.679261Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:06:18.680915Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-17T18:06:18.681286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:06:18.681365Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:06:18.681618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:06:18.681803Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:06:18.681885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:06:18.681951Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:06:18.682059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:06:18.682139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:06:18.682234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:06:18.682276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:06:18.682464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:06:18.682546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:06:18.682605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:06:18.682636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:06:18.682733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:06:18.682791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:06:18.682830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:06:18.682859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:06:18.682948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:06:18.682989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:06:18.683020Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:06:18.683070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:06:18.683118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:06:18.683161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:06:18.683668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=108; 2025-04-17T18:06:18.683759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-17T18:06:18.684579Z node 1 :TX_COLUMNSHARD INFO: tablet_i ... size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-17T18:06:42.145369Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1275:3077], Recipient [1:745:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-04-17T18:06:42.145766Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1276:3078], Recipient [1:745:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-17T18:06:42.145801Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-17T18:06:42.145999Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[26] (CS::GENERAL) apply at tablet 72075186224037888 2025-04-17T18:06:42.149461Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:16 Blob count: 1 2025-04-17T18:06:42.149606Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2945656;raw_bytes=96859974;count=2;records=82561} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=10985168;raw_bytes=371060503;count=6;records=311449} inactive {blob_bytes=21748920;raw_bytes=731151681;count=14;records=617870} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:72/0:size=2030;count=15;;1:size=53667;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445376;count=1;;7:size=1445528;count=1;;8:size=808584;count=1;;9:size=2093168;count=3;;10:size=1247888;count=1;;11:size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:72/0:size=2099;count=16;;1:size=53667;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445376;count=1;;7:size=1445528;count=1;;8:size=808584;count=1;;9:size=2093168;count=3;;10:size=1247888;count=1;;11:size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-17T18:06:42.161590Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-17T18:06:42.161656Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;fline=with_appended.cpp:65;portions=23,;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd; 2025-04-17T18:06:42.161913Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:23;path_id:3;records_count:70098;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:2469232;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-17T18:06:42.162102Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/429610.000000s;; 2025-04-17T18:06:42.162225Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd; 2025-04-17T18:06:42.162293Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:13930824;portions_count:23;); 2025-04-17T18:06:42.162352Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:42.162414Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:42.162476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-17T18:06:42.162540Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-04-17T18:06:42.162587Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:42.162637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:42.162676Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:42.162747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.797000s; 2025-04-17T18:06:42.162796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:42.162969Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:16 Blob count: 1 VERIFY failed (2025-04-17T18:06:42.163169Z): 
tablet_id=72075186224037888;task_id=b6bc169e-1bb611f0-84ec7f4b-9f2c1ccd;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18E6FBB9) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18E5DE4B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x1A175E96) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48C9C201) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x308D0F6D) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1EBA5183) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1EA88B60) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E8CEF54) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E86BCD5) NActors::IActor::Receive(TAutoPtr&)+237 (0x1A0A766D) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x35C14E45) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x35C0D6BA) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x35C0A461) NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+7904 (0x36592F30) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4568 (0x18A4D5B8) std::__y1::__function::__func, void ()>::operator()()+280 (0x18A5F8A8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1931CAA6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x192EC419) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x18A5E854) NUnitTest::TTestFactory::Execute()+2438 (0x192EDCE6) NUnitTest::RunMain(int, char**)+5213 (0x1931701D) ??+0 (0x7FE81C725D90) __libc_start_main+128 (0x7FE81C725E40) _start+41 (0x163A0029) >> TTicketParserTest::BulkAuthorizationRetryError >> UpsertLoad::ShouldWriteDataBulkUpsertBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913748.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913748.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913748.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913748.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913748.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913748.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144913748.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913748.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912548.000000s;Name=;Codec=}; 2025-04-17T18:05:48.866577Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:48.964806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:48.989401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:48.989731Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:48.997566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:48.997794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:48.997990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:48.998069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:48.998152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:48.998291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:48.998413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:48.998506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:48.998595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:48.998689Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:48.998777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:48.998842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:49.023463Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:49.023636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:49.023741Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:49.023927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:49.024112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:49.024203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:49.024250Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:49.024356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:49.024427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:49.024480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:49.024513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:49.024692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:49.024755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:49.024800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:49.024831Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2025-04-17T18:05:49.024921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:49.024978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:49.025029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:49.025060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:49.025141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:49.025181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:49.025212Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:49.025265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:49.025313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:49.025351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:49.025762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-17T18:05:49.025868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:05:49.025959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-17T18:05:49.026046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-17T18:05:49.026252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:49.026320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:49.026364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:49.026621Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:49.026692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:49.026750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:49.026921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... on_data.cpp:29;EXECUTE:finishLoadingTime=361; 2025-04-17T18:06:44.381299Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=42198; 2025-04-17T18:06:44.390412Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=9040; 2025-04-17T18:06:44.398533Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7284; 2025-04-17T18:06:44.398620Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8123; 2025-04-17T18:06:44.398750Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=71; 2025-04-17T18:06:44.398836Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=46; 2025-04-17T18:06:44.398941Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2025-04-17T18:06:44.399055Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=61; 2025-04-17T18:06:44.409579Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10459; 2025-04-17T18:06:44.422991Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=13315; 2025-04-17T18:06:44.423117Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=32; 2025-04-17T18:06:44.423182Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=24; 2025-04-17T18:06:44.423220Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 
2025-04-17T18:06:44.423254Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-17T18:06:44.423290Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-17T18:06:44.423349Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2025-04-17T18:06:44.423387Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-17T18:06:44.423461Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2025-04-17T18:06:44.423498Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-17T18:06:44.423550Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-04-17T18:06:44.423635Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=42; 2025-04-17T18:06:44.423922Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=233; 2025-04-17T18:06:44.423957Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=92157; 2025-04-17T18:06:44.424085Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:06:44.424164Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:44.424219Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:44.424274Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:44.441320Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:44.441450Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:44.441500Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:44.441586Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:06:44.441639Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912886030;tx_id=18446744073709551615;;current_snapshot_ts=1744913148941; 2025-04-17T18:06:44.441676Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:44.441722Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:44.441751Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:44.441822Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:44.442552Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:44.442632Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2594:4496];tablet_id=9437184;parent=[1:2544:4453];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};; 2025-04-17T18:06:44.443803Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:44.444896Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:44.444928Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:06:44.444945Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:44.444991Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:44.445038Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:44.445095Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:06:44.445140Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912886030;tx_id=18446744073709551615;;current_snapshot_ts=1744913148941; 2025-04-17T18:06:44.445176Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:44.445222Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:44.445254Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:44.445328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:44.446112Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=22;path_id=1; 2025-04-17T18:06:44.447130Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2544:4453];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> TTicketParserTest::LoginBad >> KqpQueryService::DdlCache [GOOD] >> KqpQueryService::DdlExecuteScript >> 
TColumnShardTestSchema::RebootColdTiers [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912540.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912540.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912540.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124912540.000000s;Name=;Codec=}; 2025-04-17T18:05:40.555013Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.651109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.678424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.678779Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.687601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.687853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.688067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.688192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.688309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.688415Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.688533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.688638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.688749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.688857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.688957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.689048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.714945Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.715091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.715161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.715355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.715509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.715582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.715626Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.715719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.715778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.715822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.715855Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.716016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.716082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.716123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.716156Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.716237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.716305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.716347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.716376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.716451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.716489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.716519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.716576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.716619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.716652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.717086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=66; 2025-04-17T18:05:40.717170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-17T18:05:40.717241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-17T18:05:40.717321Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-17T18:05:40.717515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.717571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.717613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.717822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.717881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.717915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.718062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.718108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... EXECUTE:finishLoadingTime=363; 2025-04-17T18:06:46.587800Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=34949; 2025-04-17T18:06:46.594339Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6435; 2025-04-17T18:06:46.601395Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5941; 2025-04-17T18:06:46.601516Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7064; 2025-04-17T18:06:46.601694Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=101; 2025-04-17T18:06:46.601820Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2025-04-17T18:06:46.601967Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=92; 2025-04-17T18:06:46.602095Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=75; 
2025-04-17T18:06:46.609855Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7684; 2025-04-17T18:06:46.615623Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5656; 2025-04-17T18:06:46.615753Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-04-17T18:06:46.615827Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=31; 2025-04-17T18:06:46.615871Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-17T18:06:46.615907Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-17T18:06:46.615946Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-17T18:06:46.616018Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2025-04-17T18:06:46.616058Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-17T18:06:46.616135Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-04-17T18:06:46.616174Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-17T18:06:46.616227Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2025-04-17T18:06:46.616307Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=47; 2025-04-17T18:06:46.616606Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=266; 2025-04-17T18:06:46.616637Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=72049; 2025-04-17T18:06:46.616748Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:06:46.616834Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:46.616885Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:46.616945Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:46.631810Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:46.631937Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:46.631983Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:46.632042Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-17T18:06:46.632104Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912890411;tx_id=18446744073709551615;;current_snapshot_ts=1744913141701; 2025-04-17T18:06:46.632140Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:46.632176Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:46.632203Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:46.632274Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:46.632817Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:46.632881Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2205:4079];tablet_id=9437184;parent=[1:2165:4046];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-04-17T18:06:46.633942Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:46.634543Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:46.634571Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:06:46.634603Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:46.634635Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:46.634680Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:46.634742Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-17T18:06:46.634797Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912890411;tx_id=18446744073709551615;;current_snapshot_ts=1744913141701; 2025-04-17T18:06:46.634839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:46.634878Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:46.634909Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:46.634995Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:46.635381Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=18;path_id=1; 2025-04-17T18:06:46.636132Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> UpsertLoad::ShouldWriteDataBulkUpsert >> TColumnShardTestSchema::HotTiersWithStat [GOOD] ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912541.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913741.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912541.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912541.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124912541.000000s;Name=;Codec=}; 2025-04-17T18:05:42.294935Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:42.398847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:42.426890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:42.427175Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:42.435483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:42.435709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:42.435934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:42.436080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:42.436203Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:42.436312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:42.436424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:42.436529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:42.436640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:42.436744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.436846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:42.436947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:42.465744Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:42.465910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:42.465990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:42.466196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.466407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:42.466499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:42.466545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:42.466646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:42.466724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:42.466786Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:42.466817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:42.466982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.467040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:42.467078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:42.467106Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:42.467191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:42.467245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:42.467287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:42.467315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:42.467384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:42.467421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:42.467447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:42.467499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:42.467545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:42.467577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:42.468021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-17T18:05:42.468109Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-17T18:05:42.468184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-17T18:05:42.468281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-17T18:05:42.468484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:42.468554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:42.468597Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:42.468860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:42.468917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.468952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... 437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:06:47.436192Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912890825;tx_id=18446744073709551615;;current_snapshot_ts=1744913143461; 2025-04-17T18:06:47.436239Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:47.436292Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:47.436331Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:47.436419Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:47.436616Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913190825:max} readable: {1744913190825:max} at tablet 9437184 2025-04-17T18:06:47.436731Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 
2025-04-17T18:06:47.436867Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190825:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:47.436914Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190825:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:47.437297Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190825:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:47.437380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190825:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:47.437895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190825:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1960:3965];trace_detailed=; 2025-04-17T18:06:47.438396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:47.438661Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:47.438855Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:47.439010Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:47.439382Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:47.439492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:47.439612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:47.439660Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1960:3965] finished for tablet 9437184 2025-04-17T18:06:47.440005Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1959:3964];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913207437825,"name":"_full_task","f":1744913207437825,"d_finished":0,"c":0,"l":1744913207439717,"d":1892},"events":[{"name":"bootstrap","f":1744913207438039,"d_finished":1006,"c":1,"l":1744913207439045,"d":1006},{"a":1744913207439359,"name":"ack","f":1744913207439359,"d_finished":0,"c":0,"l":1744913207439717,"d":358},{"a":1744913207439342,"name":"processing","f":1744913207439342,"d_finished":0,"c":0,"l":1744913207439717,"d":375},{"name":"ProduceResults","f":1744913207438771,"d_finished":504,"c":2,"l":1744913207439642,"d":504},{"a":1744913207439646,"name":"Finish","f":1744913207439646,"d_finished":0,"c":0,"l":1744913207439717,"d":71}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:47.440061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1959:3964];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:47.440366Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1959:3964];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913207437825,"name":"_full_task","f":1744913207437825,"d_finished":0,"c":0,"l":1744913207440096,"d":2271},"events":[{"name":"bootstrap","f":1744913207438039,"d_finished":1006,"c":1,"l":1744913207439045,"d":1006},{"a":1744913207439359,"name":"ack","f":1744913207439359,"d_finished":0,"c":0,"l":1744913207440096,"d":737},{"a":1744913207439342,"name":"processing","f":1744913207439342,"d_finished":0,"c":0,"l":1744913207440096,"d":754},{"name":"ProduceResults","f":1744913207438771,"d_finished":504,"c":2,"l":1744913207439642,"d":504},{"a":1744913207439646,"name":"Finish","f":1744913207439646,"d_finished":0,"c":0,"l":1744913207440096,"d":450}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1960:3965]->[1:1959:3964] 2025-04-17T18:06:47.440437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:47.437349Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:47.440472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:47.440584Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1960:3965];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat 
[GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913742.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913742.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913742.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913742.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913742.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913742.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912542.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913742.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913742.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912542.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912542.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912542.000000s;Name=;Codec=}; 2025-04-17T18:05:42.474615Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:42.576616Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:42.602056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:42.602324Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:42.610424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:42.610649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:42.610895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:42.611021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:42.611142Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:42.611251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:42.611363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:42.611475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:42.611582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:42.611688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.611794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:42.611892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:42.636478Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:42.636646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:42.636726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:42.636909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.637075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:42.637161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:42.637207Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:42.637298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:42.637357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:42.637405Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:42.637440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:42.637607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.637669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:42.637713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:42.637742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:42.637829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:42.637882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:42.637922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:42.637952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:42.638023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:42.638061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:42.638091Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:42.638142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:42.638216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:42.638255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:42.638680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-17T18:05:42.638793Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-17T18:05:42.638874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-17T18:05:42.638965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-17T18:05:42.639180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:42.639271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:42.639309Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:42.639528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:42.639588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.639624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:06:48.022815Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912890999;tx_id=18446744073709551615;;current_snapshot_ts=1744913143635; 2025-04-17T18:06:48.022875Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:48.022942Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:48.022996Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:48.023101Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:48.023331Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913190999:max} readable: {1744913190999:max} at tablet 9437184 2025-04-17T18:06:48.023459Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 
2025-04-17T18:06:48.023621Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190999:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:48.023683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190999:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:48.024174Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190999:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:48.024275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190999:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:48.024781Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913190999:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1974:3979];trace_detailed=; 2025-04-17T18:06:48.025214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:48.025472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:48.025660Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:48.025797Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:48.026227Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:48.026363Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:48.026503Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:48.026549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1974:3979] finished for tablet 9437184 2025-04-17T18:06:48.027003Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1973:3978];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913208024710,"name":"_full_task","f":1744913208024710,"d_finished":0,"c":0,"l":1744913208026616,"d":1906},"events":[{"name":"bootstrap","f":1744913208024906,"d_finished":924,"c":1,"l":1744913208025830,"d":924},{"a":1744913208026197,"name":"ack","f":1744913208026197,"d_finished":0,"c":0,"l":1744913208026616,"d":419},{"a":1744913208026152,"name":"processing","f":1744913208026152,"d_finished":0,"c":0,"l":1744913208026616,"d":464},{"name":"ProduceResults","f":1744913208025572,"d_finished":527,"c":2,"l":1744913208026531,"d":527},{"a":1744913208026534,"name":"Finish","f":1744913208026534,"d_finished":0,"c":0,"l":1744913208026616,"d":82}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:48.027085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1973:3978];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:48.027526Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1973:3978];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913208024710,"name":"_full_task","f":1744913208024710,"d_finished":0,"c":0,"l":1744913208027136,"d":2426},"events":[{"name":"bootstrap","f":1744913208024906,"d_finished":924,"c":1,"l":1744913208025830,"d":924},{"a":1744913208026197,"name":"ack","f":1744913208026197,"d_finished":0,"c":0,"l":1744913208027136,"d":939},{"a":1744913208026152,"name":"processing","f":1744913208026152,"d_finished":0,"c":0,"l":1744913208027136,"d":984},{"name":"ProduceResults","f":1744913208025572,"d_finished":527,"c":2,"l":1744913208026531,"d":527},{"a":1744913208026534,"name":"Finish","f":1744913208026534,"d_finished":0,"c":0,"l":1744913208027136,"d":602}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-17T18:06:48.027623Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:48.024243Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:48.027672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:48.027784Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> UpsertLoad::ShouldDropCreateTable [GOOD] >> KqpJoin::RightSemiJoin_FullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> 
ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2025-04-17T18:06:42.990091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:42.990619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:42.990680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018ff/r3tmp/tmp2zNPiK/pdisk_1.dat 2025-04-17T18:06:43.328347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:43.367115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:43.405593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:43.405741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:43.417298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:43.497049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:43.792422Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-04-17T18:06:43.793996Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-17T18:06:43.849505Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.055201s, errors=0 2025-04-17T18:06:43.850736Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-17T18:06:43.850865Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:747:2629] with id# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-17T18:06:43.851692Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-17T18:06:43.851792Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:750:2632] 2025-04-17T18:06:43.851851Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-04-17T18:06:43.851875Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-04-17T18:06:43.852494Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected 
called, Status# OK 2025-04-17T18:06:43.857951Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.005402s, read# 1000 2025-04-17T18:06:43.858437Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 5 OperationsOK: 1000 OperationsError: 0 } 2025-04-17T18:06:43.858576Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:753:2635] 2025-04-17T18:06:43.858627Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-04-17T18:06:43.858670Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-04-17T18:06:43.858924Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-17T18:06:44.042636Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} finished in 0.183662s, read# 1000 2025-04-17T18:06:44.042792Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 183 OperationsOK: 1000 OperationsError: 0 } 2025-04-17T18:06:44.042920Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:756:2638] 2025-04-17T18:06:44.042965Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-04-17T18:06:44.042995Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-04-17T18:06:44.043249Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-17T18:06:44.095643Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} finished in 0.052340s, read# 1000 2025-04-17T18:06:44.095774Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 52 OperationsOK: 1000 OperationsError: 0 } 2025-04-17T18:06:44.095873Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:759:2641] 2025-04-17T18:06:44.095915Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called, sample# 1000 2025-04-17T18:06:44.095944Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-04-17T18:06:44.096176Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-17T18:06:44.098237Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} finished in 0.001670s, sampled# 1000, iter finished# 1, oks# 1000 2025-04-17T18:06:44.098335Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 1000 2025-04-17T18:06:44.098461Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} started read actor with id# [1:762:2644] 2025-04-17T18:06:44.098516Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called, will read keys# 1000 2025-04-17T18:06:44.397161Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-17T18:06:44.397357Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 
finished: 0 { DurationMs: 298 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 17\n" } 2025-04-17T18:06:44.397508Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} finished in 0.546497s with report: { DurationMs: 5 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 183 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 52 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 298 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 17\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-17T18:06:44.397849Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:747:2629] with tag# 3 2025-04-17T18:06:47.652809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:47.653147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:47.653359Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018ff/r3tmp/tmpJZ7wf5/pdisk_1.dat 2025-04-17T18:06:47.945077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:47.972433Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:48.009967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:48.010104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:48.021722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:48.106691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:48.389209Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-04-17T18:06:48.389524Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-17T18:06:48.412182Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor finished in 0.022359s, errors=0 2025-04-17T18:06:48.412758Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-17T18:06:48.412885Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:747:2629] with id# {Tag: 0, parent: [2:738:2620], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-17T18:06:48.414019Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-17T18:06:48.414139Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:750:2632] 2025-04-17T18:06:48.414254Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-04-17T18:06:48.414316Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-04-17T18:06:48.414596Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Handle 
TEvClientConnected called, Status# OK 2025-04-17T18:06:48.415693Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} finished in 0.001044s, read# 10 2025-04-17T18:06:48.415860Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 1 OperationsOK: 10 OperationsError: 0 } 2025-04-17T18:06:48.415977Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:753:2635] 2025-04-17T18:06:48.416113Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-04-17T18:06:48.416143Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-04-17T18:06:48.416416Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-17T18:06:48.418652Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 0.002196s, read# 10 2025-04-17T18:06:48.418764Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 2 OperationsOK: 10 OperationsError: 0 } 2025-04-17T18:06:48.418842Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:756:2638] 2025-04-17T18:06:48.418876Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-04-17T18:06:48.418903Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-04-17T18:06:48.419122Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-17T18:06:48.419779Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 0.000620s, read# 10 2025-04-17T18:06:48.419863Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-04-17T18:06:48.419958Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:759:2641] 2025-04-17T18:06:48.420002Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Bootstrap called, sample# 10 2025-04-17T18:06:48.420028Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-04-17T18:06:48.420236Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-17T18:06:48.420634Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 0.000336s, sampled# 10, iter finished# 1, oks# 10 2025-04-17T18:06:48.420714Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received keyCount# 10 2025-04-17T18:06:48.420850Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} started read actor with id# [2:762:2644] 2025-04-17T18:06:48.420913Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:747:2629], subTag: 5} Bootstrap called, will read keys# 10 2025-04-17T18:06:48.802342Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-17T18:06:48.802573Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { 
DurationMs: 381 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 29\n" } 2025-04-17T18:06:48.802773Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 0.389696s with report: { DurationMs: 1 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 2 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 381 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 29\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-17T18:06:48.802892Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3 >> TTicketParserTest::LoginGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-04-17T18:06:43.368597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:43.369119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:43.369182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018fd/r3tmp/tmpDqlqQ1/pdisk_1.dat 2025-04-17T18:06:43.690385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:43.733870Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:43.774534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:43.774716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:43.786507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:43.869256Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-04-17T18:06:44.148087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:645:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:44.148249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:44.179704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:44.505042Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-04-17T18:06:44.506703Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-17T18:06:44.529112Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor finished in 0.022076s, errors=0 2025-04-17T18:06:44.529384Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-17T18:06:44.529515Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-17T18:06:44.586403Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor finished in 0.056567s, errors=0 2025-04-17T18:06:44.586504Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:754:2630] with tag# 3 2025-04-17T18:06:47.456259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:47.456559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:47.456788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018fd/r3tmp/tmpnUd00p/pdisk_1.dat 2025-04-17T18:06:47.695690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:47.720847Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:47.759737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:47.759852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:47.771194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:47.853786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:48.114505Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-04-17T18:06:48.114621Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-04-17T18:06:48.569346Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.454417s, errors=0 2025-04-17T18:06:48.569423Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 2025-04-17T18:06:48.573624Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-04-17T18:06:48.585015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:781:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:48.585110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:48.767699Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-04-17T18:06:48.782790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:847:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:48.782874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:48.793055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:06:48.850009Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T18:06:49.028146Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-04-17T18:06:49.028425Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-17T18:06:49.040235Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor finished in 0.011480s, errors=0 2025-04-17T18:06:49.040551Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-17T18:06:49.040693Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-17T18:06:49.096644Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor finished in 0.055712s, errors=0 2025-04-17T18:06:49.096723Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:940:2782] with tag# 3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> TColumnShardTestSchema::HotTiers [GOOD] >> KqpLimits::WaitCAsTimeout [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-04-17T18:06:48.811227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:48.811787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:48.811851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018ef/r3tmp/tmpzxOyWx/pdisk_1.dat 2025-04-17T18:06:49.185295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:49.230494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:49.270068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:49.270267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:49.281688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:49.365326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:49.679214Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-04-17T18:06:49.679342Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-04-17T18:06:49.682513Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-04-17T18:06:49.682592Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T18:06:49.682646Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T18:06:49.682700Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T18:06:49.682729Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T18:06:49.682755Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-17T18:06:49.686009Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=NWU1ODY0M2UtNmU3YWM4NzEtZGQ2YjVhMWEtZWY4MWVlZDU= 2025-04-17T18:06:49.687356Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=ZmQwZjFlZmQtODM3OWI5OTItZDBhZGIzNjItZmMxOTYxZTg= 2025-04-17T18:06:49.688336Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: ydb://session/3?node_id=1&id=ODM5ODUzM2QtY2M2MWI3ZmUtMzUyMWRlZGYtZTJlZTdjNDY= 2025-04-17T18:06:49.689305Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=MjE5MjhmMWEtMjY2ZjgwYi03ZWM5MjY5ZS1mYTViOGU0OA== 2025-04-17T18:06:49.690257Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=ZmIxM2FmZGUtYTgyNmRlOWMtYTA3M2I2NmMtNjlmMDNjYTI= 2025-04-17T18:06:49.693456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:49.693526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:49.693559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:49.693596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:49.693631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:49.693667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:49.693715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:49.701360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:06:49.742634Z node 1 :TX_PROXY ERROR: Actor# [1:792:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:06:49.743225Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:06:49.743866Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:06:49.744209Z node 1 :TX_PROXY ERROR: Actor# [1:802:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:06:49.893237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:06:49.893333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:06:49.893402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:06:49.893460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:06:49.893537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:06:49.928008Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:50.469387Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1744913210.469334s, errors=0 2025-04-17T18:06:50.470471Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744913210469 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:06:50.484335Z node 1 :TX_PROXY ERROR: Actor# [1:973:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:50.547707Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1744913210.547667s, errors=0 2025-04-17T18:06:50.548016Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744913210547 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:06:50.561519Z node 1 :TX_PROXY ERROR: Actor# [1:1024:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:50.626229Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1744913210.626164s, errors=0 2025-04-17T18:06:50.626577Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744913210626 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:06:50.640436Z node 1 :TX_PROXY ERROR: Actor# [1:1075:2823] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:50.703029Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1744913210.702981s, errors=0 2025-04-17T18:06:50.703357Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744913210702 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:06:50.717027Z node 1 :TX_PROXY ERROR: Actor# [1:1126:2845] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:50.779503Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1744913210.779458s, errors=0 2025-04-17T18:06:50.779843Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744913210779 OperationsOK: 4 OperationsError: 0 } 2025-04-17T18:06:50.779931Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.097631s, oks# 20, errors# 0 2025-04-17T18:06:50.780139Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> TTicketParserTest::TicketFromCertificateCheckIssuerGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913744.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913744.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912544.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912544.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912544.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912544.000000s;Name=;Codec=}; 2025-04-17T18:05:44.930349Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:45.037666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:45.063227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:45.063548Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:45.072022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:45.072267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:45.072522Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:45.072658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:45.072792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:45.072917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:45.073036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:45.073156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:45.073258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:45.073370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.073482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:45.073591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:45.101493Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:45.101644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:45.101727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:45.101900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.102056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:45.102142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:45.102230Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:45.102344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:45.102409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:45.102461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:45.102491Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:45.102645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.102701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:45.102764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:45.102793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:45.102882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:45.102938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:45.102985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:45.103014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:45.103083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:45.103118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:45.103146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:45.103201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:45.103244Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:45.103283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:45.103685Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-17T18:05:45.103773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T18:05:45.103858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-17T18:05:45.103941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-17T18:05:45.104149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:45.104223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:45.104261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:45.104485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:45.104552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.104589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 
UG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:06:50.954809Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912893465;tx_id=18446744073709551615;;current_snapshot_ts=1744913146101; 2025-04-17T18:06:50.954859Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:50.954908Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:50.954948Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:50.955034Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:50.955228Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913193465:max} readable: {1744913193465:max} at tablet 9437184 2025-04-17T18:06:50.955351Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:50.955472Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913193465:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:50.955517Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913193465:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:50.955890Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913193465:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:50.955988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913193465:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:50.956411Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913193465:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1974:3979];trace_detailed=; 2025-04-17T18:06:50.956788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:50.956983Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:50.957123Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:50.957233Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:50.957528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:50.957615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:50.957714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:50.957745Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1974:3979] finished for tablet 9437184 2025-04-17T18:06:50.958060Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1973:3978];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913210956354,"name":"_full_task","f":1744913210956354,"d_finished":0,"c":0,"l":1744913210957790,"d":1436},"events":[{"name":"bootstrap","f":1744913210956563,"d_finished":694,"c":1,"l":1744913210957257,"d":694},{"a":1744913210957507,"name":"ack","f":1744913210957507,"d_finished":0,"c":0,"l":1744913210957790,"d":283},{"a":1744913210957493,"name":"processing","f":1744913210957493,"d_finished":0,"c":0,"l":1744913210957790,"d":297},{"name":"ProduceResults","f":1744913210957058,"d_finished":378,"c":2,"l":1744913210957733,"d":378},{"a":1744913210957735,"name":"Finish","f":1744913210957735,"d_finished":0,"c":0,"l":1744913210957790,"d":55}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:50.958121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1973:3978];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:50.958457Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1973:3978];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913210956354,"name":"_full_task","f":1744913210956354,"d_finished":0,"c":0,"l":1744913210958165,"d":1811},"events":[{"name":"bootstrap","f":1744913210956563,"d_finished":694,"c":1,"l":1744913210957257,"d":694},{"a":1744913210957507,"name":"ack","f":1744913210957507,"d_finished":0,"c":0,"l":1744913210958165,"d":658},{"a":1744913210957493,"name":"processing","f":1744913210957493,"d_finished":0,"c":0,"l":1744913210958165,"d":672},{"name":"ProduceResults","f":1744913210957058,"d_finished":378,"c":2,"l":1744913210957733,"d":378},{"a":1744913210957735,"name":"Finish","f":1744913210957735,"d_finished":0,"c":0,"l":1744913210958165,"d":430}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-17T18:06:50.958527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:50.955956Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:50.958563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:50.958647Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::WaitCAsTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 15027, MsgBus: 18594 2025-04-17T18:00:18.498738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343491056659103:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:18.499129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a9e/r3tmp/tmpk8vgUP/pdisk_1.dat 2025-04-17T18:00:19.280026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:19.294521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:19.294677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:19.303066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15027, node 1 2025-04-17T18:00:19.610749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:19.610767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:19.610774Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:19.610868Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:18594 TClient is connected to server localhost:18594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:20.653075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:20.714361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:00:20.724233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:21.025074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:21.297703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:21.414213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:23.491800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343491056659103:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:23.491871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:23.783260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343512531497213:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:23.783360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.185236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.228669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.271335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.336367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.374954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.444187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.497978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:24.594912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343516826465180:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.594984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.595426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343516826465185:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:24.599524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:24.612682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343516826465187:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:24.713849Z node 1 :TX_PROXY ERROR: Actor# [1:7494343516826465244:3562] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:25.901269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:34.282694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:00:34.282724Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 26224, MsgBus: 7069 2025-04-17T18:04:16.893963Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344513285394300:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:16.894028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a9e/r3tmp/tmpFpxx37/pdisk_1.dat 2025-04-17T18:04:17.131913Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:17.159949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:17.160057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:17.161880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26224, node 2 2025-04-17T18:04:17.254731Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:17.254751Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:17.254759Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:17.254901Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7069 TClient is connected to server localhost:7069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:04:17.728223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:17.750839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2 ... baseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzhiYzYzYzItNzZhNzBlZTYtYThiZTc2MDMtNWVhNDY2MGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-04-17T18:06:36.986108Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2937:4278], TxId: 281474976715672, task: 3. Ctx: { TraceId : 01js2ewsxj059chza0e5c950xw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YzhiYzYzYzItNzZhNzBlZTYtYThiZTc2MDMtNWVhNDY2MGM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:2929:4234], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-17T18:06:36.986609Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2935:4276], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YzhiYzYzYzItNzZhNzBlZTYtYThiZTc2MDMtNWVhNDY2MGM=. TraceId : 01js2ewsxj059chza0e5c950xw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:2929:4234], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-17T18:06:36.986841Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2936:4277], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01js2ewsxj059chza0e5c950xw. SessionId : ydb://session/3?node_id=3&id=YzhiYzYzYzItNzZhNzBlZTYtYThiZTc2MDMtNWVhNDY2MGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:2929:4234], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-17T18:06:36.987281Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzhiYzYzYzItNzZhNzBlZTYtYThiZTc2MDMtNWVhNDY2MGM=, ActorId: [3:2890:4234], ActorState: ExecuteState, TraceId: 01js2ewsxj059chza0e5c950xw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 3576, MsgBus: 17688 2025-04-17T18:06:42.491371Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:42.491720Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:06:42.491823Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a9e/r3tmp/tmpcVgptF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3576, node 4 2025-04-17T18:06:42.940723Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:42.940829Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:42.940880Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:42.941358Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:42.942613Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:43.020469Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:43.020620Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:43.031957Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17688 TClient is connected to server localhost:17688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:43.445178Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:43.572705Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:43.853429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:44.312537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:44.669550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:45.282865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1814:3410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:45.283017Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:45.307848Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:45.504168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:45.764643Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:46.010273Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:46.324016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:46.600875Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:46.970082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:47.335757Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2561:3981], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:47.335884Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:47.336325Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2566:3986], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:47.343821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:47.509698Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2568:3988], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:47.557111Z node 4 :TX_PROXY ERROR: Actor# [4:2635:4036] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:50.239980Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:2936:4237] TxId: 281474976715672. Ctx: { TraceId: 01js2ex6vq2ntgkb9shxy6vy69, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzNzgxOTYtZmJjNWU0MDktNThmMTk1NjctMTRmZDZhYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-04-17T18:06:50.241830Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:2944:4283], TxId: 281474976715672, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZDQzNzgxOTYtZmJjNWU0MDktNThmMTk1NjctMTRmZDZhYmM=. TraceId : 01js2ex6vq2ntgkb9shxy6vy69. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:2936:4237], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-17T18:06:50.242742Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:2942:4281], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZDQzNzgxOTYtZmJjNWU0MDktNThmMTk1NjctMTRmZDZhYmM=. TraceId : 01js2ex6vq2ntgkb9shxy6vy69. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:2936:4237], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-17T18:06:50.243122Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:2943:4282], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZDQzNzgxOTYtZmJjNWU0MDktNThmMTk1NjctMTRmZDZhYmM=. TraceId : 01js2ex6vq2ntgkb9shxy6vy69. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:2936:4237], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-17T18:06:50.646105Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDQzNzgxOTYtZmJjNWU0MDktNThmMTk1NjctMTRmZDZhYmM=, ActorId: [4:2895:4237], ActorState: ExecuteState, TraceId: 01js2ex6vq2ntgkb9shxy6vy69, Create QueryResponse for error on request, msg: >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913745.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913745.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913745.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913745.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912545.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913745.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912545.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912545.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124912545.000000s;Name=;Codec=}; 2025-04-17T18:05:45.748536Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:45.860886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:45.886402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:45.886671Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:45.894858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:45.895075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:45.895295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:45.895414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:45.895537Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:45.895639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:45.895749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:45.895855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:45.895963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:45.896068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.896169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:45.896264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:45.926752Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:45.926936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:45.927002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:45.927155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.927288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:45.927361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:45.927403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:45.927495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:45.927559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:45.927603Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:45.927630Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:45.927814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.927875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:45.927912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:45.927944Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:45.928028Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:45.928076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:45.928120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:45.928153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:45.928217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:45.928252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:45.928282Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:45.928325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:45.928366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:45.928398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:45.928755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-17T18:05:45.928836Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T18:05:45.928905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:05:45.928981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-17T18:05:45.929188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:45.929268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:45.929305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:45.929510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:45.929555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.929588Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.929742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:45.929779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
CUTE:finishLoadingTime=322; 2025-04-17T18:06:52.361599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=30031; 2025-04-17T18:06:52.367500Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=5823; 2025-04-17T18:06:52.373882Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5410; 2025-04-17T18:06:52.373988Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6384; 2025-04-17T18:06:52.374156Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=101; 2025-04-17T18:06:52.374324Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=97; 2025-04-17T18:06:52.374480Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=99; 2025-04-17T18:06:52.374624Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=92; 2025-04-17T18:06:52.382724Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8023; 2025-04-17T18:06:52.393497Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10661; 2025-04-17T18:06:52.393641Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=47; 2025-04-17T18:06:52.393736Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=40; 2025-04-17T18:06:52.393793Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-17T18:06:52.393841Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-17T18:06:52.393887Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-17T18:06:52.393969Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-04-17T18:06:52.394019Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-17T18:06:52.394135Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=73; 2025-04-17T18:06:52.394214Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=28; 2025-04-17T18:06:52.394301Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=45; 2025-04-17T18:06:52.394408Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=63; 2025-04-17T18:06:52.394779Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=324; 2025-04-17T18:06:52.394823Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=70520; 2025-04-17T18:06:52.394972Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:06:52.395075Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:06:52.395123Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:06:52.395187Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:06:52.413886Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:06:52.414033Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:52.414090Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:52.414155Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-17T18:06:52.414232Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912895626;tx_id=18446744073709551615;;current_snapshot_ts=1744913146916; 2025-04-17T18:06:52.414287Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:52.414332Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:52.414369Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:52.414458Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:52.415090Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:52.415168Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2205:4079];tablet_id=9437184;parent=[1:2165:4046];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-04-17T18:06:52.416083Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:06:52.416167Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:06:52.416194Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:06:52.416218Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:06:52.416259Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:06:52.416312Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:06:52.416363Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-17T18:06:52.416421Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912895626;tx_id=18446744073709551615;;current_snapshot_ts=1744913146916; 2025-04-17T18:06:52.416463Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:52.416517Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:52.416559Z node 1 :TX_COLUMNSHARD 
DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:52.416642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:52.417649Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=18;path_id=1; 2025-04-17T18:06:52.418696Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2165:4046];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2025-04-17T18:06:48.223992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:48.224513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:48.224563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018fc/r3tmp/tmpDDvWOZ/pdisk_1.dat 2025-04-17T18:06:48.582212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:48.617386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:48.654855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:48.654968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:48.666256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:48.747006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:49.045488Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-04-17T18:06:49.045630Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-17T18:06:49.128307Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.082292s, errors=0 2025-04-17T18:06:49.128387Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-04-17T18:06:52.286084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:52.286414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:52.286620Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018fc/r3tmp/tmpVcpyKx/pdisk_1.dat 2025-04-17T18:06:52.560372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.592088Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:52.629686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:52.629808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:52.641407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:52.724846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.989347Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-17T18:06:52.989434Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-17T18:06:53.061487Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.071774s, errors=0 2025-04-17T18:06:53.061585Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> KqpQueryService::DdlExecuteScript [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 1604, MsgBus: 25391 2025-04-17T18:04:45.528288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344636821678380:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:45.528385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bdf/r3tmp/tmpaSh4Ii/pdisk_1.dat 2025-04-17T18:04:46.035462Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:46.064908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:46.065013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:46.066750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1604, node 1 2025-04-17T18:04:46.178148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:46.178250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:46.178259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:46.178383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25391 TClient is connected to server localhost:25391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:46.822252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:46.845828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:46.865219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:47.019605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:47.182766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:04:47.266491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:04:48.980739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344649706582046:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:48.980845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.352337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.383345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.459644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.487220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.517745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.570832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.644172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.706499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344654001550011:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.706610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.710412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344654001550016:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:49.717382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:49.730735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344654001550018:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:49.810174Z node 1 :TX_PROXY ERROR: Actor# [1:7494344654001550073:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:50.530833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344636821678380:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:50.530883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:50.823420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:04:51.072779Z node 1 :TX_PROXY ERROR: Actor# [1:7494344662591485172:3897] txid# 281474976710675, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:51.072962Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710675, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl_0', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-17T18:04:51.073056Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2VkMTg5ZTItNzQwMWQ2MTgtOTY5ODBmM2MtNDE3M2ZkNjU=, ActorId: [1:7494344662591485160:2538], ActorState: ExecuteState, TraceId: 01js2esjfja475af5bf4e8j20p, Create QueryResponse for error on request, msg: 2025-04-17T18:04:51.099140Z node 1 :TX_PROXY ERROR: Actor# [1:7494344662591485196:3909] txid# 281474976710677, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:51.190832Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-17T18:04:51.194013Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344662591485269:2563], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:04:51.194266Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzA0MDkxODUtZjNjZDZmZWEtZjYxNzY3N2YtZjc3MzFkZmI=, ActorId: [1:7494344662591485266:2562], ActorState: ExecuteState, TraceId: 01js2esjkbdntcph41jwgnq2bp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:04:51.216754Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344662591485285:2567], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiDropTable!
:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:04:51.217889Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzU3ZDZmMmUtMTViN2YzNGUtOTRhNGM2NjAtY2Y5NzM0YmI=, ActorId: [1:7494344662591485283:2566], ActorState: ExecuteState, TraceId: 01js2esjm70d45aafnbksjgccr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:04:51.240150Z node 1 :TX_PROXY ERROR: Actor# [1:7494344662591485316:3987] txid# 281474976710680, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T18:04:51.262808Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494344662591485322:2575], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable! ... elf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:44.878861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345147340250106:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:44.879004Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:44.879118Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345147340250111:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:44.882754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:44.892445Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345147340250113:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:44.997605Z node 3 :TX_PROXY ERROR: Actor# [3:7494345147340250170:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:45.856961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:06:46.040829Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037922 not found 2025-04-17T18:06:46.067502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:06:46.222259Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345134455345785:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:46.222356Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15468, MsgBus: 29436 2025-04-17T18:06:47.033179Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345159694055944:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:47.034259Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bdf/r3tmp/tmpwbtHqf/pdisk_1.dat 2025-04-17T18:06:47.136281Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15468, node 4 2025-04-17T18:06:47.178715Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:47.178810Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:47.180430Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:47.216779Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:47.216801Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:47.216811Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:47.216936Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29436 TClient is connected to server localhost:29436 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:47.712345Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:47.725675Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:47.807882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:47.982194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:48.064087Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:50.429103Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345172578959598:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.429205Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.494804Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.525454Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.555014Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.583826Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.613672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.670711Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.710320Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.780233Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345172578960261:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.780334Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.780594Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345172578960266:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.784189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:50.793792Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345172578960268:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:50.886761Z node 4 :TX_PROXY ERROR: Actor# [4:7494345172578960324:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:51.980637Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:06:51.982416Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:06:51.984665Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.033873Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494345159694055944:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:52.033964Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:06:52.460414Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> KqpJoinOrder::ShuffleEliminationOneJoin >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913748.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913748.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913748.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913748.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913748.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913748.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912548.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913748.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913748.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912548.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912548.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912548.000000s;Name=;Codec=}; 2025-04-17T18:05:48.822462Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:48.937365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:48.960433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:48.960708Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:48.968941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:48.969173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:48.969402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:48.969533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:48.969655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:48.969769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:48.969879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:48.969995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:48.970102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:48.970240Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:48.970379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:48.970556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:48.999508Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:48.999657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:48.999731Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:48.999916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:49.000100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:49.000185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:49.000227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:49.000317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:49.000375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:49.000417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:49.000449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:49.000603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:49.000657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:49.000694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:49.000722Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:49.000805Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:49.000855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:49.000895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:49.000923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:49.000992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:49.001027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:49.001056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:49.001103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:49.001147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:49.001182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:49.001559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-17T18:05:49.001640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-17T18:05:49.001714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-17T18:05:49.001787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T18:05:49.001978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:49.002037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:49.002072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:49.002340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T18:05:49.002398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:49.002440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:06:55.186539Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912897361;tx_id=18446744073709551615;;current_snapshot_ts=1744913149997; 2025-04-17T18:06:55.186606Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:06:55.186681Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:55.186746Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:06:55.186871Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:06:55.187179Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913197361:max} readable: {1744913197361:max} at tablet 9437184 2025-04-17T18:06:55.187334Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:06:55.187500Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913197361:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:55.187564Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913197361:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-17T18:06:55.188073Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913197361:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-17T18:06:55.188163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913197361:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:06:55.188648Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913197361:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1974:3979];trace_detailed=; 2025-04-17T18:06:55.189259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-17T18:06:55.189568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-17T18:06:55.189788Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:55.189919Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:55.190297Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:06:55.190396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-17T18:06:55.190513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:55.190552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1974:3979] finished for tablet 9437184 2025-04-17T18:06:55.190993Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1973:3978];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913215188585,"name":"_full_task","f":1744913215188585,"d_finished":0,"c":0,"l":1744913215190603,"d":2018},"events":[{"name":"bootstrap","f":1744913215188877,"d_finished":1069,"c":1,"l":1744913215189946,"d":1069},{"a":1744913215190276,"name":"ack","f":1744913215190276,"d_finished":0,"c":0,"l":1744913215190603,"d":327},{"a":1744913215190259,"name":"processing","f":1744913215190259,"d_finished":0,"c":0,"l":1744913215190603,"d":344},{"name":"ProduceResults","f":1744913215189701,"d_finished":455,"c":2,"l":1744913215190536,"d":455},{"a":1744913215190538,"name":"Finish","f":1744913215190538,"d_finished":0,"c":0,"l":1744913215190603,"d":65}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:06:55.191089Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1973:3978];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:06:55.191573Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1973:3978];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913215188585,"name":"_full_task","f":1744913215188585,"d_finished":0,"c":0,"l":1744913215191145,"d":2560},"events":[{"name":"bootstrap","f":1744913215188877,"d_finished":1069,"c":1,"l":1744913215189946,"d":1069},{"a":1744913215190276,"name":"ack","f":1744913215190276,"d_finished":0,"c":0,"l":1744913215191145,"d":869},{"a":1744913215190259,"name":"processing","f":1744913215190259,"d_finished":0,"c":0,"l":1744913215191145,"d":886},{"name":"ProduceResults","f":1744913215189701,"d_finished":455,"c":2,"l":1744913215190536,"d":455},{"a":1744913215190538,"name":"Finish","f":1744913215190538,"d_finished":0,"c":0,"l":1744913215191145,"d":607}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-17T18:06:55.191681Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:06:55.188135Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:06:55.191736Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:06:55.191855Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-04-17T18:06:50.633389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled 
retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:50.633754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:50.633807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018ee/r3tmp/tmp2P4sHF/pdisk_1.dat 2025-04-17T18:06:50.986774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:51.028254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:51.068084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:51.068240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:51.080354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:51.161904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:51.489881Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-17T18:06:51.490032Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-17T18:06:51.558861Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.068412s, errors=0 2025-04-17T18:06:51.558981Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-04-17T18:06:54.473852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:54.474035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:54.474171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0018ee/r3tmp/tmpHCFu7N/pdisk_1.dat 2025-04-17T18:06:54.700875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.726122Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:54.764349Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:54.764451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:54.775992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:54.862446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:55.129848Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-17T18:06:55.130005Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-04-17T18:06:55.202209Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.071692s, errors=0 2025-04-17T18:06:55.202320Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> KqpJoin::RightSemiJoin_FullScan [GOOD] |87.4%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTicketParserTest::AuthenticationWithUserAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_FullScan [GOOD] Test command err: Trying to start YDB, gRPC: 22466, MsgBus: 10746 2025-04-17T18:06:49.872520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345169156313566:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:49.872580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e60/r3tmp/tmpb79ROs/pdisk_1.dat 2025-04-17T18:06:50.188572Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22466, node 1 2025-04-17T18:06:50.256587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:50.256685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:50.258002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:50.264346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:50.264376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:50.264390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:50.264525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10746 TClient is connected to server localhost:10746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:50.681086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:50.694575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:06:50.707456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:50.843864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:50.989671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:51.064159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:52.674512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345182041217235:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:52.674578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:52.983365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:53.054406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:53.084773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:53.112073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:53.140764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:53.213447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:53.302488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:53.368227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345186336185202:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:53.368350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:53.368438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345186336185207:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:53.371859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:53.386376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345186336185209:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:53.480962Z node 1 :TX_PROXY ERROR: Actor# [1:7494345186336185268:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:54.434722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.465127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.491387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.550869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.624223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.874324Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345169156313566:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:54.874409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> TTicketParserTest::AccessServiceAuthenticationOk >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> KqpBatchDelete::Large_1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 2702, MsgBus: 31862 2025-04-17T18:06:38.953235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345120453055737:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:38.953942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cbc/r3tmp/tmpkKrzAp/pdisk_1.dat 2025-04-17T18:06:39.210559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2702, node 1 2025-04-17T18:06:39.287875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:39.287902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:39.287911Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:39.288010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:39.301287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-17T18:06:39.301412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:39.303096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31862 TClient is connected to server localhost:31862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:39.728039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:39.752491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:39.868113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:40.003184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:40.065858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:41.718351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345133337959393:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.718490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.981665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.008782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.030857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.055280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.081962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.148575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.215611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.293367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345137632927360:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:42.293422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:42.293504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345137632927365:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:42.296618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:42.306421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345137632927367:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:42.385909Z node 1 :TX_PROXY ERROR: Actor# [1:7494345137632927424:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:43.255321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:43.953316Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345120453055737:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:43.953412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10577, MsgBus: 23040 2025-04-17T18:06:47.733766Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345160401271308:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:47.733875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cbc/r3tmp/tmpdATnNU/pdisk_1.dat 2025-04-17T18:06:47.846319Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:47.881009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:47.881109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10577, node 2 2025-04-17T18:06:47.884636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:47.925563Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:47.925588Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:47.925595Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:47.925730Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23040 TClient is connected to server localhost:23040 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:48.336433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:48.351132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:48.425374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:48.547239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... emeshard: 72057594046644480 2025-04-17T18:06:50.600123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.631209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.660370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.729465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.769236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.819185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345173286175629:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.819264Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.819338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345173286175634:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.822640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:50.831718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345173286175636:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:50.928124Z node 2 :TX_PROXY ERROR: Actor# [2:7494345173286175691:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:51.897671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:52.734439Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345160401271308:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:52.734487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4095, MsgBus: 26445 2025-04-17T18:06:54.730637Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345189334422736:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:54.730754Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cbc/r3tmp/tmpPj8quN/pdisk_1.dat 2025-04-17T18:06:54.825688Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4095, node 3 2025-04-17T18:06:54.867440Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:54.867541Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:54.869824Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:54.899035Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:54.899062Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:54.899071Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:54.899197Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26445 TClient is connected to server localhost:26445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:55.338381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:55.358492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:06:55.411013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:06:55.602982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:06:55.678400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.065812Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345206514293702:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:58.065898Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:58.123395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.156499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.185269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.213752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.242890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.272884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.343370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:58.420732Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345206514294367:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:58.420807Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345206514294372:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:58.420812Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:58.424085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:58.434086Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345206514294374:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:58.488682Z node 3 :TX_PROXY ERROR: Actor# [3:7494345206514294428:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:59.453303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:59.737512Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345189334422736:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:59.738223Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2025-04-17T18:06:46.424615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345156520881349:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:46.424772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb1/r3tmp/tmpajjYe8/pdisk_1.dat 2025-04-17T18:06:46.733420Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28651, node 1 2025-04-17T18:06:46.815373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:46.815398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:06:46.815411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:46.815563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:46.819810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:46.819969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:46.821735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28005 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:47.075789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:47.187567Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:06:47.192937Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:06:47.192989Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:47.193543Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2025-04-17T18:06:47.193556Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2025-04-17T18:06:47.193690Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format 2025-04-17T18:06:49.411723Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345170498230685:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:49.411795Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb1/r3tmp/tmpE24euz/pdisk_1.dat 2025-04-17T18:06:49.518995Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19004, node 2 2025-04-17T18:06:49.554880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:49.555067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:49.557547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:49.580737Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:49.580766Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:49.580773Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:49.580921Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:49.791657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:49.798202Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:06:49.798234Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:49.798250Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:06:49.798411Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-17T18:06:49.798475Z node 2 :GRPC_CLIENT DEBUG: [517000021f88] Connect to grpc://localhost:61645 2025-04-17T18:06:49.802892Z node 2 :GRPC_CLIENT DEBUG: [517000021f88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-17T18:06:49.814642Z node 2 :GRPC_CLIENT DEBUG: [517000021f88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-17T18:06:49.814851Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-17T18:06:49.814971Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:06:49.815437Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:06:49.815464Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:49.815475Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:06:49.815553Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-17T18:06:49.815744Z node 2 :GRPC_CLIENT DEBUG: [517000021f88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-17T18:06:49.824998Z node 2 :GRPC_CLIENT DEBUG: [517000021f88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-17T18:06:49.825144Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-17T18:06:49.825214Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2025-04-17T18:06:52.701307Z node 3 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345183130420440:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:52.701410Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb1/r3tmp/tmpCP3o88/pdisk_1.dat 2025-04-17T18:06:52.886951Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:52.900231Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:52.900318Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:52.903140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5690, node 3 2025-04-17T18:06:52.951352Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:52.951378Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:52.951389Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:52.951520Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ... 
{ id: "user1" } } } 2025-04-17T18:06:53.226273Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-17T18:06:53.226443Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-17T18:06:56.415933Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345198451597908:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:56.415999Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb1/r3tmp/tmpaPL8Iu/pdisk_1.dat 2025-04-17T18:06:56.630146Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:56.646332Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:56.646426Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:56.650711Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15457, node 4 2025-04-17T18:06:56.710698Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:56.710721Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:56.710728Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:56.710902Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:06:56.970882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:56.980668Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:06:56.980705Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:56.980713Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:06:56.980833Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2025-04-17T18:06:56.980887Z node 4 :GRPC_CLIENT DEBUG: [517000054b88] Connect to grpc://localhost:8571 2025-04-17T18:06:56.982054Z node 4 :GRPC_CLIENT DEBUG: [517000054b88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2025-04-17T18:06:57.006284Z node 4 :GRPC_CLIENT DEBUG: [517000054b88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2025-04-17T18:06:57.006553Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2025-04-17T18:06:57.006581Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2025-04-17T18:06:57.006593Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2025-04-17T18:06:57.006607Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2025-04-17T18:06:57.006627Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-17T18:06:57.006816Z node 4 :GRPC_CLIENT DEBUG: [517000054808] Connect to grpc://localhost:26899 2025-04-17T18:06:57.007773Z node 4 :GRPC_CLIENT DEBUG: [517000054808] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-17T18:06:57.022506Z node 4 :GRPC_CLIENT DEBUG: [517000054808] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-17T18:06:57.022958Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-17T18:06:59.984294Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345212909467613:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/6873/001cb1/r3tmp/tmp01hOIc/pdisk_1.dat 2025-04-17T18:07:00.001352Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:07:00.102061Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:00.104655Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:00.104751Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:00.106150Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21543, node 5 2025-04-17T18:07:00.156465Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:00.156486Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:00.156496Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:00.156636Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:07:00.416579Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:00.423286Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:00.423315Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:00.423323Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:00.423385Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-17T18:07:00.423414Z node 5 :GRPC_CLIENT DEBUG: [5170000ecc08] Connect to grpc://localhost:24450 2025-04-17T18:07:00.424200Z node 5 :GRPC_CLIENT DEBUG: [5170000ecc08] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-17T18:07:00.431554Z node 5 :GRPC_CLIENT DEBUG: [5170000ecc08] Status 14 Service Unavailable 2025-04-17T18:07:00.431811Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:07:00.431835Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:07:00.431869Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:00.431933Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-17T18:07:00.432123Z node 5 :GRPC_CLIENT DEBUG: [5170000ecc08] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-17T18:07:00.433413Z node 5 :GRPC_CLIENT DEBUG: [5170000ecc08] Status 14 Service Unavailable 2025-04-17T18:07:00.434701Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:07:00.434715Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:07:00.434730Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913738.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912538.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-17T18:05:40.672266Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.779971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.808885Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.809201Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.817773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.818026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.818322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.818467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.818605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.818763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.818892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.819025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.819143Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.819266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.819391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.819502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.850360Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.850521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.850619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.850816Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.851001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.851092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.851143Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.851246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.851310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.851364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.851398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.851565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.851627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.851671Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.851703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.851800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.851859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.851904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.851939Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:40.852017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.852059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.852093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.852148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.852202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.852239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.852667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-17T18:05:40.852766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-17T18:05:40.852871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=56; 2025-04-17T18:05:40.852972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-17T18:05:40.853171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.853237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.853276Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-04-17T18:05:40.853502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-17T18:05:40.853566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-17T18:05:40.853605Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-04-17T18:05:40.853771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-17T18:05:40.853817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-17T18:05:40.853848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-04-17T18:05:40.854062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... d;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:70:9384:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:1:8688:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:60:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:19:8576:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:24:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:15:8648:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:46:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:63:8568:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:14:8656:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:89:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:80:8368:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:28:8712:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:50:8536:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:7:8688:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:74:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:81:8336:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:4:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:32:8704:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:33:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:88:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:41:2848:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:61:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:27:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:44:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:69:2768:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:93:8368:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:64:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:56:9608:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:79:8408:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:30:8704:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:87:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:94:8360:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:59:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:6:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:20:2840:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:49:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:3:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:47:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:18:8592:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:86:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:45:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:98:9384:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:58:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:96:8328:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:72:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:84:9608:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:11:8672:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:36:8624:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:66:8360:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:77:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:13:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 >> KqpBatchUpdate::SimpleOnePartition [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization >> TTicketParserTest::LoginRefreshGroupsWithError >> KqpBatchDelete::Large_3 [GOOD] >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 19332, MsgBus: 26858 2025-04-17T18:06:29.866067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345081162458719:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:29.866152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d1d/r3tmp/tmpqrML2U/pdisk_1.dat 2025-04-17T18:06:30.168029Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19332, node 1 2025-04-17T18:06:30.218609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:30.218736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:30.220483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:30.233296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:30.233322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:30.233329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:30.233476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26858 TClient is connected to server localhost:26858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:06:30.640722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:30.655828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:30.766754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:30.874965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:30.948675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:32.415039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345094047362389:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:32.415166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:32.656462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:32.681286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:32.705544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:32.730050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:32.754803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:32.793060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:32.817326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:32.895845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345094047363052:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:32.895889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:32.895954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345094047363057:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:32.899219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:32.907752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345094047363059:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:32.978042Z node 1 :TX_PROXY ERROR: Actor# [1:7494345094047363113:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:34.865967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345081162458719:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:34.866036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30541, MsgBus: 64618 2025-04-17T18:06:37.978971Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345118578858409:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:37.979044Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d1d/r3tmp/tmpuTfgzk/pdisk_1.dat 2025-04-17T18:06:38.054850Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30541, node 2 2025-04-17T18:06:38.105729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:38.105839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:38.107360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:38.112128Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:38.112154Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:38.112167Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:38.112320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64618 TClient is connected to server localhost:64618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:06:38.394703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:38.405792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:38.450614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:38.582700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:38.656095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.007683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.031116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.060786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.086861Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.122458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.153351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:50.201900Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345173750924166:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.201998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.202003Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345173750924171:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:50.205726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:50.216569Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345173750924173:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:50.296967Z node 3 :TX_PROXY ERROR: Actor# [3:7494345173750924228:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:51.781058Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345156571052557:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:51.781131Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30681, MsgBus: 11375 2025-04-17T18:06:56.046519Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345200559273285:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:56.046605Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000d1d/r3tmp/tmpjrAZdB/pdisk_1.dat 2025-04-17T18:06:56.169394Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:56.191309Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:56.191373Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:56.192960Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30681, node 4 2025-04-17T18:06:56.234870Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:56.234896Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:56.234904Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:56.235060Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11375 TClient is connected to server localhost:11375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:06:56.667836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:56.674561Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:56.688450Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:56.796888Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:56.979560Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:57.062254Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:59.261736Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345213444176933:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.261807Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.311119Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.339583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.365513Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.395171Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.429200Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.462704Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.503036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.562469Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345213444177590:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.562564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.562734Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345213444177595:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.565975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:59.581619Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345213444177597:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:59.660188Z node 4 :TX_PROXY ERROR: Actor# [4:7494345213444177651:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:01.047561Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494345200559273285:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:01.047646Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912541.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913741.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912541.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912541.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912541.000000s;Name=;Codec=}; 2025-04-17T18:05:41.883828Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:41.987474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:42.003894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:42.004141Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:42.010729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:42.010943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:42.011161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:42.011280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:42.011370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:42.011438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:42.011523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:42.011595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:42.011675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:42.011750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.011829Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:42.011894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:42.031557Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:42.031708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:42.031811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:42.031997Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.032152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:42.032233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:42.032277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:42.032397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:42.032467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:42.032515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:42.032549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:42.032715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:42.032776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:42.032816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:42.032848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:42.032942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:42.032998Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:42.033042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:42.033073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:42.033144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:42.033189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:42.033218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:42.033272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:42.033316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:42.033352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:42.033750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-17T18:05:42.033842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T18:05:42.033924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-17T18:05:42.034015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-04-17T18:05:42.034234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:42.034295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:42.034331Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:42.034542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:42.034597Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:42.034645Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... finishLoadingTime=517; 2025-04-17T18:07:06.831369Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=54319; 2025-04-17T18:07:06.844375Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12914; 2025-04-17T18:07:06.857394Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11934; 2025-04-17T18:07:06.857514Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13029; 2025-04-17T18:07:06.857689Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=103; 2025-04-17T18:07:06.857833Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=92; 2025-04-17T18:07:06.857995Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=113; 2025-04-17T18:07:06.858137Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=91; 2025-04-17T18:07:06.872200Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13895; 2025-04-17T18:07:06.890394Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18060; 2025-04-17T18:07:06.890545Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-04-17T18:07:06.890625Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=29; 2025-04-17T18:07:06.890676Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-17T18:07:06.890725Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-17T18:07:06.890775Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-17T18:07:06.890873Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=57; 2025-04-17T18:07:06.890929Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-17T18:07:06.891030Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2025-04-17T18:07:06.891080Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-17T18:07:06.891152Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-04-17T18:07:06.891264Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=65; 2025-04-17T18:07:06.891636Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=326; 2025-04-17T18:07:06.891681Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=122406; 2025-04-17T18:07:06.891839Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:07:06.891945Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:07:06.891998Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:07:06.892060Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:07:06.912029Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:07:06.912214Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:06.912283Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:06.912359Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:06.912428Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912891942;tx_id=18446744073709551615;;current_snapshot_ts=1744913143048; 2025-04-17T18:07:06.912473Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:06.912521Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:06.912563Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:06.912660Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:06.913195Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:06.913288Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2644:4518];tablet_id=9437184;parent=[1:2602:4483];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-17T18:07:06.913847Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:07:06.914486Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:07:06.914526Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:07:06.914553Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:07:06.914600Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:06.914661Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:06.914719Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:06.914790Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912891942;tx_id=18446744073709551615;;current_snapshot_ts=1744913143048; 2025-04-17T18:07:06.914849Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:06.914914Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:06.914967Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:06.915065Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:06.916430Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=29;path_id=1; 2025-04-17T18:07:06.917888Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 10487, MsgBus: 13218 2025-04-17T18:06:33.749327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345099282586082:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:33.749449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ceb/r3tmp/tmpHjZ76Q/pdisk_1.dat 2025-04-17T18:06:34.037059Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10487, node 1 2025-04-17T18:06:34.102796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:34.102820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:34.102830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:34.102977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:34.115604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:34.115754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:34.117212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13218 TClient is connected to server localhost:13218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:34.530263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:34.547658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:34.658326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:34.790583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:34.846709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:36.256030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345112167489758:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.256220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.575550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.605313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.633331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.659371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.687537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.740426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.767913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.844233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345112167490421:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.844295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345112167490426:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.844302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.847504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:36.856894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345112167490428:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:36.954988Z node 1 :TX_PROXY ERROR: Actor# [1:7494345112167490485:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:37.775391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:38.749557Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345099282586082:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:38.749637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24489, MsgBus: 29633 2025-04-17T18:06:49.297574Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345171262680805:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:49.297641Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ceb/r3tmp/tmpAaZurb/pdisk_1.dat 2025-04-17T18:06:49.402571Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:49.434465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 24489, node 2 2025-04-17T18:06:49.434545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:49.435929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:49.466395Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:49.466419Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:49.466427Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:49.466564Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29633 TClient is connected to server localhost:29633 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:49.849904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:49.866455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:49.918360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:50.094665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:50.156508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:52.584883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345184147584468:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:52.584988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:52.636470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.668304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.695811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.774797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.813438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.850435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.884306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.969507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345184147585133:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:52.969590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:52.969983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345184147585138:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:52.973882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:52.987075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345184147585140:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:06:53.061125Z node 2 :TX_PROXY ERROR: Actor# [2:7494345188442552494:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:54.043290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:54.297994Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345171262680805:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:54.298099Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:07:04.400780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:07:04.400813Z node 2 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913741.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913741.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912541.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913741.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913741.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912541.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912541.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124912541.000000s;Name=;Codec=}; 2025-04-17T18:05:41.808910Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:41.896380Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:41.916999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:41.917251Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:41.924472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:41.924717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:41.924954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:41.925053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:41.925141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:41.925220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:41.925298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:41.925376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:41.925441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:41.926276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:41.926461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:41.926576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:41.947214Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:41.947348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:41.947444Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:41.947640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:41.947835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:41.947920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:41.947959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:41.948046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:41.948094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:41.948144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:41.948174Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:41.948336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:41.948394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:41.948424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:41.948444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:41.948515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:41.948567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:41.948611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:41.948641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:41.948713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:41.948752Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:41.948781Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:41.948832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:41.948879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:41.948917Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:41.949351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-17T18:05:41.949442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-17T18:05:41.949525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-17T18:05:41.949632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-04-17T18:05:41.949798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:41.949847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:41.949875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:41.950044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:41.950086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:41.950126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... 
nishLoadingTime=518; 2025-04-17T18:07:07.140956Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=57558; 2025-04-17T18:07:07.153575Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12524; 2025-04-17T18:07:07.166927Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12157; 2025-04-17T18:07:07.167060Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13359; 2025-04-17T18:07:07.167252Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=114; 2025-04-17T18:07:07.167397Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=89; 2025-04-17T18:07:07.167558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=108; 2025-04-17T18:07:07.167696Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=87; 2025-04-17T18:07:07.181308Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13527; 2025-04-17T18:07:07.199121Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17679; 2025-04-17T18:07:07.199304Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=62; 2025-04-17T18:07:07.199394Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=34; 2025-04-17T18:07:07.199450Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-17T18:07:07.199504Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-17T18:07:07.199558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-17T18:07:07.199651Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=49; 2025-04-17T18:07:07.199705Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-17T18:07:07.199814Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=68; 2025-04-17T18:07:07.199877Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-04-17T18:07:07.199953Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-04-17T18:07:07.200057Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=60; 2025-04-17T18:07:07.200478Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=376; 2025-04-17T18:07:07.200529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=125639; 2025-04-17T18:07:07.200695Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:07:07.200816Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:07:07.200880Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:07:07.200949Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:07:07.221684Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:07:07.221865Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:07.221934Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:07.222016Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:07.222086Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912891851;tx_id=18446744073709551615;;current_snapshot_ts=1744913142957; 2025-04-17T18:07:07.222136Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:07.222219Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:07.222261Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:07.222360Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:07.222913Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:07.223000Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2644:4518];tablet_id=9437184;parent=[1:2602:4483];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-17T18:07:07.223768Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:07:07.224087Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:07:07.224122Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:07:07.224149Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:07:07.224196Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:07.224259Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:07.224323Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:07.224391Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912891851;tx_id=18446744073709551615;;current_snapshot_ts=1744913142957; 2025-04-17T18:07:07.224440Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:07.224490Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:07.224544Z node 1 :TX_COLUMNSHARD 
DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:07.224649Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:07.226409Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=29;path_id=1; 2025-04-17T18:07:07.227968Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> TTicketParserTest::AuthorizationRetryError >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913745.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913745.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913745.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913745.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912545.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913745.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124913745.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912545.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912545.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124912545.000000s;Name=;Codec=}; 2025-04-17T18:05:45.798038Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:45.897562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:45.921654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:45.921958Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:45.929620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:45.929822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:45.930044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:45.930170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:45.930350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:45.930456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:45.930569Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:45.930683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:45.930800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:45.930906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.931006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:45.931102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:45.959328Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:45.959465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:45.959502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:45.959619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.959728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:45.959782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:45.959811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:45.959869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:45.959911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:45.959938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:45.959958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:45.960095Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:45.960138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:45.960163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:45.960184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:45.960235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:45.960267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:45.960294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:45.960311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:45.960355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:45.960376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:45.960392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:45.960421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:45.960449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:45.960471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:45.960735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=30; 2025-04-17T18:05:45.960812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-17T18:05:45.960886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:05:45.960951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-04-17T18:05:45.961115Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:45.961156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:45.961187Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:45.961307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:45.961331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:45.961349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... ECUTE:finishLoadingTime=446; 2025-04-17T18:07:09.278611Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=51022; 2025-04-17T18:07:09.287980Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=9296; 2025-04-17T18:07:09.296381Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7612; 2025-04-17T18:07:09.296490Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8430; 2025-04-17T18:07:09.296677Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=108; 2025-04-17T18:07:09.296803Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=72; 2025-04-17T18:07:09.296956Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=108; 2025-04-17T18:07:09.297066Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=64; 2025-04-17T18:07:09.305656Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8533; 2025-04-17T18:07:09.316398Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10651; 2025-04-17T18:07:09.316521Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=33; 
2025-04-17T18:07:09.316582Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-17T18:07:09.316624Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-17T18:07:09.316661Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-04-17T18:07:09.316698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-17T18:07:09.316769Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-04-17T18:07:09.316808Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-17T18:07:09.316906Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2025-04-17T18:07:09.316954Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-17T18:07:09.317010Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2025-04-17T18:07:09.317086Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=41; 2025-04-17T18:07:09.317400Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=279; 2025-04-17T18:07:09.317440Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=97192; 2025-04-17T18:07:09.317599Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:07:09.317710Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:07:09.317754Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:07:09.317817Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:07:09.333791Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:07:09.333934Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:09.333984Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:09.334051Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:07:09.334114Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912895860;tx_id=18446744073709551615;;current_snapshot_ts=1744913146966; 2025-04-17T18:07:09.334173Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:09.334240Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.334280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.334373Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:09.334840Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:09.334927Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2615:4489];tablet_id=9437184;parent=[1:2575:4456];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-04-17T18:07:09.335600Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:07:09.335996Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:07:09.336030Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:07:09.336054Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:07:09.336095Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:09.336156Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:09.336214Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:07:09.336274Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912895860;tx_id=18446744073709551615;;current_snapshot_ts=1744913146966; 2025-04-17T18:07:09.336316Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:09.336366Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.336401Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.336487Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:09.338479Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=29;path_id=1; 2025-04-17T18:07:09.339717Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913744.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913744.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912544.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912544.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912544.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912544.000000s;Name=;Codec=}; 2025-04-17T18:05:44.328786Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:44.428033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:44.455396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:44.455722Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:44.464572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:44.464835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:44.465123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:44.465271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:44.465414Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:44.465539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:44.465666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:44.465793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:44.465917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:44.466037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.466143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:44.466266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:44.494657Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:44.494837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:44.494918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:44.495103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:44.495273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:44.495365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:44.495410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:44.495505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:44.495578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:44.495623Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:44.495653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:44.495813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:44.495877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:44.495919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:44.495952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:44.496039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:44.496095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:44.496137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:44.496173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:44.496243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:44.496281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:44.496309Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:44.496357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:44.496403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:44.496440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:44.496838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-17T18:05:44.496928Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-17T18:05:44.497014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-17T18:05:44.497096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T18:05:44.497303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:44.497385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:44.497426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:44.497656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:44.497712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.497744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... nishLoadingTime=528; 2025-04-17T18:07:09.162582Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=51760; 2025-04-17T18:07:09.173270Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=10588; 2025-04-17T18:07:09.184430Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=10150; 2025-04-17T18:07:09.184561Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=11176; 2025-04-17T18:07:09.184760Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=116; 2025-04-17T18:07:09.184896Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=80; 2025-04-17T18:07:09.185084Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=131; 2025-04-17T18:07:09.185225Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=82; 2025-04-17T18:07:09.201046Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15735; 2025-04-17T18:07:09.220023Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18825; 2025-04-17T18:07:09.220196Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=57; 2025-04-17T18:07:09.220290Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-04-17T18:07:09.220347Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-17T18:07:09.220405Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-17T18:07:09.220456Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-17T18:07:09.220564Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=59; 2025-04-17T18:07:09.220631Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-04-17T18:07:09.220735Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2025-04-17T18:07:09.220790Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-17T18:07:09.220886Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-04-17T18:07:09.221012Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=71; 2025-04-17T18:07:09.221450Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=376; 2025-04-17T18:07:09.221500Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=117475; 2025-04-17T18:07:09.221675Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:07:09.221789Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:07:09.221851Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:07:09.221919Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:07:09.243652Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:07:09.243839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:09.243908Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:09.243988Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:09.244057Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912894384;tx_id=18446744073709551615;;current_snapshot_ts=1744913145491; 2025-04-17T18:07:09.244102Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:09.244155Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.244195Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.244297Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:09.244856Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:09.244940Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2644:4518];tablet_id=9437184;parent=[1:2602:4483];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-17T18:07:09.245597Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:07:09.245920Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:07:09.245952Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:07:09.245988Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:07:09.246033Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:09.246094Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:09.246165Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:09.246252Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912894384;tx_id=18446744073709551615;;current_snapshot_ts=1744913145491; 2025-04-17T18:07:09.246301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:09.246354Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.246398Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:09.246489Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:09.247733Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=29;path_id=1; 2025-04-17T18:07:09.249322Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] Test command err: 2025-04-17T18:06:52.610638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345181074748588:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:52.610886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca5/r3tmp/tmpofgKBW/pdisk_1.dat 2025-04-17T18:06:52.980156Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6011, node 1 2025-04-17T18:06:53.038762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:53.038915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:53.040542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:53.048321Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:53.048352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:53.048361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:53.048503Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:53.328657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:53.342418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:06:53.345994Z node 1 :TICKET_PARSER DEBUG: Ticket 9152D354FD35E95BA16741F0FBB9583D7141787942320AEC568D127730F05CF6 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:06:56.153596Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345201011826217:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:56.153705Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca5/r3tmp/tmp1wfPR4/pdisk_1.dat 2025-04-17T18:06:56.293800Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21740, node 2 2025-04-17T18:06:56.342284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:56.342756Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:56.348604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:56.385190Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:56.385217Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:56.385250Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:56.385368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8974 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:56.632881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:56.642341Z node 2 :TICKET_PARSER DEBUG: Ticket B1BEEE3F7F2ED1FFA1FC014FD970F11155A524A88E6E3F9E950C34CE62299AB8 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2025-04-17T18:06:56.643051Z node 2 :TICKET_PARSER ERROR: Ticket B1BEEE3F7F2ED1FFA1FC014FD970F11155A524A88E6E3F9E950C34CE62299AB8: Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers 2025-04-17T18:06:59.689699Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345212945009214:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:59.689753Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca5/r3tmp/tmpsZoMWr/pdisk_1.dat 2025-04-17T18:06:59.794637Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30721, node 3 2025-04-17T18:06:59.828162Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:59.828234Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:59.830859Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:59.847099Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:59.847119Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:59.847128Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:59.847245Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:00.043523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:00.048442Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:00.051476Z node 3 :TICKET_PARSER DEBUG: Ticket 3ACFF066073814D8624A05AE82D0B03F9DCF3596868C4F5F48A12D7390A7B928 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-17T18:07:00.052059Z node 3 :TICKET_PARSER ERROR: Ticket 3ACFF066073814D8624A05AE82D0B03F9DCF3596868C4F5F48A12D7390A7B928: Cannot create token from certificate. 
Client certificate failed verification 2025-04-17T18:07:02.872552Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345224376364595:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:02.872623Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca5/r3tmp/tmpN1exr6/pdisk_1.dat 2025-04-17T18:07:03.009546Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:03.021448Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:03.021527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:03.023112Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15000, node 4 2025-04-17T18:07:03.070083Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:03.070109Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:03.070116Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:03.070284Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:03.300836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:03.305488Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:03.307439Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:03.307465Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:03.307474Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:03.308913Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-17T18:07:03.308962Z node 4 :GRPC_CLIENT DEBUG: [5170000a8808] Connect to grpc://localhost:16046 2025-04-17T18:07:03.312888Z node 4 :GRPC_CLIENT DEBUG: [5170000a8808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-17T18:07:03.322104Z node 4 :GRPC_CLIENT DEBUG: [5170000a8808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-17T18:07:03.322323Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-17T18:07:03.322434Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:03.322883Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:03.322902Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:03.322907Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:03.322943Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-17T18:07:03.323083Z node 4 :GRPC_CLIENT DEBUG: [5170000a8808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account 
{ id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-17T18:07:03.324719Z node 4 :GRPC_CLIENT DEBUG: [5170000a8808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-17T18:07:03.324829Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-17T18:07:03.324885Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2025-04-17T18:07:06.298260Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345240646497752:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:06.298617Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca5/r3tmp/tmpC0NBCU/pdisk_1.dat 2025-04-17T18:07:06.483470Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:06.510002Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:06.510098Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:06.511479Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1135, node 5 2025-04-17T18:07:06.563849Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:06.563871Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:06.563880Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:06.564031Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:06.832965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:06.841763Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:06.841792Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:06.841800Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:06.841865Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-17T18:07:06.841903Z node 5 :GRPC_CLIENT DEBUG: [517000100388] Connect to grpc://localhost:12164 2025-04-17T18:07:06.842840Z node 5 :GRPC_CLIENT DEBUG: [517000100388] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-17T18:07:06.853290Z node 5 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-04-17T18:07:06.853462Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:07:06.853496Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:07:06.853537Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:06.853638Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-17T18:07:06.854012Z node 5 :GRPC_CLIENT DEBUG: [517000100388] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-04-17T18:07:06.856560Z node 5 :GRPC_CLIENT DEBUG: [517000100388] Status 1 CANCELLED 2025-04-17T18:07:06.857147Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-04-17T18:07:06.857174Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-04-17T18:07:06.857203Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 
[GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 4577, MsgBus: 29432 2025-04-17T18:07:06.207451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345242202201846:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:06.207507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5e/r3tmp/tmplyULp4/pdisk_1.dat 2025-04-17T18:07:06.570481Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4577, node 1 2025-04-17T18:07:06.581560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:06.581891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:06.591812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:06.659515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:06.659537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:06.659563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:06.659679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29432 TClient is connected to server localhost:29432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:07:07.141901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:07.182878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:07:07.312689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:07:07.461163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:07.537979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:08.987030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345250792138231:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:08.987139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:09.359875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:09.411868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:09.443978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:09.486279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:09.553984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:09.584877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:09.652552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:09.723919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345255087106196:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:09.724002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:09.724170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345255087106201:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:09.727564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:07:09.736378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345255087106203:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:07:09.795664Z node 1 :TX_PROXY ERROR: Actor# [1:7494345255087106258:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:10.615594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:07:10.648742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:07:10.704533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:07:10.736520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:07:10.758144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:07:10.821304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:07:11.218215Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345242202201846:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:11.218337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144913747.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913747.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913747.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913747.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913747.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912547.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913747.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912547.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912547.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912547.000000s;Name=;Codec=}; 2025-04-17T18:05:48.047029Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:48.148574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:48.171357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:48.171626Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:48.179167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:48.179366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:48.179585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:48.179709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:48.179820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:48.179919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:48.180037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:48.180146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:48.180247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-04-17T18:05:48.180344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:48.180442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:48.180531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:48.207468Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:48.207619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:48.207679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:48.207859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:48.208040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:48.208123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:48.208167Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:48.208257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:48.208311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:48.208359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:48.208390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:48.208544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:48.208598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:48.208634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:48.208664Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:48.208746Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:48.208794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:48.208832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:48.208857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:48.208925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:48.208959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:48.208983Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:48.209026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:48.209067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:48.209099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:48.209463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-17T18:05:48.209544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-17T18:05:48.209617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:05:48.209684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-17T18:05:48.209868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:48.209928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:48.209974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:48.210165Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:48.210251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:48.210289Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... finishLoadingTime=579; 2025-04-17T18:07:11.933764Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=51491; 2025-04-17T18:07:11.945542Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11687; 2025-04-17T18:07:11.957662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=10985; 2025-04-17T18:07:11.957774Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12130; 2025-04-17T18:07:11.957943Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=98; 2025-04-17T18:07:11.958061Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2025-04-17T18:07:11.958228Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=121; 2025-04-17T18:07:11.958344Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=72; 2025-04-17T18:07:11.970894Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12470; 2025-04-17T18:07:11.986764Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15746; 2025-04-17T18:07:11.986904Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=41; 2025-04-17T18:07:11.986997Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=41; 2025-04-17T18:07:11.987049Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-17T18:07:11.987098Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-17T18:07:11.987145Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-17T18:07:11.987228Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-04-17T18:07:11.987281Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-17T18:07:11.987372Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2025-04-17T18:07:11.987422Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-17T18:07:11.987485Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2025-04-17T18:07:11.987578Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=51; 2025-04-17T18:07:11.987982Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=362; 2025-04-17T18:07:11.988021Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=113387; 2025-04-17T18:07:11.988207Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:07:11.988322Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:07:11.988375Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:07:11.988437Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:07:12.006213Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:07:12.006371Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:12.006431Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:12.006503Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 
2025-04-17T18:07:12.006568Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912898105;tx_id=18446744073709551615;;current_snapshot_ts=1744913149211; 2025-04-17T18:07:12.006611Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:12.006662Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:12.006698Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:12.006793Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:12.007275Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:12.007352Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2615:4489];tablet_id=9437184;parent=[1:2575:4456];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-04-17T18:07:12.007972Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:07:12.008356Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:07:12.008387Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:07:12.008413Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:07:12.008456Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:12.008513Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:12.008565Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-17T18:07:12.008622Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912898105;tx_id=18446744073709551615;;current_snapshot_ts=1744913149211; 2025-04-17T18:07:12.008666Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:12.008708Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:12.008745Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:12.008827Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:12.009857Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=29;path_id=1; 2025-04-17T18:07:12.011422Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2575:4456];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> TTicketParserTest::AuthorizationModify [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144913747.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913747.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124913747.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144913747.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912547.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124913747.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124913747.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124912547.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124912547.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124912547.000000s;Name=;Codec=}; 2025-04-17T18:05:48.096149Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:48.180708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:48.198436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:48.198682Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:48.205689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:48.205879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:48.206090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:48.206243Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:48.206353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:48.206445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:48.206551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:48.206652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:48.206762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:48.206862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:48.206958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:48.207043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:48.231832Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:48.231972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:48.232028Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:48.232177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:48.232312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:48.232382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:48.232420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:48.232495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-17T18:05:48.232545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:48.232583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:48.232610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:48.232745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:48.232794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:48.232827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:48.232853Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:48.232924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:48.232965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:48.233001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:48.233024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:48.233084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:48.233114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:48.233137Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:48.233182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:48.233216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:48.233243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:48.233579Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-17T18:05:48.233654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-17T18:05:48.233725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:05:48.233789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-17T18:05:48.233956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:48.234027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:48.234083Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:48.234415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:48.234472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:48.234505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... finishLoadingTime=534; 2025-04-17T18:07:13.249480Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=52564; 2025-04-17T18:07:13.262037Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12484; 2025-04-17T18:07:13.274769Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11729; 2025-04-17T18:07:13.274872Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12744; 2025-04-17T18:07:13.275088Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=148; 2025-04-17T18:07:13.275203Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2025-04-17T18:07:13.275345Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=94; 2025-04-17T18:07:13.275451Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=64; 
2025-04-17T18:07:13.290007Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14483; 2025-04-17T18:07:13.308279Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18172; 2025-04-17T18:07:13.308416Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=49; 2025-04-17T18:07:13.308517Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=50; 2025-04-17T18:07:13.308570Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-17T18:07:13.308621Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-17T18:07:13.308680Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-17T18:07:13.308770Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-04-17T18:07:13.308818Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-17T18:07:13.308910Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-04-17T18:07:13.308971Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-17T18:07:13.309053Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=42; 2025-04-17T18:07:13.309148Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2025-04-17T18:07:13.309492Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=305; 2025-04-17T18:07:13.309530Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=119730; 2025-04-17T18:07:13.309671Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:07:13.309769Z 
node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:07:13.309821Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:07:13.309879Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:07:13.327872Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-17T18:07:13.328030Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:13.328093Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:13.328163Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:13.328226Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912898148;tx_id=18446744073709551615;;current_snapshot_ts=1744913149255; 2025-04-17T18:07:13.328275Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:13.328322Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:13.328376Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:13.328476Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:13.328935Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:13.329012Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2644:4518];tablet_id=9437184;parent=[1:2602:4483];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-17T18:07:13.329493Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:07:13.329946Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:07:13.329978Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
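The StartCleanup / StartCleanupStop lines above show the background-cleanup gate: the shard counts candidate portions against batch limits (max_portions=1000, max_chunks=500000) and, with portions_prepared=0, records skip_reason=no_changes instead of scheduling work. A hedged sketch of that gating decision; the field names are taken from the log, but the decision logic itself is an assumption:

    #include <cstdint>
    #include <iostream>

    // Candidate counters for one background cleanup pass, modeled on the
    // StartCleanup lines above (portions_prepared, chunks).
    struct CleanupCandidates {
        uint32_t portions_prepared = 0;
        uint32_t chunks = 0;
    };

    bool ShouldStartCleanup(const CleanupCandidates& c,
                            uint32_t max_portions, uint32_t max_chunks) {
        if (c.portions_prepared == 0) return false;  // nothing to drop
        return c.portions_prepared <= max_portions && c.chunks <= max_chunks;
    }

    int main() {
        CleanupCandidates c{/*portions_prepared=*/0, /*chunks=*/0};
        if (!ShouldStartCleanup(c, /*max_portions=*/1000, /*max_chunks=*/500000))
            std::cout << "background=cleanup;skip_reason=no_changes\n";
    }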
2025-04-17T18:07:13.330002Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:07:13.330047Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:07:13.330105Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:07:13.330197Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-17T18:07:13.330257Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912898148;tx_id=18446744073709551615;;current_snapshot_ts=1744913149255; 2025-04-17T18:07:13.330301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:07:13.330348Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:13.330388Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:07:13.330474Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:07:13.331589Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1450;size=29;path_id=1; 2025-04-17T18:07:13.333016Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2602:4483];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test 
command err: 2025-04-17T18:06:45.645638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345150780949800:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:45.645720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb2/r3tmp/tmpUBzhMg/pdisk_1.dat 2025-04-17T18:06:46.026658Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:46.052633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:46.053139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:46.056323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18399, node 1 2025-04-17T18:06:46.203951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:46.203981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:46.203991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:46.204123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21445 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:46.627980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:46.653434Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-17T18:06:46.653521Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Connect to grpc://localhost:2650 2025-04-17T18:06:46.657770Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-17T18:06:46.672157Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-17T18:06:46.672324Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:06:46.672408Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:06:46.672527Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-17T18:06:46.673778Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-17T18:06:46.675615Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-17T18:06:46.675739Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:06:46.675773Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:06:47.669141Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:06:47.669264Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-17T18:06:47.669545Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-17T18:06:47.671816Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-17T18:06:47.671936Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:06:47.671969Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:06:48.669529Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:06:48.669640Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-17T18:06:48.669916Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } 
resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-17T18:06:48.671688Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-17T18:06:48.671793Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-17T18:06:48.671827Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:06:50.645875Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345150780949800:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:50.645968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:06:50.670417Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:06:50.670506Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-17T18:06:50.670692Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-17T18:06:50.672899Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:06:50.673185Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-17T18:06:59.383691Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345211197882085:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:59.384806Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb2/r3tmp/tmpXfFuBw/pdisk_1.dat 2025-04-17T18:06:59.491426Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:59.511675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:59.511765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:59.517160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8256, node 2 2025-04-17T18:06:59.578767Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:59.578797Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:59.578810Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:59.578923Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23963 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:59.771873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:59.778326Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:06:59.783896Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-17T18:06:59.783971Z node 2 :GRPC_CLIENT DEBUG: [5170000d5c88] Connect to grpc://localhost:4256 2025-04-17T18:06:59.785138Z node 2 :GRPC_CLIENT DEBUG: [5170000d5c88] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-17T18:06:59.800417Z node 2 :GRPC_CLI ... T18:07:06.465728Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:06.465810Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:06.467522Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26913, node 4 2025-04-17T18:07:06.530794Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:06.530819Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:06.530827Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:06.530974Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16450 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:06.806956Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:06.816512Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:06.816544Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:06.816551Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:06.816602Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:06.816672Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2025-04-17T18:07:06.816699Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2025-04-17T18:07:06.816725Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-17T18:07:06.816747Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2025-04-17T18:07:06.816817Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Connect to grpc://localhost:32252 2025-04-17T18:07:06.820042Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:06.820379Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:06.820486Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:06.820622Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:06.820721Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path 
{ id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:06.833377Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Status 16 Access Denied 2025-04-17T18:07:06.833390Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Status 16 Access Denied 2025-04-17T18:07:06.833597Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Status 16 Access Denied 2025-04-17T18:07:06.834310Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:06.834389Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:06.834414Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:06.834895Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Status 16 Access Denied 2025-04-17T18:07:06.834976Z node 4 :GRPC_CLIENT DEBUG: [5170000e4a88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:06.838273Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:06.838363Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-17T18:07:06.838394Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-17T18:07:06.839909Z node 4 :GRPC_CLIENT DEBUG: [5170000e5188] Connect to grpc://localhost:1053 2025-04-17T18:07:06.840891Z node 4 :GRPC_CLIENT DEBUG: [5170000e5188] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-17T18:07:06.849499Z node 4 :GRPC_CLIENT DEBUG: [5170000e5188] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-17T18:07:06.849816Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-17T18:07:10.041950Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345258310660891:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:10.042016Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb2/r3tmp/tmpqgPsHb/pdisk_1.dat 2025-04-17T18:07:10.188227Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:10.200444Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:10.200530Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:10.205264Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7091, node 5 2025-04-17T18:07:10.242925Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:10.242969Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:10.242978Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:10.243131Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19023 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:10.499883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:10.507135Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:10.507175Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:10.507185Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:10.507289Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-04-17T18:07:10.507333Z node 5 :GRPC_CLIENT DEBUG: [517000124508] Connect to grpc://localhost:29577 2025-04-17T18:07:10.508405Z node 5 :GRPC_CLIENT DEBUG: [517000124508] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-17T18:07:10.518196Z node 5 :GRPC_CLIENT DEBUG: [517000124508] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:10.518416Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:10.518899Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:10.518919Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:10.518926Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:10.518991Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-17T18:07:10.519273Z node 5 :GRPC_CLIENT DEBUG: [517000124508] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-17T18:07:10.520825Z node 5 :GRPC_CLIENT DEBUG: [517000124508] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:10.520974Z node 5 :TICKET_PARSER DEBUG: 
Ticket **** (8E120919) () has now valid token of user1@as >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2025-04-17T18:06:57.906983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345204108366747:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:57.907222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca3/r3tmp/tmpduQOYJ/pdisk_1.dat 2025-04-17T18:06:58.213236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:58.218318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:58.218412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:58.221728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18354, node 1 2025-04-17T18:06:58.280722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:58.280750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:58.280762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:58.280918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:58.562696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
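Failed permission checks in these traces are tagged either retryable:1 (status 14, Service Unavailable) or retryable:0 (status 16, Access Denied), and only retryable ones are re-driven by the refresh loop. A minimal classifier consistent with what the log shows; the real policy may cover more status codes:

    #include <iostream>
    #include <string>

    // Status 14 (UNAVAILABLE) is transient and worth retrying; status 16
    // with "Access Denied" is reported as a permanent per-permission error.
    bool IsRetryable(int grpc_status) {
        return grpc_status == 14;
    }

    void ReportPermission(const std::string& permission, int status,
                          const std::string& message) {
        std::cout << "permission " << permission << " now has a "
                  << (IsRetryable(status) ? "retryable" : "permanent")
                  << " error \"" << message << "\" retryable:"
                  << (IsRetryable(status) ? 1 : 0) << "\n";
    }

    int main() {
        ReportPermission("something.read", 14, "Service Unavailable");
        ReportPermission("somewhere.sleep", 16, "Access Denied");
    }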
2025-04-17T18:06:58.587182Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:06:58.587252Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:58.587335Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:06:58.587726Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:06:58.587857Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:10987 2025-04-17T18:06:58.590965Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-17T18:06:58.603269Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:06:58.603491Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-17T18:06:58.604850Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Connect to grpc://localhost:22807 2025-04-17T18:06:58.605699Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-17T18:06:58.615079Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-17T18:06:58.615430Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-17T18:07:00.977293Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345216267810699:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:00.977343Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca3/r3tmp/tmpcmE6B9/pdisk_1.dat 2025-04-17T18:07:01.143521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:01.170324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:01.170403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:01.173593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6404, node 2 2025-04-17T18:07:01.214005Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:01.214036Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:01.214044Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:01.214155Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7943 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:01.421299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:01.425766Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:01.427889Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2025-04-17T18:07:04.111067Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345231862678733:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:04.111185Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca3/r3tmp/tmpigen0L/pdisk_1.dat 2025-04-17T18:07:04.275062Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8883, node 3 2025-04-17T18:07:04.306976Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:04.307050Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:04.310610Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:04.340950Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:04.340972Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:04.340983Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:04.341111Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26750 WaitRootIsUp 'Root'... 
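Credentials are never printed raw in these traces: the access key AKIAIOSFODNN7EXAMPLE appears as AKIA****MPLE (B3EDC139), and IAM tokens as **** (8E120919). A sketch of that masking format (prefix and suffix kept, plus a short checksum so lines about the same ticket can be correlated); the hash choice, FNV-1a here, is an assumption, and only the visible layout comes from the log:

    #include <cstdint>
    #include <iomanip>
    #include <iostream>
    #include <sstream>
    #include <string>

    // Masks a ticket for logging: keep the first and last 4 characters when
    // the ticket is long enough, otherwise mask fully, and append a short
    // checksum for correlation. FNV-1a is an illustrative hash choice, not
    // necessarily what YDB uses to produce values like (B3EDC139).
    std::string MaskTicket(const std::string& ticket) {
        uint32_t h = 2166136261u;  // FNV-1a 32-bit offset basis
        for (unsigned char c : ticket) { h ^= c; h *= 16777619u; }
        std::ostringstream out;
        if (ticket.size() >= 12)
            out << ticket.substr(0, 4) << "****"
                << ticket.substr(ticket.size() - 4);
        else
            out << "****";
        out << " (" << std::uppercase << std::hex << std::setw(8)
            << std::setfill('0') << h << ")";
        return out.str();
    }

    int main() {
        // Prints "AKIA****MPLE (<checksum>)", matching the layout above.
        std::cout << MaskTicket("AKIAIOSFODNN7EXAMPLE") << "\n";
    }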
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:04.558484Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:04.564188Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:04.565979Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2025-04-17T18:07:07.244614Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345246952000464:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:07.245225Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca3/r3tmp/tmp1LgPjh/pdisk_1.dat 2025-04-17T18:07:07.350834Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:07.376164Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:07.376252Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:07.378499Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22551, node 4 2025-04-17T18:07:07.415984Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:07.416007Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:07.416014Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (em ... 
16 Access Denied 2025-04-17T18:07:07.677801Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:07.677821Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2025-04-17T18:07:07.678271Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:07.678292Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:07.678299Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:07.678315Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:07.678417Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:07.679696Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Status 16 Access Denied 2025-04-17T18:07:07.679789Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:07.679809Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-04-17T18:07:07.681394Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:07.681413Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:07.681420Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:07.681438Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:07.681549Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-17T18:07:07.682796Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Status 16 Access Denied 2025-04-17T18:07:07.682949Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:07.682972Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-17T18:07:07.683423Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:07.683442Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:07.683450Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:07.683469Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:07.683579Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:07.684615Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:07.684722Z node 4 :TICKET_PARSER TRACE: 
Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:07.684798Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:07.685292Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:07.685309Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:07.685315Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:07.685331Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:07.685435Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-17T18:07:07.686500Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:07.686690Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:07.686744Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:07.687196Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:07.687215Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:07.687220Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:07.687235Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2025-04-17T18:07:07.687325Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2025-04-17T18:07:07.688401Z node 4 :GRPC_CLIENT DEBUG: [517000008608] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:07.688490Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2025-04-17T18:07:07.688547Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:10.970796Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345258051249164:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:10.971047Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca3/r3tmp/tmpJG2SSj/pdisk_1.dat 2025-04-17T18:07:11.092423Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22649, node 5 2025-04-17T18:07:11.125379Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:11.125465Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:11.127292Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:11.164788Z node 5 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:11.164813Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:11.164820Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:11.164947Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:11.415540Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:11.422111Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:11.422152Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:11.422162Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:11.422216Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:11.422285Z node 5 :GRPC_CLIENT DEBUG: [5170000b5308] Connect to grpc://localhost:13337 2025-04-17T18:07:11.423101Z node 5 :GRPC_CLIENT DEBUG: [5170000b5308] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:11.435925Z node 5 :GRPC_CLIENT DEBUG: [5170000b5308] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:11.436078Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:11.436190Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:11.436734Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:11.436763Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:11.436772Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:11.436807Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:11.436845Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-17T18:07:11.436998Z node 5 :GRPC_CLIENT DEBUG: 
[5170000b5308] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:11.437549Z node 5 :GRPC_CLIENT DEBUG: [5170000b5308] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:11.440288Z node 5 :GRPC_CLIENT DEBUG: [5170000b5308] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:11.440343Z node 5 :GRPC_CLIENT DEBUG: [5170000b5308] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:11.440418Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:11.440471Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-17T18:07:11.440551Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> TTicketParserTest::TicketFromCertificateWithValidationGood |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginCheckRemovedUser >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> 
TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> KeyValueReadStorage::ReadRangeOk1Key >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2025-04-17T18:07:18.797506Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T18:07:18.800490Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-17T18:07:18.810635Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-17T18:07:18.810702Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-17T18:07:18.816642Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2025-04-17T18:07:18.816692Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2025-04-17T18:07:18.816754Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> DataShardTxOrder::RandomPoints_DelayData [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2025-04-17T18:07:15.040630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:15.041157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:15.041216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00107f/r3tmp/tmprv17Qu/pdisk_1.dat 2025-04-17T18:07:15.496786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.539096Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:15.584852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:15.584983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:15.597167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:15.686977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.743575Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:15.743860Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:15.794391Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:15.794503Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:15.796983Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:15.797068Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:15.797118Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:15.798878Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:15.799021Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:15.799089Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:15.810669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:15.832439Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:15.833626Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:15.833778Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:15.833825Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:15.833858Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:15.833888Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:15.835118Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:15.835257Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:15.835437Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:15.835480Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:15.835587Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:15.835628Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:15.835723Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:15.837061Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:15.837455Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:15.837562Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:15.839492Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:15.850294Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:15.850396Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:15.999335Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:16.003800Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:16.003868Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.004558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.004622Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:16.004692Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:16.004911Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:16.005030Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:16.005767Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.005845Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:16.009309Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:16.010633Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:07:16.012072Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:16.012122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.012615Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:16.012685Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.013824Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.013861Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:16.013918Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:16.013970Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:16.014016Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:16.014110Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.022457Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.024282Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:16.024789Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:16.024895Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:16.049162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.049285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.049361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.055687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:16.061582Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.216387Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.218602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:16.281824Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:16.904404Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey02d3ehbbfn20wsz7ntg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q4NTM1NTUtMjM0ZjdlNGMtZDdkZDY2ZjUtYWZiYTMxMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:16.939103Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:16.941142Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:16.941316Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 4-17T18:07:17.043352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:17.043399Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:17.043453Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:17.043703Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:17.043771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:17.044265Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:07:17.044480Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:17.044608Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-17T18:07:17.044671Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 0 2025-04-17T18:07:17.046685Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:17.046729Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-17T18:07:17.046899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:17.046929Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:17.046958Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:17.047074Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:17.047133Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:17.047171Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:19.969892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], 
Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:19.970394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:19.970655Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00107f/r3tmp/tmpjO3fI8/pdisk_1.dat 2025-04-17T18:07:20.208767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:20.236496Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:20.277791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:20.277916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:20.290261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:20.372654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:20.393838Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-17T18:07:20.394117Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:20.441572Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:20.441690Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:20.443552Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:20.443635Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:20.443711Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:20.444092Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:20.444232Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:20.444312Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-17T18:07:20.457665Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:20.457756Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:20.457891Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:20.457994Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-17T18:07:20.458042Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:20.458099Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:20.458140Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.458491Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:20.458604Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:20.458706Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.458748Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.458796Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:20.458849Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.458957Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-04-17T18:07:20.459427Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:20.459635Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:20.459728Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:20.461399Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.472408Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:20.472535Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:20.627908Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:20.628378Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:20.628444Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.628840Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.628899Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:20.628957Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:20.629250Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:20.629398Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:20.629607Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.629673Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:20.630191Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:20.630651Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:07:20.633294Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:20.633353Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.633885Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:20.633964Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.634861Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.634920Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:20.634998Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:20.635077Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:20.635138Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:20.635229Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.636977Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.639116Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:20.639793Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:20.639858Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:20.647202Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-17T18:07:20.647363Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2025-04-17T18:07:20.647592Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: 2025-04-17T18:07:15.249825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:15.250446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:15.250507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001069/r3tmp/tmpQlCf8D/pdisk_1.dat 2025-04-17T18:07:15.610657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.660244Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:15.699975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:15.700112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:15.711677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:15.795436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.836885Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:15.837138Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:15.886214Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:15.886343Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:15.887997Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:15.888076Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:15.888128Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:15.888497Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:15.888637Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:15.888709Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:15.899467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:15.931547Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:15.931754Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:15.931872Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:15.931904Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:15.931935Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:15.931966Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:15.932352Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:15.932485Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:15.932604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:15.932640Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:15.932678Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:15.932721Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:15.932809Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:15.933245Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:15.933497Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:15.933594Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:15.935291Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:15.946073Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:15.946206Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:16.097188Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:16.102430Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:16.102507Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.103274Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.103322Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:16.103384Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:16.103634Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:16.103782Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:16.104602Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.104679Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:16.106885Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:16.107313Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
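
The dumps above repeat a create-if-missing sequence for the default resource pool: TPoolFetcherActor reports NOT_FOUND, TPoolCreatorActor proposes ESchemeOpCreateResourcePool, a retry is scheduled on "Transaction ... completed, doublechecking", and the eventual "path exist, request accepts it" error from a concurrent creator is treated as success. Below is a minimal, self-contained C++ sketch of that pattern only — the function names, status enum, and backoff policy are hypothetical stand-ins for illustration, not YDB's actual API:

    // Illustrative sketch of the create-if-missing flow seen in the log above.
    // FetchPool/CreatePool stand in for TPoolFetcherActor / TPoolCreatorActor;
    // all names and the backoff policy here are hypothetical.
    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    enum class EStatus { Ok, NotFound, AlreadyExists, Retryable };

    bool EnsureDefaultPool(const std::function<EStatus()>& fetch,
                           const std::function<EStatus()>& create,
                           int maxRetries = 5) {
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            EStatus st = fetch();
            if (st == EStatus::Ok) return true;     // pool already provisioned
            if (st != EStatus::NotFound) continue;  // transient error: re-fetch
            switch (create()) {
                case EStatus::Ok:
                case EStatus::AlreadyExists:  // concurrent creator won the race;
                    return true;              // "path exist" is accepted as success
                case EStatus::Retryable:      // e.g. "completed, doublechecking"
                    std::this_thread::sleep_for(
                        std::chrono::milliseconds(50 << attempt));  // backoff, retry
                    break;
                default:
                    return false;
            }
        }
        return false;
    }

    int main() {
        int calls = 0;
        // Simulate: the first fetch misses, then creation reports the path
        // already exists (the race visible in the TX_PROXY ERROR lines above).
        auto fetch = [&] { return calls++ == 0 ? EStatus::NotFound : EStatus::Ok; };
        auto create = [] { return EStatus::AlreadyExists; };
        std::cout << (EnsureDefaultPool(fetch, create) ? "pool ready\n" : "failed\n");
    }
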
2025-04-17T18:07:16.108832Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:16.108884Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.109477Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:16.109558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.111732Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.111792Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:16.111861Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:16.111920Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:16.111969Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:16.112049Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.115795Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.117737Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:16.118359Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:16.118483Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:16.127468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.127602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.127681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.132791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:16.139030Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.286620Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.288421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:16.367630Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:16.904397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey04xcre26qqxg3rsm9r1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQzMzg2ZGItYjc3YzdlYmEtMzUzNWRjYmYtOWY3M2UzZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:16.939069Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:16.941118Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:16.941324Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-17T18:07:20.505399Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.505440Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:20.505482Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.505604Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-04-17T18:07:20.506023Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:20.506247Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:20.506339Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:20.507961Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.518567Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:20.518689Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:20.667718Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:20.668037Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:20.668080Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.668347Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.668390Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:20.668421Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:20.668619Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:20.668756Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:20.668894Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.668935Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:20.669290Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:20.669682Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.671791Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:20.671842Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.672266Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:20.672326Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.672838Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.672882Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:20.672930Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:20.672973Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:20.673010Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:20.673096Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.675053Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.676404Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:20.676865Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:20.676913Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:20.684104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:20.684182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:746:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:20.684242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:20.689106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:20.694655Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.847004Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.849658Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:20.885348Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:20.955807Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey4kafh5mt90cgttpkjf1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI2NWRiZjItMzY1NmI2YjgtMjExZjFhZGItNDQ5NWNjNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:20.958566Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:20.958978Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:20.959153Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-04-17T18:07:20.970278Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.974396Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:20.975562Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:20.986884Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:20.986995Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.987270Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:20.987322Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-17T18:07:20.987606Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.987659Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.987720Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:20.987789Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.987882Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:20.988916Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:20.989290Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:20.989486Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.989534Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:20.989601Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:20.989851Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:20.989923Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
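
The trace above shows the shape of a conditional erase: a TTxDirectBase(48) request executes, a full scan runs ("Conditional erase complete", then WaitForStreamClearance and the read-table scan), and rows matching the condition are removed; the test name indicates the condition compares a Uint64 column interpreted as nanoseconds. A minimal sketch of such a scan-then-erase predicate follows, assuming a plain in-memory map in place of the datashard — the row type, column semantics, and names are hypothetical:

    // Illustrative scan-then-erase sketch; not YDB's datashard implementation.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <vector>

    struct TRow {
        uint64_t Key;
        uint64_t ExpireAtNs;  // Uint64 value, interpreted as nanoseconds since epoch
    };

    // One scan pass: collect matching keys first, then erase, so iteration
    // never invalidates itself — mirroring the scan/erase split in the trace.
    size_t ConditionalErase(std::map<uint64_t, TRow>& table, uint64_t cutoffNs) {
        std::vector<uint64_t> toErase;
        for (const auto& [key, row] : table)
            if (row.ExpireAtNs <= cutoffNs)  // the "condition" in conditional erase
                toErase.push_back(key);
        for (uint64_t key : toErase)
            table.erase(key);
        return toErase.size();
    }

    int main() {
        std::map<uint64_t, TRow> table{
            {1, {1, 1'000}}, {2, {2, 2'000}}, {3, {3, 3'000}}, {4, {4, 4'000}},
        };
        std::cout << "erased " << ConditionalErase(table, 2'500)
                  << ", remaining " << table.size() << "\n";  // erased 2, remaining 2
    }
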
2025-04-17T18:07:20.990674Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:07:20.990934Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:20.991124Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-17T18:07:20.991171Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-17T18:07:21.014900Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:21.014961Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-17T18:07:21.015141Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:21.015189Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:21.015233Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:21.015367Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:21.015429Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:21.015479Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-04-17T18:03:43.766204Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:03:43.787830Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:03:43.788149Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:03:43.792025Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:03:43.837856Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:03:43.842874Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:03:43.910871Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:03:43.923679Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:03:43.923793Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:03:43.923903Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:03:43.927267Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:03:43.927749Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:03:43.927820Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:03:43.930210Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-17T18:03:43.930253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:03:44.031207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:03:44.054309Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:03:44.055608Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:03:44.055697Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:03:44.055722Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:03:44.055744Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:03:44.055767Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.055883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.056254Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.057199Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:03:44.057271Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:03:44.057364Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.057389Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:03:44.057426Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:03:44.057456Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:03:44.057490Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:03:44.057520Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:03:44.057555Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:03:44.057643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.057680Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.057732Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:03:44.060857Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:03:44.060908Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:03:44.060984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:03:44.061273Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:03:44.061315Z node 1 :TX_DATASHARD 
DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:03:44.062144Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:03:44.062210Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.062248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:03:44.062288Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:03:44.062324Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.062597Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:03:44.062639Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:03:44.062687Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:03:44.062719Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.062782Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:03:44.062810Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:03:44.062837Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:03:44.062862Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.062879Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:03:44.074882Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:03:44.074941Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:03:44.074969Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:03:44.075034Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:03:44.075084Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:03:44.077773Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.077813Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:03:44.077840Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:03:44.077915Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:03:44.077932Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:03:44.078016Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:03:44.078078Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.078121Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:03:44.078148Z 
node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:03:44.084888Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:03:44.084950Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:03:44.085144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.085181Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:03:44.085229Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:03:44.085266Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:03:44.085295Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:03:44.085328Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:03:44.085361Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:03:44.085403Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.085462Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:03:44.085516Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:03:44.085547Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:03:44.085690Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:03:44.085721Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:03:44.085753Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:03:44.085779Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:03:44.085801Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:03:44.085854Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:03:44.085876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:03:44.085918Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:03:44.085948Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:03:44.085977Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:03:44.086009Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:03:44.086033Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:03:44.086061Z node 1 :TX_D ... 
r 9437184 txId 521 2025-04-17T18:07:19.960598Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-04-17T18:07:19.960636Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.960665Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2025-04-17T18:07:19.960848Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-04-17T18:07:19.960886Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.960914Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2025-04-17T18:07:19.961035Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-04-17T18:07:19.961069Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.961102Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-04-17T18:07:19.961238Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-04-17T18:07:19.961265Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.961289Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-04-17T18:07:19.961424Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-17T18:07:19.961449Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.961470Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-04-17T18:07:19.961566Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-04-17T18:07:19.961603Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.961630Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-04-17T18:07:19.961710Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-04-17T18:07:19.961733Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.961754Z node 
16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-04-17T18:07:19.961838Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-04-17T18:07:19.961863Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.961883Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-04-17T18:07:19.962003Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-04-17T18:07:19.962028Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.962052Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-04-17T18:07:19.962221Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-04-17T18:07:19.962256Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.962285Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-04-17T18:07:19.962415Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-04-17T18:07:19.962440Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.962461Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-04-17T18:07:19.962549Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-17T18:07:19.962581Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.962612Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-04-17T18:07:19.962718Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-04-17T18:07:19.962744Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.962765Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-04-17T18:07:19.962853Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-04-17T18:07:19.962875Z node 16 :TX_DATASHARD TRACE: 
StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.962898Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-04-17T18:07:19.962998Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-04-17T18:07:19.963021Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.963042Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-04-17T18:07:19.963163Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-04-17T18:07:19.963191Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.963212Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-04-17T18:07:19.963301Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-04-17T18:07:19.963324Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.963346Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-04-17T18:07:19.963444Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-04-17T18:07:19.963469Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.963505Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-04-17T18:07:19.963581Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-17T18:07:19.963613Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.963640Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2025-04-17T18:07:19.990863Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:07:19.990939Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-04-17T18:07:19.991012Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-17T18:07:19.991098Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-17T18:07:19.991152Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-17T18:07:19.991430Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:07:19.991471Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2025-04-17T18:07:19.991524Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [16:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:07:19.991563Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:07:19.991793Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:342:2310]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-17T18:07:19.991839Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:07:19.991879Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 30 30 31 29 28 30 27 26 31 28 27 29 31 26 27 17 - 31 28 27 31 27 31 28 31 31 - 5 13 - 5 - actual 30 30 31 29 28 30 27 26 31 28 27 29 31 26 27 17 - 31 28 27 31 27 31 28 31 31 - 5 13 - 5 - interm 30 30 28 29 28 30 27 26 27 28 27 29 17 26 27 17 - 28 28 27 7 27 18 28 23 25 - 5 13 - 5 - >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-17T18:07:15.146837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:15.147356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:15.147410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00106f/r3tmp/tmpifZYXs/pdisk_1.dat 2025-04-17T18:07:15.496960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.545669Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:15.585869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:15.585982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:15.597472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:15.688091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.743719Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:15.743994Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:15.795282Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:15.795382Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:15.796968Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:15.797041Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:15.797090Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:15.798874Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:15.799025Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:15.799095Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:15.809853Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:15.837747Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:15.837936Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:15.838069Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:15.838123Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:15.838162Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:15.838222Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:15.838593Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:15.838701Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:15.838834Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:15.838872Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:15.838914Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:15.838953Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:15.839042Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:15.839451Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:15.839670Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:15.839751Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:15.841398Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:15.852174Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:15.852253Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:16.000823Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:16.009811Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:16.009881Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.010661Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.010712Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:16.010769Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:16.010971Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:16.011126Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:16.011926Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.011998Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:16.013921Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:16.014385Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:07:16.015733Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:16.015787Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.016162Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:16.016207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.017240Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.017292Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:16.017360Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:16.017423Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:16.017470Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:16.017550Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.021095Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.024044Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:16.024721Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:16.024779Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:16.049061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.049172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.049253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.059929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:16.066547Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.217575Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.220247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:16.293314Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:16.904426Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey02demnnyp6603tp58f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRlOGFkOTgtYTU2YzMyMjgtMmNmMDQwNjEtYTNiYjg2NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:16.939090Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:16.941132Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:16.941321Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 24037893 2025-04-17T18:07:22.050542Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1249:3030], serverId# [2:1250:3031], sessionId# [0:0:0] 2025-04-17T18:07:22.050780Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1249:3030], serverId# [2:1250:3031], sessionId# [0:0:0] 2025-04-17T18:07:22.052201Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1254:3035], serverId# [2:1255:3036], sessionId# [0:0:0] 2025-04-17T18:07:22.052401Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1254:3035], serverId# [2:1255:3036], sessionId# [0:0:0] 2025-04-17T18:07:22.053757Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1259:3040], serverId# [2:1260:3041], sessionId# [0:0:0] 2025-04-17T18:07:22.053958Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1259:3040], serverId# [2:1260:3041], sessionId# [0:0:0] 2025-04-17T18:07:22.056231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:22.060680Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-17T18:07:22.060832Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:07:22.060928Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-17T18:07:22.061060Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:22.061144Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:07:22.061299Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 
2025-04-17T18:07:22.100346Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1282:3060] 2025-04-17T18:07:22.100596Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:22.128480Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:22.128629Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:22.130014Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-17T18:07:22.130096Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-04-17T18:07:22.130146Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-17T18:07:22.130465Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:22.130591Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:22.130658Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [2:1299:3060] in generation 1 2025-04-17T18:07:22.157687Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:22.157754Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2025-04-17T18:07:22.157838Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:22.157894Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1301:3070] 2025-04-17T18:07:22.157919Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-17T18:07:22.157943Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-04-17T18:07:22.157970Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:07:22.158314Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-04-17T18:07:22.158393Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-04-17T18:07:22.158716Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-17T18:07:22.158748Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:22.158776Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-04-17T18:07:22.158806Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-17T18:07:22.158853Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1281:3059], serverId# [2:1290:3064], sessionId# [0:0:0] 2025-04-17T18:07:22.158942Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:07:22.159091Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-04-17T18:07:22.159145Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-04-17T18:07:22.159514Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-17T18:07:22.170355Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:07:22.170445Z node 2 :TX_DATASHARD DEBUG: 
72075186224037894 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:22.320694Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1311:3080], serverId# [2:1313:3082], sessionId# [0:0:0] 2025-04-17T18:07:22.321167Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-04-17T18:07:22.321204Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:07:22.321858Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-17T18:07:22.321898Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:22.321932Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2025-04-17T18:07:22.322155Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys extracted: 0 2025-04-17T18:07:22.322432Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:22.322693Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-17T18:07:22.322743Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-04-17T18:07:22.323065Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:22.323342Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:22.324879Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-04-17T18:07:22.324915Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:07:22.325151Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-04-17T18:07:22.325197Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-17T18:07:22.325950Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-17T18:07:22.325993Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-17T18:07:22.326024Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2025-04-17T18:07:22.326086Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:22.326126Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-17T18:07:22.326287Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:07:22.327506Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-17T18:07:22.327575Z node 2 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:07:22.327623Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-17T18:07:22.327674Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:07:22.327774Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-17T18:07:22.327828Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-17T18:07:22.327893Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:22.328652Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2025-04-17T18:07:22.328990Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-04-17T18:07:22.329029Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-17T18:07:22.333132Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1340:3103], serverId# [2:1341:3104], sessionId# [0:0:0] 2025-04-17T18:07:22.333295Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1340:3103], serverId# [2:1341:3104], sessionId# [0:0:0] 2025-04-17T18:07:22.334538Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1345:3108], serverId# [2:1346:3109], sessionId# [0:0:0] 2025-04-17T18:07:22.334703Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1345:3108], serverId# [2:1346:3109], sessionId# [0:0:0] 2025-04-17T18:07:22.336078Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1350:3113], serverId# [2:1351:3114], sessionId# [0:0:0] 2025-04-17T18:07:22.336254Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1350:3113], serverId# [2:1351:3114], sessionId# [0:0:0] >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> EraseRowsTests::EraseRowsShouldSuccess >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 >> TKeyValueTest::TestInlineCopyRangeWorks >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> KqpBatchUpdate::Large_3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2025-04-17T18:07:15.040472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:15.040996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:15.041053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001078/r3tmp/tmpZ1g0oI/pdisk_1.dat 2025-04-17T18:07:15.496780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.539115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:15.584881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:15.585000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:15.597160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:15.686934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:15.752801Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:15.753097Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:15.802681Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:15.802838Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:15.804810Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:15.804910Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:15.804974Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:15.805386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:15.805567Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:15.805651Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:15.816625Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:15.847598Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:15.847852Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:15.847983Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:15.848022Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:15.848062Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:15.848103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:15.848572Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:15.848700Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:15.848842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:15.848889Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:15.848942Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:15.848986Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:15.849093Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:15.849566Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:15.849796Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:15.849880Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:15.851518Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:15.862497Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:15.862631Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:16.012796Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:16.017758Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:16.017838Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.018724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.018781Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:16.018843Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:16.019122Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:16.019294Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:16.020181Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:16.020271Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:16.022500Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:16.023019Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:07:16.024618Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:16.024669Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.025296Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:16.025365Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.026735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:16.026846Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:16.026906Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:16.026966Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:16.027027Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:16.027112Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:16.031075Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.033205Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:16.033866Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:16.033984Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:16.049166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.049288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.049363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:16.056266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:16.062524Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.217157Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:16.219442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:16.284653Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:16.904449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey02dc8sqkqn0zb8tgd1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM2OTVjZWEtODIwMWUwMzItN2IxMjVhMjgtNGIyOTRiZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:16.939072Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:16.941149Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:16.941365Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-17T18:07:25.632790Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:25.632825Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:25.632859Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:25.632916Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:25.633347Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:25.633535Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:25.633619Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:25.635290Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:25.646370Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:25.646450Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:25.801097Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:25.801616Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:25.801665Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:25.801859Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:25.801899Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:25.801936Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:25.802223Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:25.802364Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:25.803546Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:25.803607Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:25.804045Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:25.804404Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:25.805827Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:25.805882Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:25.807788Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:25.807871Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:25.808641Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:25.808686Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:25.808731Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:25.808797Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:25.808848Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:25.808937Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:25.810084Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:25.812380Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:25.812444Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:25.812996Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:25.821680Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:25.821780Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:25.821849Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:25.828014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:25.833671Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:25.995636Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:25.999525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:26.035341Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:26.168147Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey9kw29r5mjar5fjg3r7h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWE5YzgzMzItYjYxMThkMjItNTBmZDQ3MDktMjU5NzgxMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:26.173654Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:26.174061Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:26.174276Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-17T18:07:26.185313Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:26.189395Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:26.190681Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:26.202289Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:26.202389Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:26.202713Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:26.202752Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-17T18:07:26.203024Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:26.203068Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:26.203144Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:26.203199Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:26.203275Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:26.204242Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:26.204626Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:26.204804Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:26.204856Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:26.204903Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:26.205177Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:26.205249Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-17T18:07:26.205862Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:07:26.206260Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:26.206401Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-17T18:07:26.206451Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-17T18:07:26.244212Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:26.244285Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-17T18:07:26.244657Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:26.244689Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:26.244731Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:26.244834Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:26.244884Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:26.244933Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2025-04-17T18:06:59.929828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345213652160577:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:59.933874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca1/r3tmp/tmpi67YKQ/pdisk_1.dat 2025-04-17T18:07:00.236541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:00.236638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:00.237916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:00.258872Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5576, node 1 2025-04-17T18:07:00.318764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:00.318788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:00.318795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:00.318924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:00.589060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:00.607947Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:00.608019Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:24590 2025-04-17T18:07:00.611086Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-17T18:07:00.626992Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:00.627323Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:03.004889Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345229567878463:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:03.004929Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca1/r3tmp/tmpfOrGpR/pdisk_1.dat 2025-04-17T18:07:03.120755Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19371, node 2 2025-04-17T18:07:03.171089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:03.171196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:03.176405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:03.226829Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:03.226856Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:03.226862Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:03.226981Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6795 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:03.411297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:03.419156Z node 2 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2025-04-17T18:07:03.419213Z node 2 :GRPC_CLIENT DEBUG: [5170000b1408] Connect to grpc://localhost:7377 2025-04-17T18:07:03.420002Z node 2 :GRPC_CLIENT DEBUG: [5170000b1408] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2025-04-17T18:07:03.431292Z node 2 :GRPC_CLIENT DEBUG: [5170000b1408] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2025-04-17T18:07:03.431518Z node 2 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2025-04-17T18:07:06.224039Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345240956745637:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:06.224192Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca1/r3tmp/tmpjrSp5l/pdisk_1.dat 2025-04-17T18:07:06.345538Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1196, node 3 2025-04-17T18:07:06.378464Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:06.378538Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:06.394891Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:06.434511Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:06.434534Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:06.434541Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:06.434686Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:06.664477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:06.671147Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:06.671196Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:06.671208Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:06.671248Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:06.671310Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Connect to grpc://localhost:63185 2025-04-17T18:07:06.672079Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-17T18:07:06.681639Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-04-17T18:07:06.682076Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:06.682104Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:06.682266Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-17T18:07:06.686678Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 1 CANCELLED 2025-04-17T18:07:06.686794Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-04-17T18:07:09.664127Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345253646284657:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:09.664246Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca1/r3tmp/tmp4y5GdA/pdisk_1.dat 2025-04-17T18:07:09.825066Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:09.858558Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:09.858652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:09.860492Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 8450, node 4 2025-04-17T18:07:09.914960Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:09.914987Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:09.914995Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:09.915127Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:10.158710Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:10.164354Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:10.166299Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-17T18:07:10.166365Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Connect to grpc://localhost:26934 2025-04-17T18:07:10.167467Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-17T18:07:10.182363Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Status 14 Service Unavailable 2025-04-17T18:07:10.182702Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:10.182739Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-17T18:07:10.182950Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-17T18:07:10.187006Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Status 14 Service Unavailable 2025-04-17T18:07:10.187121Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:10.669362Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:07:10.669420Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-17T18:07:10.669620Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-17T18:07:10.674850Z node 4 
:GRPC_CLIENT DEBUG: [51700005c288] Status 14 Service Unavailable 2025-04-17T18:07:10.674958Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:11.713577Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:07:11.713621Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-17T18:07:11.713800Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-17T18:07:11.718282Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Status 14 Service Unavailable 2025-04-17T18:07:11.718405Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:13.714290Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:07:13.714338Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-17T18:07:13.714539Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-17T18:07:13.716150Z node 4 :GRPC_CLIENT DEBUG: [51700005c288] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:13.716303Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-17T18:07:14.664433Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494345253646284657:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:14.664553Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca1/r3tmp/tmpdDnzZG/pdisk_1.dat 2025-04-17T18:07:22.954343Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:22.957941Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:22.982331Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:22.982434Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:22.984117Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22284, node 5 2025-04-17T18:07:23.062972Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:23.062993Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:23.063000Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:23.063174Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27815 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:23.360427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:23.367556Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:23.370257Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-17T18:07:23.370327Z node 5 :GRPC_CLIENT DEBUG: [5170000e2b08] Connect to grpc://localhost:4454 2025-04-17T18:07:23.371238Z node 5 :GRPC_CLIENT DEBUG: [5170000e2b08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-17T18:07:23.380013Z node 5 :GRPC_CLIENT DEBUG: [5170000e2b08] Status 14 Service Unavailable 2025-04-17T18:07:23.380464Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:23.380491Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-17T18:07:23.380691Z node 5 :GRPC_CLIENT DEBUG: [5170000e2b08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-17T18:07:23.382758Z node 5 :GRPC_CLIENT DEBUG: [5170000e2b08] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:23.383013Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 18133, MsgBus: 8062 2025-04-17T18:06:32.726897Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345094927701832:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:32.726982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/000cf7/r3tmp/tmpQzF0f8/pdisk_1.dat 2025-04-17T18:06:33.026494Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18133, node 1 2025-04-17T18:06:33.083416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:33.083441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:33.083453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:33.083608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:33.095525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:33.095736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:33.097780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8062 TClient is connected to server localhost:8062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:33.541169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:33.563988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:33.645176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:33.795782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:33.871670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:35.690273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345107812605511:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:35.690418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:35.946868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:35.969443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:35.995893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.023078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.047039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.076076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.108141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:36.157904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345112107573470:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.157989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.158326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345112107573475:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:36.162096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:36.173661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345112107573478:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:36.231961Z node 1 :TX_PROXY ERROR: Actor# [1:7494345112107573531:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:37.003317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:37.726954Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345094927701832:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:37.727071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:06:48.015496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:06:48.015547Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 24387, MsgBus: 29172 2025-04-17T18:06:57.954149Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345204293617872:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:57.955426Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cf7/r3tmp/tmpae1iiZ/pdisk_1.dat 2025-04-17T18:06:58.064298Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:58.086958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:58.087068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:58.091513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24387, node 2 2025-04-17T18:06:58.137496Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:58.137539Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:58.137548Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:58.137697Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29172 TClient is connected to server localhost:29172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:58.595586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:58.605126Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:06:58.611813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:58.668025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:58.837909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:58.913172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:01.294472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345221473488838:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.294565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.346463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.382335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.414700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.480213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.508646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.544074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.583830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.669395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345221473489507:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.669490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.669519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345221473489512:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.673010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:07:01.683465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345221473489514:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:07:01.763005Z node 2 :TX_PROXY ERROR: Actor# [2:7494345221473489572:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:02.902233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:02.970889Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345204293617872:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:02.978200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:07:13.061266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:07:13.061291Z node 2 :IMPORT WARN: Table profiles were not loaded >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::LoginEmptyTicketBad >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: 2025-04-17T18:07:17.963215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:17.963664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:17.963713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001061/r3tmp/tmpXdfDpi/pdisk_1.dat 2025-04-17T18:07:18.319678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:18.364788Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:18.404199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:18.404333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:18.415648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:18.497717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:18.540075Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:18.540357Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:18.592299Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:18.592430Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:18.594034Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:18.594120Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:18.594207Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:18.594551Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:18.594689Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:18.594759Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:18.605534Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:18.637341Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:18.637566Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:18.637674Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:18.637704Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:18.637733Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:18.637763Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:18.638300Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:18.638412Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:18.638535Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:18.638570Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:18.638604Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:18.638647Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:18.638732Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:18.639164Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:18.639384Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:18.639478Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:18.641183Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:18.653145Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:18.653249Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:18.811889Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:18.816553Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:18.816634Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:18.817412Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:18.817477Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:18.817544Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:18.817786Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:18.817930Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:18.818779Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:18.818871Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:18.821080Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:18.821519Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:07:18.823093Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:18.823151Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:18.823777Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:18.823854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:18.825256Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:18.825307Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:18.825370Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:18.825436Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:18.825493Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:18.825578Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:18.829324Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:18.831439Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:18.832044Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:18.832146Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:18.841096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:18.841190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:18.841245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:18.844909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:18.851971Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:19.016697Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:19.019386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:19.096714Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:19.431984Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey2sqa5mjsa0bmd6j8ykb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2MyYWViOGQtZjMzNzQ3OGMtM2Y2Y2ZlOC0zYzljZGZhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:19.438029Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:19.438554Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:19.438747Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=8 ... 025-04-17T18:07:27.371634Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.371675Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:27.371716Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.371779Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:27.372218Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:27.372418Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:27.372492Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:27.374106Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.384795Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:27.384905Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:27.540938Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:27.541361Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:27.541403Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.541566Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.541605Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:27.541641Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:27.541826Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:27.541932Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:27.543098Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.543174Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:27.543658Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:27.544038Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.545570Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:27.545621Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.546274Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:27.546363Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.547105Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.547152Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:27.547211Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:27.547274Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:27.547331Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:27.547436Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.548733Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.551189Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:27.551256Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:27.551826Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:27.561709Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:27.561810Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:27.561888Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:27.569275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:27.575728Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.736298Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.739503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:27.778697Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:27.867998Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eyba7630wgf64bhvmhqhe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjQyMGUwYjYtOWEwNzVkODItOWI0NjcwNDMtOWM1N2JkYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:27.870873Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:27.871321Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:27.871516Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-17T18:07:27.882679Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.887057Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:27.888200Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:27.899589Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:27.899679Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.899961Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:27.900008Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-17T18:07:27.900392Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.900452Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.900511Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:27.900593Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.900694Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:27.901838Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:27.902229Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:27.902462Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.902517Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:27.902578Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:27.902813Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:27.902876Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
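The read-table scan traced next is flow-controlled by a message quota: the shard may send response data only while it holds quota, each sent message leaves an ack pending, and the scan finishes only once the quota is spent and all acks have returned (Got quota, MessageQuota: 1; Send response data, PendingAcks: 1, MessageQuota: 0; Got stream data ack, PendingAcks: 0; Finish scan). A minimal sketch of that bookkeeping, assuming a single sender; all names here are illustrative:

    #include <cstddef>
    #include <iostream>
    #include <string>

    // Toy quota/ack bookkeeping for a streaming scan: sends consume quota
    // and leave acks pending; the scan may finish only when no acks remain.
    class ScanStream {
    public:
        void GrantQuota(std::size_t n) { quota_ += n; }

        // Consumes one unit of quota and leaves one ack pending.
        bool TrySend(const std::string& chunk) {
            if (quota_ == 0) return false;  // must wait for more quota
            --quota_;
            ++pendingAcks_;
            std::cout << "Send " << chunk.size() << " bytes, PendingAcks: "
                      << pendingAcks_ << ", MessageQuota: " << quota_ << "\n";
            return true;
        }

        void OnAck() {
            if (pendingAcks_ > 0) --pendingAcks_;
            std::cout << "Ack received, PendingAcks: " << pendingAcks_ << "\n";
        }

        bool CanFinish() const { return pendingAcks_ == 0; }

    private:
        std::size_t quota_ = 0;
        std::size_t pendingAcks_ = 0;
    };

    int main() {
        ScanStream s;
        s.GrantQuota(1);                // Got quota ... MessageQuota: 1
        s.TrySend("result-row-batch");  // PendingAcks: 1, MessageQuota: 0
        s.OnAck();                      // PendingAcks: 0
        if (s.CanFinish()) std::cout << "Finish scan, MessageQuota: 0\n";
    }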
2025-04-17T18:07:27.903580Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:07:27.903804Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:27.903929Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-17T18:07:27.903974Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-17T18:07:27.944813Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:27.944896Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-17T18:07:27.945392Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.945444Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:27.945491Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:27.945634Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.945716Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.945780Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: 2025-04-17T18:07:19.853033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:19.853478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:19.853531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001051/r3tmp/tmpc4j0OE/pdisk_1.dat 2025-04-17T18:07:20.218962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:20.253844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:20.292543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:20.292706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:20.304227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:20.385577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:20.426695Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:20.426890Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:20.466696Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:20.466787Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:20.468083Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:20.468133Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:20.468169Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:20.468406Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:20.468532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:20.468602Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:20.479904Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:20.515619Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:20.515793Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:20.515892Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:20.515921Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:20.515949Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:20.515978Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.516413Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:20.516513Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:20.516620Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.516652Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.516684Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:20.516718Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.516792Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:20.517206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:20.517408Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:20.517478Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:20.518963Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.529636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:20.529725Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:20.678989Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:20.683700Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:20.683758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.684426Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.684469Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:20.684515Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:20.684737Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:20.684880Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:20.685586Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.685677Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:20.687641Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:20.688038Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
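This repeated bootstrap shows the shard deferring its mediator time cast registration while in WaitScheme (missing processing params) and sending the registration only after the first scheme transaction supplies the params and the shard becomes Ready. A small sketch of that state gate, assuming a simplified two-state shard; none of these type names come from YDB:

    #include <iostream>
    #include <optional>

    enum class ShardState { WaitScheme, Ready };

    struct ProcessingParams { int mediators = 1; int coordinators = 1; };

    // Gate: time cast registration is sent only once processing params are
    // known, i.e. after the first scheme tx moved the shard out of WaitScheme.
    class Shard {
    public:
        void TryRegisterTimeCast() const {
            if (state_ == ShardState::WaitScheme || !params_) {
                std::cout << "not sending time cast registration: WaitScheme\n";
                return;
            }
            std::cout << "Send registration request to time cast: mediators "
                      << params_->mediators << ", coordinators "
                      << params_->coordinators << "\n";
        }
        void ApplyScheme(ProcessingParams p) {  // e.g. the CREATE TABLE tx
            params_ = p;
            state_ = ShardState::Ready;
        }
    private:
        ShardState state_ = ShardState::WaitScheme;
        std::optional<ProcessingParams> params_;
    };

    int main() {
        Shard shard;
        shard.TryRegisterTimeCast();  // deferred: still WaitScheme
        shard.ApplyScheme({1, 1});
        shard.TryRegisterTimeCast();  // now registers
    }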
2025-04-17T18:07:20.689396Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:20.689439Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.689965Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:20.690028Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.691160Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.691207Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:20.691309Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:20.691371Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:20.691422Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:20.691495Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.694823Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.696640Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:20.697218Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:20.697348Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:20.704063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:20.704139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:20.704204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:20.708244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:20.713065Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.864754Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.867807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:20.943267Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:21.294740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ey4kyd1cwh4qh353sb7zn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgyNTQzZDAtMzRmOTc1MWEtYTYzYWJjNjctNWQ1Y2EzZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:21.301413Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:21.301905Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:21.302133Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-17T18:07:29.313392Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:29.313423Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:29.313465Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:29.313524Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:29.313870Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:29.314092Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:29.314162Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:29.315775Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:29.326400Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:29.326505Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:29.480613Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:29.481144Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:29.481200Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:29.481390Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:29.481437Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:29.481479Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:29.481698Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:29.481832Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:29.484037Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:29.484110Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:29.484514Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:29.484861Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:29.486317Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:29.486368Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:29.486890Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:29.486963Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:29.487660Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:29.487703Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:29.487753Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:29.487819Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:29.487866Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:29.487944Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:29.489062Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:29.491270Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:29.491334Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:29.491898Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:29.499093Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:29.499182Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:29.499249Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:29.504158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:29.509957Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:29.660012Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:29.662874Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:29.696779Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:29.813491Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eyd6sfgk6p0dgdpsp9wmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjcyMzQxMWItYmRkMTBjY2UtMzg3YzdjOGMtOWIyODJjMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:29.816840Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:29.817252Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:29.817441Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-17T18:07:29.828583Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:29.833332Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:29.835168Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:29.848079Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:29.848171Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:29.848511Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:29.848559Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-17T18:07:29.848847Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:29.848903Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:29.848960Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:29.849029Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:29.849136Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:29.850204Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:29.850585Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:29.850770Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:29.850848Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:29.850898Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:29.851142Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:29.851213Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
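Both test blocks above hit the same bootstrap race for the default resource pool: the fetch fails with NOT_FOUND, a creator actor issues the CREATE, schedules a doublechecking retry, and a concurrent creation surfaces as "path exist, request accepts it" rather than as a failure. A minimal sketch of that idempotent ensure-then-recheck pattern; PoolStore and its Status codes are stand-ins, not YDB's metadata API:

    #include <iostream>
    #include <set>
    #include <string>

    enum class Status { Ok, NotFound, AlreadyExists };

    // Stand-in for the metadata store holding resource pools.
    struct PoolStore {
        std::set<std::string> pools;
        Status Fetch(const std::string& id) const {
            return pools.count(id) ? Status::Ok : Status::NotFound;
        }
        Status Create(const std::string& id) {
            // A racing creator may have won; that is accepted, not an error.
            return pools.insert(id).second ? Status::Ok : Status::AlreadyExists;
        }
    };

    // Ensure the pool exists: fetch, create on NOT_FOUND, then doublecheck.
    // Mirrors "Failed to fetch pool ... NOT_FOUND" -> CREATE ->
    // "completed, doublechecking" -> "path exist, request accepts it".
    bool EnsurePool(PoolStore& store, const std::string& id, int maxRetries = 3) {
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            if (store.Fetch(id) == Status::Ok) return true;
            Status st = store.Create(id);
            if (st == Status::Ok || st == Status::AlreadyExists) {
                // Doublecheck: the create (ours or a racer's) must be visible.
                if (store.Fetch(id) == Status::Ok) return true;
            }
            std::cout << "Scheduled retry for pool " << id << "\n";
        }
        return false;
    }

    int main() {
        PoolStore store;
        std::cout << (EnsurePool(store, "default") ? "pool ready" : "failed") << "\n";
    }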
2025-04-17T18:07:29.851862Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:07:29.852178Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:29.852318Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-17T18:07:29.852380Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-17T18:07:29.900699Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:29.900777Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-17T18:07:29.901289Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:29.901335Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:29.901378Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:29.901515Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:29.901588Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:29.901637Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> TTicketParserTest::LoginEmptyTicketBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: 2025-04-17T18:07:20.142138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:20.142643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:20.142697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00104d/r3tmp/tmplwOG0y/pdisk_1.dat 2025-04-17T18:07:20.453721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:20.496442Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:20.535033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:20.535155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:20.546411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:20.626942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:20.681427Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-17T18:07:20.681642Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:20.727382Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:20.727560Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:20.729143Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:20.729234Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:20.729290Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:20.729652Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:20.729941Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:20.730019Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-04-17T18:07:20.731547Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-04-17T18:07:20.731772Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:20.741709Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-04-17T18:07:20.741912Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:20.750890Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:20.751079Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:20.752454Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:07:20.752516Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:07:20.752561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:07:20.752864Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:20.752998Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:20.753049Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:739:2588] in generation 1 2025-04-17T18:07:20.753429Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:20.753508Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:20.754740Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-17T18:07:20.754808Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-17T18:07:20.754864Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-17T18:07:20.755165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:20.755256Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:20.755302Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-04-17T18:07:20.766204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:20.799577Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:20.799747Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:20.799833Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-04-17T18:07:20.799856Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:20.799887Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:20.799918Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:20.800149Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:20.800192Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:07:20.800235Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:20.800270Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-04-17T18:07:20.800294Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:07:20.800308Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:07:20.800320Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:07:20.800539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:20.800557Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-17T18:07:20.800589Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:20.800621Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-04-17T18:07:20.800635Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T18:07:20.800653Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-17T18:07:20.800670Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:07:20.800852Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:20.800931Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:20.801063Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:20.801098Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.801133Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:20.801170Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:20.801212Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:07:20.801286Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:07:20.801722Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-17T18:07:20.801789Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:07:20.801818Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.801850Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:07:20.801885Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:07:20.801927Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-17T18:07:20.801988Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-17T18:07:20.802191Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:20.802375Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:20.802458Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:20.802811Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:711:2600], sessionId# [0:0:0] 2025-04-17T18:07:20.802856Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:07:20.802880Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:20.802901Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-17T18:07:20.802929Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:07:20.803104Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:07:20.803254Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:07:20.803301Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:07:20.803581Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:716:2604], sessionId# [0:0:0] 2025-04-17T18:07:20.803779Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-17T18:07:20.803860Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-17T18:07:20.803898Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-17T18:07:20.805228Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:20.805304Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:07:20.805357Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:07:20.816188Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:20.816283Z nod ... 72075186224037892, clientId# [3:1161:2895], serverId# [3:1189:2911], sessionId# [0:0:0] 2025-04-17T18:07:31.454291Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack init split/merge destination OpId 281474976715663 2025-04-17T18:07:31.454346Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-04-17T18:07:31.454490Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-17T18:07:31.454837Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715663 2025-04-17T18:07:31.454867Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-04-17T18:07:31.455799Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715663 at state Ready 2025-04-17T18:07:31.466569Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715663 2025-04-17T18:07:31.466987Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2025-04-17T18:07:31.471723Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:07:31.471768Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2025-04-17T18:07:31.472176Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:07:31.472201Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2025-04-17T18:07:31.472716Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:07:31.472739Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-04-17T18:07:31.474707Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:07:31.474733Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 
72075186224037889, table# 8, finished edge# 0, front# 0 2025-04-17T18:07:31.504292Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:07:31.504346Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-04-17T18:07:31.504803Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715663 2025-04-17T18:07:31.505025Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2025-04-17T18:07:31.505077Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2025-04-17T18:07:31.505111Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2025-04-17T18:07:31.505134Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2025-04-17T18:07:31.505336Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2025-04-17T18:07:31.505512Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2025-04-17T18:07:31.505544Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715663 2025-04-17T18:07:31.505573Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2025-04-17T18:07:31.505602Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2025-04-17T18:07:31.505687Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 2025-04-17T18:07:31.506246Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2025-04-17T18:07:31.506411Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-04-17T18:07:31.506499Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-04-17T18:07:31.506754Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1235:2944], serverId# [3:1236:2945], sessionId# [0:0:0] 2025-04-17T18:07:31.506793Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1234:2943], serverId# [3:1237:2946], sessionId# [0:0:0] 2025-04-17T18:07:31.506945Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-04-17T18:07:31.508672Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-04-17T18:07:31.509951Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715663 
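The BorrowSnapshot records above show the split source shard 72075186224037889 keeping a running per-OpId total for OpId 281474976715663 (12 → 24 → 36 → 48 → 194 on the first pass, then up to 397 on the second) before sending snapshots of size 221 and 215 to the destination shards. A minimal Python sketch to re-check that accounting from a saved log file; the regex and helper name are assumptions derived from the line format shown here, not part of ya or YDB tooling:

    import re

    # Matches records such as:
    #   72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146
    #   total snapshot size is 194 for split OpId 281474976715663
    BORROW = re.compile(
        r"BorrowSnapshot: table (\d+) snapshot size is (\d+) "
        r"total snapshot size is (\d+) for split OpId (\d+)"
    )

    def check_borrow_totals(log_text: str) -> None:
        """Recompute the running snapshot total per split OpId and
        compare it with the total the datashard itself logged."""
        running = {}
        for table, size, reported, op_id in BORROW.findall(log_text):
            running[op_id] = running.get(op_id, 0) + int(size)
            status = "OK" if running[op_id] == int(reported) else "MISMATCH"
            print(f"OpId {op_id} table {table}: +{size} -> "
                  f"{running[op_id]} (logged {reported}) {status}")

Against the fragment above this prints ten OK lines, ending at 397, matching the shard's own reported total.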
2025-04-17T18:07:31.510082Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-17T18:07:31.510158Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:31.510269Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-17T18:07:31.510327Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1240:2949] 2025-04-17T18:07:31.510366Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-17T18:07:31.510410Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-17T18:07:31.510444Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:07:31.510548Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2025-04-17T18:07:31.511092Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-17T18:07:31.511123Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:31.511146Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-17T18:07:31.511171Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-17T18:07:31.511441Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1235:2944], serverId# [3:1236:2945], sessionId# [0:0:0] 2025-04-17T18:07:31.511499Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715663 2025-04-17T18:07:31.511557Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2025-04-17T18:07:31.511603Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:31.511641Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-17T18:07:31.511669Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1242:2951] 2025-04-17T18:07:31.511684Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-17T18:07:31.511712Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-17T18:07:31.511732Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:07:31.511803Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-04-17T18:07:31.511836Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:07:31.511928Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2025-04-17T18:07:31.512312Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-17T18:07:31.512358Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:07:31.512439Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-17T18:07:31.512464Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:31.512490Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-17T18:07:31.512520Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-17T18:07:31.512638Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 2000 2025-04-17T18:07:31.512676Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-17T18:07:31.512754Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1234:2943], serverId# [3:1237:2946], sessionId# [0:0:0] 2025-04-17T18:07:31.512962Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2025-04-17T18:07:31.513007Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-17T18:07:31.534885Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715663 2025-04-17T18:07:31.539970Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-17T18:07:31.542833Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-17T18:07:31.542894Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-17T18:07:31.543183Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1103:2841], serverId# [3:1104:2842], sessionId# [0:0:0] 2025-04-17T18:07:31.543335Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:07:31.543392Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2025-04-17T18:07:31.543595Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2025-04-17T18:07:31.543664Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:07:31.543721Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-17T18:07:26.806877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:26.807364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:26.807417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001049/r3tmp/tmpExFB7J/pdisk_1.dat 2025-04-17T18:07:27.177167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:27.215984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:27.257394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:27.257570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:27.269084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:27.354760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:27.398992Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:27.399356Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:27.447367Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:27.447504Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:27.449391Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:27.449480Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:27.449531Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:27.449896Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:27.450069Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:27.450202Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:27.461273Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:27.487174Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:27.487428Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:27.487571Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:27.487614Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:27.487654Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:27.487701Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.488169Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:27.488298Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:27.488452Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.488498Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.488541Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:27.488582Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.488688Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:27.489188Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:27.489437Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:27.489539Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:27.491337Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.502511Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:27.502639Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:27.659580Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:27.663290Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:27.663386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.664245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.664293Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:27.664352Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:27.664614Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:27.664782Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:27.665540Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.665601Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:27.667179Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:27.667536Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
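Records in this part of the report run together on long wrapped lines, which makes grepping for a single event (a plan step, a state switch, a readset ack) awkward. A small helper (an illustrative sketch, not part of ya) that re-splits raw text into individual timestamped records using the "2025-...Z node N :COMPONENT LEVEL:" prefix visible throughout this log:

    import re

    # Every record starts with an ISO timestamp followed by
    # "node <n> :<COMPONENT> <LEVEL>:", e.g.
    # "2025-04-17T18:07:27.668725Z node 1 :TX_DATASHARD DEBUG: ..."
    RECORD_START = re.compile(
        r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z node \d+ :)"
    )

    def split_records(raw: str) -> list[str]:
        """Split run-together ya test output into one string per
        timestamped record, preserving log order."""
        return [r.strip() for r in RECORD_START.split(raw) if r.strip()]

    # Usage sketch:
    #   records = split_records(open("ya_log.txt").read())
    #   ready = [r for r in records if "Switched to work state" in r]

Applied to the datashard output above, filtering the split records for "Switched to work state" reproduces each tablet's WaitScheme -> Ready transition in order.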
2025-04-17T18:07:27.668725Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:27.668767Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.669366Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:27.669438Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.670799Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.670842Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:27.670929Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:27.670993Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:27.671058Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:27.671147Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.674949Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.677101Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:27.677735Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:27.677848Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:27.686841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:27.686926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:27.686981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:27.690626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:27.696203Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.846939Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.849530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:27.918688Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:28.303508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eybe59gc5qj9wae4zwjnp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZjNDM2NzgtZmE0MWJjZjUtOTk4NGYzZmMtYTVjN2M1ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:28.314645Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:28.315113Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:28.315246Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 37968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:31.943551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:31.965481Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-17T18:07:31.965705Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:32.013512Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:32.013632Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:32.015465Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:32.015565Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:32.015630Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:32.015984Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:32.016117Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:32.016190Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-17T18:07:32.026914Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:32.026988Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:32.027091Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:32.027186Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-17T18:07:32.027222Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:32.027279Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:32.027317Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.027663Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:32.027750Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:32.027865Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.027913Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:32.027951Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:32.027998Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.028089Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-04-17T18:07:32.028475Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:32.028692Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:32.028766Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:32.030525Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:32.041186Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:32.041298Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:32.204082Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:32.204450Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:32.204495Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.204851Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.204896Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:32.204938Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:32.205156Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:32.205268Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:32.205412Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.205460Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:32.205887Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:32.206279Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:32.208556Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-04-17T18:07:32.208603Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.209158Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:32.209241Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.209913Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.209950Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:32.210005Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:32.210061Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:32.210110Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:32.210226Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.211732Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:32.213417Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:32.214036Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:32.214094Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:32.219164Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-17T18:07:32.219326Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:32.246786Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:32.246863Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.247175Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-17T18:07:32.249017Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-04-17T18:07:32.249182Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:32.249414Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:32.249457Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.249637Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-04-17T18:07:32.251428Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-04-17T18:07:32.251567Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 
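Each TTxDirectBase(48) round above is bracketed by a "Server connected" / "Server disconnected" pair for a fresh clientId on tablet 72075186224037888. A hedged sketch to confirm sessions are balanced in a captured log; the helper is illustrative only, built from the line format shown:

    import re
    from collections import Counter

    CONN = re.compile(
        r"Server (connected|disconnected) at leader tablet# (\d+)"
    )

    def session_balance(log_text: str) -> Counter:
        """Net open sessions per tablet: +1 per connect, -1 per
        disconnect. A nonzero count flags an unmatched session."""
        balance = Counter()
        for event, tablet in CONN.findall(log_text):
            balance[tablet] += 1 if event == "connected" else -1
        return balance

    # In the cycles above, pairs such as clientId [2:739:2621] /
    # serverId [2:740:2622] connect and disconnect within one round.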
2025-04-17T18:07:32.251737Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:32.251782Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.251958Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-04-17T18:07:32.253535Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-04-17T18:07:32.253654Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:32.253783Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:32.253819Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.254009Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-04-17T18:07:32.255576Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-04-17T18:07:32.255708Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:32.255915Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:32.255953Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.256108Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-04-17T18:07:32.257588Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0] 2025-04-17T18:07:32.257715Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:32.257844Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:32.257897Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.258068Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: 2025-04-17T18:07:19.193192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:19.193660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:19.193711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00105f/r3tmp/tmpHmImyA/pdisk_1.dat 2025-04-17T18:07:19.563225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:19.608910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:19.648653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:19.648815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:19.660395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:19.755634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:19.807656Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-17T18:07:19.807828Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:19.843677Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:19.843828Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:19.845227Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:19.845318Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:19.845376Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:19.845785Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:19.846058Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:19.846151Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-04-17T18:07:19.847711Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-04-17T18:07:19.847935Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:19.857444Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-04-17T18:07:19.857640Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:19.866003Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:19.866215Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:19.867494Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:07:19.867547Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:07:19.867606Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:07:19.867879Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:19.868000Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:19.868049Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:739:2588] in generation 1 2025-04-17T18:07:19.868383Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:19.868449Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:19.869621Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-17T18:07:19.869674Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-17T18:07:19.869727Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-17T18:07:19.869985Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:19.870064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:19.870132Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-04-17T18:07:19.881826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:19.917034Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:19.917241Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:19.917341Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-04-17T18:07:19.917376Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:19.917418Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:19.917464Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:19.917772Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:19.917825Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:07:19.917887Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:19.917943Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-04-17T18:07:19.917978Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:07:19.918001Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:07:19.918025Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:07:19.918378Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:19.918410Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-17T18:07:19.918474Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:19.918536Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-04-17T18:07:19.918558Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T18:07:19.918577Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-17T18:07:19.918599Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:07:19.918839Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:19.918933Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:19.919090Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:19.919128Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:19.919164Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:19.919202Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:19.919245Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:07:19.919290Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:07:19.919706Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-17T18:07:19.919759Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:07:19.919785Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:19.919807Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:07:19.919836Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:07:19.919894Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-17T18:07:19.919972Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-17T18:07:19.920171Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:19.920400Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:19.920493Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:19.920884Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:711:2600], sessionId# [0:0:0] 2025-04-17T18:07:19.920927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:07:19.920952Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:19.920979Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-17T18:07:19.921010Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:07:19.921174Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:07:19.921326Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:07:19.921430Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:07:19.921776Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:716:2604], sessionId# [0:0:0] 2025-04-17T18:07:19.921944Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-17T18:07:19.922052Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-17T18:07:19.922114Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-17T18:07:19.923864Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:19.923945Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:07:19.924004Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:07:19.934915Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:19.935017Z nod ... egularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:07:31.969646Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] Handle TEvDataShard::TEvEraseRowsRequest 2025-04-17T18:07:31.969790Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] Propose tx: txId# 281474976715662, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2025-04-17T18:07:31.969894Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] Propose tx: txId# 281474976715662, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2025-04-17T18:07:31.969960Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] Propose tx: txId# 281474976715662, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2025-04-17T18:07:31.970283Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:31.970458Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037888 2025-04-17T18:07:31.970787Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-17T18:07:31.970875Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037890 2025-04-17T18:07:31.971105Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:07:31.971222Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037889 2025-04-17T18:07:31.982521Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T18:07:31.982624Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:31.982806Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-04-17T18:07:31.982882Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 
72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-04-17T18:07:31.982946Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 1 2025-04-17T18:07:31.983088Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:07:31.983215Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037889, status# 1 2025-04-17T18:07:31.983335Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:07:31.983382Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-17T18:07:31.983451Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-04-17T18:07:31.983488Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-04-17T18:07:31.983521Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 1 2025-04-17T18:07:31.983555Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] Register plan: txId# 281474976715662, minStep# 1525, maxStep# 31525 2025-04-17T18:07:31.996436Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-04-17T18:07:31.997977Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-17T18:07:32.000688Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:1109:2846] Reply: txId# 281474976715662, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715662, shard# 72075186224037888 2025-04-17T18:07:32.001042Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-04-17T18:07:32.001091Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-04-17T18:07:32.001258Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-17T18:07:32.001297Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-17T18:07:32.001526Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1103:2841], serverId# [3:1104:2842], sessionId# [0:0:0] 2025-04-17T18:07:32.002050Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:07:32.002103Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:32.002147Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-17T18:07:32.002333Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:07:32.023861Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1120:2856] 2025-04-17T18:07:32.024123Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:32.029411Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:32.030614Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:32.032926Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: 
QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:32.033007Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:32.033090Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:32.033530Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:32.033817Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:32.033892Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:1135:2856] in generation 2 2025-04-17T18:07:32.049340Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:32.049456Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-17T18:07:32.049629Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:32.049924Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:1138:2864] 2025-04-17T18:07:32.049963Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:32.050033Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:32.050069Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.050310Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-17T18:07:32.050493Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-17T18:07:32.051597Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:32.051698Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:32.051828Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1524 2025-04-17T18:07:32.051871Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.051993Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:32.052109Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.052157Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:32.052194Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2025-04-17T18:07:32.052236Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.052432Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-17T18:07:32.052477Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-04-17T18:07:32.052521Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-04-17T18:07:32.052749Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1500 next step 1524 2025-04-17T18:07:32.052851Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 
281474976715661 2025-04-17T18:07:32.052935Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1524 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T18:07:32.052986Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1524:281474976715661 at 72075186224037889 2025-04-17T18:07:32.053036Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T18:07:32.053108Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1524 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-17T18:07:32.053201Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-17T18:07:32.053226Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-04-17T18:07:32.053252Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-04-17T18:07:32.053359Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2025-04-17T18:07:32.053511Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715661 2025-04-17T18:07:32.053557Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1524 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-04-17T18:07:32.053607Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1524:281474976715661 at 72075186224037890 2025-04-17T18:07:32.053649Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-17T18:07:32.053681Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1524 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-04-17T18:07:32.053749Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2025-04-17T18:06:50.373025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345174670245861:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:50.373170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001cab/r3tmp/tmpEh3Fcd/pdisk_1.dat 2025-04-17T18:06:50.615844Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14223, node 1 2025-04-17T18:06:50.671238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:50.671263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:50.671270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:50.671383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:50.691048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:50.691171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:50.695500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:50.921130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:51.000179Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:06:51.020663Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:06:51.020711Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:51.021873Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****vuqg (FED27453) () has now valid token of user1 2025-04-17T18:06:51.021888Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-17T18:06:53.378278Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345185558367689:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:53.378312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cab/r3tmp/tmp7ruWPS/pdisk_1.dat 2025-04-17T18:06:53.490526Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22273, node 2 2025-04-17T18:06:53.523828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:53.524050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:53.530757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:53.556358Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:53.556384Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:53.556390Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:53.556522Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:53.761576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:53.779313Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:06:53.857249Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:06:53.865914Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:06:53.865949Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:53.866842Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****FypA (A51CBD90) () has now valid token of user1 2025-04-17T18:06:53.866868Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-17T18:06:56.745628Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345197573009866:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:56.745742Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cab/r3tmp/tmpBv7ZZk/pdisk_1.dat 2025-04-17T18:06:56.877522Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:56.907339Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:56.907426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:56.911858Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23499, node 3 2025-04-17T18:06:56.953150Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:56.953173Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:56.953180Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:56.953312Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:57.196453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:06:57.250386Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:06:57.257451Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:06:57.257483Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:06:57.259042Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****4rAg (4A257E27) () has now valid token of user1 2025-04-17T18:06:57.259076Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-17T18:06:57.261934Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:07:00.775382Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****4rAg (4A257E27) 2025-04-17T18:07:00.775785Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****4rAg (4A257E27) () has now valid token of user1 2025-04-17T18:07:01.745235Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345197573009866:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:01.745360Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:07:04.778923Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****4rAg (4A257E27) 2025-04-17T18:07:04.779292Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****4rAg (4A257E27) () has now valid token of user1 2025-04-17T18:07:07.262550Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:07:07.782598Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****4rAg (4A257E27) 2025-04-17T18:07:07.782866Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****4rAg (4A257E27) () has now valid token of user1 2025-04-17T18:07:11.784543Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****4rAg (4A257E27) 2025-04-17T18:07:11.784872Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****4rAg (4A257E27) () has now valid token of user1 2025-04-17T18:07:11.864377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:07:11.864408Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:16.791348Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****4rAg (4A257E27) 2025-04-17T18:07:16.791660Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****4rAg (4A257E27) () has now valid token of user1 2025-04-17T18:07:17.896450Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345291025211151:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:17.896536Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cab/r3tmp/tmpsB5ZK9/pdisk_1.dat 2025-04-17T18:07:18.028659Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:18.043019Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:18.043130Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:18.047692Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20285, node 4 2025-04-17T18:07:18.111001Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-04-17T18:07:18.111032Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:18.111042Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:18.111169Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:18.370783Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:18.377810Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:18.406464Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:07:18.414709Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:07:18.414738Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:18.415570Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****4PJg (FFE75D55) () has now valid token of user1 2025-04-17T18:07:18.415584Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-17T18:07:18.422274Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:07:22.897888Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494345291025211151:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:22.897995Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:07:22.914427Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****4PJg (FFE75D55) 2025-04-17T18:07:22.914843Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****4PJg (FFE75D55) () has now permanent error message 'User not found' 2025-04-17T18:07:27.917761Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****4PJg (FFE75D55) 2025-04-17T18:07:29.029529Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345342752256417:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:29.029606Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/6873/001cab/r3tmp/tmpDR0cRG/pdisk_1.dat 2025-04-17T18:07:29.150066Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18803, node 5 2025-04-17T18:07:29.160886Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:29.160984Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:29.162666Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:29.214878Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:29.214911Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:29.214920Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:29.215060Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:29.482258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:29.525609Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:07:29.537496Z node 5 :TICKET_PARSER ERROR: Ticket **** (00000000): Ticket is empty >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationUnavailable >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26015, MsgBus: 22133 2025-04-17T18:07:21.146663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345308065835027:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:21.158117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5c/r3tmp/tmpmLmp5r/pdisk_1.dat 2025-04-17T18:07:21.473983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:21.474070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:21.474676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:21.477626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26015, node 1 2025-04-17T18:07:21.560555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:21.560583Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:21.560594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:21.560679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22133 TClient is connected to server localhost:22133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:07:21.997436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:22.022232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:22.165391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:22.325980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:22.413362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:23.974063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345316655771182:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:23.974167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:24.259325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:24.285021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:24.312193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:24.340731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:24.366047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:24.398660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:24.428525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:24.507159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345320950739135:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:24.507256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:24.507430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345320950739140:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:24.511487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:07:24.523606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345320950739142:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:07:24.626165Z node 1 :TX_PROXY ERROR: Actor# [1:7494345320950739199:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:25.589702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:07:25.632820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:07:25.660102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:07:25.685498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:07:25.719164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:07:25.753632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:07:26.139426Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345308065835027:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:26.139476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10364, MsgBus: 14582 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5c/r3tmp/tmpSfD2kf/pdisk_1.dat 2025-04-17T18:07:27.317261Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:27.320640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 10364, node 2 2025-04-17T18:07:27.362899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:27.362968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:27.363669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:27.415778Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:27.415799Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:27.415806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:27.415928Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14582 
TClient is connected to server localhost:14582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:27.846621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:27.863271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:27.930491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:28.083928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:28.157614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:30.193394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345347003013644:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:30.193470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:30.254766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:30.285790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:30.311224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:30.338299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:30.364228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:30.395020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:30.462070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:30.549218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345347003014313:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:30.549306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:30.549614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345347003014318:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:30.552786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:07:30.563493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345347003014320:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:07:30.630846Z node 2 :TX_PROXY ERROR: Actor# [2:7494345347003014376:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:31.547528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:07:31.584934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:07:31.655283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:07:31.721560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:07:31.744819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:07:31.776322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> KqpFlipJoin::Inner_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2025-04-17T18:07:15.887311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345278835239821:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:15.887404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c96/r3tmp/tmp7mUK2e/pdisk_1.dat 2025-04-17T18:07:16.189475Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1777, node 1 2025-04-17T18:07:16.247216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:16.247335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:16.249070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:16.264351Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:16.264389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:16.264400Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:16.264540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:16.501993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:16.514904Z node 1 :TICKET_PARSER DEBUG: Ticket BF7E567148A7C512933557DD9C42F1261A2E1B097FC1F410E6EBDB2E2C3B5563 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:07:19.015749Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345299491447114:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:19.015811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c96/r3tmp/tmpAzUL3S/pdisk_1.dat 2025-04-17T18:07:19.133519Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:19.144965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:19.145061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:19.147059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2809, node 2 2025-04-17T18:07:19.214470Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:19.214495Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:19.214507Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:19.214603Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11200 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:19.416626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:19.426790Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:19.429940Z node 2 :TICKET_PARSER DEBUG: Ticket D92E4E8DF911F02D8F9B670520BE545B458961ABC7E0A47BB15E58EB2F341A2E () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:07:22.902594Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345311855020166:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:22.903130Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c96/r3tmp/tmpYDCU1H/pdisk_1.dat 2025-04-17T18:07:23.014684Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3146, node 3 2025-04-17T18:07:23.052717Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:23.052795Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:23.054565Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:23.068731Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:23.068760Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:23.068767Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:23.068901Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22667 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:23.260110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:23.265743Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:23.268078Z node 3 :TICKET_PARSER DEBUG: Ticket D66C37AC29EAC1E289E384A925727046D96EDECEDE31642E8C3CAAED8F3BFDAB () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-17T18:07:23.268652Z node 3 :TICKET_PARSER ERROR: Ticket D66C37AC29EAC1E289E384A925727046D96EDECEDE31642E8C3CAAED8F3BFDAB: Cannot create token from certificate. Client certificate failed verification 2025-04-17T18:07:26.560505Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345327103775303:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:26.560632Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c96/r3tmp/tmpjXIrB9/pdisk_1.dat 2025-04-17T18:07:26.691209Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:26.709365Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:26.709470Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:26.710992Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12139, node 4 2025-04-17T18:07:26.755482Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:26.755507Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:26.755520Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:26.755667Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21793 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:27.023984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:27.031547Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:27.035248Z node 4 :TICKET_PARSER DEBUG: Ticket 5565F6A698E6B4DCA55CDACA07BB04417C3983054D219A2520E6578386C8D10F () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:07:30.294326Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345346383702985:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:30.294956Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c96/r3tmp/tmpele240/pdisk_1.dat 2025-04-17T18:07:30.407292Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30071, node 5 2025-04-17T18:07:30.442071Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:30.442173Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:30.447934Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:30.476830Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:30.476855Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:30.476862Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:30.476985Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8049 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:30.743467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:30.753397Z node 5 :TICKET_PARSER DEBUG: Ticket 1672D735D67A79338CF61631F65F226EFB34BA629E27D2A6F0A36F382FB9EA83 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-17T18:07:30.754024Z node 5 :TICKET_PARSER ERROR: Ticket 1672D735D67A79338CF61631F65F226EFB34BA629E27D2A6F0A36F382FB9EA83: Cannot create token from certificate. Client certificate failed verification >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> TTicketParserTest::AuthorizationUnavailable [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> KqpJoin::JoinDupColumnRight >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> TKeyValueTest::TestConcatWorks >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2025-04-17T18:07:10.696841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345258219662056:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:10.697607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9b/r3tmp/tmpYueVbt/pdisk_1.dat 2025-04-17T18:07:11.031090Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21245, node 1 2025-04-17T18:07:11.068778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:11.069442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:11.078755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:11.123434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:11.123467Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:11.123474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:11.123599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12378 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:11.361742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:11.389724Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:11.389873Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:6781 2025-04-17T18:07:11.396282Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:11.411884Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-17T18:07:11.412028Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-17T18:07:11.412066Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:11.412113Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:11.412414Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:11.415659Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-17T18:07:11.418301Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-17T18:07:11.418337Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:12.707650Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:07:12.707683Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:12.708122Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:12.717352Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-17T18:07:12.717466Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-17T18:07:12.717488Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:13.709596Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-17T18:07:13.709642Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:13.710151Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:13.712185Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthorizeResponse { 
subject { user_account { id: "user1" } } } 2025-04-17T18:07:13.712485Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:13.712579Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-17T18:07:15.697461Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345258219662056:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:15.697627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:07:23.973463Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345316369885222:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:23.973553Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9b/r3tmp/tmp1XtwRw/pdisk_1.dat 2025-04-17T18:07:24.096898Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9288, node 2 2025-04-17T18:07:24.117784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:24.118842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:24.123808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:24.165440Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:24.165460Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:24.165469Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:24.165594Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62617 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:24.389426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
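The trace above shows TTicketParser's observable retry contract: gRPC status 14 (UNAVAILABLE) is recorded as a retryable error and the ticket is periodically refreshed until an AuthorizeResponse succeeds, while later in this section status 16 (Access Denied) is permanent and status 1 (CANCELLED) is retried. Below is a minimal C++ sketch of that classify-and-refresh loop; Classify() and the simulated responses are assumptions for illustration, not YDB's actual TTicketParser code.

    // Minimal sketch, not YDB's actual TTicketParser: classifying the gRPC
    // statuses visible in this trace (14 UNAVAILABLE and 1 CANCELLED are
    // retried, 16 PERMISSION_DENIED is permanent) and refreshing the ticket
    // until authorization succeeds. The doubling backoff is an assumption;
    // the trace itself refreshes at roughly one-second intervals.
    #include <chrono>
    #include <iostream>
    #include <thread>

    enum class Verdict { Ok, Retryable, Permanent };

    Verdict Classify(int grpcStatus) {           // hypothetical mapping
        switch (grpcStatus) {
            case 0:  return Verdict::Ok;         // OK
            case 1:  return Verdict::Retryable;  // CANCELLED
            case 14: return Verdict::Retryable;  // UNAVAILABLE
            case 16: return Verdict::Permanent;  // PERMISSION_DENIED
            default: return Verdict::Permanent;
        }
    }

    int main() {
        // Simulated AuthorizeRequest results: two UNAVAILABLE responses, then
        // success, mirroring "Service Unavailable ... has now valid token of
        // user1@as" above.
        int responses[] = {14, 14, 0};
        auto delay = std::chrono::milliseconds(100);
        for (int status : responses) {
            switch (Classify(status)) {
                case Verdict::Ok:
                    std::cout << "ticket now has a valid token\n";
                    return 0;
                case Verdict::Retryable:
                    std::cout << "retryable error, scheduling refresh\n";
                    std::this_thread::sleep_for(delay);
                    delay *= 2;  // back off before the next refresh
                    break;
                case Verdict::Permanent:
                    std::cout << "permanent error, giving up\n";
                    return 1;
            }
        }
        return 1;  // ran out of attempts
    }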
2025-04-17T18:07:24.395751Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:24.398051Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:24.398149Z node 2 :GRPC_CLIENT DEBUG: [5170000f0408] Connect to grpc://localhost:10352 2025-04-17T18:07:24.399168Z node 2 :GRPC_CLIENT DEBUG: [5170000f0408] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:24.417092Z node 2 :GRPC_CLIENT DEBUG: [5170000f0408] Status 14 Service Unavailable 2025-04-17T18:07:24.417294Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-17T18:07:24.417322Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:24.417341Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:24.417539Z node 2 :GRPC_CLIENT DEBUG: [5170000f0408] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:24.421113Z node 2 :GRPC_CLIENT DEBUG: [5170000f0408] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:24.421347Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:24.421431Z node 2 :TICKET_PARSE ... ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:31.001311Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
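Note also that the parser never logs the raw credential: "AKIAIOSFODNN7EXAMPLE" appears throughout only as "AKIA****MPLE (B3EDC139)", i.e. a short prefix and suffix plus a hash. A hedged sketch of that masking pattern follows; MaskTicket(), the cut points and the FNV-1a hash are all assumptions for illustration — the real YDB helper and its hash function are unspecified here, and the hash value will not match B3EDC139.

    // Hedged sketch only, not the actual YDB masking helper: keep the first
    // and last four characters of a long ticket and log a hash instead of
    // the secret itself, yielding the "AKIA****MPLE (XXXXXXXX)" shape seen
    // in the trace above.
    #include <cstdint>
    #include <iomanip>
    #include <iostream>
    #include <sstream>
    #include <string>

    std::string MaskTicket(const std::string& ticket) {
        // FNV-1a, chosen here only for brevity; the real hash is unknown.
        std::uint32_t h = 2166136261u;
        for (unsigned char c : ticket) { h ^= c; h *= 16777619u; }
        std::ostringstream out;
        if (ticket.size() > 8)
            out << ticket.substr(0, 4) << "****"
                << ticket.substr(ticket.size() - 4);
        else
            out << "****";  // too short to reveal any part of it
        out << " (" << std::hex << std::uppercase << std::setw(8)
            << std::setfill('0') << h << ")";
        return out.str();
    }

    int main() {
        std::cout << MaskTicket("AKIAIOSFODNN7EXAMPLE") << "\n";
    }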
2025-04-17T18:07:31.008632Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:31.008660Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:31.008678Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:31.008712Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:31.008777Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Connect to grpc://localhost:13135 2025-04-17T18:07:31.009551Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:31.017307Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:31.018272Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:31.018307Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-17T18:07:31.019663Z node 4 :GRPC_CLIENT DEBUG: [51700004e288] Connect to grpc://localhost:18874 2025-04-17T18:07:31.020522Z node 4 :GRPC_CLIENT DEBUG: [51700004e288] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-17T18:07:31.034026Z node 4 :GRPC_CLIENT DEBUG: [51700004e288] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-17T18:07:31.034429Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-17T18:07:31.036142Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:31.036170Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:31.036177Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:31.036206Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-17T18:07:31.036366Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:31.038614Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Status 16 Access Denied 2025-04-17T18:07:31.038731Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2025-04-17T18:07:31.038759Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-17T18:07:31.039419Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:31.039445Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:31.039454Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:31.039482Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:31.039526Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-17T18:07:31.039728Z 
node 4 :GRPC_CLIENT DEBUG: [51700004db88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:31.040294Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:31.041776Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:31.041863Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:31.041903Z node 4 :GRPC_CLIENT DEBUG: [51700004db88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:31.041933Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-17T18:07:31.041950Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-17T18:07:31.042085Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-17T18:07:34.224906Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345364165838199:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:34.225495Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9b/r3tmp/tmpzqGONF/pdisk_1.dat 2025-04-17T18:07:34.317142Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13152, node 5 2025-04-17T18:07:34.348183Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:34.348304Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:34.355042Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:34.376085Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:34.376106Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:34.376112Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:34.376235Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14642 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:34.611369Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:34.617528Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:34.619105Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:34.619135Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:34.619147Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:34.619182Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:34.619223Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-17T18:07:34.619265Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Connect to grpc://localhost:4563 2025-04-17T18:07:34.620131Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:34.620443Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:34.641612Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Status 14 Service Unavailable 2025-04-17T18:07:34.641801Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-04-17T18:07:34.641922Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-17T18:07:34.642127Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-17T18:07:34.642203Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:34.642235Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-17T18:07:34.642283Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 
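Taken together, the node 4 and node 5 runs above pin down how per-permission results roll up into a single ticket state: when every check succeeds the ticket gets a valid token; a permanent failure on any permission (Access Denied on something.write) makes the whole ticket permanently failed even though something.read succeeded; otherwise a retryable failure (Service Unavailable on something.write) leaves the ticket with a retryable error. A small sketch of that precedence — Outcome and Combine() are illustrative names only, not YDB code:

    // Illustrative sketch of the precedence visible in the trace:
    // permanent beats retryable, retryable beats valid.
    #include <iostream>
    #include <vector>

    enum class Outcome { Valid, Retryable, Permanent };

    Outcome Combine(const std::vector<Outcome>& perPermission) {
        Outcome result = Outcome::Valid;
        for (Outcome o : perPermission) {
            if (o == Outcome::Permanent) return Outcome::Permanent;
            if (o == Outcome::Retryable) result = Outcome::Retryable;
        }
        return result;
    }

    int main() {
        // something.read succeeded, something.write hit Service Unavailable,
        // so the ticket as a whole gets a retryable error (as logged above).
        std::vector<Outcome> checks = {Outcome::Valid, Outcome::Retryable};
        switch (Combine(checks)) {
            case Outcome::Valid:     std::cout << "valid token\n"; break;
            case Outcome::Retryable: std::cout << "retryable error\n"; break;
            case Outcome::Permanent: std::cout << "permanent error\n"; break;
        }
    }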
2025-04-17T18:07:34.642474Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:34.643005Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-17T18:07:34.649178Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Status 1 CANCELLED 2025-04-17T18:07:34.649305Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2025-04-17T18:07:34.649377Z node 5 :GRPC_CLIENT DEBUG: [517000042908] Status 1 CANCELLED 2025-04-17T18:07:34.649602Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2025-04-17T18:07:34.649634Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: 2025-04-17T18:07:27.188384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:27.188773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:27.188843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001045/r3tmp/tmpHOsSuq/pdisk_1.dat 2025-04-17T18:07:27.530083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:27.575620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:27.620603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:27.620737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:27.632125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:27.713554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:27.766999Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-17T18:07:27.767260Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:27.813478Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:27.813652Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:27.815339Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:27.815423Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:27.815480Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:27.815847Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:27.816168Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:27.816242Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-04-17T18:07:27.817762Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-04-17T18:07:27.817978Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:27.827922Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-04-17T18:07:27.828106Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:27.836931Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:27.837081Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:27.838436Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:07:27.838499Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:07:27.838538Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:07:27.838823Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:27.838975Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:27.839026Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:739:2588] in generation 1 2025-04-17T18:07:27.839419Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:27.839490Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:27.840653Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-17T18:07:27.840706Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-17T18:07:27.840745Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-17T18:07:27.841032Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:27.841119Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:27.841189Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-04-17T18:07:27.854671Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:27.885250Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:27.885457Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:27.885580Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-04-17T18:07:27.885615Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:27.885661Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:27.885695Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:27.885990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:27.886037Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:07:27.886085Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:27.886133Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-04-17T18:07:27.886172Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:07:27.886214Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:07:27.886236Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:07:27.886558Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:27.886587Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-17T18:07:27.886634Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:27.886696Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-04-17T18:07:27.886721Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T18:07:27.886739Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-17T18:07:27.886758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:07:27.886965Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:27.887057Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:27.887202Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:27.887238Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.887273Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:27.887307Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:27.887345Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:07:27.887401Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:07:27.887811Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-17T18:07:27.887865Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:07:27.887890Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.887912Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:07:27.887941Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:07:27.887984Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-17T18:07:27.888053Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-17T18:07:27.888256Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:27.888478Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:27.888562Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:27.888940Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:711:2600], sessionId# [0:0:0] 2025-04-17T18:07:27.888979Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:07:27.889001Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:27.889022Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-17T18:07:27.889066Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:07:27.889213Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:07:27.889346Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:07:27.889394Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:07:27.889693Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:716:2604], sessionId# [0:0:0] 2025-04-17T18:07:27.889882Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-17T18:07:27.889980Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-17T18:07:27.890041Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-17T18:07:27.891759Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:27.891834Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:07:27.891878Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:07:27.902813Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:27.902899Z nod ... ode 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:36.928371Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:36.928414Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:36.928630Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:36.928752Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:36.929792Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:36.929854Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:36.930393Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:36.930777Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:36.932125Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:36.932169Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:36.932624Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:36.932682Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:36.933267Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:36.933310Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:36.933359Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:36.933416Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:36.933463Z 
node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:36.933565Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:36.934799Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:36.936840Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:36.936897Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:36.937388Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:36.945941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:36.946068Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:36.946653Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:36.952408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:36.961097Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.116192Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.123813Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:37.158632Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:37.261233Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eymfg5pq250sbxw13krdj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDM2MDY1NmQtNmJlZjhkNjItYWViMGRkYWYtYzQ1ZDA4OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:37.263653Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:37.263980Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:37.264124Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-04-17T18:07:37.275024Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.471640Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2eymtccw1294784b04x1k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2U2NjY3OGUtNTJjNDFlMmMtNGFlOGNiNTAtN2Y2MGU5ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:37.474094Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-04-17T18:07:37.481443Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:895:2718], serverId# [3:896:2719], sessionId# [0:0:0] 2025-04-17T18:07:37.482489Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:37.493956Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:37.494036Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.494104Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-17T18:07:37.494823Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-17T18:07:37.494891Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.495078Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:37.495142Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-04-17T18:07:37.495341Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.495386Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.495438Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:37.495494Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.495650Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:895:2718], serverId# [3:896:2719], sessionId# 
[0:0:0] 2025-04-17T18:07:37.542717Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2eyn0r0td7x0hpfs2t3kdd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2U2NjY3OGUtNTJjNDFlMmMtNGFlOGNiNTAtN2Y2MGU5ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:37.545230Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-17T18:07:37.545426Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-17T18:07:37.553971Z node 3 :TX_DATASHARD INFO: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-04-17T18:07:37.554245Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:07:37.554440Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:07:37.554532Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.554784Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:917:2693], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:863:2693]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:917:2693].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:07:37.555342Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:910:2693], SessionActorId: [3:863:2693], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:863:2693]. isRollback=0 2025-04-17T18:07:37.555747Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2U2NjY3OGUtNTJjNDFlMmMtNGFlOGNiNTAtN2Y2MGU5ZWQ=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01js2eyn0r0td7x0hpfs2t3kdd, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:911:2693] from: [3:910:2693] 2025-04-17T18:07:37.555936Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:911:2693] TxId: 281474976715662. Ctx: { TraceId: 01js2eyn0r0td7x0hpfs2t3kdd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2U2NjY3OGUtNTJjNDFlMmMtNGFlOGNiNTAtN2Y2MGU5ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:07:37.556266Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:7] at 72075186224037888 2025-04-17T18:07:37.556314Z node 3 :TX_DATASHARD DEBUG: Skip empty write operation for [0:7] at 72075186224037888 2025-04-17T18:07:37.556443Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.556587Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2U2NjY3OGUtNTJjNDFlMmMtNGFlOGNiNTAtN2Y2MGU5ZWQ=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01js2eyn0r0td7x0hpfs2t3kdd, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: 2025-04-17T18:07:32.124578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:32.124977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:32.125020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001039/r3tmp/tmp4s2V9V/pdisk_1.dat 2025-04-17T18:07:32.509842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:32.556527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:32.595868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:32.596017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:32.608155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:32.689001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:32.731015Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:32.731253Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:32.772134Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:32.772243Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:32.773882Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:32.773942Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:32.773994Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:32.774292Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:32.774404Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:32.774469Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:32.785220Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:32.803901Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:32.804092Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:32.804214Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:32.804245Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:32.804267Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:32.804292Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.804740Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:32.804866Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:32.804958Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.804986Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:32.805016Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:32.805042Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.805115Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:32.805572Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:32.805787Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:32.805886Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:32.807594Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:32.818712Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:32.818794Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:32.969208Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:32.973367Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:32.973438Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.974085Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.974124Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:32.974158Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:32.974359Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:32.974478Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:32.975084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.975133Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:32.976633Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:32.976993Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
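Every datashard trace in this section walks the same scheme-transaction pipeline: the shard boots into WaitScheme, accepts the proposed transaction, waits for the coordinator to plan it at a step (1000 here), executes the CREATE TABLE, then switches to Ready and notifies the schemeshard. The following is a compressed, purely illustrative state machine of those logged transitions, not the real TDataShard implementation:

    // Illustrative model only (not TDataShard): the state transitions
    // visible in the trace above for a scheme transaction.
    #include <iostream>

    enum class ShardState { WaitScheme, Proposed, Planned, Ready };

    ShardState Step(ShardState s) {
        switch (s) {
            case ShardState::WaitScheme:
                std::cout << "Propose scheme transaction\n";
                return ShardState::Proposed;
            case ShardState::Proposed:
                std::cout << "Planned transaction at step 1000\n";
                return ShardState::Planned;
            case ShardState::Planned:
                std::cout << "Trying to CREATE TABLE; switching to Ready\n";
                return ShardState::Ready;
            case ShardState::Ready:
                std::cout << "Sending notify to schemeshard\n";
                return ShardState::Ready;
        }
        return s;
    }

    int main() {
        ShardState s = ShardState::WaitScheme;
        for (int i = 0; i < 4; ++i) s = Step(s);
    }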
2025-04-17T18:07:32.978192Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:32.978241Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.978688Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:32.978748Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.979715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.979767Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:32.979826Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:32.979872Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:32.979914Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:32.979991Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.982906Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:32.984431Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:32.984886Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:32.984967Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:32.992262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:32.992368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:32.992429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:32.996773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:33.001472Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:33.153389Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:33.156328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:33.244892Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:33.577172Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eygkyczx82hbsq02hekyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg4MWJkNjctNjVlM2I4YS04MmMyYWRkOS1lYWJhYjJhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:33.583247Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:33.583683Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:33.583857Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-17T18:07:37.079397Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.079441Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:37.079486Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.079601Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-04-17T18:07:37.080035Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:37.080236Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:37.080312Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:37.082089Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.092853Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:37.092981Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:37.244307Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:37.244703Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:37.244753Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.245162Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.245217Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:37.245264Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:37.245491Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:37.245664Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:37.245844Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.245911Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:37.246444Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:37.246860Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.249377Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:37.249429Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.250029Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:37.250109Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.250894Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.250945Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:37.250998Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:37.251061Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:37.251129Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:37.251226Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.253076Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.254792Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:37.255523Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:37.255590Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:37.263810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:37.263933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:746:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:37.264003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:37.269140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:37.275221Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.427615Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.430666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:37.465199Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:37.551543Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eymse8pvn45addgdcrg6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjZkMTEwZGUtOWU4YThiMTgtNjQ4YjI1ZGItZWI0NDJkZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:37.560562Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:37.560950Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:37.561119Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-17T18:07:37.574795Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.577761Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:37.578864Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:37.590051Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:37.590138Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.590378Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:37.590416Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-17T18:07:37.590685Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.590737Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.590780Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:37.590836Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.590921Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:37.591845Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:37.592214Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:37.592430Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.592475Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:37.592523Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:37.592762Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:37.592827Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-17T18:07:37.593323Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:07:37.593575Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:37.593765Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-17T18:07:37.593822Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-17T18:07:37.613404Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:37.613460Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-17T18:07:37.613583Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.613607Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:37.613638Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:37.613740Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.613788Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.613821Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: 2025-04-17T18:07:32.256187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:32.256599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:32.256661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001041/r3tmp/tmpvtH5cM/pdisk_1.dat 2025-04-17T18:07:32.625366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:32.674331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:32.715221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:32.715378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:32.727551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:32.814118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:32.853290Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:32.853485Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:32.893532Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:32.893655Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:32.895128Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:32.895203Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:32.895252Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:32.895592Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:32.895725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:32.895807Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:32.906602Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:32.933522Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:32.933756Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:32.933906Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:32.933958Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:32.934007Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:32.934041Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:32.934485Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:32.934630Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:32.934786Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:32.934833Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:32.934875Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:32.934917Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:32.935003Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:32.935457Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:32.935683Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:32.935774Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:32.937456Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:32.948216Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:32.948328Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:33.100591Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:33.110974Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:33.111073Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:33.111952Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:33.112001Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:33.112068Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:33.112342Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:33.112523Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:33.113437Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:33.113525Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:33.115824Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:33.116275Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:07:33.118087Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:33.118143Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:33.118722Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:33.118840Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:33.120162Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:33.120209Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:33.120295Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:33.120357Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:33.120414Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:33.120494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:33.124315Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:33.126332Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:33.126924Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:33.127032Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:33.136646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:33.136794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:33.136881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:33.142521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:33.148443Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:33.308503Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:33.311270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:33.394252Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:33.749282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eygrefbn2e8ca4cdrcda7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwOTQzZmMtZGJjYWFiZTgtMTFjYWU2NmEtYTMzY2QwYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:33.755901Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:33.756400Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:33.756575Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 88 2025-04-17T18:07:37.352041Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.352088Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:37.352131Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.352261Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-04-17T18:07:37.352711Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:37.352936Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:37.353023Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:37.356015Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.366834Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:37.366952Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:37.517274Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:37.517752Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:37.517806Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.518335Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.518388Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:37.518439Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:37.518703Z node 2 :TX_DATASHARD 
DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:37.518891Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:37.519082Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.519146Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:37.519636Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:37.520062Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.522671Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:37.522728Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.523296Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:37.523373Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.524143Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.524182Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:37.524229Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:37.524292Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:37.524344Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:37.524423Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.526135Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.527903Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:37.528591Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:37.528664Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:37.535130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:37.535204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:746:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:37.535270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:37.539206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:37.546239Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.699604Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:37.702586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:37.739978Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:37.811032Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eyn1x5x8eydz3zbr8fyn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGE5YmY3NC05ZTUyODgwMy0yYjY0NzJiLWQyNThhOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:37.813433Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:37.813805Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:37.813982Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-04-17T18:07:37.825364Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.829117Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:37.830156Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:37.841393Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:37.841490Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:37.841737Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:37.841774Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-17T18:07:37.842033Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.842078Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.842127Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:37.842207Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.842288Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:37.843098Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:37.843379Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:37.843539Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.843578Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:37.843632Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:37.843829Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:37.843900Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-17T18:07:37.844379Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:07:37.844596Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:37.844773Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-17T18:07:37.844815Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-17T18:07:37.867567Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:37.867636Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-17T18:07:37.867796Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:37.867829Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:37.867865Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:37.867980Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:37.868040Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:37.868078Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> TExtSubDomainTest::GenericCases >> TExtSubDomainTest::DeclareAndDrop >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> KqpFlipJoin::Inner_3 [GOOD] >> KqpFlipJoin::LeftSemi_1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> 
DistributedEraseTests::ConditionalEraseRowsCheckLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2025-04-17T18:07:07.457390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345246392102751:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:07.457456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9e/r3tmp/tmppP3q91/pdisk_1.dat 2025-04-17T18:07:07.808600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:07.808719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:07.810433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:07.834218Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20495, node 1 2025-04-17T18:07:07.843409Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:07:07.882922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:07.882948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:07.882959Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:07.883101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:08.161685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:08.461460Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 2025-04-17T18:07:08.480660Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:07:08.480701Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:08.481291Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****UdTA (DEEDEB69) () has now retryable error message 'Security state is empty' 2025-04-17T18:07:08.481481Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:07:08.481494Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:08.481872Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****UdTA (DEEDEB69) () has now retryable error message 'Security state is empty' 2025-04-17T18:07:08.481885Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-17T18:07:08.481900Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-17T18:07:08.481927Z node 1 :TICKET_PARSER ERROR: Ticket eyJh****UdTA (DEEDEB69): Security state is empty 2025-04-17T18:07:10.471401Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****UdTA (DEEDEB69) 2025-04-17T18:07:10.471667Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:07:10.471685Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:10.471908Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****UdTA (DEEDEB69) () has now retryable error message 'Security state is empty' 2025-04-17T18:07:10.471920Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-17T18:07:11.482357Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:07:12.457704Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345246392102751:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:12.457827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:07:13.474279Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****UdTA (DEEDEB69) 2025-04-17T18:07:13.474493Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:07:13.474506Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:13.475242Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****UdTA (DEEDEB69) () has now valid token of user1 2025-04-17T18:07:13.475253Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-17T18:07:18.478400Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****UdTA (DEEDEB69) 2025-04-17T18:07:18.478811Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****UdTA (DEEDEB69) () has now valid token of user1 2025-04-17T18:07:19.063016Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345299960704498:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:19.063072Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9e/r3tmp/tmp9ORk3S/pdisk_1.dat 2025-04-17T18:07:19.193626Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:19.232331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:19.232453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:19.234399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14437, node 2 2025-04-17T18:07:19.289876Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:19.289898Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:19.289910Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:19.290018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:19.518724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:19.526084Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:19.526166Z node 2 :GRPC_CLIENT DEBUG: [51700007a588] Connect to grpc://localhost:64647 2025-04-17T18:07:19.528750Z node 2 :GRPC_CLIENT DEBUG: [51700007a588] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-17T18:07:19.554413Z node 2 :GRPC_CLIENT DEBUG: [51700007a588] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-17T18:07:19.554631Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:22.253378Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345310155935895:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:22.253446Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9e/r3tmp/tmpHDqG7o/pdisk_1.dat 2025-04-17T18:07:22.372919Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19905, node 3 2025-04-17T18:07:22.401261Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:22.401343Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:22.404213Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:22.442830Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:22.442849Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:22.442855Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:22.442986Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:22.643679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:22.652351Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:22.652381Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:22.652392Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:22.652421Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:22.652470Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Connect to grpc://localhost:14007 2025-04-17T18:07:22.653469Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-17T18:07:22.664739Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Status 14 Service Unavailable 2025-04-17T18:07:22.665168Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:22.665198Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:22.665342Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-17T18:07:22.666731Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Status 14 Service Unavailable 2025-04-17T18:07:22.666826Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:23.259868Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-17T18:07:23.259917Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:23.260056Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-17T18:07:23.263317Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Status 14 Service Unavailable 2025-04-17T18:07:23.263561Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' NebiusAccessService::Authenticate request 2025-04-17T18:07:25.260767Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) iam_token: "user1" 2025-04-17T18:07:25.260799Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication NebiusAccessService::Authenticate response 2025-04-17T18:07:25.260942Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Request AuthenticateRequest { iam_token: "**** (8E120919)" } account { user_account { id: "user1" } } 0: "" 2025-04-17T18:07:25.263199Z node 3 :GRPC_CLIENT DEBUG: [51700001ee88] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-17T18:07:25.263367Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:27.253580Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345310155935895:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:27.253669Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:07:35.340690Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345366398279227:2061];send_to=[0:7307199536658146131:7762515]; 
2025-04-17T18:07:35.340736Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9e/r3tmp/tmpNvB6JO/pdisk_1.dat 2025-04-17T18:07:35.466958Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:35.497995Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:35.498100Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:35.499942Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23744, node 4 2025-04-17T18:07:35.558826Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:35.558856Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:35.558865Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:35.558990Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:35.856127Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:07:35.863205Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:35.866985Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-17T18:07:35.867036Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:07:35.867046Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:07:35.867095Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:35.867142Z node 4 :GRPC_CLIENT DEBUG: [5170000d2488] Connect to grpc://localhost:61287 2025-04-17T18:07:35.868076Z node 4 :GRPC_CLIENT DEBUG: [5170000d2488] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-17T18:07:35.893504Z node 4 :GRPC_CLIENT DEBUG: [5170000d2488] Status 14 Service Unavailable 2025-04-17T18:07:35.894283Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-17T18:07:35.894317Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-17T18:07:35.894477Z node 4 :GRPC_CLIENT DEBUG: [5170000d2488] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-17T18:07:35.906614Z node 4 :GRPC_CLIENT DEBUG: [5170000d2488] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-17T18:07:35.910470Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-17T18:07:39.088141Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345384983347136:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:39.088301Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9e/r3tmp/tmpJ8cxrS/pdisk_1.dat 2025-04-17T18:07:39.193015Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:39.253123Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:39.253200Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18014, node 5 2025-04-17T18:07:39.254737Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:39.284716Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:39.284743Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:39.284753Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:39.284892Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:39.510416Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:39.522260Z node 5 :TICKET_PARSER ERROR: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> TExtSubDomainTest::DeclareAndDrop [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] |87.6%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> KqpJoin::JoinDupColumnRight [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop [GOOD] Test command err: 2025-04-17T18:07:40.917057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345386954720028:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:40.917152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d01/r3tmp/tmp2mKtzG/pdisk_1.dat 2025-04-17T18:07:41.303979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:41.337675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:41.337818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T18:07:41.339494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27858 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:07:41.530303Z node 1 :TX_PROXY DEBUG: actor# [1:7494345386954720266:2103] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:41.530353Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345391249687843:2257] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:41.531409Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345391249687585:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:41.531459Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345391249687585:2116], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:41.531686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:41.533531Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719961:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345391249687848:2258] 2025-04-17T18:07:41.533608Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386954719961:2049] Subscribe: subscriber# [1:7494345391249687848:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.533684Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719964:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345391249687849:2258] 2025-04-17T18:07:41.533700Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386954719964:2052] Subscribe: subscriber# [1:7494345391249687849:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.533736Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719967:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345391249687850:2258] 2025-04-17T18:07:41.533752Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386954719967:2055] Subscribe: subscriber# [1:7494345391249687850:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.533792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687848:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345386954719961:2049] 2025-04-17T18:07:41.533815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687849:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345386954719964:2052] 2025-04-17T18:07:41.533830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687850:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345386954719967:2055] 2025-04-17T18:07:41.533868Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: 
[OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345391249687845:2258] 2025-04-17T18:07:41.533927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345391249687846:2258] 2025-04-17T18:07:41.533975Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345391249687844:2258][/dc-1] Set up state: owner# [1:7494345391249687585:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.534170Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719961:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345391249687848:2258] 2025-04-17T18:07:41.534227Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719964:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345391249687849:2258] 2025-04-17T18:07:41.534242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719967:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345391249687850:2258] 2025-04-17T18:07:41.535113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345391249687847:2258] 2025-04-17T18:07:41.535176Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345391249687844:2258][/dc-1] Path was already updated: owner# [1:7494345391249687585:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.535220Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687848:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687845:2258], cookie# 1 2025-04-17T18:07:41.535234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687849:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687846:2258], cookie# 1 2025-04-17T18:07:41.535245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687850:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687847:2258], cookie# 1 2025-04-17T18:07:41.535995Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719961:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687848:2258], cookie# 1 2025-04-17T18:07:41.536026Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719964:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687849:2258], cookie# 1 2025-04-17T18:07:41.536046Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719967:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687850:2258], cookie# 1 2025-04-17T18:07:41.536087Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687848:2258][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345386954719961:2049], cookie# 1 2025-04-17T18:07:41.536100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687849:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345386954719964:2052], cookie# 1 2025-04-17T18:07:41.536112Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687850:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345386954719967:2055], cookie# 1 2025-04-17T18:07:41.536178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345391249687845:2258], cookie# 1 2025-04-17T18:07:41.536212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:41.536242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345391249687846:2258], cookie# 1 2025-04-17T18:07:41.536267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:41.536288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345391249687847:2258], cookie# 1 2025-04-17T18:07:41.536299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Unexpected sync response: sender# [1:7494345391249687847:2258], cookie# 1 2025-04-17T18:07:41.583395Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345391249687585:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:41.584622Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345391249687585:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: 
Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... d: 281474976710659, ready parts: 1/1, is published: true 2025-04-17T18:07:41.792230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7494345391249687910:2297] message: TxId: 281474976710659 2025-04-17T18:07:41.792253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-04-17T18:07:41.792266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-17T18:07:41.792300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710659:0 2025-04-17T18:07:41.792331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:07:41.792542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-17T18:07:41.792570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-17T18:07:41.792630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:07:41.792644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:07:41.792682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:07:41.792853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:07:41.792868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:07:41.792906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:07:41.793328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:07:41.793349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:07:41.794371Z node 1 :TX_PROXY DEBUG: actor# [1:7494345386954720266:2103] Handle TEvNavigate describe path /dc-1 2025-04-17T18:07:41.794397Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345391249687927:2318] HANDLE EvNavigateScheme /dc-1 2025-04-17T18:07:41.794461Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345391249687585:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:41.794524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494345391249687585:2116], cookie# 4 2025-04-17T18:07:41.794568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687848:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687845:2258], cookie# 4 2025-04-17T18:07:41.794594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687849:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687846:2258], cookie# 4 2025-04-17T18:07:41.794617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687850:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687847:2258], cookie# 4 2025-04-17T18:07:41.794623Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719961:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687848:2258], cookie# 4 2025-04-17T18:07:41.794657Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719964:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687849:2258], cookie# 4 2025-04-17T18:07:41.794657Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386954719967:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391249687850:2258], cookie# 4 2025-04-17T18:07:41.794689Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687848:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7494345386954719961:2049], cookie# 4 2025-04-17T18:07:41.794702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687850:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7494345386954719967:2055], cookie# 4 2025-04-17T18:07:41.794714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391249687849:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7494345386954719964:2052], cookie# 4 2025-04-17T18:07:41.794786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7494345391249687845:2258], cookie# 4 2025-04-17T18:07:41.794810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:41.794826Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7494345391249687847:2258], cookie# 4 2025-04-17T18:07:41.794840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:41.794882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7494345391249687846:2258], cookie# 4 2025-04-17T18:07:41.794894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391249687844:2258][/dc-1] 
Unexpected sync response: sender# [1:7494345391249687846:2258], cookie# 4 2025-04-17T18:07:41.794939Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345391249687585:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T18:07:41.795020Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345391249687585:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494345391249687844:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913261798 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:41.795106Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345391249687585:2116], cacheItem# { Subscriber: { Subscriber: [1:7494345391249687844:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913261798 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-17T18:07:41.795224Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345391249687928:2319], recipient# [1:7494345391249687927:2318], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:07:41.795257Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345391249687927:2318] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:41.795311Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345391249687927:2318] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-17T18:07:41.795746Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345391249687927:2318] Handle TEvDescribeSchemeResult Forward to# [1:7494345391249687926:2317] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913261798 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913261798 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRight [GOOD] Test command err: Trying to start YDB, gRPC: 18682, MsgBus: 17418 2025-04-17T18:07:38.131664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345381701542518:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:38.132975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5a/r3tmp/tmpxTk5JT/pdisk_1.dat 2025-04-17T18:07:38.436105Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18682, node 1 2025-04-17T18:07:38.523568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:38.523684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:38.526839Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:38.526860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:38.526873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:38.526986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:07:38.528082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17418 TClient is connected to server localhost:17418 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:39.022958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:39.035282Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:07:39.048365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:39.171074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:39.308261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:39.385845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:40.951110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345390291478826:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:40.951267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:41.222343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:41.255775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:41.287201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:41.314136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:41.388446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:41.433930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:41.503203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:41.578698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345394586446791:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:41.578760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:41.579012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345394586446796:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:41.583568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:07:41.592825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345394586446798:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:07:41.686027Z node 1 :TX_PROXY ERROR: Actor# [1:7494345394586446856:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:42.667231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:07:42.706253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:07:42.740999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:07:43.131342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345381701542518:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:43.131426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-17T18:07:41.239674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345394251490042:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:41.239748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cfd/r3tmp/tmpGFpR99/pdisk_1.dat 2025-04-17T18:07:41.658625Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:41.678125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:41.678497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:41.685359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15652 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-17T18:07:41.876305Z node 1 :TX_PROXY DEBUG: actor# [1:7494345394251490285:2109] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:41.876348Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345394251490776:2434] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:41.876467Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345394251490314:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:41.876500Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345394251490314:2124], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:41.876671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:41.878471Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489986:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345394251490781:2435] 2025-04-17T18:07:41.878518Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345394251489986:2049] Subscribe: subscriber# [1:7494345394251490781:2435], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.878583Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489989:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345394251490782:2435] 2025-04-17T18:07:41.878601Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345394251489989:2052] Subscribe: subscriber# [1:7494345394251490782:2435], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.878620Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489992:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345394251490783:2435] 2025-04-17T18:07:41.878644Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345394251489992:2055] Subscribe: subscriber# [1:7494345394251490783:2435], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.878700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490781:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345394251489986:2049] 2025-04-17T18:07:41.878723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490782:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345394251489989:2052] 2025-04-17T18:07:41.878739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490783:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345394251489992:2055] 2025-04-17T18:07:41.878773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345394251490778:2435] 2025-04-17T18:07:41.878806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345394251490779:2435] 2025-04-17T18:07:41.878862Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345394251490777:2435][/dc-1] Set up state: owner# [1:7494345394251490314:2124], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.878954Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345394251490780:2435] 2025-04-17T18:07:41.878991Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345394251490777:2435][/dc-1] Path was already updated: owner# [1:7494345394251490314:2124], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.879027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490781:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490778:2435], cookie# 1 2025-04-17T18:07:41.879059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490782:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490779:2435], cookie# 1 2025-04-17T18:07:41.879072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490783:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490780:2435], cookie# 1 2025-04-17T18:07:41.879102Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489986:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345394251490781:2435] 2025-04-17T18:07:41.879123Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489986:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490781:2435], cookie# 1 2025-04-17T18:07:41.879140Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489989:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345394251490782:2435] 2025-04-17T18:07:41.879152Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489989:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490782:2435], cookie# 1 2025-04-17T18:07:41.879164Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489992:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345394251490783:2435] 2025-04-17T18:07:41.879174Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489992:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490783:2435], cookie# 1 2025-04-17T18:07:41.883156Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490781:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345394251489986:2049], cookie# 1 2025-04-17T18:07:41.883190Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490782:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345394251489989:2052], cookie# 1 2025-04-17T18:07:41.883218Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490783:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345394251489992:2055], cookie# 1 2025-04-17T18:07:41.883264Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345394251490778:2435], cookie# 1 2025-04-17T18:07:41.883298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:41.883312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345394251490779:2435], cookie# 1 2025-04-17T18:07:41.883331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:41.883350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345394251490780:2435], cookie# 1 2025-04-17T18:07:41.883360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Unexpected sync response: sender# [1:7494345394251490780:2435], cookie# 1 2025-04-17T18:07:41.978023Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345394251490314:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:41.978532Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345394251490314:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345394251490314:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494345398546458268:2586] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744913262204 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:42.721841Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345394251490314:2124], cacheItem# { Subscriber: { Subscriber: [1:7494345398546458268:2586] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744913262204 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-17T18:07:42.722005Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345398546458415:2677], recipient# [1:7494345398546458414:2676], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:07:42.722033Z node 1 :TX_PROXY INFO: Actor# [1:7494345398546458414:2676] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-17T18:07:42.734486Z node 1 :TX_PROXY DEBUG: actor# [1:7494345394251490285:2109] Handle TEvNavigate describe path /dc-1 2025-04-17T18:07:42.734520Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345398546458420:2679] HANDLE EvNavigateScheme /dc-1 2025-04-17T18:07:42.734598Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345394251490314:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:42.734672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494345394251490314:2124], cookie# 4 2025-04-17T18:07:42.734720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490781:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490778:2435], cookie# 4 2025-04-17T18:07:42.734735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490782:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490779:2435], cookie# 4 2025-04-17T18:07:42.734749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490783:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490780:2435], cookie# 4 2025-04-17T18:07:42.734770Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489986:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490781:2435], cookie# 4 2025-04-17T18:07:42.734794Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489989:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490782:2435], cookie# 4 2025-04-17T18:07:42.734809Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345394251489992:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345394251490783:2435], cookie# 4 2025-04-17T18:07:42.734834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490781:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345394251489986:2049], cookie# 4 2025-04-17T18:07:42.734847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490782:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345394251489989:2052], cookie# 4 2025-04-17T18:07:42.734861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345394251490783:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345394251489992:2055], cookie# 4 2025-04-17T18:07:42.734882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345394251490778:2435], cookie# 4 2025-04-17T18:07:42.734907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:42.734922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345394251490779:2435], cookie# 4 2025-04-17T18:07:42.734936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:42.734964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345394251490780:2435], cookie# 4 2025-04-17T18:07:42.734974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345394251490777:2435][/dc-1] Unexpected sync response: sender# [1:7494345394251490780:2435], cookie# 4 2025-04-17T18:07:42.735011Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345394251490314:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 
PathId: Partial: 0 } 2025-04-17T18:07:42.735071Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345394251490314:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494345394251490777:2435] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913262169 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:42.735119Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345394251490314:2124], cacheItem# { Subscriber: { Subscriber: [1:7494345394251490777:2435] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913262169 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-17T18:07:42.735230Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345398546458421:2680], recipient# [1:7494345398546458420:2679], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:07:42.735251Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345398546458420:2679] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:42.735306Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345398546458420:2679] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-17T18:07:42.735832Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345398546458420:2679] Handle TEvDescribeSchemeResult Forward to# [1:7494345398546458419:2678] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913262169 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 
72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913262169 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913262204 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::GenericCases [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2025-04-17T18:07:41.054981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345390934731434:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:41.055607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d05/r3tmp/tmpP3FvNB/pdisk_1.dat 2025-04-17T18:07:41.492704Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:41.497623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:41.497724Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:41.514101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10840 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:07:41.661131Z node 1 :TX_PROXY DEBUG: actor# [1:7494345390934731658:2115] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:41.661176Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345390934732142:2447] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:41.661303Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345390934731706:2139], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:41.661329Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345390934731706:2139], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:41.661501Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:41.662851Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764041:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345390934732147:2448] 2025-04-17T18:07:41.662865Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764044:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345390934732148:2448] 2025-04-17T18:07:41.662903Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386639764044:2053] Subscribe: subscriber# [1:7494345390934732148:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.662922Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386639764041:2050] Subscribe: subscriber# [1:7494345390934732147:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.662958Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764047:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345390934732149:2448] 2025-04-17T18:07:41.662972Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386639764047:2056] Subscribe: subscriber# [1:7494345390934732149:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.662980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732148:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345386639764044:2053] 2025-04-17T18:07:41.663005Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764044:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345390934732148:2448] 2025-04-17T18:07:41.663052Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732147:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345386639764041:2050] 2025-04-17T18:07:41.663082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732149:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345386639764047:2056] 2025-04-17T18:07:41.663089Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764041:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345390934732147:2448] 2025-04-17T18:07:41.663102Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764047:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345390934732149:2448] 2025-04-17T18:07:41.663117Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345390934732145:2448] 2025-04-17T18:07:41.663152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345390934732144:2448] 2025-04-17T18:07:41.663184Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345390934732143:2448][/dc-1] Set up state: owner# [1:7494345390934731706:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.663280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345390934732146:2448] 2025-04-17T18:07:41.663336Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345390934732143:2448][/dc-1] Path was already updated: owner# [1:7494345390934731706:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.663383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732147:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345390934732144:2448], cookie# 1 2025-04-17T18:07:41.663400Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732148:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345390934732145:2448], cookie# 1 2025-04-17T18:07:41.663412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732149:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345390934732146:2448], cookie# 1 2025-04-17T18:07:41.663440Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764041:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345390934732147:2448], cookie# 1 2025-04-17T18:07:41.663468Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764044:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345390934732148:2448], cookie# 1 2025-04-17T18:07:41.663506Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764047:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345390934732149:2448], cookie# 1 2025-04-17T18:07:41.663537Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732147:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345386639764041:2050], cookie# 1 2025-04-17T18:07:41.663557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732148:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345386639764044:2053], cookie# 1 2025-04-17T18:07:41.663569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345390934732149:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345386639764047:2056], cookie# 1 2025-04-17T18:07:41.663596Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345390934732144:2448], cookie# 1 2025-04-17T18:07:41.663630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:41.663648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345390934732145:2448], cookie# 1 2025-04-17T18:07:41.663668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:41.663694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345390934732146:2448], cookie# 1 2025-04-17T18:07:41.663731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345390934732143:2448][/dc-1] Unexpected sync response: sender# [1:7494345390934732146:2448], cookie# 1 2025-04-17T18:07:41.704841Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345390934731706:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:41.705134Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345390934731706:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: 
/dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... g_requests 2025-04-17T18:07:44.338822Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764047:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494345403819634840:3023] 2025-04-17T18:07:44.338838Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386639764044:2053] Subscribe: subscriber# [1:7494345403819634845:3024], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:44.338845Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764047:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7494345403819634846:3024] 2025-04-17T18:07:44.338864Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764044:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494345403819634839:3023] 2025-04-17T18:07:44.338868Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386639764047:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-17T18:07:44.338892Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345386639764047:2056] Subscribe: subscriber# [1:7494345403819634846:3024], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:44.338910Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345390934731706:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-17T18:07:44.338917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345403819634844:3024][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494345386639764041:2050] 2025-04-17T18:07:44.338935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345403819634845:3024][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494345386639764044:2053] 2025-04-17T18:07:44.338951Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345403819634846:3024][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494345386639764047:2056] 2025-04-17T18:07:44.338977Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345390934731706:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7494345403819634833:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:44.338985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345403819634834:3024][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494345403819634841:3024] 2025-04-17T18:07:44.339004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345403819634834:3024][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494345403819634842:3024] 2025-04-17T18:07:44.339043Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345403819634834:3024][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7494345390934731706:2139], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:44.339059Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345390934731706:2139], cacheItem# { Subscriber: { Subscriber: [1:7494345403819634833:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:44.339065Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345403819634834:3024][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494345403819634843:3024] 2025-04-17T18:07:44.339081Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345403819634834:3024][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7494345390934731706:2139], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:44.339087Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345390934731706:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-17T18:07:44.339094Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764041:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494345403819634844:3024] 2025-04-17T18:07:44.339107Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764044:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494345403819634845:3024] 2025-04-17T18:07:44.339118Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345386639764047:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494345403819634846:3024] 2025-04-17T18:07:44.339134Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345390934731706:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7494345403819634834:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } 
Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:44.339173Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345390934731706:2139], cacheItem# { Subscriber: { Subscriber: [1:7494345403819634834:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:44.339262Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345403819634847:3025], recipient# [1:7494345403819634832:2338], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:45.076426Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345390934731706:2139], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:45.076558Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345390934731706:2139], cacheItem# { Subscriber: { Subscriber: [1:7494345395229699676:2625] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:45.076667Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345408114602160:3029], recipient# [1:7494345408114602159:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:45.333812Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345390934731706:2139], request# { ErrorCount: 0 
DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:45.334096Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345390934731706:2139], cacheItem# { Subscriber: { Subscriber: [1:7494345403819634821:3021] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:45.334308Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345408114602165:3030], recipient# [1:7494345408114602164:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 >> KqpJoinOrder::DatetimeConstantFold-ColumnStore >> KqpFlipJoin::LeftSemi_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-04-17T18:07:47.249301Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T18:07:47.252306Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-17T18:07:47.258485Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T18:07:47.258559Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-17T18:07:47.265074Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-17T18:07:47.265187Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-04-17T18:07:47.264891Z ErrorReason# >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_1 [GOOD] Test command err: Trying to start YDB, gRPC: 61474, MsgBus: 19760 2025-04-17T18:07:35.218236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345365091610667:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:35.218303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5b/r3tmp/tmpVVOnLj/pdisk_1.dat 2025-04-17T18:07:35.610391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:35.639196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:35.639315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:35.654788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61474, node 1 2025-04-17T18:07:35.740990Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:35.741013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:35.741020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:35.741152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19760 TClient is connected to server localhost:19760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:36.349414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:36.377791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
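Editor's note: the SCHEME_BOARD_SUBSCRIBER traces above ("Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1", then "Sync is done: ... successes# 2 ... partial# 0", then "Unexpected sync response" for the third reply) show the subscriber answering a sync request as soon as a strict majority of its three per-replica actors has replied. A minimal sketch of that majority bookkeeping follows; all names are illustrative, not the actual NKikimr types.

#include <cstddef>
#include <iostream>

// Minimal sketch of the quorum counters visible in the traces above
// ("size# 3, half# 1, successes# N"). Illustrative names only.
struct TSyncQuorum {
    size_t Size = 3;       // number of per-replica subscriber actors
    size_t Successes = 0;  // positive TEvSyncVersionResponse replies
    size_t Failures = 0;   // undelivered or failed replies

    size_t Half() const { return Size / 2; }  // 3 / 2 == 1

    // The sync is settled once either outcome holds a strict majority.
    bool Done() const {
        return Successes > Half() || Failures > Half();
    }

    // Returns true if the reply was counted, false if it arrived after
    // the quorum was already reached ("Unexpected sync response").
    bool OnResponse(bool ok) {
        if (Done()) return false;
        (ok ? Successes : Failures)++;
        return true;
    }
};

int main() {
    TSyncQuorum q;
    q.OnResponse(true);                 // successes# 1 -> sync in progress
    q.OnResponse(true);                 // successes# 2 > half# 1 -> sync done
    bool counted = q.OnResponse(true);  // late third reply
    std::cout << (counted ? "counted" : "unexpected sync response") << '\n';
}

With size# 3 the threshold half# 1 means the second success completes the sync, so the third replica's reply can only ever be logged as unexpected, exactly as in the traces above.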
2025-04-17T18:07:36.537007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:07:36.697715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:07:36.763185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:38.182442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345377976514315:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:38.182567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:38.487922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.559954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.591886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.622809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.690255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.757898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.785829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.847184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345377976514986:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:38.847268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:38.847336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345377976514991:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:38.850530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:07:38.858970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345377976514993:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:07:38.921409Z node 1 :TX_PROXY ERROR: Actor# [1:7494345377976515046:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:39.819672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:07:39.844978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:07:39.871251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:07:39.957032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:07:40.218653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345365091610667:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:40.218760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62583, MsgBus: 2170 2025-04-17T18:07:41.803091Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345391493236578:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:41.803123Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5b/r3tmp/tmpWZLFqd/pdisk_1.dat 2025-04-17T18:07:41.923303Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62583, node 2 2025-04-17T18:07:41.959594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:41.959671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:41.961173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:41.970658Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:41.970677Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:41.970683Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:41.970792Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2170 TClient is connected to server localhost:2170 WaitRootIsUp 'Root'... 
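Editor's note: the KQP_WORKLOAD_SERVICE warnings above trace the bootstrap of the default resource pool on a fresh database: fetch returns NOT_FOUND ("Resource pool default not found"), an ESchemeOpCreateResourcePool is proposed, the creator schedules a retry ("Transaction ... completed, doublechecking"), and a concurrent "path exist" outcome is accepted as success. A self-contained sketch of that fetch/create/doublecheck loop follows; the enum values and helpers are stand-ins, not YDB APIs.

#include <iostream>
#include <string>
#include <unordered_set>

// Sketch of the "fetch -> create -> doublecheck" bootstrap described by
// the warnings above. g_scheme stands in for the schemeshard; all names
// here are illustrative, not the actual workload-service actors.
enum class EStatus { Ok, NotFound, AlreadyExists };

std::unordered_set<std::string> g_scheme;

EStatus FetchPool(const std::string& p) {
    return g_scheme.count(p) ? EStatus::Ok : EStatus::NotFound;
}
EStatus CreatePool(const std::string& p) {
    // A losing racer sees AlreadyExists ("path exist, request accepts it").
    return g_scheme.insert(p).second ? EStatus::Ok : EStatus::AlreadyExists;
}

bool EnsureDefaultPool(const std::string& path, int maxRetries = 3) {
    for (int i = 0; i < maxRetries; ++i) {
        if (FetchPool(path) == EStatus::Ok)
            return true;                 // created here or by a peer
        EStatus st = CreatePool(path);
        if (st == EStatus::Ok || st == EStatus::AlreadyExists)
            continue;                    // "doublechecking": re-fetch next pass
    }
    return FetchPool(path) == EStatus::Ok;
}

int main() {
    // Simulate a peer winning the creation race before we run.
    g_scheme.insert("/Root/.metadata/workload_manager/pools/default");
    std::cout << "pool ready: "
              << EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default")
              << '\n';
}

Treating AlreadyExists as success and re-fetching is what lets every session converge on one pool despite the races these warnings record.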
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:42.439794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:42.452913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:42.518775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:42.669242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:42.749148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:44.698313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345404378140250:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:44.698424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:44.759466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:44.836115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:44.868177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:44.909149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:44.945257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:44.981487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:45.016714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:45.106635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345408673108212:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:45.106722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:45.107414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345408673108218:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:45.110662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:07:45.158683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345408673108220:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:07:45.254293Z node 2 :TX_PROXY ERROR: Actor# [2:7494345408673108279:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:45.984272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:07:46.016625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:07:46.049658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:07:46.080105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:07:46.803404Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345391493236578:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:46.803472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 12239, MsgBus: 5079 2025-04-17T18:00:10.878093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494343454685757787:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:10.882417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001add/r3tmp/tmpzZkGfc/pdisk_1.dat 2025-04-17T18:00:11.542723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:00:11.572553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:00:11.572639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:00:11.578881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12239, node 1 2025-04-17T18:00:11.758980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:00:11.759004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:00:11.759015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:00:11.759114Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5079 TClient is connected to server localhost:5079 WaitRootIsUp 'Root'... 
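Editor's note: the KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead output that continues below repeatedly races a cancel-after timer against query execution; the CANCELLED results further down ("Request canceled after 12ms" / "Cancelling after 15ms during execution") are the two sides of that race reported together. A sketch of the pattern under those assumptions follows; this is illustrative only, not the KQP executer.

#include <atomic>
#include <chrono>
#include <iostream>
#include <thread>

using namespace std::chrono;

// Sketch of a cancel-after race: a watchdog arms a deadline while the
// "query" loop runs and checks the flag cooperatively. Illustrative only.
bool RunWithCancelAfter(milliseconds deadline, milliseconds workTime) {
    std::atomic<bool> cancelled{false};
    std::thread watchdog([&] {
        std::this_thread::sleep_for(deadline);
        cancelled.store(true, std::memory_order_relaxed);
    });
    auto start = steady_clock::now();
    while (steady_clock::now() - start < workTime) {      // the "query"
        if (cancelled.load(std::memory_order_relaxed)) {  // cooperative check
            std::cout << "CANCELLED: Request canceled after "
                      << duration_cast<milliseconds>(steady_clock::now() - start).count()
                      << "ms\n";
            watchdog.join();
            return false;
        }
        std::this_thread::sleep_for(1ms);
    }
    watchdog.join();
    return true;  // finished before the deadline fired
}

int main() {
    RunWithCancelAfter(/*deadline=*/12ms, /*workTime=*/50ms);  // cancelled
    RunWithCancelAfter(/*deadline=*/50ms, /*workTime=*/5ms);   // completes
}

The test sweeps the deadline upward (12ms, 15ms, ... 455ms in the log) so that each run lands on a different point of the cancellation path, which is why every iteration below produces a fresh "Create QueryResponse for error on request" line.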
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:00:12.485355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:12.520712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:12.726230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:12.927406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:13.029807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:00:15.051988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343476160595887:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.052104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.420339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.458668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.557426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.592828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.632638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.694839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.779101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:00:15.857612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343476160596559:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.857701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.857927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494343476160596564:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:00:15.864563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:00:15.869193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494343454685757787:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:00:15.870458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:00:15.880531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494343476160596566:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:00:15.943031Z node 1 :TX_PROXY ERROR: Actor# [1:7494343476160596623:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:00:17.184569Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh70v3rkcybdwjpq7gspc, ReplyQueryCompileError, status TIMEOUT remove tx with tx_id: 2025-04-17T18:00:17.202536Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh71c4gmbkb1bejq9ekhd, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.222360Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh7229qph8r46b77mb33w, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.317445Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh72yfbzsz3ryvecyaesd, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.347528Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh75qeb5hwhbvh5q6c4vf, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.367246Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh76fftcq4hfz8z1k210f, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.386595Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh76z01tbwmwaypz2w5c1, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.402733Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh77kdxmj6es982g94213, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.425053Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh7867p4t0hxt54ca66gw, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.443788Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: [1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh78s8tkg2qqztetqz19f, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.474407Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, ActorId: 
[1:7494343484750531555:2516], ActorState: ExecuteState, TraceId: 01js2eh79kfshzrrsqae983dmt, Create QueryResponse for error on request, msg: 2025-04-17T18:00:17.498713Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494343484750531625:2516] TxId: 281474976710672. Ctx: { TraceId: 01js2eh7aa4d1p2zv4cwrgam51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgxN2ZiZmQtYzNmODI4ZGItM2YzNjhhODQtZDgzZTY4OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 12ms } {
: Error: Cancelling after 15ms during execution } ] 2025-04-17T18:00:17.499294Z node 1 :KQP_COMPUTE ERROR: ... tYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey3an72d4zxrdj9425r9f, Create QueryResponse for error on request, msg: 2025-04-17T18:07:20.547336Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey41789fjp5q33hs9w3f6, Create QueryResponse for error on request, msg: 2025-04-17T18:07:21.395671Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey4vpewqs7na3ez6acxcp, Create QueryResponse for error on request, msg: 2025-04-17T18:07:21.848331Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey59s0avx3rnf9a22hjtv, Create QueryResponse for error on request, msg: 2025-04-17T18:07:22.370436Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey5t19y5fn525p3npbgy9, Create QueryResponse for error on request, msg: 2025-04-17T18:07:23.258448Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey6np72rwdfn1vsvf55fb, Create QueryResponse for error on request, msg: 2025-04-17T18:07:24.086933Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey7fj9v5533rjwtrd32vg, Create QueryResponse for error on request, msg: 2025-04-17T18:07:24.990480Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey8bpbeqw1ajdaeg4w4dc, Create QueryResponse for error on request, msg: 2025-04-17T18:07:25.463552Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494345322807795264:2517] TxId: 281474976710898. Ctx: { TraceId: 01js2ey8te0p9pfgxmtv1atwh3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 455ms } {
: Error: Cancelling after 452ms during execution } ] 2025-04-17T18:07:25.463755Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey8te0p9pfgxmtv1atwh3, Create QueryResponse for error on request, msg: 2025-04-17T18:07:25.929435Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2ey991arwt548gjn598ybb, Create QueryResponse for error on request, msg: 2025-04-17T18:07:26.804952Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eya4aez903nsbchqt8dwb, Create QueryResponse for error on request, msg: 2025-04-17T18:07:27.553538Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyavmdte5kfa41y1k1xee, Create QueryResponse for error on request, msg: 2025-04-17T18:07:28.452618Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eybqr7nx7majjfnxekzr9, Create QueryResponse for error on request, msg: 2025-04-17T18:07:29.335786Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyck8f5nnb38jqwhp7s3r, Create QueryResponse for error on request, msg: 2025-04-17T18:07:30.118475Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eydbk6zf8m6bayy3md51h, Create QueryResponse for error on request, msg: 2025-04-17T18:07:30.881980Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eye3d66at5smv03ek6zs3, Create QueryResponse for error on request, msg: 2025-04-17T18:07:31.737033Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyey15vbf5nemrvs9q30p, Create QueryResponse for error on request, msg: 2025-04-17T18:07:32.628965Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyfswb7s2fc26yx50ayfy, Create QueryResponse for error on request, msg: 2025-04-17T18:07:33.546458Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eygpde5f41kb94gfagve5, Create QueryResponse for error on request, msg: 2025-04-17T18:07:34.416274Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyhhm00wvhkrmjg4gbq62, Create QueryResponse 
for error on request, msg: 2025-04-17T18:07:35.280131Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyjcgb24k2fxbdap51qer, Create QueryResponse for error on request, msg: 2025-04-17T18:07:36.143480Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyk7e6srgh4004hngvfvz, Create QueryResponse for error on request, msg: 2025-04-17T18:07:36.632782Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494345370052436201:2517] TxId: 281474976710924. Ctx: { TraceId: 01js2eykpqd95csr4hmphf2519, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 481ms } {
: Error: Cancelling after 481ms during execution } ] 2025-04-17T18:07:36.632947Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7494345370052436201:2517] TxId: 281474976710924. Ctx: { TraceId: 01js2eykpqd95csr4hmphf2519, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-04-17T18:07:36.632955Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494345370052436206:4374], TxId: 281474976710924, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=. CustomerSuppliedId : . TraceId : 01js2eykpqd95csr4hmphf2519. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7494345370052436201:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T18:07:36.633037Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7494345370052436205:4373], TxId: 281474976710924, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=. TraceId : 01js2eykpqd95csr4hmphf2519. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7494345370052436201:2517], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-17T18:07:36.633536Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eykpqd95csr4hmphf2519, Create QueryResponse for error on request, msg: 2025-04-17T18:07:37.125493Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eym62cdxk5k0cybzn8p01, Create QueryResponse for error on request, msg: 2025-04-17T18:07:38.002887Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyn165e27nyejyecg6y0e, Create QueryResponse for error on request, msg: 2025-04-17T18:07:38.822499Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eynty1cvs1abgwjg81svw, Create QueryResponse for error on request, msg: 2025-04-17T18:07:39.616940Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eypkpfprgraypeytd0qja, Create QueryResponse for error on request, msg: 2025-04-17T18:07:40.428799Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyqd156a5z2nvskqktf1x, Create QueryResponse for error on request, msg: 2025-04-17T18:07:41.296153Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyr83f5mwanya64w45wry, Create QueryResponse for error on request, msg: 2025-04-17T18:07:42.158467Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eys304may01k8a16466as, Create QueryResponse for error on request, msg: 2025-04-17T18:07:43.086306Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyszj8ek4zjgby0fjd4yz, Create QueryResponse for error on request, msg: 2025-04-17T18:07:43.984637Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eytvxe6n0g7e2j44wfjs3, Create QueryResponse for error on request, msg: 2025-04-17T18:07:44.906166Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmVlNzc2NDItYTU0YmZkNTYtYjY0Mjc0OGEtMjY4NDM4MTU=, ActorId: [4:7494344910490928356:2517], ActorState: ExecuteState, TraceId: 01js2eyvrn0a97qemer4374b7y, Create QueryResponse for error on request, msg: >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false |87.6%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |87.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> TKeyValueTest::TestIncorrectRequestThenResponseError >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2025-04-17T18:07:38.411772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:38.412305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:38.412372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001032/r3tmp/tmpRdQGyA/pdisk_1.dat 2025-04-17T18:07:38.773912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.816487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:38.855868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:38.856978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:38.868534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:38.949425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:38.991392Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:07:38.991684Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:39.025169Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:39.025306Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:39.026730Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:39.026808Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:39.026845Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:39.027116Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:39.027216Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:39.027289Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:07:39.038017Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:39.069837Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:39.070077Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:39.070200Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:07:39.070227Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:39.070261Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:39.070295Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:39.070708Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:39.070831Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:39.071005Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:39.071068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:39.071109Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:39.071179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:39.071303Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:39.071777Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:39.072032Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:39.072146Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:39.073668Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:39.084444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:39.084545Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:39.234713Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:39.239472Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:39.239552Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:39.240393Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:39.240443Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:39.240494Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:39.240740Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:39.240897Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:39.241717Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:39.241789Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:39.243815Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:39.244268Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:07:39.246796Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:39.246858Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:39.247430Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:39.247499Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:39.248706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:39.248748Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:39.248804Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:39.248869Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:39.248920Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:39.248995Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:39.264821Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:39.267105Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:39.267696Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:39.267805Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:39.276898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:39.277012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:39.277081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:39.282258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:39.288330Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:39.438637Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:39.441038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:39.506962Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:39.888680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eypra8chkfxntb6hcg077, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2E1ODc0NjEtOWRmN2EzZmYtOTU3ZjJmOGItMWE5MjY0MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:39.905828Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:39.906364Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:39.906563Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=7 ... 025-04-17T18:07:48.492342Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:48.492384Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:48.492436Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:48.492506Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-17T18:07:48.492982Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:48.493133Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:48.493187Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:48.494551Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:48.505796Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:48.505930Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:07:48.663711Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-17T18:07:48.664374Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:07:48.664436Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:48.664672Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:48.664722Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:07:48.664769Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:07:48.665031Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:07:48.665171Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:48.666649Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:48.666728Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:07:48.667183Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:07:48.667603Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:48.669174Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:07:48.669227Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:48.669830Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:07:48.669924Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:48.670773Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:48.670822Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:48.670871Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:07:48.670955Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:48.671027Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:07:48.671119Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:48.672427Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:48.675047Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:07:48.675126Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:07:48.675745Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:07:48.684293Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:48.684412Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:48.684504Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:48.690478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:48.697840Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:48.857481Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:48.860755Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:07:48.895325Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:48.973816Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2eyzyac0jxtqfphb661pqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmRmZDczMzYtYWUyNDI2MTMtNmE1OWRmNjUtNTExOTg1YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:07:48.976440Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-17T18:07:48.976871Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:07:48.977030Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-04-17T18:07:48.988644Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:48.992777Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:48.993992Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-17T18:07:49.005685Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-17T18:07:49.005775Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:49.006054Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:49.006093Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-17T18:07:49.006431Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:49.006484Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:49.006530Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:49.006597Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:49.006693Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-17T18:07:49.007624Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:49.007970Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:49.008147Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:49.008208Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:49.008258Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:49.008507Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:49.008577Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-17T18:07:49.009194Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:07:49.009397Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:49.009495Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-17T18:07:49.009534Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-17T18:07:49.045621Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:49.045697Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-17T18:07:49.046117Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:49.046156Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:49.046210Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:49.046332Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:49.046398Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:49.046441Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> TExtSubDomainTest::DeclareAndLs >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-17T18:07:40.847050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345387055458326:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:40.847145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001d0e/r3tmp/tmpJajuX0/pdisk_1.dat 2025-04-17T18:07:41.309076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:41.309896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:41.315295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:41.363425Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15140 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:07:41.606594Z node 1 :TX_PROXY DEBUG: actor# [1:7494345387055458543:2114] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:41.606646Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345391350426330:2444] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:41.606796Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345391350425870:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:41.606823Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345391350425870:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:41.607045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:41.608797Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458234:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345391350426335:2445] 2025-04-17T18:07:41.608849Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345387055458234:2050] Subscribe: subscriber# [1:7494345391350426335:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.608896Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458237:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345391350426336:2445] 2025-04-17T18:07:41.608913Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345387055458237:2053] Subscribe: subscriber# [1:7494345391350426336:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.608932Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458240:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345391350426337:2445] 2025-04-17T18:07:41.608945Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345387055458240:2056] Subscribe: subscriber# [1:7494345391350426337:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:41.608979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426335:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345387055458234:2050] 2025-04-17T18:07:41.609008Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426336:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345387055458237:2053] 
2025-04-17T18:07:41.609047Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426337:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345387055458240:2056] 2025-04-17T18:07:41.609113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345391350426332:2445] 2025-04-17T18:07:41.609149Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345391350426333:2445] 2025-04-17T18:07:41.609221Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345391350426331:2445][/dc-1] Set up state: owner# [1:7494345391350425870:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.609312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345391350426334:2445] 2025-04-17T18:07:41.609357Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345391350426331:2445][/dc-1] Path was already updated: owner# [1:7494345391350425870:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:41.609410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426335:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391350426332:2445], cookie# 1 2025-04-17T18:07:41.609429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426336:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391350426333:2445], cookie# 1 2025-04-17T18:07:41.609441Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426337:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391350426334:2445], cookie# 1 2025-04-17T18:07:41.609467Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458234:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345391350426335:2445] 2025-04-17T18:07:41.609493Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458234:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391350426335:2445], cookie# 1 2025-04-17T18:07:41.609508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458237:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345391350426336:2445] 2025-04-17T18:07:41.609534Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458237:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391350426336:2445], cookie# 1 2025-04-17T18:07:41.609551Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7494345387055458240:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345391350426337:2445] 2025-04-17T18:07:41.609562Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345387055458240:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345391350426337:2445], cookie# 1 2025-04-17T18:07:41.610291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426335:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345387055458234:2050], cookie# 1 2025-04-17T18:07:41.610321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426336:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345387055458237:2053], cookie# 1 2025-04-17T18:07:41.610349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345391350426337:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345387055458240:2056], cookie# 1 2025-04-17T18:07:41.610393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345391350426332:2445], cookie# 1 2025-04-17T18:07:41.610508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:41.610544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345391350426333:2445], cookie# 1 2025-04-17T18:07:41.610566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:41.610592Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345391350426334:2445], cookie# 1 2025-04-17T18:07:41.610611Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345391350426331:2445][/dc-1] Unexpected sync response: sender# [1:7494345391350426334:2445], cookie# 1 2025-04-17T18:07:41.656715Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345391350425870:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:41.657066Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345391350425870:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 5-04-17T18:07:49.239140Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494345416003139204:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-17T18:07:49.239143Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345416003138861:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7494345428888042210:2927] 2025-04-17T18:07:49.239155Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345416003138861:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-17T18:07:49.239180Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494345416003139204:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494345428888042192:2925] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:49.239191Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345416003138861:2050] Subscribe: subscriber# [3:7494345428888042210:2927], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:49.239220Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345416003139204:2127], cacheItem# { Subscriber: { Subscriber: [3:7494345428888042192:2925] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:49.239230Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345416003138864:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7494345428888042211:2927] 2025-04-17T18:07:49.239237Z node 3 :SCHEME_BOARD_REPLICA INFO: 
[3:7494345416003138864:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-17T18:07:49.239257Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345416003138864:2053] Subscribe: subscriber# [3:7494345428888042211:2927], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:49.239314Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345416003138867:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7494345428888042212:2927] 2025-04-17T18:07:49.239324Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345416003138867:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-17T18:07:49.239336Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345428888042213:2928], recipient# [3:7494345428888042189:2321], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:49.239345Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345416003138867:2056] Subscribe: subscriber# [3:7494345428888042212:2927], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:49.239396Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494345428888042210:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345416003138861:2050] 2025-04-17T18:07:49.239430Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494345428888042211:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345416003138864:2053] 2025-04-17T18:07:49.239452Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494345428888042212:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345416003138867:2056] 2025-04-17T18:07:49.239483Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345428888042206:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345428888042207:2927] 2025-04-17T18:07:49.239511Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345428888042206:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { 
Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345428888042208:2927] 2025-04-17T18:07:49.239540Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345416003138861:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345428888042210:2927] 2025-04-17T18:07:49.239542Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7494345428888042206:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7494345416003139204:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:49.239578Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345416003138864:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345428888042211:2927] 2025-04-17T18:07:49.239586Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345428888042206:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345428888042209:2927] 2025-04-17T18:07:49.239614Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345416003138867:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345428888042212:2927] 2025-04-17T18:07:49.239669Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7494345428888042206:2927][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7494345416003139204:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:49.239673Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494345416003139204:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-17T18:07:49.239744Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494345416003139204:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494345428888042206:2927] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:49.239835Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345416003139204:2127], cacheItem# { Subscriber: { Subscriber: [3:7494345428888042206:2927] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:49.239897Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345428888042214:2929], recipient# [3:7494345428888042191:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 
Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:49.358871Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345416003139204:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:49.359024Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345416003139204:2127], cacheItem# { Subscriber: { Subscriber: [3:7494345420298107537:2912] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:49.359119Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345428888042216:2930], recipient# [3:7494345428888042215:2324], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] Test command err: Trying to start YDB, gRPC: 63406, MsgBus: 14171 2025-04-17T18:06:42.425649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345140448757066:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:42.425826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e62/r3tmp/tmpW4Pwik/pdisk_1.dat 2025-04-17T18:06:42.688607Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63406, node 1 2025-04-17T18:06:42.746004Z node 1 :NET_CLASSIFIER WARN: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:42.746023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:42.746030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:42.746193Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:42.782089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:42.782198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:42.783806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14171 TClient is connected to server localhost:14171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:43.140737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:45.050445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345153333659627:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:45.050456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345153333659619:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:45.050546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:45.053970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:06:45.064258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345153333659633:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:06:45.140689Z node 1 :TX_PROXY ERROR: Actor# [1:7494345153333659684:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:45.400653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:06:45.581351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:45.581532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:45.581748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:45.581777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:06:45.581801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:45.581886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:06:45.581976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:06:45.582000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:06:45.582084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:06:45.582101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:06:45.582203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:06:45.582205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:06:45.582302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:06:45.582326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:06:45.582407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:06:45.582425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:06:45.582511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:06:45.582520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:06:45.582627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:06:45.582628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:06:45.582761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494345153333659932:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:06:45.582765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:06:45.582881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:06:45.583005Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7494345153333659951:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:06:45.608191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494345153333659995:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:45.608257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494345153333659995:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:45.608478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;sel ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.648916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.654621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.654627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.660527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.660553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.666658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.666660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.672793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.672797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.679032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.679042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.685305Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.685396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.691359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.691760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.697561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.697770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.704093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.704093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.710900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.710900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.717252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.717264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.723215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.723219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.729257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.729271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.735165Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.735168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.741314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.741329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.747396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.747396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.753478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.753480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.759249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.759255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.764488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.769812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.775125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.780445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.785779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.792714Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.798022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.804514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:40.915524Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2exzje6wqjk0h9f4shbhbz", SessionId: ydb://session/3?node_id=1&id=YmVkY2IwMWEtZTgyM2VjNGEtODM0NmZkMzMtNGY4MDlhZTI=, Slow query, duration: 25.381104s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:07:41.152598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:07:41.152917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:07:41.153091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494345333722336360:9780];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:07:41.153293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: 2025-04-17T18:07:49.642568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345427635205242:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:49.642676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ced/r3tmp/tmpn2W6tK/pdisk_1.dat 2025-04-17T18:07:49.987411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:50.000122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:50.000240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:50.008226Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19662 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:07:50.206291Z node 1 :TX_PROXY DEBUG: actor# [1:7494345427635205468:2093] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:50.206338Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345431930173272:2431] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:50.206486Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345427635205514:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:50.206523Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345427635205514:2118], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:50.206702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:50.208986Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205195:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345431930173277:2432] 2025-04-17T18:07:50.209053Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345427635205195:2049] Subscribe: subscriber# [1:7494345431930173277:2432], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:50.209112Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205198:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345431930173278:2432] 2025-04-17T18:07:50.209130Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345427635205198:2052] Subscribe: subscriber# [1:7494345431930173278:2432], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:50.209151Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205201:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345431930173279:2432] 2025-04-17T18:07:50.209165Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345427635205201:2055] Subscribe: subscriber# [1:7494345431930173279:2432], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:50.209204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173277:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345427635205195:2049] 2025-04-17T18:07:50.209225Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173278:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345427635205198:2052] 2025-04-17T18:07:50.209254Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173279:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345427635205201:2055] 2025-04-17T18:07:50.209300Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 1] Version: 2 }: sender# [1:7494345431930173274:2432] 2025-04-17T18:07:50.209323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345431930173275:2432] 2025-04-17T18:07:50.209393Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345431930173273:2432][/dc-1] Set up state: owner# [1:7494345427635205514:2118], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:50.209491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345431930173276:2432] 2025-04-17T18:07:50.209548Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345431930173273:2432][/dc-1] Path was already updated: owner# [1:7494345427635205514:2118], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:50.209610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173277:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173274:2432], cookie# 1 2025-04-17T18:07:50.209631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173278:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173275:2432], cookie# 1 2025-04-17T18:07:50.209644Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173279:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173276:2432], cookie# 1 2025-04-17T18:07:50.209677Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205195:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345431930173277:2432] 2025-04-17T18:07:50.209699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205195:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173277:2432], cookie# 1 2025-04-17T18:07:50.209718Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205198:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345431930173278:2432] 2025-04-17T18:07:50.209737Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205198:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173278:2432], cookie# 1 2025-04-17T18:07:50.209755Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205201:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345431930173279:2432] 2025-04-17T18:07:50.209767Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205201:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173279:2432], cookie# 1 2025-04-17T18:07:50.210243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173277:2432][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345427635205195:2049], cookie# 1 2025-04-17T18:07:50.210262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173278:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345427635205198:2052], cookie# 1 2025-04-17T18:07:50.210294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173279:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345427635205201:2055], cookie# 1 2025-04-17T18:07:50.210333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345431930173274:2432], cookie# 1 2025-04-17T18:07:50.210364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:50.210383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345431930173275:2432], cookie# 1 2025-04-17T18:07:50.210422Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:50.210446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345431930173276:2432], cookie# 1 2025-04-17T18:07:50.210459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Unexpected sync response: sender# [1:7494345431930173276:2432], cookie# 1 2025-04-17T18:07:50.283933Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345427635205514:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:50.284262Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345427635205514:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: 
Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345427635205514:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494345431930173462:2577] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744913270485 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:50.990024Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345427635205514:2118], cacheItem# { Subscriber: { Subscriber: [1:7494345431930173462:2577] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744913270485 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-17T18:07:50.990245Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345431930173609:2668], recipient# [1:7494345431930173608:2667], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:07:50.990303Z node 1 :TX_PROXY INFO: Actor# [1:7494345431930173608:2667] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-17T18:07:50.993551Z node 1 :TX_PROXY DEBUG: actor# [1:7494345427635205468:2093] Handle TEvNavigate describe path /dc-1 2025-04-17T18:07:50.993618Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345431930173611:2670] HANDLE EvNavigateScheme /dc-1 2025-04-17T18:07:50.993733Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345427635205514:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: 
KindUnknown DomainInfo }] } 2025-04-17T18:07:50.993817Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494345427635205514:2118], cookie# 4 2025-04-17T18:07:50.993882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173277:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173274:2432], cookie# 4 2025-04-17T18:07:50.993940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173278:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173275:2432], cookie# 4 2025-04-17T18:07:50.993955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173279:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173276:2432], cookie# 4 2025-04-17T18:07:50.993967Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205195:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173277:2432], cookie# 4 2025-04-17T18:07:50.993975Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205198:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173278:2432], cookie# 4 2025-04-17T18:07:50.994018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345427635205201:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345431930173279:2432], cookie# 4 2025-04-17T18:07:50.994026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173277:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345427635205195:2049], cookie# 4 2025-04-17T18:07:50.994044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173278:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345427635205198:2052], cookie# 4 2025-04-17T18:07:50.994058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345431930173279:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345427635205201:2055], cookie# 4 2025-04-17T18:07:50.994090Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345431930173274:2432], cookie# 4 2025-04-17T18:07:50.994110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:50.994125Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345431930173275:2432], cookie# 4 2025-04-17T18:07:50.994141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:50.994167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345431930173276:2432], cookie# 4 2025-04-17T18:07:50.994195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345431930173273:2432][/dc-1] Unexpected sync response: sender# [1:7494345431930173276:2432], cookie# 4 2025-04-17T18:07:50.994212Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345427635205514:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T18:07:50.994285Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345427635205514:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494345431930173273:2432] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913270464 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:50.994347Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345427635205514:2118], cacheItem# { Subscriber: { Subscriber: [1:7494345431930173273:2432] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913270464 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-17T18:07:50.994502Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345431930173612:2671], recipient# [1:7494345431930173611:2670], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:07:50.994557Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345431930173611:2670] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:50.994631Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345431930173611:2670] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-17T18:07:50.995301Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345431930173611:2670] Handle TEvDescribeSchemeResult Forward to# [1:7494345431930173610:2669] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913270464 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913270464 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913270485 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndLs [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-04-17T18:07:51.728713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345436338164164:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:51.734784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ce3/r3tmp/tmpBu6TJU/pdisk_1.dat 2025-04-17T18:07:52.098818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:52.098957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:07:52.100415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:07:52.102532Z node 1 :IMPORT WARN: Table profiles were not loaded
TClient is connected to server localhost:27496
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
2025-04-17T18:07:52.337146Z node 1 :TX_PROXY DEBUG: actor# [1:7494345436338164235:2096] Handle TEvNavigate describe path dc-1
2025-04-17T18:07:52.337186Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345440633131828:2258] HANDLE EvNavigateScheme dc-1
2025-04-17T18:07:52.337321Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345436338164267:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:52.337357Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345436338164267:2114], path# /dc-1, domainOwnerId# 72057594046644480
2025-04-17T18:07:52.337519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList
2025-04-17T18:07:52.339664Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163956:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345440633131833:2259]
2025-04-17T18:07:52.339726Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345436338163956:2049] Subscribe: subscriber# [1:7494345440633131833:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:52.339796Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163959:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345440633131834:2259]
2025-04-17T18:07:52.339823Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345436338163959:2052] Subscribe: subscriber# [1:7494345440633131834:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:52.339850Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163962:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345440633131835:2259]
2025-04-17T18:07:52.339881Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345436338163962:2055] Subscribe: subscriber# [1:7494345440633131835:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:52.339921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131833:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345436338163956:2049]
2025-04-17T18:07:52.339946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131834:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345436338163959:2052]
2025-04-17T18:07:52.339963Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131835:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345436338163962:2055]
2025-04-17T18:07:52.339997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345440633131830:2259]
2025-04-17T18:07:52.340037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345440633131831:2259]
2025-04-17T18:07:52.340102Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345440633131829:2259][/dc-1] Set up state: owner# [1:7494345436338164267:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }
2025-04-17T18:07:52.340188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345440633131832:2259]
2025-04-17T18:07:52.340247Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345440633131829:2259][/dc-1] Path was already updated: owner# [1:7494345436338164267:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }
2025-04-17T18:07:52.340283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131833:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131830:2259], cookie# 1
2025-04-17T18:07:52.340296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131834:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131831:2259], cookie# 1
2025-04-17T18:07:52.340326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131835:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131832:2259], cookie# 1
2025-04-17T18:07:52.340358Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163956:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345440633131833:2259]
2025-04-17T18:07:52.340380Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163956:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131833:2259], cookie# 1
2025-04-17T18:07:52.340396Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163959:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345440633131834:2259]
2025-04-17T18:07:52.340407Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163959:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131834:2259], cookie# 1
2025-04-17T18:07:52.340419Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163962:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345440633131835:2259]
2025-04-17T18:07:52.340431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163962:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131835:2259], cookie# 1
2025-04-17T18:07:52.340906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131833:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345436338163956:2049], cookie# 1
2025-04-17T18:07:52.340936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131834:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345436338163959:2052], cookie# 1
2025-04-17T18:07:52.340952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131835:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345436338163962:2055], cookie# 1
2025-04-17T18:07:52.340983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345440633131830:2259], cookie# 1
2025-04-17T18:07:52.341020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0
2025-04-17T18:07:52.341039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345440633131831:2259], cookie# 1
2025-04-17T18:07:52.341074Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0
2025-04-17T18:07:52.341095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345440633131832:2259], cookie# 1
2025-04-17T18:07:52.341108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Unexpected sync response: sender# [1:7494345440633131832:2259], cookie# 1
2025-04-17T18:07:52.387356Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345436338164267:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 }
2025-04-17T18:07:52.387715Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345436338164267:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... SyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }
2025-04-17T18:07:52.575413Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345436338164267:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494345440633131883:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744913272606 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-04-17T18:07:52.575496Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345436338164267:2114], cacheItem# { Subscriber: { Subscriber: [1:7494345440633131883:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744913272606 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 }
2025-04-17T18:07:52.575650Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345440633131890:2300], recipient# [1:7494345440633131882:2298], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-04-17T18:07:52.575684Z node 1 :TX_PROXY INFO: Actor# [1:7494345440633131882:2298] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1
TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path"
TClient::Ls request: /dc-1
2025-04-17T18:07:52.578660Z node 1 :TX_PROXY DEBUG: actor# [1:7494345436338164235:2096] Handle TEvNavigate describe path /dc-1
2025-04-17T18:07:52.578705Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345440633131892:2302] HANDLE EvNavigateScheme /dc-1
2025-04-17T18:07:52.578802Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345436338164267:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:52.578895Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494345436338164267:2114], cookie# 4
2025-04-17T18:07:52.578978Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131833:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131830:2259], cookie# 4
2025-04-17T18:07:52.579006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131834:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131831:2259], cookie# 4
2025-04-17T18:07:52.579019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131835:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131832:2259], cookie# 4
2025-04-17T18:07:52.579047Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163956:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131833:2259], cookie# 4
2025-04-17T18:07:52.579088Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163959:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131834:2259], cookie# 4
2025-04-17T18:07:52.579130Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345436338163962:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345440633131835:2259], cookie# 4
2025-04-17T18:07:52.579177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131833:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345436338163956:2049], cookie# 4
2025-04-17T18:07:52.579198Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131834:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345436338163959:2052], cookie# 4
2025-04-17T18:07:52.579209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345440633131835:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345436338163962:2055], cookie# 4
2025-04-17T18:07:52.579240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345440633131830:2259], cookie# 4
2025-04-17T18:07:52.579280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0
2025-04-17T18:07:52.579296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345440633131831:2259], cookie# 4
2025-04-17T18:07:52.579311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0
2025-04-17T18:07:52.579330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7494345440633131832:2259], cookie# 4
2025-04-17T18:07:52.579356Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345440633131829:2259][/dc-1] Unexpected sync response: sender# [1:7494345440633131832:2259], cookie# 4
2025-04-17T18:07:52.579422Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345436338164267:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }
2025-04-17T18:07:52.579500Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345436338164267:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494345440633131829:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913272592 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-04-17T18:07:52.579567Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494345436338164267:2114], cacheItem# { Subscriber: { Subscriber: [1:7494345440633131829:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913272592 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 }
2025-04-17T18:07:52.579691Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494345440633131893:2303], recipient# [1:7494345440633131892:2302], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-04-17T18:07:52.579721Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345440633131892:2302] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0
2025-04-17T18:07:52.579770Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345440633131892:2302] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true }
2025-04-17T18:07:52.580356Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345440633131892:2302] Handle TEvDescribeSchemeResult Forward to# [1:7494345440633131891:2301] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913272592 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913272592 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913272606 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... (TRUNCATED)
>> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43
>> TCacheTest::WatchRoot
>> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD]
>> TCacheTest::CheckSystemViewAccess
>> TCacheTest::MigrationCommon
>> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19
>> TCacheTest::CheckSystemViewAccess [GOOD]
>> TCacheTest::CookiesArePreserved
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err: 2025-04-17T18:07:46.051991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345415189998539:2209];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:07:46.058518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cfa/r3tmp/tmps5O5wu/pdisk_1.dat
2025-04-17T18:07:46.395916Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:07:46.437159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:07:46.437364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:07:46.441436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:63598
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
2025-04-17T18:07:46.603963Z node 1 :TX_PROXY DEBUG: actor# [1:7494345415189998660:2137] Handle TEvNavigate describe path dc-1
2025-04-17T18:07:46.604051Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345415189999095:2432] HANDLE EvNavigateScheme dc-1
2025-04-17T18:07:46.604184Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345415189998709:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:46.604215Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345415189998709:2157], path# /dc-1, domainOwnerId# 72057594046644480
2025-04-17T18:07:46.604439Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList
2025-04-17T18:07:46.606475Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031010:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345415189999100:2433]
2025-04-17T18:07:46.606544Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345410895031010:2050] Subscribe: subscriber# [1:7494345415189999100:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:46.606594Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031013:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345415189999101:2433]
2025-04-17T18:07:46.606611Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345410895031013:2053] Subscribe: subscriber# [1:7494345415189999101:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:46.606635Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031016:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345415189999102:2433]
2025-04-17T18:07:46.606650Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345410895031016:2056] Subscribe: subscriber# [1:7494345415189999102:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:46.606727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999100:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345410895031010:2050]
2025-04-17T18:07:46.606783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999101:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345410895031013:2053]
2025-04-17T18:07:46.606833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999102:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345410895031016:2056]
2025-04-17T18:07:46.606973Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031010:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345415189999100:2433]
2025-04-17T18:07:46.607024Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031013:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345415189999101:2433]
2025-04-17T18:07:46.607102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345415189999097:2433]
2025-04-17T18:07:46.607139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345415189999098:2433]
2025-04-17T18:07:46.607190Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345415189999096:2433][/dc-1] Set up state: owner# [1:7494345415189998709:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }
2025-04-17T18:07:46.607312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345415189999099:2433]
2025-04-17T18:07:46.607357Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345415189999096:2433][/dc-1] Path was already updated: owner# [1:7494345415189998709:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }
2025-04-17T18:07:46.607400Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999100:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345415189999097:2433], cookie# 1
2025-04-17T18:07:46.607434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999101:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345415189999098:2433], cookie# 1
2025-04-17T18:07:46.607448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999102:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345415189999099:2433], cookie# 1
2025-04-17T18:07:46.607461Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031016:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345415189999102:2433]
2025-04-17T18:07:46.607476Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031016:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345415189999102:2433], cookie# 1
2025-04-17T18:07:46.610242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031010:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345415189999100:2433], cookie# 1
2025-04-17T18:07:46.610284Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345410895031013:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345415189999101:2433], cookie# 1
2025-04-17T18:07:46.610315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999102:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345410895031016:2056], cookie# 1
2025-04-17T18:07:46.610331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999100:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345410895031010:2050], cookie# 1
2025-04-17T18:07:46.610371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345415189999101:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345410895031013:2053], cookie# 1
2025-04-17T18:07:46.610405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345415189999099:2433], cookie# 1
2025-04-17T18:07:46.610441Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0
2025-04-17T18:07:46.610455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345415189999097:2433], cookie# 1
2025-04-17T18:07:46.610472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0
2025-04-17T18:07:46.610524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345415189999098:2433], cookie# 1
2025-04-17T18:07:46.610537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345415189999096:2433][/dc-1] Unexpected sync response: sender# [1:7494345415189999098:2433], cookie# 1
2025-04-17T18:07:46.661588Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345415189998709:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 }
2025-04-17T18:07:46.661855Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345415189998709:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:54.298768Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345433326283781:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7494345446211187223:3007]
2025-04-17T18:07:54.298781Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345433326283781:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers
2025-04-17T18:07:54.298832Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345433326283781:2050] Subscribe: subscriber# [3:7494345446211187223:3007], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:54.298862Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345433326283784:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7494345446211187224:3007]
2025-04-17T18:07:54.298877Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345433326283784:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers
2025-04-17T18:07:54.298907Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494345446211187223:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345433326283781:2050]
2025-04-17T18:07:54.298919Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345433326283784:2053] Subscribe: subscriber# [3:7494345446211187224:3007], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:54.298943Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345446211187218:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345446211187220:3007]
2025-04-17T18:07:54.298949Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345433326283787:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7494345446211187225:3007]
2025-04-17T18:07:54.298958Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345433326283787:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers
2025-04-17T18:07:54.298976Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494345446211187224:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345433326283784:2053]
2025-04-17T18:07:54.299011Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345433326283787:2056] Subscribe: subscriber# [3:7494345446211187225:3007], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-04-17T18:07:54.299039Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345446211187218:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345446211187221:3007]
2025-04-17T18:07:54.299050Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345433326283781:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345446211187223:3007]
2025-04-17T18:07:54.299075Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345433326283784:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345446211187224:3007]
2025-04-17T18:07:54.299111Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7494345446211187218:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7494345433326284121:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
2025-04-17T18:07:54.299145Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494345446211187225:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345433326283787:2056]
2025-04-17T18:07:54.299166Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494345433326284121:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }
2025-04-17T18:07:54.299169Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345446211187218:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7494345446211187222:3007]
2025-04-17T18:07:54.299218Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7494345446211187218:3007][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7494345433326284121:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
2025-04-17T18:07:54.299223Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494345433326284121:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494345446211187218:3007] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-04-17T18:07:54.299245Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345433326283787:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345446211187225:3007]
2025-04-17T18:07:54.299292Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345433326284121:2127], cacheItem# { Subscriber: { Subscriber: [3:7494345446211187218:3007] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-04-17T18:07:54.299362Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345446211187226:3009], recipient# [3:7494345446211187217:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:55.154560Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345433326284121:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:55.154688Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345433326284121:2127], cacheItem# { Subscriber: { Subscriber: [3:7494345437621252544:2991] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-04-17T18:07:55.154808Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345450506154539:3013], recipient# [3:7494345450506154538:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:55.300637Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345433326284121:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-17T18:07:55.300772Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345433326284121:2127], cacheItem# { Subscriber: { Subscriber: [3:7494345446211187218:3007] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-04-17T18:07:55.300858Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345450506154544:3014], recipient# [3:7494345450506154543:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> TCacheTest::Navigate
>> TCacheTest::WatchRoot [GOOD]
>> TCacheTestWithDrops::LookupErrorUponEviction
>> TCacheTest::CookiesArePreserved [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD]
Test command err: 2025-04-17T18:07:38.501113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T18:07:38.501596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:07:38.501648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00102a/r3tmp/tmp11xaVw/pdisk_1.dat
2025-04-17T18:07:38.846958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T18:07:38.890924Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:07:38.930617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:07:38.930754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:07:38.942267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:07:39.024110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T18:07:39.075679Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586]
2025-04-17T18:07:39.075936Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:07:39.124216Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:07:39.124422Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T18:07:39.126113Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-17T18:07:39.126242Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-17T18:07:39.126298Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-17T18:07:39.126692Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T18:07:39.127023Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T18:07:39.127094Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1
2025-04-17T18:07:39.128675Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588]
2025-04-17T18:07:39.128936Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:07:39.139405Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590]
2025-04-17T18:07:39.139637Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:07:39.148082Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:07:39.148258Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T18:07:39.149568Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889
2025-04-17T18:07:39.149624Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889
2025-04-17T18:07:39.149674Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889
2025-04-17T18:07:39.149971Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T18:07:39.150114Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T18:07:39.150173Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:739:2588] in generation 1
2025-04-17T18:07:39.150583Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:07:39.150679Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-17T18:07:39.151843Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890
2025-04-17T18:07:39.151908Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890
2025-04-17T18:07:39.151948Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890
2025-04-17T18:07:39.152183Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-17T18:07:39.152267Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-17T18:07:39.152314Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1
2025-04-17T18:07:39.163456Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T18:07:39.183507Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-17T18:07:39.183687Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T18:07:39.183780Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618]
2025-04-17T18:07:39.183807Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-17T18:07:39.183834Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-17T18:07:39.183859Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T18:07:39.184182Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T18:07:39.184221Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889
2025-04-17T18:07:39.184261Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T18:07:39.184303Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619]
2025-04-17T18:07:39.184327Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889
2025-04-17T18:07:39.184344Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme
2025-04-17T18:07:39.184359Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889
2025-04-17T18:07:39.184625Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-17T18:07:39.184643Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890
2025-04-17T18:07:39.184674Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-17T18:07:39.184715Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620]
2025-04-17T18:07:39.184728Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890
2025-04-17T18:07:39.184751Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme
2025-04-17T18:07:39.184767Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
2025-04-17T18:07:39.184921Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-17T18:07:39.185001Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-17T18:07:39.185132Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T18:07:39.185164Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:07:39.185194Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-17T18:07:39.185225Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T18:07:39.185269Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889
2025-04-17T18:07:39.185307Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889
2025-04-17T18:07:39.185731Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:703:2594], sessionId# [0:0:0]
2025-04-17T18:07:39.185776Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-04-17T18:07:39.185799Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:07:39.185822Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0
2025-04-17T18:07:39.185863Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-17T18:07:39.185908Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890
2025-04-17T18:07:39.185972Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890
2025-04-17T18:07:39.186196Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-17T18:07:39.186389Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-17T18:07:39.186477Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-17T18:07:39.186870Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:711:2600], sessionId# [0:0:0]
2025-04-17T18:07:39.186903Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890
2025-04-17T18:07:39.186919Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:07:39.186947Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0
2025-04-17T18:07:39.186974Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-17T18:07:39.187141Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889
2025-04-17T18:07:39.187295Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2
2025-04-17T18:07:39.187350Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889
2025-04-17T18:07:39.187679Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:716:2604], sessionId# [0:0:0]
2025-04-17T18:07:39.187835Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890
2025-04-17T18:07:39.187945Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3
2025-04-17T18:07:39.187998Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890
2025-04-17T18:07:39.189975Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T18:07:39.190072Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889
2025-04-17T18:07:39.190138Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890
2025-04-17T18:07:39.201188Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-17T18:07:39.201321Z nod ... [2000:281474976715662] at 72075186224037888 for LoadAndWaitInRS
2025-04-17T18:07:55.861197Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:07:55.861371Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715662
2025-04-17T18:07:55.861424Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0}
2025-04-17T18:07:55.861482Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890
2025-04-17T18:07:55.861757Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890
2025-04-17T18:07:55.861786Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1
2025-04-17T18:07:55.861826Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715662] at 72075186224037890 for LoadAndWaitInRS
2025-04-17T18:07:55.862060Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:07:55.873347Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T18:07:55.873461Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [3:1109:2846], exec latency: 0 ms, propose latency: 1 ms
2025-04-17T18:07:55.873558Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5}
2025-04-17T18:07:55.873608Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T18:07:55.873729Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662
2025-04-17T18:07:55.873803Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 2
2025-04-17T18:07:55.873927Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-17T18:07:55.873969Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715662] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1109:2846], exec latency: 0 ms, propose latency: 1 ms
2025-04-17T18:07:55.874016Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6}
2025-04-17T18:07:55.874045Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
2025-04-17T18:07:55.874125Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662
2025-04-17T18:07:55.874161Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 2
2025-04-17T18:07:55.874236Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1109:2846] Reply: txId# 281474976715662, status# OK, error#
2025-04-17T18:07:55.874527Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889
2025-04-17T18:07:55.874569Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889
2025-04-17T18:07:55.874929Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-04-17T18:07:55.874980Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:07:55.875014Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0
2025-04-17T18:07:55.875082Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-17T18:07:55.875170Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1103:2841], serverId# [3:1104:2842], sessionId# [0:0:0]
2025-04-17T18:07:55.878614Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889
2025-04-17T18:07:55.879090Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889
2025-04-17T18:07:55.879329Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-04-17T18:07:55.879378Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0
2025-04-17T18:07:55.879431Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance
2025-04-17T18:07:55.879746Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0
2025-04-17T18:07:55.879822Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-17T18:07:55.880514Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1
2025-04-17T18:07:55.880757Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0
2025-04-17T18:07:55.880892Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks:
0 2025-04-17T18:07:55.880950Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2025-04-17T18:07:55.884677Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-17T18:07:55.884729Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037889 2025-04-17T18:07:55.885194Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:07:55.885233Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:55.885271Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2025-04-17T18:07:55.885398Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:55.885452Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:07:55.885508Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:07:55.888950Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:55.889349Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:55.889541Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:55.889587Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:55.889635Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:07:55.889897Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:55.889967Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:55.890703Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-04-17T18:07:55.890965Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:55.891110Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-04-17T18:07:55.891164Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-04-17T18:07:55.931068Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:07:55.931164Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-04-17T18:07:55.931709Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:55.931755Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:55.931798Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-04-17T18:07:55.931940Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:55.932008Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:55.932055Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:55.935157Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-17T18:07:55.935587Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-17T18:07:55.935839Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:07:55.935893Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:55.935952Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-04-17T18:07:55.936203Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:55.936277Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:07:55.936970Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-04-17T18:07:55.937204Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:07:55.937341Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-04-17T18:07:55.937393Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-04-17T18:07:55.980114Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-17T18:07:55.980193Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-04-17T18:07:55.980688Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:07:55.980729Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:55.980769Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2025-04-17T18:07:55.980898Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:55.980969Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:07:55.981023Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2025-04-17T18:07:56.752860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:56.752930Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:56.950169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 
0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-17T18:07:56.967498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:07:56.976898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T18:07:56.977609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-17T18:07:56.983971Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:205:2195], for# user1@builtin, access# DescribeSchema 2025-04-17T18:07:56.984587Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:211:2201], for# user1@builtin, access# 2025-04-17T18:07:57.404354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:57.404417Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:57.455644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-17T18:07:57.461838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:07:57.467892Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 >> TCacheTest::PathBelongsToDomain [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-04-17T18:07:57.677811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:57.677875Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:57.841299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:07:57.862307Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-17T18:07:58.139993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:58.140054Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:58.186533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-17T18:07:58.191321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:07:58.196357Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-17T18:07:58.203274Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:226:2204], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's 
domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:07:58.203535Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:228:2206], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T18:05:49.483169Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:49.597599Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:05:49.604123Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:05:49.604649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:49.628402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:49.628645Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:49.638574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:49.638908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:49.639174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:49.639310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:49.639434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:49.639561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:49.639683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:49.639791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:49.639919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:49.640026Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:49.640128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:49.640236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:49.666292Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:05:49.669083Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:49.669234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:49.669284Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:49.669468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:49.669652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:49.669745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:49.669790Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:49.669904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:49.669971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:49.670018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:49.670076Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:49.670280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:49.670346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:49.670393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:49.670422Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:49.670536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:49.670610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:49.670664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:49.670710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:49.670790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:49.670832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:49.670862Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:49.670927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:49.670973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:49.671009Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:49.671443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-17T18:05:49.671530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-17T18:05:49.671608Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:05:49.671708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-17T18:05:49.671872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:49.671923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:49.671957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-04-17T18:05:49.672156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:49.672205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:49.672234Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:49.672429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:49.672478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:49.672515Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:49.672750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:49.672799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:49.672850Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... 
rce_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.541732Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.541788Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:07:55.541838Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:07:55.541959Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:07:55.542072Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.542114Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:07:55.542210Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-17T18:07:55.542265Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-17T18:07:55.542407Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-04-17T18:07:55.542502Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.542614Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.542768Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.542893Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:07:55.542987Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.543079Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.543133Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:592:2608] finished for tablet 9437184 2025-04-17T18:07:55.543733Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:585:2601];stats={"p":[{"events":["f_bootstrap"],"t":0.065},{"events":["f_ProduceResults"],"t":0.498},{"events":["l_bootstrap"],"t":0.755},{"events":["f_processing","f_task_result"],"t":0.778},{"events":["l_task_result"],"t":7.102},{"events":["f_ack"],"t":7.143},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":8.014}],"full":{"a":1744913267529080,"name":"_full_task","f":1744913267529080,"d_finished":0,"c":0,"l":1744913275543211,"d":8014131},"events":[{"name":"bootstrap","f":1744913267594091,"d_finished":690839,"c":1,"l":1744913268284930,"d":690839},{"a":1744913275542875,"name":"ack","f":1744913274672542,"d_finished":806424,"c":903,"l":1744913275542800,"d":806760},{"a":1744913275542861,"name":"processing","f":1744913268307260,"d_finished":3535721,"c":4515,"l":1744913275542802,"d":3536071},{"name":"ProduceResults","f":1744913268027386,"d_finished":1460480,"c":5420,"l":1744913275543101,"d":1460480},{"a":1744913275543104,"name":"Finish","f":1744913275543104,"d_finished":0,"c":0,"l":1744913275543211,"d":107},{"name":"task_result","f":1744913268307290,"d_finished":2649406,"c":3612,"l":1744913274631145,"d":2649406}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.543834Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:07:55.544372Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:585:2601];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.065},{"events":["f_ProduceResults"],"t":0.498},{"events":["l_bootstrap"],"t":0.755},{"events":["f_processing","f_task_result"],"t":0.778},{"events":["l_task_result"],"t":7.102},{"events":["f_ack"],"t":7.143},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":8.014}],"full":{"a":1744913267529080,"name":"_full_task","f":1744913267529080,"d_finished":0,"c":0,"l":1744913275543901,"d":8014821},"events":[{"name":"bootstrap","f":1744913267594091,"d_finished":690839,"c":1,"l":1744913268284930,"d":690839},{"a":1744913275542875,"name":"ack","f":1744913274672542,"d_finished":806424,"c":903,"l":1744913275542800,"d":807450},{"a":1744913275542861,"name":"processing","f":1744913268307260,"d_finished":3535721,"c":4515,"l":1744913275542802,"d":3536761},{"name":"ProduceResults","f":1744913268027386,"d_finished":1460480,"c":5420,"l":1744913275543101,"d":1460480},{"a":1744913275543104,"name":"Finish","f":1744913275543104,"d_finished":0,"c":0,"l":1744913275543901,"d":797},{"name":"task_result","f":1744913268307290,"d_finished":2649406,"c":3612,"l":1744913274631145,"d":2649406}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-17T18:07:55.544493Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:07:47.461122Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-17T18:07:55.544568Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:07:55.544841Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> 
TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TCacheTest::RacyRecreateAndSync >> TCacheTest::MigrationLostMessage >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> TCacheTest::Attributes >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> TCacheTest::RacyCreateAndSync [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-17T18:07:49.190275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345425071619534:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:49.190350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cf3/r3tmp/tmpWuMwHf/pdisk_1.dat 2025-04-17T18:07:49.545279Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:49.602077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:49.602300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:49.605345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10905 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:07:49.700233Z node 1 :TX_PROXY DEBUG: actor# [1:7494345425071619797:2139] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:49.700314Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345425071620241:2445] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:49.700543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345425071619874:2172], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:49.700635Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345425071619874:2172], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:49.700971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:49.703090Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619439:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345425071620246:2446] 2025-04-17T18:07:49.703174Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345425071619439:2050] Subscribe: subscriber# [1:7494345425071620246:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:49.703236Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619442:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345425071620247:2446] 2025-04-17T18:07:49.703257Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345425071619442:2053] Subscribe: subscriber# [1:7494345425071620247:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:49.703301Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619445:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345425071620248:2446] 2025-04-17T18:07:49.703319Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345425071619445:2056] Subscribe: subscriber# [1:7494345425071620248:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:49.703368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620246:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345425071619439:2050] 2025-04-17T18:07:49.703390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620247:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345425071619442:2053] 2025-04-17T18:07:49.703424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620248:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345425071619445:2056] 2025-04-17T18:07:49.703488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345425071620243:2446] 2025-04-17T18:07:49.703538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7494345425071620242:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345425071620244:2446] 2025-04-17T18:07:49.703583Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345425071620242:2446][/dc-1] Set up state: owner# [1:7494345425071619874:2172], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:49.703731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345425071620245:2446] 2025-04-17T18:07:49.703780Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345425071620242:2446][/dc-1] Path was already updated: owner# [1:7494345425071619874:2172], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:49.703820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620246:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345425071620243:2446], cookie# 1 2025-04-17T18:07:49.703833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620247:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345425071620244:2446], cookie# 1 2025-04-17T18:07:49.703846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620248:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345425071620245:2446], cookie# 1 2025-04-17T18:07:49.703868Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619439:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345425071620246:2446] 2025-04-17T18:07:49.703901Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619439:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345425071620246:2446], cookie# 1 2025-04-17T18:07:49.703930Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619442:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345425071620247:2446] 2025-04-17T18:07:49.703944Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619442:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345425071620247:2446], cookie# 1 2025-04-17T18:07:49.703955Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619445:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345425071620248:2446] 2025-04-17T18:07:49.703965Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345425071619445:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345425071620248:2446], cookie# 1 2025-04-17T18:07:49.706062Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620246:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345425071619439:2050], cookie# 1 2025-04-17T18:07:49.706097Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620247:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345425071619442:2053], cookie# 1 2025-04-17T18:07:49.706108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345425071620248:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345425071619445:2056], cookie# 1 2025-04-17T18:07:49.706137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345425071620243:2446], cookie# 1 2025-04-17T18:07:49.706192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:49.706212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345425071620244:2446], cookie# 1 2025-04-17T18:07:49.706232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:49.706260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345425071620245:2446], cookie# 1 2025-04-17T18:07:49.706273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345425071620242:2446][/dc-1] Unexpected sync response: sender# [1:7494345425071620245:2446], cookie# 1 2025-04-17T18:07:49.761270Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345425071619874:2172], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:49.761638Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345425071619874:2172], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... Set up state: owner# [3:7494345447320550744:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:57.262407Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345460205453578:2799][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7494345460205453595:2799] 2025-04-17T18:07:57.262430Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7494345460205453578:2799][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7494345447320550744:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:57.262456Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550403:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453591:2800] 2025-04-17T18:07:57.262471Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550403:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453589:2798] 2025-04-17T18:07:57.262482Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550403:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453597:2799] 2025-04-17T18:07:57.262497Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550406:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453592:2800] 2025-04-17T18:07:57.262535Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550406:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7494345460205453599:2798] 2025-04-17T18:07:57.262560Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345447320550406:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-04-17T18:07:57.262596Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7494345447320550406:2056] Subscribe: subscriber# [3:7494345460205453599:2798], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:57.262625Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550406:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453598:2799] 2025-04-17T18:07:57.262759Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494345447320550744:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-17T18:07:57.262824Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494345447320550744:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494345460205453580:2800] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 
PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:57.262901Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345447320550744:2129], cacheItem# { Subscriber: { Subscriber: [3:7494345460205453580:2800] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:57.262942Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494345447320550744:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-17T18:07:57.262976Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494345447320550744:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494345460205453577:2798] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:57.263017Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345447320550744:2129], cacheItem# { Subscriber: { Subscriber: [3:7494345460205453577:2798] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:57.263042Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494345447320550744:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-17T18:07:57.263102Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494345447320550744:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7494345460205453578:2799] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:07:57.263151Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345447320550744:2129], cacheItem# { Subscriber: { Subscriber: [3:7494345460205453578:2799] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:57.263248Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345460205453600:2801], recipient# [3:7494345460205453576:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:57.263315Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345460205453601:2802], recipient# [3:7494345460205453575:2318], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:57.263364Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7494345460205453599:2798][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7494345447320550406:2056] 2025-04-17T18:07:57.263392Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494345460205453577:2798][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7494345460205453587:2798] 2025-04-17T18:07:57.263417Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7494345460205453577:2798][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7494345447320550744:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:57.263633Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550400:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453588:2798] 2025-04-17T18:07:57.263658Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550400:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453596:2799] 2025-04-17T18:07:57.263674Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7494345447320550406:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7494345460205453599:2798] 2025-04-17T18:07:57.309939Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345447320550744:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:57.310082Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345447320550744:2129], cacheItem# { Subscriber: { Subscriber: [3:7494345451615518924:2785] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:07:57.310237Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345460205453603:2803], recipient# [3:7494345460205453602:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> TCacheTest::SystemView >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-04-17T18:07:59.203125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:59.203192Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:59.331259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:07:59.345016Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T18:07:59.346647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 
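
The subscriber records at the top of this trace show the scheme board's quorum sync: with size# 3 replicas and half# 1, the main subscriber reports "Sync is in progress" after the first TEvSyncVersionResponse, declares "Sync is done" once successes# exceeds half#, and drops the straggling third reply as "Unexpected sync response". Below is a minimal sketch of that counting rule, with hypothetical names rather than the actual NKikimr subscriber code:

    #include <cstddef>

    // Tracks one sync round across the replica subscribers.
    struct TSyncState {
        size_t Size = 3;       // replicas polled for this cookie
        size_t Successes = 0;  // non-partial TEvSyncVersionResponse replies
        size_t Failures = 0;   // undelivered or partial replies
        bool Done = false;

        size_t Half() const { return Size / 2; }

        // Feed one reply; returns true when this reply completes the sync.
        bool OnResponse(bool success) {
            if (Done) {
                return false;  // late reply -> "Unexpected sync response"
            }
            (success ? Successes : Failures) += 1;
            if (Successes > Half() || Failures > Half()) {
                Done = true;   // majority reached -> "Sync is done"
                return true;
            }
            return false;      // -> "Sync is in progress"
        }
    };

Under such a rule two of three successful replies finish the round, which matches the successes# 2 seen above.
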
FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:07:59.370674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:07:59.389623Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-17T18:07:59.602197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:59.602255Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:59.644804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:07:59.654204Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 >> TCacheTest::Recreate >> TCacheTest::CheckAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 
72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:78:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
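
The TKeyValueTest output here follows a fixed reboot-on-event pattern: a clean pass records which event types the tablet handles, then the scenario is replayed once per event type, with the tablet restarted at that point ("!Reboot ... on event <type> !") and requests re-routed to the new leader actor ("tablet resolver refreshed! new actor is[...]"). A schematic of that harness loop, assuming a hypothetical API rather than the real NKikimr test runtime:

    #include <string>
    #include <vector>

    // One tablet-under-test plus the actor-system scaffolding around it.
    struct TTabletHarness {
        std::vector<std::string> ObservedEvents();           // event types from a clean run
        bool RunScenario(const std::string& rebootOnEvent);  // reboot when the event fires
        void RefreshResolver();                              // re-route to the new leader actor
    };

    // Replays the scenario once per event type, restarting the tablet
    // mid-flight each time and expecting the same final result.
    bool RunWithReboots(TTabletHarness& harness) {
        if (!harness.RunScenario("")) {                      // baseline pass, no reboot
            return false;
        }
        for (const auto& event : harness.ObservedEvents()) {
            if (!harness.RunScenario(event)) {               // "rebooted!" mid-scenario
                return false;
            }
            harness.RefreshResolver();                       // "tablet resolver refreshed!"
        }
        return true;
    }

The point of the design is fault-injection coverage: every handled event becomes a restart point, so recovery is exercised at each step of the scenario.
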
new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> TCacheTest::SystemView [GOOD] >> TCacheTest::TableSchemaVersion >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: 2025-04-17T18:07:40.021794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:07:40.022350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:07:40.022405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00101e/r3tmp/tmpZmbLfQ/pdisk_1.dat 2025-04-17T18:07:40.401998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:07:40.443057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:40.486379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:40.486544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:40.501174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:40.587957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:40.643491Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-17T18:07:40.643757Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:40.693452Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:40.693715Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:40.695539Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:07:40.695639Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:07:40.695698Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:07:40.696083Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:40.696397Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:40.696482Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-04-17T18:07:40.698159Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-04-17T18:07:40.698389Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:40.709978Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-04-17T18:07:40.710200Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:40.720167Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:40.720339Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:40.721803Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:07:40.721866Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:07:40.721929Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:07:40.722300Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:40.722488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:40.722554Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:739:2588] in generation 1 2025-04-17T18:07:40.722988Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:40.723075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:07:40.724384Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-17T18:07:40.724456Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-17T18:07:40.724509Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-17T18:07:40.724813Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:07:40.724915Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:07:40.724955Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-04-17T18:07:40.736204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:40.766779Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:07:40.766969Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:40.767083Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-04-17T18:07:40.767108Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:07:40.767133Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:07:40.767163Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:07:40.767440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:40.767467Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:07:40.767500Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:40.767561Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-04-17T18:07:40.767587Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:07:40.767608Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:07:40.767626Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:07:40.767858Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:07:40.767877Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-17T18:07:40.767910Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:07:40.767942Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-04-17T18:07:40.767955Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T18:07:40.767968Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-17T18:07:40.767980Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:07:40.768129Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:07:40.768212Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:07:40.768324Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:07:40.768352Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:40.768381Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:07:40.768412Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:07:40.768441Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:07:40.768475Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:07:40.768822Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-17T18:07:40.768858Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:07:40.768876Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:40.768893Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:07:40.768912Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:07:40.768948Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-17T18:07:40.769006Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-17T18:07:40.769157Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:07:40.769345Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:07:40.769425Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:07:40.769716Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:711:2600], sessionId# [0:0:0] 2025-04-17T18:07:40.769745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:07:40.769763Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:40.769778Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-17T18:07:40.769798Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:07:40.769938Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:07:40.770044Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:07:40.770105Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:07:40.770361Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:716:2604], sessionId# [0:0:0] 2025-04-17T18:07:40.770475Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-17T18:07:40.770570Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-04-17T18:07:40.770641Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-04-17T18:07:40.771913Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:07:40.771974Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:07:40.772002Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:07:40.783106Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:07:40.783222Z nod ... TX_DATASHARD DEBUG: Deleted RS at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 seqno 1 txId 281474976715665 2025-04-17T18:07:59.279155Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:59.279266Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-17T18:07:59.279444Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 2500 TxId: 281474976715666 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-17T18:07:59.279606Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 2500 TxId: 281474976715666 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-17T18:07:59.279676Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2500 TxId: 281474976715666 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-17T18:07:59.279858Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:59.290998Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2500} 2025-04-17T18:07:59.291109Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-17T18:07:59.302432Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 2500} 2025-04-17T18:07:59.302527Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-17T18:07:59.302591Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715666 2025-04-17T18:07:59.302656Z node 3 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037891 2025-04-17T18:07:59.302722Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1443:3073], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:07:59.302864Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 } 2025-04-17T18:07:59.302927Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:07:59.303289Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1443:3073] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037891, status# 2 2025-04-17T18:07:59.303441Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715666 2025-04-17T18:07:59.303546Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 2500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2025-04-17T18:07:59.303646Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037893 2025-04-17T18:07:59.304028Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2025-04-17T18:07:59.304330Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1241:2953], at tablet# 72075186224037891 2025-04-17T18:07:59.304389Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2025-04-17T18:07:59.304460Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-17T18:07:59.304498Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:07:59.304538Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715666] at 72075186224037893 for LoadAndWaitInRS 2025-04-17T18:07:59.304929Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:59.305445Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2025-04-17T18:07:59.316643Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-17T18:07:59.316716Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1443:3073], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:07:59.316788Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 2500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2025-04-17T18:07:59.316828Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037893 2025-04-17T18:07:59.316935Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2025-04-17T18:07:59.316965Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2025-04-17T18:07:59.317033Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2025-04-17T18:07:59.317062Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2025-04-17T18:07:59.317177Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1443:3073] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037893, status# 2 2025-04-17T18:07:59.317242Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1443:3073] Reply: txId# 281474976715666, status# OK, error# 2025-04-17T18:07:59.317357Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715666 2025-04-17T18:07:59.317591Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-04-17T18:07:59.317628Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037891 2025-04-17T18:07:59.317765Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1437:3068], serverId# [3:1438:3069], sessionId# [0:0:0] 2025-04-17T18:07:59.317891Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-17T18:07:59.317928Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:59.317963Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-17T18:07:59.318905Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037893 2025-04-17T18:07:59.319224Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037893 2025-04-17T18:07:59.319385Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-17T18:07:59.319423Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:59.319472Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037893 for WaitForStreamClearance 2025-04-17T18:07:59.319702Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:59.319763Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-17T18:07:59.320290Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-04-17T18:07:59.320424Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-04-17T18:07:59.354107Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2025-04-17T18:07:59.354203Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037893 2025-04-17T18:07:59.354488Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-17T18:07:59.354530Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:59.354573Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037893 for ReadTableScan 
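
The conditional-erase trace above advances each datashard operation through execution units one at a time: it is found as a "ready candidate" for WaitForStreamClearance, is granted scan quota ("Got quota for read table scan"), becomes ready for ReadTableScan, and completes once the full scan finishes. A rough sketch of that unit progression, using hypothetical types since the real TX_DATASHARD pipeline has many more units:

    #include <cstdio>

    enum class EUnit { WaitForStreamClearance, ReadTableScan, Completed };

    // Moves an operation to its next execution unit, mirroring the
    // "Found ready candidate operation ... for <unit>" steps in the log.
    EUnit Advance(EUnit u) {
        switch (u) {
            case EUnit::WaitForStreamClearance:
                return EUnit::ReadTableScan;  // quota granted, scan may start
            case EUnit::ReadTableScan:
                return EUnit::Completed;      // "Finish scan", full scan complete
            default:
                return EUnit::Completed;
        }
    }

    int main() {
        EUnit u = EUnit::WaitForStreamClearance;
        while (u != EUnit::Completed) {
            u = Advance(u);                   // one pipeline step per iteration
        }
        std::puts("operation complete");
    }
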
2025-04-17T18:07:59.354714Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:59.354785Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-17T18:07:59.354828Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-17T18:07:59.355839Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-04-17T18:07:59.356075Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-04-17T18:07:59.356183Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-17T18:07:59.356205Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:59.356230Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037892 for WaitForStreamClearance 2025-04-17T18:07:59.356369Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:59.356405Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-17T18:07:59.356768Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-04-17T18:07:59.356868Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-04-17T18:07:59.358041Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-17T18:07:59.358066Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037892 2025-04-17T18:07:59.358224Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-17T18:07:59.358246Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:07:59.358270Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037892 for ReadTableScan 2025-04-17T18:07:59.358333Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:07:59.358366Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-17T18:07:59.358400Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2025-04-17T18:07:59.893492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:59.893547Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:00.011356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:08:00.027763Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-17T18:08:00.352981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:00.353050Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:00.408659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:08:00.419940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-17T18:08:00.423515Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:197:2187], for# user1@builtin, access# DescribeSchema 2025-04-17T18:08:00.424118Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:201:2191], for# user1@builtin, access# DescribeSchema >> TCacheTest::MigrationUndo [GOOD] >> TCacheTest::SysLocks [GOOD] |87.7%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |87.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> TCacheTest::TableSchemaVersion [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-04-17T18:08:00.637036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:00.637093Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:00.789500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:08:00.806442Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T18:08:00.808184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:08:00.857660Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:08:00.868048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-17T18:08:01.085623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:01.085690Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:01.140448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-04-17T18:07:59.315063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:59.315137Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:59.428994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-04-17T18:07:59.471866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:59.471911Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-04-17T18:07:59.499173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:07:59.505952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:245:2067] recipient: [1:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:245:2067] recipient: [1:238:2213] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:246:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:246:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:240:2214] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:240:2214] Leader for TabletID 72075186233409546 is [1:250:2219] sender: [1:251:2067] recipient: [1:238:2213] Leader for TabletID 72075186233409546 is [1:250:2219] sender: [1:252:2067] recipient: [1:24:2071] Leader for TabletID 
72075186233409547 is [0:0:0] sender: [1:253:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:253:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:255:2221] sender: [1:258:2067] recipient: [1:240:2214] Leader for TabletID 72075186233409547 is [1:255:2221] sender: [1:290:2067] recipient: [1:24:2071] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-17T18:07:59.533796Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-17T18:07:59.612314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:333:2067] recipient: [1:330:2280] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:333:2067] recipient: [1:330:2280] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:337:2284] sender: [1:338:2067] recipient: [1:330:2280] Leader for TabletID 72075186233409548 is [1:337:2284] sender: [1:341:2067] recipient: [1:24:2071] TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 2025-04-17T18:07:59.674616Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-17T18:07:59.752778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:417:2067] recipient: [1:413:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:417:2067] recipient: [1:413:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:418:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:418:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:420:2337] sender: [1:421:2067] recipient: [1:413:2333] Leader for TabletID 72075186233409549 is [1:420:2337] sender: [1:422:2067] recipient: [1:24:2071] 2025-04-17T18:07:59.790377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:59.790424Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: 
false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-04-17T18:07:59.807402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:07:59.807447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T18:07:59.807650Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-17T18:07:59.807729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T18:07:59.823618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-17T18:07:59.824016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-17T18:07:59.875903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-04-17T18:07:59.956768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:621:2067] recipient: [1:617:2502] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: 
[1:621:2067] recipient: [1:617:2502] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:622:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:622:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:624:2506] sender: [1:625:2067] recipient: [1:617:2502] Leader for TabletID 72075186233409550 is [1:624:2506] sender: [1:626:2067] recipient: [1:24:2071] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 TestWaitNotification: OK eventTxId 110 2025-04-17T18:08:00.399469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:00.399515Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:00.451431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-17T18:08:00.456672Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:174:2170], Recipient [2:70:2109]: NActors::TEvents::TEvPoison 2025-04-17T18:08:00.457292Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 7205759404667894 ... 
: 2] was 2 2025-04-17T18:08:00.994283Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2025-04-17T18:08:00.994302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:08:00.994331Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-17T18:08:00.994353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T18:08:00.994466Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-17T18:08:00.994660Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.994824Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:08:00.995105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.995219Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.995634Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.995714Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.995925Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.996075Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.996196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.996360Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.996432Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.996614Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.996844Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.996997Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.997051Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.997096Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:08:00.997330Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:08:00.998608Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:08:00.998748Z node 2 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T18:08:00.999669Z 
node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [2:515:2401], Recipient [2:515:2401]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-17T18:08:00.999732Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-17T18:08:01.000290Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:08:01.000359Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:08:01.000571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:08:01.000616Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:08:01.000653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:08:01.000681Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:08:01.000855Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:531:2401], Recipient [2:515:2401]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T18:08:01.000893Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T18:08:01.000926Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:08:01.023346Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:01.023496Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:383:2319] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:01.023736Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:544:2419], recipient# [2:543:2418], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:08:01.024193Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:01.024277Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:392:2322] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:01.024418Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:546:2421], recipient# [2:545:2420], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:08:01.024813Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:01.024927Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:401:2325] DomainOwnerId: 72057594046678944 
Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 300 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:01.025128Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:548:2423], recipient# [2:547:2422], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-04-17T18:08:00.561857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:00.561920Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:00.704433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-17T18:08:01.011708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:01.011774Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:01.069799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-17T18:08:01.127865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T18:08:01.319211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> TCacheTest::List >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> ColumnShardTiers::DSConfigsStub [GOOD] >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> ColumnShardTiers::DSConfigs [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> TCacheTest::MigrationCommit [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: 2025-04-17T18:06:19.639811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:19.640439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:19.640505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000aa9/r3tmp/tmp25pgcv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12375, node 1 TClient is connected to server localhost:2474 2025-04-17T18:06:20.184140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:20.225809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:20.230290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:20.230350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:20.230384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:20.230810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:20.265821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:20.265938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:20.277570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:20.386008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-17T18:06:20.510547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:20.510804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:20.511090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:06:20.511227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:06:20.511341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:06:20.511456Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:06:20.511558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:06:20.511662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:06:20.511798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:06:20.511925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:06:20.512067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:06:20.512167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:06:20.528298Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-17T18:06:20.529580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:06:20.529652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:06:20.529755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:06:20.529788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:06:20.529934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:06:20.529980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:06:20.530060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:06:20.530100Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:06:20.530154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:06:20.530200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:06:20.530234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:06:20.530258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:06:20.530841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:06:20.530894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:06:20.531045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:06:20.531085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:06:20.531262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:06:20.531325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:06:20.531495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:06:20.531525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:06:20.531605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:06:20.531631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:06:20.549845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:20.549915Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:20.550073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:06:20.550131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:06:20.550210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:06:20.550269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:06:20.550339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=norma ... -04-17T18:07:16.231599Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-17T18:07:16.231658Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-17T18:07:16.231695Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-04-17T18:07:16.231745Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:16.231803Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-17T18:07:16.231853Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; Initialization finished REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 2025-04-17T18:07:27.395619Z node 1 :TX_PROXY ERROR: Actor# [1:3713:4848] txid# 281474976715753, issues: { message: "Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable" severity: 1 } REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=
: Error: Execution, code: 1060
:1:27: Error: Executing DROP OBJECT EXTERNAL_DATA_SOURCE
: Error:
: Error: Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable, code: 2003 , code: 2003 ;EXPECTATION=0 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2025-04-17T18:07:38.542265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715764:0, at schemeshard: 72057594046644480 2025-04-17T18:07:39.231001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715764;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715764; 2025-04-17T18:07:39.231379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715764;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715764; 2025-04-17T18:07:39.231722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715764;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715764; REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
: Info: Execution, code: 1060
:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-17T18:07:49.726950Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:07:49.727370Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:07:49.727431Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-17T18:07:49.727486Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:07:49.727545Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-04-17T18:07:49.727589Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:49.727655Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-17T18:07:49.727713Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:49.727860Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:07:49.727898Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:07:49.727931Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:07:49.728027Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:07:49.728061Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2025-04-17T18:07:49.728094Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037888 2025-04-17T18:07:49.728124Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2025-04-17T18:07:49.728150Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2025-04-17T18:07:49.728187Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037888 2025-04-17T18:07:49.728228Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:49.728259Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:07:49.728283Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2025-04-17T18:07:49.728309Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037889 2025-04-17T18:07:49.728334Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2025-04-17T18:07:49.728358Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-04-17T18:07:49.728388Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037889 2025-04-17T18:07:49.728446Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:49.728487Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:07:49.728512Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2025-04-17T18:07:49.728540Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037890 2025-04-17T18:07:49.728566Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2025-04-17T18:07:49.728589Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-04-17T18:07:49.728622Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037890 2025-04-17T18:07:49.728658Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:49.728880Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:745:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-17T18:07:49.728995Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:750:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-17T18:07:49.729059Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:760:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-17T18:08:00.946045Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:08:00.946130Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:08:00.946160Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:08:00.946338Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:08:00.946549Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:08:00.946666Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-17T18:08:00.946770Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:08:00.946898Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:00.948394Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:08:00.948437Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2025-04-17T18:08:00.948473Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped 
at tablet 72075186224037888 2025-04-17T18:08:00.948528Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:00.948561Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:08:00.948587Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2025-04-17T18:08:00.948615Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-04-17T18:08:00.948653Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:00.948682Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:08:00.948706Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2025-04-17T18:08:00.948732Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-04-17T18:08:00.948767Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:00.949844Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:745:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-17T18:08:00.949922Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:750:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-17T18:08:00.949974Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:760:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> TPQCachingProxyTest::TestPublishAndForget >> TPQCachingProxyTest::MultipleSessions >> TPQCachingProxyTest::TestDeregister ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2025-04-17T18:08:02.600429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:02.600486Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:02.750193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-04-17T18:08:02.776886Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:08:02.777235Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:08:02.777301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-17T18:08:03.043558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:03.043607Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:08:03.097201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-04-17T18:08:03.145731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:03.145813Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-04-17T18:08:03.174646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:08:03.181334Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:243:2217] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:243:2217] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:252:2219] sender: [2:253:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:256:2067] recipient: [2:243:2217] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-17T18:08:03.211745Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:252:2219] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-17T18:08:03.279082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:344:2289] sender: [2:345:2067] recipient: [2:336:2285] TestWaitNotification: OK eventTxId 103 Leader for TabletID 72075186233409548 is [2:344:2289] sender: [2:362:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-17T18:08:03.477838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2333] Leader for TabletID 
72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:424:2067] recipient: [2:416:2333] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:425:2067] recipient: [2:24:2071] 2025-04-17T18:08:03.522097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:03.522161Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-04-17T18:08:03.543299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:08:03.543365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T18:08:03.543752Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-17T18:08:03.543893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T18:08:03.561898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-17T18:08:03.561974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:511:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:513:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:515:2067] recipient: [2:514:2407] Leader for TabletID 72057594046678944 is [2:516:2408] sender: [2:517:2067] recipient: [2:514:2407] 2025-04-17T18:08:03.615731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:08:03.615805Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:516:2408] sender: [2:544:2067] recipient: 
[2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: 2025-04-17T18:06:06.726862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:06.727499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:06.727574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c88/r3tmp/tmpgAVh8D/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12478, node 1 TClient is connected to server localhost:24559 2025-04-17T18:06:07.659501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:07.701943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:07.714804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:07.714860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:07.714896Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:07.716102Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:07.753806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:07.754326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:07.767387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:19.821288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:19.821470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:19.826369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-17T18:06:19.987701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:868:2707], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:19.987812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:19.988087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:873:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:19.993841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T18:06:20.124423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:875:2714], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:06:20.625246Z node 1 :TX_PROXY ERROR: Actor# [1:972:2782] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:21.463868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:21.800489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-17T18:06:22.312459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T18:06:22.918443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:06:23.286917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:06:24.423449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:06:24.712045Z node 1 :TX_DATASHARD NOTICE: Starting TBuildIndexScan TabletId: 72075186224037889 BuildIndexId: 281474976715679 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 9 TargetName: "/Root/.metadata/secrets/values/index_by_secret_id/indexImplTable" IndexColumns: "secretId" IndexColumns: "ownerUserId" KeyRange { From: "\002\000\000\000\000\200\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } SnapshotTxId: 281474976710758 SnapshotStep: 13000 SeqNoGeneration: 2 SeqNoRound: 1 ScanSettings { } row version v13000/281474976710758 2025-04-17T18:06:24.715386Z node 1 :TX_DATASHARD NOTICE: Done TBuildIndexScan TabletId: 72075186224037889 Id: 281474976715679, requested range: [(Utf8 : NULL, Utf8 : NULL) ; ()), last acked point: ()Stats { RowsSent: 0 BytesSent: 0 }Status { Code: SUCCESS Issues:
<main>: Error: Shard or requested range is empty } BuildIndexId: 281474976715679 TabletId: 72075186224037889 Status: DONE UploadStatus: SUCCESS Issues { message: "Shard or requested range is empty" severity: 1 } RequestSeqNoGeneration: 2 RequestSeqNoRound: 1 2025-04-17T18:06:24.717298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:39.509436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-04-17T18:06:40.265556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:06:40.265616Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:40.584951Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-04-17T18:06:40.585011Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-17T18:06:40.585079Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-04-17T18:06:40.585160Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-17T18:06:40.585414Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-04-17T18:06:40.585656Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-17T18:06:40.585713Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-04-17T18:06:40.585759Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-17T18:06:40.585820Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:06:40.587267Z node 1 :TX_TIERING DEBUG: 
fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-04-17T18:06:40.588499Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-04-17T18:06:40.588687Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-04-17T18:06:40.588832Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-17T18:06:40.588908Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-17T18:06:40.588962Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_ ... :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:39.810330Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-17T18:07:39.810390Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-17T18:07:50.848551Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:50.848632Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:50.848665Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:50.848705Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:50.849151Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:50.849271Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:50.849320Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-04-17T18:07:50.849375Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-04-17T18:07:50.849430Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-04-17T18:07:50.849469Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-17T18:07:50.849536Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-04-17T18:07:50.849606Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:50.849652Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:50.849679Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-04-17T18:07:50.849706Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-04-17T18:07:50.849732Z node 
1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-04-17T18:07:50.849755Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-17T18:07:50.849785Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-04-17T18:07:50.849820Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:50.849864Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:50.849889Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-04-17T18:07:50.849916Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-04-17T18:07:50.849942Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-04-17T18:07:50.849963Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-17T18:07:50.849993Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-04-17T18:07:50.850028Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:50.850071Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:50.850095Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-17T18:07:50.850120Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:50.850150Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-17T18:07:50.850194Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:07:50.850227Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-17T18:07:50.850258Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:50.850373Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:50.850401Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-17T18:07:50.850427Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:50.850454Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-17T18:07:50.850477Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:07:50.850508Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-17T18:07:50.850541Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:50.850680Z node 1 :TX_TIERING DEBUG: 
tablet_id=72075186224037892;self_id=[1:3157:4424];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-17T18:07:50.850766Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3163:4427];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-17T18:07:50.850830Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3166:4430];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-17T18:08:01.905464Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:01.905551Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:01.905588Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:01.905634Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:01.905812Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:01.906056Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:01.906515Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:01.906574Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-04-17T18:08:01.906636Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-17T18:08:01.906724Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:01.906773Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:01.906801Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-04-17T18:08:01.906832Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-17T18:08:01.906873Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:01.906901Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:01.906927Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-04-17T18:08:01.906955Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-17T18:08:01.906997Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:01.907036Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 
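[Editor's note] The DROP EXTERNAL DATA SOURCE trace above repeats one pattern per tablet: an object_deleted event clears that tier's config (has_config=0), stops the tier, and restarts the tiers that remain so they reload the shrunken config set, finishing with a configs_updated summary. Below is a minimal self-contained sketch of that bookkeeping; TTierRegistry and its methods are hypothetical illustrations, not the actual NKikimr tiering-manager API.

    #include <iostream>
    #include <map>
    #include <string>

    // Toy model of the tiering manager's reaction to schema-object deletion:
    // dropping a tier stops it, and the remaining tiers are restarted against
    // the reduced config set. All names here are illustrative only.
    class TTierRegistry {
        std::map<std::string, bool> Tiers; // tier name -> has_config
    public:
        void UpdateConfig(const std::string& name, bool hasConfig) {
            std::cout << "update_tier_config name=" << name
                      << " has_config=" << hasConfig << "\n";
            Tiers[name] = hasConfig;
        }
        void OnObjectDeleted(const std::string& name) {
            UpdateConfig(name, /*hasConfig=*/false); // mirrors has_config=0
            std::cout << "Tier '" << name << "' stopped\n";
            Tiers.erase(name);
            // Surviving tiers are restarted so they see the new config set.
            for (const auto& [tier, hasConfig] : Tiers) {
                if (hasConfig) {
                    std::cout << "Restarting tier '" << tier << "'\n";
                }
            }
        }
    };

    int main() {
        TTierRegistry registry;
        registry.UpdateConfig("/Root/tier1", true);
        registry.UpdateConfig("/Root/tier2", true);
        registry.OnObjectDeleted("/Root/tier2"); // as in the DROP of tier2 above
        registry.OnObjectDeleted("/Root/tier1"); // then tier1, leaving TIERS empty
    }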
2025-04-17T18:08:01.907060Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-17T18:08:01.907086Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:08:01.907125Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:01.907192Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:01.907216Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-17T18:08:01.907243Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:08:01.907280Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:01.907926Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:01.907964Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-17T18:08:01.907996Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:08:01.908038Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:01.908646Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3157:4424];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-17T18:08:01.908737Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3163:4427];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-17T18:08:01.908791Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3166:4430];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> TPQCachingProxyTest::MultipleSessions [GOOD] >> TPQCachingProxyTest::TestDeregister [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-04-17T18:08:04.506072Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:08:04.506148Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:08:04.522968Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:08:04.523669Z node 1 
:PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T18:08:04.523743Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2025-04-17T18:08:04.523857Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-04-17T18:08:04.505196Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:08:04.505308Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:08:04.522997Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:08:04.523650Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2025-04-17T18:08:04.523737Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-17T18:08:04.523772Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-04-17T18:08:04.523855Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-04-17T18:08:04.523944Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored 2025-04-17T18:08:04.523997Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T18:08:04.524079Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-04-17T18:08:04.505208Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:08:04.505315Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:08:04.523735Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:08:04.523867Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T18:08:04.523994Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-17T18:08:04.524042Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T18:08:04.524159Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-04-17T18:08:04.505223Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:08:04.505325Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:08:04.523223Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:08:04.523653Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T18:08:04.523788Z node 1 :PQ_READ_PROXY DEBUG: 
Direct read cache: staged direct read id 1 for session: session1 2025-04-17T18:08:04.523850Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2025-04-17T18:08:04.523886Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T18:08:04.523963Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-04-17T18:08:04.524028Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2025-04-17T18:08:04.524080Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2025-04-17T18:08:04.524159Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2, Generation: 2 >> TPQCachingProxyTest::OutdatedSession >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD] >> TPQCachingProxyTest::OutdatedSession [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-04-17T18:08:05.672329Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:08:05.672407Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-17T18:08:05.687227Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:08:05.687310Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-17T18:08:05.687378Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-17T18:08:05.687417Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-17T18:08:05.687489Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 |87.8%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] Test command err: 2025-04-17T18:05:39.219508Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:39.369153Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:05:39.383271Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:05:39.386070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:39.413067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:39.413384Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:39.425686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:39.425946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:39.426207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:39.426351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:39.426474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:39.426587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:39.426711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:39.426846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:39.426972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:39.427084Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.427194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:39.427331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:39.456932Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:05:39.459550Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:39.459681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:39.459734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:39.459921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:39.461382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:39.461493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:39.461547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:39.461665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:39.461833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:39.461885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:39.461922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:39.462150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:39.462245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:39.462290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:39.462322Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:39.462412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:39.462485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:39.462552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:39.462596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:39.462670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:39.462716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:39.462751Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:39.462836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:39.462877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:39.462906Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:39.463311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-17T18:05:39.463408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-17T18:05:39.463507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-17T18:05:39.463596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-17T18:05:39.463745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:39.463815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:39.463857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
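[Editor's note] The TTxUpdateSchema trace shows the column shard walking a fixed normalizer chain at startup: Granules, Chunks, TablesCleaner, CleanGranuleId and so on, each running to completion (normalizer_finished) before the next is switched in (normalizer_switched) and reporting how many chunks it found. A self-contained sketch of that sequential pattern follows; the struct and lambdas are invented stand-ins for the real normalizer classes.

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Illustrative model of a sequential normalizer chain: each stage runs to
    // completion and reports its chunk count before the next stage starts.
    struct TNormalizer {
        std::string Name;
        std::function<size_t()> Run; // returns number of chunks repaired
    };

    int main() {
        std::vector<TNormalizer> chain = {
            {"Granules",       [] { return size_t(0); }},
            {"Chunks",         [] { return size_t(0); }},
            {"TablesCleaner",  [] { return size_t(0); }},
            {"CleanGranuleId", [] { return size_t(0); }},
        };
        for (size_t i = 0; i < chain.size(); ++i) {
            std::cout << "normalizer_switched description=CLASS_NAME="
                      << chain[i].Name << "\n";
            size_t chunks = chain[i].Run();
            std::cout << chain[i].Name << ": " << chunks << " chunks found\n"
                      << "normalizer_finished id=" << i + 1 << "\n";
        }
    }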
2025-04-17T18:05:39.464118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:39.464169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.464208Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.464390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:39.464446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:39.464485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:39.464655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:39.464695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:39.464725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... 
mn_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.940796Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.940831Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:08:03.940868Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:08:03.940979Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:08:03.941074Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.941108Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:08:03.941189Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-17T18:08:03.941237Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-17T18:08:03.941375Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-04-17T18:08:03.941511Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.941613Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.941798Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.941930Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:08:03.942021Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.942098Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.942140Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:858:2850] finished for tablet 9437184 2025-04-17T18:08:03.942832Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:851:2843];stats={"p":[{"events":["f_bootstrap"],"t":0.056},{"events":["f_ProduceResults"],"t":0.516},{"events":["l_bootstrap"],"t":0.777},{"events":["f_processing","f_task_result"],"t":0.8},{"events":["l_task_result"],"t":7.192},{"events":["f_ack"],"t":7.239},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":8.062}],"full":{"a":1744913275879372,"name":"_full_task","f":1744913275879372,"d_finished":0,"c":0,"l":1744913283942241,"d":8062869},"events":[{"name":"bootstrap","f":1744913275936291,"d_finished":720343,"c":1,"l":1744913276656634,"d":720343},{"a":1744913283941910,"name":"ack","f":1744913283118442,"d_finished":765006,"c":904,"l":1744913283941834,"d":765337},{"a":1744913283941897,"name":"processing","f":1744913276679487,"d_finished":3490426,"c":4520,"l":1744913283941836,"d":3490770},{"name":"ProduceResults","f":1744913276395449,"d_finished":1445629,"c":5426,"l":1744913283942118,"d":1445629},{"a":1744913283942122,"name":"Finish","f":1744913283942122,"d_finished":0,"c":0,"l":1744913283942241,"d":119},{"name":"task_result","f":1744913276679522,"d_finished":2647018,"c":3616,"l":1744913283072250,"d":2647018}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.942947Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:08:03.943569Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:851:2843];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.056},{"events":["f_ProduceResults"],"t":0.516},{"events":["l_bootstrap"],"t":0.777},{"events":["f_processing","f_task_result"],"t":0.8},{"events":["l_task_result"],"t":7.192},{"events":["f_ack"],"t":7.239},{"events":["l_ProduceResults","f_Finish"],"t":8.062},{"events":["l_ack","l_processing","l_Finish"],"t":8.063}],"full":{"a":1744913275879372,"name":"_full_task","f":1744913275879372,"d_finished":0,"c":0,"l":1744913283943002,"d":8063630},"events":[{"name":"bootstrap","f":1744913275936291,"d_finished":720343,"c":1,"l":1744913276656634,"d":720343},{"a":1744913283941910,"name":"ack","f":1744913283118442,"d_finished":765006,"c":904,"l":1744913283941834,"d":766098},{"a":1744913283941897,"name":"processing","f":1744913276679487,"d_finished":3490426,"c":4520,"l":1744913283941836,"d":3491531},{"name":"ProduceResults","f":1744913276395449,"d_finished":1445629,"c":5426,"l":1744913283942118,"d":1445629},{"a":1744913283942122,"name":"Finish","f":1744913283942122,"d_finished":0,"c":0,"l":1744913283943002,"d":880},{"name":"task_result","f":1744913276679522,"d_finished":2647018,"c":3616,"l":1744913283072250,"d":2647018}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:03.943674Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:07:55.813685Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-17T18:08:03.943740Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:08:03.944047Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> ColumnShardTiers::DSConfigsWithQueryServiceDdl 
[GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:143:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:146:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:90:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:91:2057] recipient: [26:89:2118] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:93:2057] recipient: [26:89:2118] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:92:2119] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:146:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TBlobStorageProxyTest::TestPersistence >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TGRpcCmsTest::AuthTokenTest >> TGRpcCmsTest::DescribeOptionsTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: 2025-04-17T18:06:10.984454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:10.984974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:10.985036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000c15/r3tmp/tmpAOTI0j/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31023, node 1 TClient is connected to server localhost:29834 2025-04-17T18:06:11.566293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:11.610239Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:11.614639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:11.614699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:11.614749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:11.615152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:11.650526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:11.650680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:11.662297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:23.312747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:23.312822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:23.312874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:23.317110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T18:06:23.330560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T18:06:23.371720Z node 1 :TX_PROXY ERROR: Actor# [1:813:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:23.590203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-17T18:06:24.401253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:24.802565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-17T18:06:25.490653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T18:06:26.211078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:06:26.601203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:06:27.601674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:06:27.891660Z node 1 :TX_DATASHARD NOTICE: Starting TBuildIndexScan TabletId: 72075186224037889 BuildIndexId: 281474976715679 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 9 TargetName: "/Root/.metadata/secrets/values/index_by_secret_id/indexImplTable" IndexColumns: "secretId" IndexColumns: "ownerUserId" KeyRange { From: "\002\000\000\000\000\200\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } SnapshotTxId: 281474976710758 SnapshotStep: 13000 SeqNoGeneration: 2 SeqNoRound: 1 ScanSettings { } row version v13000/281474976710758 2025-04-17T18:06:27.892618Z node 1 :TX_DATASHARD NOTICE: Done TBuildIndexScan TabletId: 72075186224037889 Id: 281474976715679, requested range: [(Utf8 : NULL, Utf8 : NULL) ; ()), last acked point: ()Stats { RowsSent: 0 BytesSent: 0 }Status { Code: SUCCESS Issues:
: Error: Shard or requested range is empty } BuildIndexId: 281474976715679 TabletId: 72075186224037889 Status: DONE UploadStatus: SUCCESS Issues { message: "Shard or requested range is empty" severity: 1 } RequestSeqNoGeneration: 2 RequestSeqNoRound: 1 2025-04-17T18:06:27.894751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:42.900040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-04-17T18:06:43.384362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:06:43.384428Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:43.986879Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-04-17T18:06:43.986960Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-17T18:06:43.987025Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-04-17T18:06:43.987106Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-17T18:06:43.987258Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-04-17T18:06:43.987585Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-17T18:06:43.987635Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-04-17T18:06:43.987685Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-17T18:06:43.987750Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:06:43.989251Z node 1 :TX_TIERING DEBUG: 
fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-04-17T18:06:43.990593Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-04-17T18:06:43.990761Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-04-17T18:06:43.990912Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-17T18:06:43.990997Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-17T18:06:43.991075Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:55.235064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715717:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", ... :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:43.509531Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-17T18:07:43.509577Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-17T18:07:54.565654Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:54.565739Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:54.565772Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:54.565931Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:54.566441Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-17T18:07:54.566564Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:54.566625Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-04-17T18:07:54.566686Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-04-17T18:07:54.566745Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-04-17T18:07:54.566792Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-17T18:07:54.566865Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-04-17T18:07:54.566938Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 
2025-04-17T18:07:54.566987Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:54.567016Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-04-17T18:07:54.567045Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-04-17T18:07:54.567073Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-04-17T18:07:54.567098Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-17T18:07:54.567132Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-04-17T18:07:54.567171Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:54.567205Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:54.567230Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-04-17T18:07:54.567259Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-04-17T18:07:54.567285Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-04-17T18:07:54.567311Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-17T18:07:54.567342Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-04-17T18:07:54.567377Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:54.567425Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:54.567450Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-17T18:07:54.567476Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:54.567513Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-17T18:07:54.567548Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:07:54.567588Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-17T18:07:54.567632Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:07:54.567927Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3152:4422];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-17T18:07:54.568023Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3158:4425];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-17T18:07:54.568089Z node 1 :TX_TIERING DEBUG: 
tablet_id=72075186224037894;self_id=[1:3166:4431];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-17T18:07:54.568198Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-17T18:07:54.568228Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-17T18:07:54.568260Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-17T18:07:54.568297Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-17T18:07:54.568331Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:07:54.568376Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-17T18:07:54.568414Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-17T18:08:05.603222Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:05.603344Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:05.603407Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:05.603442Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:05.603483Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:05.603637Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-17T18:08:05.604064Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:05.604119Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-17T18:08:05.604178Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:08:05.604258Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:05.604472Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:05.604504Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-17T18:08:05.604533Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-17T18:08:05.604574Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:05.605094Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:05.605129Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-17T18:08:05.605158Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at 
tablet 0 2025-04-17T18:08:05.605199Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:05.605403Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:05.605439Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-04-17T18:08:05.605468Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-17T18:08:05.605512Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:05.605544Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:05.605568Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-04-17T18:08:05.605594Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-17T18:08:05.605633Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:05.605666Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-17T18:08:05.605691Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-04-17T18:08:05.605716Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-17T18:08:05.605769Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-17T18:08:05.606359Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3152:4422];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-17T18:08:05.606445Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3158:4425];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-17T18:08:05.606568Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3166:4431];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD] Test command err: Trying to start YDB, gRPC: 6628, MsgBus: 6023 2025-04-17T18:06:56.473753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345197244522235:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:56.474093Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5f/r3tmp/tmpeSzU6w/pdisk_1.dat 2025-04-17T18:06:56.861051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:56.867330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:56.867448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6628, node 1 2025-04-17T18:06:56.871583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:56.916770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:56.916816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:56.916877Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:56.917024Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6023 TClient is connected to server localhost:6023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:57.390147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:59.182846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345210129424552:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.182945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.183022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345210129424557:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:59.186366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:06:59.197105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345210129424566:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:06:59.281724Z node 1 :TX_PROXY ERROR: Actor# [1:7494345210129424617:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:59.585384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.793851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:59.794042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:59.794473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:06:59.794603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:06:59.794718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:06:59.794843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:06:59.794944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:06:59.795049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:06:59.795157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:06:59.795261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:06:59.795366Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:06:59.795493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494345210129424897:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:06:59.821012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:59.821068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:59.821243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:06:59.821316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:06:59.821425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:06:59.821521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:06:59.821651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:06:59.821755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:06:59.821854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:06:59.821969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:06:59.822068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:06:59.822162Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494345210129424881:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:06:59.826374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345210129424901:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:06:59.826439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345210129424901:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:06:59.826632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id= ... tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.176963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.180156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.180348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.184130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.185310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.188676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.190455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.192956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.195705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.198832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.202355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.203538Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.208699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.208959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.213671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.218842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.224208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.229922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.235147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.240540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.245894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.251548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.255596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.257885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.261992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.263399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.266518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.268972Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.272037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.274335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.277245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.280484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.282522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.285452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.287544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.289918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.293295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.296046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.297541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.300110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.303058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.304606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.308753Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.308911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.313502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.314539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:07:57.406232Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2eydz1f8hz2hat1tczn6tz", SessionId: ydb://session/3?node_id=1&id=NTM5MTU4NGQtMzNkNWYxY2ItNGZhZTE2NjAtMzA3ZGZlNDY=, Slow query, duration: 27.132774s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:07:57.634859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:07:57.634877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:07:57.635511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TGRpcCmsTest::DisabledTxTest >> TBlobStorageProxyTest::TestDoubleEmptyGet >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> TBlobStorageProxyTest::TestGetMultipart >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> 
ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-17T18:07:50.518768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345433174210653:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:50.519157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cda/r3tmp/tmpBgrfg2/pdisk_1.dat 2025-04-17T18:07:50.863455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:50.922919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:50.923071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:50.931247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23622 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:07:51.056608Z node 1 :TX_PROXY DEBUG: actor# [1:7494345433174210851:2133] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:51.056675Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345437469178582:2428] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:51.056901Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345433174210874:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:51.056950Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345433174210874:2146], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:51.057195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:51.063185Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210501:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345437469178587:2429] 2025-04-17T18:07:51.063212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210504:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345437469178588:2429] 2025-04-17T18:07:51.063267Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345433174210501:2050] Subscribe: subscriber# [1:7494345437469178587:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:51.063278Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345433174210504:2053] Subscribe: subscriber# [1:7494345437469178588:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:51.063346Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210507:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345437469178589:2429] 2025-04-17T18:07:51.063361Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345433174210507:2056] Subscribe: subscriber# [1:7494345437469178589:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:51.063404Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178587:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345433174210501:2050] 2025-04-17T18:07:51.063442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178588:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345433174210504:2053] 2025-04-17T18:07:51.063460Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210501:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345437469178587:2429] 2025-04-17T18:07:51.063470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178589:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345433174210507:2056] 2025-04-17T18:07:51.063477Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210504:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7494345437469178588:2429] 2025-04-17T18:07:51.063490Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210507:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345437469178589:2429] 2025-04-17T18:07:51.063545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345437469178584:2429] 2025-04-17T18:07:51.063583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345437469178585:2429] 2025-04-17T18:07:51.063642Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345437469178583:2429][/dc-1] Set up state: owner# [1:7494345433174210874:2146], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:51.063795Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345437469178586:2429] 2025-04-17T18:07:51.063857Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345437469178583:2429][/dc-1] Path was already updated: owner# [1:7494345433174210874:2146], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:51.063909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178587:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345437469178584:2429], cookie# 1 2025-04-17T18:07:51.063937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178588:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345437469178585:2429], cookie# 1 2025-04-17T18:07:51.063959Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178589:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345437469178586:2429], cookie# 1 2025-04-17T18:07:51.064004Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210501:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345437469178587:2429], cookie# 1 2025-04-17T18:07:51.064027Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210504:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345437469178588:2429], cookie# 1 2025-04-17T18:07:51.064042Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345433174210507:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345437469178589:2429], cookie# 1 2025-04-17T18:07:51.064069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178587:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345433174210501:2050], cookie# 1 2025-04-17T18:07:51.064087Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494345437469178588:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345433174210504:2053], cookie# 1 2025-04-17T18:07:51.064120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345437469178589:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345433174210507:2056], cookie# 1 2025-04-17T18:07:51.064161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345437469178584:2429], cookie# 1 2025-04-17T18:07:51.064189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:51.064202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345437469178585:2429], cookie# 1 2025-04-17T18:07:51.064218Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:51.064243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345437469178586:2429], cookie# 1 2025-04-17T18:07:51.064261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345437469178583:2429][/dc-1] Unexpected sync response: sender# [1:7494345437469178586:2429], cookie# 1 2025-04-17T18:07:51.125403Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345433174210874:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:51.125816Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345433174210874:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... nfo }] } 2025-04-17T18:08:05.970100Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345469696192917:2128], cacheItem# { Subscriber: { Subscriber: [3:7494345482581096219:3193] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:05.970218Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345495465999169:4088], recipient# [3:7494345495465999168:2345], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:06.745250Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345469696192917:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:06.745383Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345469696192917:2128], cacheItem# { Subscriber: { Subscriber: [3:7494345473991161551:3174] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:06.745502Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345499760966476:4089], recipient# [3:7494345499760966475:2346], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:06.749737Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345469696192917:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:06.749869Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345469696192917:2128], cacheItem# { Subscriber: { Subscriber: [3:7494345473991161551:3174] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:06.749952Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345499760966478:4090], recipient# [3:7494345499760966477:2347], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:06.970971Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345469696192917:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:06.971091Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345469696192917:2128], cacheItem# { Subscriber: { Subscriber: [3:7494345482581096219:3193] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:06.971194Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345499760966489:4094], recipient# [3:7494345499760966488:2348], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.746484Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345469696192917:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.746651Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345469696192917:2128], cacheItem# { Subscriber: { Subscriber: [3:7494345473991161551:3174] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:07.746974Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345504055933796:4095], recipient# [3:7494345504055933795:2349], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.752682Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345469696192917:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.752815Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345469696192917:2128], cacheItem# { Subscriber: { Subscriber: [3:7494345473991161551:3174] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:07.753286Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345504055933798:4096], recipient# [3:7494345504055933797:2350], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.971951Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345469696192917:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.972083Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345469696192917:2128], cacheItem# { Subscriber: { Subscriber: [3:7494345482581096219:3193] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:07.972199Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345504055933809:4100], recipient# [3:7494345504055933808:2351], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout |87.9%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] >> KqpScanLogs::WideCombine+EnabledLogs [GOOD] >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-17T18:07:53.322594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345441930537691:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:53.323779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cce/r3tmp/tmp8kGxe6/pdisk_1.dat 2025-04-17T18:07:53.704152Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:53.709814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:53.709967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:53.718264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19827 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:07:53.867283Z node 1 :TX_PROXY DEBUG: actor# [1:7494345441930537937:2130] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:53.867353Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345441930538385:2437] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:53.867540Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494345441930537971:2145], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:07:53.867587Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494345441930537971:2145], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:07:53.867913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:07:53.870482Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537593:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345441930538390:2438] 2025-04-17T18:07:53.870555Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345441930537593:2050] Subscribe: subscriber# [1:7494345441930538390:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:53.870613Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537599:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345441930538392:2438] 2025-04-17T18:07:53.870642Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345441930537599:2056] Subscribe: subscriber# [1:7494345441930538392:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:53.870683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538390:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345441930537593:2050] 2025-04-17T18:07:53.870741Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538392:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345441930537599:2056] 2025-04-17T18:07:53.870861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345441930538387:2438] 2025-04-17T18:07:53.870954Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345441930538389:2438] 2025-04-17T18:07:53.871018Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494345441930538386:2438][/dc-1] Set up state: owner# [1:7494345441930537971:2145], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:53.871301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538390:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7494345441930538387:2438], cookie# 1 2025-04-17T18:07:53.871326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538391:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345441930538388:2438], cookie# 1 2025-04-17T18:07:53.871347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538392:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345441930538389:2438], cookie# 1 2025-04-17T18:07:53.871391Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537593:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345441930538390:2438] 2025-04-17T18:07:53.871621Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537593:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345441930538390:2438], cookie# 1 2025-04-17T18:07:53.871662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537599:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345441930538392:2438] 2025-04-17T18:07:53.871681Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537599:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345441930538392:2438], cookie# 1 2025-04-17T18:07:53.874447Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537596:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494345441930538391:2438] 2025-04-17T18:07:53.874543Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494345441930537596:2053] Subscribe: subscriber# [1:7494345441930538391:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:07:53.874611Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537596:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494345441930538391:2438], cookie# 1 2025-04-17T18:07:53.874652Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538390:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345441930537593:2050], cookie# 1 2025-04-17T18:07:53.874666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538392:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345441930537599:2056], cookie# 1 2025-04-17T18:07:53.874693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538391:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345441930537596:2053] 2025-04-17T18:07:53.874717Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494345441930538391:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345441930537596:2053], cookie# 1 2025-04-17T18:07:53.874778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345441930538387:2438], cookie# 1 2025-04-17T18:07:53.874825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:07:53.874841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345441930538389:2438], cookie# 1 
2025-04-17T18:07:53.874862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:07:53.874905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494345441930538388:2438] 2025-04-17T18:07:53.874965Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494345441930538386:2438][/dc-1] Path was already updated: owner# [1:7494345441930537971:2145], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:07:53.874998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494345441930538388:2438], cookie# 1 2025-04-17T18:07:53.875041Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494345441930538386:2438][/dc-1] Unexpected sync response: sender# [1:7494345441930538388:2438], cookie# 1 2025-04-17T18:07:53.875066Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494345441930537596:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494345441930538391:2438] 2025-04-17T18:07:53.922649Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494345441930537971:2145], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:07:53.923045Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494345441930537971:2145], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... nfo }] } 2025-04-17T18:08:07.063976Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345479838372619:2132], cacheItem# { Subscriber: { Subscriber: [3:7494345497018243008:3005] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:07.064156Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345505608178312:3554], recipient# [3:7494345505608178311:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.873513Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345479838372619:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.873658Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345479838372619:2132], cacheItem# { Subscriber: { Subscriber: [3:7494345484133341035:2986] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:07.873818Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345505608178327:3555], recipient# [3:7494345505608178326:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.875755Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345479838372619:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:07.875857Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345479838372619:2132], cacheItem# { Subscriber: { Subscriber: [3:7494345484133341035:2986] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:07.875941Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345505608178329:3556], recipient# [3:7494345505608178328:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:08.064761Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345479838372619:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:08.064906Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345479838372619:2132], cacheItem# { Subscriber: { Subscriber: [3:7494345497018243008:3005] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:08.065007Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345509903145636:3560], recipient# [3:7494345509903145635:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:08.878533Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345479838372619:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:08.878683Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345479838372619:2132], cacheItem# { Subscriber: { Subscriber: [3:7494345484133341035:2986] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:08.878800Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345509903145649:3561], recipient# [3:7494345509903145648:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:09.066278Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345479838372619:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:09.066420Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345479838372619:2132], cacheItem# { Subscriber: { Subscriber: [3:7494345497018243008:3005] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:09.066518Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345514198112956:3565], recipient# [3:7494345514198112955:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:09.879834Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494345479838372619:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:08:09.880002Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494345479838372619:2132], cacheItem# { Subscriber: { Subscriber: [3:7494345484133341035:2986] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:08:09.880147Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494345514198112967:3566], recipient# [3:7494345514198112966:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> TGRpcCmsTest::DisabledTxTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |87.9%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 |87.9%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013d4/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 22982, MsgBus: 7992 2025-04-17T18:03:59.742328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344438064743549:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:59.742609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013d4/r3tmp/tmpuKEfpY/pdisk_1.dat 2025-04-17T18:04:00.182917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:00.204051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:00.204147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:00.211906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22982, node 1 2025-04-17T18:04:00.382162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:00.382198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:00.382204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:00.382359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7992 TClient is connected to server localhost:7992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:00.885500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
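
The TX_PROXY_SCHEME_CACHE excerpt further above shows the same two metadata paths (dc-1/.metadata/initialization/migrations and dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers) being re-resolved over and over, each attempt ending in StatusPathDoesNotExist / PathErrorUnknown. One way to confirm the retry cadence from a capture like this is to pull the timestamp and path out of every "Handle TEvTxProxySchemeCache::TEvNavigateKeySet" entry. The Python sketch below is an illustrative log-analysis helper, not part of YDB; the regex is an assumption tuned to the entry format visible in this capture.

import re
from collections import defaultdict
from datetime import datetime

# Matches entries like:
#   2025-04-17T18:08:07.875857Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG:
#   Handle TEvTxProxySchemeCache::TEvNavigateKeySet: ... Path: dc-1/.metadata/...
# Assumes each entry stays on one line; pass re.S if the capture is wrapped.
NAVIGATE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)Z node \d+ "
    r":TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet:"
    r".*?Path: (?P<path>\S+)"
)

def retry_intervals(log_text: str) -> dict[str, list[float]]:
    """Per-path gaps (seconds) between successive resolve attempts."""
    stamps_by_path = defaultdict(list)
    for m in NAVIGATE.finditer(log_text):
        stamps_by_path[m.group("path")].append(datetime.fromisoformat(m.group("ts")))
    return {
        path: [(b - a).total_seconds() for a, b in zip(stamps, stamps[1:])]
        for path, stamps in stamps_by_path.items()
    }

Applied to the excerpt above, both paths come out at roughly one-second intervals (18:08:07.87, 18:08:08.87, 18:08:09.87 for the migrations path), i.e. a periodic metadata bootstrap probe rather than a tight retry loop.
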
2025-04-17T18:04:00.914072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:01.052926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.210165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.289190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:02.758505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344450949647090:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:02.758599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.062107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.129807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.196815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.228842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.256795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.294075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.375550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.427678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344455244615057:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.427753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.428688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344455244615062:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.431905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:03.440202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344455244615064:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:03.528725Z node 1 :TX_PROXY ERROR: Actor# [1:7494344455244615118:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:04.743480Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344438064743549:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:04.743557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:15.177685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:04:15.177721Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"80b32267-2046d47-ec505a92-660385b0") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"a7457697-53d5198b-9ad82c61-3085a07a") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"16c64ac6-86b66f45-ed914eef-6d335eb3")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2025-04-17T18:08:10.819482Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7494345516101545884:7509], blobId: 0, bytes: 2402376 2025-04-17T18:08:10.820268Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710972. Error: [TEvError] File size limit exceeded: 2/0Mb 2025-04-17T18:08:10.837658Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7494345516101545884:7509], blobId: 1, bytes: 144 2025-04-17T18:08:10.837755Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7494345516101545884:7509], blobId: 2, bytes: 1200936 2025-04-17T18:08:10.838000Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. 
From: [1:7494345516101545884:7509], blobId: 3, bytes: 72 2025-04-17T18:08:10.841035Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345516101545874:4570], TxId: 281474976710972, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmUzNjY4NWItNWJiNDk3ZmEtMzExMzJiZDgtYTBhMmNkOTE=. TraceId : 01js2ezmnkezdczwjatzvgwgx4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] File size limit exceeded: 2/0Mb }. 2025-04-17T18:08:10.842443Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345516101545875:4571], TxId: 281474976710972, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmUzNjY4NWItNWJiNDk3ZmEtMzExMzJiZDgtYTBhMmNkOTE=. CustomerSuppliedId : . TraceId : 01js2ezmnkezdczwjatzvgwgx4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-17T18:08:10.856030Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmUzNjY4NWItNWJiNDk3ZmEtMzExMzJiZDgtYTBhMmNkOTE=, ActorId: [1:7494345511806578563:4565], ActorState: ExecuteState, TraceId: 01js2ezmnkezdczwjatzvgwgx4, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine+EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013d3/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 62951, MsgBus: 17815 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013d3/r3tmp/tmp9G3lo3/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62951, node 1 TClient is connected to server localhost:17815 TClient is connected to server localhost:17815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
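
The block that follows (like the similar one earlier in the HandleErrorsCorrectly output) is the KQP physical plan, dumped as an S-expression: each (DqPhyStage ...) is one distributed stage, the (DqCnHashShuffle ...) / (DqCnMerge ...) / (DqCnUnionAll ...) nodes are the connections between stages, and (KqpPhysicalQuery ...) wraps the whole transaction. A generic S-expression reader is enough to turn such a dump into a navigable tree. The sketch below is an illustrative helper, not a YDB tool; it assumes the text passed in is just the plan, with balanced parentheses and no embedded spaces inside atoms, which holds for these dumps.

import re

# Parens, optionally quote-prefixed string literals, and bare atoms.
# The leading ' quote marks become standalone atoms, which is fine for
# inspection purposes.
TOKEN = re.compile(r"\(|\)|'?\"[^\"]*\"|[^\s()]+")

def parse_sexpr(text: str):
    """Parse a KQP plan dump into nested Python lists."""
    tokens = TOKEN.findall(text)
    def read(pos: int):
        if tokens[pos] == "(":
            node, pos = [], pos + 1
            while tokens[pos] != ")":
                item, pos = read(pos)
                node.append(item)
            return node, pos + 1
        return tokens[pos], pos + 1
    tree, _ = read(0)
    return tree

def stages(tree, acc=None):
    """Collect every DqPhyStage node, i.e. the distributed stages of the plan."""
    acc = [] if acc is None else acc
    if isinstance(tree, list):
        if tree and tree[0] == "DqPhyStage":
            acc.append(tree)
        for child in tree:
            stages(child, acc)
    return acc

Run over the WideCombine plan below, stages() finds three DqPhyStage nodes: the scan-plus-partial-combine stage, the final-combine stage fed by the hash shuffle, and the result stage behind the union-all connection.
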
( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (DataType 'Uint64)) (let $4 '('('"_logical_id" '505) '('"_id" '"812f08d1-7761fa29-f3539d14-7f98af73") '('"_wide_channels" (StructType '('"Value" (OptionalType (DataType 'String))) '('_yql_agg_0 $3))))) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($12) (block '( (let $13 (lambda '($15) (Member $15 '"Key") (Member $15 '"Value"))) (let $14 (lambda '($25 $26) $25 $26)) (return (FromFlow (WideCombiner (ExpandMap (ToFlow $12) $13) '-1073741824 (lambda '($16 $17) $17) (lambda '($18 $19 $20) (AggrCountInit $19)) (lambda '($21 $22 $23 $24) (AggrCountUpdate $22 $24)) $14))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0))) (let $7 (DqPhyStage '($6) (lambda '($27) (block '( (let $28 (WideCombiner (ToFlow $27) '"" (lambda '($29 $30) $29) (lambda '($31 $32 $33) $33) (lambda '($34 $35 $36 $37) (AggrAdd $36 $37)) (lambda '($38 $39) $39))) (return (FromFlow (NarrowMap $28 (lambda '($40) (AsStruct '('"column0" $40)))))) ))) '('('"_logical_id" '1265) '('"_id" '"ecd84044-d7f2e600-7c825d53-a09b45c")))) (let $8 (DqCnUnionAll (TDqOutput $7 '0))) (let $9 (DqPhyStage '($8) (lambda '($41) $41) '('('"_logical_id" '1533) '('"_id" '"e4a774f5-175008ab-c8615e74-b22eac49")))) (let $10 '($5 $7 $9)) (let $11 (DqCnResult (TDqOutput $9 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $10 '($11) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $3))) '0 '0)) '('('"type" '"query")))) ) |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2025-04-17T18:08:08.782629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345509698095161:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:08.782690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00175c/r3tmp/tmp6YpZWv/pdisk_1.dat 2025-04-17T18:08:09.318643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:09.326407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:09.326536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:09.332765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10867, node 1 2025-04-17T18:08:09.480328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:09.480354Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:09.480360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:09.480456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16727 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:09.876772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:09.967872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T18:08:09.992291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> TGRpcCmsTest::AuthTokenTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2025-04-17T18:08:08.791253Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345510504276851:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:08.791485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00174f/r3tmp/tmpdxIGEx/pdisk_1.dat 2025-04-17T18:08:09.253012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:09.253584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:09.259035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:09.295971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20079, node 1 2025-04-17T18:08:09.336330Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:09.336369Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:09.480610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:09.480650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:09.480659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-17T18:08:09.480804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:09.908652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12654 2025-04-17T18:08:10.139118Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking 2025-04-17T18:08:10.139155Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent 2025-04-17T18:08:10.144169Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active 2025-04-17T18:08:10.192489Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7494345519094212042:2315], Recipient [1:7494345514799244425:2200]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" } 2025-04-17T18:08:10.192533Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-04-17T18:08:10.196960Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } >> KqpScanLogs::WideCombine-EnabledLogs [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 
[GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2025-04-17T18:08:08.792785Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345506566420211:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:08.809047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001742/r3tmp/tmpeXXuRo/pdisk_1.dat 2025-04-17T18:08:09.278036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:09.278098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:09.282229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:09.284494Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10399, node 1 2025-04-17T18:08:09.480485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:09.480506Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:09.480520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:09.480614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:09.879732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:09.943703Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7494345510861388281:2314], Recipient [1:7494345510861387965:2198]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-17T18:08:09.943732Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-17T18:08:09.943757Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:09.943773Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:09.943867Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-17T18:08:09.943963Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744913289943772) 2025-04-17T18:08:09.944922Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744913289943772 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-17T18:08:09.945103Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-17T18:08:09.951285Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-17T18:08:09.951990Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744913289943772&action=1" } } } 2025-04-17T18:08:09.952132Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:09.952212Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T18:08:09.952335Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T18:08:09.952499Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7494345510861388281:2314], Recipient [1:7494345510861387965:2198]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744913289943772&action=1" } UserToken: "" } 2025-04-17T18:08:09.952533Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2025-04-17T18:08:09.955760Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7494345510861388281:2314] 2025-04-17T18:08:09.955875Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744913289943772&action=1" } } 2025-04-17T18:08:09.956003Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 
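
The CMS_TENANTS trace around this point records the tenant lifecycle through explicit "state=" markers written to the tenants database: the tenant is added in CREATING_POOLS, then updated through CREATING_SUBDOMAIN and, on teardown, REMOVING_UNITS and REMOVING_POOLS, while its storage pool moves from NOT_ALLOCATED through ALLOCATED to DELETED. Those markers make the trace easy to reduce to a state history. The regex below is keyed to the exact "Update tenant/pool state in database" wording seen in this log and is an analysis aid, not a YDB API; the initial CREATING_POOLS marker comes from the separate "Add tenant ... to database state=..." line and would need a second pattern.

import re
from collections import defaultdict

STATE = re.compile(
    r"Update (?:tenant|pool) state in database for (?P<entity>\S+) "
    r"state=(?P<state>[A-Z_]+)"
)

def lifecycle(log_text: str) -> dict[str, list[str]]:
    """Ordered state history per tenant and per storage pool."""
    history: dict[str, list[str]] = defaultdict(list)
    for m in STATE.finditer(log_text):
        history[m.group("entity")].append(m.group("state"))
    return dict(history)

# For this SimpleTenantsTestSyncOperation trace the histories come out as:
#   /Root/users/user-1      -> CREATING_SUBDOMAIN, ..., REMOVING_UNITS, REMOVING_POOLS
#   /Root/users/user-1:hdd  -> ALLOCATED, ..., DELETED
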
2025-04-17T18:08:09.956168Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T18:08:09.960469Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-17T18:08:09.960525Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T18:08:09.960610Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7494345510861388286:2198], Recipient [1:7494345510861387965:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T18:08:09.960641Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-17T18:08:09.960661Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:09.960677Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:09.960712Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-17T18:08:09.960754Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-17T18:08:09.960832Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-17T18:08:09.967595Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T18:08:09.967642Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:09.967650Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:09.967658Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:09.967717Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-17T18:08:09.967740Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744913289943772 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T18:08:09.971196Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T18:08:09.971354Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:09.971387Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-17T18:08:09.971400Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-17T18:08:09.977066Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-17T18:08:09.978747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T18:08:09.981778Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 
53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-17T18:08:09.981871Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-17T18:08:09.988453Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-17T18:08:09.992671Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-17T18:08:09.993160Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913290029 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:08:09.993185Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T18:08:09.993231Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user- ... 
10.363988Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913290029 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186224037895 Coordinators: 72075186224037893 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037892 Mediators: 72075186224037890 Mediators: 72075186224037889 SchemeShard: 72075186224037897 Hive: 72075186224037888 SysViewProcessor: 72075186224037891 StatisticsAggregator: 72075186224037894 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } StoragePools { Name: "/Root/users/user-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-17T18:08:10.364003Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-04-17T18:08:10.364126Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-17T18:08:10.365070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-17T18:08:10.368287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:08:10.368330Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T18:08:10.368347Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:10.371033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-17T18:08:10.371497Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-17T18:08:10.371534Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2025-04-17T18:08:10.375752Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-04-17T18:08:10.392248Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-17T18:08:10.393205Z node 1 :CMS_TENANTS 
DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-04-17T18:08:10.393224Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T18:08:10.393259Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T18:08:10.393336Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7494345515156356188:2198], Recipient [1:7494345510861387965:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-17T18:08:10.393357Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-17T18:08:10.393371Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:10.393378Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:10.393415Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-17T18:08:10.393446Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744913290351250 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T18:08:10.393486Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744913290351250 issue= 2025-04-17T18:08:10.395456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-17T18:08:10.395834Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-17T18:08:10.395916Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.395941Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:10.396493Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494345510861387847:2196], Recipient [1:7494345510861387965:2198]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.396516Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T18:08:10.395898Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-17T18:08:10.396611Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:10.395952Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-17T18:08:10.396626Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:10.396660Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-17T18:08:10.396682Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744913290351250 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T18:08:10.402963Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T18:08:10.403016Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:10.403043Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T18:08:10.403161Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T18:08:10.404719Z 
node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-17T18:08:10.404910Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-17T18:08:10.405929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-17T18:08:10.404775Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-17T18:08:10.410694Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-17T18:08:10.410771Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7494345515156356323:2198], Recipient [1:7494345510861387965:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-17T18:08:10.411806Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-17T18:08:10.411874Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:10.411894Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:10.411945Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-17T18:08:10.411965Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-17T18:08:10.415867Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T18:08:10.415891Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:10.415899Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:10.415904Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:10.415967Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744913290351250 2025-04-17T18:08:10.415977Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744913290351250 issue= 2025-04-17T18:08:10.415992Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744913290351250 issue= 2025-04-17T18:08:10.416005Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-17T18:08:10.416089Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744913290351250 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T18:08:10.423586Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-17T18:08:10.434386Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7494345515156356176:2391]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744913290351250&action=2" ready: true status: SUCCESS } } 2025-04-17T18:08:10.434502Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:10.476499Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494345515156356359:2394], 
Recipient [1:7494345510861387965:2198]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-04-17T18:08:10.476532Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T18:08:10.476647Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-04-17T18:08:10.482209Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7494345515156356362:2395], Recipient [1:7494345510861387965:2198]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-04-17T18:08:10.482261Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-04-17T18:08:10.482466Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-04-17T18:08:10.486420Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T18:08:10.486615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TBlobStorageProxyTest::TestGetFail [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources >> KqpRm::NotEnoughExecutionUnits >> KqpRm::SnapshotSharingByExchanger >> KqpRm::NodesMembershipByExchanger >> KqpRm::SingleSnapshotByExchanger >> KqpRm::SingleTask >> KqpRm::NotEnoughMemory >> KqpRm::ManyTasks >> KqpRm::DisonnectNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2025-04-17T18:08:08.778687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345507565218868:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:08.778732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001751/r3tmp/tmpM9enGf/pdisk_1.dat 2025-04-17T18:08:09.297168Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:09.307491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:09.307617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:09.314131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19418, node 1 2025-04-17T18:08:09.482936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:09.482957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:09.482965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:09.483091Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2458 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:09.876578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:09.956935Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7494345511860186970:2314], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:09.956998Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-17T18:08:09.957030Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:09.957043Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:09.957186Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2025-04-17T18:08:09.957387Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744913289957077) 2025-04-17T18:08:09.957931Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744913289957077 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-17T18:08:09.958131Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-17T18:08:09.961177Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-17T18:08:09.962004Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744913289957077&action=1" } } } 2025-04-17T18:08:09.962134Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:09.962225Z node 1 
:CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-17T18:08:09.962366Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-17T18:08:09.962706Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-17T18:08:09.962837Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T18:08:09.966838Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-17T18:08:09.966895Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T18:08:09.966976Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7494345511860186975:2194], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-17T18:08:09.967020Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-17T18:08:09.967064Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:09.967072Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:09.967103Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-17T18:08:09.967129Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-17T18:08:09.967184Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-17T18:08:09.970770Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7494345511860186981:2315], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744913289957077&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:09.970805Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-17T18:08:09.970999Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744913289957077&action=1" } } 2025-04-17T18:08:09.976542Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-17T18:08:09.976573Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:09.976582Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:09.976589Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:09.976648Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-17T18:08:09.976670Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744913289957077 errorcode=STATUS_CODE_UNSPECIFIED 
issue= 2025-04-17T18:08:09.979087Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-17T18:08:09.979247Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:09.979273Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-17T18:08:09.979279Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-17T18:08:09.989818Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "Root" 2025-04-17T18:08:09.992804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-17T18:08:09.994829Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-17T18:08:09.994891Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-17T18:08:09.997991Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-17T18:08:10.003848Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-17T18:08:10.004318Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913290043 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 ... 
.366424Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494345516155154859:2385], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:10.366456Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T18:08:10.366515Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.366864Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494345511860186537:2196], Recipient [1:7494345511860186657:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.366893Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T18:08:10.367479Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-17T18:08:10.370051Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494345516155154869:2386], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:10.370083Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T18:08:10.370123Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.370405Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494345511860186537:2196], Recipient [1:7494345511860186657:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.370468Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T18:08:10.371000Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-17T18:08:10.374235Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494345516155154877:2387], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:10.374259Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T18:08:10.374290Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.374557Z node 1 :CMS_TENANTS 
TRACE: StateWork, received event# 273154052, Sender [1:7494345511860186537:2196], Recipient [1:7494345511860186657:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.374569Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T18:08:10.375341Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-17T18:08:10.380963Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494345516155154889:2388], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:10.380986Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T18:08:10.381027Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.381218Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494345511860186537:2196], Recipient [1:7494345511860186657:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.381231Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T18:08:10.381834Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-17T18:08:10.383791Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-17T18:08:10.383809Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-17T18:08:10.383881Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-17T18:08:10.383935Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7494345516155154372:2194], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-17T18:08:10.383949Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-04-17T18:08:10.383970Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-17T18:08:10.383979Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-17T18:08:10.384026Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-04-17T18:08:10.384044Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1744913289957077 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-17T18:08:10.384115Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 
2025-04-17T18:08:10.384667Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494345516155154901:2389], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:10.384726Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T18:08:10.384759Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.384857Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494345511860186537:2196], Recipient [1:7494345511860186657:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.384869Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T18:08:10.385348Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-17T18:08:10.386921Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-04-17T18:08:10.386960Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-17T18:08:10.388194Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7494345516155154907:2390], Recipient [1:7494345511860186657:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-17T18:08:10.388221Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-17T18:08:10.388254Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.388411Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7494345511860186537:2196], Recipient [1:7494345511860186657:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-17T18:08:10.388434Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-17T18:08:10.388930Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected to server localhost:2458 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037897 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037897 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanReso... (TRUNCATED) 2025-04-17T18:08:10.654804Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T18:08:10.655210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:08:11.069912Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:12.071227Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine-EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013c6/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 17727, MsgBus: 3459 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c6/r3tmp/tmp3n3h5y/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17727, node 1 TClient is connected to server localhost:3459 TClient is connected to server localhost:3459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (DataType 'Uint64)) (let $4 '('('"_logical_id" '505) '('"_id" '"a8808f4a-7d447897-9816fc0e-fd66889b") '('"_wide_channels" (StructType '('"Value" (OptionalType (DataType 'String))) '('_yql_agg_0 $3))))) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($12) (block '( (let $13 (lambda '($15) (Member $15 '"Key") (Member $15 '"Value"))) (let $14 (lambda '($25 $26) $25 $26)) (return (FromFlow (WideCombiner (ExpandMap (ToFlow $12) $13) '-1073741824 (lambda '($16 $17) $17) (lambda '($18 $19 $20) (AggrCountInit $19)) (lambda '($21 $22 $23 $24) (AggrCountUpdate $22 $24)) $14))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0))) (let $7 (DqPhyStage '($6) (lambda '($27) (block '( (let $28 (WideCombiner (ToFlow $27) '"" (lambda '($29 $30) $29) (lambda '($31 $32 $33) $33) (lambda '($34 $35 $36 $37) (AggrAdd $36 $37)) (lambda '($38 $39) $39))) (return (FromFlow (NarrowMap $28 (lambda '($40) (AsStruct '('"column0" $40)))))) ))) '('('"_logical_id" '1265) '('"_id" '"1b831ae6-a59de9c9-7e6be8bd-e6178471")))) (let $8 (DqCnUnionAll (TDqOutput $7 '0))) (let $9 (DqPhyStage '($8) (lambda '($41) $41) '('('"_logical_id" '1533) '('"_id" '"32f6e97b-1fd1bbec-810990e2-f8a4cd04")))) (let $10 '($5 $7 $9)) (let $11 (DqCnResult (TDqOutput $9 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $10 '($11) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $3))) '0 '0)) '('('"type" '"query")))) ) |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] |88.0%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013b9/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk8 Trying to start YDB, gRPC: 62337, MsgBus: 17796 2025-04-17T18:03:59.765489Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344437311419005:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:03:59.781942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013b9/r3tmp/tmp0kUnDy/pdisk_1.dat 2025-04-17T18:04:00.239334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:00.265830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:00.265928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:00.268285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62337, node 1 2025-04-17T18:04:00.371414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:00.371438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:00.371445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:00.371571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17796 TClient is connected to server localhost:17796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:00.853696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:00.862709Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:00.872934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:04:01.049073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.200291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:01.280293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:02.990080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344450196322649:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:02.990231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.271798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.302283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.333453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.363686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.392570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.464608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.499853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:03.574396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344454491290609:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.574496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.574696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344454491290614:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:03.578023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:03.587189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344454491290616:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:03.668502Z node 1 :TX_PROXY ERROR: Actor# [1:7494344454491290673:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:04.765800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344437311419005:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:04.766605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:15.237724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:04:15.237745Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"750dfd6d-17305131-5d364970-4aa58d4d") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"8a84a92f-71b075da-a07ec61b-7938b1cb") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"1dafd3b7-316cc12-efb8f028-7751daef")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 
72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> KqpRm::SingleTask [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpRm::NotEnoughExecutionUnits [GOOD] >> KqpRm::ManyTasks [GOOD] >> KqpRm::NotEnoughMemory [GOOD] |88.0%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2025-04-17T18:08:15.276166Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.276622Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/001702/r3tmp/tmp8qxkWo/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.277121Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/001702/r3tmp/tmp8qxkWo/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001702/r3tmp/tmp8qxkWo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17145308958427822990 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320354Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320694Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.340914Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341013Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341129Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.341176Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.341259Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341283Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.341314Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341331Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-17T18:08:15.342295Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354527Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354775Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354828Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354988Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.355140Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.355164Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.355240Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.355566Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.355593Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.355648Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.355716Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.356095Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.356318Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356563Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356867Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.356988Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.357046Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.357237Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.357312Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 
2025-04-17T18:08:15.357359Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.357421Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.359909Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.359986Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.360037Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.360074Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.360109Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:463:2341])) 2025-04-17T18:08:15.364714Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.365019Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.365082Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.365150Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.365186Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-2 (2 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.365228Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:463:2341])) 2025-04-17T18:08:15.365270Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.365381Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-3-3 (3 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.365426Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-3-3 (3 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.365484Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.365510Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-3-3 (3 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.365538Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:463:2341])) 2025-04-17T18:08:15.365564Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 3. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.365684Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-4-4 (4 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.365734Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-4-4 (4 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.365763Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.365790Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-4-4 (4 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.365815Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:463:2341])) 2025-04-17T18:08:15.365841Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.365950Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-5-5 (5 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.365991Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-5-5 (5 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366019Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.366044Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-5-5 (5 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366069Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:463:2341])) 2025-04-17T18:08:15.366094Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.366295Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-6-6 (6 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.366327Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-6-6 (6 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366355Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.366382Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-6-6 (6 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366418Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:463:2341])) 2025-04-17T18:08:15.366464Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 6. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.366571Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-7-7 (7 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.366597Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-7-7 (7 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366623Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.366665Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-7-7 (7 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366691Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:463:2341])) 2025-04-17T18:08:15.366716Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.366805Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-8-8 (8 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.366843Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-8-8 (8 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366885Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.366910Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-8-8 (8 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.366943Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:463:2341])) 2025-04-17T18:08:15.366967Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.367061Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-9-9 (9 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.367086Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-9-9 (9 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.367110Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.367148Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-9-9 (9 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.367175Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:463:2341])) 2025-04-17T18:08:15.367199Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.367301Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:463:2341]) (release resources {0, 100}) 2025-04-17T18:08:15.367348Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:463:2341])) 2025-04-17T18:08:15.367394Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2025-04-17T18:08:15.277564Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.278088Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/001701/r3tmp/tmpoPE3So/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.278644Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/001701/r3tmp/tmpoPE3So/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001701/r3tmp/tmpoPE3So/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5323626462596315827 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320337Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320655Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.341496Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341615Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341808Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.341880Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.341969Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.342059Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-17T18:08:15.342112Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.342135Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.342339Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.356155Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.356448Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.356530Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.356772Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.356972Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.357011Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.357126Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.357307Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.357345Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.357446Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.357560Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.358261Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.358533Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.358789Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.359200Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.359366Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.359458Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 
2025-04-17T18:08:15.359702Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.359827Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.359904Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.360011Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-04-17T18:08:15.277747Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.278262Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00171e/r3tmp/tmptnQO1l/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.278867Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00171e/r3tmp/tmptnQO1l/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00171e/r3tmp/tmptnQO1l/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2399181641835031926 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320507Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320794Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.341583Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341727Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341928Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.342010Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at
[1:466:2342] 2025-04-17T18:08:15.342110Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.342145Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.342203Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.342230Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.342465Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354120Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354494Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354594Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354878Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.355090Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.355140Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.355260Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.355454Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.355495Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.355585Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.355693Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.356298Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.356527Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356840Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.357290Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 
2025-04-17T18:08:15.357471Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.357573Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.357835Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.357984Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.358068Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.358160Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-04-17T18:08:15.278403Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.278876Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00170f/r3tmp/tmpl4rvko/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.279447Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00170f/r3tmp/tmpl4rvko/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00170f/r3tmp/tmpl4rvko/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16341447682243435986 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320476Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320718Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.340911Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341038Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor
at [2:465:2101] 2025-04-17T18:08:15.341175Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.341227Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.341293Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341318Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.341346Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341382Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.342300Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.350209Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.350590Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.350690Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-04-17T18:08:15.350897Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.351054Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.351078Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.351163Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.351331Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.351359Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.351427Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-04-17T18:08:15.351506Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.354883Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.355099Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.355408Z node 1 
:KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.355692Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.355830Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.355941Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.356141Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.356290Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356374Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.356439Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.359083Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:463:2341]) priority=0 resources={0, 1000} 2025-04-17T18:08:15.359144Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.359206Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.359265Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.359309Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:463:2341])) 2025-04-17T18:08:15.364700Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2025-04-17T18:08:15.364841Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:463:2341]) priority=0 resources={0, 100000} 2025-04-17T18:08:15.364891Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.364950Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task kqp-1-2-2 (2 by [1:463:2341]) 2025-04-17T18:08:15.364978Z node 1 :RESOURCE_BROKER DEBUG: Removing task kqp-1-2-2 (2 by [1:463:2341]) 2025-04-17T18:08:15.365044Z node 1 :KQP_RESOURCE_MANAGER NOTICE: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2025-04-17T18:08:15.359007Z }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-04-17T18:08:15.276066Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.276463Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00171d/r3tmp/tmplKLPqu/pdisk_1.dat": unknown reason, errno# 0.
PDiskId# 1000 2025-04-17T18:08:15.276937Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00171d/r3tmp/tmplKLPqu/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00171d/r3tmp/tmplKLPqu/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1267981659647409458 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320466Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320765Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.341189Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341312Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341437Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.341486Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.341573Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341620Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.341652Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341677Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-17T18:08:15.342298Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.353863Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354156Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354237Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354440Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.354609Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.354643Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354744Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354915Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.354942Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354999Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.355070Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.355512Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.355710Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.355886Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356173Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.356288Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356349Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.356557Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.356646Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 
2025-04-17T18:08:15.356712Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.356770Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.359311Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.359387Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.359441Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.359495Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.359539Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:463:2341])) 2025-04-17T18:08:15.364706Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.364960Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:463:2341]) (release resources {0, 100}) 2025-04-17T18:08:15.365003Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:463:2341])) 2025-04-17T18:08:15.365047Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0.
>> KqpScanLogs::GraceJoin+EnabledLogs [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD]
>> KqpScanLogs::GraceJoin-EnabledLogs [GOOD]
>> KqpRm::NodesMembershipByExchanger [GOOD]
>> KqpRm::SingleSnapshotByExchanger [GOOD]
>> KqpRm::DisonnectNodes [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47
>> TGRpcNewCoordinationClient::SessionMethods
>> YdbYqlClient::SecurityTokenAuthMultiTenantSDK
>> YdbTableBulkUpsert::Simple
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-04-17T18:08:15.276305Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.276701Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016f2/r3tmp/tmpbnHBL0/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.277253Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016f2/r3tmp/tmpbnHBL0/pdisk_1.dat": unknown reason, errno# 0.
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016f2/r3tmp/tmpbnHBL0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12153621942440620964 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320426Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320718Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.341472Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341607Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341772Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.341843Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.341938Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341973Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.342029Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.342051Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-17T18:08:15.342289Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.358301Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.358613Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.358697Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.358924Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.359120Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.359151Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.359251Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.359471Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.359508Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.359593Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.359702Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.360279Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.360579Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.360858Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.361255Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.361446Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.361541Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.361790Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.361936Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 
2025-04-17T18:08:15.362034Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.362098Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:16.357237Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.357329Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.358113Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:16.619324Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-04-17T18:08:15.277020Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.277505Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016f3/r3tmp/tmppLdXSa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.278048Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016f3/r3tmp/tmppLdXSa/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016f3/r3tmp/tmppLdXSa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15530583727412435172 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320387Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320699Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.341188Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341309Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341465Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start
KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.341533Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.341620Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341653Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.341698Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341716Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.342299Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.355322Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.355620Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.355699Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.355907Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.356109Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.356144Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.356242Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.356427Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.356462Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.356540Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.356647Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.357200Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.357478Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.357777Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, 
with size: 1 2025-04-17T18:08:15.358170Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.358370Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.358464Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.358694Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.358825Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.358929Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.358998Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.362035Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.362098Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.362164Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.362248Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.362292Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:463:2341])) 2025-04-17T18:08:15.364691Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.364791Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:15.364835Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.364886Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:15.364932Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:15.364961Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:463:2341])) 2025-04-17T18:08:15.365024Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:15.365171Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.365259Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-17T18:08:15.365472Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:16.357565Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.357652Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:463:2341]) (release resources {0, 100}) 2025-04-17T18:08:16.357709Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300200 (remove task kqp-1-2-1 (1 by [1:463:2341])) 2025-04-17T18:08:16.357739Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100400 2025-04-17T18:08:16.357784Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-17T18:08:16.357876Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:463:2341]) (release resources {0, 100}) 2025-04-17T18:08:16.357918Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300200 to 0.100400 (remove task kqp-2-1-2 (2 by [1:463:2341])) 2025-04-17T18:08:16.357952Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-17T18:08:16.358122Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:16.358260Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913296 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:16.358474Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:16.619632Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> TTableProfileTests::UseDefaultProfile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-04-17T18:08:15.276605Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.277055Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016df/r3tmp/tmpB3LrnM/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.277585Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016df/r3tmp/tmpB3LrnM/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016df/r3tmp/tmpB3LrnM/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2336498565653665197 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320508Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320791Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.341587Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341751Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341968Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.342057Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.342160Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.342240Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.342286Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.342311Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
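The `Failed to deliver subscription request to config dispatcher` CRIT lines above recur for both nodes in nearly every unit test of this run; they appear to come from minimal test actor systems that start without a config dispatcher, so for triage they are repeated noise rather than distinct failures. A hypothetical helper (not part of the test run) for collapsing such repeats; it assumes one `<timestamp> node <N> :<COMPONENT> <LEVEL>: <message>` record per line, which is the shape of the raw log before display wrapping:

```python
# Hypothetical triage sketch, not part of this test run: collapse repeated
# high-severity records from a ya unittest log so each distinct CRIT/ERROR
# message is printed once with an occurrence count.
import re
import sys
from collections import Counter

# Matches the record shape visible above, e.g.
# "2025-04-17T18:08:15.341620Z node 2 :KQP_RESOURCE_MANAGER CRIT: <message>"
RECORD_RE = re.compile(
    r"^\d{4}-\d{2}-\d{2}T[\d:.]+Z node \d+ "
    r":(?P<component>\w+) (?P<level>CRIT|ERROR): (?P<message>.*)$"
)

def summarize(lines):
    counts = Counter()
    for line in lines:
        m = RECORD_RE.match(line.strip())
        if m:
            counts[(m["component"], m["level"], m["message"])] += 1
    return counts

if __name__ == "__main__":
    for (component, level, message), n in summarize(sys.stdin).most_common():
        print(f"{n:6d}  {component:24} {level:5}  {message[:100]}")
```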
2025-04-17T18:08:15.342523Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.356760Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.357082Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.357171Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.357405Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.357622Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.357660Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.357800Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.357982Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.358021Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.358099Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.358231Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.358809Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.359021Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.359277Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.359704Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.359901Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.359996Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.360229Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.360357Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 
2025-04-17T18:08:15.360445Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.360524Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:16.357225Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.357317Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.358534Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-17T18:08:16.358697Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-17T18:08:16.358860Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-17T18:08:16.359504Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:16.359643Z node 2 :TX_PROXY WARN: actor# [2:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-17T18:08:16.359761Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:16.359893Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:83:2074] ServerId# [1:354:2272] TabletId# 72057594037932033 PipeClientId# [2:83:2074] 2025-04-17T18:08:16.360197Z node 2 :KQP_RESOURCE_MANAGER INFO: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:16.360236Z node 2 :KQP_RESOURCE_MANAGER INFO: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:468:2103], reason: tenant updated 2025-04-17T18:08:16.362385Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:16.362509Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:16.669394Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> TGRpcYdbTest::RemoveNotExistedDirectory >> ClientStatsCollector::PrepareQuery >> YdbYqlClient::TestReadTableOneBatch >> YdbTableBulkUpsert::ValidRetry >> TGRpcAuthentication::ValidCredentials >> YdbYqlClient::TestDecimal1 >> YdbYqlClient::SecurityTokenAuth >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> TGRpcNewClient::YqlQueryWithParams >> TTableProfileTests::OverwriteCachingPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin+EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/00139e/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 31769, MsgBus: 28798 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00139e/r3tmp/tmpFRJBXP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31769, node 1 TClient is connected to server localhost:28798 TClient is connected to server localhost:28798 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '776) '('"_id" '"13909c42-77a1bcb3-b98151f6-c17c3695") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '674) '('"_id" '"6ab2dc36-34e1e416-83ce84f8-a4747dad") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '686) '('"_id" '"fd2176da-6bdc64c7-b0584996-fd973eff")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 >> YdbOlapStore::LogLast50 >> TGRpcYdbTest::GetOperationBadRequest >> YdbLogStore::LogStore >> YdbTableBulkUpsert::Nulls ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin-EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6873/0013ac/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 65521, MsgBus: 28215 test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/0013ac/r3tmp/tmpSLSroj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65521, node 1 TClient is connected to server localhost:28215 TClient is connected to server localhost:28215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '776) '('"_id" '"546b341c-9d0dbaf6-6cac149b-cdc19cbf") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '674) '('"_id" '"3042ea4-e0d0eb6a-75ecdd94-e4ad68b") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '686) '('"_id" '"4bee988f-3d3009-9f4b7b15-982911dc")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> KqpRm::SnapshotSharingByExchanger [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> YdbYqlClient::TestReadTableMultiShardWholeTable >> YdbMonitoring::SelfCheckWithNodesDying >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> TTableProfileTests::UseTableProfilePreset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-04-17T18:08:15.275582Z node 2 :BS_PDISK WARN: 
{BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:08:15.275977Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/001722/r3tmp/tmpmj6Wch/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:08:15.276400Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/001722/r3tmp/tmpmj6Wch/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/001722/r3tmp/tmpmj6Wch/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15996314293847974328 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:08:15.320356Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.320652Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-17T18:08:15.341349Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:464:2100] with ResourceBroker at [2:435:2099] 2025-04-17T18:08:15.341474Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-04-17T18:08:15.341624Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:463:2341] with ResourceBroker at [1:434:2322] 2025-04-17T18:08:15.341728Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:466:2342] 2025-04-17T18:08:15.341815Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341846Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-17T18:08:15.341883Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-17T18:08:15.341917Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-17T18:08:15.342308Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.353425Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.353762Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.353841Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354064Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.354291Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.354324Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354422Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354605Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-17T18:08:15.354643Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:15.354726Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913295 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:15.354831Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-17T18:08:15.355370Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-17T18:08:15.355571Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.355830Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356193Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-17T18:08:15.356373Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-17T18:08:15.356461Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.356662Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:15.356793Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 
2025-04-17T18:08:15.356867Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:15.356944Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:16.358345Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.358429Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.358566Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:16.358619Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.358662Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:16.358693Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.358743Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:463:2341])) 2025-04-17T18:08:16.358912Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:16.358960Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:463:2341]) priority=0 resources={0, 100} 2025-04-17T18:08:16.359000Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.359031Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:463:2341]) from queue queue_kqp_resource_manager 2025-04-17T18:08:16.359062Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:463:2341]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.359106Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:463:2341])) 2025-04-17T18:08:16.359153Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:16.359211Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:16.359345Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913296 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-17T18:08:16.359587Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:16.620516Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:16.620666Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [2:464:2100]) priority=0 resources={0, 100} 2025-04-17T18:08:16.620717Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [2:464:2100]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.620785Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:464:2100]) from queue queue_kqp_resource_manager 2025-04-17T18:08:16.620824Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [2:464:2100]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.620873Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:464:2100])) 2025-04-17T18:08:16.621008Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:16.621088Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-2-2 (2 by [2:464:2100]) priority=0 resources={0, 100} 2025-04-17T18:08:16.621129Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-2-2 (2 by [2:464:2100]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.621171Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:464:2100]) from queue queue_kqp_resource_manager 2025-04-17T18:08:16.621213Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-2-2 (2 by [2:464:2100]) to queue queue_kqp_resource_manager 2025-04-17T18:08:16.621252Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:464:2100])) 2025-04-17T18:08:16.621324Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-17T18:08:16.621434Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:16.621597Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913297 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-17T18:08:16.621968Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:18.105077Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:18.105187Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:463:2341]) (release resources {0, 100}) 2025-04-17T18:08:18.105256Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:463:2341])) 2025-04-17T18:08:18.105291Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-04-17T18:08:18.105339Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-17T18:08:18.105371Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:463:2341]) (release resources {0, 100}) 2025-04-17T18:08:18.105412Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:463:2341])) 2025-04-17T18:08:18.105449Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-17T18:08:18.105499Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:18.105612Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744913298 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:18.105892Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-17T18:08:18.378199Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-17T18:08:18.378336Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [2:464:2100]) (release resources {0, 100}) 2025-04-17T18:08:18.378395Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:464:2100])) 2025-04-17T18:08:18.378434Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-04-17T18:08:18.378488Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-17T18:08:18.378571Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-2-2-2 (2 by [2:464:2100]) (release resources {0, 100}) 2025-04-17T18:08:18.378622Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:464:2100])) 2025-04-17T18:08:18.378664Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-17T18:08:18.378728Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-17T18:08:18.378901Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744913299 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-17T18:08:18.379171Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-17T18:08:18.674480Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 |88.0%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcNewCoordinationClient::CreateDropDescribe >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 |88.0%| [TA] $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbYqlClient::RetryOperationAsync >> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD] |88.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TGRpcNewClient::SimpleYqlQuery >> TGRpcYdbTest::GetOperationBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid >> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD] >> TGRpcYdbTest::SdkUuid >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 >> ClientStatsCollector::PrepareQuery [GOOD] >> ClientStatsCollector::CounterCacheMiss >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> YdbYqlClient::TestReadTableOneBatch [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder >> TGRpcAuthentication::ValidCredentials [GOOD] >> TGRpcAuthentication::NoDescribeRights >> YdbYqlClient::SecurityTokenAuth [GOOD] >> YdbYqlClient::RetryOperationTemplate >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Types >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] 
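The SnapshotSharingByExchanger output above shows the full RESOURCE_BROKER task lifecycle on each node: `Submitted new kqp_query task` → `Allocate resources {cpu, mem}` → `Finish task ... (release resources {cpu, mem})`, with the queue's planned usage rising by 0.250000 per inserted task and falling back on release. A small sketch (same hypothetical-helper caveat as above) that pairs those allocate/release records offline to flag tasks that were allocated but never released:

```python
# Sketch of a leak check over RESOURCE_BROKER lines like the ones above:
# every "Allocate resources {cpu, mem} for task <id>" should eventually be
# matched by "Finish task <id> ... (release resources {cpu, mem})".
# Assumes one log message per line, as in the raw ya output.
import re

ALLOC_RE = re.compile(r"Allocate resources \{(\d+), (\d+)\} for task (\S+)")
FINISH_RE = re.compile(r"Finish task (\S+) .*\(release resources \{(\d+), (\d+)\}\)")

def unreleased_tasks(log_lines):
    live = {}  # task id -> (cpu, memory) currently held
    for line in log_lines:
        if m := ALLOC_RE.search(line):
            live[m.group(3)] = (int(m.group(1)), int(m.group(2)))
        elif m := FINISH_RE.search(line):
            live.pop(m.group(1), None)
    return live  # whatever remains was allocated but never released
```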
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 14893, MsgBus: 7145 2025-04-17T18:07:47.469041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345417584477368:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:47.469096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e59/r3tmp/tmpIqUzcI/pdisk_1.dat 2025-04-17T18:07:47.841137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14893, node 1 2025-04-17T18:07:47.883235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:47.883329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:47.884914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:47.926312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:47.926343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:47.926352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:47.926492Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7145 TClient is connected to server localhost:7145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:48.399161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
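Between the per-test stderr dumps, the stream also carries `>> Suite::Test [STATUS]` progress markers (only `[GOOD]` appears in this stretch) and `|NN.N%| [TA]` archive lines. One way to tally the statuses from such a stream — a sketch, where statuses other than `[GOOD]` are assumed values rather than ones observed above:

```python
# Sketch: count ">> Suite::Test [STATUS]" markers in a ya run log.
# Only [GOOD] occurs in the excerpt above; other statuses (e.g. FAIL,
# TIMEOUT) are assumptions about the marker format, not observed here.
import re
from collections import Counter

STATUS_RE = re.compile(r">> (?P<test>\S+) \[(?P<status>\w+)\]")

def tally(log_text: str) -> Counter:
    return Counter(m["status"] for m in STATUS_RE.finditer(log_text))

# Markers without a trailing "[...]" (tests still running) do not match,
# so tally() counts only completed tests.
```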
2025-04-17T18:07:48.419610Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:07:50.195878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345430469379917:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:50.195975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:50.195979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345430469379929:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:50.200529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:50.216798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345430469379931:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:07:50.307080Z node 1 :TX_PROXY ERROR: Actor# [1:7494345430469379982:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:50.585998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.705557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.732121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.758904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.784971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.913529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.942525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.967362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:07:50.989248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.015801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.043391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.069792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.095758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.647882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:07:51.675146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.707736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.739922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.766088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.796833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.829329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.899453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.933787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:07:51.966744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.006356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.047017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.077721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.110831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.186032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.231373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.301515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.369578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T18:07:52.401488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.277068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.278853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.281845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.284360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.285876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.289312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.290511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.294033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.294977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.298879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.299211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.303898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.304381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.308894Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.310547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.313867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.316303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.318035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.322390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.322974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.327615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.327884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.331145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.333411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.334722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.339168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.339207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.344201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.344798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.349324Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.350165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.354194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.355759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.360213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.362444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.364275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.367760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.368249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.372280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.372503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.376190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.376216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.380494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.381684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.385582Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:17.453608Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ez4mc63tsej5rk538v6tm", SessionId: ydb://session/3?node_id=1&id=NjY0ODQ1ZGItNjhkN2E5YzQtY2E5ODVjZmEtZTAzYzAyOWI=, Slow query, duration: 23.968106s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:08:17.677082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:08:17.677447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:08:17.677894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494345524958689103:6350];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-17T18:08:17.678220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword >> TTableProfileTests::OverwriteCachingPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 9055, MsgBus: 30073 2025-04-17T18:07:13.020445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345270413015569:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:13.020597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e5d/r3tmp/tmpAeqS0E/pdisk_1.dat 2025-04-17T18:07:13.403959Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9055, node 1 2025-04-17T18:07:13.445304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:13.445720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T18:07:13.449943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:13.469338Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:13.469365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:13.469375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:13.469484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30073 TClient is connected to server localhost:30073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:07:13.921503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:07:13.935147Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:07:15.614651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345279002950833:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:15.614654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345279002950825:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:15.614739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:15.618017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:07:15.627989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345279002950839:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:07:15.685473Z node 1 :TX_PROXY ERROR: Actor# [1:7494345279002950890:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:07:16.022119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:07:16.220302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:07:16.220539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:07:16.220569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:07:16.220573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:07:16.220859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:07:16.220861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:07:16.221040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:07:16.221065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:07:16.221158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:07:16.221195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:07:16.221285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-17T18:07:16.221310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:07:16.221412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:07:16.221451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:07:16.221555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:07:16.221591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:07:16.221700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:07:16.221760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:07:16.221829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:07:16.221876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:07:16.221984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:07:16.221989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:07:16.222172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494345283297918460:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:07:16.222173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494345283297918474:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:07:16.248433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345283297918462:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:07:16.248490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494345283297918462:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstr ... tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.803777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.806927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.809480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.811769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.815124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.816628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.820598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.821460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.826246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.826626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.830848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.832256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.835675Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.838534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.841244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.844731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.846377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.850652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.852072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.856652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.858015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.863018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.863613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.869390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.869613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.875507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.876074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.881625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:08:09.882532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.888973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.893851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.895866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.900447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.903033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.906786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.908894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.912715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.914559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.917842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.921580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.923891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.928187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.930930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.936542Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.940966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:09.964402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:08:10.050410Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2eyxzyf006t2zc1rq6t459", SessionId: ydb://session/3?node_id=1&id=NDgzYzg1MmItZmEzNzM0OS05ZDczMGY4OS1hZWNmNzkyNA==, Slow query, duration: 23.363650s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:08:10.250061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:08:10.250064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:08:10.250730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> TGRpcClientLowTest::SimpleRequest >> TTableProfileTests::UseTableProfilePreset [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> YdbYqlClient::TestDoubleKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-04-17T18:07:56.755300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:56.755363Z node 1 :IMPORT WARN: Table profiles were not loaded 
TestModificationResults wait txId: 1 2025-04-17T18:07:56.949987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:07:56.978065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T18:07:56.980172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:07:57.016626Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-17T18:07:57.584627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:57.584704Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:57.639604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners >> ClientStatsCollector::CounterCacheMiss [GOOD] >> ClientStatsCollector::CounterRetryOperation >> TGRpcYdbTest::SdkUuid [GOOD] >> TGRpcYdbTest::SdkUuidViaParams >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] Test command err: 2025-04-17T18:05:42.916560Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:43.010818Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: 
NKikimr::TEvTablet::TEvBoot 2025-04-17T18:05:43.014929Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:05:43.015358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:43.039451Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:43.039758Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:43.047577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:43.047831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:43.048078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:43.048245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:43.048408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:43.048580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:43.048740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:43.048896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:43.049060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:43.049215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:43.049365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:43.049504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:43.077309Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], 
Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:05:43.083275Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:43.083415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:43.083459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:43.083634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:43.083812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:43.083896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:43.083962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:43.084066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:43.084127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:43.084169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:43.084199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:43.084348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:43.084422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:43.084458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:43.084483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:43.084572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:43.084644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:43.084696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 
2025-04-17T18:05:43.084727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:43.084814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:43.084862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:43.084888Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:43.084935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:43.085007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:43.085034Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:43.085416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-17T18:05:43.085489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-04-17T18:05:43.085570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-17T18:05:43.085646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T18:05:43.085799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:43.085850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:43.085888Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:43.086117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:43.086160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:43.086227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:43.086395Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:43.086452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:43.086490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:43.086694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:43.086748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:43.086775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... _names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.743710Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.743747Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:08:23.743780Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:08:23.743882Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:08:23.743972Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.744001Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:08:23.744093Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-17T18:08:23.744155Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-17T18:08:23.744305Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-04-17T18:08:23.744400Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.744495Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.744657Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.744774Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:08:23.744855Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.744925Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.744976Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:581:2597] finished for tablet 9437184 2025-04-17T18:08:23.745638Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:574:2590];stats={"p":[{"events":["f_bootstrap"],"t":0.068},{"events":["f_ProduceResults"],"t":0.511},{"events":["l_bootstrap"],"t":0.78},{"events":["f_processing","f_task_result"],"t":0.804},{"events":["l_task_result"],"t":7.546},{"events":["f_ack"],"t":7.586},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":8.505}],"full":{"a":1744913295239091,"name":"_full_task","f":1744913295239091,"d_finished":0,"c":0,"l":1744913303745062,"d":8505971},"events":[{"name":"bootstrap","f":1744913295307533,"d_finished":712116,"c":1,"l":1744913296019649,"d":712116},{"a":1744913303744756,"name":"ack","f":1744913302825141,"d_finished":852138,"c":904,"l":1744913303744687,"d":852444},{"a":1744913303744743,"name":"processing","f":1744913296043976,"d_finished":3781358,"c":4520,"l":1744913303744689,"d":3781677},{"name":"ProduceResults","f":1744913295750447,"d_finished":1547297,"c":5426,"l":1744913303744947,"d":1547297},{"a":1744913303744949,"name":"Finish","f":1744913303744949,"d_finished":0,"c":0,"l":1744913303745062,"d":113},{"name":"task_result","f":1744913296044013,"d_finished":2837471,"c":3616,"l":1744913302785505,"d":2837471}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.745743Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:08:23.746841Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:574:2590];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.068},{"events":["f_ProduceResults"],"t":0.511},{"events":["l_bootstrap"],"t":0.78},{"events":["f_processing","f_task_result"],"t":0.804},{"events":["l_task_result"],"t":7.546},{"events":["f_ack"],"t":7.586},{"events":["l_ProduceResults","f_Finish"],"t":8.505},{"events":["l_ack","l_processing","l_Finish"],"t":8.506}],"full":{"a":1744913295239091,"name":"_full_task","f":1744913295239091,"d_finished":0,"c":0,"l":1744913303745798,"d":8506707},"events":[{"name":"bootstrap","f":1744913295307533,"d_finished":712116,"c":1,"l":1744913296019649,"d":712116},{"a":1744913303744756,"name":"ack","f":1744913302825141,"d_finished":852138,"c":904,"l":1744913303744687,"d":853180},{"a":1744913303744743,"name":"processing","f":1744913296043976,"d_finished":3781358,"c":4520,"l":1744913303744689,"d":3782413},{"name":"ProduceResults","f":1744913295750447,"d_finished":1547297,"c":5426,"l":1744913303744947,"d":1547297},{"a":1744913303744949,"name":"Finish","f":1744913303744949,"d_finished":0,"c":0,"l":1744913303745798,"d":849},{"name":"task_result","f":1744913296044013,"d_finished":2837471,"c":3616,"l":1744913302785505,"d":2837471}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.746950Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:08:15.168012Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-17T18:08:23.747012Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:08:23.747278Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-04-17T18:07:56.752877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:56.752949Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:56.955417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-17T18:07:56.968928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:174:2170], Recipient [1:70:2109]: NActors::TEvents::TEvPoison 2025-04-17T18:07:56.969583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-04-17T18:07:56.976066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:07:56.988781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:07:56.988987Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:07:56.993702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:07:56.993783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:07:56.993837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:07:56.993885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:07:56.993934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:07:56.994006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:07:56.994102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:07:56.994164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:07:56.994485Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:07:57.010202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:07:57.011317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:07:57.011473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:07:57.011666Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:180:2172]: TSystem::Undelivered 2025-04-17T18:07:57.011701Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-04-17T18:07:57.011727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:57.011774Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:57.011962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Clear operation queue and active pipes 2025-04-17T18:07:57.011996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:07:57.012787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-17T18:07:57.012886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.012951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.014100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.014203Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:07:57.014726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.014830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.014916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.014982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.015046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.015150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.015349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.015424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.018244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.018327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.018499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.019264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.019388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.019562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.019710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.020232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.020442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.020602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.020649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.020711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.021764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:07:57.023063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:07:57.023184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T18:07:57.023667Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:180:2172], Recipient [1:180:2172]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-17T18:07:57.023704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-17T18:07:57.024267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:07:57.024317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:07:57.024402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:07:57.024446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:07:57.024478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:07:57.024522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:07:57.024771Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:196:2172], Recipient [1:180:2172]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T18:07:57.024805Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-17T18:07:57.024838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-04-17T18:07:57.046750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:210:2189], Recipient [1:180:2172]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-17T18:07:57.046825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:07:57.143979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:07:57.144337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:07:57.144447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:07:57.144663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T18:07:57.144922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:07:57.145092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:07:57.145143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 7205759404667894 ... 057594046678944, status: OK, at schemeshard: 72075186233409549 2025-04-17T18:07:57.639343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125003, Sender [1:420:2337], Recipient [1:488:2383]: NKikimrScheme.TEvSyncTenantSchemeShard DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-17T18:07:57.639382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvSyncTenantSchemeShard 2025-04-17T18:07:57.639467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-17T18:07:57.639557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:07:57.639609Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:07:57.639684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:420:2337], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, 
actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:07:57.639763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:07:57.639808Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-17T18:07:58.008245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:58.008320Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-17T18:07:58.060638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-04-17T18:07:58.105997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:07:58.106076Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-04-17T18:07:58.135109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:07:58.142959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:243:2217] IGNORE Leader for TabletID 
72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:243:2217] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:252:2219] sender: [2:253:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:256:2067] recipient: [2:243:2217] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-17T18:07:58.161048Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:252:2219] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-17T18:07:58.202001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:344:2289] sender: [2:345:2067] recipient: [2:336:2285] TestWaitNotification: OK eventTxId 103 Leader for TabletID 72075186233409548 is [2:344:2289] sender: [2:362:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-17T18:07:58.347809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:424:2067] recipient: [2:416:2333] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:425:2067] recipient: [2:24:2071] 2025-04-17T18:07:58.398168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-17T18:07:58.398249Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-17T18:07:58.418518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:07:58.418573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T18:07:58.418860Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-17T18:07:58.418998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T18:07:58.439511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-17T18:07:58.439889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-17T18:07:58.483362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:555:2067] recipient: [2:551:2440] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:555:2067] recipient: [2:551:2440] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:558:2444] sender: [2:559:2067] recipient: [2:551:2440] Leader for TabletID 72075186233409550 is [2:558:2444] sender: [2:560:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-04-17T18:08:00.820712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:08:00.820776Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:00.883813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:08:00.883868Z node 2 :IMPORT WARN: Table profiles were not loaded >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> TGRpcNewClient::TestAuth ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] Test command err: 2025-04-17T18:05:40.202769Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:40.302521Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:05:40.306914Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 
2025-04-17T18:05:40.307287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:40.330099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:40.330422Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:40.337959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:40.338160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:40.338384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:40.338476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:40.338554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:40.338633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:40.338711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:40.338790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:40.338874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:40.338937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.338991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:40.339051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:40.355940Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:05:40.357997Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:40.358095Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:40.358127Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:40.358254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.358387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:40.358450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:40.358482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:40.358566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:40.358622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:40.358671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:40.358696Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:40.358823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:40.358870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:40.358904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:40.358923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:40.359001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:40.359056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:40.359091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:40.359114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 
2025-04-17T18:05:40.359162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:40.359192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:40.359212Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:40.359244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:40.359271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:40.359287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:40.359572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-04-17T18:05:40.359626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=19; 2025-04-17T18:05:40.359679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-04-17T18:05:40.359745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-17T18:05:40.359849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:40.359916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:40.359938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:40.360062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:40.360096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.360119Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:40.360228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:40.360259Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:40.360281Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:40.360441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:40.360472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:40.360499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... ames=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.809163Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.809225Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:08:23.809294Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:08:23.809421Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:08:23.809542Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.809580Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:08:23.809696Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-17T18:08:23.809741Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-17T18:08:23.809881Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-04-17T18:08:23.809975Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.810086Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.812591Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.812793Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:08:23.812941Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.813040Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator 
is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.813093Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:581:2597] finished for tablet 9437184 2025-04-17T18:08:23.813858Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:574:2590];stats={"p":[{"events":["f_bootstrap"],"t":0.067},{"events":["f_ProduceResults"],"t":0.525},{"events":["l_bootstrap"],"t":0.803},{"events":["f_processing","f_task_result"],"t":0.828},{"events":["l_task_result"],"t":7.887},{"events":["f_ack"],"t":7.933},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":8.896}],"full":{"a":1744913294916545,"name":"_full_task","f":1744913294916545,"d_finished":0,"c":0,"l":1744913303813199,"d":8896654},"events":[{"name":"bootstrap","f":1744913294983872,"d_finished":736632,"c":1,"l":1744913295720504,"d":736632},{"a":1744913303812769,"name":"ack","f":1744913302850525,"d_finished":898509,"c":904,"l":1744913303812660,"d":898939},{"a":1744913303812754,"name":"processing","f":1744913295745111,"d_finished":3956617,"c":4520,"l":1744913303812664,"d":3957062},{"name":"ProduceResults","f":1744913295441978,"d_finished":1608699,"c":5426,"l":1744913303813067,"d":1608699},{"a":1744913303813071,"name":"Finish","f":1744913303813071,"d_finished":0,"c":0,"l":1744913303813199,"d":128},{"name":"task_result","f":1744913295745150,"d_finished":2973817,"c":3616,"l":1744913302804016,"d":2973817}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.813974Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:08:23.814596Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:574:2590];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.067},{"events":["f_ProduceResults"],"t":0.525},{"events":["l_bootstrap"],"t":0.803},{"events":["f_processing","f_task_result"],"t":0.828},{"events":["l_task_result"],"t":7.887},{"events":["f_ack"],"t":7.933},{"events":["l_ProduceResults","f_Finish"],"t":8.896},{"events":["l_ack","l_processing","l_Finish"],"t":8.897}],"full":{"a":1744913294916545,"name":"_full_task","f":1744913294916545,"d_finished":0,"c":0,"l":1744913303814032,"d":8897487},"events":[{"name":"bootstrap","f":1744913294983872,"d_finished":736632,"c":1,"l":1744913295720504,"d":736632},{"a":1744913303812769,"name":"ack","f":1744913302850525,"d_finished":898509,"c":904,"l":1744913303812660,"d":899772},{"a":1744913303812754,"name":"processing","f":1744913295745111,"d_finished":3956617,"c":4520,"l":1744913303812664,"d":3957895},{"name":"ProduceResults","f":1744913295441978,"d_finished":1608699,"c":5426,"l":1744913303813067,"d":1608699},{"a":1744913303813071,"name":"Finish","f":1744913303813071,"d_finished":0,"c":0,"l":1744913303814032,"d":961},{"name":"task_result","f":1744913295745150,"d_finished":2973817,"c":3616,"l":1744913302804016,"d":2973817}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:08:23.814705Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:08:14.842458Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-17T18:08:23.814777Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:08:23.815078Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> 
TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> TGRpcNewClient::YqlExplainDataQuery [GOOD] >> TGRpcNewCoordinationClient::CheckUnauthorized >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::Uint8 >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 |88.1%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcAuthentication::NoDescribeRights [GOOD] >> TGRpcClientLowTest::BiStreamPing >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Timeout >> YdbYqlClient::RetryOperationTemplate [GOOD] >> YdbYqlClient::RetryOperationSync >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter >> YdbYqlClient::TestTzTypesFullStack >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> TGRpcYdbTest::CreateTableBadRequest >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> YdbIndexTable::AlterIndexImplBySuperUser >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> TYqlDateTimeTests::SimpleUpsertSelect >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> TGRpcClientLowTest::SimpleRequest [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> TTableProfileTests::OverwriteExecutionPolicy >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::CreateAlter >> TGRpcClientLowTest::BiStreamPing [GOOD] >> 
TGRpcClientLowTest::BiStreamCancelled >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> TGRpcYdbTest::SdkUuidViaParams [GOOD] >> TGRpcYdbTest::ReadTablePg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 29:79:2110] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:81:2111] Leader for TabletID 72057594037927937 is [29:81:2111] sender: [29:135:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:77:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:81:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:83:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! 
new actor is[30:82:2111] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:136:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:83:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:84:2057] recipient: [31:82:2113] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:86:2057] recipient: [31:82:2113] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:85:2114] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:139:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:83:2057] recipient: [32:82:2113] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:84:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:86:2057] recipient: [32:82:2113] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:85:2114] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:139:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:81:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:85:2057] recipient: [33:83:2113] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:87:2057] recipient: [33:83:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! 
new actor is[33:86:2114] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:140:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:86:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:88:2057] recipient: [34:87:2116] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:90:2057] recipient: [34:87:2116] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! new actor is[34:89:2117] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:143:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:87:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:88:2057] recipient: [35:86:2116] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:90:2057] recipient: [35:86:2116] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:89:2117] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:143:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:85:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:89:2057] recipient: [36:87:2116] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:91:2057] recipient: [36:87:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! 
new actor is[36:90:2117] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:144:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:91:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:92:2057] recipient: [37:90:2119] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:94:2057] recipient: [37:90:2119] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:93:2120] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:147:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:91:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:92:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:94:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:93:2120] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:147:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:89:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:92:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:93:2057] recipient: [39:91:2119] Leader for TabletID 72057594037927937 is [39:94:2120] sender: [39:95:2057] recipient: [39:91:2119] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! 
new actor is[39:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot >> YdbYqlClient::TestDecimalFullStack [GOOD] >> YdbYqlClient::TestDescribeDirectory >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant >> YdbTableBulkUpsert::Timeout [GOOD] >> YdbTableBulkUpsert::RetryOperationSync ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T18:05:44.647275Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:44.736745Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:05:44.741549Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:05:44.741977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:44.768684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:44.769044Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:44.777850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:44.778072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:44.778368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:44.778528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:44.778657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:44.778802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:44.778938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:05:44.779064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:44.779241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:44.779360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.779471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:44.779578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:44.804675Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:05:44.807553Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:44.807701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:44.807753Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:44.807957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:44.808123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:44.808204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:44.808249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:44.808369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:44.808449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:44.808496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:44.808530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:44.808751Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:44.808823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:44.808864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:44.808896Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:44.808987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:44.809071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:44.809120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:44.809161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:44.809256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:44.809299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:44.809327Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:44.809382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:44.809441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:44.809490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:44.809925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-17T18:05:44.810007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-17T18:05:44.810086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T18:05:44.810171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T18:05:44.810343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:44.810393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:44.810422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:44.810649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:44.810689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.810718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.810903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:44.810945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:44.810975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:44.811147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:44.811184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:44.811210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... 
:20 #26 0x315586e7 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:97:5 #27 0x315709e7 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:502:9 #28 0xfff8fba in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:218:21 #29 0xfff5dfc in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #30 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #31 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2a288576 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x2a288576 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x2a288576 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x2a288576 in __allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x2a288576 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:358:25 #6 0x2a288576 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2144:51 #7 0x2a27a8b4 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1528:5 #8 0x2a27a8b4 in NKikimr::NOlap::TBlobManager::StartBlobBatch() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:380:23 #9 0x2a25febd in TWriteAction /-S/ydb/core/tx/columnshard/blobs_action/bs/write.h:41:30 #10 0x2a25febd in void std::__y1::allocator::construct[abi:fe190000]> const&, std::__y1::shared_ptr&>(NKikimr::NOlap::NBlobOperations::NBlobStorage::TWriteAction*, TBasicString> const&, std::__y1::shared_ptr&) /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #11 0x2a25562a in construct > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #12 0x2a25562a in __shared_ptr_emplace > &, std::__y1::shared_ptr &, std::__y1::allocator, 0> 
/-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #13 0x2a25562a in allocate_shared, const TBasicString > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #14 0x2a25562a in make_shared > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #15 0x2a25562a in NKikimr::NOlap::NBlobOperations::NBlobStorage::TOperator::DoStartWritingAction() /-S/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp:16:12 #16 0x25c9b4f5 in NKikimr::NOlap::IBlobsStorageOperator::StartWritingAction(NKikimr::NOlap::NBlobOperations::EConsumer) /-S/ydb/core/tx/columnshard/blobs_action/abstract/storage.h:106:23 #17 0x25cf39a0 in NKikimr::NColumnShard::TWriteOperation::Start(NKikimr::NColumnShard::TColumnShard&, std::__y1::shared_ptr const&, NActors::TActorId const&, NKikimr::NOlap::TWritingContext const&) /-S/ydb/core/tx/columnshard/operations/write.cpp:39:53 #18 0x25eae844 in NKikimr::NColumnShard::TWriteTask::Execute(NKikimr::NColumnShard::TColumnShard*, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:26:21 #19 0x25eafdd8 in NKikimr::NColumnShard::TWriteTasksQueue::Drain(bool, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:45:15 #20 0x25e5fa6e in NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__write.cpp:616:22 #21 0x25de1ff6 in NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/columnshard/columnshard_impl.h:449:13 #22 0x116159cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #23 0x2cd33274 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #24 0x2cd2bae9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #25 0x2cd35e63 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #26 0x3157cfe2 in NKikimr::NEvents::TDataEvents::TEvWriteResult* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:446:13 #27 0x3155beb4 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:510:20 #28 0x3155beb4 in NKikimr::NTxUT::WaitWriteResult(NActors::TTestBasicRuntime&, unsigned long, std::__y1::vector>*) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:120:26 #29 0x3155d631 in NKikimr::NTxUT::WriteDataImpl(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, unsigned long, TBasicString> const&, std::__y1::shared_ptr const&, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:146:16 #30 0x3155e660 in NKikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:161:16 #31 0xfff977f in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:230:9 #32 0xfff5dfc in void 
NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #33 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #34 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #35 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #36 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #37 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #38 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #39 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #40 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #41 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #42 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #43 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #44 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #45 0x7f8767e87d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 2310254 byte(s) leaked in 41290 allocation(s). 
>> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::WrongTableProfile >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> YdbIndexTable::AlterIndexImplBySuperUser [GOOD] >> YdbIndexTable::CreateTableAddIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 >> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit >> TGRpcClientLowTest::SimpleRequestDummyService [GOOD] >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable >> YdbScripting::Params >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::TimestampKey >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::QueryLimits >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> YdbOlapStore::LogLast50 [GOOD] >> YdbOlapStore::LogLast50ByResource >> YdbYqlClient::TestYqlIssues >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::BasicMethods >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T18:05:44.715239Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:44.799143Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:05:44.803430Z node 1 :TX_COLUMNSHARD 
TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:05:44.803859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:44.829146Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:44.829456Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:44.837586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:44.837802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:44.838036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:44.838201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:44.838317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:44.838431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:44.838554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:44.838663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:44.838807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:44.838933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.839025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:44.839126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:44.865702Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-04-17T18:05:44.868434Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:44.868581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:44.868632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:44.868829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:44.868990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:44.869063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:44.869104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:44.869206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:44.869271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:44.869365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:44.869409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:44.869612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:44.869684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:44.869725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:44.869755Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:44.869841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:44.869908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:44.869953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:44.869980Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:44.870063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:44.870108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:44.870148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:44.870226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:44.870266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:44.870296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:44.870681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-17T18:05:44.870788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-17T18:05:44.870861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-17T18:05:44.870948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T18:05:44.871104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:44.871178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:44.871213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:44.871393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:05:44.871446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.871480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:44.871699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-17T18:05:44.871758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:44.871800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:44.871989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:44.872042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:44.872073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... Kikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:161:16 #31 0xfff977f in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:230:9 #32 0xfff5eb9 in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #33 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #34 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #35 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #36 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #37 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #38 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #39 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #40 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #41 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #42 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #43 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #44 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #45 0x7fcec09ced8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0xf53bcdd in 
NObjectFactory::TFactoryObjectCreator::Create() const /-S/library/cpp/object_factory/object_factory.h:38:20 #2 0x1ca4f2d9 in MakeHolder /-S/library/cpp/object_factory/object_factory.h:137:38 #3 0x1ca4f2d9 in NKikimr::NOlap::NStorageOptimizer::IOptimizerPlannerConstructor::BuildDefault() /-S/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.h:204:23 #4 0x1ca4d66f in NKikimr::NOlap::TIndexInfo::DeserializeOptionsFromProto(NKikimrSchemeOp::TColumnTableSchemeOptions const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:209:40 #5 0x1ca53d3e in NKikimr::NOlap::TIndexInfo::DeserializeFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:250:5 #6 0x1ca6084b in NKikimr::NOlap::TIndexInfo::BuildFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:333:17 #7 0x25bfef6b in NKikimr::NOlap::TColumnEngineForLogs::RegisterSchemaVersion(NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:103:29 #8 0x25bfaa2e in NKikimr::NOlap::TColumnEngineForLogs::TColumnEngineForLogs(unsigned long, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:42:5 #9 0x2a2e4fea in make_unique &, std::__y1::shared_ptr &, std::__y1::shared_ptr &, const NKikimr::NOlap::TSnapshot &, const unsigned int &, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData, std::__y1::shared_ptr &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30 #10 0x2a2e4fea in NKikimr::NColumnShard::TTablesManager::AddSchemaVersion(unsigned int, NKikimr::NOlap::TSnapshot const&, NKikimrSchemeOp::TColumnTableSchema const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:282:24 #11 0x2a2e6877 in NKikimr::NColumnShard::TTablesManager::AddTableVersion(NKikimr::NColumnShard::TInternalPathId, NKikimr::NOlap::TSnapshot const&, NKikimrTxColumnShard::TTableVersionInfo const&, std::__y1::optional const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:320:9 #12 0x2a1b7c79 in NKikimr::NColumnShard::TColumnShard::RunEnsureTable(NKikimrTxColumnShard::TCreateTable const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:431:19 #13 0x2a1b65e0 in NKikimr::NColumnShard::TColumnShard::RunInit(NKikimrTxColumnShard::TInitShard const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:373:9 #14 0x2a1b5d51 in NKikimr::NColumnShard::TColumnShard::RunSchemaTx(NKikimrTxColumnShard::TSchemaTxBody const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:328:13 #15 0xff6795e in NKikimr::NColumnShard::TSchemaTransactionOperator::ProgressOnExecute(NKikimr::NColumnShard::TColumnShard&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/transactions/operators/schema.h:94:19 #16 0x25d9236d 
in NKikimr::NColumnShard::TColumnShard::TTxProgressTx::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__progress_tx.cpp:80:13 #17 0x1852b274 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #18 0x184f09f6 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4143:9 #19 0x116159cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #20 0x2cd33274 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #21 0x2cd2bae9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #22 0x2cd35e63 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #23 0x3157b473 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #24 0x315597d2 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #25 0x315597d2 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #26 0x315586e7 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:97:5 #27 0x315709e7 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:502:9 #28 0xfff8fba in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:218:21 #29 0xfff5ecf in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #30 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #31 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) 
/-S/library/cpp/testing/unittest/registar.cpp:373:18 SUMMARY: AddressSanitizer: 3076960 byte(s) leaked in 54995 allocation(s). >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] >> TGRpcYdbTest::ExplainQuery >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] >> TGRpcNewClient::InMemoryTables >> TGRpcYdbTest::ReadTablePg [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword >> YdbYqlClient::TestDescribeDirectory [GOOD] >> YdbYqlClient::TestReadTableSnapshot [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest3 >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr >> YdbIndexTable::CreateTableAddIndex [GOOD] >> YdbIndexTable::AlterTableAddIndex >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDescribeDirectory [GOOD] Test command err: 2025-04-17T18:08:17.974457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345548253979167:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:17.974554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2c/r3tmp/tmpd4miC8/pdisk_1.dat 2025-04-17T18:08:18.456604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.499211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.499291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.509668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27169, node 1 2025-04-17T18:08:18.798768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.798792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.798800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.798904Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20125 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.225680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:19.254489Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:21.158687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565433849377:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.159011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565433849369:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.159087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.161758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:21.188226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345565433849383:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:21.267295Z node 1 :TX_PROXY ERROR: Actor# [1:7494345565433849458:2675] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:23.454937Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345571781609430:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.454992Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2c/r3tmp/tmpadPHNL/pdisk_1.dat 2025-04-17T18:08:23.663687Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30875, node 4 2025-04-17T18:08:23.898104Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.898213Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.916740Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:23.918855Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:23.918868Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:23.918879Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:23.918996Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:24.255487Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:26.518958Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345584666512337:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.519012Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345584666512326:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.519114Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.522394Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.547244Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345584666512340:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:26.611236Z node 4 :TX_PROXY ERROR: Actor# [4:7494345584666512411:2671] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:28.234022Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345596146429376:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.239595Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2c/r3tmp/tmpillYsb/pdisk_1.dat 2025-04-17T18:08:28.390755Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:28.427978Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.428055Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.432520Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12470, node 7 2025-04-17T18:08:28.502815Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:28.502837Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:28.502844Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:28.502978Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:28.736403Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:31.278167Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:31.467880Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345609031332360:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:31.467995Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:31.468249Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345609031332372:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:31.472346Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:31.502156Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345609031332374:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:31.607507Z node 7 :TX_PROXY ERROR: Actor# [7:7494345609031332466:2804] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:31.720580Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f09qa3ap9z9nkapn692dv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc1ZWEzODktZmQwMWMxM2QtMTgzMTUxOGEtYTY0YzQyYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:31.827898Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f0a0ea3mdms9ecvhftm84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc1ZWEzODktZmQwMWMxM2QtMTgzMTUxOGEtYTY0YzQyYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:31.963975Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f0a4g0s6bh2zgva4y7gb0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc1ZWEzODktZmQwMWMxM2QtMTgzMTUxOGEtYTY0YzQyYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:32.050569Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f0a7fe9m0hn3hdz9hfk9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc1ZWEzODktZmQwMWMxM2QtMTgzMTUxOGEtYTY0YzQyYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:32.137199Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2f0aa7atx1hwv0y20vr8vh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc1ZWEzODktZmQwMWMxM2QtMTgzMTUxOGEtYTY0YzQyYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:33.119463Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2f0ad2c6sdg4py0xm8w90e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc1ZWEzODktZmQwMWMxM2QtMTgzMTUxOGEtYTY0YzQyYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:33.144882Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2f0ad2c6sdg4py0xm8w90e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc1ZWEzODktZmQwMWMxM2QtMTgzMTUxOGEtYTY0YzQyYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:08:33.229481Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494345596146429376:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:33.229607Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:34.801259Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345621795650460:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:34.801729Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2c/r3tmp/tmpbORzzK/pdisk_1.dat 2025-04-17T18:08:35.063462Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63803, node 10 2025-04-17T18:08:35.131677Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:35.131775Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:35.212062Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:35.258798Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:35.258830Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:35.258839Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:35.258972Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:35.512519Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:38.326321Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345638975520721:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:38.326437Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:38.357302Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2025-04-17T18:08:18.009023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345551098955668:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.009325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a27/r3tmp/tmpoSQKaS/pdisk_1.dat 2025-04-17T18:08:18.497224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.497311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.500662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:18.542778Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24303, node 1 2025-04-17T18:08:18.696528Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.696622Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.798692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.798717Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.798722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.798807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.293646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:19.311565Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:21.326781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345563983858586:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.327433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.733301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:21.891450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345563983858766:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.891523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.891537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345563983858771:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.895052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:21.919646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345563983858773:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:22.002413Z node 1 :TX_PROXY ERROR: Actor# [1:7494345563983858853:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:22.155518Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f00c27r8pj6h4y5pkannc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZhYTllNzgtYjNmOTAyMjktM2EwMGQzY2MtYmEzMTE5YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:22.372071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f00n2c12t8nrfk6stjz3f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZhYTllNzgtYjNmOTAyMjktM2EwMGQzY2MtYmEzMTE5YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:22.398532Z node 1 :TX_PROXY ERROR: [ReadTable [1:7494345568278826218:2369] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-04-17T18:08:22.406526Z node 1 :TX_PROXY ERROR: [ReadTable [1:7494345568278826221:2370] TxId# 281474976710664] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-04-17T18:08:23.798494Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345574520334099:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.798743Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a27/r3tmp/tmpkejC2H/pdisk_1.dat 2025-04-17T18:08:23.998433Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2903, node 4 2025-04-17T18:08:24.098251Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:24.098268Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:24.098273Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:24.098379Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:24.108385Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:24.108472Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:24.111265Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9566 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:24.314581Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:26.642331Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345587405236994:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.642431Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.666029Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:26.751933Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345587405237148:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.752068Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.752376Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345587405237153:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.755814Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.779899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345587405237155:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:26.843804Z node 4 :TX_PROXY ERROR: Actor# [4:7494345587405237227:2783] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_m ... in>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.506209Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:32.614526Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345613628501340:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.614626Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.614863Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345613628501345:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.618918Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:32.643711Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345613628501347:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:32.715274Z node 7 :TX_PROXY ERROR: Actor# [7:7494345613628501420:2794] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:32.807421Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0av5e1kh4v2ycc2rp9fp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Zjc3MmUyOGUtNmJhY2QwZmItNzQ0YzhjMDYtN2E5OGNkZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:32.981412Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f0b2e93p08wvdq7fjryyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Zjc3MmUyOGUtNmJhY2QwZmItNzQ0YzhjMDYtN2E5OGNkZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:33.030312Z node 7 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:08:34.862256Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345617993767230:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:34.904844Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a27/r3tmp/tmpKdQiai/pdisk_1.dat 2025-04-17T18:08:35.105009Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:35.141485Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:35.141576Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:35.143648Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26094, node 10 2025-04-17T18:08:35.254563Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:35.254586Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:35.254599Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:35.254720Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9385 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:35.503844Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:35.580888Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01js2f0dqw1szxkhqmv192mh38, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37932, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.997322s 2025-04-17T18:08:35.597972Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01js2f0drd7wq89y4138crtt4w, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37944, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:38.254457Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ExecuteSchemeQueryRequest, traceId# 01js2f0gbe5bxr2733nv3jx51w, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37950, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:38.259565Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345635173637458:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:38.259647Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:38.273958Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:38.280605Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:38.280727Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:38.280737Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:38.280777Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:38.354550Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:38.354655Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:38.354660Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:38.354777Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:38.383255Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f0gff6fbysjbfsva4g8d7, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37962, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:38.404738Z node 10 :READ_TABLE_API DEBUG: [10:7494345635173637615:2346] Adding quota request to queue ShardId: 0, TxId: 281474976710659 2025-04-17T18:08:38.404799Z node 10 :READ_TABLE_API DEBUG: [10:7494345635173637615:2346] Assign stream quota to Shard 0, Quota 5, TxId 281474976710659 Reserved: 5 of 25, Queued: 0 2025-04-17T18:08:38.406778Z node 10 :READ_TABLE_API DEBUG: [10:7494345635173637615:2346] got stream part, size: 35, RU required: 128 rate limiter absent 2025-04-17T18:08:38.407155Z node 10 :READ_TABLE_API DEBUG: [10:7494345635173637615:2346] Starting inactivity timer for 600.000000s with tag 3 2025-04-17T18:08:38.407206Z node 10 :READ_TABLE_API NOTICE: [10:7494345635173637615:2346] Finish grpc stream, status: 400000 2025-04-17T18:08:38.414720Z node 10 :GRPC_SERVER DEBUG: [0x51a000039080] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.415015Z node 10 :GRPC_SERVER DEBUG: [0x51a000039680] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.415288Z node 10 :GRPC_SERVER DEBUG: [0x51a00003a280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.415462Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c8a80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.415628Z node 10 :GRPC_SERVER DEBUG: [0x51a0000de680] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.415764Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c9680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.415890Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c9080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.416128Z node 10 :GRPC_SERVER DEBUG: [0x51a00003ae80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.420190Z node 10 :GRPC_SERVER DEBUG: [0x51a000039c80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.420332Z node 10 :GRPC_SERVER DEBUG: [0x51a000092480] received 
request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.422411Z node 10 :GRPC_SERVER DEBUG: [0x51a00002b280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.422572Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a9880] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.422742Z node 10 :GRPC_SERVER DEBUG: [0x51a000064e80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.422894Z node 10 :GRPC_SERVER DEBUG: [0x51a000065480] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.423019Z node 10 :GRPC_SERVER DEBUG: [0x51a0000de080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.423170Z node 10 :GRPC_SERVER DEBUG: [0x51a000065a80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.423301Z node 10 :GRPC_SERVER DEBUG: [0x51a00002d080] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> YdbTableBulkUpsert::RetryOperation >> YdbImport::Simple >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> TTableProfileTests::WrongTableProfile [GOOD] >> TYqlDateTimeTests::DateKey >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::ZeroRows >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::DataValidation >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDateTimeTests::IntervalKey >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts >> TGRpcYdbTest::DropTableBadRequest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2025-04-17T18:08:19.940376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345555060126094:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:19.940575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a10/r3tmp/tmpZ1RLwr/pdisk_1.dat 2025-04-17T18:08:20.406352Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.406437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:20.415134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:20.451310Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20954, node 1 2025-04-17T18:08:20.650475Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:20.650519Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:20.779961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:20.779982Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:20.779991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:20.780109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:21.134517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:21.223811Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01js2ezzq52t424znch2p38g6e, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43584, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.985401s 2025-04-17T18:08:21.261534Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01js2ezzqw6vztya6m7s3rw9yb, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43588, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:23.218982Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01js2f01nj5qd5qdjv49r1gjjx, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:40264, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:23.219545Z node 1 :TX_PROXY DEBUG: actor# [1:7494345555060126156:2141] Handle TEvProposeTransaction 2025-04-17T18:08:23.219580Z node 1 :TX_PROXY DEBUG: actor# [1:7494345555060126156:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:08:23.219642Z node 1 :TX_PROXY DEBUG: actor# [1:7494345555060126156:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345572239996139:2631] 2025-04-17T18:08:23.287578Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:40264" 2025-04-17T18:08:23.287622Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:08:23.287897Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:08:23.287969Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:08:23.288097Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:08:23.288214Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:08:23.288257Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:08:23.288367Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:08:23.290767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:23.294878Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T18:08:23.294924Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345572239996139:2631] txid# 281474976710658 SEND to# [1:7494345572239996138:2336] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T18:08:23.295726Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:23.295814Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:23.295822Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:23.295848Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:23.351531Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996195:2685], Recipient [1:7494345572239996353:2341]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.352427Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996203:2693], Recipient [1:7494345572239996357:2345]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.353153Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996194:2684], Recipient [1:7494345572239996379:2352]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.353698Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996202:2692], Recipient [1:7494345572239996355:2343]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.353719Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996196:2686], Recipient [1:7494345572239996390:2353]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.354241Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996190:2680], Recipient [1:7494345572239996394:2354]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.354243Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996193:2683], Recipient [1:7494345572239996342:2340]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.354651Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996191:2681], Recipient [1:7494345572239996358:2346]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.354651Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996200:2690], Recipient [1:7494345572239996368:2350]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.355180Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996204:2694], Recipient [1:7494345572239996365:2347]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.355599Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996192:2682], Recipient [1:7494345572239996366:2348]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.356036Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996198:2688], Recipient [1:7494345572239996369:2351]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.356398Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996205:2695], Recipient [1:7494345572239996354:2342]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.356803Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, 
Sender [1:7494345572239996199:2689], Recipient [1:7494345572239996356:2344]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.357218Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996197:2687], Recipient [1:7494345572239996367:2349]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.357632Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345572239996201:2691], Recipient [1:7494345572239996341:2339]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:23.362306Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345572239996202:2692], Recipient [1:7494345572239996355:2343]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:23.362739Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:7494345572239996355:2343] 2025-04-17T18:08:23.362941Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:08:23.366377Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345572239996199:2689], Recipient [1:7494345572239996356:2344]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:23.366744Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037898 actor [1:7494345572239996356:2344] 2025-04-17T18:08:23.366951Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:08:23.377342Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345572239996205:2695], Recipient [1:7494345572239996354:2342]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:23.377692Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7494345572239996354:2342] 2025-04-17T18:08:23.377812Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:08:23.388121Z node 1 :TX_DATASH ... 
5072, Sender [10:7494345643586285365:2342], Recipient [10:7494345643586285365:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:08:40.909354Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:08:40.909377Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-17T18:08:40.909391Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:08:40.909405Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for WaitForStreamClearance 2025-04-17T18:08:40.909414Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit WaitForStreamClearance 2025-04-17T18:08:40.909429Z node 10 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715681] at 72075186224037897 2025-04-17T18:08:40.909439Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-17T18:08:40.909452Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2025-04-17T18:08:40.909461Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2025-04-17T18:08:40.909471Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-17T18:08:40.909626Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2025-04-17T18:08:40.909637Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:08:40.909646Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-17T18:08:40.909654Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-17T18:08:40.909664Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-17T18:08:40.909687Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-17T18:08:40.911559Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7494345647881253693:2140], Recipient [10:7494345643586285365:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T18:08:40.911585Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T18:08:40.911625Z node 10 :READ_TABLE_API DEBUG: [10:7494345647881253675:2401] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2025-04-17T18:08:40.911649Z node 10 :READ_TABLE_API DEBUG: [10:7494345647881253675:2401] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2025-04-17T18:08:40.912092Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2025-04-17T18:08:40.912269Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2025-04-17T18:08:40.912487Z node 10 :READ_TABLE_API DEBUG: [10:7494345647881253675:2401] got stream part, size: 75, RU required: 128 rate limiter absent 2025-04-17T18:08:40.912793Z node 10 :READ_TABLE_API DEBUG: [10:7494345647881253675:2401] Starting inactivity timer 
for 600.000000s with tag 3 2025-04-17T18:08:40.914407Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2025-04-17T18:08:40.914435Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2025-04-17T18:08:40.915384Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-04-17T18:08:40.915397Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2025-04-17T18:08:40.915479Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7494345647881253676:2401], Recipient [10:7494345643586285365:2342]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2025-04-17T18:08:40.915495Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-17T18:08:40.915506Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2025-04-17T18:08:40.915536Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2025-04-17T18:08:40.915602Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7494345647881253676:2401], Recipient [10:7494345643586285365:2342]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2025-04-17T18:08:40.915614Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-17T18:08:40.918773Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345647881253676:2401], Recipient [10:7494345643586285365:2342]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913320948 TxId: 281474976715680 2025-04-17T18:08:40.918992Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7494345643586285365:2342], Recipient [10:7494345643586285365:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:08:40.919008Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:08:40.919032Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-17T18:08:40.919048Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:08:40.919068Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2025-04-17T18:08:40.919079Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-17T18:08:40.919097Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2025-04-17T18:08:40.919121Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-17T18:08:40.919137Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2025-04-17T18:08:40.919147Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2025-04-17T18:08:40.919158Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-17T18:08:40.919187Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 
72075186224037897 is DelayCompleteNoMoreRestarts 2025-04-17T18:08:40.919201Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2025-04-17T18:08:40.919210Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2025-04-17T18:08:40.919231Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2025-04-17T18:08:40.919260Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-17T18:08:40.919268Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2025-04-17T18:08:40.919279Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2025-04-17T18:08:40.919291Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:08:40.919301Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-17T18:08:40.919312Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-17T18:08:40.919322Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-17T18:08:40.927977Z node 10 :READ_TABLE_API NOTICE: [10:7494345647881253675:2401] Finish grpc stream, status: 400000 2025-04-17T18:08:40.933333Z node 10 :GRPC_SERVER DEBUG: [0x51a000154880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.933562Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e2880] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.933717Z node 10 :GRPC_SERVER DEBUG: [0x51a000156680] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.933935Z node 10 :GRPC_SERVER DEBUG: [0x51a0000db080] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.934139Z node 10 :GRPC_SERVER DEBUG: [0x51a000156c80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.934315Z node 10 :GRPC_SERVER DEBUG: [0x51a000157280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.934461Z node 10 :GRPC_SERVER DEBUG: [0x51a00002e880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.934632Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e1680] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.934772Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e1c80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.934913Z node 10 :GRPC_SERVER DEBUG: [0x51a00001fe80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.935042Z node 10 :GRPC_SERVER DEBUG: [0x51a000025e80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.935176Z node 10 :GRPC_SERVER DEBUG: [0x51a00002e280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.935316Z node 10 :GRPC_SERVER DEBUG: [0x51a00002dc80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.935457Z 
node 10 :GRPC_SERVER DEBUG: [0x51a000162680] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.935589Z node 10 :GRPC_SERVER DEBUG: [0x51a000155a80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.935717Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e2280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.935842Z node 10 :GRPC_SERVER DEBUG: [0x51a00002d680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.938424Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-17T18:08:40.938444Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-17T18:08:40.938463Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 10 ms, propose latency: 29 ms, status: COMPLETE 2025-04-17T18:08:40.938537Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897
>> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD]
>> TGRpcClientLowTest::ChangeAcl [GOOD]
>> TGRpcNewClient::InMemoryTables [GOOD]
>> YdbYqlClient::TestTransactionQueryError [GOOD]
>> YdbYqlClient::TestReadWrongTable
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD]
Test command err: 2025-04-17T18:08:18.085098Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345550811795853:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.088643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a25/r3tmp/tmpXnLu8z/pdisk_1.dat 2025-04-17T18:08:18.631668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.631758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.637381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:18.643266Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9509, node 1 2025-04-17T18:08:18.732309Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.732346Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.798887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.798909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.798915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.798997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23906 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.247933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:21.200485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345563696698558:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.200669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.201093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345563696698570:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.204700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:21.228579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345563696698572:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:21.288500Z node 1 :TX_PROXY ERROR: Actor# [1:7494345563696698642:2684] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a25/r3tmp/tmpvI3mKJ/pdisk_1.dat 2025-04-17T18:08:23.709936Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:23.784751Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:23.847351Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.847417Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.851301Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26131, node 4 2025-04-17T18:08:24.043021Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:24.043051Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:24.043060Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:24.043195Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:24.291558Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:26.462511Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345584082381135:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.462581Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.488817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:26.686093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345584082381300:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.686197Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.686385Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345584082381305:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.689849Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.717377Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345584082381307:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:26.792360Z node 4 :TX_PROXY ERROR: Actor# [4:7494345584082381384:2807] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:28.594303Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345595259703350:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.594373Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a25/r3tmp/tmpmWGyS2/pdisk_1.dat 2025-04-17T18:08:28.761298Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:28.800459Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.800608Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.807756Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10509, node 7 2025-04-17T18:08:29.042761Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:29.042786Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:29.042797Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:29.042923Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.347814Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:29.547681Z node 7 :TX_PROXY ERROR: Actor# [7:7494345599554671595:2594] txid# 281474976715658, Access denied for bad@builtin on path /Root, with access CreateTable 2025-04-17T18:08:29.547840Z node 7 :TX_PROXY ERROR: Actor# [7:7494345599554671595:2594] txid# 281474976715658, issues: { message: "Access denied for bad@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-17T18:08:33.598314Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345617264235277:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:33.604971Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a25/r3tmp/tmpxe0juV/pdisk_1.dat 2025-04-17T18:08:33.851597Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21991, node 10 2025-04-17T18:08:33.934856Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:33.934961Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:33.980827Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:34.000496Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:34.000519Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:34.000528Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:34.000677Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:34.283281Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:34.371514Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:34.525334Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:34.584156Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:08:34.628792Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a25/r3tmp/tmp9PVoAK/pdisk_1.dat 2025-04-17T18:08:38.770249Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:38.835360Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:38.878106Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:38.878322Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:38.885105Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65433, node 13 2025-04-17T18:08:38.984924Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:38.984951Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:38.984960Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:38.985130Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:39.317084Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:39.338465Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:39.456089Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] Test command err: 2025-04-17T18:08:17.972242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345549063039769:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:17.972312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a35/r3tmp/tmp1ZgcyR/pdisk_1.dat 2025-04-17T18:08:18.627558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.648099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.648168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.652226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18151, node 1 2025-04-17T18:08:18.956033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.956056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.956063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.956164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.293060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:19.441650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:23.054292Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345572051882618:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.054407Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a35/r3tmp/tmpvaRM7D/pdisk_1.dat 2025-04-17T18:08:23.222138Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:23.253153Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.253249Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.256529Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31031, node 4 2025-04-17T18:08:23.433793Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:23.433822Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:23.433833Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:23.433959Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:23.740408Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:23.831800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:27.679314Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345590076198804:2106];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:27.680950Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a35/r3tmp/tmpPSiFOw/pdisk_1.dat 2025-04-17T18:08:27.891460Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28290, node 7 2025-04-17T18:08:28.020794Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.020880Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.056324Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:28.112773Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:28.112798Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:28.112807Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:28.112952Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:28.387788Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:28.538386Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:33.337748Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345615595699051:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:33.341927Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a35/r3tmp/tmp9Xaqa5/pdisk_1.dat 2025-04-17T18:08:33.660668Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:33.704688Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:33.704776Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:33.712067Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20149, node 10 2025-04-17T18:08:33.948046Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:33.948084Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:33.948092Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:33.948221Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:34.328368Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:34.411941Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:38.816323Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345637497745310:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:38.816866Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a35/r3tmp/tmpRoBMjv/pdisk_1.dat 2025-04-17T18:08:38.980996Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:39.015147Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:39.015226Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:39.018715Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22312, node 13 2025-04-17T18:08:39.086987Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:39.087010Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:39.087018Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:39.087124Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:39.396078Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:39.480876Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480
>> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD]
>> TGRpcYdbTest::CreateTableBadRequest3 [GOOD]
>> TGRpcYdbTest::CreateTableWithIndex
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD]
Test command err: 2025-04-17T18:08:17.969694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345548395503641:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:17.969738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a33/r3tmp/tmpeqGO1P/pdisk_1.dat 2025-04-17T18:08:18.636505Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13308, node 1 2025-04-17T18:08:18.716432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.716526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.768195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:18.835525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.835550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.835562Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.835705Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.266680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:19.289570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:14885 TClient is connected to server localhost:14885 2025-04-17T18:08:19.716411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:21.443355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565575373919:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.443426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565575373927:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.443481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.447288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T18:08:21.486369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345565575373933:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T18:08:21.589030Z node 1 :TX_PROXY ERROR: Actor# [1:7494345565575374009:2714] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:14885 TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913299325 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\204\002\032\004user \003" EffectiveACL: "\n\016\010\001\020\200\204\002\032\004user \003" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913301509 ParentPathId: 1 PathState: EPathStateCreate Owner: "met... (TRUNCATED) 2025-04-17T18:08:23.881418Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345573712881321:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.881481Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a33/r3tmp/tmppg6C0P/pdisk_1.dat 2025-04-17T18:08:24.181861Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12663, node 4 2025-04-17T18:08:24.263404Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:24.270722Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:24.350545Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:24.417940Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:24.417976Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:24.417985Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:24.418099Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7994 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:24.643730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:7994 TClient is connected to server localhost:7994 2025-04-17T18:08:25.131743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:26.936692Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345586597784239:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.936819Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345586597784260:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.936829Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.940610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.962603Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345586597784276:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T18:08:27.023171Z node 4 :TX_PROXY ERROR: Actor# [4:7494345590892751641:2701] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:7994 TClient::Ls request: Root 2025-04-17T18:08:27.367416Z node 4 :TX_PROXY ERROR: Access denied for user with access DescribeSchema to path Root TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 12 ErrorReason: "Access denied" 2025-04-17T18:08:28.919098Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345594730311885:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.919160Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a33/r3tmp/tmpRaqz3P/pdisk_1.dat 2025-04-17T18:08:29.129031Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:29.167465Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:29.167544Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:29.175604Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13781, node 7 2025-04-17T18:08:29.282779Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:29.282805Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:29.282813Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:29.282957Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.587655Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
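The workload-service records above repeat a single bootstrap pattern: the fetch of the default resource pool returns NOT_FOUND, a creator actor submits the CREATE, a concurrent creator can win the race (the TX_PROXY issue reads "path exist, request accepts it", so the existing pool satisfies the request), and a scheduled retry re-checks the result ("doublechecking"). A minimal create-if-missing sketch of that control flow, with hypothetical stand-ins for the fetch and create steps rather than YDB's actual actors:

#include <chrono>
#include <iostream>
#include <thread>

enum class EStatus { Ok, NotFound, AlreadyExists, Retryable };

// Hypothetical stand-ins for the pool fetcher/creator results seen in the log.
EStatus FetchPool() { return EStatus::NotFound; }
EStatus CreatePool() { return EStatus::AlreadyExists; }

bool EnsureDefaultPool(int maxRetries = 3) {
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        if (FetchPool() == EStatus::Ok) {
            return true; // pool is already visible
        }
        switch (CreatePool()) {
        case EStatus::Ok:
        case EStatus::AlreadyExists: // lost the race: "path exist, request accepts it"
            return true;
        case EStatus::Retryable:     // corresponds to "Scheduled retry for error"
            std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));
            break;
        default:
            return false;
        }
    }
    return false;
}

int main() {
    std::cout << (EnsureDefaultPool() ? "pool ready\n" : "gave up\n");
}

Treating AlreadyExists as success is what keeps the bootstrap idempotent across the concurrent sessions visible in these runs.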
2025-04-17T18:08:29.675858Z node 7 :TICKET_PARSER ERROR: Ticket some****oken (BB86510A): Could not find correct token validator 2025-04-17T18:08:29.678241Z node 7 :GRPC_SERVER ERROR: Received TEvRefreshTokenResponse, Authenticated = 0 2025-04-17T18:08:33.734604Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345615284317609:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:33.734669Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a33/r3tmp/tmpLDkDBm/pdisk_1.dat 2025-04-17T18:08:34.129518Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:34.175858Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:34.175963Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:34.183742Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19699, node 10 2025-04-17T18:08:34.347077Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:34.347114Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:34.347123Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:34.347295Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:34.649371Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
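The TICKET_PARSER record above prints the rejected ticket as some****oken: only a short prefix and suffix survive, so the credential cannot be recovered from the log. A sketch of such a masking rule, inferred from the visible output rather than taken from YDB's sources:

#include <iostream>
#include <string>

// Keep at most four leading and four trailing characters, mask the middle.
std::string MaskTicket(const std::string& ticket) {
    if (ticket.size() <= 8) {
        return std::string(ticket.size(), '*');
    }
    return ticket.substr(0, 4) + "****" + ticket.substr(ticket.size() - 4);
}

int main() {
    std::cout << MaskTicket("some_secret_token") << '\n'; // prints "some****oken"
}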
2025-04-17T18:08:38.990447Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345637244928521:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:38.994582Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a33/r3tmp/tmpBg1j0t/pdisk_1.dat 2025-04-17T18:08:39.273736Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20145, node 13 2025-04-17T18:08:39.444193Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:39.444294Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:39.449849Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:39.449875Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:39.449883Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:39.450029Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:39.471207Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:39.803723Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
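The four NET_CLASSIFIER records that recur in every run trace a fixed fallback chain: use the distributable configuration if it is usable, otherwise try a local file, and report a bad configuration when both sources come up empty. A sketch of that chain as a sequence of optional sources (the loader names are illustrative, not the actual implementation):

#include <iostream>
#include <optional>
#include <string>

// Both sources come back empty here, mirroring "(empty maybe)" in the log.
std::optional<std::string> LoadDistributableConfig() { return std::nullopt; }
std::optional<std::string> LoadConfigFromFile() { return std::nullopt; }

std::optional<std::string> InitNetClassifier() {
    if (auto cfg = LoadDistributableConfig()) {
        return cfg;
    }
    std::cerr << "WARN: distributable config is empty, broken or outdated, will use file\n";
    if (auto cfg = LoadConfigFromFile()) {
        return cfg;
    }
    std::cerr << "ERROR: got bad distributable configuration\n";
    return std::nullopt;
}

int main() {
    return InitNetClassifier() ? 0 : 1;
}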
TClient is connected to server localhost:18211 2025-04-17T18:08:40.265044Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480
>> TGRpcYdbTest::ExplainQuery [GOOD]
>> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD]
>> TTableProfileTests::ExplicitPartitionsWrongKeyType
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65
>> YdbIndexTable::AlterTableAddIndex [GOOD]
>> YdbLogStore::AlterLogStore
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewClient::InMemoryTables [GOOD]
Test command err: 2025-04-17T18:08:18.042316Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345550380291843:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.042379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2f/r3tmp/tmpDsjC5i/pdisk_1.dat 2025-04-17T18:08:18.494246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.494344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.501604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:18.567178Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27051, node 1 2025-04-17T18:08:18.644468Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.645313Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.799617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.799639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.799645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.799752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4307 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.350988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:22.749340Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345570476618174:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:22.749430Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2f/r3tmp/tmpx8qTev/pdisk_1.dat 2025-04-17T18:08:22.954381Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21466, node 4 2025-04-17T18:08:23.060789Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.060884Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.063424Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:23.067008Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:23.067027Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:23.067032Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:23.067137Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7128 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:23.337796Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:26.114727Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:26.251459Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345587656488522:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.251545Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345587656488533:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.253893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.254796Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.273623Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345587656488536:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:26.348274Z node 4 :TX_PROXY ERROR: Actor# [4:7494345587656488613:2795] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:28.331918Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345592704624668:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.332012Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2f/r3tmp/tmphawHmE/pdisk_1.dat 2025-04-17T18:08:28.547904Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30158, node 7 2025-04-17T18:08:28.669239Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.669443Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.707967Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:28.775006Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:28.775034Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:28.775040Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:28.775157Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.059700Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
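Each node registration in these logs walks the same Hive sequence, VolatileState: Unknown -> Disconnected -> Connecting -> Connected, one WARN per transition. A table-driven check that only those forward edges occur in a trace; the edge set is inferred from this output, not from Hive's sources, which may permit more transitions:

#include <iostream>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

// Legal forward edges as observed in the log above.
bool IsObservedTransition(EVolatileState from, EVolatileState to) {
    switch (from) {
    case EVolatileState::Unknown:      return to == EVolatileState::Disconnected;
    case EVolatileState::Disconnected: return to == EVolatileState::Connecting;
    case EVolatileState::Connecting:   return to == EVolatileState::Connected;
    case EVolatileState::Connected:    return false; // terminal in these traces
    }
    return false;
}

int main() {
    bool ok = IsObservedTransition(EVolatileState::Unknown, EVolatileState::Disconnected)
           && IsObservedTransition(EVolatileState::Disconnected, EVolatileState::Connecting)
           && IsObservedTransition(EVolatileState::Connecting, EVolatileState::Connected);
    std::cout << (ok ? "trace is consistent\n" : "unexpected transition\n");
}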
2025-04-17T18:08:33.358256Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345614660517359:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:33.358362Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2f/r3tmp/tmpqpXWZx/pdisk_1.dat 2025-04-17T18:08:33.779696Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:33.808286Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:33.808397Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:33.813337Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2478, node 10 2025-04-17T18:08:34.146842Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:34.146864Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:34.146881Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:34.147034Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:34.446053Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:37.197453Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:37.332281Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:08:37.398357Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345631840387894:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.398460Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.399736Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345631840387906:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.403908Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T18:08:37.432225Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345631840387908:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T18:08:37.501671Z node 10 :TX_PROXY ERROR: Actor# [10:7494345631840387979:2902] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:37.620417Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2f0fgj6tpb62n6yj43g5e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTMwMzhhMjAtZWZiZDU4YTktMmIxOTAyNTktMmQ0Y2ZhN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:37.776372Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-17T18:08:39.520743Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345639522677163:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:39.520793Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2f/r3tmp/tmpNpgPpx/pdisk_1.dat 2025-04-17T18:08:39.729561Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:39.755686Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:39.755790Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:39.771868Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1913, node 13 2025-04-17T18:08:39.911219Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:39.911242Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:39.911251Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:39.911380Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 
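The TClient::Ls responses above are never printed whole; the debug string is cut at a fixed budget and suffixed with "... (TRUNCATED)". A plausible helper with that behaviour; the marker text is copied from the output, while the 1024-byte cap is an assumed value:

#include <iostream>
#include <string>

std::string TruncateForLog(const std::string& text, size_t limit = 1024) {
    if (text.size() <= limit) {
        return text;
    }
    return text.substr(0, limit) + "... (TRUNCATED)";
}

int main() {
    const std::string desc(4000, 'x'); // stands in for a long PathDescription debug string
    std::cout << TruncateForLog(desc) .size() << '\n';
}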
2025-04-17T18:08:40.194372Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:42.756952Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:42.896321Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:42.956807Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
>> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD]
>> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD]
Test command err: 2025-04-17T18:08:19.988288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345554701999624:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:19.988340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0e/r3tmp/tmp6zu0gb/pdisk_1.dat 2025-04-17T18:08:20.503048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.503139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:20.506277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:20.512982Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21096, node 1 2025-04-17T18:08:20.786018Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:20.786044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:20.786050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:20.786147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4533 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:21.115790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:24.835075Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345578907344528:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:24.835153Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0e/r3tmp/tmp9at99e/pdisk_1.dat 2025-04-17T18:08:25.047707Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13854, node 4 2025-04-17T18:08:25.182042Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:25.182140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:25.198672Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:25.247457Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:25.247483Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:25.247497Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:25.247625Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28259 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:25.480719Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:29.548971Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345599930550569:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:29.677406Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0e/r3tmp/tmpG1A5Ed/pdisk_1.dat 2025-04-17T18:08:29.879679Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:29.902978Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:29.903049Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:29.907018Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19118, node 7 2025-04-17T18:08:30.022024Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:30.022056Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:30.022069Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:30.022223Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23211 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:30.283728Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:34.838574Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345622246680794:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:34.838630Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0e/r3tmp/tmpuDo8Uw/pdisk_1.dat 2025-04-17T18:08:35.044842Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:35.072256Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:35.072345Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:35.081083Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8564, node 10 2025-04-17T18:08:35.265848Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:35.265882Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:35.265891Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:35.266064Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23739 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:35.555713Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:35.574730Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:08:40.129014Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345646293770962:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:40.150424Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0e/r3tmp/tmpTvFLPi/pdisk_1.dat 2025-04-17T18:08:40.348449Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17279, node 13 2025-04-17T18:08:40.442275Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:40.442424Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:40.473855Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:40.534652Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:40.534672Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:40.534680Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:40.534797Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:40.868751Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
>> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD]
>> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExplainQuery [GOOD]
Test command err: 2025-04-17T18:08:18.384345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345551272262603:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.384397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a19/r3tmp/tmp2222yg/pdisk_1.dat 2025-04-17T18:08:18.839013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.856884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.856973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.865190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2220, node 1 2025-04-17T18:08:19.062703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:19.062727Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:19.062733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:19.062858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22455 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.432227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:23.050763Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345572292803210:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.050836Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a19/r3tmp/tmp0KQir0/pdisk_1.dat 2025-04-17T18:08:23.313210Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14833, node 4 2025-04-17T18:08:23.400391Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.400472Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.447192Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:23.522837Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:23.522855Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:23.522861Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:23.522989Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5117 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:23.802890Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:26.430656Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345585177706151:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.430814Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.431121Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345585177706163:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.435567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.460843Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345585177706165:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:26.545420Z node 4 :TX_PROXY ERROR: Actor# [4:7494345585177706251:2682] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:28.586725Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345592765502342:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.592650Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a19/r3tmp/tmpbOFq3S/pdisk_1.dat 2025-04-17T18:08:28.866764Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6845, node 7 2025-04-17T18:08:28.953079Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.953169Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.959187Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:28.962808Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:28.962826Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:28.962833Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:28.962964Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.229885Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1744913311.786749 384501 text_format.cc:383] Error parsing text-format Ydb.Type: 3:13: Unknown enumeration value of "TYPE_UNDEFINED" for field "type_id". 
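The text_format.cc errors here and in the runs below share one failure mode: the protobuf text-format parser resolves the token after type_id: against the enum's declared value names, and neither "TYPE_UNDEFINED" nor the differently cased "Int32" is among them, so the request is rejected before any query executes. A simplified model of that case-sensitive name resolution, not protobuf's parser; the accepted names are assumed to mirror Ydb's primitive type ids such as INT32:

#include <iostream>
#include <optional>
#include <string>
#include <unordered_map>

enum class EPrimitiveTypeId { Bool, Int32, Uint32, Int64, Utf8 };

// Case-sensitive lookup, as in protobuf text format: "Int32" is not "INT32".
std::optional<EPrimitiveTypeId> ParseTypeId(const std::string& name) {
    static const std::unordered_map<std::string, EPrimitiveTypeId> kNames = {
        {"BOOL", EPrimitiveTypeId::Bool},     {"INT32", EPrimitiveTypeId::Int32},
        {"UINT32", EPrimitiveTypeId::Uint32}, {"INT64", EPrimitiveTypeId::Int64},
        {"UTF8", EPrimitiveTypeId::Utf8},
    };
    auto it = kNames.find(name);
    if (it == kNames.end()) {
        return std::nullopt; // "Unknown enumeration value ... for field \"type_id\""
    }
    return it->second;
}

int main() {
    for (const std::string name : {"INT32", "Int32", "TYPE_UNDEFINED"}) {
        std::cout << name << (ParseTypeId(name) ? ": ok\n" : ": rejected\n");
    }
}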
2025-04-17T18:08:31.792022Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345605650405254:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:31.792096Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345605650405246:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:31.792170Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:31.795494Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:31.823898Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345605650405260:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 28147497671 ... 4046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:32.021226Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MWMzZTY2NzktMjc4M2U2NzctY2RjYzllN2EtYzYxZGJiOGY=, ActorId: [7:7494345605650405228:2332], ActorState: ExecuteState, TraceId: 01js2f0a1e084s0kng7jjnjz5r, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-17T18:08:32.022155Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2f0a1e084s0kng7jjnjz5r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MWMzZTY2NzktMjc4M2U2NzctY2RjYzllN2EtYzYxZGJiOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: E0000 00:00:1744913312.024294 384501 text_format.cc:383] Error parsing text-format Ydb.Type: 5:21: Unknown enumeration value of "Int32" for field "type_id". 2025-04-17T18:08:32.078975Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MWMzZTY2NzktMjc4M2U2NzctY2RjYzllN2EtYzYxZGJiOGY=, ActorId: [7:7494345605650405228:2332], ActorState: ExecuteState, TraceId: 01js2f0a8saaxnb28s2pe3nzv3, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-17T18:08:32.079712Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f0a8saaxnb28s2pe3nzv3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MWMzZTY2NzktMjc4M2U2NzctY2RjYzllN2EtYzYxZGJiOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-17T18:08:33.905947Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345615238791616:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:33.906014Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a19/r3tmp/tmpb8ZoRx/pdisk_1.dat 2025-04-17T18:08:34.157166Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30042, node 10 2025-04-17T18:08:34.279088Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:34.279188Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:34.359702Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:34.383963Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:34.383995Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:34.384016Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:34.384130Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:34.658226Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:37.400816Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345632418661863:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.400886Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345632418661871:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.400931Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.403973Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:37.432433Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345632418661877:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:37.495268Z node 10 :TX_PROXY ERROR: Actor# [10:7494345632418661957:2685] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:39.416027Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345640516939045:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:39.416089Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a19/r3tmp/tmp5QX3xt/pdisk_1.dat 2025-04-17T18:08:39.738410Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:39.776660Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:39.776748Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:39.779548Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16470, node 13 2025-04-17T18:08:40.014898Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:40.014922Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:40.014930Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:40.015065Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:40.423695Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:40.511478Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:43.351981Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345657696809412:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.352547Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345657696809424:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.357127Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:43.357439Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.402140Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345657696809426:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:43.511381Z node 13 :TX_PROXY ERROR: Actor# [13:7494345657696809501:2799] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:43.594045Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0naf5pwcagyvfrqk6f28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YWE1MjExZjgtMjQ4MGVjZjgtODZhMTI5MjItNGU3MTc0Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2025-04-17T18:08:17.980567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345549090179477:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:17.981141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a24/r3tmp/tmpMhsbkV/pdisk_1.dat 2025-04-17T18:08:18.554318Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.562823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.562911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.567438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9708, node 1 2025-04-17T18:08:18.867031Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.867053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.867073Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.867169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:08:19.253008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:19.332174Z node 1 :TX_PROXY ERROR: Actor# [1:7494345557680114937:2611] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a24/r3tmp/tmpqydJdG/pdisk_1.dat 2025-04-17T18:08:23.143032Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:23.154016Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:23.208306Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.208399Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.212272Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17792, node 4 2025-04-17T18:08:23.389076Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:23.389100Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:23.389107Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:23.389240Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:08:23.695536Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:08:26.162121Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345583894640250:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.162122Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345583894640255:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.162239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.166138Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.189431Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345583894640264:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:26.255866Z node 4 :TX_PROXY ERROR: Actor# [4:7494345583894640333:2686] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:28.337598Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345593507474902:2233];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.337691Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a24/r3tmp/tmpIJHaUZ/pdisk_1.dat 2025-04-17T18:08:28.607413Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:28.665101Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.665187Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.668257Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15654, node 7 2025-04-17T18:08:28.913876Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:28.913893Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:28.913897Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:28.914008Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.245331Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:32.165213Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345610687344967:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.165347Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.165817Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345610687344979:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.172604Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:32.199532Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345610687344981:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:32.271385Z node 7 :TX_PROXY ERROR: Actor# [7:7494345610687345053:2694] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it ... u don't have access permissions } 2025-04-17T18:08:37.539205Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:37.544064Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:37.544175Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:37.544217Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:37.544291Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:37.555594Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:37.555743Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:37.555757Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:37.555812Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:37.559936Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345633299033989:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:37.665871Z node 10 :TX_PROXY ERROR: Actor# [10:7494345633299034063:2796] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:37.844121Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f0fmx7t7tk755k5c46qz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODkxZGUwOS1lMGY3M2MxNi0yMjJiMTVlOS04MGM2MTMxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:37.886304Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f0fzy43q7nfqrr6ba3xsv, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42776, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:37.886625Z node 10 :READ_TABLE_API NOTICE: [10:7494345633299034107:2351] Finish grpc stream, status: 400010 2025-04-17T18:08:37.889144Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f0g01a8xad3dmb200p4md, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42776, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:37.906733Z node 10 :READ_TABLE_API DEBUG: [10:7494345633299034108:2352] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-04-17T18:08:37.906780Z node 10 :READ_TABLE_API DEBUG: [10:7494345633299034108:2352] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-04-17T18:08:37.964263Z node 10 :READ_TABLE_API DEBUG: [10:7494345633299034108:2352] got stream part, size: 246, RU required: 128 rate limiter absent 2025-04-17T18:08:37.964554Z node 10 :READ_TABLE_API DEBUG: [10:7494345633299034108:2352] Starting inactivity timer for 600.000000s with tag 3 2025-04-17T18:08:38.016746Z node 10 :READ_TABLE_API NOTICE: [10:7494345633299034108:2352] Finish grpc stream, status: 400000 2025-04-17T18:08:38.020221Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f0g446aqpzb82zsmcgrr7, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42776, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:38.040595Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001428:2354] Adding quota request to queue ShardId: 0, TxId: 281474976715664 2025-04-17T18:08:38.040632Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001428:2354] Assign stream quota to Shard 0, Quota 5, TxId 281474976715664 Reserved: 5 of 25, Queued: 0 2025-04-17T18:08:38.041347Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001428:2354] got stream part, size: 84, RU required: 128 rate limiter absent 2025-04-17T18:08:38.041638Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001428:2354] Starting inactivity timer for 600.000000s with tag 3 2025-04-17T18:08:38.091945Z node 10 :READ_TABLE_API NOTICE: [10:7494345637594001428:2354] Finish grpc stream, status: 400000 2025-04-17T18:08:38.094285Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f0g6e5fhftqfaxf2ce1q2, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42776, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 
2025-04-17T18:08:38.104658Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001450:2357] Adding quota request to queue ShardId: 0, TxId: 281474976715666 2025-04-17T18:08:38.104693Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001450:2357] Assign stream quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-04-17T18:08:38.105905Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001450:2357] got stream part, size: 210, RU required: 128 rate limiter absent 2025-04-17T18:08:38.106191Z node 10 :READ_TABLE_API DEBUG: [10:7494345637594001450:2357] Starting inactivity timer for 600.000000s with tag 3 2025-04-17T18:08:38.108155Z node 10 :READ_TABLE_API NOTICE: [10:7494345637594001450:2357] Finish grpc stream, status: 400000 2025-04-17T18:08:38.111861Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b0480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.112159Z node 10 :GRPC_SERVER DEBUG: [0x51a000039680] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.112346Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b1080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.112527Z node 10 :GRPC_SERVER DEBUG: [0x51a000052880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.112697Z node 10 :GRPC_SERVER DEBUG: [0x51a0000afe80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.112864Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b0a80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.113031Z node 10 :GRPC_SERVER DEBUG: [0x51a000051680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.113204Z node 10 :GRPC_SERVER DEBUG: [0x51a00004e680] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.113395Z node 10 :GRPC_SERVER DEBUG: [0x51a000045680] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.113581Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e7c80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.113753Z node 10 :GRPC_SERVER DEBUG: [0x51a000090680] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.113918Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b3a80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.114102Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b6480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.114290Z node 10 :GRPC_SERVER DEBUG: [0x51a00004b080] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.114491Z node 10 :GRPC_SERVER DEBUG: [0x51a00004ce80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.114651Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a8c80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:08:38.114819Z node 10 :GRPC_SERVER DEBUG: [0x51a000038a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:40.443698Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345644451247537:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:40.443741Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a24/r3tmp/tmpr5t7jM/pdisk_1.dat 2025-04-17T18:08:40.763546Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17413, node 13 2025-04-17T18:08:40.853987Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:40.854006Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:40.854012Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:40.854132Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:40.867436Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:40.867524Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:40.879561Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:41.167496Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:41.267875Z node 13 :TICKET_PARSER DEBUG: Ticket F0E17FDED351CFB30C60D18C96020CF109782F340147C3BC082CA9E6B7BBA1B5 (ipv6:[::1]:47368) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:08:41.472734Z node 13 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-04-17T18:08:41.548171Z node 13 :TICKET_PARSER DEBUG: Ticket 397F4D595BEC73BB50E0FED83604355E1864AD092F7349F8435FF113425CD377 (ipv6:[::1]:47412) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-17T18:08:41.548807Z node 13 :TICKET_PARSER ERROR: Ticket 397F4D595BEC73BB50E0FED83604355E1864AD092F7349F8435FF113425CD377: Cannot create token from certificate. Client certificate failed verification >> YdbImport::Simple [GOOD] >> YdbImport::EmptyData >> YdbYqlClient::CreateTableWithPartitionAtKeys >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbTableBulkUpsert::DecimalPK >> YdbYqlClient::TestReadTableMultiShard >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteStoragePolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2025-04-17T18:08:18.058597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345553313131723:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.058651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2a/r3tmp/tmpZcJ12j/pdisk_1.dat 2025-04-17T18:08:18.574797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.616892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.616960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.624826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1705, node 1 2025-04-17T18:08:18.791161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.791183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.791192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.791295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.227599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:08:19.253151Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:21.124223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345566198034631:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.124319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.124449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345566198034640:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.128385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:21.151709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345566198034645:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:21.248911Z node 1 :TX_PROXY ERROR: Actor# [1:7494345566198034716:2689] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:21.932290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YzIwNGYwMTgtMzA5MTg1YzQtOTg4MmIyYTgtMjI5YjA1NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-17T18:08:23.281960Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345571275657842:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.281994Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2a/r3tmp/tmpY4DfaW/pdisk_1.dat 2025-04-17T18:08:23.478596Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:23.509507Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.509648Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.516652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9108, node 4 2025-04-17T18:08:23.710687Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:23.710707Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:23.710713Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:23.710813Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:23.936395Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:23.954754Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:08:26.321119Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345584160560766:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.321149Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345584160560758:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.321229Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.324800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:26.343598Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345584160560772:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:08:26.423519Z node 4 :TX_PROXY ERROR: Actor# [4:7494345584160560849:2679] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:28.091134Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345593569950705:2179];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.091203Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2a/r3tmp/tmpxFpbLS/pdisk_1.dat 2025-04-17T18:08:28.253403Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:28.292571Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.292648Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.297752Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64693, node 7 2025-04-17T18:08:28.443334Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:28.443354Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:28.443362Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:28.443502Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVe ... j9e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZWY4MWEyNjctYWRlNjg2ZWQtOWExYThjYTQtZDlmYzUzZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:33.714892Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2f0bxgbk30aamj7mny7m1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzhkNGQ0MzMtNGNmMzhlZWQtYjI1MzQ5MGEtMWQ3NGQzYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:33.716643Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=24; 2025-04-17T18:08:33.716927Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7494345615044788800:2361], Table: `Root/names` ([72057594046644480:2:1]), SessionActorId: [7:7494345610749821125:2361]Got LOCKS BROKEN for table `Root/names`. ShardID=72075186224037888, Sink=[7:7494345615044788800:2361].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:08:33.716991Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7494345615044788762:2361], SessionActorId: [7:7494345610749821125:2361], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7494345610749821125:2361]. isRollback=0 2025-04-17T18:08:33.717141Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzhkNGQ0MzMtNGNmMzhlZWQtYjI1MzQ5MGEtMWQ3NGQzYw==, ActorId: [7:7494345610749821125:2361], ActorState: ExecuteState, TraceId: 01js2f0bxgbk30aamj7mny7m1h, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7494345615044788794:2361] from: [7:7494345615044788762:2361] 2025-04-17T18:08:33.717195Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7494345615044788794:2361] TxId: 281474976715678. Ctx: { TraceId: 01js2f0bxgbk30aamj7mny7m1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzhkNGQ0MzMtNGNmMzhlZWQtYjI1MzQ5MGEtMWQ3NGQzYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:08:33.717324Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzhkNGQ0MzMtNGNmMzhlZWQtYjI1MzQ5MGEtMWQ3NGQzYw==, ActorId: [7:7494345610749821125:2361], ActorState: ExecuteState, TraceId: 01js2f0bxgbk30aamj7mny7m1h, Create QueryResponse for error on request, msg: 2025-04-17T18:08:35.308639Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345624328285376:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:35.311562Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2a/r3tmp/tmph716uq/pdisk_1.dat 2025-04-17T18:08:35.504205Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:35.535884Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:35.535969Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:35.538522Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25604, node 10 2025-04-17T18:08:35.690846Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:35.690873Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:35.690883Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:35.691022Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:35.967706Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:38.578260Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345637213188284:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:38.578325Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345637213188292:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:38.578377Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:38.582578Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:38.599016Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345637213188298:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:38.658043Z node 10 :TX_PROXY ERROR: Actor# [10:7494345637213188384:2682] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:40.404247Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345644362612509:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:40.404331Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2a/r3tmp/tmpU0OhLI/pdisk_1.dat 2025-04-17T18:08:40.588482Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25139, node 13 2025-04-17T18:08:40.727452Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:40.727567Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:40.762794Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:40.762816Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:40.762825Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:40.763136Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:40.764294Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9877 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:41.195704Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:44.099300Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345661542482726:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:44.099406Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:44.099695Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345661542482738:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:44.104777Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:44.152950Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345661542482740:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:08:44.254128Z node 13 :TX_PROXY ERROR: Actor# [13:7494345661542482805:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2025-04-17T18:08:21.102719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345563513065048:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:21.104043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a09/r3tmp/tmppXZYF7/pdisk_1.dat 2025-04-17T18:08:21.593575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:21.593654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:21.601104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:21.644798Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20868, node 1 2025-04-17T18:08:21.680540Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:21.689493Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:21.765284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:21.765307Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:21.765313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:21.765411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:22.061737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
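The recurring sequence above — a fetch of PoolId `default` failing with NOT_FOUND, TPoolCreatorActor scheduling a retry, then TX_PROXY reporting "path exist, request accepts it" — is the workload service bootstrapping the default resource pool: several actors race to create it, and the losers treat "already exists" as success. A minimal sketch of that idempotent create-with-retry pattern; the names, statuses, and backoff values here are illustrative, not the actual YDB actor code:

#include <chrono>
#include <functional>
#include <thread>

// Status codes modeled loosely on the ones visible in the log.
enum class EStatus { Success, NotFound, AlreadyExists, Unavailable };

// Idempotent "ensure created" loop: AlreadyExists counts as success
// (another creator won the race), transient failures are retried with
// a bounded, doubling delay.
bool EnsureDefaultPool(const std::function<EStatus()>& createPool,
                       int maxAttempts = 5) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        switch (createPool()) {
            case EStatus::Success:
            case EStatus::AlreadyExists:   // another creator won the race
                return true;
            case EStatus::NotFound:        // schema not visible yet: retry
            case EStatus::Unavailable:     // transient failure: retry
                std::this_thread::sleep_for(delay);
                delay *= 2;                // back off between attempts
                break;
        }
    }
    return false;
}

This is why the "path exist" TX_PROXY errors in the log are harmless: the creating transaction succeeded on another actor, and the retry path accepts the existing pool.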
2025-04-17T18:08:22.141878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:22.237338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:22.306151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:08:22.319204Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:08:22.324531Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-17T18:08:25.781655Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345580404243318:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:25.782042Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a09/r3tmp/tmp2ROmUk/pdisk_1.dat 2025-04-17T18:08:25.938460Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:25.972111Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:25.972203Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:25.975467Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9925, node 4 2025-04-17T18:08:26.144558Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:26.144582Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:26.144589Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:26.144735Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:08:26.436745Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:30.526633Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345603111209722:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:30.526694Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a09/r3tmp/tmpewSWZh/pdisk_1.dat 2025-04-17T18:08:30.853350Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3834, node 7 2025-04-17T18:08:30.921852Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:30.921925Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:30.979646Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:31.018771Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:31.018793Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:31.018802Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:31.018962Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:31.312976Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:31.532490Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:35.866913Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345624963867226:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:35.867000Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a09/r3tmp/tmppVhi0G/pdisk_1.dat 2025-04-17T18:08:36.057322Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:36.091877Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:36.091977Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:36.099700Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3242, node 10 2025-04-17T18:08:36.236489Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:36.236516Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:36.236525Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:36.236673Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:36.515876Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:36.586462Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:41.106684Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345651306094498:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:41.106760Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a09/r3tmp/tmp3oYFzr/pdisk_1.dat 2025-04-17T18:08:41.354595Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:41.379527Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:41.379589Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:41.381862Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28847, node 13 2025-04-17T18:08:41.522740Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:41.522770Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:41.522779Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:41.522917Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:41.779635Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:41.884388Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> YdbTableBulkUpsert::RetryOperation [GOOD] >> TGRpcYdbTest::CreateYqlSession >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> TGRpcYdbTest::MakeListRemoveDirectory >> YdbTableBulkUpsert::ZeroRows [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> YdbYqlClient::TestYqlWrongTable >> YdbOlapStore::ManyTables >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-04-17T18:08:17.990571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345547984036796:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:17.990628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2e/r3tmp/tmppOnBIH/pdisk_1.dat 2025-04-17T18:08:18.450017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.450155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.453931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1832, node 1 2025-04-17T18:08:18.509338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.512335Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.512348Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.800418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.800452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.800463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.800570Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21226 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.297169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:21.336228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.030952s 2025-04-17T18:08:21.830416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565163908987:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.830483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565163908999:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.830487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.835492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:21.864911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345565163909001:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:21.971491Z node 1 :TX_PROXY ERROR: Actor# [1:7494345565163909134:4223] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:22.684626Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f00a57b30chn0j9kjz4c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRlNWE0YWQtMzFhYWNmNjUtM2RjY2UyM2EtMzY3MWQwYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-04-17T18:08:24.260512Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345576028167475:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:24.261608Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2e/r3tmp/tmpIAqytb/pdisk_1.dat 2025-04-17T18:08:24.445121Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:24.477182Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:24.477262Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:24.480635Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25544, node 4 2025-04-17T18:08:24.594960Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:24.594986Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:24.594995Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:24.595123Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:24.819382Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:24.836404Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:27.272301Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-04-17T18:08:29.140269Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345596768500012:2151];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:29.189770Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2e/r3tmp/tmprH5ct4/pdisk_1.dat 2025-04-17T18:08:29.369835Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7842, node 7 2025-04-17T18:08:29.490285Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:29.490399Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:29.496416Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:29.590513Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:29.590532Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:29.590541Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:29.590685Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.891684Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:29.918415Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:08:32.357243Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part ... timed out, duration: 0 sec 4 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 8 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 16 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 32 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 64 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 128 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 256 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 512 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 1024 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 2048 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 4096 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 8192 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 16384 usec test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2e/r3tmp/tmpBKhNny/pdisk_1.dat 2025-04-17T18:08:35.345889Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:35.389413Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:35.402296Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:35.402382Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:35.410036Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3859, node 10 2025-04-17T18:08:35.567619Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:35.567641Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:35.567662Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:35.567804Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:35.820319Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
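The deadlines in the bulk-upsert errors above double on every attempt (4, 8, 16, ..., 16384 usec): the test is retrying with an exponentially growing timeout. A sketch of that doubling schedule, assuming a generic tryOnce callback in place of the real bulk-upsert call:

#include <chrono>
#include <functional>

// Retries an operation with a deadline that doubles on every attempt,
// mirroring the 4 -> 8 -> ... -> 16384 usec progression in the log above.
// `tryOnce` is a stand-in for the real operation.
bool RetryWithGrowingDeadline(
        const std::function<bool(std::chrono::microseconds)>& tryOnce,
        std::chrono::microseconds first = std::chrono::microseconds(4),
        std::chrono::microseconds cap = std::chrono::microseconds(16384)) {
    for (auto deadline = first; deadline <= cap; deadline *= 2) {
        if (tryOnce(deadline)) {
            return true;   // completed within the current deadline
        }
        // "timed out, duration: ..." -> try again with twice the deadline
    }
    return false;
}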
2025-04-17T18:08:38.458925Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS 2025-04-17T18:08:41.298620Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345650045858064:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:41.298728Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a2e/r3tmp/tmpYfl1Q0/pdisk_1.dat 2025-04-17T18:08:41.505257Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:41.509846Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:41.509919Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:41.514875Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22393, 
node 13 2025-04-17T18:08:41.599879Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:41.599900Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:41.599908Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:41.600057Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:41.879570Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:44.919326Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting 
NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> YdbS3Internal::TestS3Listing >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> YdbYqlClient::TestReadWrongTable [GOOD] >> TYqlDateTimeTests::DateKey [GOOD] >> TYqlDateTimeTests::DatetimeKey >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts >> YdbLogStore::AlterLogStore [GOOD] >> YdbLogStore::AlterLogTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2025-04-17T18:08:29.530600Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345597333721588:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:29.530655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fe/r3tmp/tmpvXUyxT/pdisk_1.dat 2025-04-17T18:08:30.042474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:30.064928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:30.065035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:30.104627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21668, node 1 2025-04-17T18:08:30.238766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:30.238793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:30.238862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:30.238975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16621 WaitRootIsUp 'Root'... 
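The "Injecting <STATUS> N times" matrix above probes the retry budget of the RetryOperation test: five injected failures are still absorbed (Result: SUCCESS), while six or ten exhaust the retries and surface the status to the caller. A condensed sketch of such a status-driven retry loop; the attempt budget and status names are taken from the log, everything else is illustrative:

#include <functional>
#include <string>

// With maxRetries = 5 there are six attempts in total, so five injected
// failures still succeed on the sixth attempt; six or more injected
// failures propagate the last status to the caller, as in the log above.
std::string UpsertWithRetries(const std::function<std::string()>& bulkUpsert,
                              int maxRetries = 5) {
    std::string status;
    for (int attempt = 0; attempt <= maxRetries; ++attempt) {
        status = bulkUpsert();
        if (status == "SUCCESS") {
            return status;
        }
        // ABORTED, OVERLOADED, UNAVAILABLE, BAD_SESSION, ... are all
        // retried here; a production policy would also apply per-status
        // backoff and jitter rather than retrying immediately.
    }
    return status;   // retry budget exhausted: report the last status
}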
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:30.556188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:32.576616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345610218624532:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.576680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345610218624528:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.576812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:32.582583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:32.613450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345610218624542:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:32.677646Z node 1 :TX_PROXY ERROR: Actor# [1:7494345610218624617:2678] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:34.909233Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345620849222345:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:34.909358Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fe/r3tmp/tmp15vzed/pdisk_1.dat 2025-04-17T18:08:35.111318Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:35.140859Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:35.140927Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:35.144478Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64876, node 4 2025-04-17T18:08:35.250918Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:35.250938Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:35.250945Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:35.251056Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:35.495458Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:37.725060Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345633734125272:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.725121Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.725191Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345633734125284:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:37.729279Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:37.747959Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345633734125286:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:08:37.810706Z node 4 :TX_PROXY ERROR: Actor# [4:7494345633734125359:2676] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fe/r3tmp/tmpH2tCbb/pdisk_1.dat 2025-04-17T18:08:39.678558Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:39.842861Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:39.884140Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:39.884225Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:39.887572Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22745, node 7 2025-04-17T18:08:40.071022Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:40.071044Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:40.071054Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:40.071200Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:40.472473Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:43.163387Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345658945670236:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.163493Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.199995Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:43.428462Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345658945670400:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.428558Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.428988Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345658945670405:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:43.432715Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:43.461086Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345658945670407:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:43.550657Z node 7 :TX_PROXY ERROR: Actor# [7:7494345658945670485:2802] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:43.650001Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0nd35d1cg3gc2dvmrwbc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YmJiNTcxNmItODA2OTYyYjMtMmNkODMzMGYtNmMyYjljMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:43.756119Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f0nnm3b5c76wsww95rcw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTI0YzdiZTgtYmJkZmU3MzctMWE5YzExM2YtZDI3MThlZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:43.837360Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YmJiNTcxNmItODA2OTYyYjMtMmNkODMzMGYtNmMyYjljMDU=, ActorId: [7:7494345658945670232:2335], ActorState: ExecuteState, TraceId: 01js2f0nr0ffw4yk0e67ywdkae, Create QueryResponse for error on request, msg: 2025-04-17T18:08:45.306987Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345665869050504:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:45.307073Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fe/r3tmp/tmpGsammf/pdisk_1.dat 2025-04-17T18:08:45.468593Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:45.502660Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:45.502734Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:45.506609Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4125, node 10 2025-04-17T18:08:45.565276Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:45.565292Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:45.565298Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:45.565399Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21253 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:45.812122Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:45.883161Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01js2f0qssbf1e4bzhcs4g530e, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42458, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.986051s 2025-04-17T18:08:45.894898Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01js2f0qt6d0d1hbav69v1h48n, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42464, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:48.483159Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f0tb3bfhnwsfp5fc0tgyz, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42470, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:48.484494Z node 10 :TX_PROXY ERROR: [ReadTable [10:7494345678753953413:2336] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2025-04-17T18:08:48.484587Z node 10 :TX_PROXY ERROR: [ReadTable [10:7494345678753953413:2336] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-04-17T18:08:48.484975Z node 10 :READ_TABLE_API NOTICE: [10:7494345678753953412:2336] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-04-17T18:08:48.490271Z node 10 :GRPC_SERVER DEBUG: [0x51a000070e80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.490486Z node 10 :GRPC_SERVER DEBUG: [0x51a00009ea80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.490646Z node 10 :GRPC_SERVER DEBUG: [0x51a0000afe80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.490840Z node 10 :GRPC_SERVER DEBUG: [0x51a000092480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.490993Z node 10 :GRPC_SERVER DEBUG: [0x51a000071480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.491175Z node 10 :GRPC_SERVER DEBUG: [0x51a000071a80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.491310Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.491453Z node 10 :GRPC_SERVER DEBUG: [0x51a00000de80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.491581Z node 10 :GRPC_SERVER DEBUG: [0x51a000003080] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.491728Z node 10 :GRPC_SERVER DEBUG: [0x51a000064280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.491866Z node 10 :GRPC_SERVER DEBUG: [0x51a00001c280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.492035Z node 10 :GRPC_SERVER DEBUG: [0x51a000094280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.492188Z node 10 :GRPC_SERVER DEBUG: [0x51a000035a80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.492325Z node 10 :GRPC_SERVER DEBUG: [0x51a000095a80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.492491Z node 10 :GRPC_SERVER DEBUG: [0x51a00003fc80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.492666Z node 10 :GRPC_SERVER DEBUG: [0x51a000002480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:08:48.492815Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3e80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> ColumnShardTiers::TieringUsage [GOOD] >> TKeyValueTest::TestConcatToLongKey [GOOD] >> YdbImport::EmptyData [GOOD] >> YdbImport::ImportFromS3ToExistingTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] 
recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:95:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:94:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:94:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:76:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:79:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:78:2110] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:82:2057] recipient: [37:78:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:81:2111] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:135:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:77:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:80:2057] recipient: [38:79:2110] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:81:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:83:2057] recipient: [38:79:2110] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:82:2111] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:136:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:80:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:82:2113] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:84:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:86:2057] recipient: [39:82:2113] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:85:2114] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:139:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:52:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:82:2113] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:86:2057] recipient: [40:82:2113] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! new actor is[40:85:2114] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:81:2057] recipient: [41:36:2083] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:84:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:85:2057] recipient: [41:83:2113] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:87:2057] recipient: [41:83:2113] !Reboot 72057594037927937 (actor [41:56:2097]) rebooted! !Reboot 72057594037927937 (actor [41:56:2097]) tablet resolver refreshed! new actor is[41:86:2114] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:140:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:57:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:74:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:84:2057] recipient: [42:36:2083] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:87:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:88:2057] recipient: [42:86:2116] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:90:2057] recipient: [42:86:2116] !Reboot 72057594037927937 (actor [42:56:2097]) rebooted! !Reboot 72057594037927937 (actor [42:56:2097]) tablet resolver refreshed! new actor is[42:89:2117] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:143:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:57:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:74:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:84:2057] recipient: [43:36:2083] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:87:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:88:2057] recipient: [43:86:2116] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:90:2057] recipient: [43:86:2116] !Reboot 72057594037927937 (actor [43:56:2097]) rebooted! !Reboot 72057594037927937 (actor [43:56:2097]) tablet resolver refreshed! new actor is[43:89:2117] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:143:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:52:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:57:2057] recipient: [44:52:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:74:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:85:2057] recipient: [44:36:2083] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:88:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:89:2057] recipient: [44:87:2116] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:91:2057] recipient: [44:87:2116] !Reboot 72057594037927937 (actor [44:56:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! 
new actor is[44:90:2117] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:144:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> YdbTableBulkUpsert::DecimalPK [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2025-04-17T18:06:30.046559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:06:30.047060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:06:30.047119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000953/r3tmp/tmpMxvCMP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10836, node 1 TClient is connected to server localhost:25557 2025-04-17T18:06:30.605530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:06:30.638948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:30.645516Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:30.645559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:30.645582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:30.645881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:30.680433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:30.680580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:30.692126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:41.076456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:678:2569], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.076621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.191827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-17T18:06:41.529001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.529099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.529287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2666], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.533119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T18:06:41.661524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:829:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:06:41.891061Z node 1 :TX_PROXY ERROR: Actor# [1:924:2734] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:42.447420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:42.868870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-17T18:06:43.496351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T18:06:44.138552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:06:44.491010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:06:45.448071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:06:45.748122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:06:59.841018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( 
SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:07:11.827319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715709:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-17T18:07:14.295835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715726:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715726 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 15 2025-04-17T18:07:14.550093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2929:4236];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:07:14.572576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2929:4236];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:07:14.572911Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037892 2025-04-17T18:07:14.581128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2929:4236];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:07:14.581354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2929:4236];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:07:14.581624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2929:4236];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:07:14.581746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2929:4236];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:07:14.581958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2929:4236];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:07:14.582096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2929:4236];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:07:14.582242Z node ... 
Cleanup;portions_count=0;portions_prepared=2;drop=0;skip=0;portions_counter=2;chunks=18;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:08:50.452441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=3900278-1bb711f0-b4215144-19eafe9d;tablet_id=72075186224037893;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::3c2692a-1bb711f0-af646875-bb444afc; 2025-04-17T18:08:50.452536Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=3900278-1bb711f0-b4215144-19eafe9d;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:08:50.452598Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=3900278-1bb711f0-b4215144-19eafe9d;tablet_id=72075186224037893;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-17T18:08:50.452641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=3900278-1bb711f0-b4215144-19eafe9d;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:08:50.452738Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Save Batch GenStep: 1:16 Blob count: 1 2025-04-17T18:08:50.452837Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=19;external_task_id=3900278-1bb711f0-b4215144-19eafe9d;mem=5382;cpu=0; 2025-04-17T18:08:50.452943Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:08:50.453007Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Save Batch GenStep: 1:17 Blob count: 1 2025-04-17T18:08:50.453377Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=504;external_task_id=3c2692a-1bb711f0-af646875-bb444afc;type=CS::TTL;priority=0;; 2025-04-17T18:08:50.455093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2932:4239];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=storage.cpp:87;event=granule_compaction_weight;priority=(10,19999998864); 2025-04-17T18:08:50.455190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2932:4239];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=optimizer.h:894;stop_instant=NO_VALUE_OPTIONAL;size=2656;next=;count=2;info={bytes=1136;count=1;records=1};event=start_optimization;stop_point=;main_portion=19; 2025-04-17T18:08:50.455436Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2932:4239];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:10;event=lock;process_id=CS::GENERAL::3c2dacc-1bb711f0-90d0bc6a-25dc0aab; 2025-04-17T18:08:50.455584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5382;external_task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab;type=CS::GENERAL;priority=0;; 2025-04-17T18:08:50.456187Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=20;task=cpu=0;mem=504;external_task_id=3c2692a-1bb711f0-af646875-bb444afc;type=CS::TTL;priority=0;; 2025-04-17T18:08:50.456232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=3c2692a-1bb711f0-af646875-bb444afc;mem=504;cpu=0; 2025-04-17T18:08:50.456272Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=3c2692a-1bb711f0-af646875-bb444afc;task_id=20;mem=504;cpu=0; 2025-04-17T18:08:50.456506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=21;task=cpu=0;mem=5382;external_task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab;type=CS::GENERAL;priority=0;; 2025-04-17T18:08:50.456539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab;mem=5382;cpu=0; 2025-04-17T18:08:50.456570Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4239];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab;task_id=21;mem=5382;cpu=0; 2025-04-17T18:08:50.456631Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2979:4271];tablet_id=72075186224037893;parent=[1:2932:4239];fline=manager.cpp:82;event=ask_data;request=request_id=41;16={portions_count=2};; 2025-04-17T18:08:50.456802Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2979:4271];tablet_id=72075186224037893;parent=[1:2932:4239];fline=columnshard_impl.cpp:1034;background=cleanup;changes_info=type=CS::CLEANUP::PORTIONS;details=(drop 2 portions(portion_id:18;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1736275429000;tx_id=18446744073709551615;);)(portion_id:17;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;));remove_snapshot:(plan_step=1736275429000;tx_id=18446744073709551615;);));; 2025-04-17T18:08:50.456971Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-04-17T18:08:50.457114Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[45] (CS::CLEANUP::PORTIONS) apply at tablet 72075186224037893 2025-04-17T18:08:50.457726Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=4040;raw_bytes=58457;count=2;records=52} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111904;raw_bytes=3648967;count=2;records=3040} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 2025-04-17T18:08:50.457926Z node 1 :TX_COLUMNSHARD INFO: 
self_id=[1:2979:4271];tablet_id=72075186224037893;parent=[1:2932:4239];fline=manager.cpp:82;event=ask_data;request=request_id=42;16={portions_count=2};; 2025-04-17T18:08:50.458098Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2979:4271];tablet_id=72075186224037893;parent=[1:2932:4239];fline=columnshard_impl.cpp:880;event=compaction;external_task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab; 2025-04-17T18:08:50.458167Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2979:4271];tablet_id=72075186224037893;parent=[1:2932:4239];fline=columnshard_impl.cpp:619;event=start_changes;type=CS::GENERAL;task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab; 2025-04-17T18:08:50.458530Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-17T18:08:50.459619Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=3c2dacc-1bb711f0-90d0bc6a-25dc0aab; 2025-04-17T18:08:50.464620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent_id=[1:2932:4239];fline=general_compaction.cpp:133;event=blobs_created_diff;appended=0;;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:264];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:264:256];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:520:232];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:752:192];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:944:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;;;switched=(portion_id:20;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;)););(portion_id:19;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-17T18:08:50.464683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;parent_id=[1:2932:4239];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T18:08:50.464877Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2932:4239];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-17T18:08:50.465229Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-04-17T18:08:50.465459Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[47] (CS::GENERAL) apply at tablet 72075186224037893 2025-04-17T18:08:50.467132Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037893 Save Batch GenStep: 1:18 Blob count: 1 2025-04-17T18:08:50.467273Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=4040;raw_bytes=58457;count=2;records=52} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111904;raw_bytes=3648967;count=2;records=3040} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::ReadTable >> TGRpcYdbTest::CreateYqlSession [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery >> YdbYqlClient::TestReadTableMultiShard [GOOD] >> YdbYqlClient::TestReadTableMultiShardUseSnapshot |88.2%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2025-04-17T18:08:26.416271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345586210964618:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:26.416335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a05/r3tmp/tmp8ZAn25/pdisk_1.dat 2025-04-17T18:08:26.838298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:26.859647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:26.859732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:26.876078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19565, node 1 2025-04-17T18:08:27.017565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:27.017596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:27.017602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:27.017712Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:27.324683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:31.356729Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345606243106460:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:31.359518Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a05/r3tmp/tmp699pw4/pdisk_1.dat 2025-04-17T18:08:31.668370Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:31.699060Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:31.699186Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:31.702441Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27060, node 4 2025-04-17T18:08:31.856374Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:31.856402Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:31.856413Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:31.856532Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:32.126870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:36.674467Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345627985106457:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:36.676653Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a05/r3tmp/tmp55gyNC/pdisk_1.dat 2025-04-17T18:08:36.994402Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:37.019242Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:37.019316Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:37.041325Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13363, node 7 2025-04-17T18:08:37.245289Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:37.245308Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:37.245316Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:37.245439Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:37.539035Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:41.755779Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345648381213414:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:41.755846Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a05/r3tmp/tmpFeNfqB/pdisk_1.dat 2025-04-17T18:08:42.057958Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:42.117489Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:42.117580Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:42.123967Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19740, node 10 2025-04-17T18:08:42.250954Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:42.250978Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:42.250986Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:42.251122Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:42.615811Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a05/r3tmp/tmpvR1q2Q/pdisk_1.dat 2025-04-17T18:08:47.175321Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:47.241546Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:47.281237Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:47.281350Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:47.288573Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14980, node 13 2025-04-17T18:08:47.386200Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:47.386226Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:47.386234Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:47.386387Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11026 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:47.703133Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::NotNulls >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> TYqlDateTimeTests::SimpleOperations [GOOD] >> TYqlDecimalTests::DecimalKey ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2025-04-17T18:08:18.150615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345553204058916:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.150672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1f/r3tmp/tmpD6U81G/pdisk_1.dat 2025-04-17T18:08:18.746546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.746645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.750624Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.751884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19208, node 1 2025-04-17T18:08:18.943074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.943092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.943099Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.943188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.262554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:8936 2025-04-17T18:08:19.581495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:19.600124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:20.271933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.272672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:20.278997Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:08:20.279802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8936 2025-04-17T18:08:20.710229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8936 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913301200 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:21.583987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8936 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744913301880 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-17T18:08:22.197251Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T18:08:22.203578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:08:25.020593Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345582658222954:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:25.020936Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1f/r3tmp/tmpUu89If/pdisk_1.dat 2025-04-17T18:08:25.179559Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:25.213798Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:25.213883Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:25.218441Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2135, node 4 2025-04-17T18:08:25.326611Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:25.326631Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:25.326639Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:25.326771Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:25.525931Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:20511 2025-04-17T18:08:25.796597Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:25.822157Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:26.337380Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494345587698160559:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:26.338349Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:26.393325Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:26.393411Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:26.404038Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-17T18:08:26.405761Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20511 2025-04-17T18:08:26.728196Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20511 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1744913306930 ParentPathId: 2 PathStat ... error=incorrect path status: LookupError; 2025-04-17T18:08:34.899204Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:35.902615Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:38.180962Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345636629232106:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:38.181028Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1f/r3tmp/tmpbAVYSm/pdisk_1.dat 2025-04-17T18:08:38.473389Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:38.538605Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:38.538720Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64399, node 10 2025-04-17T18:08:38.550608Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:38.766832Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:38.766857Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:38.766871Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:38.767022Z 
node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15802 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:39.050192Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:15802 2025-04-17T18:08:39.500173Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:39.537785Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:40.064107Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494345647279016617:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:40.064179Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:40.099606Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:40.099694Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:40.105096Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-17T18:08:40.118453Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15802 2025-04-17T18:08:40.711523Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-17T18:08:40.712087Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:08:41.067683Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:42.070519Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:43.078673Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:44.083123Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:46.074552Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345671379311311:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:46.074604Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1f/r3tmp/tmpZoG7nO/pdisk_1.dat 2025-04-17T18:08:46.296872Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:46.382104Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:46.382227Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:46.387536Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23241, node 13 2025-04-17T18:08:46.447973Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:46.448009Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:08:46.448020Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:46.448219Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:46.852116Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:46.866866Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:23941 2025-04-17T18:08:47.261760Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:47.294340Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:08:48.298722Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7494345678606715095:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:48.298778Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:48.374667Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:48.374763Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:48.394928Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-17T18:08:48.398788Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23941 2025-04-17T18:08:48.814411Z node 13 :TX_PROXY ERROR: Actor# [13:7494345679969247276:2880] txid# 281474976710660, issues: { message: "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1" severity: 1 } 2025-04-17T18:08:48.827295Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-17T18:08:48.827942Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:08:49.305700Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:50.308005Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:51.314673Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> Yq_1::Basic_Null >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 >> Yq_1::CreateConnection_With_Existing_Name >> Yq_1::CreateQuery_With_Idempotency >> Yq_1::ModifyConnections >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer >> PrivateApi::PingTask ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2025-04-17T18:08:17.994865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345546819947639:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:17.995712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a32/r3tmp/tmpP7L3B2/pdisk_1.dat 2025-04-17T18:08:18.440924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.468954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.469061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.491424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22044, node 1 2025-04-17T18:08:18.794811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.794831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.794842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.794948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.340855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:21.193368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 CLIENT_DEADLINE_EXCEEDED 2025-04-17T18:08:21.858896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345563999819631:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.858896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345563999819620:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.858968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.862638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:21.885046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345563999819634:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:21.987453Z node 1 :TX_PROXY ERROR: Actor# [1:7494345563999819761:4253] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:22.412293Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f00b14khjaky5vr60h2dx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRhMjAzODktNjhiMmE5ZDMtNjMwMmM1ZmYtOTczMzYyYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:24.119609Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345578593990084:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:24.119678Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a32/r3tmp/tmpCl3tKL/pdisk_1.dat 2025-04-17T18:08:24.344013Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:24.383466Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:24.383536Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:24.391086Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26105, node 4 2025-04-17T18:08:24.563622Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:24.563641Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:24.563653Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:24.563756Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:24.819199Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:27.064624Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:28.780123Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345592976801719:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.780177Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a32/r3tmp/tmpsvD459/pdisk_1.dat 2025-04-17T18:08:28.973830Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29153, node 7 2025-04-17T18:08:29.091764Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:29.091847Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:29.100803Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:29.112076Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:29.112114Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:29.112121Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:29.112212Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.338930Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:31.843925Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:33.780474Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494345592976801719:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:33.780530Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:42.358473Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345656015166202:2075]; ... ne=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=4ca8b72-1bb711f0-9de375df-c9a3d521; 2025-04-17T18:08:52.191580Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-17T18:08:52.201681Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7494345694924886074:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-17T18:08:52.208639Z node 13 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-17T18:08:52.208939Z node 13 :TX_COLUMNSHARD DEBUG: TxWriteIndex[5] (CS::INDEXATION) apply at tablet 72075186224037888 2025-04-17T18:08:52.209839Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-04-17T18:08:52.209849Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345699219853559:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:52.209935Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:52.209965Z node 13 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 2025-04-17T18:08:52.210222Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345699219853571:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:52.215938Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:52.224026Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7494345694924886074:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-04-17T18:08:52.225236Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-17T18:08:52.225292Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;fline=with_appended.cpp:65;portions=1,;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521; 2025-04-17T18:08:52.225637Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::4ca8b72-1bb711f0-9de375df-c9a3d521; 2025-04-17T18:08:52.225706Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:08:52.225782Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;tablet_id=72075186224037888;fline=columnshard_impl.cpp:780;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-04-17T18:08:52.225876Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T18:08:52.226333Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:08:52.226397Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:08:52.226435Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:08:52.226551Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:08:52.226930Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Delete Blob DS:2181038080:[72075186224037888:1:1:3:0:3864:0] 2025-04-17T18:08:52.226977Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-04-17T18:08:52.227067Z node 13 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=4ca8b72-1bb711f0-9de375df-c9a3d521;mem=3380;cpu=0; 2025-04-17T18:08:52.227210Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:780;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-04-17T18:08:52.244966Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7494345694924886074:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-04-17T18:08:52.249833Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345699219853573:2413], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:52.311572Z node 13 :TX_PROXY ERROR: Actor# [13:7494345699219853715:2913] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:52.491111Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7494345694924886074:2339];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T18:08:52.584518Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f0xzb5pp7fbzr1tzrk61r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NmRiYjJmYTMtZjE1YzYxNDItMWI2Y2Y4YjQtZjIzMGUyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:52.645178Z node 13 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710662 scanId: 1 version: {1744913332281:max} readable: {1744913332617:max} at tablet 72075186224037888 2025-04-17T18:08:52.645382Z node 13 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710662 scanId: 1 at tablet 72075186224037888 2025-04-17T18:08:52.645938Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7494345694924886074:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1744913332281:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-17T18:08:52.786660Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7494345694924886074:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1744913332281:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-17T18:08:52.788334Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[13:7494345694924886074:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1744913332281:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":8},{"from":12},{"from":4},{"from":6},{"from":2},{"from":14},{"from":10}]},{"owner_id":2,"inputs":[{"from":15}]},{"owner_id":4,"inputs":[{"from":15}]},{"owner_id":6,"inputs":[{"from":15}]},{"owner_id":8,"inputs":[{"from":15}]},{"owner_id":10,"inputs":[{"from":15}]},{"owner_id":12,"inputs":[{"from":15}]},{"owner_id":14,"inputs":[{"from":15}]},{"owner_id":15,"inputs":[]}],"nodes":{"15":{"p":{"p":{"data":[{"name":"stringToString","id":7},{"name":"id","id":1},{"name":"timestamp","id":2},{"name":"dateTimeS","id":3},{"name":"dateTimeU","id":4},{"name":"date","id":5},{"name":"utf8ToString","id":6}]},"o":"7,1,2,3,4,5,6","t":"FetchOriginalData"},"w":14,"id":15},"2":{"p":{"i":"5","p":{"address":{"name":"date","id":5}},"o":"5","t":"AssembleOriginalData"},"w":19,"id":2},"8":{"p":{"i":"1","p":{"address":{"name":"id","id":1}},"o":"1","t":"AssembleOriginalData"},"w":19,"id":8},"0":{"p":{"i":"5,3,4,1,7,2,6","t":"Projection"},"w":133,"id":0},"4":{"p":{"i":"3","p":{"address":{"name":"dateTimeS","id":3}},"o":"3","t":"AssembleOriginalData"},"w":19,"id":4},"14":{"p":{"i":"6","p":{"address":{"name":"utf8ToString","id":6}},"o":"6","t":"AssembleOriginalData"},"w":19,"id":14},"10":{"p":{"i":"7","p":{"address":{"name":"stringToString","id":7}},"o":"7","t":"AssembleOriginalData"},"w":19,"id":10},"6":{"p":{"i":"4","p":{"address":{"name":"dateTimeU","id":4}},"o":"4","t":"AssembleOriginalData"},"w":19,"id":6},"12":{"p":{"i":"2","p":{"address":{"name":"timestamp","id":2}},"o":"2","t":"AssembleOriginalData"},"w":19,"id":12}}}; 2025-04-17T18:08:52.794896Z node 13 :TX_COLUMNSHARD INFO: self_id=[13:7494345694924886103:2343];tablet_id=72075186224037888;parent=[13:7494345694924886074:2339];fline=manager.cpp:82;event=ask_data;request=request_id=3;3={portions_count=1};; 2025-04-17T18:08:52.797112Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-17T18:08:52.806147Z node 13 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-17T18:08:52.810349Z node 13 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-04-17T18:08:52.835281Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913332281, txId: 18446744073709551615] shutting down >> YdbLogStore::AlterLogTable [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck >> YdbImport::ImportFromS3ToExistingTable [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize >> Yq_1::ListConnections >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] >> TYqlDateTimeTests::DatetimeKey [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> 
Yq_1::DescribeJob >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 >> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardOneRow >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] Test command err: 2025-04-17T18:08:30.417038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345603357909519:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:30.417081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f8/r3tmp/tmp9zsIL0/pdisk_1.dat 2025-04-17T18:08:31.019328Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:31.036770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:31.036873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:31.047205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19793, node 1 2025-04-17T18:08:31.258737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:31.258759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:31.258766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:31.258867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5591 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:31.645825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:35.294893Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345625588869199:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:35.294962Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f8/r3tmp/tmplfqUn5/pdisk_1.dat 2025-04-17T18:08:35.553687Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:35.619081Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:35.619176Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14264, node 4 2025-04-17T18:08:35.624211Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:35.742375Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:35.742396Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:35.742404Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:35.742575Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24476 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:35.978300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:36.057446Z node 4 :TX_PROXY ERROR: Actor# [4:7494345629883837433:2607] txid# 281474976710658, issues: { message: "Unknown column \'BlaBla\' specified in key column list" severity: 1 } 2025-04-17T18:08:40.470274Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345646584777749:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:40.511140Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f8/r3tmp/tmpLnydhe/pdisk_1.dat 2025-04-17T18:08:40.762234Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:40.804903Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:40.804983Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:40.808316Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8218, node 7 2025-04-17T18:08:40.934069Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:40.934096Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:40.934106Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:40.934259Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12315 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:41.199801Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:45.823661Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345669306916526:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:45.824427Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f8/r3tmp/tmpbTTjJ8/pdisk_1.dat 2025-04-17T18:08:46.021722Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:46.061977Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:46.062053Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:46.065786Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16025, node 10 2025-04-17T18:08:46.254904Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:46.254919Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:46.254925Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:46.255042Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2674 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:46.557988Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:46.627656Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.849469Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:49.373143Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345686486787080:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:49.373236Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:49.373784Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345686486787092:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:49.377968Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T18:08:49.403589Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345686486787094:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T18:08:49.507867Z node 10 :TX_PROXY ERROR: Actor# [10:7494345686486787169:3075] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:50.456916Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f0v6v5e0ndv52k7davrvh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODg0OGViYjEtNjAzOWZlNTktNzRlOWY5NjMtNmI1OWQ1MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:50.478341Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f0v6v5e0ndv52k7davrvh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODg0OGViYjEtNjAzOWZlNTktNzRlOWY5NjMtNmI1OWQ1MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:50.654438Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f0wad67eqgpk5xk8r6eza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZmU5NDQ3MjYtZjIyZWNkNDAtZWJjYzEwZjMtZjVhYTg4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:50.766274Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494345669306916526:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:50.766351Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:52.462906Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345697419354197:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:52.462980Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f8/r3tmp/tmpWTIhHF/pdisk_1.dat 2025-04-17T18:08:52.704798Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:52.739777Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:52.739857Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:52.745758Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4394, node 13 2025-04-17T18:08:52.964905Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:52.964931Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:52.964938Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:52.965077Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:53.335072Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::OperationTimeout >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::DatetimeKey [GOOD] Test command err: 2025-04-17T18:08:20.148527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345559318812700:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:20.156517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0a/r3tmp/tmponQYnW/pdisk_1.dat 2025-04-17T18:08:20.670808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.670917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:20.671300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:20.680170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64468, node 1 2025-04-17T18:08:20.834792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:20.834813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:20.834842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:20.834958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21623 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:21.216755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21623 2025-04-17T18:08:21.527977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:21.548257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:22.052350Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345569763197242:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:22.052431Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:22.102335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:22.102461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:22.108643Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:08:22.109956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21623 2025-04-17T18:08:22.365408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21623 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913302820 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:23.203813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21623 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744913303850 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:24.746905Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T18:08:24.752015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:08:26.828794Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345584415094569:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:26.899915Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0a/r3tmp/tmpNrXabv/pdisk_1.dat 2025-04-17T18:08:27.024431Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21863, node 4 2025-04-17T18:08:27.154348Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:27.154487Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:27.193083Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:27.226817Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:27.226844Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:27.226850Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:27.226990Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:27.450628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27772 2025-04-17T18:08:27.733711Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:27.759933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:28.272005Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494345594866435697:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.272506Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:28.299555Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.299630Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.304492Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-17T18:08:28.305274Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27772 2025-04-17T18:08:30.207951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0 ... 11558Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23733 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:43.692953Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:43.711036Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:47.098727Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:47.200025Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345676715296799:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:47.200115Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:47.200320Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345676715296811:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:47.205555Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:47.245948Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345676715296813:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:47.327854Z node 10 :TX_PROXY ERROR: Actor# [10:7494345676715296896:2791] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:47.503493Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0s2x265snkm08qf895yw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM1NGIyNWItZmUwNjViY2UtYWI5MzkyNWItZGFjMDM2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:47.731400Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f0sdjf51g12j849mg2xc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM1NGIyNWItZmUwNjViY2UtYWI5MzkyNWItZGFjMDM2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:47.764266Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494345655240459103:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.764362Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:48.033333Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f0skt1m68e2gv2zgm4tm4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM1NGIyNWItZmUwNjViY2UtYWI5MzkyNWItZGFjMDM2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:48.312932Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f0sxb3aja7rce3bewq3n6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM1NGIyNWItZmUwNjViY2UtYWI5MzkyNWItZGFjMDM2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:08:50.421812Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345690948384167:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:50.424882Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a0a/r3tmp/tmpz7klLW/pdisk_1.dat 2025-04-17T18:08:50.721744Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:50.753683Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:50.753788Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:50.758280Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22683, node 13 2025-04-17T18:08:50.946860Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:50.946888Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:50.946897Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:50.947058Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:51.393837Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:54.852693Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.958017Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345708128254568:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:54.958126Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:54.958441Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345708128254583:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:54.964599Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:54.984668Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345708128254585:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:55.055241Z node 13 :TX_PROXY ERROR: Actor# [13:7494345712423221952:2779] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:55.134227Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f10na4jc79kbq3xre9h8r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTRjMDE2MDEtMTI2YTQ3YmYtMmI2OWMyNjctMjhiNGYxMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:55.255326Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f10ve680dq2ft7ftzbd1p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTRjMDE2MDEtMTI2YTQ3YmYtMmI2OWMyNjctMjhiNGYxMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:55.422317Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494345690948384167:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:55.422396Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:55.476296Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f10yye5nc64gr0pm33y2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTRjMDE2MDEtMTI2YTQ3YmYtMmI2OWMyNjctMjhiNGYxMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:55.682314Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f115t6ya033qmtpske1q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTRjMDE2MDEtMTI2YTQ3YmYtMmI2OWMyNjctMjhiNGYxMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> YdbYqlClient::TraceId [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::OverwriteStoragePolicy [GOOD] Test command err: 2025-04-17T18:08:17.997846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345545582781272:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:17.997903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a23/r3tmp/tmptO4RQL/pdisk_1.dat 2025-04-17T18:08:18.426423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.426522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.431174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:18.470961Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13574, node 1 2025-04-17T18:08:18.598532Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.598555Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:18.797743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.797776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.797792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.797892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3877 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.349266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:3877 2025-04-17T18:08:19.639432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:19.662542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:20.167948Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345561482839926:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:20.168016Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:20.187229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.187329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:20.193714Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:08:20.194878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3877 2025-04-17T18:08:20.487841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3877 TClient::Ls request: /Root/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913300634 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-17T18:08:21.119381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3877 TClient::Ls request: /Root/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744913301208 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:21.427434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3877 TClient::Ls request: /Root/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1744913301516 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-17T18:08:21.809119Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T18:08:21.809665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:08:24.886874Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345577478398109:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:24.888974Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a23/r3tmp/tmpa7rDoA/pdisk_1.dat 2025-04-17T18:08:25.106681Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2274, node 4 2025-04-17T18:08:25.211483Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:25.211601Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:25.252316Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:25.252336Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:25.252345Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:25.252466Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:25.256161Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:25.491319Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18824 2025-04-17T18:08:25.752694Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 28147497671565 ... 
meStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:48.831167Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:22747 2025-04-17T18:08:49.356230Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:49.397963Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:49.906472Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7494345684383533268:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:49.906548Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:49.956659Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:49.956843Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:49.973547Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-17T18:08:49.979384Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22747 2025-04-17T18:08:50.495291Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22747 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1744913331190 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: 
"key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:51.820375Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22747 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715661 CreateStep: 1744913332250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:52.805483Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:08:52.826282Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494345676455533372:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:52.826369Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:22747 TClient::Ls request: /Root/ydb_ut_tenant/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1744913333290 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-17T18:08:54.013027Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.906484Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7494345684383533268:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:54.906581Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:22747 TClient::Ls request: /Root/ydb_ut_tenant/table-4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-4" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1744913334490 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-4" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:55.140959Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22747 TClient::Ls request: /Root/ydb_ut_tenant/table-5 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-5" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1744913335530 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-5" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-17T18:08:56.207765Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-17T18:08:56.222041Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:08:56.602808Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7494345714448309408:2570], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:08:56.603004Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:08:56.665944Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7494345714448309408:2570], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:08:56.835664Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7494345714448309408:2570], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:08:57.230619Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7494345714448309408:2570], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
>> YdbOlapStore::LogLast50ByResource [GOOD]
>> YdbOlapStore::LogGrepNonExisting
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts
>> KqpQueryService::ReturnAndCloseSameTime [GOOD]
>> KqpQueryService::ReplaceIntoWithDefaultValue
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD]
Test command err:
2025-04-17T18:08:37.430525Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345633810688711:2075];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:37.430578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ed/r3tmp/tmpLx9330/pdisk_1.dat
2025-04-17T18:08:38.036807Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:08:38.049259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:08:38.049354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:08:38.059678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 63565, node 1
2025-04-17T18:08:38.230849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:08:38.230870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:08:38.230881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:08:38.231002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22954
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:08:38.657916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:08:40.938964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345646695591661:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.939094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:41.143716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:41.348763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345650990559132:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:41.348891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:41.349194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345650990559140:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:41.352944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:41.393870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345650990559142:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:41.463296Z node 1 :TX_PROXY ERROR: Actor# [1:7494345650990559231:2817] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:41.542495Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345650990559242:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:43: Error: Failed to convert type: Struct<'Key':String,'Value':String> to Struct<'Key':Uint32?,'Value':String?>
:2:43: Error: Failed to convert 'Key': String to Optional
:2:43: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:08:41.542805Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDdjNTYxMzctMjJjM2EzODMtY2YxZWE0Zi0zOGNkM2U4OA==, ActorId: [1:7494345646695591658:2335], ActorState: ExecuteState, TraceId: 01js2f0kbzcykpx9pf45p27h44, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:08:43.199510Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345658566478418:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:43.199578Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ed/r3tmp/tmpZ8dl4j/pdisk_1.dat 2025-04-17T18:08:43.569148Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:43.632640Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:43.632692Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:43.639120Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20779, node 4 2025-04-17T18:08:43.778795Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:43.778816Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:43.778823Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:43.778954Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:44.047554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:44.065174Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:46.299913Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345671451381292:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:46.299998Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:47.947337Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345675354592553:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.947544Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ed/r3tmp/tmpvA8Xp8/pdisk_1.dat 2025-04-17T18:08:48.244660Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:48.284662Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:48.284748Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:48.292638Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14572, node 7 2025-04-17T18:08:48.419153Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:48.419178Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:48.419185Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:48.419312Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ... SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:48.685396Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:51.473767Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345692534462789:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:51.473880Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:51.490898Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:51.595973Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345692534462956:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:51.596073Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:51.596264Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345692534462961:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:51.600672Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:51.626233Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345692534462963:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:51.694301Z node 7 :TX_PROXY ERROR: Actor# [7:7494345692534463034:2807] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:51.759575Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f0xcbeca3vst4hnnamzk0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2QyNTllOWEtZmNiMzdmNTQtMzAwMDZlM2MtNzdjNGE3N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:51.829425Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7494345692534463080:2362], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:08:51.829670Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=N2QyNTllOWEtZmNiMzdmNTQtMzAwMDZlM2MtNzdjNGE3N2I=, ActorId: [7:7494345692534462786:2335], ActorState: ExecuteState, TraceId: 01js2f0xjk2jcfj7txwtqgzmxe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:08:51.891140Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f0xm17zz0c55k5400x5c0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2QyNTllOWEtZmNiMzdmNTQtMzAwMDZlM2MtNzdjNGE3N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:52.031293Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2f0xp3ft4tsvfygsrypz49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2QyNTllOWEtZmNiMzdmNTQtMzAwMDZlM2MtNzdjNGE3N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.729392Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345702767542632:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:53.734370Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ed/r3tmp/tmpc2f0pa/pdisk_1.dat 2025-04-17T18:08:54.025144Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:54.053282Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:54.053370Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:54.057601Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9498, node 10 2025-04-17T18:08:54.183203Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:54.183227Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:54.183235Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:54.183364Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26629 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:54.510454Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:57.338482Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345719947412869:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.338592Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.361496Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:57.492877Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345719947413029:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.492974Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.493601Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345719947413034:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.497731Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:57.524055Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345719947413036:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:57.604422Z node 10 :TX_PROXY ERROR: Actor# [10:7494345719947413107:2791] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:57.618219Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7494345719947413118:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:08:57.619862Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YTZlOWM1MWQtOTQ0MzA4MTItMzRiMGY0MGItMzQzN2Q3OA==, ActorId: [10:7494345719947412851:2335], ActorState: ExecuteState, TraceId: 01js2f134kc9apaqzjqbqyw95c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:08:57.644309Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7494345719947413145:2361], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-17T18:08:57.645999Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YTZlOWM1MWQtOTQ0MzA4MTItMzRiMGY0MGItMzQzN2Q3OA==, ActorId: [10:7494345719947412851:2335], ActorState: ExecuteState, TraceId: 01js2f138y1nq4dma59bkzgjde, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
>> YdbTableBulkUpsert::NotNulls [GOOD]
>> YdbTableBulkUpsert::Errors
>> Yq_1::Basic
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54
>> YdbYqlClient::RetryOperationLimitedDuration [GOOD]
>> YdbYqlClient::RenameTables [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19
>> YdbS3Internal::TestAccessCheck [GOOD]
>> YdbScripting::BasicV0
>> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD]
Test command err:
2025-04-17T18:08:18.155675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345553133148307:2075];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:18.158012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a34/r3tmp/tmpdre5NJ/pdisk_1.dat
2025-04-17T18:08:18.701199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:08:18.701291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:08:18.710798Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:08:18.712327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18385, node 1
2025-04-17T18:08:18.982718Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:08:18.982746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:08:18.982755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:08:18.982863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24276
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.329674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:21.510865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345566018051233:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.510973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.762839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:21.936948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:22.091042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345570313018742:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:22.091115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345570313018747:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:22.091145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:22.094711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T18:08:22.132961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345570313018749:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T18:08:22.223089Z node 1 :TX_PROXY ERROR: Actor# [1:7494345570313018829:2832] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:22.253566Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7494345570313018850:2842], for# test_user@builtin, access# DescribeSchema 2025-04-17T18:08:22.253583Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7494345570313018850:2842], for# test_user@builtin, access# DescribeSchema 2025-04-17T18:08:22.262729Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345570313018840:2365], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:08:22.262943Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA0NjkxYmUtNDkwNTliYjktMWM5ZTM2NmYtZjc2ZmIwMmE=, ActorId: [1:7494345570313018738:2354], ActorState: ExecuteState, TraceId: 01js2f00ja9eh19nvsv4t9fh6p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:08:24.039686Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345577057979677:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:24.039736Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a34/r3tmp/tmpi87Gda/pdisk_1.dat 2025-04-17T18:08:24.356421Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21746, node 4 2025-04-17T18:08:24.433196Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:24.433271Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:24.480781Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:24.480800Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:24.480806Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:24.480917Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:24.488605Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:24.719384Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:27.008168Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345589942882629:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:27.008260Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:27.019278Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:27.172222Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345589942882791:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:27.172327Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:27.172667Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345589942882796:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:27.176471Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:27.195993Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345589942882798:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:27.271251Z node 4 :TX_PROXY ERROR: Actor# [4:7494345589942882872:2796] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:27.351862Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f05h33jfh10gbtxkywpph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OGQ3ZDliYjctNWIxOWZjLTMxNThjMmUwLWYyY2E2ODcy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:27.492320Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f05qjc1qgw7pc0yd97j51, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTUzZmJlYzgtNTBiNDJlMzUtZDU3ZDUzNjMtNTQ0NzNjYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:29.026840Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345600716793553:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:29.026901Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a34/r3tmp/tmpIR8Pb9/pdisk_1.dat 2025-04-17T18:08:29.389861Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:29.409534Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:29.409627Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:29.413611Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15196, node 7 2025-04-17T18:08:29.543424Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:29.543445Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:29.543452Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:29.543577Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17946 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:08:29.878350Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5
2025-04-17T18:08:34.027347Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494345600716793553:2139];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:34.027501Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5
Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5
Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5
Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5
2025-04-17T18:08:34.826534Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345622191631089:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:34.826644Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:34.826937Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345622191631101:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:34.831047Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:34.872307Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345622191631103:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:08:34.971586Z node 7 :TX_PROXY ERROR: Actor# [7:7494345622191631177:2683] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-04-17T18:08:39.506611Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345640752431898:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:39.509644Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a34/r3tmp/tmpis9s3y/pdisk_1.dat 2025-04-17T18:08:39.783943Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1313, node 10 2025-04-17T18:08:39.885225Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:39.885325Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:39.915684Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:40.010779Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:40.010807Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:40.010832Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:40.010971Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15237 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:08:40.332684Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3
2025-04-17T18:08:44.506887Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494345640752431898:2074];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:44.506965Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3
2025-04-17T18:08:54.770417Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T18:08:54.770451Z node 10 :IMPORT WARN: Table profiles were not loaded
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3
>> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD]
>> TGRpcYdbTest::CreateAlterCopyAndDropTable
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts
>> TYqlDecimalTests::SimpleUpsertSelect [GOOD]
>> TYqlDecimalTests::NegativeValues
>> TYqlDecimalTests::DecimalKey [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword
>> YdbYqlClient::TestReadTableMultiShardOneRow [GOOD]
>> YdbYqlClient::TestReadTableBatchLimits
>> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD]
Test command err:
2025-04-17T18:08:22.192070Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345570675046346:2211];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:22.192372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a08/r3tmp/tmpydXANN/pdisk_1.dat
2025-04-17T18:08:22.702470Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:08:22.712317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:08:22.712409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:08:22.719506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18288, node 1
2025-04-17T18:08:22.835055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:08:22.835075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:08:22.835083Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:08:22.835185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2724
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:08:23.164860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 2025-04-17T18:08:27.191058Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345570675046346:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:27.191143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-04-17T18:08:29.237523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345600739818404:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:29.237650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:29.238187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345600739818416:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:29.241999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:08:29.295823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345600739818418:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:08:29.349923Z node 1 :TX_PROXY ERROR: Actor# [1:7494345600739818488:2702] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-04-17T18:08:36.959320Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345628588990712:2089];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:36.959378Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a08/r3tmp/tmpQa6iew/pdisk_1.dat 2025-04-17T18:08:37.103250Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:37.133621Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:37.133714Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:37.140892Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6813, node 4 2025-04-17T18:08:37.394246Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:37.394278Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:37.394291Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:37.394423Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31045 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:37.651525Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:40.026359Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345645768860882:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.026447Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.062086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:40.224674Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345645768861047:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.224796Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.225049Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345645768861052:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.228523Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:40.259494Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345645768861054:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:40.356664Z node 4 :TX_PROXY ERROR: Actor# [4:7494345645768861138:2786] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', e ... e: "Table-1" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:47532" 2025-04-17T18:09:00.771187Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] txid# 281474976715672 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:09:00.771238Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] txid# 281474976715672 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:00.771536Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] txid# 281474976715672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:00.771612Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] HANDLE EvNavigateKeySetResult, txid# 281474976715672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:00.771644Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2025-04-17T18:09:00.771793Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] txid# 281474976715672 HANDLE EvClientConnected 2025-04-17T18:09:00.772017Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:00.772139Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:09:00.774436Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-04-17T18:09:00.775246Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2025-04-17T18:09:00.775293Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017356:3568] txid# 281474976715672 SEND to# [13:7494345730169017355:2393] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2025-04-17T18:09:00.778400Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.778499Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.778512Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.778563Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.788261Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913340835, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:09:00.795886Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 1 2025-04-17T18:09:00.802095Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.802220Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.802308Z node 13 
:GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.802363Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.814478Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2025-04-17T18:09:00.828933Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037890 not found 2025-04-17T18:09:00.830344Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:09:00.839206Z node 13 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01js2f16d70jscr6stxkyxdbrh, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47538, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:09:00.839359Z node 13 :TX_PROXY DEBUG: actor# [13:7494345708694178908:2138] Handle TEvProposeTransaction 2025-04-17T18:09:00.839377Z node 13 :TX_PROXY DEBUG: actor# [13:7494345708694178908:2138] TxId# 281474976715673 ProcessProposeTransaction 2025-04-17T18:09:00.839421Z node 13 :TX_PROXY DEBUG: actor# [13:7494345708694178908:2138] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [13:7494345730169017449:3655] 2025-04-17T18:09:00.841983Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:47538" 2025-04-17T18:09:00.842011Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:09:00.842059Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:00.842375Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:00.842451Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:00.842484Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2025-04-17T18:09:00.842623Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 HANDLE EvClientConnected 2025-04-17T18:09:00.842855Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:00.843111Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:09:00.845440Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-04-17T18:09:00.845629Z node 
13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2025-04-17T18:09:00.845698Z node 13 :TX_PROXY DEBUG: Actor# [13:7494345730169017449:3655] txid# 281474976715673 SEND to# [13:7494345730169017448:2397] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2025-04-17T18:09:00.849606Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.849726Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.849739Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.849794Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.875522Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913340919, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:09:00.878932Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2025-04-17T18:09:00.882586Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.882677Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.882692Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:09:00.882739Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:09:00.886513Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2025-04-17T18:09:00.890052Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494345708694178685:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:00.890117Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:00.898322Z node 13 :GRPC_SERVER DEBUG: [0x51a000117c80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.898585Z node 13 :GRPC_SERVER DEBUG: [0x51a00011e280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.898600Z node 13 :GRPC_SERVER DEBUG: [0x51a0000e7680] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.898836Z node 13 :GRPC_SERVER DEBUG: [0x51a00011dc80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.898853Z node 13 :GRPC_SERVER DEBUG: [0x51a000150c80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899025Z node 13 :GRPC_SERVER DEBUG: [0x51a000118280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899047Z node 13 :GRPC_SERVER DEBUG: [0x51a00011a680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899235Z node 13 :GRPC_SERVER DEBUG: [0x51a00011a080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899259Z node 13 :GRPC_SERVER DEBUG: [0x51a000119a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899415Z node 13 :GRPC_SERVER DEBUG: [0x51a00011c480] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 
2025-04-17T18:09:00.899443Z node 13 :GRPC_SERVER DEBUG: [0x51a00011be80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899595Z node 13 :GRPC_SERVER DEBUG: [0x51a00011ac80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899637Z node 13 :GRPC_SERVER DEBUG: [0x51a000119480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899785Z node 13 :GRPC_SERVER DEBUG: [0x51a000118e80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899813Z node 13 :GRPC_SERVER DEBUG: [0x51a000118880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899973Z node 13 :GRPC_SERVER DEBUG: [0x51a00011b280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.899991Z node 13 :GRPC_SERVER DEBUG: [0x51a00011e880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-17T18:09:00.904940Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037889 not found 2025-04-17T18:09:00.906321Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TGRpcYdbTest::OperationTimeout [GOOD] >> TGRpcYdbTest::OperationCancelAfter >> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate |88.3%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2025-04-17T18:08:30.731313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345603755767806:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:30.731356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f6/r3tmp/tmp5Ge0YW/pdisk_1.dat 2025-04-17T18:08:31.309454Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:31.327337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:31.327418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:31.332237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7264, node 1 2025-04-17T18:08:31.458359Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:31.458381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:31.458388Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:31.458456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:31.844970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:34.026424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:34.267945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345620935638144:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:34.268023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345620935638154:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:34.268097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:34.271918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:34.316291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345620935638158:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:34.392683Z node 1 :TX_PROXY ERROR: Actor# [1:7494345620935638235:2799] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:34.848566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0cekd3cdtrnvdygmmha2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y5NDU1YmUtY2M2MTZlN2MtOThhNzcxMTktNzMzZTI0ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:35.034094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f0d20733428sxx8286a4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y5NDU1YmUtY2M2MTZlN2MtOThhNzcxMTktNzMzZTI0ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:35.177665Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f0d7306jwh7h6jp9av1mz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y5NDU1YmUtY2M2MTZlN2MtOThhNzcxMTktNzMzZTI0ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:35.278315Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f0dbgbqczmfa1gbp7ntwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y5NDU1YmUtY2M2MTZlN2MtOThhNzcxMTktNzMzZTI0ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:35.382217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2f0dem92xzkcwpsyf23m2t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y5NDU1YmUtY2M2MTZlN2MtOThhNzcxMTktNzMzZTI0ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:08:37.048378Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345627878801507:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:37.048790Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f6/r3tmp/tmpvQSmZr/pdisk_1.dat 2025-04-17T18:08:37.301354Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:37.332834Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:37.332922Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:37.338316Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19749, node 4 2025-04-17T18:08:37.387802Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:37.387827Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:37.387835Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:37.387954Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14937 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:37.648481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:40.112821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:40.201776Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345645058671692:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.201861Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.202157Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345645058671704:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.206208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:40.235876Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345645058671706:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:40.295493Z node 4 :TX_PROXY ERROR: Actor# [4:7494345645058671777:2784] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:40.361473Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f0j873fvgh98z22cafzhq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTA5ZDk3YWQtYWViNjA0YTMtZGU5MmY1ZDMtZmQ1YmI0MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:40.499402Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f0jdj3m0vgwcka9w92c4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTA5ZDk3YWQtYWViNjA0YTMtZGU5MmY1ZDMtZmQ1YmI0MTE=, CurrentExecutionId: , CustomerSuppliedI ... 6710666. Ctx: { TraceId: 01js2f0y9yctkm11y08pvd6hxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:52.982916Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2f0y9yctkm11y08pvd6hxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.102494Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01js2f0yr6b3ka667rh7c46cfn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.115020Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494345681745633321:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:53.115790Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:53.217908Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2f0yvt9qzfjdx3vg77xc1d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.325856Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01js2f0yzb6m28m97e7r4c2ga9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.425250Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2f0z2qabk4ja3fm16d0y88, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.548043Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710672. 
Ctx: { TraceId: 01js2f0z5pes08q5gdz6d9v0nc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.877569Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2f0z9gdw294zh6ppnqmwwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.883220Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2f0z9gdw294zh6ppnqmwwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmI4ZDZhMmItZGUzNDBhZTUtYTk2MWEyMjctZjVjOTQ3OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:55.745950Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345708760629624:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:55.746144Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f6/r3tmp/tmpeRGfQ1/pdisk_1.dat 2025-04-17T18:08:56.018927Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:56.071672Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:56.071744Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:56.078255Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3642, node 13 2025-04-17T18:08:56.262135Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:56.262153Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:56.262162Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:56.262328Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:08:56.631771Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:59.876321Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:59.993059Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345725940500032:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:59.993174Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:59.993582Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345725940500044:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:59.997181Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:00.027464Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345725940500046:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:09:00.103686Z node 13 :TX_PROXY ERROR: Actor# [13:7494345730235467409:2792] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:00.229949Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f15jq128zh8s4h4bhhe6n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:00.454060Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f15tz73hzw6jznxeswb76, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:00.611190Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2f161c1bndstqrsahhyrxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:00.746344Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494345708760629624:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:00.746424Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:00.778907Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2f166n93h6se476601f1ha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:00.932050Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2f16bp7bt32zdd7xc6cs5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:01.075364Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2f16gbe5qde29p5v1sy379, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:01.262347Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2f16my973q0wdxspccajat, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:01.536170Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2f16tk5jpqacpfw0djv399, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:09:01.867644Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01js2f1737868hf8v32wntmtyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:02.156561Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2f17dh5yaddj3b93qefnyv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjZkNDVjMjgtNThlODc0MTYtZmZjNTgwNmQtZjQ2ZTljOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2025-04-17T18:08:36.723683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345628737237065:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:36.723758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f2/r3tmp/tmpmSjoCL/pdisk_1.dat 2025-04-17T18:08:37.317069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:37.317152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:37.319751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:37.354922Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16647, node 1 2025-04-17T18:08:37.499544Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:37.499693Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:37.545608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:37.545638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:37.545647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:37.545763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:08:37.909381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:40.024390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345645917107050:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.024605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.360961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:40.511829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345645917107227:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.511911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.512136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345645917107232:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:40.515503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:40.539454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345645917107234:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:40.625937Z node 1 :TX_PROXY ERROR: Actor# [1:7494345645917107318:2799] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:40.711723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0g3vayjr4spyaz2wppnh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE0Y2E4ZTYtNmJkMzUyODAtN2Q5MWYwN2UtZjE3NzM4NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:40.832431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f0js58w8hwg0nsdh1593z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI0MzkzOTMtM2U2YzgxN2ItMzkwYzcxOTEtNjM2ZDQyYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:40.838072Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913320864, txId: 281474976710662] shutting down 2025-04-17T18:08:42.367202Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345655775118794:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:42.367258Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f2/r3tmp/tmpXPlBwL/pdisk_1.dat 2025-04-17T18:08:42.929322Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:42.968036Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:42.968105Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:42.975320Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27492, node 4 2025-04-17T18:08:43.174895Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:43.174916Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:43.174923Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:43.175027Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12830 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:43.396821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:43.414459Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:08:45.559137Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:27492 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid DyNumber string representation 2025-04-17T18:08:47.287100Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345677833361839:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.287150Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f2/r3tmp/tmpwPRVbk/pdisk_1.dat 2025-04-17T18:08:47.528935Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7987, node 7 2025-04-17T18:08:47.631808Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:47.631917Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:47.655247Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:47.664635Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:47.664657Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:47.664671Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:47.664810Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:47.933809Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:50.759828Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:50.861072Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345690718264959:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:50.861140Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345690718264967:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:50.861192Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:50.864209Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:50.890693Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345690718264973:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:50.966584Z node 7 :TX_PROXY ERROR: Actor# [7:7494345690718265044:2800] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:51.278866Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0wnbb8x463hgdmkbhtm5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzU2NzAzMzEtYWQ5ZTc5MjQtNzMyYmVhMmQtYTE4MDA4NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:53.205761Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345701882523049:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:53.205801Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f2/r3tmp/tmpuTCGvs/pdisk_1.dat 2025-04-17T18:08:53.603739Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:53.651717Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:53.651797Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:53.657602Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3011, node 10 2025-04-17T18:08:53.842712Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:53.842730Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:53.842739Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:53.842851Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:54.182140Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
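[editor's note] The "Resource pool default not found" warnings and the "Transaction ... completed, doublechecking" / "path exist, request accepts it" pair above trace the workload manager's lazy bootstrap of /Root/.metadata/workload_manager/pools/default: the first query misses the pool, a creator actor issues the CREATE, concurrent creators race, and the loser treats the already-existing path as success before re-reading it. A sketch of the same idempotent create-then-verify pattern from the client side, assuming the C++ SDK's query client and YQL's CREATE RESOURCE POOL statement (the pool setting shown is illustrative, not what the server actually uses):

    #include <ydb/public/sdk/cpp/client/ydb_query/client.h>
    #include <util/stream/output.h>

    void EnsureDefaultPool(NYdb::NQuery::TQueryClient& client) {
        // Fire the create unconditionally; losing the race to a concurrent
        // creator surfaces as a "path exist, request accepts it" style issue,
        // which an idempotent bootstrap treats as success and then re-checks
        // (the "doublechecking" step in the log).
        auto result = client.ExecuteQuery(
            "CREATE RESOURCE POOL `default` WITH (CONCURRENT_QUERY_LIMIT = -1);",
            NYdb::NQuery::TTxControl::NoTx()).GetValueSync();
        Cout << result.GetIssues().ToString() << Endl;
    }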
2025-04-17T18:08:56.658649Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
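[editor's note] This rejection, and the "unknown table" probes against /Root/ui8/Value_index/indexImplTable that follow, pin down BulkUpsert's index contract: because bulk upsert bypasses the distributed-transaction path, it can only keep secondary indexes consistent when they are maintained asynchronously, and the hidden index implementation table is never writable directly. A sketch of the distinguishing DDL, assuming the C++ SDK's scheme-query API and standard YQL index syntax (the /Root/ui8 column layout is an assumption here):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    // With GLOBAL ASYNC the table accepts BulkUpsert; with GLOBAL SYNC the
    // same request would fail with "Only async-indexed tables are supported".
    NYdb::TStatus CreateBulkUpsertableTable(NYdb::NTable::TTableClient& client) {
        return client.RetryOperationSync([](NYdb::NTable::TSession session) {
            return session.ExecuteSchemeQuery(R"(
                CREATE TABLE `/Root/ui8` (
                    Key Uint32,
                    Value Uint8,
                    PRIMARY KEY (Key),
                    INDEX Value_index GLOBAL ASYNC ON (Value)
                );
            )").GetValueSync();
        });
    }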
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-04-17T18:08:58.651944Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345721085371796:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:58.652196Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f2/r3tmp/tmpc3njQx/pdisk_1.dat 2025-04-17T18:08:58.955095Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:58.987482Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:58.987558Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:59.001154Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12552, node 13 2025-04-17T18:08:59.135124Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:59.135148Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:59.135156Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:59.135301Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:59.280929Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:02.058599Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] Test command err: Trying to start YDB, gRPC: 25790, MsgBus: 12256 2025-04-17T18:04:48.667806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344649770704216:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:48.668314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bd0/r3tmp/tmpK2yoFG/pdisk_1.dat 2025-04-17T18:04:49.142199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:49.142322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:49.142637Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:49.146311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25790, node 1 2025-04-17T18:04:49.303224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:49.303250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:49.303256Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:49.303386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:12256 TClient is connected to server localhost:12256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:49.899396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:49.918877Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:04:49.929903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:50.122823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:50.290027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:50.373717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:52.112140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344666950575204:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.112260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.467196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.537722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.570350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.654932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.685524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.763630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.826937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:52.925082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344666950575883:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.925231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.925488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344666950575890:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:52.929101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:52.945425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344666950575892:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:04:53.013332Z node 1 :TX_PROXY ERROR: Actor# [1:7494344671245543242:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:53.662395Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344649770704216:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:53.662470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3163, MsgBus: 27930 2025-04-17T18:04:54.725057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494344676366310810:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:54.725146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bd0/r3tmp/tmp78Wiq3/pdisk_1.dat 2025-04-17T18:04:54.861846Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:54.875320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:54.875401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:54.876861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3163, node 2 2025-04-17T18:04:54.921503Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:54.921529Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:54.921538Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:54.921635Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27930 TClient is connected to server localhost:27930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:04:55.303827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.309932Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.315258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:55.391029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.534070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:55.585798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:57.670333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344689251214430:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.670422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:57.725610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.794516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.874391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.910330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:04:57.941695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.018253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.087783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:04:58.169093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344693546182401:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.169266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.169495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494344693546182406:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:58.172926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:04:58.183999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494344693546182408:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:04:58.258386Z node 2 :TX_PROXY ERROR: Actor# [2:7494344693546182465:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:04:59.731084Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494344676366310810:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:59.731141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:05:09.834483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:05:09.834524Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 27343, MsgBus: 12606 2025-04-17T18:09:00.479048Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345733787589644:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:00.481810Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bd0/r3tmp/tmpbJnS0f/pdisk_1.dat 2025-04-17T18:09:00.660778Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:00.693750Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:00.693849Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:00.695468Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27343, node 3 2025-04-17T18:09:00.824164Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:00.824190Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:00.824200Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:00.824388Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12606 TClient is connected to server localhost:12606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:09:01.375886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:09:04.372854Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345750967459487:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:04.372900Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345750967459471:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:04.372952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:04.377631Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:09:04.394798Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345750967459500:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:09:04.481260Z node 3 :TX_PROXY ERROR: Actor# [3:7494345750967459551:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:04.538714Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2025-04-17T18:09:05.851431Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.851583Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:3:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.871278Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.881919Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.889791Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.889918Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.907650Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2025-04-17T18:09:05.909973Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) 
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.910120Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:3:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.915713Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.917913Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.922621Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.922744Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-17T18:09:05.928178Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive |88.3%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] >> YdbLogStore::LogStore [GOOD] >> YdbLogStore::LogStoreNegative >> TYqlDecimalTests::NegativeValues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2025-04-17T18:08:37.321674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345633829285153:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:37.321717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f1/r3tmp/tmp2REu9s/pdisk_1.dat 2025-04-17T18:08:37.862919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:37.863007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:37.866279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:37.892196Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21025, node 1 2025-04-17T18:08:38.033861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:38.033885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:38.033894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:38.034019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:38.381784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:38.575399Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57450) has now valid token of root@builtin 2025-04-17T18:08:38.653438Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:38.653478Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:38.653487Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:38.653543Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:08:42.265415Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345653482656654:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:42.265780Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f1/r3tmp/tmpL4CUjX/pdisk_1.dat 2025-04-17T18:08:42.492838Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:42.556442Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:42.556531Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:42.567477Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1447, node 4 2025-04-17T18:08:42.726882Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:42.726913Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:42.726922Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:42.727095Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:43.002189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:43.181118Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:60312) has now valid token of root@builtin 2025-04-17T18:08:43.319454Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:43.319485Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:43.319494Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:43.319525Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:08:47.202877Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345675095118202:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.202968Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f1/r3tmp/tmpOK1LAW/pdisk_1.dat 2025-04-17T18:08:47.421860Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28522, node 7 2025-04-17T18:08:47.545750Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:47.545876Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:47.592659Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:47.640335Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:47.640365Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:47.640374Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:47.640542Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:47.913070Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744920527414105 Nodes { NodeId: 1024 Host: "localhost" Port: 19966 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744920527414105 } Nodes { NodeId: 7 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 8 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 9 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f1/r3tmp/tmpmbk7W6/pdisk_1.dat 2025-04-17T18:08:52.813126Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:52.899661Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:52.946296Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:52.946369Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:52.958285Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20650, node 10 2025-04-17T18:08:53.302244Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:53.302265Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:53.302273Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:53.302403Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:53.672157Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: UNAUTHORIZED Reason: "Cannot authorize node. 
Access denied" } 2025-04-17T18:08:57.926421Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345719027395971:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:57.926473Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f1/r3tmp/tmpXG2unF/pdisk_1.dat 2025-04-17T18:08:58.266070Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:58.320179Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:58.320267Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:58.335369Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30158, node 13 2025-04-17T18:08:58.583031Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:58.583055Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:58.583063Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:58.583196Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:59.031778Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:59.047890Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744920538226082 Nodes { NodeId: 1024 Host: "localhost" Port: 25084 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744920538226082 } Nodes { NodeId: 13 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 14 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 15 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-17T18:09:03.378549Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7494345743790135686:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.378640Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019f1/r3tmp/tmpcZFiUd/pdisk_1.dat 2025-04-17T18:09:03.598846Z node 16 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10538, node 16 2025-04-17T18:09:03.703717Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:03.703827Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:03.708514Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:03.735533Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:03.735553Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:03.735566Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:03.735688Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:09:04.027554Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node 2025-04-17T18:09:04.271628Z node 16 :TICKET_PARSER ERROR: Ticket 1D40C187EDC3A0407581F9F41E3303EAD1C9D4CC0D0DF215D096E314F9BB598B: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. Client certificate failed verification" } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! 
!Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 6:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! new actor is[44:101:2126] Leader for TabletID 72057594037927937 is [44:101:2126] sender: [44:155:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:97:2057] recipient: [45:36:2083] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:100:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:101:2057] recipient: [45:99:2125] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:103:2057] recipient: [45:99:2125] !Reboot 72057594037927937 (actor [45:56:2097]) rebooted! !Reboot 72057594037927937 (actor [45:56:2097]) tablet resolver refreshed! new actor is[45:102:2126] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:120:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:57:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:74:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:99:2057] recipient: [46:36:2083] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:102:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:103:2057] recipient: [46:101:2127] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:105:2057] recipient: [46:101:2127] !Reboot 72057594037927937 (actor [46:56:2097]) rebooted! !Reboot 72057594037927937 (actor [46:56:2097]) tablet resolver refreshed! new actor is[46:104:2128] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:158:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:57:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:74:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:99:2057] recipient: [47:36:2083] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:102:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:103:2057] recipient: [47:101:2127] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:105:2057] recipient: [47:101:2127] !Reboot 72057594037927937 (actor [47:56:2097]) rebooted! !Reboot 72057594037927937 (actor [47:56:2097]) tablet resolver refreshed! new actor is[47:104:2128] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:158:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:57:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:74:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:100:2057] recipient: [48:36:2083] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:103:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:104:2057] recipient: [48:102:2127] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:106:2057] recipient: [48:102:2127] !Reboot 72057594037927937 (actor [48:56:2097]) rebooted! !Reboot 72057594037927937 (actor [48:56:2097]) tablet resolver refreshed! new actor is[48:105:2128] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:123:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:57:2057] recipient: [49:50:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:74:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:102:2057] recipient: [49:36:2083] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:105:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:106:2057] recipient: [49:104:2129] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:108:2057] recipient: [49:104:2129] !Reboot 72057594037927937 (actor [49:56:2097]) rebooted! !Reboot 72057594037927937 (actor [49:56:2097]) tablet resolver refreshed! new actor is[49:107:2130] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:161:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:52:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:57:2057] recipient: [50:52:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:74:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:102:2057] recipient: [50:36:2083] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:105:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:106:2057] recipient: [50:104:2129] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:108:2057] recipient: [50:104:2129] !Reboot 72057594037927937 (actor [50:56:2097]) rebooted! !Reboot 72057594037927937 (actor [50:56:2097]) tablet resolver refreshed! new actor is[50:107:2130] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:161:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:57:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:74:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:103:2057] recipient: [51:36:2083] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:105:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:107:2057] recipient: [51:106:2129] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:109:2057] recipient: [51:106:2129] !Reboot 72057594037927937 (actor [51:56:2097]) rebooted! !Reboot 72057594037927937 (actor [51:56:2097]) tablet resolver refreshed! new actor is[51:108:2130] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:126:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:52:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:57:2057] recipient: [52:52:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:74:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:105:2057] recipient: [52:36:2083] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:108:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:109:2057] recipient: [52:107:2131] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:111:2057] recipient: [52:107:2131] !Reboot 72057594037927937 (actor [52:56:2097]) rebooted! !Reboot 72057594037927937 (actor [52:56:2097]) tablet resolver refreshed! new actor is[52:110:2132] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:164:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:57:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:74:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:105:2057] recipient: [53:36:2083] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:108:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:109:2057] recipient: [53:107:2131] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:111:2057] recipient: [53:107:2131] !Reboot 72057594037927937 (actor [53:56:2097]) rebooted! !Reboot 72057594037927937 (actor [53:56:2097]) tablet resolver refreshed! new actor is[53:110:2132] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:164:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:50:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:57:2057] recipient: [54:50:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:74:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:106:2057] recipient: [54:36:2083] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:109:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:110:2057] recipient: [54:108:2131] Leader for TabletID 72057594037927937 is [54:111:2132] sender: [54:112:2057] recipient: [54:108:2131] !Reboot 72057594037927937 (actor [54:56:2097]) rebooted! !Reboot 72057594037927937 (actor [54:56:2097]) tablet resolver refreshed! new actor is[54:111:2132] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:57:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:74:2057] recipient: [55:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2025-04-17T18:08:43.555630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345660385674896:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:43.555681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e6/r3tmp/tmpSqBPsq/pdisk_1.dat 2025-04-17T18:08:44.002696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:44.002794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:44.008896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:44.031488Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15967, node 1 2025-04-17T18:08:44.118270Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:44.118291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:44.118297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:44.118397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
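
Note: the TKeyValueTest trace above follows one restart-injection template, repeated per event type: arrange a reboot of tablet 72057594037927937 exactly when a given event (TEvServerConnected, TEvRequest, TEvIntermediate, TEvNotify, TEvReadRange, ...) is handled, wait for "!Reboot ... rebooted!" and "tablet resolver refreshed!", note the new actor id, and verify the scenario still completes. A schematic of that loop shape, with the loud assumption that this Python stand-in only mirrors the structure — the real harness is the C++ unit test under ydb/core/keyvalue/ut:

    EVENTS = ["TEvServerConnected", "TEvRequest", "TEvIntermediate",
              "TEvNotify", "TEvReadRange"]

    class Tablet:
        def __init__(self):
            self.generation = 0   # bumped on every reboot -> new actor id
            self.store = {}       # persistent state: must survive reboots

    def run_scenario(tablet, reboot_on):
        for ev in EVENTS:
            if ev == reboot_on:
                tablet.generation += 1    # inject the reboot at this event
            tablet.store[ev] = True       # the operation still completes

    for reboot_on in EVENTS:              # one full scenario per event type
        t = Tablet()
        run_scenario(t, reboot_on)
        assert all(t.store[ev] for ev in EVENTS)
        print(f"reboot on {reboot_on}: ok, generation now {t.generation}")
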
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:44.480051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:47.900750Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345676771926850:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.900813Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e6/r3tmp/tmprI0O8M/pdisk_1.dat 2025-04-17T18:08:48.114664Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:48.124854Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:48.124938Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:48.131821Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13592, node 4 2025-04-17T18:08:48.370922Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:48.370946Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:48.370954Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:48.371294Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:48.643897Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:48.667347Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:52.699837Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345696477672820:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:52.699893Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e6/r3tmp/tmppV1GRv/pdisk_1.dat 2025-04-17T18:08:53.120780Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:53.164890Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:53.164971Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:53.168892Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18452, node 7 2025-04-17T18:08:53.366931Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:53.366956Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:53.366964Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:53.367104Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:53.691434Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:58.316946Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345724206507437:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:58.316995Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e6/r3tmp/tmptvVeGO/pdisk_1.dat 2025-04-17T18:08:58.504326Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:58.547660Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:58.547756Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:58.553919Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5422, node 10 2025-04-17T18:08:58.722857Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:58.722875Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:58.722881Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:58.723026Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:59.039008Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:59.056842Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:03.733939Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345744673916898:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.734005Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e6/r3tmp/tmpEExtXF/pdisk_1.dat 2025-04-17T18:09:03.992062Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4825, node 13 2025-04-17T18:09:04.109405Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:04.109519Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:04.153294Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:04.203037Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:04.203065Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:04.203075Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:04.203281Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
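
Note: every per-node bootstrap in these runs emits the same METADATA_PROVIDER / NET_CLASSIFIER / HIVE warnings, differing only in timestamp, node id, and temp paths; masking the volatile fields collapses them into a handful of templates, which makes genuine anomalies easier to spot. A minimal sketch, assuming ad-hoc triage in Python rather than any project tooling:

    import re

    lines = [
        '2025-04-17T18:08:58.722857Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)',
        '2025-04-17T18:09:04.203037Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)',
    ]

    def template(line):
        line = re.sub(r'^\S+Z\s+', '<ts> ', line)       # mask the timestamp
        line = re.sub(r'node \d+', 'node <n>', line)    # mask the node id
        return line

    print(len({template(l) for l in lines}), 'distinct template(s)')  # -> 1
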
2025-04-17T18:09:04.554361Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 >> YdbYqlClient::TestReadTableBatchLimits [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::NegativeValues [GOOD] Test command err: 2025-04-17T18:08:41.370735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345651616483472:2232];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:41.371874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e7/r3tmp/tmpZImMA9/pdisk_1.dat 2025-04-17T18:08:41.781685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:41.781774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:41.803608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24681, node 1 2025-04-17T18:08:41.902394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:41.926815Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:41.926840Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:42.034696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:42.034718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:42.034725Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:42.034811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:42.453716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:44.629738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.012468s 2025-04-17T18:08:44.781956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345664501386378:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:44.782033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345664501386386:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:44.782102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:44.785702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:44.803073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345664501386392:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:44.863489Z node 1 :TX_PROXY ERROR: Actor# [1:7494345664501386471:2816] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:45.490409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0pqb8xemdc6wpwcs4k7w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAwNTdmMmMtOTE3ZjQ3ZTEtNTgzNjc3NDMtODYwZWYyMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e7/r3tmp/tmpbmq9Oa/pdisk_1.dat 2025-04-17T18:08:47.145782Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:47.225555Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3608, node 4 2025-04-17T18:08:47.310826Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:47.310851Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:47.310862Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:47.310995Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:47.353654Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:47.353757Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:47.376787Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:47.553940Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
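
Note: the default-resource-pool sequence above is a textbook create-if-missing race: the fetcher reports NOT_FOUND, a creator issues ESchemeOpCreateResourcePool, and the doublecheck then hits "path exist, request accepts it" because a concurrent creator won; the ERROR-severity line is noisy but benign. A sketch of the idempotent shape, with hypothetical names (catalog, ensure_pool) that are not the YDB workload-manager API:

    class AlreadyExists(Exception):
        pass

    catalog = set()

    def create_pool(name):
        if name in catalog:
            raise AlreadyExists(name)
        catalog.add(name)

    def ensure_pool(name):
        if name in catalog:
            return 'found'            # fast path: pool already there
        try:
            create_pool(name)         # NOT_FOUND -> create it ourselves
            return 'created'
        except AlreadyExists:
            return 'raced'            # a concurrent creator won: benign

    print(ensure_pool('default'))     # created
    print(ensure_pool('default'))     # found
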
2025-04-17T18:08:49.983965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:51.955822Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345692389000007:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:51.955864Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e7/r3tmp/tmpd094Zf/pdisk_1.dat 2025-04-17T18:08:52.159648Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14198, node 7 2025-04-17T18:08:52.318989Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:52.319088Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:52.352503Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:52.374888Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:52.374913Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:52.374927Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:52.375074Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:52.675573Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:55.261959Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:57.259393Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345719752648634:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:57.259447Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e7/r3tmp/tmpMhIpe6/pdisk_1.dat 2025-04-17T18:08:57.517242Z node 1 ... ig is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:57.695274Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:57.695283Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:57.695420Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:57.996473Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:00.820109Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:00.912811Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345732637551715:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:00.912878Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345732637551723:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:00.912895Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:00.917085Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:00.935764Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345732637551729:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:09:01.006066Z node 10 :TX_PROXY ERROR: Actor# [10:7494345736932519100:2781] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:01.164462Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f16ff5j52cg2xkvd2bb94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Y2FkOGQ1ZTEtYzM0MDVmNy0yY2E1YzI3Ny04NjRmNzE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:01.307168Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f16r19bwkbs2mmp1b6fp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Y2FkOGQ1ZTEtYzM0MDVmNy0yY2E1YzI3Ny04NjRmNzE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:01.487876Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f16w216rm2pgamdq6jy6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Y2FkOGQ1ZTEtYzM0MDVmNy0yY2E1YzI3Ny04NjRmNzE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:01.600042Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f171p5acf254pz41x0f4z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Y2FkOGQ1ZTEtYzM0MDVmNy0yY2E1YzI3Ny04NjRmNzE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:01.772111Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2f1756f5h7nzpw28v3tqwq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Y2FkOGQ1ZTEtYzM0MDVmNy0yY2E1YzI3Ny04NjRmNzE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:09:03.552386Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345743575748188:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.552777Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e7/r3tmp/tmpzHolW6/pdisk_1.dat 2025-04-17T18:09:03.792698Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19884, node 13 2025-04-17T18:09:03.869616Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:03.869710Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:03.916475Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:03.917338Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:03.917348Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:03.917357Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:03.917487Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:04.292355Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:06.939319Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:07.062365Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345760755618588:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.062475Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.066320Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345760755618600:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.070916Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:07.090004Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345760755618602:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:09:07.192261Z node 13 :TX_PROXY ERROR: Actor# [13:7494345760755618679:2803] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:07.294369Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f1cfj53qw5qs8pa2e4b50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTQ4NDU1OWYtMjAxZDE3ZDctMWEzMzQ2NzMtNjU4MTFmMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:07.441837Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f1cqe040jf5vchckwr86c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTQ4NDU1OWYtMjAxZDE3ZDctMWEzMzQ2NzMtNjU4MTFmMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:07.605896Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f1cvtepanx37dbrg33smz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTQ4NDU1OWYtMjAxZDE3ZDctMWEzMzQ2NzMtNjU4MTFmMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:07.733410Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f1d0z6seznpzy2h5vhjt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTQ4NDU1OWYtMjAxZDE3ZDctMWEzMzQ2NzMtNjU4MTFmMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:07.915027Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2f1d527tmp9csc3tra1z1n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTQ4NDU1OWYtMjAxZDE3ZDctMWEzMzQ2NzMtNjU4MTFmMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD]
Test command err:
2025-04-17T18:08:43.519637Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345657173329798:2072];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:43.519691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d8/r3tmp/tmph9gSWG/pdisk_1.dat
2025-04-17T18:08:44.039075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:08:44.039159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:08:44.042405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:08:44.053793Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 4577, node 1
2025-04-17T18:08:44.146766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:08:44.146788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:08:44.146794Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:08:44.146926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4644
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:08:44.477210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:08:44.621897Z node 1 :TX_PROXY ERROR: Actor# [1:7494345661468298034:2608] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-17T18:08:47.898070Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345676153065428:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.986701Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d8/r3tmp/tmproUDsK/pdisk_1.dat 2025-04-17T18:08:48.150967Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12495, node 4 2025-04-17T18:08:48.283568Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:48.283661Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:48.370391Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:48.481214Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:48.481234Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:48.481240Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:48.481345Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:48.791360Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:53.225118Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345703177402894:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:53.225241Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d8/r3tmp/tmpl7ilqE/pdisk_1.dat 2025-04-17T18:08:53.446783Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:53.482722Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:53.482796Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:53.485702Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4421, node 7 2025-04-17T18:08:53.790831Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:53.790850Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:53.790857Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:53.790963Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:54.153299Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:56.492518Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345716062305811:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:56.492618Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:56.870856Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:57.016046Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345720357273290:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.016119Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.016412Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345720357273295:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.020674Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:57.042443Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345720357273297:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:57.122564Z node 7 :TX_PROXY ERROR: Actor# [7:7494345720357273372:2798] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:57.315431Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f12xy437n97qy36vaqmcb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Zjc4OGJmMmQtOWQzYzNiYWUtYjk3YTE1NjctYzBjMDQ0MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:57.471978Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f133y1xrrcwhmhvv96ezf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Zjc4OGJmMmQtOWQzYzNiYWUtYjk3YTE1NjctYzBjMDQ0MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:59.410476Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345726368475584:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:59.410564Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d8/r3tmp/tmpY2MIYB/pdisk_1.dat 2025-04-17T18:08:59.651491Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5776, node 10 2025-04-17T18:08:59.780508Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:59.780595Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:59.834580Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:59.958777Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:59.958807Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:59.958834Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:59.958973Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:00.266267Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:02.765355Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345739253378498:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.765388Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345739253378489:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.765451Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.768641Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:09:02.786734Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345739253378503:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:09:02.886327Z node 10 :TX_PROXY ERROR: Actor# [10:7494345739253378577:2671] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:03.010620Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7494345739253378621:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:09:03.012036Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NmQwODA4M2YtOWUwODQ0Yy00YzMyZTA1YS1lN2RiYTM0Yg==, ActorId: [10:7494345739253378462:2331], ActorState: ExecuteState, TraceId: 01js2f18ga97th40ajytcj4fd6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:09:04.448968Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345748917111137:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:04.449014Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d8/r3tmp/tmpknPTn2/pdisk_1.dat 2025-04-17T18:09:04.577491Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:04.601748Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:04.601810Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:04.605906Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20198, node 13 2025-04-17T18:09:04.662768Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:04.662791Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:04.662798Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:04.662916Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:04.942932Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:05.005326Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:05.109090Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:07.526835Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345761802014296:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.526963Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.527788Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345761802014308:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.532848Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:09:07.565342Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345761802014310:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-04-17T18:09:07.641030Z node 13 :TX_PROXY ERROR: Actor# [13:7494345761802014384:2896] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:09:07.760917Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f1cy44anhg62dbnptzan9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NzNmYTZiZDEtOGEwMzFjZjEtYTcyYzMyY2ItODc2NzM1NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T18:09:07.847127Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2f1d5v08fgcysm1hv8ad7e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NzNmYTZiZDEtOGEwMzFjZjEtYTcyYzMyY2ItODc2NzM1NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD]
Test command err:
2025-04-17T18:08:49.200008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345686339206697:2082];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:49.221502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b0/r3tmp/tmpKCL61f/pdisk_1.dat
2025-04-17T18:08:49.667232Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:08:49.689377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:08:49.689480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:08:49.699248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3189, node 1
2025-04-17T18:08:49.986134Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:08:49.986155Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:08:49.986160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:08:49.986267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27721
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:50.358803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:52.675882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345699224109605:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:52.676002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:52.979267Z node 1 :TX_PROXY ERROR: Actor# [1:7494345699224109644:2642] txid# 281474976710658, issues: { message: "Column Key has wrong key type Json" severity: 1 } 2025-04-17T18:08:53.027933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345703519076950:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:53.028107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:53.035206Z node 1 :TX_PROXY ERROR: Actor# [1:7494345703519076957:2652] txid# 281474976710659, issues: { message: "Column Key has wrong key type Yson" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b0/r3tmp/tmpu9pAbK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2410, node 4 TClient is connected to server localhost:19615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b0/r3tmp/tmpxUV6la/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22948, node 7 TClient is connected to server localhost:8373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:09:04.178580Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345747567510165:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:04.178686Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b0/r3tmp/tmpOu0mnX/pdisk_1.dat 2025-04-17T18:09:04.513559Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:04.567377Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:04.567464Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:04.569543Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1332, node 10 2025-04-17T18:09:04.718242Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:04.718263Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:04.718272Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:04.718400Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:04.986997Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:07.682451Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345760452413071:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.682518Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.682598Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345760452413083:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:07.685121Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:09:07.721552Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345760452413085:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T18:09:07.811308Z node 10 :TX_PROXY ERROR: Actor# [10:7494345760452413171:2679] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet
>> KqpErrors::ResolveTableError
>> TExternalDataSourceTest::CreateExternalDataSourceWithProperties
>> TExternalDataSourceTest::CreateExternalDataSource
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource
>> KqpErrors::ProposeError
>> TExternalDataSourceTest::SchemeErrors
>> Yq_1::ModifyConnections [GOOD]
>> Yq_1::ModifyQuery
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL]
Test command err:
2025-04-17T18:08:30.506642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345601879527444:2179];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:30.507075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fc/r3tmp/tmp9iddFI/pdisk_1.dat
2025-04-17T18:08:31.066801Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:08:31.091238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:08:31.091331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:08:31.097594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 21248, node 1
2025-04-17T18:08:31.391228Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:08:31.391255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:08:31.391266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:08:31.391551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22628
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:31.720661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:31.751826Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:22628 2025-04-17T18:08:33.879043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:34.128297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:34.128433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-17T18:08:34.130327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), operation: ALTER TABLE, path: Root/Foo/TimestampIndex/indexImplTable 2025-04-17T18:08:34.130561Z node 1 :TX_PROXY ERROR: Actor# [1:7494345619059397937:2937] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/Foo/TimestampIndex/indexImplTable\', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Error 128: Administrative access denied TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913314060 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 
0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... (TRUNCATED) 2025-04-17T18:08:34.159769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710660:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-04-17T18:08:34.159910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:08:34.159928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-17T18:08:34.160303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:08:34.160318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-17T18:08:34.165958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable waiting... 2025-04-17T18:08:34.222234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913314263, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:08:34.237559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-17T18:08:34.237605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913314060 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... 
(TRUNCATED) 2025-04-17T18:08:34.242871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710661:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-04-17T18:08:34.242980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:08:34.242995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-04-17T18:08:34.243300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:08:34.243320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:34.251340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable 2025-04-17T18:08:34.274084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913314319, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:08:34.285149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:0 2025-04-17T18:08:34.285193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:1 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913314060 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... (TRUNCATED) 2025-04-17T18:08:34.290827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710662:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at sch ... 25-04-17T18:08:41.425109Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:41.425118Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:41.425290Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:41.428270Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:41.687570Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:41.773457Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:41.947829Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:08:42.163549Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T18:08:45.906793Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345667208846670:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:45.906866Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fc/r3tmp/tmptP2YU1/pdisk_1.dat 2025-04-17T18:08:46.076114Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:46.114613Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:46.114718Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:46.117600Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15661, node 10 2025-04-17T18:08:46.266538Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:46.266562Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:46.266570Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:46.266735Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61446 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:46.535001Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:50.993898Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345687230904602:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:50.993976Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019fc/r3tmp/tmpkqldKQ/pdisk_1.dat 2025-04-17T18:08:51.164928Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:51.205284Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:51.205379Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:51.208098Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19333, node 13 2025-04-17T18:08:51.384221Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:51.384247Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:51.384254Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:51.384396Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:08:51.671556Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:08:51.791089Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:40634" , at schemeshard: 72057594046644480 2025-04-17T18:08:51.791584Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:51.791616Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-04-17T18:08:51.795175Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:08:51.795327Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-17T18:08:51.795565Z node 13 :TX_PROXY ERROR: Actor# [13:7494345691525872814:2601] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: 
(PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture()+28 (0x1C9935BC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CE50670)
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8673 (0x1C4C5C71)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1C4EEAF8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CE87866)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CE571F9)
NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C4EDCC4)
NUnitTest::TTestFactory::Execute()+2438 (0x1CE58AC6)
NUnitTest::RunMain(int, char**)+5213 (0x1CE81DDD)
??+0 (0x7F0D00609D90)
__libc_start_main+128 (0x7F0D00609E40)
_start+41 (0x19302029)
>> KqpBatchUpdate::ManyPartitions_2 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD]
Test command err:
2025-04-17T18:08:47.452093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345677878670755:2205];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:08:47.453838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ca/r3tmp/tmp9SXhVE/pdisk_1.dat
2025-04-17T18:08:47.907381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:08:47.907452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:08:47.916270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:08:47.947522Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25553, node 1
2025-04-17T18:08:47.987590Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T18:08:47.987621Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T18:08:48.127202Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:08:48.127221Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:08:48.127231Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:08:48.129613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12834
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:48.488976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:48.508605Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:48.562414Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01js2f0tdga1vhbtz860ga43s9, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33180, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.982796s 2025-04-17T18:08:48.599950Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01js2f0te34fhtqarp502rsm2d, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33182, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:50.671487Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01js2f0wffdk95zgptx9xhjhyx, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33184, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:50.672248Z node 1 :TX_PROXY DEBUG: actor# [1:7494345677878670860:2141] Handle TEvProposeTransaction 2025-04-17T18:08:50.672297Z node 1 :TX_PROXY DEBUG: actor# [1:7494345677878670860:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:08:50.672356Z node 1 :TX_PROXY DEBUG: actor# [1:7494345677878670860:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345690763573552:2636] 2025-04-17T18:08:50.741040Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:33184" 2025-04-17T18:08:50.741102Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:08:50.741490Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 
HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:08:50.741584Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:08:50.741749Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:08:50.741915Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:08:50.741960Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:08:50.742080Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:08:50.744441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:50.752301Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T18:08:50.752364Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345690763573552:2636] txid# 281474976710658 SEND to# [1:7494345690763573551:2336] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T18:08:50.754521Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:50.754619Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:50.754630Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:50.754765Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:50.814427Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573611:2693], Recipient [1:7494345690763573756:2347]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.815515Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573622:2704], Recipient [1:7494345690763573752:2343]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.816121Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573608:2690], Recipient [1:7494345690763573823:2353]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.816660Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573613:2695], Recipient [1:7494345690763573813:2351]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.817193Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573620:2702], Recipient [1:7494345690763573750:2341]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.817467Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573616:2698], Recipient [1:7494345690763573753:2344]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.817701Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573609:2691], Recipient [1:7494345690763573751:2342]: 
NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.822629Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573607:2689], Recipient [1:7494345690763573748:2339]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.823223Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573612:2694], Recipient [1:7494345690763573824:2354]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.823714Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573614:2696], Recipient [1:7494345690763573814:2352]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.827395Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573617:2699], Recipient [1:7494345690763573754:2345]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.827679Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345690763573614:2696], Recipient [1:7494345690763573814:2352]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:50.827936Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345690763573617:2699], Recipient [1:7494345690763573754:2345]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:50.828165Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037900 actor [1:7494345690763573814:2352] 2025-04-17T18:08:50.828186Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037897 actor [1:7494345690763573754:2345] 2025-04-17T18:08:50.828471Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:08:50.828474Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:08:50.840393Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345690763573611:2693], Recipient [1:7494345690763573756:2347]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:50.840441Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345690763573613:2695], Recipient [1:7494345690763573813:2351]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:50.840827Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037901 actor [1:7494345690763573813:2351] 2025-04-17T18:08:50.840836Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037903 actor [1:7494345690763573756:2347] 2025-04-17T18:08:50.841055Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:08:50.841090Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:08:50.853154Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7494345690763573615:2697], Recipient [1:7494345690763573758:2349]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:08:50.854344Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7494345690763573615:2697], Recipient [1:7494345690763573758:2349]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:08:50.854732Z node 1 :TX_DATASHARD I ... 
679] at 72075186224037897 is Continue 2025-04-17T18:09:09.294034Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:09:09.294040Z node 10 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037897 2025-04-17T18:09:09.294048Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-17T18:09:09.294055Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-17T18:09:09.294073Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-17T18:09:09.294236Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2025-04-17T18:09:09.294250Z node 10 :READ_TABLE_API DEBUG: [10:7494345771582249143:2401] Adding quota request to queue ShardId: 0, TxId: 281474976715678 2025-04-17T18:09:09.294271Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2025-04-17T18:09:09.294276Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7494345771582249238:2253], Recipient [10:7494345758697345403:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T18:09:09.294289Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T18:09:09.294306Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-04-17T18:09:09.294313Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715679, at: 72075186224037897 2025-04-17T18:09:09.294359Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7494345758697345403:2342], Recipient [10:7494345758697345403:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:09:09.294369Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:09:09.294388Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-17T18:09:09.294398Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:09:09.294409Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715679] at 72075186224037897 for ReadTableScan 2025-04-17T18:09:09.294417Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit ReadTableScan 2025-04-17T18:09:09.294427Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715679] at 72075186224037897 error: , IsFatalError: 0 2025-04-17T18:09:09.294440Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2025-04-17T18:09:09.294450Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit ReadTableScan 2025-04-17T18:09:09.294459Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit FinishPropose 2025-04-17T18:09:09.294471Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit FinishPropose 2025-04-17T18:09:09.294487Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is DelayComplete 2025-04-17T18:09:09.294510Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit 
FinishPropose 2025-04-17T18:09:09.294520Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit CompletedOperations 2025-04-17T18:09:09.294528Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit CompletedOperations 2025-04-17T18:09:09.294547Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2025-04-17T18:09:09.294557Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit CompletedOperations 2025-04-17T18:09:09.294565Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715679] at 72075186224037897 has finished 2025-04-17T18:09:09.294575Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:09:09.294583Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-17T18:09:09.294590Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-17T18:09:09.294597Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-17T18:09:09.294618Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-17T18:09:09.294627Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715679] at 72075186224037897 on unit FinishPropose 2025-04-17T18:09:09.294641Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715679 at tablet 72075186224037897 send to client, exec latency: 26 ms, propose latency: 26 ms, status: COMPLETE 2025-04-17T18:09:09.294687Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-04-17T18:09:09.294992Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345404:2343]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295021Z node 10 :READ_TABLE_API NOTICE: [10:7494345771582249143:2401] Finish grpc stream, status: 400000 2025-04-17T18:09:09.295035Z node 10 :READ_TABLE_API DEBUG: [10:7494345771582249143:2401] Send zero quota to Shard 0, TxId 281474976715678 2025-04-17T18:09:09.295045Z node 10 :READ_TABLE_API DEBUG: [10:7494345771582249143:2401] Send zero quota to Shard 0, TxId 281474976715678 2025-04-17T18:09:09.295052Z node 10 :READ_TABLE_API DEBUG: [10:7494345771582249143:2401] Send zero quota to Shard 0, TxId 281474976715678 2025-04-17T18:09:09.295060Z node 10 :READ_TABLE_API DEBUG: [10:7494345771582249143:2401] Send zero quota to Shard 0, TxId 281474976715678 2025-04-17T18:09:09.295069Z node 10 :READ_TABLE_API DEBUG: [10:7494345771582249143:2401] Send zero quota to Shard 0, TxId 281474976715678 2025-04-17T18:09:09.295163Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345414:2346]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295251Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345412:2344]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 
2025-04-17T18:09:09.295294Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345423:2347]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295365Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345428:2348]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295413Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345413:2345]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295465Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345396:2339]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295515Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345397:2340]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295572Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345402:2341]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.295622Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7494345771582249144:2401], Recipient [10:7494345758697345403:2342]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744913349305 TxId: 281474976715678 2025-04-17T18:09:09.299094Z node 10 :GRPC_SERVER DEBUG: [0x51a000080a80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299112Z node 10 :GRPC_SERVER DEBUG: [0x51a00003a880] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299316Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d1a80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299323Z node 10 :GRPC_SERVER DEBUG: [0x51a000190e80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299501Z node 10 :GRPC_SERVER DEBUG: [0x51a0000bdc80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299511Z node 10 :GRPC_SERVER DEBUG: [0x51a0000be880] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299655Z node 10 :GRPC_SERVER DEBUG: [0x51a000039c80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299661Z node 10 :GRPC_SERVER DEBUG: [0x51a00003c080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299815Z node 10 :GRPC_SERVER DEBUG: [0x51a000117c80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299828Z node 10 
:GRPC_SERVER DEBUG: [0x51a00017e280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.299994Z node 10 :GRPC_SERVER DEBUG: [0x51a000043e80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.300006Z node 10 :GRPC_SERVER DEBUG: [0x51a000027080] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.300149Z node 10 :GRPC_SERVER DEBUG: [0x51a000077480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.300184Z node 10 :GRPC_SERVER DEBUG: [0x51a000069080] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.300310Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d0280] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.300355Z node 10 :GRPC_SERVER DEBUG: [0x51a000109e80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:09:09.300463Z node 10 :GRPC_SERVER DEBUG: [0x51a00003cc80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::SchemeErrors [GOOD] >> YdbYqlClient::CreateTableWithMESettings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-17T18:09:11.827492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:09:11.827573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:09:11.827616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:09:11.827645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:09:11.827679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-04-17T18:09:11.827708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:09:11.827771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:09:11.827833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:09:11.828116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:09:11.912621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:09:11.912684Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:11.927590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:09:11.927682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:09:11.927826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:09:11.935050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:09:11.935418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:09:11.946323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:09:11.946581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:09:11.950779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:09:11.959100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:09:11.959159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:09:11.959277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:09:11.959335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:09:11.959376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:09:11.959510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:09:11.974812Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:09:12.156101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:09:12.156338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.156511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:09:12.156722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:09:12.156778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.159239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:09:12.159422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:09:12.159647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.159715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:09:12.159779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:09:12.159817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:09:12.161929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.161992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:09:12.162028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:09:12.163870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.163930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.163980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:09:12.164040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.167838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:09:12.169642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:09:12.169827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:09:12.170801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:09:12.170923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:09:12.170979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:09:12.171264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:09:12.171344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:09:12.171500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:09:12.171567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:09:12.173389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:09:12.173428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:09:12.173603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:09:12.173643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:09:12.173995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.174068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:09:12.174156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:09:12.174212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.174274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:09:12.174304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.174345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:09:12.174392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.174425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:09:12.174455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:09:12.174516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:09:12.174553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:09:12.174585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-04-17T18:09:12.176405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:09:12.176502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:09:12.176555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:09:12.176591Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:09:12.176630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:09:12.176720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:09:12.179510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:09:12.180057Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:09:12.181116Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Bootstrap 2025-04-17T18:09:12.196500Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Become StateWork (SchemeCache [1:275:2266]) 2025-04-17T18:09:12.198498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:09:12.198735Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-17T18:09:12.198802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-17T18:09:12.198842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-17T18:09:12.199483Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:09:12.201146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:09:12.201321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-04-17T18:09:12.202057Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:09:12.202256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:09:12.202291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:09:12.202578Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:09:12.202635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:09:12.202663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:285:2276] TestWaitNotification: OK eventTxId 101 2025-04-17T18:09:12.203060Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:09:12.203180Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 144us result status StatusPathDoesNotExist 2025-04-17T18:09:12.203325Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TGRpcYdbTest::BeginTxRequestError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 6915, MsgBus: 12325 2025-04-17T18:06:34.804930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345106091066804:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:34.805041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ce1/r3tmp/tmpWxYm26/pdisk_1.dat 2025-04-17T18:06:35.057312Z node 1 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6915, node 1 2025-04-17T18:06:35.123976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:35.123999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:35.124006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:35.124155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:06:35.131521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:35.131617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:35.133430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12325 TClient is connected to server localhost:12325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:35.567556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:35.601184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:35.713552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:35.834812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:35.910588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:37.068755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345118975970484:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.068842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.290109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.315748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.339841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.367254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.395720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.443790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.473215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:37.523275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345118975971147:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.523356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345118975971152:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.523363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:37.526251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:37.533661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345118975971154:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:37.629415Z node 1 :TX_PROXY ERROR: Actor# [1:7494345118975971209:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:38.597658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:39.805070Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345106091066804:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:39.805159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13612, MsgBus: 15551 2025-04-17T18:06:48.225956Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345163193922029:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:48.226087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ce1/r3tmp/tmpOZJjRC/pdisk_1.dat 2025-04-17T18:06:48.336627Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13612, node 2 2025-04-17T18:06:48.376714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:48.376827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:48.379460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:48.409347Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:48.409373Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:48.409379Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:48.409531Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15551 TClient is connected to server localhost:15551 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:48.799853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:48.813884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:48.888299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:49.041479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... k, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.345813Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.418572Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.469965Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.551569Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.633929Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.721408Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:08:46.826403Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494345672906590997:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:46.826522Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:46.826607Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494345672906591002:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:46.833646Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:08:46.851508Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494345672906591004:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:08:46.909889Z node 11 :TX_PROXY ERROR: Actor# [11:7494345672906591059:3563] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:48.854366Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 26342, MsgBus: 17915 2025-04-17T18:08:56.375161Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494345714483344435:2123];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.375286Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ce1/r3tmp/tmp2MIx76/pdisk_1.dat 2025-04-17T18:08:56.674851Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:56.704455Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:56.704594Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:56.706948Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26342, node 12 2025-04-17T18:08:56.864643Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:56.864672Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:56.864687Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:56.864879Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17915 TClient is connected to server localhost:17915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:57.978483Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:57.998074Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:08:58.137982Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:08:58.418415Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:08:58.552987Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:09:01.375212Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7494345714483344435:2123];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:09:01.375330Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:09:02.748460Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494345740253149928:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:09:02.748618Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:09:02.812255Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:09:02.905730Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:09:02.967342Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:09:03.022902Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:09:03.075074Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:09:03.121555Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:09:03.201643Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:09:03.355849Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494345744548117909:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:09:03.356035Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:09:03.356441Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494345744548117914:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:09:03.361724Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:09:03.374584Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494345744548117916:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:09:03.461898Z node 12 :TX_PROXY ERROR: Actor# [12:7494345744548117977:3569] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:09:05.165803Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141]
2025-04-17T18:09:11.854008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:09:11.854108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:09:11.854153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:09:11.854222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:09:11.854265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:09:11.854298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:09:11.854370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:09:11.854446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:09:11.854785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:09:11.933940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-04-17T18:09:11.934017Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:09:11.955415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:09:11.955521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T18:09:11.955683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T18:09:11.967320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T18:09:11.967636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T18:09:11.968361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:11.968565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T18:09:11.970447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:11.971698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:09:11.971755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:11.971903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T18:09:11.971962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:09:11.972024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T18:09:11.972215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T18:09:11.978865Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062]
2025-04-17T18:09:12.118549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:09:12.118805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.119034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T18:09:12.119283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T18:09:12.119352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.124136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:12.124282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T18:09:12.124475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.124536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T18:09:12.124570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T18:09:12.124601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T18:09:12.127189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.127255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T18:09:12.127289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T18:09:12.129075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.129129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.129180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.129252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.144947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T18:09:12.150984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T18:09:12.151186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T18:09:12.152195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:12.152327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:12.152377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.152670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T18:09:12.152731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.152897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:09:12.153006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T18:09:12.163108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:09:12.163156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:09:12.163327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:12.163366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T18:09:12.163733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.163784Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T18:09:12.163873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:09:12.163913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.163997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:09:12.164029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.164065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T18:09:12.164101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.164134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T18:09:12.164165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T18:09:12.164225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:09:12.164262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T18:09:12.164291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T18:09:12.166091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... e DataStream was not found" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:12.232224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: (NKikimr::NExternalSource::TExternalSourceException) External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource
TestModificationResult got TxId: 125, wait until txId: 125
TestModificationResults wait txId: 126
2025-04-17T18:09:12.235837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:09:12.236152Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" }
2025-04-17T18:09:12.236242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource
2025-04-17T18:09:12.236437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944
2025-04-17T18:09:12.239041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:12.239238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource
TestModificationResult got TxId: 126, wait until txId: 126
TestModificationResults wait txId: 127
2025-04-17T18:09:12.242480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:09:12.242832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-17T18:09:12.242946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-17T18:09:12.243115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-17T18:09:12.245376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:09:12.245546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got 
TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-17T18:09:12.248377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:09:12.248676Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-17T18:09:12.248754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-17T18:09:12.248901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-17T18:09:12.252511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:12.252692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource
TestModificationResult got TxId: 128, wait until txId: 128
TestModificationResults wait txId: 129
2025-04-17T18:09:12.255155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:09:12.255342Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } }
2025-04-17T18:09:12.255438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/
2025-04-17T18:09:12.255533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944
2025-04-17T18:09:12.257898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:12.258063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/
TestModificationResult got TxId: 129, wait until txId: 129
>> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD]
>> YdbScripting::BasicV1 [GOOD]
>> YdbScripting::MultiResults
>> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD]
>> TExternalDataSourceTest::DropExternalDataSource [GOOD]
>> Yq_1::ListConnections [GOOD]
>> Yq_1::ListConnectionsOnEmptyConnectionsTable
>> TGRpcYdbTest::KeepAlive [GOOD]
>> YdbTableBulkUpsert::Limits [GOOD]
>> YdbTableBulkUpsert::Overload
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56
>> PrivateApi::PingTask [GOOD]
>> PrivateApi::GetTask
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141]
2025-04-17T18:09:11.849355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:09:11.849470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:09:11.849515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:09:11.849552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:09:11.849593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:09:11.849624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:09:11.849684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:09:11.849751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:09:11.850055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:09:11.927485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-04-17T18:09:11.927538Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:09:11.938146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:09:11.938695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T18:09:11.938839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T18:09:11.948053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T18:09:11.948229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T18:09:11.948925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:11.949502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T18:09:11.951976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:11.957585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:09:11.957646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:11.957832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T18:09:11.957876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:09:11.957975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T18:09:11.958129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T18:09:11.963936Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T18:09:12.084323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:09:12.085708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.086528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T18:09:12.087672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T18:09:12.087755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.103130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:12.103274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T18:09:12.103499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.103632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T18:09:12.103672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T18:09:12.103708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T18:09:12.115058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.115139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T18:09:12.115183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T18:09:12.122961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.123029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.123074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.123127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.132969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T18:09:12.142295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T18:09:12.142545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T18:09:12.143565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:12.143714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:12.143772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.144079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T18:09:12.144142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.144351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:09:12.144459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T18:09:12.151226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:09:12.151280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:09:12.151491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:12.151540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T18:09:12.151854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.151905Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T18:09:12.152022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:09:12.152062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.152099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:09:12.152149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.152192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T18:09:12.152247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.152285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T18:09:12.152317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T18:09:12.152409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:09:12.152445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-17T18:09:12.152481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-17T18:09:12.158806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... Id: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:09:13.065006Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-04-17T18:09:13.065046Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6
2025-04-17T18:09:13.065082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:09:13.066310Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:09:13.066385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:09:13.066412Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-04-17T18:09:13.066447Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2
2025-04-17T18:09:13.066475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-17T18:09:13.066534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true
2025-04-17T18:09:13.068053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816
2025-04-17T18:09:13.068175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003
2025-04-17T18:09:13.068936Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:13.069050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 8589936748 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:13.069110Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003
2025-04-17T18:09:13.069200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-17T18:09:13.069293Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240
2025-04-17T18:09:13.069454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:09:13.069507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-17T18:09:13.070349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T18:09:13.070991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
FAKE_COORDINATOR: Erasing txId 102
2025-04-17T18:09:13.072038Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:09:13.072081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:09:13.072204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-17T18:09:13.072319Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:13.072345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-04-17T18:09:13.072375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-04-17T18:09:13.072643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-04-17T18:09:13.072682Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-04-17T18:09:13.072782Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-17T18:09:13.072810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-17T18:09:13.072846Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-17T18:09:13.072875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-17T18:09:13.072921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false
2025-04-17T18:09:13.072963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-17T18:09:13.072998Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-04-17T18:09:13.073029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-04-17T18:09:13.073087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-17T18:09:13.073121Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-04-17T18:09:13.073155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-04-17T18:09:13.073186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-04-17T18:09:13.073509Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:09:13.073583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:09:13.073610Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-04-17T18:09:13.073646Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-17T18:09:13.073687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-17T18:09:13.073981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-17T18:09:13.074020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-17T18:09:13.074071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:09:13.074302Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:09:13.074368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-17T18:09:13.074392Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-04-17T18:09:13.074416Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-04-17T18:09:13.074441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:09:13.074496Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-04-17T18:09:13.077559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-17T18:09:13.077979Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-17T18:09:13.078041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-04-17T18:09:13.078308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-17T18:09:13.078348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-17T18:09:13.078750Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-17T18:09:13.078833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T18:09:13.078881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:331:2322]
TestWaitNotification: OK eventTxId 102
2025-04-17T18:09:13.079320Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T18:09:13.079511Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 219us result status StatusPathDoesNotExist
2025-04-17T18:09:13.079694Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141]
2025-04-17T18:09:11.844256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:09:11.844372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:09:11.844408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:09:11.844431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:09:11.844465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:09:11.844493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:09:11.844560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:09:11.844619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:09:11.844909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:09:11.920205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-04-17T18:09:11.920277Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:09:11.934509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-17T18:09:11.934985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-17T18:09:11.935137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-17T18:09:11.948822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-17T18:09:11.948968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-17T18:09:11.949536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:11.951190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-17T18:09:11.955882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:11.957581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:09:11.957640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:11.957826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-17T18:09:11.957873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:09:11.957935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-17T18:09:11.958094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-17T18:09:11.963901Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-17T18:09:12.110550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-17T18:09:12.110809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.111045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-17T18:09:12.111325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-17T18:09:12.111393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.115505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:12.115646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-17T18:09:12.115840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.115922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-17T18:09:12.115959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-17T18:09:12.115993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-17T18:09:12.119285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.119348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-17T18:09:12.119419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-17T18:09:12.123280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.123342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.123396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.123448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.130864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-17T18:09:12.134781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-17T18:09:12.134934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-17T18:09:12.135954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-17T18:09:12.136087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:09:12.136140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.137292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-17T18:09:12.137375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-17T18:09:12.137626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-17T18:09:12.137725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-17T18:09:12.140413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-17T18:09:12.140465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-17T18:09:12.140647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-17T18:09:12.140695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-17T18:09:12.141091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-17T18:09:12.141143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-17T18:09:12.141268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:09:12.141338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.141405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-17T18:09:12.141471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.141512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-17T18:09:12.141550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-17T18:09:12.141586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-17T18:09:12.141618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-17T18:09:12.141673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-17T18:09:12.141711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:09:12.141743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:09:12.143607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... opId# 101:0 ProgressState 2025-04-17T18:09:12.872325Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:09:12.872363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:09:12.872408Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:09:12.872439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:09:12.872480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:09:12.872524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:09:12.872562Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:09:12.872597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:09:12.872675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:09:12.872720Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:09:12.872774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:09:12.872805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T18:09:12.873502Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.873587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.873619Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:09:12.873660Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:09:12.873712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:09:12.874793Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.874883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.874913Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:09:12.874940Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T18:09:12.874979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:09:12.875048Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:09:12.887396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:09:12.891091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:09:12.891362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:09:12.891404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:09:12.892045Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:09:12.892172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:09:12.892210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:303:2294] TestWaitNotification: OK eventTxId 101 2025-04-17T18:09:12.892733Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:09:12.892943Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 249us result status StatusSuccess 2025-04-17T18:09:12.893257Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-17T18:09:12.896522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:09:12.896829Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-04-17T18:09:12.896911Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-04-17T18:09:12.897050Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-17T18:09:12.903306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-17T18:09:12.903492Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:09:12.903790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:09:12.903835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:09:12.904228Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:09:12.904324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:09:12.904359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:311:2302] TestWaitNotification: OK eventTxId 102 2025-04-17T18:09:12.904808Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:09:12.905001Z node 2 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 213us result status StatusSuccess 2025-04-17T18:09:12.905304Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:128:2058] recipient: [1:109:2141] 2025-04-17T18:09:11.825120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:09:11.825237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:09:11.825291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:09:11.825326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:09:11.825981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:09:11.826026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:09:11.826142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:09:11.826229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:09:11.827161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:09:11.901688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:09:11.901769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:11.921962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:09:11.922647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:09:11.922835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:09:11.939808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:09:11.940028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:09:11.943531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:09:11.945396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:09:11.953542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:09:11.957629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:09:11.957709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:09:11.957969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:09:11.958042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:09:11.958102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:09:11.958286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:09:11.965328Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:09:12.116420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:09:12.116643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.116837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:09:12.117076Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:09:12.117137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.119745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:09:12.119893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:09:12.120090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.120145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:09:12.120180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:09:12.120212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:09:12.124002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.124057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:09:12.124095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:09:12.126397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.126470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.126515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:09:12.126566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.130471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:09:12.133934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:09:12.134943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:09:12.135957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:09:12.136088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:09:12.136150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:09:12.137293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:09:12.137392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:09:12.137576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:09:12.137690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:09:12.140374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:09:12.140421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:09:12.140645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:09:12.140695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:09:12.141124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:09:12.141175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:09:12.141293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:09:12.141329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.141368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:09:12.141420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.141465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:09:12.141522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:09:12.141563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:09:12.141598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:09:12.141666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:09:12.141702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:09:12.141735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:09:12.143406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
ation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:09:12.991790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:09:12.991818Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:09:12.991845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:09:12.991899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:09:12.991945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:09:12.991986Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-17T18:09:12.992015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:09:12.992040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T18:09:12.992060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T18:09:12.992953Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.993035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.993070Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:09:12.993103Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:09:12.993137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:09:12.994290Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.994379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.994407Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:09:12.994431Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T18:09:12.994454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:09:12.995203Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.995264Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:09:12.995287Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:09:12.995325Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T18:09:12.995351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:09:12.995405Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:09:12.998973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:09:12.999271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:09:13.000561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:09:13.000752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:09:13.000791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:09:13.001164Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:09:13.001278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:09:13.001317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:335:2326] TestWaitNotification: OK eventTxId 101 2025-04-17T18:09:13.001717Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:09:13.001889Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 206us result status StatusSuccess 2025-04-17T18:09:13.002164Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-17T18:09:13.008386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:09:13.008530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-04-17T18:09:13.008628Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-04-17T18:09:13.010722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:09:13.010858Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:09:13.011094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T18:09:13.011128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T18:09:13.011521Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T18:09:13.011608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:09:13.011643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:343:2334] TestWaitNotification: OK eventTxId 103 2025-04-17T18:09:13.012121Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:09:13.012271Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 186us result status StatusSuccess 2025-04-17T18:09:13.012526Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: 
"ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2025-04-17T18:08:47.451415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345674294357883:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.451497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d3/r3tmp/tmp0S0B3L/pdisk_1.dat 2025-04-17T18:08:47.961588Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:47.981927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:47.982015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:47.989590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5743, node 1 2025-04-17T18:08:48.107787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:48.107808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:48.107818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:48.107946Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23482 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:48.609383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:50.724561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:52.526597Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345696943395464:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:52.526682Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d3/r3tmp/tmpUGPqE4/pdisk_1.dat 2025-04-17T18:08:52.683582Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:52.725001Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:52.725092Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:52.728710Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1967, node 4 2025-04-17T18:08:52.868580Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:52.868600Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:52.868614Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:52.868743Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9461 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:53.120984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:55.555091Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:57.307861Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345717533042170:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:57.308144Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d3/r3tmp/tmp3piAv3/pdisk_1.dat 2025-04-17T18:08:57.612342Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:57.659181Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:57.659273Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:57.662833Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12956, node 7 2025-04-17T18:08:57.831984Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:57.832003Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:57.832010Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:57.832111Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19330 WaitRootIsUp 'Root'... 
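Similarly, the ESchemeOpCreateTable followed by several ESchemeOpAlterTable operations in the YdbYqlClient::CreateTableWithMESettings trace is how ordinary table DDL surfaces at the schemeshard. As a hedged illustration only (the test's actual column set and settings are not printed in this log, so the names below are hypothetical), the shape of such a sequence in YQL is:

CREATE TABLE `/Root/TheTable` (          -- one ESchemeOpCreateTable transaction
    Key Uint64,
    Value Utf8,
    PRIMARY KEY (Key)
);

-- Each subsequent ALTER arrives as its own ESchemeOpAlterTable transaction,
-- which is why the log shows consecutive opIds (…715659, …715660, …715661):
ALTER TABLE `/Root/TheTable` ADD COLUMN Extra Utf8;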
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:58.107681Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:01.035161Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:01.189917Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:01.253589Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:09:01.311808Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:09:03.115972Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345742823166098:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.116007Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d3/r3tmp/tmpZUkSeE/pdisk_1.dat 2025-04-17T18:09:03.256659Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:03.285009Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:03.285090Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:03.291804Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6580, node 10 2025-04-17T18:09:03.424792Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:03.424824Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:03.424831Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:03.425002Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:24665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:03.673567Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:06.189363Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:06.335111Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:06.344712Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-17T18:09:06.344746Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-17T18:09:08.204095Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345765879365812:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:08.204150Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d3/r3tmp/tmpkIxypR/pdisk_1.dat 2025-04-17T18:09:08.360549Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:08.407431Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:08.407517Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:08.415589Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22672, node 13 2025-04-17T18:09:08.550774Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:08.550797Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:08.550805Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:08.550937Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:08.831191Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:11.361087Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2025-04-17T18:08:47.814621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345676351718079:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:47.819419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c1/r3tmp/tmpo5L1WY/pdisk_1.dat 2025-04-17T18:08:48.363606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:48.385543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:48.390292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:48.404637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16666, node 1 2025-04-17T18:08:48.706732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:48.706753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:48.706758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:48.710325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23301 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:49.093900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:49.224184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:52.806647Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345695332382330:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:52.806721Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c1/r3tmp/tmpk7gUrC/pdisk_1.dat 2025-04-17T18:08:53.135414Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:53.174251Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:53.174332Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:53.178280Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32220, node 4 2025-04-17T18:08:53.374824Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:53.374842Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:53.374875Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:53.374985Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26804 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:53.699602Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:53.849525Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:53.931942Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.079553Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.165214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.238346Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.579531Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.667247Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.707777Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:08:58.017303Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345721246851150:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:58.017392Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c1/r3tmp/tmpvNMu0F/pdisk_1.dat 2025-04-17T18:08:58.190660Z node 7 :IMPORT WARN: Table profiles were not 
loaded 2025-04-17T18:08:58.229981Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:58.230073Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:58.234733Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15895, node 7 2025-04-17T18:08:58.466840Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:58.466863Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:58.466871Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:58.467007Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:58.747826Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:58.853529Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:03.165809Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345745309503174:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.165871Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c1/r3tmp/tmpowXznD/pdisk_1.dat 2025-04-17T18:09:03.349749Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:03.367992Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:03.368053Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:03.371747Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12423, node 10 2025-04-17T18:09:03.450649Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:03.450670Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:03.450676Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:03.450817Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:03.679673Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:03.729988Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:03.844779Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:04.123095Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-17T18:09:07.988820Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345762919320561:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:07.988914Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c1/r3tmp/tmpvXVc6Z/pdisk_1.dat 2025-04-17T18:09:08.255932Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21549, node 13 2025-04-17T18:09:08.339998Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:08.340095Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:08.394264Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:08.458283Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:08.458304Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:08.458325Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:08.458477Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:08.724317Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:11.602905Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345780099190790:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.602976Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345780099190782:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.603126Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.607859Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:09:11.646051Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345780099190796:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:09:11.727450Z node 13 :TX_PROXY ERROR: Actor# [13:7494345780099190874:2687] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:11.728633Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NmI3ZmM5YTUtYTIwYzk0NjQtZTBkNDljMTQtMWEwZDkzMDY=, ActorId: [13:7494345780099190779:2332], ActorState: ExecuteState, TraceId: 01js2f1gxh32m9q2r3jrzdtfze, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-17T18:09:11.732304Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NmI3ZmM5YTUtYTIwYzk0NjQtZTBkNDljMTQtMWEwZDkzMDY=, ActorId: [13:7494345780099190779:2332], ActorState: ExecuteState, TraceId: 01js2f1h1k5hm13cqzddjzrh4s, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-17T18:09:11.734274Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NmI3ZmM5YTUtYTIwYzk0NjQtZTBkNDljMTQtMWEwZDkzMDY=, ActorId: [13:7494345780099190779:2332], ActorState: ExecuteState, TraceId: 01js2f1h1n8bwjhdd1a5qwa51b, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> YdbYqlClient::TestExplicitPartitioning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-04-17T18:08:48.170858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345679203584845:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:48.170920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019bf/r3tmp/tmpRtKZu9/pdisk_1.dat 2025-04-17T18:08:48.683911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:48.684000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:48.708902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:48.713013Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27947, node 1 2025-04-17T18:08:48.884859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:48.884882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:48.884889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:48.885005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18964 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:49.252896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:49.370070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019bf/r3tmp/tmpLPNZ3i/pdisk_1.dat 2025-04-17T18:08:53.601914Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:53.759585Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:53.787741Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:53.787844Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:53.795040Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12587, node 4 2025-04-17T18:08:53.930866Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:53.930892Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:53.930900Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:53.931028Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24619 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:54.255518Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:54.383100Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01js2f103eav21q57h2qdm5jwz, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54888, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:54.386252Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.392769Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:54.392889Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:54.392904Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:54.392962Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:54.501886Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:54.501992Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:54.502025Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:54.502075Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:54.576841Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01js2f109gcj756b6s36zf66f0, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54888, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:57.049200Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01js2f12ps5z8g9rdzxv22pran, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54888, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:57.052872Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345719088993555:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.052993Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.053350Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345719088993567:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:57.058473Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:57.066326Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:57.066411Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:57.066598Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:57.066686Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:57.085860Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:57.085890Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:08:57.085982Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:57.087111Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:08:57.091733Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345719088993569:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:08:57.157885Z node 4 :TX_PROXY ERROR: Actor# [4:7494345719088993645:2810] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:57.587892Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f12ps5z8g9rdzxv22pran, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDhkOWU4MGUtYWUyOWJjMGItZjc4MjIwODgtN2E1MzgyY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:57.610714Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f138af3xdz529brq2fqwv, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54888, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:57.611101Z node 4 :READ_TABLE_API NOTICE: [4:7494345719088993703:2352] Finish grpc stream, status: 400010 2025-04-17T18:08:57.618487Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01js2f138j0q8e07x471n7sy62, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54888, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:08:57.633938Z node 4 :READ_TABLE_API DEBUG: [4:7494345719088993704:2353] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-04-17T18:08:57.633986Z node 4 :READ_TABLE_API DEBUG: [4:7494345719088993704:2353] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-04-17T18:08:57.634780Z node 4 :READ_TABLE_API DEBUG: [4:7494345719088993704:2353] got stream part, size: 290, RU required: 128 rate limiter absent 2025-04-17T18:08:57.635078Z node 4 :READ_TABLE_API DEBUG: [4:7494345719088993704:2353] Starting inactivity timer for 600.000000s with tag 3 2025-04-17T18:08:57.680704Z node 4 :READ_TABLE_API NOTICE: [4:7494345719088993704:2353] Finish grpc stream, status: 400000 2025-04-17T18:08:57.686522Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# ... 
am quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-04-17T18:08:57.731603Z node 4 :READ_TABLE_API DEBUG: [4:7494345719088993753:2357] got stream part, size: 244, RU required: 128 rate limiter absent 2025-04-17T18:08:57.731922Z node 4 :READ_TABLE_API DEBUG: [4:7494345719088993753:2357] Starting inactivity timer for 600.000000s with tag 3 2025-04-17T18:08:57.769700Z node 4 :READ_TABLE_API NOTICE: [4:7494345719088993753:2357] Finish grpc stream, status: 400000 2025-04-17T18:08:57.771959Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d2080] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.772172Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d0880] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.772368Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d2680] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.772529Z node 4 :GRPC_SERVER DEBUG: [0x51a0000cfc80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.772683Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d1480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.772825Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a6880] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.772956Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ae680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.772960Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ada80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773072Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ace80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773146Z node 4 :GRPC_SERVER DEBUG: [0x51a000013e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773191Z node 4 :GRPC_SERVER DEBUG: [0x51a000014480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773294Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a6e80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773298Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a5c80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773420Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a7480] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773459Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a6280] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773528Z node 4 :GRPC_SERVER DEBUG: [0x51a0000bbe80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:08:57.773610Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c3c80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-17T18:08:59.534336Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345725474951852:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:59.534420Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/0019bf/r3tmp/tmpY6AIWA/pdisk_1.dat 2025-04-17T18:08:59.771074Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:59.839373Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:59.839447Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:59.845227Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2230, node 7 2025-04-17T18:09:00.012742Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:00.012762Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:00.012771Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:00.012902Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:00.307419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation timeout. 2025-04-17T18:09:04.209155Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345749320690147:2119];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:04.225705Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019bf/r3tmp/tmpOj8LU8/pdisk_1.dat 2025-04-17T18:09:04.344505Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:04.364757Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:04.364825Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:04.366802Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15088, node 10 2025-04-17T18:09:04.432053Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:04.432076Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:04.432083Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:04.432219Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:04.668363Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation cancelled. 2025-04-17T18:09:08.803627Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345765806391151:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:08.803729Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019bf/r3tmp/tmpqvrtPf/pdisk_1.dat 2025-04-17T18:09:08.957573Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:08.996009Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:08.996084Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:09.000232Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20002, node 13 2025-04-17T18:09:09.071133Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:09.071153Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:09.071161Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:09.071288Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:09.375133Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> YdbLogStore::LogStoreNegative [GOOD] >> YdbLogStore::Dirs >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2025-04-17T18:08:27.414580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345591060073703:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:27.414626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a04/r3tmp/tmp0CZj6Q/pdisk_1.dat 2025-04-17T18:08:27.848241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:27.848328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:27.850308Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3694, node 1 2025-04-17T18:08:27.860514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:27.862341Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:27.862675Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:27.950654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:27.950677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:27.950695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:27.950843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:08:28.317526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:30.511567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345603944976629:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:30.511714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:30.815333Z node 1 :TX_PROXY ERROR: Actor# [1:7494345603944976650:2637] txid# 281474976710658, issues: { message: "Column Key has wrong key type Double" severity: 1 } 2025-04-17T18:08:32.374227Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345610352757861:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:32.374339Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a04/r3tmp/tmpMWZza9/pdisk_1.dat 2025-04-17T18:08:32.574698Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:32.616894Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:32.616968Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:32.624615Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9216, node 4 2025-04-17T18:08:32.777269Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:32.777291Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:32.777298Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:32.777493Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:33.083567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:35.802708Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345623237660842:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:35.804846Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:35.831309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345623237660870:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:35.831468Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:35.865376Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:35.876135Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237660924:2651] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.876163Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237660923:2650] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.876245Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237660922:2649] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.876327Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237660959:2676] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.876344Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237660917:2645] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.876443Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237660925:2652] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.876460Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237660966:2681] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.899537Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237661000:2710] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:35.899702Z node 4 :TX_PROXY ERROR: Actor# [4:7494345623237661001:2711] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:08:36.017802Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345627532628431:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:36.017879Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:36.018025Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345627532628436:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:36.021949Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474 ... oad_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:36.247375Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2f0e5hbqdd7k7ya56jh8r3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjI2MDEyNmEtYzk2YjE5OTItNjlkMDBlOWYtZTU5YjVhNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:37.970114Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345633976763939:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:37.970159Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a04/r3tmp/tmpztnuEa/pdisk_1.dat 2025-04-17T18:08:38.203039Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:38.239580Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:38.239660Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:38.241795Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18323, node 7 2025-04-17T18:08:38.293512Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:38.293532Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:38.293539Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:38.293656Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:38.515378Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:38.532916Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:41.203660Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:41.303953Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345651156634347:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:41.303954Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494345651156634355:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:41.304072Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:41.307276Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:41.330335Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494345651156634361:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:41.408408Z node 7 :TX_PROXY ERROR: Actor# [7:7494345651156634438:2837] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:41.459507Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0kapbc9ppv7c06fy0hvg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MWYxMDdkLWY5NDdhMjEwLTFhOWQ5NzhkLWM2YjFlYTY5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:08:43.403858Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345658508954934:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:43.411810Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a04/r3tmp/tmpmQDZQD/pdisk_1.dat 2025-04-17T18:08:43.617640Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:43.639668Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:43.639753Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:43.644356Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20053, node 10 2025-04-17T18:08:43.730064Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:43.730103Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:43.730113Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:43.730320Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:44.092415Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:46.827256Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:48.402535Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494345658508954934:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:48.402608Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:58.578140Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:08:58.578171Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:12.829971Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345783063008751:2537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:12.829996Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345783063008759:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:12.830064Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:12.833340Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:12.871035Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345783063008765:2541], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:09:12.962000Z node 10 :TX_PROXY ERROR: Actor# [10:7494345783063008841:3203] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:13.088430Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f1j3wbmxbeg7rabk62z5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjQ3OTI3Y2YtNzdlMmEzYWItZTJjZDliZmUtYWJmZjNmMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:13.531122Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f1jcz06zhcdyv19ncg9hw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjQ3OTI3Y2YtNzdlMmEzYWItZTJjZDliZmUtYWJmZjNmMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> TSyncBrokerTests::ShouldReturnToken >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> TSyncNeighborsTests::SerDes2 [GOOD] >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TSyncBrokerTests::ShouldEnqueue >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TSyncBrokerTests::ShouldReleaseToken >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] >> TSyncBrokerTests::ShouldReleaseToken [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TTxLocatorTest::TestWithReboot >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> TTxLocatorTest::TestAllocateAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2025-04-17T18:09:17.692660Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-17T18:09:17.795090Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-04-17T18:09:17.795182Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token released, active: 1, 
waiting: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2025-04-17T18:09:17.693225Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-17T18:09:17.693382Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:6:2053], token sent, active: 1, waiting: 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2025-04-17T18:09:17.694133Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-17T18:09:17.694291Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-04-17T18:09:17.790645Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-04-17T18:09:17.790773Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-04-17T18:09:17.790860Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:7:2054], enqueued, active: 1, waiting: 1 >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] >> KqpQueryService::TableSink_ReplaceDuplicatesOlap >> TTxLocatorTest::TestAllocateAll [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> TTxLocatorTest::TestWithReboot [GOOD] >> YdbScripting::MultiResults [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2025-04-17T18:09:18.332170Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T18:09:18.334743Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T18:09:18.336903Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T18:09:18.347522Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.360567Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T18:09:18.374380Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.374492Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.374588Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T18:09:18.374774Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.375265Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.375395Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T18:09:18.375559Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T18:09:18.377068Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710655 2025-04-17T18:09:18.379247Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.379343Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.379447Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2025-04-17T18:09:18.379487Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2025-04-17T18:09:18.388285Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#1 2025-04-17T18:09:18.388846Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-04-17T18:09:18.388893Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |88.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2025-04-17T18:09:18.331693Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-17T18:09:18.334206Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-17T18:09:18.336592Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-17T18:09:18.347747Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.362341Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T18:09:18.376687Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.376792Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.376891Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-17T18:09:18.377040Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.377123Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.377241Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T18:09:18.377393Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T18:09:18.387216Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-04-17T18:09:18.387772Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-04-17T18:09:18.393774Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-04-17T18:09:18.394237Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-04-17T18:09:18.394851Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-04-17T18:09:18.395113Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.395233Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-04-17T18:09:18.395382Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.395651Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.395716Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.395798Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-04-17T18:09:18.395974Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.396025Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-04-17T18:09:18.396202Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.396442Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.396518Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.396589Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-04-17T18:09:18.396749Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-04-17T18:09:18.396916Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.397020Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-17T18:09:18.397062Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-04-17T18:09:18.397230Z node 1 
:TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.397300Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.397414Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-17T18:09:18.397446Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 100000 to# 200000 2025-04-17T18:09:18.397564Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-17T18:09:18.397593Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 200000 to# 300000 2025-04-17T18:09:18.397680Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.397737Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.397830Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.397906Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-17T18:09:18.397948Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 300000 to# 400000 2025-04-17T18:09:18.398056Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.398109Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-04-17T18:09:18.398133Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 400000 to# 500000 2025-04-17T18:09:18.398241Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.398309Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-17T18:09:18.398334Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 500000 to# 600000 2025-04-17T18:09:18.398443Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.398476Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-17T18:09:18.398500Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 600000 to# 700000 2025-04-17T18:09:18.398614Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.398675Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-17T18:09:18.398702Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 700000 to# 800000 2025-04-17T18:09:18.398751Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-17T18:09:18.398775Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 800000 to# 900000 2025-04-17T18:09:18.398881Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.398992Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.399076Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-17T18:09:18.399117Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-17T18:09:18.403535Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2025-04-17T18:09:18.405710Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-04-17T18:09:18.406517Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2025-04-17T18:09:18.406605Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: [[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2025-04-17T18:09:18.406788Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2025-04-17T18:09:18.406847Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2025-04-17T18:09:18.406888Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2025-04-17T18:09:18.406940Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... 
2025-04-17T18:09:18.741428Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.741490Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.741562Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-17T18:09:18.741587Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9100000 to# 9200000 2025-04-17T18:09:18.741749Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.741839Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-17T18:09:18.741864Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9200000 to# 9300000 2025-04-17T18:09:18.741968Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.742039Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-17T18:09:18.742070Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9300000 to# 9400000 2025-04-17T18:09:18.742167Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-17T18:09:18.742391Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] TEvAllocateResult from# 9400000 to# 9500000 2025-04-17T18:09:18.742493Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.742538Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.742592Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-04-17T18:09:18.742615Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9500000 to# 9600000 2025-04-17T18:09:18.742758Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-17T18:09:18.742795Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9600000 to# 9700000 2025-04-17T18:09:18.742896Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.742955Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.743047Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-17T18:09:18.743082Z 
node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9700000 to# 9800000 2025-04-17T18:09:18.743206Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.743289Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.743336Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-17T18:09:18.743358Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9800000 to# 9900000 2025-04-17T18:09:18.743452Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.743514Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-17T18:09:18.743536Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:633:2564] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-17T18:09:18.747403Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2025-04-17T18:09:18.748876Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2025-04-17T18:09:18.749575Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2025-04-17T18:09:18.749636Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2025-04-17T18:09:18.749786Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-04-17T18:09:18.749831Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-17T18:09:18.749947Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.749992Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-17T18:09:18.750032Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.750150Z node 1 :TABLET_MAIN DEBUG: 
TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.750243Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.750300Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2025-04-17T18:09:18.750355Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.750400Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.750535Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2025-04-17T18:09:18.750576Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-04-17T18:09:18.750606Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-17T18:09:18.750632Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.750654Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-17T18:09:18.750677Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-04-17T18:09:18.750748Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2025-04-17T18:09:18.750783Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2025-04-17T18:09:18.750810Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2025-04-17T18:09:18.750845Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2025-04-17T18:09:18.750873Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2025-04-17T18:09:18.750900Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - 
THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2025-04-17T18:09:18.751161Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2025-04-17T18:09:18.752391Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.756475Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-17T18:09:18.756714Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-17T18:09:18.757515Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-17T18:09:18.757586Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.757679Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:09:18.757765Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 |88.5%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbScripting::MultiResults [GOOD] Test command err: 2025-04-17T18:08:49.906752Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345684505992342:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:49.906799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a1/r3tmp/tmphWIqrO/pdisk_1.dat 2025-04-17T18:08:50.620646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:50.620754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:50.625364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:50.647973Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22011, node 1 2025-04-17T18:08:50.710760Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:50.730953Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:50.905541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:50.905566Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:50.905573Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:50.907693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:08:50.921017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:9749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:51.280971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:51.298524Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:08:53.462797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:54.098960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345705980831552:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:54.099059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345705980831544:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:54.099113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:54.102804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:54.121567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345705980831558:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:54.205165Z node 1 :TX_PROXY ERROR: Actor# [1:7494345705980831681:4164] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:54.586898Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f0zthdn3jz5tyw333gtxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk1M2NiNDEtNjYxYzU3OGQtZTcxZTAyNDctODcwY2M4MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS 2025-04-17T18:08:56.616442Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345715906090449:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.616859Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a1/r3tmp/tmpIyQWGa/pdisk_1.dat 2025-04-17T18:08:56.814056Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:56.838841Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:56.838928Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:56.842658Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18576, node 4 2025-04-17T18:08:56.898609Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:56.898627Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:56.898632Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:56.898744Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:57.127428Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:00.009108Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:00.560185Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345733085962584:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:00.560192Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345733085962595:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:00.560244Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:00.564042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:00.586349Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494345733085962598:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:09:00.691413Z node 4 :TX_PROXY ERROR: Actor# [4:7494345733085962725:4127] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:00.826599Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f164e57yazbndwhhqe1qx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjhmOTlhYzItMzI2NTk0YjEtNjdmZmU4YjktNTA2ZWM4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:00.892871Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24052 2025-04-17T18:09:03.153540Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345742545427502:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.153619Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a1/r3tmp/tmpPK5y4z/pdisk_1.dat 2025-04-17T18:09:03.277154Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:03.309419Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:03.309498Z node 7 :HIVE WARN: ... tion 2025-04-17T18:09:08.631182Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:08.631282Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:08.633601Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:08.741011Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:11.360386Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345777393820939:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.360495Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.440374Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:11.549306Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345777393821105:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.549419Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.549517Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494345777393821110:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:11.553866Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:11.572942Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494345777393821112:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:09:11.642294Z node 10 :TX_PROXY ERROR: Actor# [10:7494345777393821200:2800] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:11.693156Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f1e5y0ap6ek6zz036kqdx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWFmYWE3ODUtNmU2OTMxZWUtMzJjMDY3OGEtODRlNzRjZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:11.801506Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2f1h0v9qmbsaxhg5enmhxd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YTk0N2ZmNGItYzI1N2QzNmUtMTlhMzVhYzgtYjY3NTUxZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:11.805287Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913351839, txId: 281474976715662] shutting down 2025-04-17T18:09:13.502595Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7494345788457184221:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:13.503519Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a1/r3tmp/tmpYz8ahL/pdisk_1.dat 2025-04-17T18:09:13.705326Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:13.723618Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:13.723721Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:13.732996Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28133, node 13 2025-04-17T18:09:13.812292Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:13.812322Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:13.812340Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:13.812496Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9670 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:14.051848Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:16.877607Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345801342087159:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:16.877705Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:17.164095Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:17.251465Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345805637054624:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:17.251553Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:17.251777Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494345805637054629:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:17.255440Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:17.280596Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494345805637054631:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:09:17.341259Z node 13 :TX_PROXY ERROR: Actor# [13:7494345805637054698:2804] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:17.389719Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f1kd37fke76ndzf8gcy12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjdkZDM0M2YtNjEzMWUwODUtODE4ODNmMWItOTFmYWJkMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:17.494683Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f1kd37fke76ndzf8gcy12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzk3NDE4MzAtZjhiMGVjODYtNTM2ZWI2ZGEtN2YyZjE5YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:17.543301Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f1kd37fke76ndzf8gcy12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzBjYjZmOS0yODVkNGFlMy1iNDg2NzhhYy02YjFiOWY5NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:17.694438Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f1kd37fke76ndzf8gcy12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NmNiZTMxNGYtM2Q3ZTZlYTQtNTc4NGZiOWYtYjY5NDQ3OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> THiveTest::TestNoMigrationToSelf >> THiveTest::TestCreateTablet >> YdbTableBulkUpsert::Overload [GOOD] >> THiveTest::TestFollowers >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> KqpErrors::ResolveTableError [GOOD] >> THiveTest::TestLocalDisconnect >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget 
[GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TStorageBalanceTest::TestScenario1 >> THiveTest::TestDrain >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> KqpUniqueIndex::UpdateFkSameValue >> KqpMultishardIndex::SecondaryIndexSelectNull >> KqpIndexes::NullInIndexTableNoDataRead >> KqpMultishardIndex::WriteIntoRenamingSyncIndex >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn >> KqpIndexes::UpsertWithNullKeysSimple >> KqpMultishardIndex::DataColumnWriteNull >> KqpUniqueIndex::InsertFkPartialColumnSet >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex >> KqpIndexes::MultipleModifications ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2025-04-17T18:09:17.408859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:17.410134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001713/r3tmp/tmpruPhhn/pdisk_1.dat 2025-04-17T18:09:17.973054Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:18.199866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:09:18.301982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:18.302684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:18.311680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:18.311772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:18.329023Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:09:18.338413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:18.338818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:18.701783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:09:20.587286Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Bootstrap done, become ReadyState 2025-04-17T18:09:20.587613Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. 
Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 2, stages: 1 2025-04-17T18:09:20.589128Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-17T18:09:20.590372Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2025-04-17T18:09:20.592411Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2025-04-17T18:09:20.592600Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-17T18:09:20.593577Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-04-17T18:09:20.595487Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] create compute task: 1 2025-04-17T18:09:20.599130Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-17T18:09:20.599261Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:09:20.604578Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Collect channels updates for task: 1 at actor [1:1558:2944] 2025-04-17T18:09:20.604687Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Sending channels info to compute actor: [1:1558:2944], channels: 0 2025-04-17T18:09:20.604776Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:09:20.604830Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2025-04-17T18:09:20.604885Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Collect channels updates for task: 1 at actor [1:1558:2944] 2025-04-17T18:09:20.604944Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Sending channels info to compute actor: [1:1558:2944], channels: 0 2025-04-17T18:09:20.605048Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [1:1558:2944], 2025-04-17T18:09:20.605109Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1558:2944], 2025-04-17T18:09:20.605171Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-17T18:09:20.637291Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
ActorState: ExecuteState, got execution state from compute actor: [1:1558:2944], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T18:09:20.637400Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [1:1558:2944], 2025-04-17T18:09:20.637456Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1558:2944], 2025-04-17T18:09:20.640480Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [1:1558:2944], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 19923 Tasks { TaskId: 1 CpuTimeUs: 9380 FinishTimeMs: 1744913360639 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 1855 BuildCpuTimeUs: 7525 HostName: "ghrun-gkdwnlcfnq" NodeId: 1 CreateTimeMs: 1744913360610 UpdateTimeMs: 1744913360639 } MaxMemoryUsage: 1048576 } 2025-04-17T18:09:20.640646Z node 1 :KQP_EXECUTER INFO: TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [1:1558:2944] 2025-04-17T18:09:20.640744Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Send Commit to BufferActor=[1:1554:2944] 2025-04-17T18:09:20.640801Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.019923s ReadRows: 0 ReadBytes: 0 ru: 13 rate limiter was not found force flag: 1 2025-04-17T18:09:20.684037Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-04-17T18:09:20.684110Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1555:2944] TxId: 281474976715658. Ctx: { TraceId: 01js2f1rtz95gn70pd8fw1929j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0M2U1NTctMmUyN2IwM2ItYWY4OGQ4NmMtNGQxNDA3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Terminate, become ZombieState 2025-04-17T18:09:20.758067Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1576:2963], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2025-04-17T18:09:20.759597Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2EwN2NiMmUtY2MyOWNiNjctYTdjNDcyMDUtODgyMzU0NDE=, ActorId: [1:1574:2961], ActorState: ExecuteState, TraceId: 01js2f1ssk9psj0rxy3bx1zkf3, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Overload [GOOD] Test command err: 2025-04-17T18:08:18.675813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345552687258352:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.734429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a15/r3tmp/tmpPoN7tp/pdisk_1.dat 2025-04-17T18:08:19.233335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:19.233416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:19.247654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:19.260535Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65072, node 1 2025-04-17T18:08:19.321709Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:19.321764Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:19.516485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:19.516507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:19.516515Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:19.516594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.770978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:22.013076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-17T18:08:22.195972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345569867128728:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:22.196019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345569867128740:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:22.196095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:22.198870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:08:22.222055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345569867128742:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:08:22.311518Z node 1 :TX_PROXY ERROR: Actor# [1:7494345569867128819:2814] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:08:23.143865Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f00njeh1feqc326vqx05h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI4ZjkwYjUtNjY5NzFjYS1hOWJjYzgzMy01YWY2ZDkzMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-17T18:08:23.533371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f01mk0ppd7f5mssgmzenz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI4ZjkwYjUtNjY5NzFjYS1hOWJjYzgzMy01YWY2ZDkzMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-17T18:08:23.610137Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:08:23.646475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:08:23.662607Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345552687258352:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.663200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; SUCCESS 2025-04-17T18:08:24.146882Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2f0257ese1s9mwgcs32zqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZkOTNhYS1hODczYmI3Ny1kYTgzOGVlOC1mMDNiYzIzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-17T18:08:24.545329Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2f02k03101kry3xarhbczb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZkOTNhYS1hODczYmI3Ny1kYTgzOGVlOC1mMDNiYzIzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-17T18:08:24.605121Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:08:24.645663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-17T18:08:25.034980Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2f034ca7010txwz65gz27w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTMxZjZiM2ItNjNkZDEzMmYtZGJhNzM4YzMtMWRlM2I2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-17T18:08:25.407397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01js2f03eqdghhe9f9g2zvbsdp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTMxZjZiM2ItNjNkZDEzMmYtZGJhNzM4YzMtMWRlM2I2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-17T18:08:25.468291Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T18:08:25.494387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-17T18:08:25.869989Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2f03yk7b84yw4hsh27wnxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmMTc0MC1lYjIxMGEzOS0xY2RmMWE1NC00NjNkYTQ2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-17T18:08:26.213375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2f048q0gq5zzpwtm9f3jyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmMTc0MC1lYjIxMGEzOS0xY2RmMWE1NC00NjNkYTQ2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-17T18:08:26.267252Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-17T18:08:26.296116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-17T18:08:26.754461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01js2f04sb2mqxywtq6fppaccy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE3NWE0MzItOTgzNjgxMGMtMTUxOGUzNWItNTllMDZkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-17T18:08:27.146305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2f054h1qeg8vnnhf2x0ar7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE3NWE0MzItOTgzNjgxMGMtMTUxOGUzNWItNTllMDZkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-17T18:08:27.214648Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-17T18:08:27.277903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-17T18:08:27.707195Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01js2f05qmd77mswxvr0b61xcq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNlNTU0MWItY2RlNjQ1ZDQtOTEwMzEwZDQtN2VhMzgxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-17T18:08:28.053995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. 
Ctx: { TraceId: 01js2f0627ahd4r8q7wbnk827c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNlNTU0MWItY2RlNjQ1ZDQtOTEwMzEwZDQtN2VhMzgxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-17T18:08:28.113846Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-17T18:08:28.168498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... 970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.418521Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.418910Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-17T18:09:20.419316Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-17T18:09:20.419358Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-17T18:09:20.419730Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-04-17T18:09:20.422716Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-17T18:09:20.422748Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:09:20.423596Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-04-17T18:09:20.423623Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:09:20.424735Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.424758Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.424846Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-17T18:09:20.424860Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:09:20.424945Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-17T18:09:20.424954Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:09:20.425171Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.425198Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.425940Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.425956Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 .2025-04-17T18:09:20.428407Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.428435Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.437355Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 
1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.437384Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.437766Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.437779Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.438329Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-04-17T18:09:20.438387Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.438400Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.438564Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-17T18:09:20.438920Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-17T18:09:20.441477Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.441494Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.442080Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-17T18:09:20.448491Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-04-17T18:09:20.448543Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:09:20.448761Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-17T18:09:20.448783Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:09:20.449292Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.449315Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.452984Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-17T18:09:20.453014Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:09:20.453055Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-17T18:09:20.453081Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:09:20.453791Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.453819Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 .2025-04-17T18:09:20.456212Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.456237Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.456678Z node 13 :TX_DATASHARD DEBUG: 
CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.456708Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.481217Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.481250Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.482973Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-17T18:09:20.483070Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-04-17T18:09:20.483456Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-17T18:09:20.489698Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-04-17T18:09:20.489737Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:09:20.489840Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-17T18:09:20.489860Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:09:20.489917Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-17T18:09:20.489938Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:09:20.490662Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.490692Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.491072Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.491095Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.491765Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.491783Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.500056Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.500091Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.501043Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.501067Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.510414Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-17T18:09:20.526461Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.526497Z node 13 :TX_DATASHARD DEBUG: 
ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.544588Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-17T18:09:20.544627Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:09:20.546635Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.546669Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 .2025-04-17T18:09:20.552625Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.552652Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.553244Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.553263Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.553616Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.553630Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-17T18:09:20.561241Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:09:20.561267Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2025-04-17T18:08:57.754715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345720366143718:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:57.754755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:08:58.121404908 417885 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:08:58.121544042 417885 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:08:58.759606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.101949Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25670: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25670 } ] 2025-04-17T18:08:59.169186Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25670: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25670 2025-04-17T18:08:59.756502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.758690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.910228Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25670: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25670 } ] 2025-04-17T18:09:01.759631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:02.083335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345741840980589:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:02.084952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:02.180838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345741840980589:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aa9/r3tmp/tmpvZMvlX/pdisk_1.dat 2025-04-17T18:09:02.345740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345741840980589:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 25670, node 1 2025-04-17T18:09:02.399755Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:02.399881Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:2907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:02.717929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:02.755928Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345720366143718:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:02.756016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:02.912915Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T18:09:02.912942Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.912948Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.913306Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T18:09:02.913342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.913347Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.914158Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T18:09:02.914194Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.914200Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.921200Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:02.921222Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.921226Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.922126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:09:02.924345Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-04-17T18:09:02.924367Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.924373Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.926085Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:02.926112Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.926123Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.926127Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T18:09:02.926143Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.926149Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.930643Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:02.930672Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:02.931648Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T18:09:02.931670Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.934307Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.936583Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T18:09:02.936602Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.936606Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.938665Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T18:09:02.938686Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.938692Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.939655Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:02.939673Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.939677Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.943152Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T18:09:02.943179Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.943185Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.946518Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T18:09:02.946537Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.946542Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.983638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741840981187:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.983686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741840981176:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.983802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.984241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741840981193:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.984328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.984403Z node 1 ... 6 2025-04-17T18:09:20.493408Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7494345816222622058 RawX2: 4503616807242226 } } DstEndpoint { ActorId { RawX1: 7494345816222622059 RawX2: 4503616807242227 } } InMemory: true DstStageId: 1 } 2025-04-17T18:09:20.493434Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update output channelId: 1, peer: [4:7494345816222622059:2547] 2025-04-17T18:09:20.493448Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-17T18:09:20.493462Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-17T18:09:20.493486Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T18:09:20.493536Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7494345816222622058 RawX2: 4503616807242226 } } DstEndpoint { ActorId { RawX1: 7494345816222622059 RawX2: 4503616807242227 } } InMemory: true DstStageId: 1 } 2025-04-17T18:09:20.493543Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-17T18:09:20.493551Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-17T18:09:20.493567Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2025-04-17T18:09:20.493576Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-17T18:09:20.493611Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-17T18:09:20.494502Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. Recv TEvReadResult from ShardID=72075186224037894, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-17T18:09:20.494524Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. Taken 0 locks 2025-04-17T18:09:20.494534Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. new data for read #0 seqno = 1 finished = 1 2025-04-17T18:09:20.494552Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-17T18:09:20.494588Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:20.494603Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-17T18:09:20.494618Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. enter pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-17T18:09:20.494630Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. exit pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-17T18:09:20.494639Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. returned 0 rows; processed 0 rows 2025-04-17T18:09:20.494677Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. dropping batch for read #0 2025-04-17T18:09:20.494685Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. effective maxinflight 1 sorted 1 2025-04-17T18:09:20.494737Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-17T18:09:20.494751Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7494345816222622058:2546]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-17T18:09:20.494840Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:09:20.494863Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622059:2547], TxId: 281474976710688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. CustomerSuppliedId : . TraceId : 01js2f1saz1780mpfty5k3f8rx. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-17T18:09:20.494879Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. Finish input channelId: 1, from: [4:7494345816222622058:2546] 2025-04-17T18:09:20.494910Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622059:2547], TxId: 281474976710688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. CustomerSuppliedId : . TraceId : 01js2f1saz1780mpfty5k3f8rx. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:20.494928Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-17T18:09:20.494946Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622059:2547], TxId: 281474976710688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. CustomerSuppliedId : . TraceId : 01js2f1saz1780mpfty5k3f8rx. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:09:20.494961Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:20.494978Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622059:2547], TxId: 281474976710688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. CustomerSuppliedId : . TraceId : 01js2f1saz1780mpfty5k3f8rx. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:20.494981Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1. Tasks execution finished 2025-04-17T18:09:20.495010Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622058:2546], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. TraceId : 01js2f1saz1780mpfty5k3f8rx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:09:20.495016Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-17T18:09:20.495025Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. Tasks execution finished 2025-04-17T18:09:20.495055Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345816222622059:2547], TxId: 281474976710688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZjNiNzJlYS1hOGI3Yjg0NC1kYzA4Y2JiYy1lOWQ0Nzcz. CustomerSuppliedId : . TraceId : 01js2f1saz1780mpfty5k3f8rx. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:09:20.495116Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1. pass away 2025-04-17T18:09:20.495118Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. pass away 2025-04-17T18:09:20.495182Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710688;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:09:20.495183Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710688;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:09:21.298698Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:8977: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:8977 >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 >> KqpIndexes::UniqAndNoUniqSecondaryIndex >> THiveTest::TestUpdateChannelValues >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestStorageBalancer >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2025-04-17T18:08:56.554958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345713926731906:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.555007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:08:56.839979065 416754 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:08:56.840168545 416754 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:08:56.872826Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25246: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25246 } ] 2025-04-17T18:08:57.590297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:58.068480Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25246: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25246 } ] 2025-04-17T18:08:58.068804Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25246: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25246 2025-04-17T18:08:58.590569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.567231Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25246: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25246 } ] 2025-04-17T18:08:59.592011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.596822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ac9/r3tmp/tmpHw9LMd/pdisk_1.dat 2025-04-17T18:09:00.916703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:00.928137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345731106601720:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:01.030486Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:01.033705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345731106601720:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 25246, node 1 2025-04-17T18:09:01.163561Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:01.163586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:01.163594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:01.163729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:09:01.557648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345713926731906:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:01.557722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:23570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:01.689734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... E0417 18:09:01.836620907 417127 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:01.836776208 417127 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:02.056341Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T18:09:02.056372Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.056379Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.057567Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T18:09:02.057579Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.057584Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.058669Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". 
Create session OK 2025-04-17T18:09:02.058692Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.058700Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.061130Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:09:02.070602Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:02.070632Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.070637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.075980Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T18:09:02.076006Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.076013Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.078239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:02.078599Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T18:09:02.078620Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.078628Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.079530Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T18:09:02.079551Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.079556Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.080125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:09:02.080678Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:02.080698Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:02.080739Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T18:09:02.080748Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.080753Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.081524Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T18:09:02.081533Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.081538Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.081642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:09:02.081988Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". 
Create session OK 2025-04-17T18:09:02.082000Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.082005Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.082341Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T18:09:02.082352Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.082357Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.083051Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:02.083073Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.083083Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.084897Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:02.084908Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.084917Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.109838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but ... ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.279134Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.279253Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.279272Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.279392Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.279410Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.279936Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280021Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280094Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280162Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280233Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280299Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280372Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280441Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.280510Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282399Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282446Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282595Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282615Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282688Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282757Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282828Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282947Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.282965Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283078Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished 
with error: 2025-04-17T18:09:25.283146Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283213Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283276Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283339Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283362Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283428Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283495Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283570Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283674Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283745Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283771Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283832Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.283894Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284011Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284030Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284145Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284165Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284281Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284300Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284413Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284431Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284545Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284562Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284639Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284746Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284814Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284926Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.284956Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285025Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285131Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285161Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285276Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285295Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285411Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285432Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285499Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285598Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285667Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285731Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with 
error: 2025-04-17T18:09:25.285754Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285819Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285884Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.285948Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286018Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286081Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286227Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286308Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286375Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286438Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286533Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286610Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286676Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286737Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286799Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.286860Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.294562Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.294621Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.294705Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.294778Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.294895Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.294913Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295032Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295051Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295167Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295186Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295312Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295330Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295396Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295473Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295544Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295659Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295728Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295756Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295825Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.295901Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296026Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296050Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296165Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with 
error: 2025-04-17T18:09:25.296181Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296281Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296346Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296415Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296482Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296511Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296577Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296644Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296708Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296816Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296885Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296910Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.296978Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.297092Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.297112Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.297296Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.297324Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.297401Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298225Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298342Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298415Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298485Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298558Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298624Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298692Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298758Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298823Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-17T18:09:25.298890Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancer >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> KqpIndexes::MultipleModifications [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex [GOOD] >> KqpIndexes::SecondaryIndexSelectUsingScripting >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpUniqueIndex::InsertFkPkOverlap >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> 
KqpIndexes::UpsertWithNullKeysComplex >> KqpIndexes::NullInIndexTableNoDataRead [GOOD] >> KqpIndexes::NullInIndexTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> KqpMultishardIndex::DataColumnWriteNull [GOOD] >> KqpMultishardIndex::DuplicateUpsert >> Yq_1::ModifyQuery [GOOD] >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogWithUnionAllAscending >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 >> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD] >> KqpMultishardIndex::SecondaryIndexSelect >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2025-04-17T18:08:56.567614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345715596823260:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.567672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:08:57.021287515 416762 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:08:57.021400210 416762 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:08:57.586644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:57.993340Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:7144: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:7144 } ] 2025-04-17T18:08:58.014451Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:7144: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:7144 2025-04-17T18:08:58.594787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.600470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.753670Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:7144: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:7144 } ] 2025-04-17T18:09:00.602517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.758539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:00.768461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345732776692865:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ac0/r3tmp/tmpN75pWo/pdisk_1.dat 2025-04-17T18:09:00.854427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345732776692865:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:00.992685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:00.993334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:01.001448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:01.026407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345732776692865:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 7144, node 1 2025-04-17T18:09:01.568827Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345715596823260:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:01.568921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:21511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:01.741509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:01.937270Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:01.937298Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:01.964213Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:01.964243Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:01.964252Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:01.964601Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T18:09:01.964646Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:01.964669Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:01.974296Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T18:09:01.974308Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T18:09:01.974325Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:01.974332Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:01.974334Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:01.974342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:01.975197Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2025-04-17T18:09:01.975216Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:01.975222Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:01.975230Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T18:09:01.975242Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:01.975246Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:01.982344Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T18:09:01.982369Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:01.982374Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:01.983493Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T18:09:01.983511Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:01.983536Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:01.984268Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T18:09:01.984281Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:01.984286Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:01.985009Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:01.985019Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:01.985050Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:01.985683Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T18:09:01.985693Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:01.985698Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:01.985887Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T18:09:01.985914Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:01.985920Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:01.986918Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:01.986937Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:01.986943Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.016703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741366628052:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.016702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741366628060:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.016823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.022643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 E0417 18:09:02.023249580 417150 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:02.023395766 417150 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:02.027084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [Wor ... P_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-17T18:09:28.671289Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-17T18:09:28.671317Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8388600 2025-04-17T18:09:28.671338Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. returned 1 rows; processed 1 rows 2025-04-17T18:09:28.671391Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. dropping batch for read #0 2025-04-17T18:09:28.671404Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. effective maxinflight 1024 sorted 0 2025-04-17T18:09:28.671420Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-17T18:09:28.671437Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2, CA Id [4:7494345852523629357:3062]. returned async data processed rows 1 left freeSpace 8388600 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-17T18:09:28.672019Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 DstTaskId: 3 SrcEndpoint { ActorId { RawX1: 7494345852523629357 RawX2: 4503616807242742 } } DstEndpoint { ActorId { RawX1: 7494345852523629358 RawX2: 4503616807242743 } } InMemory: true SrcStageId: 1 DstStageId: 2 } Update { Id: 3 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 3 SrcEndpoint { ActorId { RawX1: 7494345852523629358 RawX2: 4503616807242743 } } DstEndpoint { ActorId { RawX1: 7494345852523629352 RawX2: 4503616807242088 } } InMemory: true } 2025-04-17T18:09:28.672054Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Update input channelId: 1, peer: [4:7494345852523629357:3062] 2025-04-17T18:09:28.674730Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629357:3062], TxId: 281474976715802, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2f21jbesrshf20pmmqerxg. SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:09:28.674781Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629357:3062], TxId: 281474976715802, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2f21jbesrshf20pmmqerxg. SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:28.674830Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-17T18:09:28.674974Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T18:09:28.675163Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 DstTaskId: 3 SrcEndpoint { ActorId { RawX1: 7494345852523629357 RawX2: 4503616807242742 } } DstEndpoint { ActorId { RawX1: 7494345852523629358 RawX2: 4503616807242743 } } InMemory: true SrcStageId: 1 DstStageId: 2 } Update { Id: 3 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 3 SrcEndpoint { ActorId { RawX1: 7494345852523629358 RawX2: 4503616807242743 } } DstEndpoint { ActorId { RawX1: 7494345852523629352 RawX2: 4503616807242088 } } InMemory: true } 2025-04-17T18:09:28.675244Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:28.675270Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-17T18:09:28.675316Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 3. Finish input channelId: 1, from: [4:7494345852523629357:3062] 2025-04-17T18:09:28.675398Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:28.675612Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:09:28.675649Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629357:3062], TxId: 281474976715802, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2f21jbesrshf20pmmqerxg. SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-17T18:09:28.675683Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629357:3062], TxId: 281474976715802, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2f21jbesrshf20pmmqerxg. SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:28.675707Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2. Tasks execution finished 2025-04-17T18:09:28.675734Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629357:3062], TxId: 281474976715802, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2f21jbesrshf20pmmqerxg. SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:09:28.675865Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 2. pass away 2025-04-17T18:09:28.675977Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715802;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:09:28.676375Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:28.676410Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-17T18:09:28.676439Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 3. Tasks execution finished 2025-04-17T18:09:28.676449Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345852523629358:3063], TxId: 281474976715802, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=. TraceId : 01js2f21jbesrshf20pmmqerxg. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:09:28.676498Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715802, task: 3. pass away 2025-04-17T18:09:28.676549Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715802;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:09:28.678063Z node 4 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=, ActorId: [4:7494345800984016959:2408], ActorState: ReadyState, Internal error, message: TKqpSessionActor in state ReadyState received unexpected event NKikimr::NGRpcService::TEvClientLost(0x108c0001) sender: [4:8320808721877066593:7169396] 2025-04-17T18:09:28.679365Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715803. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YjY4MDZkZDEtOGQ4NWY5MWUtNzczYjI3ZDctNzZlMWQ2Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-17T18:09:28.680204Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: CLIENT_CANCELLED, Issues: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:29810 } ], Query: --!syntax_v1 -- Query name: GetTask(read) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $now as Timestamp; SELECT `generation`, `internal`, `query` FROM `queries` WHERE `scope` = $scope AND `query_id` = $query_id; SELECT `assigned_until` FROM `pending_small` WHERE `tenant` = $tenant AND `scope` = $scope AND `query_id` = $query_id AND `assigned_until` < $now; 2025-04-17T18:09:28.683287Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint [::]:29810 2025-04-17T18:09:29.619959Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:29810: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:29810 >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> KqpErrors::ProposeErrorEvWrite [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::Uint8Index >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> KqpIndexes::InnerJoinWithNonIndexWherePredicate >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> Yq_1::Basic_TaggedLiteral [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: 2025-04-17T18:09:17.478549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:17.479412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:17.479559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:17.480503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:17.481102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:17.481151Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001709/r3tmp/tmpi9epcU/pdisk_1.dat 2025-04-17T18:09:17.975424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:18.207354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:09:18.321035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:18.321186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:18.326040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:18.326138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:18.344003Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:09:18.344452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:18.344766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:18.700964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:09:19.866374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1598:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:19.866503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1607:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:19.866598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:19.874512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:09:20.570545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1612:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:09:20.775073Z node 1 :TX_PROXY ERROR: Actor# [1:1764:3058] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:21.117270Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-04-17T18:09:21.117362Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-17T18:09:21.117432Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-17T18:09:21.117488Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-17T18:09:21.117555Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-17T18:09:21.125249Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-17T18:09:21.135114Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.433549s, cancelAfter: (empty maybe) 2025-04-17T18:09:21.135196Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2025-04-17T18:09:21.135252Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-17T18:09:21.135320Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-17T18:09:21.135391Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-17T18:09:21.136022Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2025-04-17T18:09:21.136273Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-17T18:09:21.136588Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-17T18:09:21.136641Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-17T18:09:21.136834Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-17T18:09:21.137141Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-17T18:09:21.137323Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-17T18:09:21.137468Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-04-17T18:09:21.139116Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2025-04-17T18:09:21.140353Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. 
Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-04-17T18:09:21.140894Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2f1rzp4045fz4s6wfdz7x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MTMwNGYtZGI0NWM1ZDgtZjI2NzAyMzItMzlhODcwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:21.1 ... ck '( (let $1 (Just (Uint32 '5))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1))))) )))) ) 2025-04-17T18:09:33.680195Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create compute task: 1 2025-04-17T18:09:33.680291Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:09:33.680346Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-17T18:09:33.680607Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [3:2050:3203] 2025-04-17T18:09:33.680669Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:2050:3203], channels: 0 2025-04-17T18:09:33.680731Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:09:33.680778Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2047:3203] TxId: 281474976715672. 
Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-17T18:09:33.680825Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [3:2050:3203] 2025-04-17T18:09:33.680877Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:2050:3203], channels: 0 2025-04-17T18:09:33.680953Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2050:3203], 2025-04-17T18:09:33.681032Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2050:3203], 2025-04-17T18:09:33.681111Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-17T18:09:33.681995Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2050:3203], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T18:09:33.682073Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2050:3203], 2025-04-17T18:09:33.682131Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2050:3203], 2025-04-17T18:09:33.683055Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. 
Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2050:3203], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 645 Tasks { TaskId: 1 CpuTimeUs: 113 FinishTimeMs: 1744913373682 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 95 HostName: "ghrun-gkdwnlcfnq" NodeId: 3 CreateTimeMs: 1744913373681 UpdateTimeMs: 1744913373682 } MaxMemoryUsage: 1048576 } 2025-04-17T18:09:33.683172Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:2050:3203] 2025-04-17T18:09:33.683244Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[3:2046:3203] 2025-04-17T18:09:33.683322Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000645s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:09:33.701357Z node 3 :KQP_COMPUTE WARN: SelfId: [3:2053:3203], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:2037:3203]Got OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2053:3203]. Ignored this error. 2025-04-17T18:09:33.701490Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2046:3203], SessionActorId: [3:2037:3203], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 . sessionActorId=[3:2037:3203]. isRollback=0 2025-04-17T18:09:33.701840Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, ActorId: [3:2037:3203], ActorState: ExecuteState, TraceId: 01js2f26djcvvdg6x6dr8846tp, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [3:2047:3203] from: [3:2046:3203] 2025-04-17T18:09:33.702033Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got EvAbortExecution, status: OVERLOADED, message: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-04-17T18:09:33.702125Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-04-17T18:09:33.702240Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-17T18:09:33.702404Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: OVERLOADED Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2006 severity: 1 } Result { Stats { CpuTimeUs: 645 } } , to ActorId: [3:2037:3203] 2025-04-17T18:09:33.702462Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-04-17T18:09:33.702584Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:09:33.702644Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2047:3203] TxId: 281474976715672. Ctx: { TraceId: 01js2f26djcvvdg6x6dr8846tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState 2025-04-17T18:09:33.702839Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmIzOTJjNDEtYTEwMjI3YTItOGVhNTljZGUtMjcxZTE5YjU=, ActorId: [3:2037:3203], ActorState: ExecuteState, TraceId: 01js2f26djcvvdg6x6dr8846tp, Create QueryResponse for error on request, msg: >> Yq_1::DescribeQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 17527, msgbus: 28792 2025-04-17T18:06:52.013058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345182523977139:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:52.013115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001360/r3tmp/tmpjyVbsp/pdisk_1.dat 2025-04-17T18:06:52.405095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:52.423424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:52.423559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17527, node 1 2025-04-17T18:06:52.457897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:52.458159Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:52.458192Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:52.483562Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:52.483592Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:52.483601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:52.483768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28792 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:06:52.690974Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] Handle TEvNavigate describe path dc-1 2025-04-17T18:06:52.691023Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977866:2437] HANDLE EvNavigateScheme dc-1 2025-04-17T18:06:52.692265Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977866:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.730512Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977866:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:06:52.744204Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977866:2437] Handle TEvDescribeSchemeResult Forward to# [1:7494345182523977865:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:06:52.788187Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] Handle TEvProposeTransaction 2025-04-17T18:06:52.788220Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:06:52.788347Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345182523977881:2445] 2025-04-17T18:06:52.916116Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:52.916214Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:52.916236Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:52.916332Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:52.916621Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.916753Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:06:52.916829Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:06:52.917016Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:06:52.917861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.920851Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:06:52.920908Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977881:2445] txid# 281474976710657 SEND to# [1:7494345182523977880:2444] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T18:06:52.931286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:06:52.953978Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] Handle TEvProposeTransaction 2025-04-17T18:06:52.954005Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:06:52.954052Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345182523977921:2481] 2025-04-17T18:06:52.956496Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:52.956556Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:52.956571Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:52.956621Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:52.956868Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.956965Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:06:52.957008Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:06:52.957169Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:06:52.957583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.959458Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:06:52.959502Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182523977921:2481] txid# 281474976710658 SEND to# [1:7494345182523977920:2480] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:06:53.004289Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] Handle TEvProposeTransaction 2025-04-17T18:06:53.004316Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T18:06:53.004364Z node 1 :TX_PROXY DEBUG: actor# [1:7494345182523977377:2120] Cookie# 0 userReqId# "" txid# 
281474976710659 SEND to# [1:7494345186818945235:2491] 2025-04-17T18:06:53.006615Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345186818945235:2491] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builti ... LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:33.494253Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309845:2530] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-17T18:09:33.494426Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309845:2530] txid# 281474976710660 HANDLE EvClientConnected 2025-04-17T18:09:33.495849Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-17T18:09:33.500325Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309845:2530] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-17T18:09:33.500388Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309845:2530] txid# 281474976710660 SEND to# [59:7494345872579309844:2342] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-17T18:09:33.524619Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7494345872579309844:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-17T18:09:33.579311Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] Handle TEvProposeTransaction 2025-04-17T18:09:33.579343Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] TxId# 281474976710661 ProcessProposeTransaction 2025-04-17T18:09:33.579395Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7494345872579309913:2578] 2025-04-17T18:09:33.582143Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\377\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-17T18:09:33.582209Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:09:33.582230Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-17T18:09:33.582941Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:09:33.583035Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:33.583216Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:33.583349Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:33.583398Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T18:09:33.583536Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T18:09:33.587025Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 Status StatusAlreadyExists HANDLE 
{TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T18:09:33.587168Z node 59 :TX_PROXY ERROR: Actor# [59:7494345872579309913:2578] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:33.587203Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309913:2578] txid# 281474976710661 SEND to# [59:7494345872579309844:2342] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T18:09:33.610613Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] Handle TEvProposeTransaction 2025-04-17T18:09:33.610643Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T18:09:33.610686Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7494345872579309936:2589] 2025-04-17T18:09:33.613048Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38626" 2025-04-17T18:09:33.613129Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:09:33.613150Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:33.613195Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:33.613465Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:33.613557Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:33.613612Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T18:09:33.613740Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T18:09:33.621165Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 
2025-04-17T18:09:33.621224Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309936:2589] txid# 281474976710662 SEND to# [59:7494345872579309935:2335] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T18:09:33.705199Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] Handle TEvProposeTransaction 2025-04-17T18:09:33.705231Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T18:09:33.705283Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7494345872579309969:2603] 2025-04-17T18:09:33.708253Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309969:2603] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38652" 2025-04-17T18:09:33.708330Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309969:2603] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:09:33.708357Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309969:2603] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-04-17T18:09:33.708413Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309969:2603] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:33.708762Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309969:2603] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:33.708826Z node 59 :TX_PROXY ERROR: Actor# [59:7494345872579309969:2603] txid# 281474976710663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-04-17T18:09:33.708934Z node 59 :TX_PROXY ERROR: Actor# [59:7494345872579309969:2603] txid# 281474976710663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-17T18:09:33.708965Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345872579309969:2603] txid# 281474976710663 SEND to# [59:7494345872579309968:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:09:33.709524Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=OWIyMjJjOTEtZDQyZmQzMi03NzdkNDhiNC04NWZjZDFhYw==, ActorId: [59:7494345872579309954:2352], ActorState: ExecuteState, TraceId: 01js2f26f7ewa8wjmwsss3ja9s, Create QueryResponse for error on request, msg: 2025-04-17T18:09:33.710091Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] Handle TEvExecuteKqpTransaction 2025-04-17T18:09:33.710115Z node 59 :TX_PROXY DEBUG: actor# [59:7494345851104472465:2111] TxId# 281474976710664 ProcessProposeKqpTransaction 2025-04-17T18:09:33.837693Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7494345851104472440:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:33.837803Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |88.5%| [TA] 
$(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] >> KqpIndexes::SecondaryIndexReplace-UseSink >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-04-17T18:08:56.549909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345715561761071:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.550037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:08:56.988813405 416753 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:08:56.988943655 416753 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:08:57.600201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:58.041038Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26272: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26272 } ] 2025-04-17T18:08:58.187324Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26272: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26272 2025-04-17T18:08:58.601105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.307183Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26272: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26272 } ] 2025-04-17T18:08:59.602047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.606515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001abb/r3tmp/tmpjVcb4f/pdisk_1.dat 2025-04-17T18:09:00.900420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345732741630547:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:00.900638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:00.983067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345732741630547:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 26272, node 1 2025-04-17T18:09:01.083918Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:01.083942Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:01.544708Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345715561761071:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:01.544783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:25756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:01.653368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... E0417 18:09:01.989998874 417108 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:01.990153795 417108 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:02.323244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:09:02.323542Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T18:09:02.323568Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.323575Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.328490Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T18:09:02.328520Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.328527Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.336489Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T18:09:02.336511Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.336510Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". 
Create session OK 2025-04-17T18:09:02.336517Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.336522Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.336527Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.337148Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:02.337162Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:02.340184Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:02.340209Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.340214Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.346293Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T18:09:02.346324Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.346337Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.362558Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T18:09:02.362598Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.362604Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.363024Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T18:09:02.363038Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.363044Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.363607Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:02.363622Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.363627Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.363878Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T18:09:02.363898Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.363904Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.364659Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:02.364676Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.364682Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.364797Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T18:09:02.364834Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.364841Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.367401Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". 
Create session OK 2025-04-17T18:09:02.367441Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.367448Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.404729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741331565669:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.404815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741331565661:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.405177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.409753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:02.411623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345741331565710:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.411698Z node 1 :KQP_WORKLOAD_SERVICE WARN: ... hannels_count=0; 2025-04-17T18:09:32.976944Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Start compute actor [4:7494345869797580325:2382], task: 1 2025-04-17T18:09:32.976961Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.998213s 2025-04-17T18:09:32.977316Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\036\n\016Root/yq/quotas\020\200\202\224\204\200\200\200\200\001\030\020(\001\"\023\n\014subject_type\020\001 \201 \"\021\n\nsubject_id\020\002 \201 \"\022\n\013metric_name\020\003 \201 *\026\n\020limit_updated_at\020\005 2*\022\n\014metric_limit\020\004 \004*\022\n\013metric_name\020\003 \201 *\022\n\014metric_usage\020\006 \004*\021\n\nsubject_id\020\002 \201 *\023\n\014subject_type\020\001 \201 *\026\n\020usage_updated_at\020\007 20\370\247\200\200\200\200@8\004@\000H\001R\022\t!^3\241\334C\001h\021N\t\000\000\004\000\020\000X\000`\000h\004h\003h\002h\005h\001h\000h\006x\000" } } } 2025-04-17T18:09:32.977441Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T18:09:32.977465Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: 2025-04-17T18:09:32.977530Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-04-17T18:09:32.977636Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. Add data: 78 / 78 2025-04-17T18:09:32.977680Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. 
Send data=78, closed=1, bufferActorId=[4:7494345869797580321:2382] 2025-04-17T18:09:32.977700Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 78 2025-04-17T18:09:32.977718Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. Tasks execution finished 2025-04-17T18:09:32.977729Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Waiting finish of sink[0] 2025-04-17T18:09:32.977748Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T18:09:32.977765Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: 2025-04-17T18:09:32.977777Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. Tasks execution finished 2025-04-17T18:09:32.977783Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Waiting finish of sink[0] 2025-04-17T18:09:32.977816Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:32.977832Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. Tasks execution finished 2025-04-17T18:09:32.977840Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Waiting finish of sink[0] 2025-04-17T18:09:32.977898Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580321:2382], SessionActorId: [4:7494345826847902744:2382], Create new TableWriteActor for table `Root/yq/quotas` ([72057594046644480:16:1]). 
lockId=281474976715768 [4:7494345869797580328:2382] 2025-04-17T18:09:32.977928Z node 4 :KQP_COMPUTE DEBUG: Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345826847902744:2382]Open: token=0 2025-04-17T18:09:32.977949Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580321:2382], SessionActorId: [4:7494345826847902744:2382], ProcessRequestQueue [72057594046644480:16:1] NOT READY queue=1 2025-04-17T18:09:32.977992Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580328:2382], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345826847902744:2382]Write: token=0 2025-04-17T18:09:32.978090Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580328:2382], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345826847902744:2382]Close: token=0 2025-04-17T18:09:32.978161Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580327:2382], TxId: 281474976715771, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7494345869797580321:2382] 2025-04-17T18:09:32.978191Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580327:2382], TxId: 281474976715771, task: 1. Finished 2025-04-17T18:09:32.978211Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:32.978229Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. Tasks execution finished 2025-04-17T18:09:32.978243Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580325:2382], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01js2f25se0da4neq0g60erc26. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjNjZmE4ZWMtYjE4NjE0YTktZmZjM2IyZGEtY2ViM2NiNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:09:32.978314Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. pass away 2025-04-17T18:09:32.978383Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715771;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:09:32.978657Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580321:2382], SessionActorId: [4:7494345826847902744:2382], Start immediate commit 2025-04-17T18:09:32.978667Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580328:2382], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345826847902744:2382]SetImmediateCommit 2025-04-17T18:09:32.978681Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580321:2382], SessionActorId: [4:7494345826847902744:2382], Flush data 2025-04-17T18:09:32.978814Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580328:2382], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345826847902744:2382]Send EvWrite to ShardID=72075186224037897, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715768 DataShard: 72075186224037897 Generation: 1 Counter: 17 SchemeShard: 72057594046644480 PathId: 16, Size=136, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3 2025-04-17T18:09:32.980331Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580315:2588], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345844027774023:2588]Recv EvWriteResult from ShardID=72075186224037897, Status=STATUS_COMPLETED, TxId=37, Locks= , Cookie=1 2025-04-17T18:09:32.980362Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580315:2588], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345844027774023:2588]Got completed result TxId=37, TabletId=72075186224037897, Cookie=1, Mode=3, Locks= 2025-04-17T18:09:32.980404Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580301:2588], SessionActorId: [4:7494345844027774023:2588], Committed TxId=0 2025-04-17T18:09:32.998537Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580328:2382], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345826847902744:2382]Recv EvWriteResult from ShardID=72075186224037897, Status=STATUS_COMPLETED, TxId=38, Locks= , Cookie=1 2025-04-17T18:09:32.998577Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580328:2382], Table: `Root/yq/quotas` ([72057594046644480:16:1]), SessionActorId: [4:7494345826847902744:2382]Got completed result TxId=38, TabletId=72075186224037897, Cookie=1, Mode=3, Locks= 2025-04-17T18:09:32.998636Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345869797580321:2382], SessionActorId: [4:7494345826847902744:2382], Committed TxId=0 2025-04-17T18:09:33.438643Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:24228: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:24228 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-04-17T18:08:58.269396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345725082069044:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:58.269438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:08:58.756855963 418264 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:08:58.757016204 418264 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:08:59.276584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.844069Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28489: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:28489 } ] 2025-04-17T18:08:59.849666Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28489: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:28489 2025-04-17T18:09:00.279989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:01.286581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:01.630305Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28489: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:28489 } ] 2025-04-17T18:09:02.128757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:02.132391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345742261938640:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aa1/r3tmp/tmp4PiRWg/pdisk_1.dat 2025-04-17T18:09:02.206993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345742261938640:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:02.289679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:02.357951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345742261938640:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:02.362452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:02.362557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:02.369090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28489, node 1 TClient is connected to server localhost:14591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:02.697283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:03.269794Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345725082069044:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.269859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0417 18:09:03.758817225 418618 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:03.758953662 418618 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:03.779271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:03.779291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:03.779304Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:03.779650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:03.790122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:09:03.948526Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T18:09:03.948556Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:03.948566Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:03.950252Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". 
Create session OK 2025-04-17T18:09:03.950284Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:03.950292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:03.950864Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T18:09:03.950879Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:03.950884Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:03.954996Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:03.955016Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:03.955021Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:03.958573Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T18:09:03.958591Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:03.958598Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:03.967615Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T18:09:03.967650Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:03.967656Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:03.967790Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:03.967812Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:03.967818Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:03.968725Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T18:09:03.968736Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:03.968740Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:03.969387Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T18:09:03.969398Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:03.969403Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:03.970263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:09:04.027460Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:04.027486Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:04.027492Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:04.042710Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T18:09:04.042733Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:04.042739Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:04.051291Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2025-04-17T18:09:04.051307Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:04.051313Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:04.055737Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T18:09:04.055766Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:04.055772Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:04.057141Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:04.057157Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:04.120672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:04.139744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:04.141593Z node 1 :F ... 1402Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 5 SrcEndpoint { ActorId { RawX1: 7494345854314598258 RawX2: 4503616807242475 } } DstEndpoint { ActorId { RawX1: 7494345854314598260 RawX2: 4503616807242477 } } InMemory: true DstStageId: 4 } 2025-04-17T18:09:29.631413Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update output channelId: 2, peer: [4:7494345854314598260:2797] 2025-04-17T18:09:29.631445Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598259:2796], TxId: 281474976710743, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. CustomerSuppliedId : . TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 DstTaskId: 3 SrcEndpoint { ActorId { RawX1: 7494345854314598255 RawX2: 4503616807242473 } } DstEndpoint { ActorId { RawX1: 7494345854314598259 RawX2: 4503616807242476 } } InMemory: true SrcStageId: 1 DstStageId: 2 } Update { Id: 4 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 3 SrcEndpoint { ActorId { RawX1: 7494345854314598259 RawX2: 4503616807242476 } } DstEndpoint { ActorId { RawX1: 7494345854314598251 RawX2: 4503616807242116 } } InMemory: true } 2025-04-17T18:09:29.631468Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598259:2796], TxId: 281474976710743, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. CustomerSuppliedId : . TraceId : 01js2f22g6aj32kwvxmegmef33. 
CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-17T18:09:29.631494Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:09:29.631507Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-17T18:09:29.631538Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Start compute actor [4:7494345854314598260:2797], task: 5 2025-04-17T18:09:29.631561Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Set execution timeout 299.977349s 2025-04-17T18:09:29.631564Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 5 SrcEndpoint { ActorId { RawX1: 7494345854314598258 RawX2: 4503616807242475 } } DstEndpoint { ActorId { RawX1: 7494345854314598260 RawX2: 4503616807242477 } } InMemory: true DstStageId: 4 } 2025-04-17T18:09:29.631593Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710743, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2025-04-17T18:09:29.631606Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:29.631644Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710743, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2025-04-17T18:09:29.631739Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 271646926 2025-04-17T18:09:29.631853Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 5 SrcEndpoint { ActorId { RawX1: 7494345854314598258 RawX2: 4503616807242475 } } DstEndpoint { ActorId { RawX1: 7494345854314598260 RawX2: 4503616807242477 } } InMemory: true DstStageId: 4 } Update { Id: 3 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 5 SrcEndpoint { ActorId { RawX1: 7494345854314598260 RawX2: 4503616807242477 } } DstEndpoint { ActorId { RawX1: 7494345854314598251 RawX2: 4503616807242116 } } InMemory: true } 2025-04-17T18:09:29.631866Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update input channelId: 2, peer: [4:7494345854314598258:2795] 2025-04-17T18:09:29.631900Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-17T18:09:29.631996Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 5 SrcEndpoint { ActorId { RawX1: 7494345854314598258 RawX2: 4503616807242475 } } DstEndpoint { ActorId { RawX1: 7494345854314598260 RawX2: 4503616807242477 } } InMemory: true DstStageId: 4 } Update { Id: 3 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 5 SrcEndpoint { ActorId { RawX1: 7494345854314598260 RawX2: 4503616807242477 } } DstEndpoint { ActorId { RawX1: 7494345854314598251 RawX2: 4503616807242116 } } InMemory: true } 2025-04-17T18:09:29.632019Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-04-17T18:09:29.632043Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710743, task: 5. Finish input channelId: 2, from: [4:7494345854314598258:2795] 2025-04-17T18:09:29.632087Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 271646922 2025-04-17T18:09:29.632099Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-17T18:09:29.632130Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:09:29.632150Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710743, task: 1. Tasks execution finished 2025-04-17T18:09:29.632172Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598258:2795], TxId: 281474976710743, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:09:29.632246Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710743, task: 1. pass away 2025-04-17T18:09:29.632283Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:09:29.632359Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710743;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:09:29.632464Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7494345854314598260:2797], TxId: 281474976710743, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OGJkOThkZjMtY2M1ZjQzOGMtMWExMDgyNTUtZGYyZDNlYzM=. TraceId : 01js2f22g6aj32kwvxmegmef33. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 >> KqpMultishardIndex::DuplicateUpsert [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType+NotNull >> KqpMultishardIndex::DataColumnUpsertMixedSemantic >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink [GOOD] >> KqpIndexes::DuplicateUpsertInterleave >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 26145, MsgBus: 26583 2025-04-17T18:09:23.215921Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345830225589429:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.216022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ce5/r3tmp/tmpdaQuab/pdisk_1.dat 2025-04-17T18:09:23.640487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.683426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.683504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.689619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26145, node 1 2025-04-17T18:09:23.871853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.871885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.871892Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.872016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26583 TClient is connected to server localhost:26583 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.663057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.692582Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:24.710126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.868178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.049098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.153490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.565655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345843110493111:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.565781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.098292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.130492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.175875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.214578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.284446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.359526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.428498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.478481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847405461083:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.478548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.478687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847405461088:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.482889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.494266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345847405461090:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.552842Z node 1 :TX_PROXY ERROR: Actor# [1:7494345847405461146:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.215995Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345830225589429:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.216065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.603909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 28882, MsgBus: 13331 2025-04-17T18:09:30.818388Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345862340502181:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:30.818491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ce5/r3tmp/tmp285ky5/pdisk_1.dat 2025-04-17T18:09:30.949677Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:30.991430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:30.991517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:30.996205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28882, node 2 2025-04-17T18:09:31.049467Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:31.049485Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:31.049491Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:31.049576Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13331 TClient is connected to server localhost:13331 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:09:31.480135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:09:31.502524Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:31.517751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.598543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.736776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.827747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.782358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345875225405825:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:33.782440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:33.828840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:33.863543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:33.887195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:33.914047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:33.939483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:33.986356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.032276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.123989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345879520373780:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.124062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.124227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345879520373785:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.127774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:34.139416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345879520373787:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:34.219990Z node 2 :TX_PROXY ERROR: Actor# [2:7494345879520373843:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:35.081128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:35.815619Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345862340502181:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:35.815690Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Yq_1::CreateQuery_Without_Connection [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 29943, MsgBus: 20397 2025-04-17T18:09:23.244812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345832541404252:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.244917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb4/r3tmp/tmppPkF6q/pdisk_1.dat 2025-04-17T18:09:23.694541Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.712740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.712848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.714843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29943, node 1 2025-04-17T18:09:23.871106Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.871122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.871129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.871256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20397 TClient is connected to server localhost:20397 WaitRootIsUp 'Root'... 
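The KQP_WORKLOAD_SERVICE warnings above show a recurring bootstrap pattern: on first use a node fails to fetch the default resource pool at /Root/.metadata/workload_manager/pools/default (NOT_FOUND), a TPoolCreatorActor then creates it, schedules one retry on "Transaction ... completed, doublechecking", and the follow-up create reports "path exist, request accepts it" -- the pool now exists, so the warnings are benign. A minimal triage sketch for confirming that pattern in a log like this one (the function name and the counting approach are illustrative, not a YDB tool):

import re

def check_default_pool_bootstrap(log_text):
    # Warnings emitted while the default pool does not exist yet:
    fetch_misses = log_text.count("Failed to fetch pool default")
    # Retry scheduled by TPoolCreatorActor after the create transaction:
    retries = log_text.count("completed, doublechecking")
    # Confirmation that the pool path now exists and the request was accepted:
    creates_confirmed = log_text.count("path exist, request accepts it")
    return {"fetch_misses": fetch_misses,
            "retries": retries,
            "creates_confirmed": creates_confirmed}

if __name__ == "__main__":
    import sys
    with open(sys.argv[1]) as f:
        print(check_default_pool_bootstrap(f.read()))

If creates_confirmed is nonzero for every node that reported fetch misses, the NOT_FOUND warnings can be ignored as a create-if-absent race.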
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.735015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.766396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.960695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.141922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.241454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.660283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345845426307906:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.660427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.098915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.145312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.174948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.207289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.240147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.313165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.348423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.433283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849721275865:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.433372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.433484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849721275870:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.439162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.448391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345849721275872:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.548142Z node 1 :TX_PROXY ERROR: Actor# [1:7494345849721275932:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.244312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345832541404252:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.244391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.662729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 62293, MsgBus: 26448 2025-04-17T18:09:31.453464Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345866500686540:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:31.453586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb4/r3tmp/tmp15D1aH/pdisk_1.dat 2025-04-17T18:09:31.572959Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62293, node 2 2025-04-17T18:09:31.597022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:31.597142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:31.623034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:31.654741Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:31.654774Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:31.654782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:31.654915Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26448 TClient is connected to server localhost:26448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:32.143744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.154237Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:32.173261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.255638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.412663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.496562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:34.714220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345879385590193:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.714310Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.758317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.792772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.822750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.892289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.931875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.972868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.004448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.061580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345883680558148:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.061662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.061731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345883680558153:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.064947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:35.076076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345883680558155:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:35.154097Z node 2 :TX_PROXY ERROR: Actor# [2:7494345883680558210:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:36.069514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:36.484064Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345866500686540:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:36.484739Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::NullInIndexTable [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 18710, MsgBus: 25680 2025-04-17T18:09:23.264585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345830327440640:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.264629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb7/r3tmp/tmpggoMsC/pdisk_1.dat 2025-04-17T18:09:23.814400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.841047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.841163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.842823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18710, node 1 2025-04-17T18:09:23.939243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.939261Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.939267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.939364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25680 TClient is connected to server localhost:25680 WaitRootIsUp 'Root'... 
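Each node registration with Hive in these runs steps through the same VolatileState sequence: Unknown -> Disconnected -> Connecting -> Connected. A small sketch, assuming the exact record format shown above, that checks one test's output for out-of-order transitions (the helper is hypothetical; node numbering restarts per test, so apply it to a single "Test command err" block, not a concatenated file):

import re

# Expected Hive registration sequence, copied from the log lines above.
EXPECTED = ("Unknown -> Disconnected",
            "Disconnected -> Connecting",
            "Connecting -> Connected")

def hive_transitions_ok(log_text, node):
    # Collect the "X -> Y" VolatileState transitions Hive reports for one
    # node, in log order, and check they form a prefix of EXPECTED.
    seen = re.findall(rf"Node\({node},.*?VolatileState: (\w+ -> \w+)",
                      log_text)
    return tuple(seen) == EXPECTED[:len(seen)]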
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.657092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.684783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.829706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.988158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.065448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.732451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345843212344262:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.732592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.098675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.171287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.229110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.269518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.307192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.347901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.406388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.466926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847507312220:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.467019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.467272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847507312225:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.470955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.483212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345847507312227:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.560329Z node 1 :TX_PROXY ERROR: Actor# [1:7494345847507312283:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.264789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345830327440640:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.264840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.632046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:28.781470Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "IndexName" index_columns: "IndexColumn" global_index { settings { } } } } 2025-04-17T18:09:28.786959Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:28.787046Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName, IndexColumn: IndexColumn, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345851802280036:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:28.788124Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:09:28.788206Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName, IndexColumn: IndexColumn, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345851802280036:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, 
ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:28.788582Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:28.788621Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName, IndexColumn: IndexColumn, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345851802280036:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotS ... erMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.246587Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-17T18:09:36.246778Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715678 2025-04-17T18:09:36.246815Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.246904Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715678, txId# 281474976710760 2025-04-17T18:09:36.246949Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, 
IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.247184Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715678 2025-04-17T18:09:36.247222Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.247925Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715678, cookie: 281474976715678, txId: 281474976710760, status: StatusAccepted 2025-04-17T18:09:36.248020Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046644480 PathId: 17 2025-04-17T18:09:36.248407Z node 2 :BUILD_INDEX INFO: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715678 2025-04-17T18:09:36.248445Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.251128Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 281474976715678 2025-04-17T18:09:36.251189Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.251458Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715678 2025-04-17T18:09:36.251500Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 
281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.251519Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:09:36.251767Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715678 2025-04-17T18:09:36.251804Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715678, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7494345886059871358:2559], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744913376248, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:36.251816Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715678, subscribers count# 0 2025-04-17T18:09:36.299382Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715678 2025-04-17T18:09:36.299581Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715678 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "IndexName2" index_columns: "IndexColumn2" global_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:09:36.518130Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 8944, msgbus: 15237 2025-04-17T18:06:51.026434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345178132665641:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:51.030758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00136a/r3tmp/tmpV1J37e/pdisk_1.dat 2025-04-17T18:06:51.443167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:51.443272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T18:06:51.447339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:51.481715Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8944, node 1 2025-04-17T18:06:51.511676Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:51.511696Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:51.631358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:51.631378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:51.631386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:51.631486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15237 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:06:51.978613Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] Handle TEvNavigate describe path dc-1 2025-04-17T18:06:51.978665Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345178132666250:2448] HANDLE EvNavigateScheme dc-1 2025-04-17T18:06:51.981543Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345178132666250:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.033288Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345178132666250:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:06:52.049679Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345178132666250:2448] Handle TEvDescribeSchemeResult Forward to# [1:7494345178132666249:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:06:52.074661Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] Handle TEvProposeTransaction 2025-04-17T18:06:52.074691Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:06:52.075514Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345182427633569:2457] 2025-04-17T18:06:52.151664Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:52.155398Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:52.155440Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:52.155605Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:52.156084Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.156403Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:06:52.156497Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:06:52.156734Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:06:52.191821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
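The TX_PROXY DEBUG records around this point trace each schema transaction end to end: Handle TEvProposeTransaction, Bootstrap of the request actor, key-set resolution via SchemeCache (TEvNavigateKeySet / HANDLE EvNavigateKeySetResult), the SEND to schemeshard 72057594046644480, HANDLE EvClientConnected, and, just below, the Status reply. When a transaction stalls, grouping these events by txid# makes the missing step obvious. A rough sketch under that assumption (the "txid# <n> <Event>" layout follows the records above; the grouping idea is a triage convenience, not a YDB utility):

import re
from collections import defaultdict

def group_txproxy_events(log_text):
    # Map txid -> ordered list of event keywords seen for it.
    events = defaultdict(list)
    for txid, word in re.findall(r"txid# (\d+) ([A-Za-z]\w*)", log_text):
        events[txid].append(word)
    return dict(events)

# A healthy schema tx shows Bootstrap ... SEND ... HANDLE ... Status;
# a txid whose list stops before a Status entry never got its reply.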
2025-04-17T18:06:52.208797Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:06:52.208866Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633569:2457] txid# 281474976710657 SEND to# [1:7494345182427633568:2456] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-04-17T18:06:52.253185Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] Handle TEvProposeTransaction 2025-04-17T18:06:52.253247Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:06:52.253302Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345182427633613:2496] 2025-04-17T18:06:52.255785Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:52.255858Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:52.255875Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:52.255944Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:52.256270Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.256372Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:06:52.256418Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:06:52.256556Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:06:52.257029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.261808Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:06:52.261852Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633613:2496] txid# 281474976710658 SEND to# [1:7494345182427633612:2495] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:06:52.385513Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] Handle 
TEvProposeTransaction 2025-04-17T18:06:52.385569Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T18:06:52.385655Z node 1 :TX_PROXY DEBUG: actor# [1:7494345178132665737:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494345182427633633:2506] 2025-04-17T18:06:52.387998Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345182427633633:2506] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\0 ... Actor# [59:7494345888424280534:2531] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-04-17T18:09:36.828756Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280534:2531] txid# 281474976715660 SEND to# [59:7494345888424280533:2342] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-04-17T18:09:36.851989Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7494345888424280533:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:09:36.905412Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] Handle TEvProposeTransaction 2025-04-17T18:09:36.905449Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] TxId# 281474976715661 ProcessProposeTransaction 2025-04-17T18:09:36.905523Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7494345888424280616:2593] 2025-04-17T18:09:36.908398Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-17T18:09:36.908460Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:36.908485Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-17T18:09:36.908658Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:36.908713Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:36.909475Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:09:36.909571Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:36.909771Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:36.909922Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:36.909995Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-17T18:09:36.910132Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 HANDLE EvClientConnected 2025-04-17T18:09:36.913251Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T18:09:36.913384Z node 59 :TX_PROXY ERROR: Actor# [59:7494345888424280616:2593] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:36.913425Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280616:2593] txid# 281474976715661 SEND to# [59:7494345888424280533:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-17T18:09:36.930648Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] Handle TEvProposeTransaction 2025-04-17T18:09:36.930683Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] TxId# 281474976715662 ProcessProposeTransaction 2025-04-17T18:09:36.930735Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7494345888424280640:2605] 2025-04-17T18:09:36.933313Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56946" 2025-04-17T18:09:36.933388Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:36.933411Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:36.933462Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:36.934032Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:36.934154Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:36.934234Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 
TabletId# 72057594046644480} 2025-04-17T18:09:36.934351Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 HANDLE EvClientConnected 2025-04-17T18:09:36.941123Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-17T18:09:36.941183Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345888424280640:2605] txid# 281474976715662 SEND to# [59:7494345888424280639:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-17T18:09:37.000955Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] Handle TEvProposeTransaction 2025-04-17T18:09:37.000988Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] TxId# 281474976715663 ProcessProposeTransaction 2025-04-17T18:09:37.001061Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7494345892719247969:2619] 2025-04-17T18:09:37.003964Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56966" 2025-04-17T18:09:37.004047Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:37.004074Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-17T18:09:37.004238Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:37.004283Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:37.004329Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:37.004600Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:37.004626Z node 59 :TX_PROXY ERROR: Actor# [59:7494345892719247969:2619] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-04-17T18:09:37.004718Z node 59 :TX_PROXY ERROR: Actor# [59:7494345892719247969:2619] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-17T18:09:37.004748Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345892719247969:2619] txid# 281474976715663 SEND to# [59:7494345892719247968:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:09:37.006291Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NmQyNzIyYTItNGJjNDEwNjAtODM2YWNlNDQtNzNmNjcxODM=, ActorId: [59:7494345888424280658:2352], ActorState: ExecuteState, 
TraceId: 01js2f29prd8a9f2p4d0xwabkc, Create QueryResponse for error on request, msg: 2025-04-17T18:09:37.007222Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] Handle TEvExecuteKqpTransaction 2025-04-17T18:09:37.007250Z node 59 :TX_PROXY DEBUG: actor# [59:7494345871244410433:2109] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-04-17T18:09:37.279918Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7494345871244410596:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:37.280000Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> KqpIndexes::DeleteByIndex >> PrivateApi::Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2025-04-17T18:08:56.619767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345713360666078:2176];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.632823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:08:57.008857548 416755 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:08:57.009713401 416755 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:08:57.648725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:57.986351Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15723: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15723 } ] 2025-04-17T18:08:58.082640Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15723: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:15723 2025-04-17T18:08:58.650597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.621527Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15723: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15723 } ] 2025-04-17T18:08:59.659618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.545604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:00.585288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345730540535563:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:00.654551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.666577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345730540535563:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001acd/r3tmp/tmpNQJXvc/pdisk_1.dat 2025-04-17T18:09:00.822377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345730540535563:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 15723, node 1 2025-04-17T18:09:01.015543Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:01.015752Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:01.228003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:01.229991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:01.230006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:01.230015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:01.230133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-17T18:09:01.622307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345713360666078:2176];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:01.622392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:01.681132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:01.722853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 E0417 18:09:02.011911411 417163 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:02.012072589 417163 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:02.102319Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:02.102360Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:02.118480Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T18:09:02.118516Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.118525Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.119850Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2025-04-17T18:09:02.119873Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.119881Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.124744Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T18:09:02.124771Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.124777Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.126403Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T18:09:02.126427Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.126432Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.139032Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T18:09:02.139058Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.139063Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.139852Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T18:09:02.139863Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.139869Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.159442Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T18:09:02.159487Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.159498Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.160399Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:02.160428Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.160434Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.161175Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T18:09:02.161195Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.161200Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.162036Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:02.162064Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.162069Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.163119Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:02.163142Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.163152Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.163954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". 
Create session OK 2025-04-17T18:09:02.163975Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.163979Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.164913Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T18:09:02.164941Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.164963Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.173562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:02.176853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:02.181198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345739130471076:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource po ... .986057Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: (this FQ_QUOTA_SERVICE record repeats with advancing timestamps through 2025-04-17T18:09:36.039402Z; the intervening repetitions are elided) [good] Yq_1::CreateQuery_Without_Connection >> KqpMultishardIndex::DataColumnWrite+UseSink >> KqpUniqueIndex::InsertNullInPk >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestRestartTablets >> KqpIndexMetadata::HandleNotReadyIndex >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] >> KqpIndexes::Uint8Index [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2755, MsgBus: 11983 2025-04-17T18:04:44.140811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344634428875690:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:44.141147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bea/r3tmp/tmpfr9aKS/pdisk_1.dat 2025-04-17T18:04:44.495060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:04:44.509465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:04:44.509569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:04:44.512463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2755, node 1 2025-04-17T18:04:44.649147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:04:44.649168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:04:44.649180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:04:44.649331Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11983 TClient is connected to server localhost:11983 WaitRootIsUp 'Root'... 
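The KqpQueryService::TableSink_Oltp_Replace-UseSink test whose output follows creates the column-store table quoted just below and, judging by its name, exercises REPLACE writes against it. A minimal YQL sketch of such a write, assuming the table from the quoted DDL (the values are illustrative):

    REPLACE INTO `/Root/ColumnShard1` (Col1, Col2) VALUES (1, 10);

REPLACE overwrites the whole row for an existing key and inserts it otherwise, which is what distinguishes it from plain INSERT in this test family.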
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:04:45.160989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:04:45.173758Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnShard1` (Col1 Int64 NOT NULL, Col2 Int32 NOT NULL, PRIMARY KEY (Col1)) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000); 2025-04-17T18:04:47.064078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344647313778105:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.064189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:04:47.736735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:04:49.138269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494344634428875690:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:04:49.138337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:04:52.665103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:52.665378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:52.665707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:52.665884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:52.666044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:52.666452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:52.666621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:52.666785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:52.666953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:52.667088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:52.667198Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:52.667326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7494344668788618699:2376];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:52.667427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:52.667486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:52.667676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:52.667798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:04:52.667906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:04:52.668011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:04:52.668147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:04:52.668255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:04:52.668369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:04:52.668498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:04:52.668640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:04:52.668764Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038881;self_id=[1:7494344668788618641:2356];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:04:52.703415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038869;self_id=[1:7494344668788618624:2350];tablet_id=72075186224038869;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:04:52.703467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038869;self_id=[1:7494344668788618624:2350];tablet_id=72075186224038869;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:04:52.703691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038869;self_id=[1:7494344668788618624:2350];tablet_id=72075186224038869;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:04:52.703809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038869;self_id=[1:7494344668788618624:2350];tablet_id=72075186224038869;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesClean ... stract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:09:23.627840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:09:23.627910Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:09:23.628245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:09:23.628312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:09:23.628498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:09:23.628533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:09:23.674354Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.676908Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.679251Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.685901Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 
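Each column shard above replays the same normalizer chain (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on) when it starts, so the shard count chosen in the DDL directly scales this startup phase. The table quoted earlier in this output was created with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000; a hedged variant for a smaller local run (table name and count are illustrative, the rest follows the quoted statement):

    CREATE TABLE `/Root/ColumnShardSmall` (
        Col1 Int64 NOT NULL,
        Col2 Int32 NOT NULL,
        PRIMARY KEY (Col1)
    )
    PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 64);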
2025-04-17T18:09:23.692916Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.699701Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.709360Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.715266Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.721911Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.722416Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-04-17T18:09:23.750317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345831326987722:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:23.750422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:23.750475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345831326987727:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:23.754172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:23.768734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345831326987729:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:09:23.830463Z node 2 :TX_PROXY ERROR: Actor# [2:7494345831326987780:2591] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:23.917253Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-04-17T18:09:23.950476Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; Trying to start YDB, gRPC: 17949, MsgBus: 29483 2025-04-17T18:09:25.813990Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345839783088200:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:25.814088Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bea/r3tmp/tmpTynJrG/pdisk_1.dat 2025-04-17T18:09:26.079124Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:26.079634Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:26.079723Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:26.082159Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17949, node 3 2025-04-17T18:09:26.194841Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:26.194866Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:26.194878Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:26.195058Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29483 TClient is connected to server localhost:29483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
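The records above show the workload manager materializing the built-in pool /Root/.metadata/workload_manager/pools/default; its logged properties (concurrent_query_limit, queue_size, database_load_cpu_threshold, and so on, all -1, which in this output appears to mean "no limit") mirror the settings a user-defined pool accepts. A minimal YQL sketch of creating such a pool, assuming a hypothetical name example_pool and illustrative limits (setting names follow the logged property keys):

    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100,
        DATABASE_LOAD_CPU_THRESHOLD = 80
    );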
2025-04-17T18:09:26.774808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:30.568252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345861257925344:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:30.568370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:30.612856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.814634Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345839783088200:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:30.814839Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:30.869463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:31.231772Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345865552894033:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:31.231971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:31.232075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345865552894038:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:31.237748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:09:31.254955Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345865552894040:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:09:31.352578Z node 3 :TX_PROXY ERROR: Actor# [3:7494345865552894116:3271] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD]
Test command err: 2025-04-17T18:08:56.578811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345715176166728:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.579778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:08:56.899697604 416766 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:08:56.899850267 416766 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:08:57.586209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:57.869248Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17578: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17578 } ] 2025-04-17T18:08:58.054630Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17578: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17578 2025-04-17T18:08:58.590684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:59.351362Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17578: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17578 } ] 2025-04-17T18:08:59.603995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.610744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:00.922475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:00.924150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345732356036326:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ab7/r3tmp/tmp7JTQWJ/pdisk_1.dat 2025-04-17T18:09:01.006358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345732356036326:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 17578, node 1 TClient is connected to server localhost:65141 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-17T18:09:01.571957Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345715176166728:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:01.572004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:01.655985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... E0417 18:09:01.902896885 417146 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:01.903048215 417146 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:02.216566Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:02.216599Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.216608Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:02.217910Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T18:09:02.217933Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.217939Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:02.219704Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T18:09:02.219725Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.219730Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:02.220643Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-17T18:09:02.220663Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.220668Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:02.221432Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". 
Create session OK 2025-04-17T18:09:02.221447Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.221453Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:02.221514Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:02.221528Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.221532Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:02.226621Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-17T18:09:02.226639Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.226645Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:02.226678Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T18:09:02.226692Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.226697Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:02.241655Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T18:09:02.241702Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.241710Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:02.242818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:09:02.248118Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T18:09:02.248144Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.248365Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:02.251141Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:02.251167Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.251171Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:02.266685Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T18:09:02.266725Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.266731Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:02.275094Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:02.275123Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:02.285153Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T18:09:02.285183Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.285189Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:02.302082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345740945971504:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.302205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.302541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345740945971516:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.307081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:09:02.308508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345740945971548:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:02.308565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345740945971551:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error ... h_limit=8388608;inputs=0;input_channels_count=0; 2025-04-17T18:09:37.642906Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599232:2386], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=NTg5MDQxOWQtZjY3YmNkNjQtNTdjZTBiZDktZDJjNThhOWM=. TraceId : 01js2f2ab86dn340p2kb4696xe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Start compute actor [7:7494345888618599232:2386], task: 1 2025-04-17T18:09:37.642935Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599232:2386], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=NTg5MDQxOWQtZjY3YmNkNjQtNTdjZTBiZDktZDJjNThhOWM=. TraceId : 01js2f2ab86dn340p2kb4696xe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.997944s 2025-04-17T18:09:37.643329Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599232:2386], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=NTg5MDQxOWQtZjY3YmNkNjQtNTdjZTBiZDktZDJjNThhOWM=. TraceId : 01js2f2ab86dn340p2kb4696xe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\035\n\rRoot/yq/nodes\020\200\202\224\204\200\200\200\200\001\030\022(\001\"\r\n\006tenant\020\001 \201 \"\r\n\007node_id\020\003 \002*\024\n\016active_workers\020\005 \004*\022\n\013data_center\020\013 \201 *\017\n\texpire_at\020\010 2*\017\n\010hostname\020\004 \201 *\022\n\013instance_id\020\002 \201 *\027\n\021interconnect_port\020\t \002*\026\n\020memory_allocated\020\007 \004*\022\n\014memory_limit\020\006 \004*\023\n\014node_address\020\n \201 *\r\n\007node_id\020\003 \002*\r\n\006tenant\020\001 \201 0\237\200\200\200\200\200@8\007@\000H\001R\022\t: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:09:37.644862Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599228:2386], SessionActorId: [7:7494345849963890982:2386], Start immediate commit 2025-04-17T18:09:37.644876Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599235:2386], Table: `Root/yq/nodes` ([72057594046644480:18:1]), SessionActorId: [7:7494345849963890982:2386]SetImmediateCommit 2025-04-17T18:09:37.644893Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599228:2386], SessionActorId: [7:7494345849963890982:2386], Flush data 2025-04-17T18:09:37.645041Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599235:2386], Table: `Root/yq/nodes` ([72057594046644480:18:1]), SessionActorId: [7:7494345849963890982:2386]Send EvWrite to ShardID=72075186224037899, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710687 DataShard: 72075186224037899 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 18, Size=212, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3 2025-04-17T18:09:37.651266Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599235:2386], Table: `Root/yq/nodes` ([72057594046644480:18:1]), SessionActorId: [7:7494345849963890982:2386]Recv EvWriteResult from ShardID=72075186224037899, Status=STATUS_COMPLETED, TxId=7, Locks= , Cookie=1 2025-04-17T18:09:37.651307Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599235:2386], Table: `Root/yq/nodes` ([72057594046644480:18:1]), SessionActorId: [7:7494345849963890982:2386]Got completed result TxId=7, TabletId=72075186224037899, Cookie=1, Mode=3, Locks= 
2025-04-17T18:09:37.651360Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494345888618599228:2386], SessionActorId: [7:7494345849963890982:2386], Committed TxId=0 2025-04-17T18:09:38.044298Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:1507: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:1507 E0417 18:09:38.176189167 431767 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:38.176359560 431767 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:38.261671Z node 7 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:1507: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:1507
>> KqpUniqueIndex::UpdateOnFkSelectResultSameValue
>> ObjectDistribution::TestManyIrrelevantNodes [GOOD]
>> Sequencer::Basic1 [GOOD]
>> StoragePool::TestDistributionRandomProbability
>> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SecondaryIndexSelect [GOOD]
Test command err: Trying to start YDB, gRPC: 12396, MsgBus: 5925 2025-04-17T18:09:23.215576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345832047596553:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.216992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cae/r3tmp/tmpByv7nB/pdisk_1.dat 2025-04-17T18:09:23.676441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.680351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.682254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.707552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12396, node 1 2025-04-17T18:09:23.871669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.871710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.871723Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.871847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5925 TClient is connected to server localhost:5925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.670206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:09:24.702739Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:24.720721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.888374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.049605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.137807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.434084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345844932500197:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.434271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.098132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.136775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.174232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.202901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.234685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.266882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.323931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.439845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849227468163:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.439935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.440255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849227468168:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.443451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.453020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345849227468170:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.512041Z node 1 :TX_PROXY ERROR: Actor# [1:7494345849227468224:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.215998Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345832047596553:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.216057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.612823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 2851, MsgBus: 4913 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cae/r3tmp/tmpUMK12a/pdisk_1.dat 2025-04-17T18:09:31.958740Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:32.039781Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:32.051032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:32.051107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:32.052861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2851, node 2 2025-04-17T18:09:32.158785Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:32.158804Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:32.158812Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:32.158920Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4913 TClient is connected to server localhost:4913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:09:32.648265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.666627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.751951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:32.930793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:09:33.017959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:35.183326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345880447734868:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.183417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.232241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.263271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.337122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.365965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.399681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.429356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.457872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.544065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345880447735531:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.544198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.544260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345880447735536:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.547930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:35.558902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345880447735538:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:35.620968Z node 2 :TX_PROXY ERROR: Actor# [2:7494345880447735593:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:36.674447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::Uint8Index [GOOD]
Test command err: Trying to start YDB, gRPC: 24515, MsgBus: 2403 2025-04-17T18:09:25.195371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345837250727370:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:25.195415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001caa/r3tmp/tmp3JfYoM/pdisk_1.dat 2025-04-17T18:09:25.710566Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:25.724685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:25.724774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:25.727203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24515, node 1 2025-04-17T18:09:25.914160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:25.914200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:25.914210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:25.914323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2403 TClient is connected to server localhost:2403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-17T18:09:26.496316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.518263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:26.526110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.689347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.837021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.922704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:28.513455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345850135631032:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:28.514880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:28.903983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:28.941624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:28.978065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.015134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.052217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.097039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.171071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.238639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345854430598998:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:29.238715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:29.239037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345854430599003:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:29.248220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:29.257927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345854430599005:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:29.331619Z node 1 :TX_PROXY ERROR: Actor# [1:7494345854430599060:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:30.195552Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345837250727370:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:30.195652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:30.384088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:32.297904Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f248y6spcssvx8bgjgb2s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU5NmU2NDUtNGNhNjEzNmYtNmI2ZWU3MTMtOTE3N2NiNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:32.314267Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODU5NmU2NDUtNGNhNjEzNmYtNmI2ZWU3MTMtOTE3N2NiNDU=, ActorId: [1:7494345858725566690:2513], ActorState: ExecuteState, TraceId: 01js2f248y6spcssvx8bgjgb2s, Create QueryResponse for error on request, msg: 2025-04-17T18:09:32.799016Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345867315501912:2599], TxId: 281474976710678, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODU5NmU2NDUtNGNhNjEzNmYtNmI2ZWU3MTMtOTE3N2NiNDU=. TraceId : 01js2f255a4b757axysnxtzzrp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:09:32.799557Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345867315501913:2600], TxId: 281474976710678, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2f255a4b757axysnxtzzrp. SessionId : ydb://session/3?node_id=1&id=ODU5NmU2NDUtNGNhNjEzNmYtNmI2ZWU3MTMtOTE3N2NiNDU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494345867315501909:2513], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:09:32.800003Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODU5NmU2NDUtNGNhNjEzNmYtNmI2ZWU3MTMtOTE3N2NiNDU=, ActorId: [1:7494345858725566690:2513], ActorState: ExecuteState, TraceId: 01js2f255a4b757axysnxtzzrp, Create QueryResponse for error on request, msg: 2025-04-17T18:09:33.740794Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f25mj7mwwqtq5xmfnvqr8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU5NmU2NDUtNGNhNjEzNmYtNmI2ZWU3MTMtOTE3N2NiNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:33.741034Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODU5NmU2NDUtNGNhNjEzNmYtNmI2ZWU3MTMtOTE3N2NiNDU=, ActorId: [1:7494345858725566690:2513], ActorState: ExecuteState, TraceId: 01js2f25mj7mwwqtq5xmfnvqr8, Create QueryResponse for error on request, msg: 2025-04-17T18:09:33.770456Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:33.798117Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:33.825044Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:34.464169Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:34.506514Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 21372, MsgBus: 25740 2025-04-17T18:09:35.132167Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345880643154075:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:35.133504Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001caa/r3tmp/tmpMtWbWx/pdisk_1.dat 2025-04-17T18:09:35.222843Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21372, node 2 2025-04-17T18:09:35.267652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:35.267742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:35.268977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:35.282619Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:35.282639Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:35.282646Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:35.282908Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25740 TClient is connected to server localhost:25740 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:35.708122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:35.712903Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:35.722385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:35.773223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:35.943456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:36.050646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:38.487338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345893528057755:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.487425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.544993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.582314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.653988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.693109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.767070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.805958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.877347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.977015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345893528058426:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.977117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.977179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345893528058431:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.980132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:38.991590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345893528058433:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:39.089359Z node 2 :TX_PROXY ERROR: Actor# [2:7494345897823025784:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:40.130615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:40.134775Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345880643154075:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:40.136243Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:40.306346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:09:40.356966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480
>> THiveTest::TestRestartTablets [GOOD]
>> THiveTest::TestServerlessComputeResourcesMode
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD]
>> THiveTest::TestFollowersCrossDC_Tight [GOOD]
>> THiveTest::TestFollowersCrossDC_MovingLeader
>> KqpUniqueIndex::UpdateFkPkOverlap [GOOD]
>> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD]
>> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD]
Test command err: Starting YDB, grpc: 12955, msgbus: 29270 2025-04-17T18:06:51.373699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345177672779344:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:51.374610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001366/r3tmp/tmpigoM2U/pdisk_1.dat 2025-04-17T18:06:51.726790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:51.729057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:51.729329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12955, node 1 2025-04-17T18:06:51.740454Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:51.742298Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:51.762499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:51.792504Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use
file: (empty maybe) 2025-04-17T18:06:51.792537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:51.792552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:51.792693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29270 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:06:52.010789Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] Handle TEvNavigate describe path dc-1 2025-04-17T18:06:52.010866Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747381:2441] HANDLE EvNavigateScheme dc-1 2025-04-17T18:06:52.012013Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747381:2441] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.043936Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747381:2441] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:06:52.060274Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747381:2441] Handle TEvDescribeSchemeResult Forward to# [1:7494345181967747380:2440] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:06:52.082479Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] Handle TEvProposeTransaction 2025-04-17T18:06:52.082506Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:06:52.082628Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345181967747394:2447] 2025-04-17T18:06:52.178071Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:52.178155Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:06:52.178190Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:52.178309Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:52.178561Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.178703Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:06:52.178751Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:06:52.178885Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:06:52.192159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.209344Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:06:52.209399Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747394:2447] txid# 281474976710657 SEND to# [1:7494345181967747393:2446] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T18:06:52.252155Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] Handle TEvProposeTransaction 2025-04-17T18:06:52.252179Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:06:52.252209Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345181967747437:2486] 2025-04-17T18:06:52.254426Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:52.254476Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:06:52.254489Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:52.254653Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:52.254884Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:52.254986Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:06:52.255024Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:06:52.255152Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:06:52.255584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:52.257962Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:06:52.258001Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345181967747437:2486] txid# 281474976710658 SEND to# [1:7494345181967747436:2485] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:06:54.232775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345190557682125:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:54.232775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345190557682133:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:54.232847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:54.233101Z node 1 :TX_PROXY DEBUG: actor# [1:7494345177672779580:2115] H ... strator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:40.680957Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:40.680992Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:40.681302Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:40.681389Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:40.681428Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-17T18:09:40.681557Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] txid# 281474976715661 HANDLE EvClientConnected 2025-04-17T18:09:40.694357Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-17T18:09:40.694429Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168863:2597] txid# 281474976715661 SEND to# [59:7494345902129168862:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-17T18:09:40.806758Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] Handle TEvProposeTransaction 2025-04-17T18:09:40.806787Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-17T18:09:40.806835Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7494345902129168883:2611] 2025-04-17T18:09:40.809281Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:39578" 2025-04-17T18:09:40.809358Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:40.809382Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:40.809439Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:40.809835Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] 
txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:40.809966Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:40.810026Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-17T18:09:40.810198Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 HANDLE EvClientConnected 2025-04-17T18:09:40.810791Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:40.816228Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-17T18:09:40.816287Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168883:2611] txid# 281474976715662 SEND to# [59:7494345902129168882:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-17T18:09:40.893785Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] Handle TEvProposeTransaction 2025-04-17T18:09:40.893818Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-17T18:09:40.893881Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7494345902129168914:2628] 2025-04-17T18:09:40.896478Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:39600" 2025-04-17T18:09:40.896563Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:40.896590Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:40.896641Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:40.896961Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:40.897102Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-04-17T18:09:40.897166Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-17T18:09:40.897343Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 HANDLE EvClientConnected 2025-04-17T18:09:40.915670Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-17T18:09:40.915741Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168914:2628] txid# 281474976715663 SEND to# [59:7494345902129168913:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-17T18:09:40.922375Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7494345880654331366:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:40.922438Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:40.974162Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] Handle TEvProposeTransaction 2025-04-17T18:09:40.974681Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-04-17T18:09:40.974740Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7494345902129168949:2646] 2025-04-17T18:09:40.977954Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU4MCwiaWF0IjoxNzQ0OTEzMzgwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.I6FGVQY-IWpPoGfk3MkQGZ784gfmLwUXJMbU67tS_XT2NtRtF03M-fnGY4eq7mL4msFfLgaiENPlODHWOMS2sbglSIJFt1ZfsaVhih_4S4dJEyObWOqaxZAjimSc-UgR0hFIhWy67x0IlSRB2fc70QrmFrJetK3ZJA-LW6t42VkHpRNz65u_W5iezSnxu9qb_mIkSe-3pA9mxqbwjO0rA_bSssVKvUCziWn_GBZIUBPU2YbYunlcqB1heJcTfUUrJ-qcq1PO5-hKPnmTkXSl5Mx41bk7aDFiNNbOodL9WB7O_mQd4WGQo39A36o2OEFm7H2p2kgc2a945PCJQ9kXtQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU4MCwiaWF0IjoxNzQ0OTEzMzgwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:39638" 2025-04-17T18:09:40.978033Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:40.978059Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-17T18:09:40.978415Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:40.978467Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 
IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:40.978513Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:40.978775Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:40.978801Z node 59 :TX_PROXY ERROR: Actor# [59:7494345902129168949:2646] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-04-17T18:09:40.978893Z node 59 :TX_PROXY ERROR: Actor# [59:7494345902129168949:2646] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-17T18:09:40.978924Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345902129168949:2646] txid# 281474976715664 SEND to# [59:7494345902129168948:2362] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:09:40.979195Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=OWQ4ZjI0NDgtYjA3ZGQxNWUtZmZmYjNmYTktNWNmOTVlMjk=, ActorId: [59:7494345902129168936:2362], ActorState: ExecuteState, TraceId: 01js2f2djwe02p0j80z1vfjgtz, Create QueryResponse for error on request, msg: 2025-04-17T18:09:40.979535Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] Handle TEvExecuteKqpTransaction 2025-04-17T18:09:40.979556Z node 59 :TX_PROXY DEBUG: actor# [59:7494345880654331589:2113] TxId# 281474976715665 ProcessProposeKqpTransaction >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 11788, msgbus: 19550 2025-04-17T18:06:53.934240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345187567653027:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:53.934464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00135a/r3tmp/tmpqx7yvf/pdisk_1.dat 2025-04-17T18:06:54.272345Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11788, node 1 2025-04-17T18:06:54.293841Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:54.293870Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:54.330821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:54.330944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:54.335212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:54.350390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:54.350411Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:54.350418Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-17T18:06:54.350534Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19550 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:06:54.544332Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] Handle TEvNavigate describe path dc-1 2025-04-17T18:06:54.544385Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621053:2438] HANDLE EvNavigateScheme dc-1 2025-04-17T18:06:54.545522Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621053:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:54.586836Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621053:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:06:54.602330Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621053:2438] Handle TEvDescribeSchemeResult Forward to# [1:7494345191862621052:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:06:54.625241Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] Handle TEvProposeTransaction 2025-04-17T18:06:54.625274Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:06:54.625411Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345191862621068:2446] 2025-04-17T18:06:54.736103Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:54.736199Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:06:54.736220Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:54.736288Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:54.736597Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:54.736713Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:06:54.736767Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:06:54.736974Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:06:54.737745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.743050Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:06:54.743237Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621068:2446] txid# 281474976710657 SEND to# [1:7494345191862621067:2445] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T18:06:54.754987Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] Handle TEvProposeTransaction 2025-04-17T18:06:54.755010Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:06:54.755043Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345191862621110:2484] 2025-04-17T18:06:54.757372Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:54.757438Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:06:54.757453Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:54.757496Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:54.757797Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:54.757882Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:06:54.757923Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:06:54.758035Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:06:54.758422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:54.760664Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:06:54.760710Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621110:2484] txid# 281474976710658 SEND to# [1:7494345191862621109:2483] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:06:54.786480Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] Handle TEvProposeTransaction 2025-04-17T18:06:54.786506Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T18:06:54.786543Z node 1 :TX_PROXY DEBUG: actor# [1:7494345187567653256:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494345191862621128:2494] 2025-04-17T18:06:54.788907Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345191862621128:2494] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000\n!\010\000\022\035\010\001\020\200\010\032\024ordinaryuser@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\03 ... LE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:41.070767Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524561:2574] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-17T18:09:41.070919Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524561:2574] txid# 281474976710660 HANDLE EvClientConnected 2025-04-17T18:09:41.073665Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524561:2574] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T18:09:41.073786Z node 59 :TX_PROXY ERROR: Actor# [59:7494345908942524561:2574] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:41.073811Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524561:2574] txid# 281474976710660 SEND to# [59:7494345904647557188:2341] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-17T18:09:41.106810Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] Handle TEvProposeTransaction 2025-04-17T18:09:41.106849Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] TxId# 281474976710661 ProcessProposeTransaction 2025-04-17T18:09:41.106911Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7494345908942524585:2586] 2025-04-17T18:09:41.109552Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45758" 2025-04-17T18:09:41.109616Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:41.109638Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:41.109689Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7494345908942524585:2586] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:41.110023Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:41.110126Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:41.110623Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T18:09:41.110788Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T18:09:41.122933Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-17T18:09:41.122996Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524585:2586] txid# 281474976710661 SEND to# [59:7494345908942524584:2334] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T18:09:41.245189Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] Handle TEvProposeTransaction 2025-04-17T18:09:41.245225Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T18:09:41.245277Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7494345908942524605:2600] 2025-04-17T18:09:41.247967Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45764" 2025-04-17T18:09:41.248041Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:41.248063Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:41.248122Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:41.248415Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:41.248526Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-04-17T18:09:41.248581Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T18:09:41.248720Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T18:09:41.249281Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.252607Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-17T18:09:41.252669Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524605:2600] txid# 281474976710662 SEND to# [59:7494345908942524604:2347] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T18:09:41.289862Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7494345887467687240:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:41.289944Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:41.320937Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] Handle TEvProposeTransaction 2025-04-17T18:09:41.320968Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T18:09:41.321013Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7494345908942524647:2623] 2025-04-17T18:09:41.323618Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU4MSwiaWF0IjoxNzQ0OTEzMzgxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.JRek3PfjupkYwFABGMvnm-p6Ie5uNI1lFjgyNJAFHu-L6vlEAv87KWSToF9HES32AaOwtNcuyHMH9uG0I6QLV6smzMJ1GL7evclbfwC55tL3EaozIeMsz8zEhvujJu68Jyxa3CvkFKzuEzEFqbsOtjiKNwlSoo14pmc8B2px83FMHOHIJVZzfR-GjzMzErGX2iQFGri_akbFnb6bVOPEwCcJ9Qo7Dd_vmbOLOIztqRd-4zNlovgGKS-iqod2GAvX-2RSmse5RBI0Sfx_BSZbIQ3WFRR74-xtxirPpnbc2LhOU6pSnQXH-E04WeMZXeHKmid79Bv7N0l-vlwa9rQDyw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU4MSwiaWF0IjoxNzQ0OTEzMzgxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45794" 2025-04-17T18:09:41.323684Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:41.323709Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-17T18:09:41.323838Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 
1 ResultSet error count: 0 2025-04-17T18:09:41.323878Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:41.323919Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:41.324160Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:41.324182Z node 59 :TX_PROXY ERROR: Actor# [59:7494345908942524647:2623] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-04-17T18:09:41.324276Z node 59 :TX_PROXY ERROR: Actor# [59:7494345908942524647:2623] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-17T18:09:41.324304Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345908942524647:2623] txid# 281474976710663 SEND to# [59:7494345908942524646:2353] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:09:41.325060Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NTJjMjcwYjAtZjI1ZmEyYjktMjhiMTU0YjktYTUwY2ZlMmU=, ActorId: [59:7494345908942524628:2353], ActorState: ExecuteState, TraceId: 01js2f2dxq7emger7b8rarqmwn, Create QueryResponse for error on request, msg: 2025-04-17T18:09:41.325463Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] Handle TEvExecuteKqpTransaction 2025-04-17T18:09:41.325485Z node 59 :TX_PROXY DEBUG: actor# [59:7494345887467687285:2112] TxId# 281474976710664 ProcessProposeKqpTransaction >> KqpIndexes::DuplicateUpsertInterleave [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 25670, MsgBus: 24428 2025-04-17T18:09:23.219389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345829667098233:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.219495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ccc/r3tmp/tmpEQArBf/pdisk_1.dat 2025-04-17T18:09:23.754796Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.780968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.781065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.782408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25670, node 1 2025-04-17T18:09:23.871660Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.871688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.871701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.871850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:24428 TClient is connected to server localhost:24428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.601589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.633926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.792400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.976755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.047057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.666704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345842552001812:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.666855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.098165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.167985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.205398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.247348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.286211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.347599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.382321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.453247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345846846969776:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.453350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.453585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345846846969781:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.456416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.466360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345846846969783:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.567217Z node 1 :TX_PROXY ERROR: Actor# [1:7494345846846969840:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.217748Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345829667098233:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.217808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.612716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 9926, MsgBus: 13631 2025-04-17T18:09:32.499128Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345869892928492:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:32.499190Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ccc/r3tmp/tmpQOpS9K/pdisk_1.dat 2025-04-17T18:09:32.624670Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:32.650719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:32.650806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:32.652492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9926, node 2 2025-04-17T18:09:32.728277Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:32.728298Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:32.728306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:32.728429Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13631 TClient is connected to server localhost:13631 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:33.203852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.211997Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:33.226107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.314033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.511903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.592060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:35.749907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345882777832139:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.749981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.799036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.835264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.915446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.952964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.995320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:36.088278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:36.165993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:36.287940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345887072800112:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:36.288030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:36.288514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345887072800117:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:36.293109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:36.308391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345887072800119:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:36.365405Z node 2 :TX_PROXY ERROR: Actor# [2:7494345887072800174:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:37.364518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:37.502458Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345869892928492:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:37.505843Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD]
>> KqpIndexes::CheckUpsertNonEquatableType-NotNull
>> KqpIndexes::SelectConcurentTX2
>> KqpIndexes::SecondaryIndexOrderBy
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD]
>> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD]
>> THiveTest::TestHiveBalancerWithFollowers [GOOD]
>> THiveTest::TestHiveBalancerWithLimit
>> KqpMultishardIndex::DataColumnUpsertMixedSemantic [GOOD]
>> KqpMultishardIndex::DataColumnSelect
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71
>> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD]
Test command err:
Starting YDB, grpc: 19748, msgbus: 24649 2025-04-17T18:06:58.711212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345207734774963:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:58.711312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001350/r3tmp/tmp1VxwqU/pdisk_1.dat 2025-04-17T18:06:59.025304Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19748, node 1 2025-04-17T18:06:59.070898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:59.071087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:59.097607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:59.131766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:59.131786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file:
(empty maybe) 2025-04-17T18:06:59.131800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:59.131924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24649 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:06:59.332651Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] Handle TEvNavigate describe path dc-1 2025-04-17T18:06:59.332700Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029742990:2439] HANDLE EvNavigateScheme dc-1 2025-04-17T18:06:59.333752Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029742990:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:59.363519Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029742990:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:06:59.376543Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029742990:2439] Handle TEvDescribeSchemeResult Forward to# [1:7494345212029742989:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:06:59.403938Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] Handle TEvProposeTransaction 2025-04-17T18:06:59.403975Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:06:59.404127Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345212029743003:2445] 2025-04-17T18:06:59.522143Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:59.522260Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:59.522280Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:59.522367Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:59.522708Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:59.522835Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:06:59.522894Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:06:59.523071Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:06:59.523835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.526475Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:06:59.526538Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743003:2445] txid# 281474976710657 SEND to# [1:7494345212029743002:2444] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T18:06:59.540195Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] Handle TEvProposeTransaction 2025-04-17T18:06:59.540227Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:06:59.540257Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345212029743046:2484] 2025-04-17T18:06:59.541931Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:59.541984Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:59.541995Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:59.542027Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:59.542288Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:59.542353Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:06:59.542386Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:06:59.542481Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:06:59.542793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.544351Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:06:59.544388Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743046:2484] txid# 281474976710658 SEND to# [1:7494345212029743045:2483] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:06:59.579647Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] Handle TEvProposeTransaction 2025-04-17T18:06:59.579713Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T18:06:59.579759Z node 1 :TX_PROXY DEBUG: actor# [1:7494345207734775206:2123] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494345212029743064:2494] 2025-04-17T18:06:59.582308Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345212029743064:2494] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44910" 2025-04-17T18:06:59.582368Z node 1 :TX_PROXY DEBUG: ... 63038Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621706:2523] txid# 281474976715660 HANDLE EvClientConnected 2025-04-17T18:09:42.764359Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:09:42.775520Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621706:2523] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-04-17T18:09:42.775601Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621706:2523] txid# 281474976715660 SEND to# [59:7494345913213621705:2342] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-04-17T18:09:42.797268Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7494345913213621705:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:09:42.874539Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] Handle TEvProposeTransaction 2025-04-17T18:09:42.874572Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-04-17T18:09:42.874628Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7494345913213621788:2585] 2025-04-17T18:09:42.877741Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-17T18:09:42.877793Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:42.877816Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-17T18:09:42.877989Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:42.878029Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:42.878632Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:09:42.878740Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:42.878932Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:42.879080Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:42.879135Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-17T18:09:42.879279Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 HANDLE EvClientConnected 2025-04-17T18:09:42.882745Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T18:09:42.882884Z node 59 :TX_PROXY ERROR: Actor# [59:7494345913213621788:2585] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:42.882919Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621788:2585] txid# 281474976715661 SEND to# [59:7494345913213621705:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-17T18:09:42.899733Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] Handle TEvProposeTransaction 2025-04-17T18:09:42.899762Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-17T18:09:42.899808Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7494345913213621812:2597] 2025-04-17T18:09:42.902435Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37084" 2025-04-17T18:09:42.902500Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:42.902522Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:42.902571Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:42.902951Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:42.903090Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:42.903159Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 
TabletId# 72057594046644480} 2025-04-17T18:09:42.903330Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 HANDLE EvClientConnected 2025-04-17T18:09:42.912606Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-17T18:09:42.912667Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621812:2597] txid# 281474976715662 SEND to# [59:7494345913213621811:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-17T18:09:42.981987Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] Handle TEvProposeTransaction 2025-04-17T18:09:42.982032Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-17T18:09:42.982080Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7494345913213621848:2614] 2025-04-17T18:09:42.984461Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37096" 2025-04-17T18:09:42.984538Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:42.984560Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-17T18:09:42.984685Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:42.984731Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:42.984772Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:42.985017Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:42.985052Z node 59 :TX_PROXY ERROR: Actor# [59:7494345913213621848:2614] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-04-17T18:09:42.985139Z node 59 :TX_PROXY ERROR: Actor# [59:7494345913213621848:2614] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-17T18:09:42.985176Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345913213621848:2614] txid# 281474976715663 SEND to# [59:7494345913213621847:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:09:42.985432Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=YjlmNzg0NGMtMmE0NzgwYWMtYzkxZmU1ZGItZGM4MjQ4MWY=, ActorId: [59:7494345913213621833:2352], ActorState: ExecuteState, 
TraceId: 01js2f2fhq7ek255kzc0yhkkxn, Create QueryResponse for error on request, msg: 2025-04-17T18:09:42.985881Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] Handle TEvExecuteKqpTransaction 2025-04-17T18:09:42.985909Z node 59 :TX_PROXY DEBUG: actor# [59:7494345896033751823:2113] TxId# 281474976715664 ProcessProposeKqpTransaction
>> THiveTest::TestServerlessComputeResourcesMode [GOOD]
>> THiveTest::TestResetServerlessComputeResourcesMode
>> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD]
>> KqpIndexes::ExplainCollectFullDiagnostics
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD]
Test command err:
Starting YDB, grpc: 10881, msgbus: 4128 2025-04-17T18:06:54.265306Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345190009612466:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:54.265399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001352/r3tmp/tmp54eZLB/pdisk_1.dat 2025-04-17T18:06:54.613554Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10881, node 1 2025-04-17T18:06:54.639745Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:54.654840Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:54.663659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:54.663787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:54.667175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:54.680182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:54.680231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:54.680240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:54.680361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4128 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-17T18:06:54.909593Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] Handle TEvNavigate describe path dc-1 2025-04-17T18:06:54.909643Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613194:2435] HANDLE EvNavigateScheme dc-1 2025-04-17T18:06:54.910805Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613194:2435] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:54.944547Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613194:2435] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:06:54.958906Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613194:2435] Handle TEvDescribeSchemeResult Forward to# [1:7494345190009613193:2434] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:06:54.978798Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] Handle TEvProposeTransaction 2025-04-17T18:06:54.978825Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:06:54.978955Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345190009613212:2446] 2025-04-17T18:06:55.044594Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:55.044675Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:55.044713Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:55.044808Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:55.045102Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:55.045228Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:06:55.045281Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:06:55.045426Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:06:55.046031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:55.051367Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:06:55.051456Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345190009613212:2446] txid# 281474976710657 SEND to# [1:7494345190009613211:2445] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T18:06:55.066962Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] Handle TEvProposeTransaction 2025-04-17T18:06:55.066999Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:06:55.067033Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345194304580546:2480] 2025-04-17T18:06:55.069418Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:55.069477Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:06:55.069492Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:55.069546Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:55.069887Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:55.070020Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:06:55.070096Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:06:55.070334Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:06:55.070729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:55.072641Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:06:55.072684Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580546:2480] txid# 281474976710658 SEND to# [1:7494345194304580545:2479] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:06:55.108127Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] Handle TEvProposeTransaction 2025-04-17T18:06:55.108155Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T18:06:55.108202Z node 1 :TX_PROXY DEBUG: actor# [1:7494345190009612725:2134] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494345194304580564:2490] 2025-04-17T18:06:55.110729Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345194304580564:2490] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\0 ... ministrator: 1 2025-04-17T18:09:43.148518Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:43.148571Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:43.148886Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:43.149019Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:43.149086Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-17T18:09:43.149264Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] txid# 281474976715661 HANDLE EvClientConnected 2025-04-17T18:09:43.161496Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-17T18:09:43.161561Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627598:2608] txid# 281474976715661 SEND to# [59:7494345915319627597:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-17T18:09:43.239781Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] Handle TEvProposeTransaction 2025-04-17T18:09:43.239820Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] TxId# 281474976715662 ProcessProposeTransaction 2025-04-17T18:09:43.239886Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7494345915319627618:2622] 2025-04-17T18:09:43.242599Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37844" 2025-04-17T18:09:43.242680Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:43.242707Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 
2025-04-17T18:09:43.242767Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:43.243083Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:43.243186Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:43.243242Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-17T18:09:43.243391Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 HANDLE EvClientConnected 2025-04-17T18:09:43.243933Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.253777Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-17T18:09:43.253840Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627618:2622] txid# 281474976715662 SEND to# [59:7494345915319627617:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-17T18:09:43.310726Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] Handle TEvProposeTransaction 2025-04-17T18:09:43.310763Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] TxId# 281474976715663 ProcessProposeTransaction 2025-04-17T18:09:43.310819Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7494345915319627651:2641] 2025-04-17T18:09:43.313460Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37860" 2025-04-17T18:09:43.313522Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:43.313545Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:43.313594Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:43.313914Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:43.314029Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] HANDLE 
EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:43.314086Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-17T18:09:43.314265Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 HANDLE EvClientConnected 2025-04-17T18:09:43.321232Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-17T18:09:43.321289Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627651:2641] txid# 281474976715663 SEND to# [59:7494345915319627650:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-17T18:09:43.367244Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] Handle TEvProposeTransaction 2025-04-17T18:09:43.367275Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] TxId# 281474976715664 ProcessProposeTransaction 2025-04-17T18:09:43.367320Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7494345915319627679:2653] 2025-04-17T18:09:43.369839Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627679:2653] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU4MywiaWF0IjoxNzQ0OTEzMzgzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.YrMlJo5051Drb-MNeIfzLoCB57E03Xjije6DB9BDM0kR9LzMCCDKhDdeKJKh64uuyvwDlEau2Du9-eDI8v3aOARt_L8PoKFVe88bjSXkrdj_hmq047gJ3iPZhkoH9-gl6EsNV_JpO4JQ8ToKqYGM-JfHA3DWIyRKb7uqwbfw7jQTdNvRtCBtOwv4c7RbrtShTDuDcwhS6lgYZneWFCcAEOxBLU8kfFjaajR-VRuom669AZTbSzkH0GwXMYR8dC4M-7gsmP_qXO_Gt6vqDpJ_JwNm5Mw1uEOl_Ke44qVdaabqn-j4yQTmqdsDowsyRvvJ4I6M5melA-IvBnlayOZaIg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU4MywiaWF0IjoxNzQ0OTEzMzgzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37890" 2025-04-17T18:09:43.369910Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627679:2653] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:43.369931Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627679:2653] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-17T18:09:43.370054Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627679:2653] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:43.370093Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627679:2653] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:43.370138Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7494345915319627679:2653] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:43.370399Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627679:2653] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:43.370422Z node 59 :TX_PROXY ERROR: Actor# [59:7494345915319627679:2653] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-04-17T18:09:43.370509Z node 59 :TX_PROXY ERROR: Actor# [59:7494345915319627679:2653] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-17T18:09:43.370536Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345915319627679:2653] txid# 281474976715664 SEND to# [59:7494345915319627678:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:09:43.370726Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZDQ5MjIyNDctODBjZmNjYTYtYjM0ZDJiMzktOTc2MGI0MjU=, ActorId: [59:7494345915319627669:2361], ActorState: ExecuteState, TraceId: 01js2f2fxs89v5x43dqkr92hfp, Create QueryResponse for error on request, msg: 2025-04-17T18:09:43.370925Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] Handle TEvExecuteKqpTransaction 2025-04-17T18:09:43.370953Z node 59 :TX_PROXY DEBUG: actor# [59:7494345893844790235:2112] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-17T18:09:43.523342Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7494345893844790096:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.523414Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 19341, MsgBus: 2483 2025-04-17T18:09:23.250968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345832115793763:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.251196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb5/r3tmp/tmpxA6HfU/pdisk_1.dat 2025-04-17T18:09:23.827100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.827214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.831550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:23.834139Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19341, node 1 2025-04-17T18:09:23.918578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.918620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.918627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.918720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2483 TClient is connected
to server localhost:2483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.586386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.612507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:24.634319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:24.778710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:25.005315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:25.093643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:09:26.550895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345845000697265:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.551071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.098854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.134046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.167560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.212589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.261370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.340315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.403800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.496381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849295665227:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.496515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.496748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849295665233:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.500850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.519843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345849295665235:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.591710Z node 1 :TX_PROXY ERROR: Actor# [1:7494345849295665293:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.267644Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345832115793763:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.267700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.632602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.278364Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 21048, MsgBus: 20913 2025-04-17T18:09:30.778250Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345858675714036:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:30.778321Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb5/r3tmp/tmpqpRQSt/pdisk_1.dat 2025-04-17T18:09:30.886827Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21048, node 2 2025-04-17T18:09:30.917330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:30.917409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:30.919415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:30.966537Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:30.966558Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:30.966567Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:30.966688Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20913 TClient is connected to server localhost:20913 WaitRootIsUp 'Root'... 
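The "Scheduled retry for error: Transaction ... completed, doublechecking" warning followed by the "path exist, request accepts it" issue above looks like a create-then-verify race: two actors propose the same default resource pool, and the loser re-checks that the path now exists and treats that as success. A minimal stdlib-only Python sketch of that pattern, under that reading of the trace; ensure(), create() and exists() are hypothetical stand-ins, not YDB APIs:

import time

# Sketch of the create-then-doublecheck pattern suggested by the log:
# a racing CREATE that loses is retried as a plain existence check.
def ensure(create, exists, retries=3, delay=0.1):
    for attempt in range(retries):
        try:
            create()
            return True
        except FileExistsError:           # stand-in for the "path exist" status
            if exists():
                return True               # another actor won the race: accept it
        time.sleep(delay * (attempt + 1))
    return False

if __name__ == "__main__":
    made = {"default"}                    # pretend another actor created it first
    def create():
        if "default" in made:
            raise FileExistsError("path exist")
        made.add("default")
    print(ensure(create, lambda: "default" in made))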
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:31.440286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.459489Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:31.477498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... chemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.625828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.699638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.745157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.794158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.858309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345875855585677:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.858373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.858425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345875855585682:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.862082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:34.873117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345875855585684:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:34.974961Z node 2 :TX_PROXY ERROR: Actor# [2:7494345875855585743:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:35.778591Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345858675714036:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:35.778656Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:35.919104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22737, MsgBus: 4593 2025-04-17T18:09:37.320263Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345892322148239:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:37.320304Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb5/r3tmp/tmpCcw09h/pdisk_1.dat 2025-04-17T18:09:37.436319Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:37.464355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:37.464445Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:37.465808Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22737, node 3 2025-04-17T18:09:37.510752Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:37.510782Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:37.510790Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:37.510907Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4593 TClient is connected to server localhost:4593 WaitRootIsUp 'Root'... 
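The KQP_WORKLOAD_SERVICE, FLAT_TX_SCHEMESHARD and TX_PROXY records above all share a fixed "<ISO-8601 timestamp> node <id> :<COMPONENT> <LEVEL>:" prefix, even when several records are wrapped onto one physical line. A stdlib-only sketch that tallies records per node/component/level from a saved copy of this log; the input path is a placeholder:

import re
import sys
from collections import Counter

# Matches the "<timestamp> node <id> :<COMPONENT> <LEVEL>:" prefix used above.
LINE_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) :(?P<component>[A-Z_]+) (?P<level>[A-Z]+):"
)

def tally(path):
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            # wrapped lines can hold several records, so scan each line fully
            for m in LINE_RE.finditer(line):
                counts[(m.group("node"), m.group("component"), m.group("level"))] += 1
    return counts

if __name__ == "__main__":
    for (node, comp, level), n in tally(sys.argv[1]).most_common(20):
        print(f"node {node:>3} {comp:<25} {level:<5} {n}")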
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:37.995161Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:38.017496Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:38.093220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:38.272786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:38.358286Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.772688Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345905207051915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:40.772788Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:40.816726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:40.856309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:40.895385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:40.936792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:40.982541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.023602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.068273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.150803Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345909502019873:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.150895Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.151103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345909502019878:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.155671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:41.171497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345909502019880:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:41.246904Z node 3 :TX_PROXY ERROR: Actor# [3:7494345909502019937:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:42.322288Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345892322148239:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:42.322368Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:42.424072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.160963Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:43.814062Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-04-17T18:08:51.207496Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345692312940942:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:51.207546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001997/r3tmp/tmpHaiIj5/pdisk_1.dat 2025-04-17T18:08:51.716771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:51.724214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:51.724315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:51.728917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9125, node 1 2025-04-17T18:08:51.826355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:51.826382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:51.826389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:51.826527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27575 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:52.161105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:52.296853Z node 1 :TICKET_PARSER DEBUG: Ticket 8547C9FFC4FD99528B24B014B9517F96A4C53EDE91A0805F8AAA39D9E2F4266B (ipv6:[::1]:54464) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-17T18:08:52.297473Z node 1 :TICKET_PARSER ERROR: Ticket 8547C9FFC4FD99528B24B014B9517F96A4C53EDE91A0805F8AAA39D9E2F4266B: Cannot create token from certificate. Client certificate failed verification 2025-04-17T18:08:52.388012Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54480) has now valid token of root@builtin 2025-04-17T18:08:52.498630Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:52.498663Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:52.498672Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:52.498707Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:08:55.793511Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345711781970084:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:55.793600Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001997/r3tmp/tmpHk98Q2/pdisk_1.dat 2025-04-17T18:08:56.068522Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15396, node 4 2025-04-17T18:08:56.170715Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:56.170800Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:56.197184Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:56.360980Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:56.361004Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:56.361012Z node 4 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:56.361127Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:56.711564Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:56.831062Z node 4 :TICKET_PARSER DEBUG: Ticket 8547C9FFC4FD99528B24B014B9517F96A4C53EDE91A0805F8AAA39D9E2F4266B (ipv6:[::1]:41440) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-17T18:08:56.831595Z node 4 :TICKET_PARSER ERROR: Ticket 8547C9FFC4FD99528B24B014B9517F96A4C53EDE91A0805F8AAA39D9E2F4266B: Cannot create token from certificate. 
Client certificate failed verification 2025-04-17T18:08:56.960757Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:41456) has now valid token of root@builtin 2025-04-17T18:08:57.063846Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:57.063883Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:57.063892Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:57.063945Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:09:00.837437Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345733642665606:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:00.837513Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001997/r3tmp/tmpxLg7CV/pdisk_1.dat 2025-04-17T18:09:01.116234Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:01.155028Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:01.155116Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:01.160365Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12326, node 7 2025-04-17T18:09:01.303020Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:01.303049Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:01.303063Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:01.303205Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:01.560763Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... E0417 18:09:01.633873007 420706 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. 
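The repeated "ssl3_get_record:wrong version number" handshake failures above are the classic record-layer symptom of one side speaking TLS while the peer answers in plaintext. A minimal Python reproduction under that assumption; host and port are placeholders (12326 is just the GrpcPort from this trace), and a peer that sends nothing back would time out instead of failing this way:

import socket
import ssl

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

try:
    with socket.create_connection(("localhost", 12326), timeout=5) as raw:
        with ctx.wrap_socket(raw, server_hostname="localhost") as tls:
            print("handshake ok:", tls.version())
except OSError as err:
    # A plaintext reply typically yields:
    # ssl.SSLError: [SSL: WRONG_VERSION_NUMBER] wrong version number
    print("handshake failed:", err)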
E0417 18:09:01.638847323 420303 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0417 18:09:01.646900616 420707 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0417 18:09:01.659075251 420488 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0417 18:09:01.674975853 420727 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0417 18:09:01.677870111 420487 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0417 18:09:01.683507609 420303 ssl_transport_security.cc:1431] Handshake faile ... ENT. E0417 18:09:28.348883977 434550 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:1271'; Got args: {grpc.client_channel_factory=0x50200008a390, grpc.default_authority=localhost:1271, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600050bf40, grpc.internal.event_engine=0x502000cd7370, grpc.internal.subchannel_pool=0x50400002dd10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x504000445b50, grpc.server_uri=dns:///localhost:1271} E0417 18:09:28.355188152 434550 ssl_transport_security.cc:791] Invalid private key. E0417 18:09:28.355342041 434550 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0417 18:09:28.355522158 434550 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:1271'; Got args: {grpc.client_channel_factory=0x50200008a390, grpc.default_authority=localhost:1271, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060006bb400, grpc.internal.event_engine=0x502000adcb70, grpc.internal.subchannel_pool=0x50400002dd10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x504000445b50, grpc.server_uri=dns:///localhost:1271} E0417 18:09:28.356894272 434550 ssl_transport_security.cc:791] Invalid private key. E0417 18:09:28.357025701 434550 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0417 18:09:28.357192916 434550 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:1271'; Got args: {grpc.client_channel_factory=0x50200008a390, grpc.default_authority=localhost:1271, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060006bb400, grpc.internal.event_engine=0x50200060e090, grpc.internal.subchannel_pool=0x50400002dd10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x504000445b50, grpc.server_uri=dns:///localhost:1271} E0417 18:09:28.362425998 434550 ssl_transport_security.cc:791] Invalid private key. E0417 18:09:28.362574669 434550 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0417 18:09:28.362836699 434550 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:1271'; Got args: {grpc.client_channel_factory=0x50200008a390, grpc.default_authority=localhost:1271, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060001e39e0, grpc.internal.event_engine=0x502000134c30, grpc.internal.subchannel_pool=0x50400002dd10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x504000445b50, grpc.server_uri=dns:///localhost:1271} E0417 18:09:28.364395415 434550 ssl_transport_security.cc:791] Invalid private key. E0417 18:09:28.364525122 434550 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0417 18:09:28.364770595 434550 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:1271'; Got args: {grpc.client_channel_factory=0x50200008a390, grpc.default_authority=localhost:1271, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060001e39e0, grpc.internal.event_engine=0x50200092e5b0, grpc.internal.subchannel_pool=0x50400002dd10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x504000445b50, grpc.server_uri=dns:///localhost:1271} 2025-04-17T18:09:33.647954Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7494345874436405964:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:33.649163Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001997/r3tmp/tmpkPP57O/pdisk_1.dat 2025-04-17T18:09:33.843112Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:33.888439Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:33.888553Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:33.892143Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19034, node 25 2025-04-17T18:09:34.077109Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:34.077139Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:34.077148Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:34.077326Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:34.460307Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
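The "Invalid private key" / "Handshaker factory creation failed with TSI_INVALID_ARGUMENT" loop above means the PEM key handed to the channel credentials never parses, so every subchannel attempt fails identically. An equivalent up-front check is possible with the Python stdlib before ever building a channel; cert.pem and key.pem are placeholder paths:

import ssl

# Sketch: reject a broken or mismatched PEM key pair early, the same class
# of failure gRPC reports above as "Invalid private key".
def key_matches_cert(certfile: str, keyfile: str) -> bool:
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    try:
        ctx.load_cert_chain(certfile=certfile, keyfile=keyfile)
        return True
    except ssl.SSLError as err:   # e.g. "[SSL] PEM lib" on a bad key
        print("rejected:", err)
        return False

if __name__ == "__main__":
    key_matches_cert("cert.pem", "key.pem")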
2025-04-17T18:09:34.638567Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:56990) has now valid token of root@builtin 2025-04-17T18:09:34.718541Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:09:34.718581Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:09:34.718596Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:09:34.718649Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:09:39.644274Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7494345900585736359:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:39.644345Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001997/r3tmp/tmplMcpck/pdisk_1.dat 2025-04-17T18:09:39.892268Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24712, node 28 2025-04-17T18:09:39.992822Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:39.992940Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:40.041962Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:40.083355Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:40.083387Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:40.083398Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:40.083576Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:40.488445Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:40.749221Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:41470) has now valid token of root@builtin 2025-04-17T18:09:40.854596Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:09:40.854631Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:09:40.854649Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:09:40.854696Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink >> KqpIndexes::ForbidViewModification >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> KqpIndexes::SelectConcurentTX >> KqpIndexes::DeleteByIndex [GOOD] >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 5912, msgbus: 5360 2025-04-17T18:07:00.856804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345218006459989:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:00.857048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001340/r3tmp/tmpv3QYc9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5912, node 1 2025-04-17T18:07:01.236541Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:07:01.239817Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:07:01.239840Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:07:01.249422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:01.250865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:01.265342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:01.279068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:01.279096Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:01.279105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:01.279317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5360 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:07:01.462561Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:01.462616Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428024:2445] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:01.463559Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428024:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:01.505243Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428024:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:07:01.514548Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428024:2445] Handle TEvDescribeSchemeResult Forward to# [1:7494345222301428023:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
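Each failed proposal in traces like this ends in a single 'txid# ..., issues: { message: "..." ... }' record, so denied or conflicting transactions can be listed without following the full actor flow. A stdlib-only extraction sketch over a saved copy of the log; the input path is a placeholder:

import re
import sys

# Pulls the quoted message out of TX_PROXY "issues: { message: ... }" records
# of the shape seen in this trace (backslash escapes inside the message kept).
ISSUE_RE = re.compile(
    r'txid# (?P<txid>\d+), issues: \{ message: "(?P<msg>(?:[^"\\]|\\.)*)"'
)

def issues(path):
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for m in ISSUE_RE.finditer(line):
                yield m.group("txid"), m.group("msg")

if __name__ == "__main__":
    for txid, msg in issues(sys.argv[1]):
        print(f"{txid}: {msg}")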
2025-04-17T18:07:01.541431Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] Handle TEvProposeTransaction 2025-04-17T18:07:01.541455Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:07:01.541545Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345222301428039:2453] 2025-04-17T18:07:01.643284Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:07:01.643378Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:07:01.643408Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:07:01.643497Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:07:01.643823Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:01.643965Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:07:01.644030Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:07:01.644221Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:07:01.644942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.647610Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:07:01.647669Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428039:2453] txid# 281474976710657 SEND to# [1:7494345222301428038:2452] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T18:07:01.659785Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] Handle TEvProposeTransaction 2025-04-17T18:07:01.659812Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:07:01.659872Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345222301428079:2489] 2025-04-17T18:07:01.662211Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:07:01.662264Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:07:01.662279Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:07:01.662353Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:07:01.662592Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:01.662688Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:07:01.662731Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:07:01.662870Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:07:01.663296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:07:01.665563Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:07:01.665603Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428079:2489] txid# 281474976710658 SEND to# [1:7494345222301428078:2488] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:07:01.687479Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] Handle TEvProposeTransaction 2025-04-17T18:07:01.687512Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T18:07:01.687556Z node 1 :TX_PROXY DEBUG: actor# [1:7494345218006460254:2139] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494345222301428097:2499] 2025-04-17T18:07:01.689650Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345222301428097:2499] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032 ... 7494345922776679327:2585] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:44.809840Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:44.810648Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:09:44.810740Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:44.810925Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:44.811068Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:44.811120Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T18:09:44.811253Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T18:09:44.814422Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T18:09:44.814560Z node 59 :TX_PROXY ERROR: Actor# [59:7494345922776679327:2585] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:44.814593Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679327:2585] txid# 281474976710661 SEND to# [59:7494345922776679253:2342] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T18:09:44.830657Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] Handle TEvProposeTransaction 2025-04-17T18:09:44.830686Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T18:09:44.830729Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] Cookie# 0 userReqId# "" txid# 281474976710662 
SEND to# [59:7494345922776679351:2597] 2025-04-17T18:09:44.833349Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44204" 2025-04-17T18:09:44.833412Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:44.833434Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:44.833484Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:44.833769Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:44.833867Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:44.833921Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T18:09:44.834055Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T18:09:44.843359Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-17T18:09:44.843411Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679351:2597] txid# 281474976710662 SEND to# [59:7494345922776679350:2335] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T18:09:44.855448Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] Handle TEvProposeTransaction 2025-04-17T18:09:44.855491Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T18:09:44.855548Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7494345922776679364:2606] 2025-04-17T18:09:44.858299Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44214" 2025-04-17T18:09:44.858366Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 Bootstrap, UserSID: root@builtin 
CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:44.858389Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:44.859876Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:44.860223Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:44.860340Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:44.860398Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-17T18:09:44.860545Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 HANDLE EvClientConnected 2025-04-17T18:09:44.860986Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.863593Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-17T18:09:44.863658Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679364:2606] txid# 281474976710663 SEND to# [59:7494345922776679363:2347] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-04-17T18:09:44.934590Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] Handle TEvProposeTransaction 2025-04-17T18:09:44.934621Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] TxId# 281474976710664 ProcessProposeTransaction 2025-04-17T18:09:44.934666Z node 59 :TX_PROXY DEBUG: actor# [59:7494345905596809358:2111] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7494345922776679399:2620] 2025-04-17T18:09:44.937064Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44242" 2025-04-17T18:09:44.937127Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:44.937151Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-04-17T18:09:44.937292Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:44.937331Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7494345922776679399:2620] txid# 281474976710664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-04-17T18:09:44.937373Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:44.937603Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:44.937699Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:44.937752Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-04-17T18:09:44.937885Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 HANDLE EvClientConnected 2025-04-17T18:09:44.942021Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-04-17T18:09:44.942090Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345922776679399:2620] txid# 281474976710664 SEND to# [59:7494345922776679398:2353] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update >> KqpIndexes::UpdateDeletePlan+UseSink >> KqpUniqueIndex::InsertNullInPk [GOOD] >> KqpUniqueIndex::InsertNullInFk >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 11494, MsgBus: 25127 2025-04-17T18:09:26.942886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345843418985763:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:26.943094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca6/r3tmp/tmpQYWM1N/pdisk_1.dat 2025-04-17T18:09:27.394485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:27.399767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:27.399861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:27.403514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11494, node 1 2025-04-17T18:09:27.536995Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:27.537021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:27.537028Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:27.537170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25127 TClient is connected to server localhost:25127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:28.061351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:28.075158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:28.092484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:28.237055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:28.385389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:28.462095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:30.264287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345860598856479:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:30.264406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:30.592187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.623020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.655994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.681149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.711242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.780043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.820438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.890566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345860598857138:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:30.890648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:30.890904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345860598857143:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:30.894380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:30.907634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345860598857145:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:30.997535Z node 1 :TX_PROXY ERROR: Actor# [1:7494345860598857204:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:31.939326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345843418985763:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:31.939401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:31.995738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:32.124877Z node 1 :TX_PROXY ERROR: Actor# [1:7494345869188792403:3940] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:33.069136Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 29688, MsgBus: 64438 2025-04-17T18:09:33.926741Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345875164992178:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:33.928002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca6/r3tmp/tmpx7l14g/pdisk_1.dat 2025-04-17T18:09:34.043661Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29688, node 2 2025-04-17T18:09:34.124434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:34.124578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:34.126119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:34.138713Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:34.138733Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:34.138739Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:34.138846Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64438 TClient is connected to server localhost:64438 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:34.458411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 ... 8865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:37.218251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:37.255528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:37.315421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345892344863813:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:37.315499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:37.315563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345892344863818:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:37.319190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:37.332360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345892344863820:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:37.432869Z node 2 :TX_PROXY ERROR: Actor# [2:7494345892344863878:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:38.343864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.442802Z node 2 :TX_PROXY ERROR: Actor# [2:7494345896639831781:3940] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:38.926556Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345875164992178:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:38.926617Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:39.300862Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:39.322266Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 4263, MsgBus: 13312 2025-04-17T18:09:40.249086Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345901969359446:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:40.249171Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca6/r3tmp/tmpm9kQsK/pdisk_1.dat 2025-04-17T18:09:40.370060Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4263, node 3 2025-04-17T18:09:40.394669Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:40.394790Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:40.396941Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:40.486390Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:40.486412Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:40.486421Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:40.486530Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13312 TClient is connected to server localhost:13312 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:40.987747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.995518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.072843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.222036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.287809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.448147Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345914854263085:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.448234Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.491420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.523531Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.556607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.583927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.613539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.646893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.680510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.771592Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345914854263747:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.771698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.771867Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345914854263752:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.776246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:43.790872Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345914854263754:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:43.867225Z node 3 :TX_PROXY ERROR: Actor# [3:7494345914854263811:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:44.938969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.250280Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345901969359446:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:45.250345Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> KqpUniqueIndex::InsertFkAlreadyExist >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> KqpIndexMetadata::HandleNotReadyIndex [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn >> THiveTest::TestStopTenant [GOOD] >> TScaleRecommenderTest::BasicTest >> KqpIndexes::WriteWithParamsFieldOrder >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> KqpIndexes::SelectConcurentTX2 [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 >> KqpIndexes::ExplainCollectFullDiagnostics [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation >> TScaleRecommenderTest::BasicTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21108, MsgBus: 16396 2025-04-17T18:09:30.583228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345858562365761:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:30.583454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9c/r3tmp/tmplBZQQl/pdisk_1.dat 2025-04-17T18:09:31.018710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:31.037684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown 
-> Disconnected 2025-04-17T18:09:31.037837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:31.040684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21108, node 1 2025-04-17T18:09:31.118320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:31.118356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:31.118389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:31.118540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16396 TClient is connected to server localhost:16396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:31.765685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.779129Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:31.794465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.973348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.144905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:32.222862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:34.124553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345875742236698:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.124686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.447785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.513999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.544468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.609300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.677965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.715005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.757088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:34.827092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345875742237365:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.827155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.827511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345875742237370:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:34.830569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:34.842310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345875742237372:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:34.944459Z node 1 :TX_PROXY ERROR: Actor# [1:7494345875742237430:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:35.583498Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345858562365761:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:35.583567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:36.024828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 query_phases { duration_us: 748 cpu_time_us: 748 } query_phases { duration_us: 9313 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 5449 affected_shards: 1 } query_phases { duration_us: 1591 cpu_time_us: 1591 } query_phases { duration_us: 4095 cpu_time_us: 3807 } query_phases { duration_us: 7665 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 3038 affected_shards: 2 } compilation { duration_us: 574888 cpu_time_us: 534845 } process_cpu_time_us: 5208 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":27,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":26,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_1_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744913376724,\"TaskId\":1,\"Host\":\"ghrun-gkdwnlcfnq\",\"ComputeTimeUs\":60}],\"CpuTimeUs\":650}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1744913376723,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":650,\"Max\":650,\"Min\":650}},\"CTE Name\":\"precompute_1_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":25,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":24,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1744913376723,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_3_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":23,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":22,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744913376723,\"TaskId\":2,\"Host\":\"ghrun-gkdwnlcfnq\",\"ComputeTimeUs\":88}],\"CpuTimeUs\":943}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1744913376723,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":943,\"Max\":943,\"Min\":943}},\"CTE Name\":\"precompute_3_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":20,\"Subplan Name\":\"CTE precompute_3_0\",\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpressi ... 057594046644480 2025-04-17T18:09:41.511190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.543409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.578066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.655486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.722300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345909192820236:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.722375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.723122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345909192820241:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.726210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:41.752048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345909192820243:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:41.821568Z node 2 :TX_PROXY ERROR: Actor# [2:7494345909192820300:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:42.687919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.196425Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345896307915916:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.196484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:43.274054Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 10336, MsgBus: 29873 2025-04-17T18:09:44.092169Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345919222102357:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.092227Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c9c/r3tmp/tmpbYYV2S/pdisk_1.dat 2025-04-17T18:09:44.204394Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:44.235701Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:44.235796Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:44.238272Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10336, node 3 2025-04-17T18:09:44.298686Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:44.298709Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:44.298719Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:44.298840Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29873 TClient is connected to server localhost:29873 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:44.796939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.811065Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.834478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.912796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.069036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.156722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.335091Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345932107006017:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.335181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.382468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.420064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.497275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.574682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.607279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.646249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.721616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.789428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345932107006689:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.789506Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.789643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345932107006694:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.793110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:47.812427Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345932107006696:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:47.872561Z node 3 :TX_PROXY ERROR: Actor# [3:7494345932107006750:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:48.922274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.094573Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345919222102357:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:49.094639Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> YdbOlapStore::LogGrepExisting [GOOD] >> YdbOlapStore::LogExistingUserId >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TScaleRecommenderTest::BasicTest [GOOD] Test command err: 2025-04-17T18:09:25.224670Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:25.228413Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:25.228652Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:25.229316Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:25.230551Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:25.230605Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:25.231547Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-17T18:09:25.231600Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:25.231743Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:25.232051Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:25.246437Z node 1 :BS_PROXY INFO: Group# 0 
TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:25.246491Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:25.248391Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:25.248598Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:25.248746Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:25.248910Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:25.249033Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:25.249180Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:25.249356Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:25.249383Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:25.249461Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-04-17T18:09:25.249491Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-04-17T18:09:25.249544Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:25.249581Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:25.250512Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:25.250663Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:25.290724Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:25.290787Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:25.290833Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:25.292374Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-04-17T18:09:25.292411Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-04-17T18:09:25.292459Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:25.292511Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:25.292541Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:25.292598Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:25.299622Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:25.299875Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:25.300148Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 
entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:25.300582Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:25.300901Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:25.300950Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-17T18:09:25.301026Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-17T18:09:25.301051Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-17T18:09:25.301288Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:25.301447Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:25.301574Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2025-04-17T18:09:25.301601Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:52:2092] 2025-04-17T18:09:25.301929Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-17T18:09:25.301985Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:52:2092] 2025-04-17T18:09:25.302093Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:25.302221Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:25.302271Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:25.302448Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:25.302494Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-17T18:09:25.302623Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-04-17T18:09:25.302654Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-04-17T18:09:25.306666Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:25.306792Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:25.306848Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send 
[1:30:2075] 2025-04-17T18:09:25.307438Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-17T18:09:25.307603Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-17T18:09:25.307680Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-17T18:09:25.308118Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:25.308950Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:25.309164Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:25.309230Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:25.309289Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-17T18:09:25.309387Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-04-17T18:09:25.309419Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-04-17T18:09:25.309482Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:25.309633Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-17T18:09:25.309661Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:25.309759Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-17T18:09:25.309970Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-17T18:09:25.310008Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 2146435075 Sender# [1:49:2091] SessionId# [0:0:0] Cookie# 0 2025-04-17T18:09:25.310053Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.017321s 2025-04-17T18:09:25.310108Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-17T18:09:25.310140Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025- ... 
72075186224037888] shutdown pipe due to pending shutdown request [24:566:2479] 2025-04-17T18:09:51.934485Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [24:566:2479] 2025-04-17T18:09:51.934579Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] HandleSend Sender# [24:565:2478] EventType# 2146435094 2025-04-17T18:09:51.935038Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [24:569:2482] 2025-04-17T18:09:51.935098Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [24:569:2482] 2025-04-17T18:09:51.935200Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:51.935278Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [24:419:2364] 2025-04-17T18:09:51.935349Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [24:569:2482] 2025-04-17T18:09:51.935412Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] received pending shutdown [24:569:2482] 2025-04-17T18:09:51.935474Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [24:569:2482] 2025-04-17T18:09:51.935537Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [24:569:2482] 2025-04-17T18:09:51.935649Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [24:569:2482] 2025-04-17T18:09:51.935836Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [24:569:2482] 2025-04-17T18:09:51.935898Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [24:569:2482] 2025-04-17T18:09:51.935946Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [24:569:2482] 2025-04-17T18:09:51.936010Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [24:569:2482] 2025-04-17T18:09:51.936062Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [24:569:2482] 2025-04-17T18:09:51.936138Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] HandleSend Sender# [24:568:2481] EventType# 268697642 2025-04-17T18:09:51.936627Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [24:572:2485] 2025-04-17T18:09:51.936669Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [24:572:2485] 2025-04-17T18:09:51.936736Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:51.936795Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [24:419:2364] 2025-04-17T18:09:51.936846Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [24:572:2485] 2025-04-17T18:09:51.936889Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] received pending shutdown [24:572:2485] 2025-04-17T18:09:51.936932Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [24:572:2485] 2025-04-17T18:09:51.936996Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [24:572:2485] 2025-04-17T18:09:51.937079Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [24:572:2485] 
2025-04-17T18:09:51.937175Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [24:572:2485] 2025-04-17T18:09:51.937216Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [24:572:2485] 2025-04-17T18:09:51.937250Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [24:572:2485] 2025-04-17T18:09:51.937295Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [24:572:2485] 2025-04-17T18:09:51.937331Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [24:572:2485] 2025-04-17T18:09:51.937381Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] HandleSend Sender# [24:571:2484] EventType# 268697612 2025-04-17T18:09:51.937552Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-04-17T18:09:51.937612Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:51.937759Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{15, redo 82b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-17T18:09:51.937820Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:51.951059Z node 24 :BS_PROXY_PUT INFO: [8b41b9ac8186ade8] bootstrap ActorId# [24:575:2488] Group# 2147483648 BlobCount# 1 BlobIDs# [[72075186224037888:1:12:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T18:09:51.951227Z node 24 :BS_PROXY_PUT DEBUG: [8b41b9ac8186ade8] Id# [72075186224037888:1:12:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T18:09:51.951314Z node 24 :BS_PROXY_PUT DEBUG: [8b41b9ac8186ade8] restore Id# [72075186224037888:1:12:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T18:09:51.951397Z node 24 :BS_PROXY_PUT DEBUG: [8b41b9ac8186ade8] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224037888:1:12:0:0:92:1] Marker# BPG33 2025-04-17T18:09:51.951462Z node 24 :BS_PROXY_PUT DEBUG: [8b41b9ac8186ade8] Sending missing VPut part# 0 to# 0 blob Id# [72075186224037888:1:12:0:0:92:1] Marker# BPG32 2025-04-17T18:09:51.951644Z node 24 :BS_PROXY DEBUG: Send to queueActorId# [24:425:2367] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72075186224037888:1:12:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T18:09:51.956904Z node 24 :BS_PROXY_PUT DEBUG: [8b41b9ac8186ade8] received {EvVPutResult Status# OK ID# [72075186224037888:1:12:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [80000000:1:0:0:0] Marker# BPP01 2025-04-17T18:09:51.957044Z node 24 :BS_PROXY_PUT DEBUG: [8b41b9ac8186ade8] Result# TEvPutResult {Id# [72075186224037888:1:12:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483648 Marker# BPP12 2025-04-17T18:09:51.957147Z node 24 :BS_PROXY_PUT INFO: [8b41b9ac8186ade8] SendReply putResult# TEvPutResult {Id# 
[72075186224037888:1:12:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:09:51.957352Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2147483648 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.831 sample PartId# [72075186224037888:1:12:0:0:92:1] QueryCount# 1 VDiskId# [80000000:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 6.128 VDiskId# [80000000:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-04-17T18:09:51.957532Z node 24 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72075186224037888:1:12:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:09:51.957701Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:13} commited cookie 1 for step 12 2025-04-17T18:09:51.958086Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [24:577:2490] 2025-04-17T18:09:51.958138Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [24:577:2490] 2025-04-17T18:09:51.958268Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:51.958351Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [24:419:2364] 2025-04-17T18:09:51.958418Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [24:577:2490] 2025-04-17T18:09:51.958466Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] received pending shutdown [24:577:2490] 2025-04-17T18:09:51.958509Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [24:577:2490] 2025-04-17T18:09:51.958575Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [24:577:2490] 2025-04-17T18:09:51.958680Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [24:577:2490] 2025-04-17T18:09:51.958851Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [24:577:2490] 2025-04-17T18:09:51.958915Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [24:577:2490] 2025-04-17T18:09:51.958963Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [24:577:2490] 2025-04-17T18:09:51.959025Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [24:577:2490] 2025-04-17T18:09:51.959071Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [24:577:2490] 2025-04-17T18:09:51.959149Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] HandleSend Sender# [24:576:2489] EventType# 2146435094 2025-04-17T18:09:51.959690Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [24:580:2493] 2025-04-17T18:09:51.959757Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [24:580:2493] 2025-04-17T18:09:51.959851Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:51.959922Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [24:419:2364] 
2025-04-17T18:09:51.959998Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [24:580:2493] 2025-04-17T18:09:51.960062Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] received pending shutdown [24:580:2493] 2025-04-17T18:09:51.960150Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [24:580:2493] 2025-04-17T18:09:51.960209Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [24:580:2493] 2025-04-17T18:09:51.960332Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [24:580:2493] 2025-04-17T18:09:51.960473Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [24:580:2493] 2025-04-17T18:09:51.960533Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [24:580:2493] 2025-04-17T18:09:51.960582Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [24:580:2493] 2025-04-17T18:09:51.960653Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [24:580:2493] 2025-04-17T18:09:51.960715Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [24:580:2493] 2025-04-17T18:09:51.960784Z node 24 :PIPE_SERVER DEBUG: [72075186224037888] HandleSend Sender# [24:579:2492] EventType# 268697642 >> KqpIndexes::ForbidViewModification [GOOD] >> KqpIndexes::IndexOr >> KqpMultishardIndex::DataColumnSelect [GOOD] >> KqpIndexes::UpdateDeletePlan+UseSink [GOOD] >> KqpIndexes::UpdateDeletePlan-UseSink >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin >> KqpMultishardIndex::SortedRangeReadDesc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3317, MsgBus: 25850 2025-04-17T18:09:23.266991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345829875118652:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.267063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cd9/r3tmp/tmp5deSfp/pdisk_1.dat 2025-04-17T18:09:23.732599Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.739553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.739653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.747482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3317, node 1 2025-04-17T18:09:23.874677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.874703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.874710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.874811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25850 TClient is connected to server localhost:25850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.586368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.626835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.779643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.956810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.024154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.620319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345842760022089:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.622690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.104865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.144051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.218665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.256767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.297647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.376711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.426903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.499933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847054990054:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.500003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.500214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847054990059:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.502917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.514392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345847054990061:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.583130Z node 1 :TX_PROXY ERROR: Actor# [1:7494345847054990116:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.259072Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345829875118652:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.259123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.664034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:28.707230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:28.792733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5153, MsgBus: 18470 2025-04-17T18:09:31.168571Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345865859700447:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:31.168614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cd9/r3tmp/tmpMXR7LU/pdisk_1.dat 2025-04-17T18:09:31.298937Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:31.319100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:31.319175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:31.320272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5153, node 2 2025-04-17T18:09:31.349442Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:31.349467Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:31.349473Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:31.349568Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18470 TClient is connected to server localhost:18470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:31.785419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.805276Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, a ... 04Z node 2 :TX_PROXY ERROR: Actor# [2:7494345883039572129:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:36.034228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:36.079642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:36.170609Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345865859700447:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:36.170664Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:36.195903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4319, MsgBus: 6794 2025-04-17T18:09:39.097316Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345899040043169:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:39.097397Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cd9/r3tmp/tmpVF1nig/pdisk_1.dat 2025-04-17T18:09:39.228521Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:39.242372Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:39.242450Z node 3 :HIVE WARN: HIVE#72057594037968897 
Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:39.244958Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4319, node 3 2025-04-17T18:09:39.298334Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:39.298360Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:39.298369Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:39.298493Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6794 TClient is connected to server localhost:6794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:39.805955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.814959Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:39.819014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.907721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.138311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.226729Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.750324Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345911924946822:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:42.750444Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:42.807366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:42.839557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:42.873824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:42.915245Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:42.984009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.019575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.088778Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.173275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345916219914786:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.173403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.173596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345916219914791:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.177103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:43.189544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345916219914793:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:43.272349Z node 3 :TX_PROXY ERROR: Actor# [3:7494345916219914850:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:44.097332Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345899040043169:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.097430Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:44.456138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.566840Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:45.613932Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:45.639225Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:46.889230Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:46.908291Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:47.882732Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:47.913691Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:48.564015Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:48.624805Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:49.428622Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:49.475694Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:50.094023Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:50.109428Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:50.662276Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:51.248290Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:51.603694Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:51.619485Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:51.633951Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:52.059305Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:52.102304Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestGetStorageInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 22973, MsgBus: 21864 2025-04-17T18:09:23.219540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345828853438228:2062];send_to=[0:7307199536658146131:7762515]; 
2025-04-17T18:09:23.219630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cc7/r3tmp/tmpC3cIGV/pdisk_1.dat 2025-04-17T18:09:23.750437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.750504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.754501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22973, node 1 2025-04-17T18:09:23.786693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.897687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.897710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.897719Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.897846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21864 TClient is connected to server localhost:21864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.752626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.779200Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:24.796535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:24.939600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:25.110777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:25.196221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.783971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345841738341875:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.784070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.158611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.232087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.306647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.346098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.387107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.435940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.503947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.573814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345846033309840:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.573912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.574171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345846033309845:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.578315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.593476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345846033309847:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.686504Z node 1 :TX_PROXY ERROR: Actor# [1:7494345846033309903:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.219464Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345828853438228:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.219510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.682913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:30.465819Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f22mkcxy19201artbfvxk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTEyZDczMjctNmMwY2Y1MTItZGFmOWVkNzYtNGFkZDJiYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:30.476628Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTEyZDczMjctNmMwY2Y1MTItZGFmOWVkNzYtNGFkZDJiYzM=, ActorId: [1:7494345850328278305:2572], ActorState: ExecuteState, TraceId: 01js2f22mkcxy19201artbfvxk, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21860, MsgBus: 10740 2025-04-17T18:09:32.322984Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345869814414574:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:32.323025Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cc7/r3tmp/tmpuqbV4n/pdisk_1.dat 2025-04-17T18:09:32.507357Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:32.541582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:32.541679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21860, node 2 2025-04-17T18:09:32.543846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:32.602887Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:32.602908Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:32.602917Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:32.603017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10740 TClient is connected to server localhost:10740 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:33.024630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.068038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.147975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:33.320136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:33.386870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.570003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345882699318222:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.570102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.614864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.642206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.671313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.701250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.769471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.805773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.845074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.925006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345882699318881:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.925108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.925467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345882699318886:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:35.928638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:35.939496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345882699318888:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:36.004250Z node 2 :TX_PROXY ERROR: Actor# [2:7494345886994286239:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:36.913148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:37.323394Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345869814414574:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:37.323453Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:42.479431Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2e9b806exys57cst88r4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:42.479723Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, ActorId: [2:7494345891289254665:2570], ActorState: ExecuteState, TraceId: 01js2f2e9b806exys57cst88r4, Create QueryResponse for error on request, msg: 2025-04-17T18:09:47.324700Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-17T18:09:47.324727Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-17T18:09:47.358937Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037936 not found 2025-04-17T18:09:47.358976Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found 2025-04-17T18:09:47.359493Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2025-04-17T18:09:47.360083Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037940 not found 2025-04-17T18:09:47.364065Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037939 not found 2025-04-17T18:09:47.364533Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2025-04-17T18:09:47.497195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:09:47.497223Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:49.643587Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2myp8r3xpzkxx48dj0d5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-17T18:09:49.643810Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, ActorId: [2:7494345891289254665:2570], ActorState: ExecuteState, TraceId: 01js2f2myp8r3xpzkxx48dj0d5, Create QueryResponse for error on request, msg: 2025-04-17T18:09:50.793975Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2p5wddccf5r1rgmdy3gy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:50.794254Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, ActorId: [2:7494345891289254665:2570], ActorState: ExecuteState, TraceId: 01js2f2p5wddccf5r1rgmdy3gy, Create QueryResponse for error on request, msg: 2025-04-17T18:09:52.219533Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2rha6tj5nk2t6xp0gzx8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:52.219803Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y3MzMxNjUtZDVmNjVkYzQtZTBiOTkxM2EtMTllODU0OGY=, ActorId: [2:7494345891289254665:2570], ActorState: ExecuteState, TraceId: 01js2f2rha6tj5nk2t6xp0gzx8, Create QueryResponse for error on request, msg: 2025-04-17T18:09:52.297506Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-17T18:09:52.297799Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-17T18:09:52.315179Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-17T18:09:52.315218Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-17T18:09:52.315236Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin >> KqpUniqueIndex::ReplaceFkPartialColumnSet >> KqpIndexes::SelectConcurentTX [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnSelect [GOOD] Test command err: Trying to start YDB, gRPC: 16469, MsgBus: 27068 2025-04-17T18:09:38.213231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345896783701314:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:38.213288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c95/r3tmp/tmpwoy0SP/pdisk_1.dat 2025-04-17T18:09:38.588412Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:38.627928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:38.628026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16469, node 1 2025-04-17T18:09:38.631026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:38.723144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:38.723178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:38.723192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:38.723330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27068 TClient is connected to server localhost:27068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:39.225798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.266509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.389179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.561715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.636786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:41.292276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345909668604914:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.292374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.580947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.613197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.640699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.676578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.702506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.732691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.766928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.848877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345909668605578:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.848981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.849621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345909668605583:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.853493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:41.862293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345909668605585:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:41.945278Z node 1 :TX_PROXY ERROR: Actor# [1:7494345909668605641:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:42.928249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.213690Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345896783701314:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.213742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13341, MsgBus: 6639 2025-04-17T18:09:44.965788Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345920131926385:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.965842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c95/r3tmp/tmp9sPSVn/pdisk_1.dat 2025-04-17T18:09:45.074337Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:45.075140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:45.075208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:45.076882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13341, node 2 2025-04-17T18:09:45.130343Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:45.130365Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:45.130382Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:45.130486Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6639 TClient is connected to server localhost:6639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:09:45.579979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.600429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.680727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.846640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.930710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.938306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345933016830019:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.938395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.992340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.028547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.057832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.088393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.126577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.164720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.200770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.279352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345937311797977:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.279425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.279585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345937311797982:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.282713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:48.315029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345937311797984:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:48.417184Z node 2 :TX_PROXY ERROR: Actor# [2:7494345937311798043:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:49.432766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.500338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.579718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.967074Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345920131926385:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:49.967129Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:51.153411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexOrderBy2 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-04-17T18:08:46.392189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345671755198713:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:46.392265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d4/r3tmp/tmpLiBUS8/pdisk_1.dat 2025-04-17T18:08:46.763321Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:46.809557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:46.809608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7345, node 1 2025-04-17T18:08:46.832948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:46.995027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:46.995044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:46.995049Z node 1 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-04-17T18:08:46.995118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:47.298977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:47.384169Z node 1 :TICKET_PARSER DEBUG: Ticket 3096AF34C158DFC0A7E2FC10B4A169087266472445A1E196564BD9B0262D6B03 (ipv6:[::1]:39228) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:08:47.510429Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:39254) has now valid token of root@builtin 2025-04-17T18:08:47.619425Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:47.619462Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:47.619471Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:47.619499Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:08:51.028832Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345693379152096:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:51.029788Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d4/r3tmp/tmpnVdv4I/pdisk_1.dat 2025-04-17T18:08:51.295597Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13676, node 4 2025-04-17T18:08:51.363492Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:51.363596Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:51.401697Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:51.627459Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:51.627491Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:51.627508Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-17T18:08:51.627650Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:51.923779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:52.033794Z node 4 :TICKET_PARSER DEBUG: Ticket 3096AF34C158DFC0A7E2FC10B4A169087266472445A1E196564BD9B0262D6B03 (ipv6:[::1]:44346) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:08:52.134027Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:44370) has now valid token of root@builtin 2025-04-17T18:08:52.204865Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:52.204893Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:52.204901Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:52.204930Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:08:56.262495Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345714233639635:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:56.262597Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d4/r3tmp/tmphOsV4P/pdisk_1.dat 2025-04-17T18:08:56.482392Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3180, node 7 2025-04-17T18:08:56.632466Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:56.632582Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:56.729126Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:56.813010Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:56.813032Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:56.813041Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T18:08:56.813162Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:57.120544Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:57.282636Z node 7 :TICKET_PARSER DEBUG: Ticket E95DF89305168D86D8C40B0C2C179C5A59274C76C1F8416747C54B101E64B643 (ipv6:[::1]:44708) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:08:57.355689Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:44722) has now valid token of root@builtin 2025-04-17T18:08:57.429978Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:57.430003Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:57.430027Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:57.430055Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:09:01.693541Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345736222744883:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:01.693588Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d4/r3tmp/tmpqCJ2Zp/pdisk_1.dat 2025-04-17T18:09:01.980995Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14061, node 10 2025-04-17T18:09:02.108135Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:02.108217Z node 10 :HIVE WARN: HIVE#72057594037968897 ... schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:22.102298Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7494345804376640266:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:22.102404Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.078111Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57246) has now valid token of root@builtin 2025-04-17T18:09:28.151131Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:09:28.151169Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:09:28.151179Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:09:28.151220Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:09:29.868179Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7494345855646613481:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:29.868244Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d4/r3tmp/tmpjqTmqw/pdisk_1.dat 2025-04-17T18:09:30.079819Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:30.128557Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:30.128663Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:30.132558Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10212, node 22 2025-04-17T18:09:30.196792Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:30.196830Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:30.196843Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:30.197024Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:09:30.604082Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:34.869325Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7494345855646613481:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:34.869411Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:40.806695Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:35302) has now valid token of root@builtin 2025-04-17T18:09:40.925328Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:09:40.925378Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:09:40.925391Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:09:40.925442Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:09:42.594724Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7494345913268591744:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:42.594844Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d4/r3tmp/tmpkBkhPu/pdisk_1.dat 2025-04-17T18:09:42.825508Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:42.835814Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:42.835926Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:42.839847Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29639, node 25 2025-04-17T18:09:42.932997Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:42.933036Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:42.933046Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:42.933212Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27402 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:43.292248Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.436703Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:43198) has now valid token of root@builtin 2025-04-17T18:09:43.490165Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:09:43.490228Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:09:43.490243Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:09:43.490284Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:09:48.854252Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7494345938258919294:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:48.854365Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019d4/r3tmp/tmpmpoCl8/pdisk_1.dat 2025-04-17T18:09:49.129440Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:49.181108Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:49.181232Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:49.195433Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61400, node 28 2025-04-17T18:09:49.306974Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:49.307004Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:49.307017Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:49.307198Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4308 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:49.719964Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.891921Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:42180) has now valid token of root@builtin 2025-04-17T18:09:49.958716Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:09:49.958763Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:09:49.958775Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:09:49.958822Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 >> KqpUniqueIndex::InsertNullInFk [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> KqpIndexes::UpsertMultipleUniqIndexes >> KqpUniqueIndex::ReplaceFkAlreadyExist >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestExternalBoot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23800, MsgBus: 64435 2025-04-17T18:09:39.395157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345901108989406:2249];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:39.395529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c94/r3tmp/tmpXsVleW/pdisk_1.dat 2025-04-17T18:09:39.856558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:39.872830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:39.872932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:39.874821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23800, node 1 
2025-04-17T18:09:40.014695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:40.014718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:40.014730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:40.014846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64435 TClient is connected to server localhost:64435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:40.559546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.576178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.743643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.909757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.985393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.699507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345913993892877:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:42.699569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.024903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.054142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.079534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.109842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.138266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.174348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.205600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.290095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345918288860840:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.290173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.290257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345918288860845:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.293507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:43.302549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345918288860847:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:43.378376Z node 1 :TX_PROXY ERROR: Actor# [1:7494345918288860903:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:44.367232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.394219Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345901108989406:2249];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.394385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:45.883095Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 13393, MsgBus: 16238 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c94/r3tmp/tmpC9EGlT/pdisk_1.dat 2025-04-17T18:09:47.336814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:47.404946Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13393, node 2 2025-04-17T18:09:47.424118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:47.424271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:47.426489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:47.462680Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:47.462697Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:47.462703Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:47.462789Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16238 TClient is connected to server localhost:16238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:09:47.914329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.951429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.004029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.162090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.257458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.420201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345944588265640:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.420321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.471243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.506135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.536504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.564375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.597000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.635328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.713216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.779303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345944588266306:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.779398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.779742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345944588266311:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.783824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:50.798233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345944588266313:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:50.885685Z node 2 :TX_PROXY ERROR: Actor# [2:7494345944588266370:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:51.953822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.406838Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::InsertFkAlreadyExist [GOOD] >> KqpUniqueIndex::InsertFkDuplicate >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInFk [GOOD] Test command err: Trying to start YDB, gRPC: 24201, MsgBus: 2521 2025-04-17T18:09:40.778230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345903369869523:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:40.778261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c8f/r3tmp/tmp90zI2z/pdisk_1.dat 2025-04-17T18:09:41.287372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:41.304849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:41.304971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:41.306731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24201, node 1 2025-04-17T18:09:41.406167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:41.406205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:41.406235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:41.406360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2521 TClient is connected to server localhost:2521 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:41.913054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.931393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:41.939929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.077395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.231192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.311121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.891823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345916254773114:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.891994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.151751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.185432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.217625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.248084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.287324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.327292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.398594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.491042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345920549741081:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.491119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.491321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345920549741086:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.494911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:44.504924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345920549741088:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:44.590156Z node 1 :TX_PROXY ERROR: Actor# [1:7494345920549741144:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:45.580930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.778681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345903369869523:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:45.779853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:47.407566Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345933434644638:2635], TxId: 281474976710680, task: 1. Ctx: { TraceId : 01js2f2kh52qerwpps3xe2fs2a. SessionId : ydb://session/3?node_id=1&id=NzdlNDdmZTQtYWQxOGE5OWMtMTM4YWNiYzItMzVkMjRlYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:09:47.407842Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345933434644640:2636], TxId: 281474976710680, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NzdlNDdmZTQtYWQxOGE5OWMtMTM4YWNiYzItMzVkMjRlYjI=. TraceId : 01js2f2kh52qerwpps3xe2fs2a. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494345933434644635:2571], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:09:47.408122Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzdlNDdmZTQtYWQxOGE5OWMtMTM4YWNiYzItMzVkMjRlYjI=, ActorId: [1:7494345924844709549:2571], ActorState: ExecuteState, TraceId: 01js2f2kh52qerwpps3xe2fs2a, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5307, MsgBus: 28960 2025-04-17T18:09:48.295586Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345937389241272:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:48.295621Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c8f/r3tmp/tmpFLj9yn/pdisk_1.dat 2025-04-17T18:09:48.460247Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:48.475969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:48.476050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:48.478836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5307, node 2 2025-04-17T18:09:48.594765Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:48.594787Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:48.594795Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:48.594908Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28960 TClient is connected to server localhost:28960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:49.102269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.112494Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:49.136575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:49.201532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.368468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.444900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.642829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345950274144871:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.642938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.692822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.728828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.755686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.788608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.857248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.900720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.938261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.028826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345954569112837:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.028925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.029127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345954569112842:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.032612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:52.045651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345954569112844:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:52.108293Z node 2 :TX_PROXY ERROR: Actor# [2:7494345954569112901:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:53.101466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:53.298149Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345937389241272:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:53.298259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout >> KqpUniqueIndex::InsertNullInComplexFk >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 24545, MsgBus: 17048 2025-04-17T18:09:38.087128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345895523656641:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:38.087178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c98/r3tmp/tmphxoDk3/pdisk_1.dat 2025-04-17T18:09:38.522444Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:38.575414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:38.575543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:38.577886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24545, node 1 2025-04-17T18:09:38.621355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:38.621378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:38.621391Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:38.621535Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17048 TClient is connected to server localhost:17048 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:39.203898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.218029Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:39.234501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.420707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.582233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:39.658928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.328834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345908408560306:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.328960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.662045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.696267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.721503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.746931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.779513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.810265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.883468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.971832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345908408560976:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.971945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.972247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345908408560981:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:41.975367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:41.984055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345908408560983:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:42.052131Z node 1 :TX_PROXY ERROR: Actor# [1:7494345912703528333:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:43.024617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.089346Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345895523656641:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.089425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:43.508950Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:43.524289Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 11418, MsgBus: 16919 2025-04-17T18:09:44.246243Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345920914776912:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.246274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c98/r3tmp/tmpXvmIdH/pdisk_1.dat 2025-04-17T18:09:44.372696Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11418, node 2 2025-04-17T18:09:44.412157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:44.412263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:44.415379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:44.440267Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:44.440282Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:44.440288Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:44.440376Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16919 TClient is connected to server localhost:16919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:44.769326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.775048Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:44.780945Z node 2 :FLAT_TX_SCHEMESHARD W ... d: 72057594046644480 2025-04-17T18:09:47.127878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.166708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.211711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.292400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345933799681237:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.292448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.292569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345933799681242:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.297800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:47.309929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345933799681244:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:47.393666Z node 2 :TX_PROXY ERROR: Actor# [2:7494345933799681300:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:48.238037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.790323Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:48.830282Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 1498, MsgBus: 1348 2025-04-17T18:09:49.736348Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345943082571637:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:49.736382Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c98/r3tmp/tmp8NA1QH/pdisk_1.dat 2025-04-17T18:09:49.960623Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:49.976735Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:49.976820Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:49.979162Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1498, node 3 2025-04-17T18:09:50.036107Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:50.036128Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:50.036136Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:50.036240Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1348 TClient is connected to server localhost:1348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:09:50.482290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.490820Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:50.508432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:50.588573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.764501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.824959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:53.236679Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345960262442576:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.236751Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.274968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.313675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.348799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.424872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.469817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.540525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.623844Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.684144Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345960262443247:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.684253Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.684491Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345960262443252:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.688835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:53.704531Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345960262443254:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:53.778947Z node 3 :TX_PROXY ERROR: Actor# [3:7494345960262443308:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:54.736883Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345943082571637:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:54.736968Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:54.956115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.053202Z node 3 :TX_PROXY ERROR: Actor# [3:7494345968852378532:3963] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:55.249116Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:55.278837Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::WriteWithParamsFieldOrder [GOOD] >> KqpMultishardIndex::CheckPushTopSort >> KqpMultishardIndex::DataColumnWrite+UseSink [GOOD] >> KqpMultishardIndex::DataColumnWrite-UseSink >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive >> KqpIndexes::UpdateIndexSubsetPk >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn [GOOD] >> KqpIndexes::IndexTopSortPushDown >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: Took 8.537883 seconds >> KqpUniqueIndex::UpsertExplicitNullInComplexFk >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> THiveTest::TestExternalBootWhenLocked [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2025-04-17T18:08:49.157913Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345686039989890:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:49.158010Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b7/r3tmp/tmplZN6MS/pdisk_1.dat 2025-04-17T18:08:49.633908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:49.668155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:49.668222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:49.681251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10674, node 1 2025-04-17T18:08:49.957505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:49.957530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:49.957537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:49.957730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:50.294151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:50.451959Z node 1 :TICKET_PARSER DEBUG: Ticket 3924486F47CF79DE221356D39C64F4F8794016CDCDC632CDEF97C5B22851BA54 (ipv6:[::1]:59234) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:08:50.554740Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:59250) has now valid token of root@builtin 2025-04-17T18:08:50.708014Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:50.708047Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:50.708057Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:50.708091Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:08:53.986521Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345700929317283:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:53.986582Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b7/r3tmp/tmphlD4Ih/pdisk_1.dat 2025-04-17T18:08:54.274070Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12835, node 4 2025-04-17T18:08:54.349095Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:54.350012Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:54.377443Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:54.418900Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:54.418939Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:54.418948Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:54.419097Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:54.631438Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:08:54.763255Z node 4 :TICKET_PARSER DEBUG: Ticket 3924486F47CF79DE221356D39C64F4F8794016CDCDC632CDEF97C5B22851BA54 (ipv6:[::1]:38754) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:08:54.848944Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:38768) has now valid token of root@builtin 2025-04-17T18:08:54.919729Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:08:54.919757Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:08:54.919765Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:08:54.919795Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:08:58.861648Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345723606248204:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:58.861729Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b7/r3tmp/tmptqDUgm/pdisk_1.dat 2025-04-17T18:08:59.335941Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:59.404422Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:59.404493Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:59.406901Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65382, node 7 2025-04-17T18:08:59.706706Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:59.706729Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:59.706741Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:59.706930Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:00.027583Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:00.046737Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:00.140132Z node 7 :TICKET_PARSER DEBUG: Ticket 9F1610A0F189065126D3FF73C23F2FE04DE41EAD37847E8447B94D668336B3A7 (ipv6:[::1]:53350) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-17T18:09:00.258458Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:53360) has now valid token of root@builtin 2025-04-17T18:09:00.342559Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-17T18:09:00.342587Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:09:00.342598Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-17T18:09:00.342627Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-17T18:09:03.938701Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494345745293650569:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:03.938742Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b7/r3tmp/tmpT6NwKW/pdisk_1.dat 2025-04-17T18:09:04.137352Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:04.168521Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0 ... ient_certificate:certificate verify failed. E0417 18:09:27.537172051 433913 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:27.553540210 434110 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
2025-04-17T18:09:32.782469Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7494345868090593230:2103];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:32.782660Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b7/r3tmp/tmpIxU8Ua/pdisk_1.dat 2025-04-17T18:09:33.108310Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:33.170052Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:33.170149Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:33.181123Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2399, node 25 2025-04-17T18:09:33.326593Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:33.326615Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:33.326625Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:33.326782Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:33.690046Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:37.779361Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7494345868090593230:2103];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:37.779448Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0417 18:09:43.795353689 436969 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:43.816641531 441122 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0417 18:09:43.846514982 436970 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:43.866659824 436970 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:43.919945516 437237 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:43.937918172 437237 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:43.967915398 441147 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:43.987988498 436970 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:44.029670185 436969 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:44.048073578 436970 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:44.088002943 437237 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:44.108958252 441159 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 2025-04-17T18:09:45.810703Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7494345925190721908:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:45.810773Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b7/r3tmp/tmpdLkKDt/pdisk_1.dat 2025-04-17T18:09:46.095113Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:46.153869Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:46.153994Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:46.162343Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21677, node 28 2025-04-17T18:09:46.378096Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:46.378128Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:46.378149Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:46.378375Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15689 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:46.824494Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.811118Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7494345925190721908:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:50.811201Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0417 18:09:57.004099044 442144 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.042335655 446008 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.091275816 446009 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.112057955 442144 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.171413813 442472 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.190621033 442140 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.216230845 442140 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.236357980 442140 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.277651991 442140 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.300795117 442140 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0417 18:09:57.349288150 442473 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0417 18:09:57.400118127 446152 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2025-04-17T18:09:22.165908Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.168711Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.169497Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:22.171839Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:22.174432Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:22.174515Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.175782Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2076] ControllerId# 72057594037932033 2025-04-17T18:09:22.175830Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.176946Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.177281Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.191790Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.191831Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.193637Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:81:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.193741Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.193826Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.193942Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.194075Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.194467Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.194625Z 
node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:87:2087] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.194652Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.194712Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:73:2076] 2025-04-17T18:09:22.194759Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:73:2076] 2025-04-17T18:09:22.194805Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.194851Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.195596Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.195674Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.197775Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.197847Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.198409Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:96:2074] ControllerId# 72057594037932033 2025-04-17T18:09:22.198430Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.198462Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.198573Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.199985Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.200034Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.201261Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:102:2078] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.201364Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:103:2079] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.201438Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:104:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.201526Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:105:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.201666Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:106:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.201749Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:107:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.201832Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:108:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.201848Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.201880Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:96:2074] 2025-04-17T18:09:22.201898Z node 2 :PIPE_CLIENT DEBUG: 
TClient[72057594037932033] lookup [2:96:2074] 2025-04-17T18:09:22.201921Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.201939Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.202257Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.202353Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.204267Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.204347Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.205021Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:114:2074] ControllerId# 72057594037932033 2025-04-17T18:09:22.205051Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.205097Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.205275Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.207031Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.207063Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.208382Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:120:2078] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.208536Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:121:2079] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.208692Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:122:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.208825Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:123:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.208959Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:124:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.209098Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:125:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.209249Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:126:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.209277Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.209342Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:114:2074] 2025-04-17T18:09:22.209372Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:114:2074] 2025-04-17T18:09:22.209402Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.209429Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.209762Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap 
Paths# [] 2025-04-17T18:09:22.210026Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:114:2074] 2025-04-17T18:09:22.210069Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.210097Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.210280Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.224235Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:73:2076] 2025-04-17T18:09:22.224291Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.224319Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.232535Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.232739Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:96:2074] 2025-04-17T18:09:22.232790Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.232843Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.232979Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.233250Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeLis ... 
st# true Marker# BPP21 2025-04-17T18:09:58.522441Z node 58 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.959 sample PartId# [72057594037927937:2:8:0:0:200:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 58 } TEvVPutResult{ TimestampMs# 2.307 VDiskId# [0:1:0:0:0] NodeId# 58 Status# OK } ] } 2025-04-17T18:09:58.522611Z node 58 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:200:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:09:58.522763Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-04-17T18:09:58.522982Z node 58 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [58:96:2093], reason = ReasonStop Marker# TSYS29 2025-04-17T18:09:58.523035Z node 58 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-04-17T18:09:58.523281Z node 58 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-04-17T18:09:58.523326Z node 58 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-04-17T18:09:58.523475Z node 58 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2025-04-17T18:09:58.524109Z node 58 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [58:97:2093] 2025-04-17T18:09:58.524159Z node 58 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [58:97:2093] 2025-04-17T18:09:58.524241Z node 58 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [58:96:2093] EventType# 268960257 2025-04-17T18:09:58.524341Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [58:440:2351] 2025-04-17T18:09:58.524391Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [58:440:2351] 2025-04-17T18:09:58.524659Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-17T18:09:58.524736Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:58.524872Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:09:58.524962Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:58.525209Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-17T18:09:58.525281Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:58.525382Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:09:58.525450Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:58.525926Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [58:453:2358] 
2025-04-17T18:09:58.525975Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [58:453:2358] 2025-04-17T18:09:58.526090Z node 58 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:58.529576Z node 58 :TABLET_RESOLVER DEBUG: SelectForward node 58 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [58:374:2299] 2025-04-17T18:09:58.529741Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [58:453:2358] 2025-04-17T18:09:58.529805Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [58:453:2358] 2025-04-17T18:09:58.529922Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [58:453:2358] 2025-04-17T18:09:58.529983Z node 58 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [58:453:2358] 2025-04-17T18:09:58.530094Z node 58 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-17T18:09:58.530379Z node 58 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:58.530580Z node 58 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-17T18:09:58.530673Z node 58 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-17T18:09:58.530707Z node 58 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-17T18:09:58.530774Z node 58 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [58:374:2299] CurrentLeaderTablet: [58:389:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:09:58.530878Z node 58 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [58:374:2299] CurrentLeaderTablet: [58:389:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:09:58.530980Z node 58 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [58:374:2299] CurrentLeaderTablet: [58:389:2311] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:09:58.531167Z node 58 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-17T18:09:58.531476Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [59:455:2093] 2025-04-17T18:09:58.531531Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [59:455:2093] 2025-04-17T18:09:58.531617Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:58.531685Z node 59 :TABLET_RESOLVER DEBUG: SelectForward node 59 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [58:323:2263] 2025-04-17T18:09:58.531745Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [59:455:2093] 2025-04-17T18:09:58.531796Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown 
[59:455:2093] 2025-04-17T18:09:58.531848Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 58 [59:455:2093] 2025-04-17T18:09:58.531984Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [59:455:2093] 2025-04-17T18:09:58.532049Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [59:455:2093] 2025-04-17T18:09:58.532267Z node 58 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [59:455:2093] 2025-04-17T18:09:58.532537Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [59:455:2093] 2025-04-17T18:09:58.532592Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [59:455:2093] 2025-04-17T18:09:58.532633Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [59:455:2093] 2025-04-17T18:09:58.532711Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [59:455:2093] 2025-04-17T18:09:58.532766Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [59:455:2093] 2025-04-17T18:09:58.532822Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [59:455:2093] 2025-04-17T18:09:58.533089Z node 58 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [59:443:2088] EventType# 268697624 2025-04-17T18:09:58.533258Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-04-17T18:09:58.533339Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:58.533531Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-17T18:09:58.533624Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:58.546999Z node 58 :BS_PROXY_PUT INFO: [c85e1a21dcb31b54] bootstrap ActorId# [58:458:2361] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T18:09:58.547155Z node 58 :BS_PROXY_PUT DEBUG: [c85e1a21dcb31b54] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T18:09:58.547230Z node 58 :BS_PROXY_PUT DEBUG: [c85e1a21dcb31b54] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T18:09:58.547301Z node 58 :BS_PROXY_PUT DEBUG: [c85e1a21dcb31b54] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2025-04-17T18:09:58.547356Z node 58 :BS_PROXY_PUT DEBUG: [c85e1a21dcb31b54] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2025-04-17T18:09:58.547511Z node 58 :BS_PROXY DEBUG: Send to queueActorId# [58:58:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T18:09:58.548694Z node 58 :BS_PROXY_PUT DEBUG: [c85e1a21dcb31b54] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# 
Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-17T18:09:58.548816Z node 58 :BS_PROXY_PUT DEBUG: [c85e1a21dcb31b54] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-17T18:09:58.548913Z node 58 :BS_PROXY_PUT INFO: [c85e1a21dcb31b54] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:09:58.549092Z node 58 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.758 sample PartId# [72057594037927937:2:9:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 58 } TEvVPutResult{ TimestampMs# 1.958 VDiskId# [0:1:0:0:0] NodeId# 58 Status# OK } ] } 2025-04-17T18:09:58.549262Z node 58 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:09:58.549405Z node 58 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 25905, MsgBus: 24000 2025-04-17T18:09:42.126277Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345913633443399:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:42.126513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c88/r3tmp/tmpXHsjJZ/pdisk_1.dat 2025-04-17T18:09:42.473518Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25905, node 1 2025-04-17T18:09:42.496337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:42.496441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:42.499550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:42.547746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:42.547770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:42.547781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:42.547855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24000 TClient is connected to server localhost:24000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:43.085537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.103442Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.114141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.277136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.445136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.514770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.984674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345922223379697:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.984785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:45.385512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.422605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.458108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.484641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.558120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.647407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.713898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.783249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345926518347660:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:45.783335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:45.783626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345926518347665:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:45.786977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:45.803262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345926518347667:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:45.896242Z node 1 :TX_PROXY ERROR: Actor# [1:7494345926518347725:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:46.872828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.118236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345913633443399:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:47.118294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26716, MsgBus: 5032 2025-04-17T18:09:50.757092Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345944852318275:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c88/r3tmp/tmpA2fWL1/pdisk_1.dat 2025-04-17T18:09:50.775505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:09:50.847389Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26716, node 2 2025-04-17T18:09:50.871719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:50.871816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:50.873950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:50.942704Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:50.942732Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:50.942738Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:50.942841Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5032 TClient is connected to server localhost:5032 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:51.377287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.398707Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:51.416338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.503395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.645585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.709548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:53.957825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345957737221777:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.957913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.010917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.041897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.073640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.124235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.158318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.205483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.276589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.343115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345962032189738:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.343205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.343244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345962032189743:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.346861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:54.356958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345962032189745:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:54.429572Z node 2 :TX_PROXY ERROR: Actor# [2:7494345962032189800:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:55.448529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.715265Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345944852318275:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:55.715591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:57.509100Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2wt73t2hjek19vwqh8qn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTcxODI5OTEtN2FmZTgzYjctYWI2NmNmMGEtNTAzMDU3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:57.550498Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTcxODI5OTEtN2FmZTgzYjctYWI2NmNmMGEtNTAzMDU3YTU=, ActorId: [2:7494345966327158226:2570], ActorState: ExecuteState, TraceId: 01js2f2wt73t2hjek19vwqh8qn, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 9713, msgbus: 4444 2025-04-17T18:07:05.419025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345236053337117:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:07:05.419073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001338/r3tmp/tmph7jKLT/pdisk_1.dat 2025-04-17T18:07:05.796559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9713, node 1 2025-04-17T18:07:05.863585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:07:05.866017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:07:05.869618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:07:05.885369Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:07:05.885402Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:07:05.885410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:07:05.885526Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4444 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:07:06.084502Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] Handle TEvNavigate describe path dc-1 2025-04-17T18:07:06.084553Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305143:2439] HANDLE EvNavigateScheme dc-1 2025-04-17T18:07:06.085706Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305143:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:06.115586Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305143:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:07:06.127558Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305143:2439] Handle TEvDescribeSchemeResult Forward to# [1:7494345240348305142:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
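The TClient::Ls exchange above prints a text-format protobuf (TEvDescribeSchemeResult with its PathDescription); the "(TRUNCATED)" marker is the test harness cutting the dump short, not an error. A minimal Python sketch that pulls flat "Key: value" scalars out of such a dump follows — an illustration of the layout only, not a real protobuf parser, and ls_scalars is a hypothetical helper name:

```python
import re

# Extract simple "Key: value" scalars from a text-format protobuf dump,
# e.g. the TClient::Ls response above. Nested/repeated fields are ignored.
def ls_scalars(dump: str) -> dict:
    fields = {}
    for key, value in re.findall(r'(\w+): ("[^"]*"|\S+)', dump):
        fields.setdefault(key, value.strip('"'))  # keep first occurrence
    return fields

sample = ('Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 '
          'PathType: EPathTypeDir CreateFinished: true')
print(ls_scalars(sample))
# -> {'Name': 'dc-1', 'PathId': '1', 'SchemeshardId': '72057594046644480',
#     'PathType': 'EPathTypeDir', 'CreateFinished': 'true'}
```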
2025-04-17T18:07:06.148124Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] Handle TEvProposeTransaction 2025-04-17T18:07:06.148155Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:07:06.148266Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345240348305158:2447] 2025-04-17T18:07:06.290035Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:07:06.290123Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:07:06.290148Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:07:06.290269Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:07:06.290539Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:06.290655Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:07:06.290711Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:07:06.290900Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:07:06.291560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:07:06.294115Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:07:06.294211Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305158:2447] txid# 281474976710657 SEND to# [1:7494345240348305157:2446] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
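Every record in these traces shares one layout: an ISO-8601 timestamp, "node <N>", ":<COMPONENT> <SEVERITY>:", then the free-form message. A small Python sketch that splits a record accordingly; the layout is inferred from the lines above, and multi-line records such as the "issues: {" blocks would need extra handling:

```python
import re

# "<ts>Z node <n> :<component> <severity>: <msg>" -- the record shape
# used throughout this log.
RECORD = re.compile(
    r'(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) '
    r'node (?P<node>\d+) '
    r':(?P<component>\w+) (?P<severity>[A-Z]+): (?P<msg>.*)'
)

line = ('2025-04-17T18:07:06.148124Z node 1 :TX_PROXY DEBUG: '
        'actor# [1:7494345236053337343:2115] Handle TEvProposeTransaction')
m = RECORD.match(line)
assert m and m['component'] == 'TX_PROXY' and m['severity'] == 'DEBUG'
```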
2025-04-17T18:07:06.310868Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:07:06.322297Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] Handle TEvProposeTransaction 2025-04-17T18:07:06.322324Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:07:06.322357Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345240348305203:2488] 2025-04-17T18:07:06.324740Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:07:06.324793Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:07:06.324816Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:07:06.324878Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:07:06.325110Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:07:06.325185Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:07:06.325221Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:07:06.325383Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:07:06.325799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:07:06.331702Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:07:06.331745Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345240348305203:2488] txid# 281474976710658 SEND to# [1:7494345240348305202:2487] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:07:08.301821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345248938239891:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:08.301922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345248938239902:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:08.302267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:08.302654Z node 1 :TX_PROXY DEBUG: actor# [1:7494345236053337343:2115] Handle TEvProposeTr ... wd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44872" 2025-04-17T18:09:57.496444Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:57.496467Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:57.496518Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:57.496802Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:57.496910Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:57.496969Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T18:09:57.497198Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T18:09:57.510252Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-17T18:09:57.510298Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925762:2589] txid# 281474976710661 SEND to# [59:7494345978168925761:2333] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T18:09:57.694298Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] Handle TEvProposeTransaction 2025-04-17T18:09:57.694339Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T18:09:57.694405Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7494345978168925789:2606] 2025-04-17T18:09:57.697858Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44886" 2025-04-17T18:09:57.697938Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
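A few records below, HandleResolveDatabase logs CheckAdministrator, CheckDatabaseAdministrator, IsClusterAdministrator, IsDatabaseAdministrator and DatabaseOwner before rejecting ordinaryuser with "Access denied ... attempt to manage user". A Python sketch of that visible decision — inferred from the log output only, not from YDB's actual access-control code:

```python
# When administrator checks are enabled, only a cluster administrator,
# a database administrator, or the database owner may manage users.
# This rule is read off the log lines below; names are illustrative.
def may_manage_users(check_admin: bool,
                     is_cluster_admin: bool,
                     is_database_admin: bool,
                     is_owner: bool) -> bool:
    if not check_admin:              # checks disabled -> allowed
        return True
    return is_cluster_admin or is_database_admin or is_owner

# ordinaryuser: CheckAdministrator=1, IsClusterAdministrator=0,
# IsDatabaseAdministrator=0, owner is root@builtin -> denied, matching
# the "Access denied for ordinaryuser" error in the trace below.
assert may_manage_users(True, False, False, False) is False
```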
2025-04-17T18:09:57.697965Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:57.698032Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:57.700354Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:57.700505Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:57.700574Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T18:09:57.700756Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T18:09:57.701398Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.705358Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-17T18:09:57.705425Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925789:2606] txid# 281474976710662 SEND to# [59:7494345978168925788:2348] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T18:09:57.773376Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] Handle TEvProposeTransaction 2025-04-17T18:09:57.773403Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T18:09:57.773445Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7494345978168925826:2629] 2025-04-17T18:09:57.775877Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44892" 2025-04-17T18:09:57.775934Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:57.775956Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:09:57.776004Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:57.776272Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:57.776361Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:09:57.776411Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-17T18:09:57.776548Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 HANDLE EvClientConnected 2025-04-17T18:09:57.787503Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-17T18:09:57.787561Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925826:2629] txid# 281474976710663 SEND to# [59:7494345978168925825:2350] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-04-17T18:09:57.862352Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] Handle TEvProposeTransaction 2025-04-17T18:09:57.862388Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] TxId# 281474976710664 ProcessProposeTransaction 2025-04-17T18:09:57.862440Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7494345978168925856:2643] 2025-04-17T18:09:57.865943Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU5NywiaWF0IjoxNzQ0OTEzMzk3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.T3NoqW0Vfa5XZFUCpGENjK_pimzUObrmjD1zZQI4KbQATJ8c88PmM8T6T_PNAxfHaQNftGJeDJrSL4xg823cEMOFP4oJeHLsFYfcVOjc7EeLtgHN3vBJYaKMtCPIbI6ovupz8J43fjP5UXRVnrYjnF-QRekF67GN5l-mI6S9lZMFT43pcv7jEKemr3AcB0PZFTuNN0GwdnXM0VP1dZ47q9GeTfCuHUT9yV24ZRwonrKxpCz-QfSh43kr8O04Tlt3r9oXEQf5gOixPPn6TIOE_LUX-eTXEL-OkV03kHfw7d8NSBW2bdKtDa2k9G8nxJ86g65pxQNlWlBrMj9vt_LH9Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1NjU5NywiaWF0IjoxNzQ0OTEzMzk3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44906" 2025-04-17T18:09:57.866018Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:09:57.866040Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-17T18:09:57.866187Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:09:57.866238Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:09:57.866282Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:09:57.866539Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:09:57.866564Z node 59 :TX_PROXY ERROR: Actor# [59:7494345978168925856:2643] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-04-17T18:09:57.866652Z node 59 :TX_PROXY ERROR: Actor# [59:7494345978168925856:2643] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-17T18:09:57.866682Z node 59 :TX_PROXY DEBUG: Actor# [59:7494345978168925856:2643] txid# 281474976710664 SEND to# [59:7494345978168925855:2362] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:09:57.867603Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=MmY4MzQ3NjYtNjVjYjBkYjgtYjZiZDlmNTQtMjBmNDExZTM=, ActorId: [59:7494345978168925844:2362], ActorState: ExecuteState, TraceId: 01js2f2y2d4104d1149qekxmm8, Create QueryResponse for error on request, msg: 2025-04-17T18:09:57.867990Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] Handle TEvExecuteKqpTransaction 2025-04-17T18:09:57.868008Z node 59 :TX_PROXY DEBUG: actor# [59:7494345956694088280:2109] TxId# 281474976710665 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2025-04-17T18:09:22.406531Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.410074Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.410359Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:22.410918Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:22.412138Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:22.412195Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.413027Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-17T18:09:22.413077Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.413221Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.413528Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} 
StartRequestReportingThrottler 2025-04-17T18:09:22.424051Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.424105Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.426263Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.426433Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.426568Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.426735Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.426927Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.427060Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.427210Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.427245Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.427320Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-04-17T18:09:22.427353Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-04-17T18:09:22.427411Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.427457Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.428144Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.428336Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.461862Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.461968Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.462013Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.464032Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-04-17T18:09:22.464074Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-04-17T18:09:22.464135Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.464182Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:22.469661Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:22.469930Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.470967Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.471669Z node 1 
:BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:22.471822Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:22.471939Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.472187Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.472539Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.472591Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-17T18:09:22.472679Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-17T18:09:22.472709Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-17T18:09:22.472794Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2025-04-17T18:09:22.472834Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:52:2092] 2025-04-17T18:09:22.473249Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-17T18:09:22.473313Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.473512Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.473581Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.473664Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:52:2092] 2025-04-17T18:09:22.477657Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.477775Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:22.477823Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-17T18:09:22.479449Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.479547Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:22.479948Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-17T18:09:22.480013Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72057594037936129 Cookie: 1} 2025-04-17T18:09:22.480046Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-17T18:09:22.480719Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-04-17T18:09:22.480768Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-04-17T18:09:22.480927Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.481183Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.481371Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.481513Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.481566Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:22.481592Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-17T18:09:22.481667Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-04-17T18:09:22.481688Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-04-17T18:09:22.481766Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.481867Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-17T18:09:22.481902Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.481988Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-17T18:09:22.482101Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-17T18:09:22.482141Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-17T18:09:22.482164Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-17T18:09:22.482277Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-17T18:09:22.482389Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-17T18:09:22.482421Z node 1 :BS_NODE ... 
31Z node 37 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 1} 2025-04-17T18:09:59.187784Z node 37 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 2} 2025-04-17T18:09:59.188018Z node 38 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [37:326:2265] CurrentLeaderTablet: [37:340:2273] CurrentGeneration: 2 CurrentStep: 0} 2025-04-17T18:09:59.188108Z node 38 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [37:326:2265] CurrentLeaderTablet: [37:340:2273] CurrentGeneration: 2 CurrentStep: 0} 2025-04-17T18:09:59.188235Z node 38 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594046678944 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594046678944 Cookie: 0 CurrentLeader: [37:326:2265] CurrentLeaderTablet: [37:340:2273] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:09:59.188304Z node 38 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594046678944 followers: 0 2025-04-17T18:09:59.188381Z node 38 :TABLET_RESOLVER DEBUG: SelectForward node 38 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [37:326:2265] 2025-04-17T18:09:59.188503Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 37 [38:549:2090] 2025-04-17T18:09:59.188652Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [38:549:2090] 2025-04-17T18:09:59.188709Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [38:549:2090] 2025-04-17T18:09:59.188931Z node 37 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [38:549:2090] 2025-04-17T18:09:59.189256Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [38:549:2090] 2025-04-17T18:09:59.189306Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [38:549:2090] 2025-04-17T18:09:59.189409Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [38:549:2090] 2025-04-17T18:09:59.189435Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [38:549:2090] 2025-04-17T18:09:59.189496Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [38:549:2090] 2025-04-17T18:09:59.189640Z node 37 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [38:548:2090] EventType# 271122945 2025-04-17T18:09:59.189787Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-04-17T18:09:59.189876Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:59.190094Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:09:59.190372Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:59.192033Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] 
::Bootstrap [38:555:2091] 2025-04-17T18:09:59.192067Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [38:555:2091] 2025-04-17T18:09:59.192103Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [38:556:2092] 2025-04-17T18:09:59.192122Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [38:556:2092] 2025-04-17T18:09:59.192348Z node 38 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:59.192409Z node 38 :TABLET_RESOLVER DEBUG: SelectForward node 38 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [37:325:2264] 2025-04-17T18:09:59.192545Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [38:555:2091] 2025-04-17T18:09:59.192613Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [38:556:2092] 2025-04-17T18:09:59.192780Z node 38 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:59.193059Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 37 [38:555:2091] 2025-04-17T18:09:59.193320Z node 38 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:59.193384Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [38:555:2091] 2025-04-17T18:09:59.193418Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [38:555:2091] 2025-04-17T18:09:59.198064Z node 37 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-17T18:09:59.198215Z node 37 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-17T18:09:59.198280Z node 37 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-17T18:09:59.198388Z node 37 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [38:555:2091] 2025-04-17T18:09:59.199023Z node 38 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [37:463:2365] CurrentLeaderTablet: [37:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:09:59.199277Z node 38 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [37:463:2365] CurrentLeaderTablet: [37:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:09:59.199419Z node 38 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [37:463:2365] CurrentLeaderTablet: [37:478:2377] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:09:59.199460Z node 38 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-17T18:09:59.199530Z node 38 :TABLET_RESOLVER DEBUG: SelectForward node 38 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [37:463:2365] 2025-04-17T18:09:59.199605Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 37 
[38:556:2092] 2025-04-17T18:09:59.200160Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [38:556:2092] 2025-04-17T18:09:59.200231Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [38:556:2092] 2025-04-17T18:09:59.200393Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [38:555:2091] 2025-04-17T18:09:59.200451Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [38:555:2091] 2025-04-17T18:09:59.200503Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [38:555:2091] 2025-04-17T18:09:59.200653Z node 38 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [38:555:2091] 2025-04-17T18:09:59.201078Z node 37 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [38:556:2092] 2025-04-17T18:09:59.201621Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [38:556:2092] 2025-04-17T18:09:59.201686Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [38:556:2092] 2025-04-17T18:09:59.201721Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [38:556:2092] 2025-04-17T18:09:59.201820Z node 38 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [38:556:2092] 2025-04-17T18:09:59.202085Z node 37 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [38:552:2091] EventType# 268959744 2025-04-17T18:09:59.202437Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-17T18:09:59.202522Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:59.202775Z node 37 :HIVE WARN: HIVE#72057594037927937 Node(38, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:59.202905Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-17T18:09:59.202991Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:59.203144Z node 37 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [38:553:2092] EventType# 268959744 2025-04-17T18:09:59.203416Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-17T18:09:59.203479Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:59.203612Z node 37 :HIVE WARN: HIVE#72075186224037888 Node(38, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:59.203697Z node 37 :HIVE WARN: HIVE#72075186224037888 Node(38, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:59.203784Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-17T18:09:59.203838Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:59.204063Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type 
NKikimr::NHive::TTxProcessBootQueue 2025-04-17T18:09:59.204157Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:59.204278Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:09:59.204372Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:09:59.204494Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-17T18:09:59.204545Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:09:59.204609Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:09:59.204644Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 20277, MsgBus: 29893 2025-04-17T18:09:23.224426Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345832715949001:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.236020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cbc/r3tmp/tmp9x4NZY/pdisk_1.dat 2025-04-17T18:09:23.686165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.710062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.710158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.731528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20277, node 1 2025-04-17T18:09:23.873419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.873441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.873460Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.873590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29893 TClient is connected to server localhost:29893 WaitRootIsUp 'Root'... 
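The recurring "Resource pool default not found" warnings in this log are startup noise: the workload service probes /Root/.metadata/workload_manager/pools/default before any creator has run, then a TPoolCreatorActor creates the pool, retrying on "Transaction ... completed, doublechecking" and tolerating a concurrent creator ("path exist, request accepts it"). A generic create-if-missing sketch of that dance; fetch/create and the status strings are illustrative stand-ins, not YDB APIs:

```python
import time

# Create-if-missing with retries, tolerating a racing creator.
def ensure_default_pool(fetch, create, retries=5, delay=0.1):
    for _ in range(retries):
        if fetch() == "OK":                       # pool already visible
            return True
        status = create()
        if status in ("OK", "ALREADY_EXISTS"):    # a racing creator is fine
            return True
        time.sleep(delay)   # "Scheduled retry for error" in the log above
    return False
```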
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.653414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.668438Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:24.679750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.928696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.142144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.222780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.716054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345845600852580:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:26.716163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.106714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.153721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.202128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.269114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.302319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.375992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.437163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.533077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849895820550:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.533179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.533436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345849895820555:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.537483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.550479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345849895820557:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.614477Z node 1 :TX_PROXY ERROR: Actor# [1:7494345849895820614:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.221369Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345832715949001:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.221429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.633787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8120, MsgBus: 29257 2025-04-17T18:09:30.447411Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345859988609938:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:30.447489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cbc/r3tmp/tmprlEkDw/pdisk_1.dat 2025-04-17T18:09:30.554885Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8120, node 2 2025-04-17T18:09:30.588285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:30.588383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:30.591116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:30.609023Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:30.609044Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:30.609053Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:30.609180Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29257 TClient is connected to server localhost:29257 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:31.065667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31.078445Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:31.096473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:31. ... 85:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:34.223154Z node 2 :TX_PROXY ERROR: Actor# [2:7494345877168481641:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:35.450278Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345859988609938:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:35.450350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:35.477254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2025-04-17T18:09:35.526155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.581079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.615906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.688398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:09:35.731873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17692, MsgBus: 9144 2025-04-17T18:09:43.840552Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345916648817304:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.840603Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cbc/r3tmp/tmpqpe73Q/pdisk_1.dat 2025-04-17T18:09:43.995247Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:44.014171Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:44.014273Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:44.018327Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17692, node 3 2025-04-17T18:09:44.055843Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:44.055869Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:44.055877Z node 3 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:44.056010Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9144 TClient is connected to server localhost:9144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:44.563323Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.580120Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:44.589512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.654622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.841665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.931927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.246272Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345933828688277:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.246378Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.300995Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.340258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.376792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.419488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.489120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.568848Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.649203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.719406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345933828688952:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.719512Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.719767Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345933828688957:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.723381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:47.735675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345933828688959:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:47.824926Z node 3 :TX_PROXY ERROR: Actor# [3:7494345933828689016:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:48.840598Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345916648817304:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:48.840657Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:49.006463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2025-04-17T18:09:49.097565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.197129Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.261210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.355696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.467170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] >> KqpMultishardIndex::SortedRangeReadDesc [GOOD] >> KqpMultishardIndex::SortByPk >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> KqpIndexes::IndexOr [GOOD] >> KqpIndexes::IndexFilterPushDown >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 7431, MsgBus: 29276 2025-04-17T18:09:43.644283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345914949328677:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.644343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001c86/r3tmp/tmpZxCKof/pdisk_1.dat 2025-04-17T18:09:43.955732Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7431, node 1 2025-04-17T18:09:44.046838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:44.047010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:44.048660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:44.053597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:44.053632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:44.053639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:44.053969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29276 TClient is connected to server localhost:29276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:44.585039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.606873Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:44.635295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.805331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.991420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:09:45.058660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.695950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345927834232336:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:46.696055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:46.979146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.011690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.045972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.073383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.111431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.152863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.234507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.335659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932129200300:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.335730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.335950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932129200305:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.339693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:47.353973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345932129200307:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:47.461783Z node 1 :TX_PROXY ERROR: Actor# [1:7494345932129200364:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:48.420597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.645977Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345914949328677:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:48.646030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:50.130689Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2nwz97faxmqga39rx31m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZDgyYTAtZDE4MWM2YzMtYmM2NDJjMTMtMWEzYWFmOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:50.140143Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjA0ZDgyYTAtZDE4MWM2YzMtYmM2NDJjMTMtMWEzYWFmOTM=, ActorId: [1:7494345936424168774:2570], ActorState: ExecuteState, TraceId: 01js2f2nwz97faxmqga39rx31m, Create QueryResponse for error on request, msg: 2025-04-17T18:09:50.809834Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2pj888xdwtpq5j5ts5gt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZDgyYTAtZDE4MWM2YzMtYmM2NDJjMTMtMWEzYWFmOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-17T18:09:50.810070Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjA0ZDgyYTAtZDE4MWM2YzMtYmM2NDJjMTMtMWEzYWFmOTM=, ActorId: [1:7494345936424168774:2570], ActorState: ExecuteState, TraceId: 01js2f2pj888xdwtpq5j5ts5gt, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 28565, MsgBus: 5599 2025-04-17T18:09:51.738097Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345952322711009:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:51.738153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c86/r3tmp/tmpS3hkam/pdisk_1.dat 2025-04-17T18:09:51.875549Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28565, node 2 2025-04-17T18:09:51.905873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:51.905977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:51.909183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:51.932370Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:51.932394Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:51.932401Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:51.932520Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5599 TClient is connected to server localhost:5599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:52.457514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:09:52.469409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.554752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:52.722507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.800007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:54.911705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345965207614672:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.911789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.982050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.013881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.043917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.076463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.109942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.161263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.206710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.276927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345969502582631:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:55.276987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:55.277138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345969502582636:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:55.280458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:55.296028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345969502582638:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:55.392133Z node 2 :TX_PROXY ERROR: Actor# [2:7494345969502582695:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:56.332664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.740428Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345952322711009:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:56.740491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:58.669814Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2xz30snerc218dqj7k55, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTAzNWNmMGQtYjYzMjE0NDctYjMzNGRhNDUtZmQ4NDlhNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:09:58.670074Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTAzNWNmMGQtYjYzMjE0NDctYjMzNGRhNDUtZmQ4NDlhNWE=, ActorId: [2:7494345973797551102:2570], ActorState: ExecuteState, TraceId: 01js2f2xz30snerc218dqj7k55, Create QueryResponse for error on request, msg: 2025-04-17T18:10:00.256495Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f2z2mbs0c34wh5at0sn56, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTAzNWNmMGQtYjYzMjE0NDctYjMzNGRhNDUtZmQ4NDlhNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-17T18:10:00.256721Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTAzNWNmMGQtYjYzMjE0NDctYjMzNGRhNDUtZmQ4NDlhNWE=, ActorId: [2:7494345973797551102:2570], ActorState: ExecuteState, TraceId: 01js2f2z2mbs0c34wh5at0sn56, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8471, MsgBus: 1523 2025-04-17T18:09:47.952518Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345933287472642:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:47.952563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c77/r3tmp/tmpZE1IDI/pdisk_1.dat 2025-04-17T18:09:48.443030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:48.443118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:48.445243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:48.447817Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8471, node 1 2025-04-17T18:09:48.554728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:48.554749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:48.554757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:48.554870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1523 TClient is connected to server localhost:1523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:49.157163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:49.205918Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:49.216627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.404992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.571609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.640977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.232176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345950467343588:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.232279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.564377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.605514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.637425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.669936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.701260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.734128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.801223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.899221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345950467344250:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.899419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.899628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345950467344257:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.903075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:51.919338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345950467344259:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:52.024774Z node 1 :TX_PROXY ERROR: Actor# [1:7494345954762311613:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:52.952517Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345933287472642:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:52.952653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:52.959411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15977, MsgBus: 27240 2025-04-17T18:09:54.389655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345962199213571:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:54.390829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c77/r3tmp/tmpIk2t4t/pdisk_1.dat 2025-04-17T18:09:54.529494Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:54.548324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:54.548397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:54.549998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15977, node 2 2025-04-17T18:09:54.600453Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:54.600477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:54.600484Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:54.600589Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27240 TClient is connected to server localhost:27240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:55.058239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.067018Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:55.076482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.138505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.300879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.373017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.747784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345975084117204:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.747878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.796004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.830823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.911466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.954423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.991238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.072170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.147083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.228960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345979379085172:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.229125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.229612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345979379085177:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.233552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:58.247375Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T18:09:58.250316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345979379085180:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:58.348591Z node 2 :TX_PROXY ERROR: Actor# [2:7494345979379085238:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:59.389153Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345962199213571:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:59.400806Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:59.528625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
>> THiveTest::TestDeleteOwnerTablets [GOOD]
>> THiveTest::TestDeleteOwnerTabletsMany
>> KqpUniqueIndex::ReplaceFkPartialColumnSet [GOOD]
>> KqpUniqueIndex::UpdateFkAlreadyExist
>> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD]
>> KqpIndexes::SecondaryIndexOrderBy2 [GOOD]
>> KqpIndexes::SecondaryIndexReplace+UseSink
>> KqpMultishardIndex::CheckPushTopSort [GOOD]
>> KqpUniqueIndex::InsertFkDuplicate [GOOD]
>> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD]
>> KqpUniqueIndex::ReplaceFkDuplicate
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 9049, MsgBus: 22573 2025-04-17T18:09:39.485127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345898401053060:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:39.485177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c92/r3tmp/tmpYZQ998/pdisk_1.dat 2025-04-17T18:09:39.917990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:39.918086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:39.926738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:39.932198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9049, node 1 2025-04-17T18:09:40.002045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:40.002066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:40.002074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:40.002202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22573 TClient is connected to server localhost:22573 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:40.549044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.571003Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:40.577751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.733093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:40.914012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:41.000677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:09:42.659082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345911285956725:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:42.659187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.024073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.052093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.077564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.106511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.138312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.216329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.251104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:43.319838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345915580924686:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.319902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.320056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345915580924691:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.324132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:43.333736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345915580924693:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:43.390498Z node 1 :TX_PROXY ERROR: Actor# [1:7494345915580924747:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:44.278483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.485607Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345898401053060:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.485673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:45.285559Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:45.335756Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345924170860104:2566], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestTable/Index/indexImplTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:09:45.335945Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjUzMDVjZmEtNTk5YmYzZmYtM2JkMWJmMzItNGU0MjBkYzk=, ActorId: [1:7494345919875892377:2512], ActorState: ExecuteState, TraceId: 01js2f2htq6xnghm8fpj5yh1ev, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:09:45.363915Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345924170860111:2569], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Required global index not found, index name: WrongView, code: 2003 2025-04-17T18:09:45.365216Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjUzMDVjZmEtNTk5YmYzZmYtM2JkMWJmMzItNGU0MjBkYzk=, ActorId: [1:7494345919875892377:2512], ActorState: ExecuteState, TraceId: 01js2f2hvwebfx85hdzx75wwm1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:09:45.844379Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:46.046796Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:46.544813Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:46.860626Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 1606, MsgBus: 9372 2025-04-17T18:09:47.754437Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345934485368918:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:47.754632Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c92/r3tmp/tmpB9JKwd/pdisk_1.dat 2025-04-17T18:09:47.887365Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1606, node 2 2025-04-17T18:09:47.936559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:47.936684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:47.948036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:48.006780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:48.006812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:48.006821Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:48.006947Z node 2 :NET_ ... pCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.414793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.481604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345951665240418:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.481655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345951665240423:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.481677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.484383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:51.496168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345951665240425:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:51.562996Z node 2 :TX_PROXY ERROR: Actor# [2:7494345951665240479:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:52.554752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.754646Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345934485368918:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:52.754713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:53.210907Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:53.750886Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:53.763264Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:54.108116Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:09:54.126314Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 6520, MsgBus: 14175 2025-04-17T18:09:55.176978Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345966187219330:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:55.181039Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c92/r3tmp/tmpRu9Aa0/pdisk_1.dat 2025-04-17T18:09:55.327323Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:55.343103Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:55.343224Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:55.345330Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6520, node 3 2025-04-17T18:09:55.438974Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:55.439000Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:55.439009Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:55.439174Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14175 TClient is connected to server localhost:14175 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:56.012291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.028037Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:56.044659Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.115397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.314277Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.410285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.010816Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345979072122843:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.010938Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.069573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.142970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.185182Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.254514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.325102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.370770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.427268Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.496577Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345983367090807:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.496678Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.496928Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345983367090812:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.500594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:59.510340Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345983367090814:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:59.577003Z node 3 :TX_PROXY ERROR: Actor# [3:7494345983367090868:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:00.170472Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345966187219330:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:00.170549Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:00.836632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.932232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
>> THiveTest::TestHiveBalancerDifferentResources2 [GOOD]
>> THiveTest::TestHiveBalancerUselessNeighbourMoves
>> KqpUniqueIndex::InsertNullInComplexFk [GOOD]
>> KqpUniqueIndex::InsertNullInComplexFkDuplicate
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [FAIL]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly
>> KqpIndexes::UpdateIndexSubsetPk [GOOD]
>> KqpIndexes::UpdateOnReadColumns
>> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD]
>> DataStreams::TestPutRecordsOfAnauthorizedUser
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD]
Test command err:
Trying to start YDB, gRPC: 28556, MsgBus: 12997 2025-04-17T18:09:51.006581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345950358703952:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:51.006658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c70/r3tmp/tmpQPRmkn/pdisk_1.dat 2025-04-17T18:09:51.370006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:51.407422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:51.407528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28556, node 1 2025-04-17T18:09:51.409419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:51.449257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:51.449283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:51.449291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:51.449401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12997 TClient is connected to server localhost:12997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:51.978046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:52.018460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:52.161170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:52.327609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:52.426116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:54.196005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345963243607612:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.196121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.600024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.658802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.697307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.731722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.767391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.844577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.899903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:54.952270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345963243608274:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.952345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.952574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345963243608279:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:54.955903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:54.965255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345963243608281:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:55.016277Z node 1 :TX_PROXY ERROR: Actor# [1:7494345967538575631:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:56.006746Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345950358703952:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:56.006827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:56.080709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:56.800151Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 9494, MsgBus: 61032 2025-04-17T18:09:57.740637Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345976586342631:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:57.740909Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c70/r3tmp/tmpsp2hmM/pdisk_1.dat 2025-04-17T18:09:57.868448Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:57.925311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:57.925393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:57.927447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9494, node 2 2025-04-17T18:09:58.025247Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:58.025272Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:58.025280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:58.025414Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61032 TClient is connected to server localhost:61032 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:58.679319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.687769Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:58.700285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.788703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.954193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.039638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.232171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345993766213577:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.232271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.282238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.316806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.353180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.389084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.461550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.502261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.536370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.607851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345993766214240:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.607969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.608359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345993766214245:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.612593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:01.627480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345993766214247:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:01.706783Z node 2 :TX_PROXY ERROR: Actor# [2:7494345993766214305:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:02.740963Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345976586342631:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:02.741025Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:02.754884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkDuplicate [GOOD]
Test command err:
Trying to start YDB, gRPC: 3645, MsgBus: 17427 2025-04-17T18:09:49.616615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345943834704100:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:49.616823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c71/r3tmp/tmpbpPGAn/pdisk_1.dat 2025-04-17T18:09:50.026205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:50.065969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:50.066048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3645, node 1 2025-04-17T18:09:50.071639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:50.128998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:50.129020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:50.129031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:50.129159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17427 TClient is connected to server localhost:17427 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:50.741111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.763329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.769113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.939141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.103646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.180606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:52.828212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345956719607619:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.828319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.175896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.208363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.237812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.268911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.301407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.334495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.382332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.473224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345961014575578:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.473293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.473438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345961014575583:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:53.476739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:53.490718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345961014575585:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:53.594211Z node 1 :TX_PROXY ERROR: Actor# [1:7494345961014575642:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:54.597190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:54.618269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345943834704100:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:54.618336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:56.069263Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345973899479079:2615], TxId: 281474976710677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=N2E5MjQ1MDgtNTMxOTI2NzYtNGIxMzQxMjItNjYzZTBhODM=. TraceId : 01js2f2vymfem6v7ytk47jhxmg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:09:56.069627Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345973899479081:2616], TxId: 281474976710677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=N2E5MjQ1MDgtNTMxOTI2NzYtNGIxMzQxMjItNjYzZTBhODM=. TraceId : 01js2f2vymfem6v7ytk47jhxmg. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494345973899479073:2572], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:09:56.070141Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2E5MjQ1MDgtNTMxOTI2NzYtNGIxMzQxMjItNjYzZTBhODM=, ActorId: [1:7494345965309544061:2572], ActorState: ExecuteState, TraceId: 01js2f2vymfem6v7ytk47jhxmg, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 19698, MsgBus: 15140 2025-04-17T18:09:56.863678Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345972938636978:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:56.863783Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c71/r3tmp/tmpglMCkH/pdisk_1.dat 2025-04-17T18:09:57.050403Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:57.055966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:57.056067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:57.057715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19698, node 2 2025-04-17T18:09:57.181235Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:57.181259Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:57.181266Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:57.181363Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15140 TClient is connected to server localhost:15140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:57.616261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.624688Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:57.644486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:57.752602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.925375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.044534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.423177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345990118507902:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.423273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.469688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.507123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.545098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.591718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.627798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.660835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.735313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.804972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345990118508563:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.805067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.805118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345990118508568:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.808566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:00.827591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345990118508570:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:00.885868Z node 2 :TX_PROXY ERROR: Actor# [2:7494345990118508625:3536] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:01.860669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.865063Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345972938636978:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:01.865126Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-17T18:10:03.417024Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346003003412045:2615], TxId: 281474976710677, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZmM2MjlkMWUtNmFlZmIxOTktYmFjMzllOWItNWU4Y2JhNmM=. TraceId : 01js2f333afe7bx953s156wqdn. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-17T18:10:03.417295Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346003003412047:2616], TxId: 281474976710677, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZmM2MjlkMWUtNmFlZmIxOTktYmFjMzllOWItNWU4Y2JhNmM=. TraceId : 01js2f333afe7bx953s156wqdn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346003003412042:2571], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:10:03.417642Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM2MjlkMWUtNmFlZmIxOTktYmFjMzllOWItNWU4Y2JhNmM=, ActorId: [2:7494345998708444322:2571], ActorState: ExecuteState, TraceId: 01js2f333afe7bx953s156wqdn, Create QueryResponse for error on request, msg: >> DataStreams::TestStreamStorageRetention >> DataStreams::TestGetRecordsStreamWithSingleShard >> DataStreams::TestGetShardIterator >> DataStreams::TestReservedResourcesMetering >> KqpIndexes::UpsertMultipleUniqIndexes [GOOD] >> KqpIndexes::UpsertNoIndexColumns >> DataStreams::TestUpdateStream >> DataStreams::TestControlPlaneAndMeteringData >> DataStreams::TestDeleteStream >> Yq_1::Basic_EmptyDict [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3054, MsgBus: 20117 2025-04-17T18:09:45.946329Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345924806696179:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:45.946933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7c/r3tmp/tmptgXqwl/pdisk_1.dat 2025-04-17T18:09:46.412884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:46.415260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:46.415343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:46.419158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3054, node 1 2025-04-17T18:09:46.493994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:46.494017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:46.494023Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:46.494106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20117 TClient is connected to server localhost:20117 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:47.073561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.098882Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:47.120848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.279492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.448575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.519688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.263897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345941986567132:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.263988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.648382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.690769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.728249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.809247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.845574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.894623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.975256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.063530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345946281535106:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.063604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.064003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345946281535111:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.067709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:50.081318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345946281535113:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:50.189950Z node 1 :TX_PROXY ERROR: Actor# [1:7494345946281535168:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:50.942311Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345924806696179:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:50.942372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:51.107624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22849, MsgBus: 1094 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7c/r3tmp/tmp4m7NCt/pdisk_1.dat 2025-04-17T18:09:52.389549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:52.441115Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22849, node 2 2025-04-17T18:09:52.485199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:52.485283Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:52.500032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:52.578765Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:52.578788Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:52.578796Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:52.578907Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1094 TClient is connected to server localhost:1094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:09:52.997003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:53.006608Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:53.015788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:53.073482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 w ... on part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.940316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.976696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:56.015245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:56.057288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:56.148025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345970917634735:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:56.148100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:56.148312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345970917634740:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:56.150998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:56.162767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345970917634742:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:56.233786Z node 2 :TX_PROXY ERROR: Actor# [2:7494345970917634798:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:57.221250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.274932Z node 2 :TX_PROXY ERROR: Actor# [2:7494345975212602490:3831] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:57.288490Z node 2 :TX_PROXY ERROR: Actor# [2:7494345975212602504:3837] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:57.306661Z node 2 :TX_PROXY ERROR: Actor# [2:7494345975212602511:3842] txid# 281474976715675, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23527, MsgBus: 27232 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7c/r3tmp/tmpTSyYAw/pdisk_1.dat 2025-04-17T18:09:58.392224Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:58.419766Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:58.428555Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:58.428644Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:58.430845Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23527, node 3 2025-04-17T18:09:58.606617Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:58.606639Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:58.606646Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:58.606778Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27232 TClient is connected to server localhost:27232 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:59.081851Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.091028Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:59.101947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.230966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.380293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.454410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:02.095340Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345996446723131:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.095435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.169647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.207419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.259850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.299294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.331688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.371684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.410472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.469567Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345996446723797:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.469632Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.469697Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345996446723802:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.472742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:02.481866Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345996446723804:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:02.539663Z node 3 :TX_PROXY ERROR: Actor# [3:7494345996446723859:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:03.817769Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk >> KqpBatchDelete::SimplePartitions [GOOD] >> KqpIndexes::IndexTopSortPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-04-17T18:09:01.839630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345734488754379:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:01.840386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0417 18:09:02.216164994 420733 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:02.216263662 420733 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:02.842232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:03.272722Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13376: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13376 } ] 2025-04-17T18:09:03.273139Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13376: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:13376 2025-04-17T18:09:03.843080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:04.779308Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13376: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13376 } ] 2025-04-17T18:09:04.854013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a96/r3tmp/tmpgthMJp/pdisk_1.dat 2025-04-17T18:09:05.386999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345751668624050:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:09:05.387105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:09:05.483515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494345751668624050:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 13376, node 1 2025-04-17T18:09:05.542932Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:05.542936Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:16323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:05.918823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:05.968726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:05.968748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:05.968754Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:05.968868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:09:05.973421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:06.045232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:06.045366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:06.049771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:06.842089Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345734488754379:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:06.842173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:07.155741Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-17T18:09:07.155776Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:07.155783Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-17T18:09:07.157912Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:09:07.159854Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-04-17T18:09:07.159883Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:07.159906Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-17T18:09:07.160168Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-17T18:09:07.160185Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:07.160194Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-17T18:09:07.160883Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-17T18:09:07.160895Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:07.160900Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-17T18:09:07.161049Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-17T18:09:07.161070Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:07.161074Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-17T18:09:07.161826Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-17T18:09:07.161851Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-17T18:09:07.161890Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-17T18:09:07.161938Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:07.161945Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-17T18:09:07.191061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:07.194510Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-17T18:09:07.194534Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:07.194539Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-17T18:09:07.195463Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-17T18:09:07.195473Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:07.195482Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-17T18:09:07.198513Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-17T18:09:07.198529Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:07.198535Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-17T18:09:07.213409Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-17T18:09:07.213426Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:07.213431Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-17T18:09:07.214387Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". 
Create session OK 2025-04-17T18:09:07.214399Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:07.214403Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-17T18:09:07.215066Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-17T18:09:07.215077Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:07.215081Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-17T18:09:07.215724Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-17T18:09:07.215734Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:07.215737Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-17T18:09:07.218707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 E0417 18:09:07.259790207 420901 dns_resolver.cc:162] no server name supplied in dns URI E0417 18:09:07.259887640 420901 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-17T18:09:07.260649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:09:07.261757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... ssionActorId: [7:7494345971117423384:2604], Flush data 2025-04-17T18:10:03.507322Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196791:2604], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345971117423384:2604]Send EvWrite to ShardID=72075186224037893, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715765 DataShard: 72075186224037893 Generation: 1 Counter: 16 SchemeShard: 72057594046644480 PathId: 8, Size=136, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3 2025-04-17T18:10:03.507485Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\036\n\016Root/yq/quotas\020\200\202\224\204\200\200\200\200\001\030\010(\001\"\023\n\014subject_type\020\001 \201 \"\021\n\nsubject_id\020\002 \201 \"\022\n\013metric_name\020\003 \201 *\026\n\020limit_updated_at\020\005 2*\022\n\014metric_limit\020\004 \004*\022\n\013metric_name\020\003 \201 *\022\n\014metric_usage\020\006 \004*\021\n\nsubject_id\020\002 \201 *\023\n\014subject_type\020\001 \201 *\026\n\020usage_updated_at\020\007 20\366\247\200\200\200\200@8\007@\000H\001R\022\t8\204U8\373C\001h\021Q\t\000\000\007\000\020\000X\000`\000h\004h\003h\002h\005h\001h\000h\006x\000" } } } 2025-04-17T18:10:03.507640Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-17T18:10:03.507675Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: 2025-04-17T18:10:03.507745Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-04-17T18:10:03.507859Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 1. Add data: 78 / 78 2025-04-17T18:10:03.507902Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 1. Send data=78, closed=1, bufferActorId=[7:7494346001182196792:2385] 2025-04-17T18:10:03.507920Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 78 2025-04-17T18:10:03.507938Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 1. Tasks execution finished 2025-04-17T18:10:03.507950Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-04-17T18:10:03.507969Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-17T18:10:03.507985Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: 2025-04-17T18:10:03.507999Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 1. Tasks execution finished 2025-04-17T18:10:03.508007Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Waiting finish of sink[0] 2025-04-17T18:10:03.508038Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-17T18:10:03.508052Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 1. Tasks execution finished 2025-04-17T18:10:03.508063Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-04-17T18:10:03.508116Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196792:2385], SessionActorId: [7:7494345949642584640:2385], Create new TableWriteActor for table `Root/yq/quotas` ([72057594046644480:8:1]). lockId=281474976715766 [7:7494346001182196799:2385] 2025-04-17T18:10:03.508149Z node 7 :KQP_COMPUTE DEBUG: Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345949642584640:2385]Open: token=0 2025-04-17T18:10:03.508171Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196792:2385], SessionActorId: [7:7494345949642584640:2385], ProcessRequestQueue [72057594046644480:8:1] NOT READY queue=1 2025-04-17T18:10:03.508214Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196799:2385], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345949642584640:2385]Write: token=0 2025-04-17T18:10:03.508328Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196799:2385], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345949642584640:2385]Close: token=0 2025-04-17T18:10:03.508401Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196798:2385], TxId: 281474976715770, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7494346001182196792:2385] 2025-04-17T18:10:03.508413Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196798:2385], TxId: 281474976715770, task: 1. Finished 2025-04-17T18:10:03.508429Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-17T18:10:03.508446Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 1. Tasks execution finished 2025-04-17T18:10:03.508458Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196796:2385], TxId: 281474976715770, task: 1. Ctx: { TraceId : 01js2f33kf6x3gxemyrezjne56. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OTUwYTRkZGEtMjJjMjc0Y2UtODM0ZGU4NWEtMWYzZTVlYjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-17T18:10:03.508521Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 1. pass away 2025-04-17T18:10:03.508582Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715770;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:10:03.508841Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196792:2385], SessionActorId: [7:7494345949642584640:2385], Start immediate commit 2025-04-17T18:10:03.508868Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196799:2385], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345949642584640:2385]SetImmediateCommit 2025-04-17T18:10:03.508884Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196792:2385], SessionActorId: [7:7494345949642584640:2385], Flush data 2025-04-17T18:10:03.509005Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196799:2385], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345949642584640:2385]Send EvWrite to ShardID=72075186224037893, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715766 DataShard: 72075186224037893 Generation: 1 Counter: 17 SchemeShard: 72057594046644480 PathId: 8, Size=136, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3 2025-04-17T18:10:03.515218Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196791:2604], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345971117423384:2604]Recv EvWriteResult from ShardID=72075186224037893, Status=STATUS_COMPLETED, TxId=37, Locks= , Cookie=1 2025-04-17T18:10:03.515269Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196791:2604], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345971117423384:2604]Got completed result TxId=37, TabletId=72075186224037893, Cookie=1, Mode=3, Locks= 2025-04-17T18:10:03.515326Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196781:2604], SessionActorId: [7:7494345971117423384:2604], Committed TxId=0 2025-04-17T18:10:03.516480Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196799:2385], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345949642584640:2385]Recv EvWriteResult from ShardID=72075186224037893, Status=STATUS_COMPLETED, TxId=38, Locks= , Cookie=1 2025-04-17T18:10:03.516521Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196799:2385], Table: `Root/yq/quotas` ([72057594046644480:8:1]), SessionActorId: [7:7494345949642584640:2385]Got completed result TxId=38, TabletId=72075186224037893, Cookie=1, Mode=3, Locks= 2025-04-17T18:10:03.516560Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7494346001182196792:2385], SessionActorId: [7:7494345949642584640:2385], Committed TxId=0 2025-04-17T18:10:03.554138Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:19578: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:19578 2025-04-17T18:10:04.556559Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:19578: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:19578 >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 32055, MsgBus: 25018 2025-04-17T18:09:44.342284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345919177050783:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.342322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c81/r3tmp/tmpYUJZjR/pdisk_1.dat 2025-04-17T18:09:44.701893Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32055, node 1 2025-04-17T18:09:44.727427Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:44.727701Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:44.739105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:44.739245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:44.741888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:44.750763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:44.750785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:44.750796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:44.750896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25018 TClient is connected to server localhost:25018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:45.219082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.248226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:45.416904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.564959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.649631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.380433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932061954456:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.380524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.681072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.704342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.732311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.758254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.786735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.826193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.857840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.938804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932061955113:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.938896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.939107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932061955118:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.941982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:47.954054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345932061955120:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:48.031608Z node 1 :TX_PROXY ERROR: Actor# [1:7494345936356922472:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:48.983463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.343638Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345919177050783:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:49.343698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:50.712618Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710679; 2025-04-17T18:09:50.756893Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345944946857908:2512], Table: `/Root/TestTable/Index/indexImplTable` ([72057594046644480:19:1]), SessionActorId: [1:7494345936356922805:2512]Got LOCKS BROKEN for table `/Root/TestTable/Index/indexImplTable`. ShardID=72075186224037922, Sink=[1:7494345944946857908:2512].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:09:50.757394Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345944946857881:2512], SessionActorId: [1:7494345936356922805:2512], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestTable/Index/indexImplTable`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494345936356922805:2512]. isRollback=0 2025-04-17T18:09:50.757649Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTM1ZDZkMDItMmY1MTE2MTQtYjRiZWQ5ZTItYzQxYmViZWU=, ActorId: [1:7494345936356922805:2512], ActorState: ExecuteState, TraceId: 01js2f2pna23yq73rdmg7rzyjj, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494345944946857893:2512] from: [1:7494345944946857881:2512] 2025-04-17T18:09:50.757738Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494345944946857893:2512] TxId: 281474976710679. Ctx: { TraceId: 01js2f2pna23yq73rdmg7rzyjj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM1ZDZkMDItMmY1MTE2MTQtYjRiZWQ5ZTItYzQxYmViZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestTable/Index/indexImplTable`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:09:50.757943Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTM1ZDZkMDItMmY1MTE2MTQtYjRiZWQ5ZTItYzQxYmViZWU=, ActorId: [1:7494345936356922805:2512], ActorState: ExecuteState, TraceId: 01js2f2pna23yq73rdmg7rzyjj, Create QueryResponse for error on request, msg: 2025-04-17T18:09:50.759697Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710679; 2025-04-17T18:09:50.759836Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744913390759 : 281474976710679] from 72075186224037923 at tablet 72075186224037923, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 3967, MsgBus: 18501 2025-04-17T18:09:51.612220Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345951594648504:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:51.613413Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c81/r3tmp/tmpDMoSXj/pdisk_1.dat 2025-04-17T18:09:51.729887Z node 2 :IMPORT WA ... 59Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjQ4OTJlMzUtYWRiOTQ4NWYtZTM0ODY0NmUtMTFiYmE3YzQ=, ActorId: [2:7494345973069487804:2512], ActorState: ExecuteState, TraceId: 01js2f2wr0f737j2ks4s64hmtf, Create QueryResponse for error on request, msg: 2025-04-17T18:09:56.505812Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494345973069488046:2512] TxId: 281474976715675. Ctx: { TraceId: 01js2f2wrp476573nekd35z2ea, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjQ4OTJlMzUtYWRiOTQ4NWYtZTM0ODY0NmUtMTFiYmE3YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:19:1] only with StaleRO isolation level 2025-04-17T18:09:56.506011Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjQ4OTJlMzUtYWRiOTQ4NWYtZTM0ODY0NmUtMTFiYmE3YzQ=, ActorId: [2:7494345973069487804:2512], ActorState: ExecuteState, TraceId: 01js2f2wrp476573nekd35z2ea, Create QueryResponse for error on request, msg: 2025-04-17T18:09:56.516754Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494345973069488055:2512] TxId: 281474976715677. Ctx: { TraceId: 01js2f2ws36cba4gnfr072zx23, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjQ4OTJlMzUtYWRiOTQ4NWYtZTM0ODY0NmUtMTFiYmE3YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:19:1] only with StaleRO isolation level 2025-04-17T18:09:56.516950Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjQ4OTJlMzUtYWRiOTQ4NWYtZTM0ODY0NmUtMTFiYmE3YzQ=, ActorId: [2:7494345973069487804:2512], ActorState: ExecuteState, TraceId: 01js2f2ws36cba4gnfr072zx23, Create QueryResponse for error on request, msg: 2025-04-17T18:09:56.537293Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494345973069488064:2512] TxId: 281474976715679. 
Ctx: { TraceId: 01js2f2wsqf7nc21k25kxweqzm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjQ4OTJlMzUtYWRiOTQ4NWYtZTM0ODY0NmUtMTFiYmE3YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:19:1] only with StaleRO isolation level 2025-04-17T18:09:56.537442Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjQ4OTJlMzUtYWRiOTQ4NWYtZTM0ODY0NmUtMTFiYmE3YzQ=, ActorId: [2:7494345973069487804:2512], ActorState: ExecuteState, TraceId: 01js2f2wsqf7nc21k25kxweqzm, Create QueryResponse for error on request, msg: 2025-04-17T18:09:56.614552Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345951594648504:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:56.614606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19095, MsgBus: 13070 2025-04-17T18:09:57.544638Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345976466669891:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:57.588220Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c81/r3tmp/tmpalTABz/pdisk_1.dat 2025-04-17T18:09:57.711708Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:57.731127Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:57.731232Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:57.733123Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19095, node 3 2025-04-17T18:09:57.785884Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:57.785903Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:57.785911Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:57.786009Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13070 TClient is connected to server localhost:13070 WaitRootIsUp 'Root'... 
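The repeated KQP_EXECUTER errors above state the constraint this test exercises: a read that goes through an async secondary index is only allowed under StaleRO isolation, because the index is maintained asynchronously and may lag the main table. A minimal sketch of the compliant read, assuming the in-tree YDB C++ SDK; the table, index, and predicate are placeholders modeled on the log:

// Sketch of the fix the errors above point at: reads over an async secondary
// index must run under StaleRO isolation. The same read under SerializableRW
// is what the executer rejected above. Names are placeholders.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb::NTable;

TDataQueryResult ReadViaAsyncIndex(TSession session) {
    // StaleRO acknowledges that the async index may trail the main table.
    return session.ExecuteDataQuery(
        "SELECT * FROM `/Root/TestTable` VIEW `Index` WHERE Key > 0;",
        TTxControl::BeginTx(TTxSettings::StaleRO()).CommitTx()
    ).GetValueSync();
}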
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:58.474809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.481141Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:58.493146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.569260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.725570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:58.798985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.424810Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345993646540681:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.424898Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.502192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.534985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.578765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.627100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.663999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.710568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.793397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.883260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345993646541357:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.883355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.883383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345993646541362:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.887076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:01.903800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345993646541364:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:01.980606Z node 3 :TX_PROXY ERROR: Actor# [3:7494345993646541419:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:02.539942Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345976466669891:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:02.549316Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:03.010036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.059041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.136622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] Test command err: Trying to start YDB, gRPC: 3205, MsgBus: 29536 2025-04-17T18:09:41.338822Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345909716605421:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:41.344032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c8e/r3tmp/tmpG1X0qV/pdisk_1.dat 2025-04-17T18:09:41.645436Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3205, node 1 2025-04-17T18:09:41.726852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:41.726872Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:41.726883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:41.727012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:09:41.728903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:41.729002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:41.730796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29536 TClient is connected to server localhost:29536 WaitRootIsUp 'Root'... 
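The `Scheduled retry for error: Transaction ... completed, doublechecking` line followed by TX_PROXY's `path exist, request accepts it` is the normal bootstrap race for the default resource pool: several sessions race to create it, one wins, and the losers re-check and accept the existing path. Client-side DDL can follow the same idempotent pattern; a sketch, assuming the in-tree YDB C++ SDK (the table DDL is a placeholder, and treating ALREADY_EXISTS as success is an assumption about the status an existing path maps to):

// Sketch of the idempotent-create pattern the log illustrates: attempt the
// DDL and accept "already exists", mirroring "path exist, request accepts it".
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

TStatus EnsureTable(TTableClient& client) {
    return client.RetryOperationSync([](TSession session) -> TStatus {
        TStatus status = session.ExecuteSchemeQuery(
            "CREATE TABLE `/Root/MyTable` (Key Uint64, PRIMARY KEY (Key));"
        ).GetValueSync();
        // A concurrent creator may have won the race; an existing path of the
        // expected type is accepted rather than treated as a failure.
        if (status.GetStatus() == EStatus::ALREADY_EXISTS) {
            return TStatus(EStatus::SUCCESS, {});
        }
        return status;
    });
}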
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:42.253200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.274819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.432929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.591005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.672488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.157800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345922601509075:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.157939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.511339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.546024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.580184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.651688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.689390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.762240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.845982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.908730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345922601509749:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.908803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.908875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345922601509754:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.912214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:44.923530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345922601509756:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:45.020769Z node 1 :TX_PROXY ERROR: Actor# [1:7494345926896477110:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:45.914524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.326273Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345909716605421:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:46.326338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18634, MsgBus: 7765 2025-04-17T18:09:50.003087Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345948498696719:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:50.003165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c8e/r3tmp/tmpbCw9ir/pdisk_1.dat 2025-04-17T18:09:50.163117Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18634, node 2 2025-04-17T18:09:50.207604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:50.207691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:50.210884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:50.332919Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:50.332941Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:50.332962Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:50.333099Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7765 TClient is connected to server localhost:7765 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:09:50.807954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.822605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.882795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.049165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itse ... :"\/Root\/tg","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"E-Rows":"No estimate","Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Input":"precompute_1_0","Name":"PartitionByKey"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_1_0"}],"Table":"tg","PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"TopSort-Filter"}],"Node Type":"Merge","SortColumns":["system_date (Desc)","id (Desc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Limit":"11","Name":"Top","TopBy":"[row.b,row.pa_id,row.system_date,row.id]"}],"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1}],"Limit":"11","Name":"Limit"},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":4}],"Predicate":"NOT If AND item.status != $status_1 AND item.am != $am_1","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"Tables":["tg\/tg_index\/indexImplTable"],"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_0","Reverse":true,"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tg\/tg_index\/indexImplTable","E-Rows":"No estimate","ReadRangesPointPrefixLen":"2","ReadRangesKeys":["b","pa_id","system_date","id"],"Table":"tg\/tg_index\/indexImplTable","ReadColumns":["am","b","id","pa_id","status","system_date","type"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Limit-Filter"}],"Node 
Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Top"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"Stage"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tg","reads":[{"lookup_by":["b","id"],"columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"type":"Lookup"}]},{"name":"\/Root\/tg\/tg_index\/indexImplTable","reads":[{"columns":["am","b","id","pa_id","status","system_date","type"],"reverse":true,"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"Limit":"1001","Name":"TopSort","TopSortBy":"[row.system_date,row.id]"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"Name":"TableLookup","E-Cost":"No estimate","E-Size":"No estimate","LookupKeyColumns":["b","id"],"Table":"tg"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Filter"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2}}} Trying to start YDB, gRPC: 10455, MsgBus: 65197 2025-04-17T18:09:57.086918Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345977387571711:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:57.086971Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c8e/r3tmp/tmpZX6B48/pdisk_1.dat 2025-04-17T18:09:57.279859Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:57.283902Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:57.283975Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:57.285516Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10455, node 3 2025-04-17T18:09:57.390015Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:57.390037Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:57.390045Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:57.390227Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65197 TClient is connected to server localhost:65197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:57.978784Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.992309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.088328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.278140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.382866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.651302Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345990272475372:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.651406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.704538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.739144Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.788537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.829497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.861141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.933008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.988080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.052376Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345994567443332:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.052462Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.052845Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345994567443337:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.056968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:01.075436Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345994567443339:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:01.152896Z node 3 :TX_PROXY ERROR: Actor# [3:7494345994567443396:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:02.090357Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345977387571711:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:02.090431Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:02.212874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpMultishardIndex::SortByPk [GOOD] |88.7%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 15105, MsgBus: 2772 2025-04-17T18:06:38.388789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345120706578692:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:38.388896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cbf/r3tmp/tmpYhxtDq/pdisk_1.dat 2025-04-17T18:06:38.666238Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:38.682824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:38.682960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:38.684922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15105, node 1 2025-04-17T18:06:38.744457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:38.744481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:38.744488Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:38.744601Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2772 TClient is connected to server localhost:2772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:06:39.186252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:39.204925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:39.305970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:39.437559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:39.510464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:41.022765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345133591482353:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.022904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.272802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:06:41.301066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:06:41.366778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:06:41.390447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:06:41.420236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:06:41.452941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:06:41.522393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:06:41.590315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345133591483024:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.590399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.590412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345133591483029:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:06:41.593235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:06:41.600995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345133591483031:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:06:41.662840Z node 1 :TX_PROXY ERROR: Actor# [1:7494345133591483085:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:06:43.388903Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345120706578692:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:43.389005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7278, MsgBus: 19629 2025-04-17T18:06:47.190760Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345161188968089:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:47.191568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cbf/r3tmp/tmpFbtASU/pdisk_1.dat 2025-04-17T18:06:47.304774Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7278, node 2 2025-04-17T18:06:47.335843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:47.335933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:47.337947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:06:47.359476Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:47.359504Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:47.359512Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:47.359647Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19629 TClient is connected to server localhost:19629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
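The recurring METADATA_PROVIDER errors (`fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout`, `cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout`) are an existence probe giving up after its deadline while the server is still bootstrapping. From client code the equivalent probe goes through the scheme service; a sketch, assuming the in-tree YDB C++ SDK, with the path taken from the log:

// Tri-state existence check: true/false when the scheme service answers,
// nullopt when the probe itself fails (the "error=timeout" case in the log).
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

#include <optional>

using namespace NYdb;
using namespace NYdb::NScheme;

std::optional<bool> PathExists(TSchemeClient& client, const TString& path) {
    auto result = client.DescribePath(path).GetValueSync();
    if (result.IsSuccess()) {
        return true;
    }
    if (result.GetStatus() == EStatus::SCHEME_ERROR) {
        return false;  // the path is genuinely absent
    }
    return std::nullopt;  // timeout / unavailable: existence undecided
}

// e.g. PathExists(schemeClient, "/Root/.metadata/initialization/migrations")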
2025-04-17T18:06:47.738462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:47.744630Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:06:47.757148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:47.807937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:06:47.924766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... 09:41.629451Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.679434Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.719189Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.758451Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.800807Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.881429Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.901718Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7494345886064597781:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:41.901775Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:41.929543Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:42.009583Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [15:7494345911834404007:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:42.009728Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:42.010248Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [15:7494345911834404012:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:42.015317Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:42.033328Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [15:7494345911834404014:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:42.109604Z node 15 :TX_PROXY ERROR: Actor# [15:7494345911834404072:3555] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20718, MsgBus: 31064 2025-04-17T18:09:52.078767Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7494345956106658067:2256];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:52.079250Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000cbf/r3tmp/tmp63HcIm/pdisk_1.dat 2025-04-17T18:09:52.236304Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:52.269475Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:52.269605Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:52.272544Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20718, node 16 2025-04-17T18:09:52.320278Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:52.320305Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:52.320314Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:52.320483Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31064 TClient is connected to server localhost:31064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:52.918356Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:52.937582Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:53.023821Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:53.278119Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:53.392214Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.074296Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7494345956106658067:2256];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:57.074379Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:57.175639Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494345977581496099:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.175760Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.262517Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.349669Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.429979Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.469205Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.503481Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.555941Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.647104Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.734413Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494345977581496772:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.734508Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.734754Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7494345977581496777:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.739046Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:57.753712Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7494345977581496779:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:57.841366Z node 16 :TX_PROXY ERROR: Actor# [16:7494345977581496834:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 29355, MsgBus: 24215 2025-04-17T18:09:35.261359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345881293674874:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:35.262306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c99/r3tmp/tmpkwiSWE/pdisk_1.dat 2025-04-17T18:09:35.616911Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29355, node 1 2025-04-17T18:09:35.652947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:35.653338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:35.682150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:35.720896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:35.720918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:35.720930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:35.721023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24215 TClient is connected to server localhost:24215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:36.278671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:36.304470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:36.477168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:36.629829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:36.721348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:38.297010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345894178578554:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.297124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.575798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.620160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.646718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.675326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.703107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.739665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.774990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:38.863590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345894178579221:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.863662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.863870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345894178579226:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:38.868086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:38.878284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345894178579228:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:38.935352Z node 1 :TX_PROXY ERROR: Actor# [1:7494345894178579282:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:39.894817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:39.945618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:39.999857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:09:40.262274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345881293674874:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:40.262339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6143, MsgBus: 17082 2025-04-17T18:09:43.097668Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345914967923261:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.097759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c99/r3tmp/tmpudQI6i/pdisk_1.dat 2025-04-17T18:09:43.241051Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:43.266509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:43.266588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:43.268434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6143, node 2 2025-04-17T18:09:43.306674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:43.306694Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:43.306701Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:43.306802Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17082 TClient is connected to server localhost:17082 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:43.735554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:43.744258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is un ... N: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.179553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.218759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.295118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.347354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.413378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345927852827593:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:46.413478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:46.413836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345927852827598:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:46.417427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:46.431027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345927852827600:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:46.489161Z node 2 :TX_PROXY ERROR: Actor# [2:7494345927852827655:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:47.452986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.100862Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345914967923261:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:48.100942Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:48.419276Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 12053, MsgBus: 22309 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c99/r3tmp/tmpe6toho/pdisk_1.dat 2025-04-17T18:09:58.492085Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:09:58.495176Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:58.520870Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:58.520993Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:58.523793Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12053, node 3 2025-04-17T18:09:58.641679Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:58.641702Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:58.641711Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:58.641857Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22309 TClient is connected to server localhost:22309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:09:59.251423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.263105Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.276062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.359094Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.586668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.674065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:02.065189Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345998720409495:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.065304Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.152606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.196237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.243610Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.289240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.327473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.372518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.445980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.554524Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345998720410167:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.554644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.554902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494345998720410172:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.559582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:02.578602Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494345998720410174:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:02.645041Z node 3 :TX_PROXY ERROR: Actor# [3:7494345998720410229:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:03.868426Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.914441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.037793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 >> KqpIndexes::IndexFilterPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk [GOOD] Test command err: Trying to start YDB, gRPC: 13220, MsgBus: 17324 2025-04-17T18:09:54.467876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345961593247976:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:54.467926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c6b/r3tmp/tmpJrA3S2/pdisk_1.dat 2025-04-17T18:09:54.843876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:54.843986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:54.845838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:54.877593Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13220, node 1 2025-04-17T18:09:54.907855Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:54.907887Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:09:54.990467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:54.990497Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:54.990508Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:54.990607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17324 TClient is connected to server localhost:17324 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:55.524783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.544556Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:55.553851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.703560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.877336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:55.953228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.885305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345974478151572:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.885437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.330004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.362441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.400597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.470410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.506896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.550237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.637314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.730482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345978773119551:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.730579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.734418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345978773119556:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.739020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:58.751840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:09:58.752727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345978773119559:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:58.826625Z node 1 :TX_PROXY ERROR: Actor# [1:7494345978773119613:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:59.467930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345961593247976:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:59.467982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:59.789107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 14211, MsgBus: 13810 2025-04-17T18:10:01.748361Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345994701039673:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:01.750304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c6b/r3tmp/tmpgeIfnm/pdisk_1.dat 2025-04-17T18:10:01.866646Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:01.889672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:01.889747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:01.891681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14211, node 2 2025-04-17T18:10:02.001080Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:02.001100Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:02.001107Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:02.001203Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13810 TClient is connected to server localhost:13810 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:10:02.474160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.500142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:02.573477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:02.759331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.818137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:05.016026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346011880910482:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:05.016136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:05.063455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.095794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.128430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.161725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.202545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.239636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.271837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.336954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346011880911141:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:05.337038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:05.337110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346011880911146:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:05.340647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:05.354423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346011880911148:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:05.449481Z node 2 :TX_PROXY ERROR: Actor# [2:7494346011880911205:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:06.405887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.736812Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345994701039673:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.736891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TA] $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate >> AnalyzeColumnshard::AnalyzeDeadline >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> AnalyzeDatashard::AnalyzeTwoTables >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> AnalyzeDatashard::AnalyzeOneTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexFilterPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 63115, MsgBus: 10534 2025-04-17T18:09:47.366449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345931885270338:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:47.366508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7a/r3tmp/tmp8AABqk/pdisk_1.dat 2025-04-17T18:09:47.779050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:47.782261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:47.782368Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:47.786919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63115, node 1 2025-04-17T18:09:47.922834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:47.922855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:47.922861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:47.922975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10534 TClient is connected to server localhost:10534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:48.622220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.656354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:48.774728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.942032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.043504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.808165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345944770173797:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:50.808292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.118646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.189267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.227167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.265791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.333494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.381792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.430897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.522000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345949065141767:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.522083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.522459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345949065141772:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.526886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:51.541543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345949065141774:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:51.629285Z node 1 :TX_PROXY ERROR: Actor# [1:7494345949065141832:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:52.386842Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345931885270338:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:52.386894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:52.615246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.215894Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345957655077062:2545], status: GENERIC_ERROR, issues:
<main>:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-04-17T18:09:53.217045Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I2MmFiYzktM2FiZTA2MzQtYWE4M2ExZi0yN2VkMmE4Yg==, ActorId: [1:7494345953360109460:2513], ActorState: ExecuteState, TraceId: 01js2f2shm8w3khv5drr56zcqy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:09:53.234222Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345957655077066:2547], status: GENERIC_ERROR, issues:
<main>:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-04-17T18:09:53.235419Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I2MmFiYzktM2FiZTA2MzQtYWE4M2ExZi0yN2VkMmE4Yg==, ActorId: [1:7494345953360109460:2513], ActorState: ExecuteState, TraceId: 01js2f2sjcfr0kqa639xkw3rnx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:09:53.253311Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345957655077070:2549], status: GENERIC_ERROR, issues:
<main>:3:41: Error: mismatched input 'VIEW' expecting {ON, SET} 2025-04-17T18:09:53.253755Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I2MmFiYzktM2FiZTA2MzQtYWE4M2ExZi0yN2VkMmE4Yg==, ActorId: [1:7494345953360109460:2513], ActorState: ExecuteState, TraceId: 01js2f2sjyfkcfjtcvn2px6ype, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:09:53.273024Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494345957655077074:2551], status: GENERIC_ERROR, issues:
<main>:3:46: Error: mismatched input 'VIEW' expecting {<EOF>, ';'} 2025-04-17T18:09:53.274279Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I2MmFiYzktM2FiZTA2MzQtYWE4M2ExZi0yN2VkMmE4Yg==, ActorId: [1:7494345953360109460:2513], ActorState: ExecuteState, TraceId: 01js2f2skj7hv6fpv4pb6xah95, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 29338, MsgBus: 5040 2025-04-17T18:09:53.992406Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345959181027475:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:53.992454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7a/r3tmp/tmpfCLkI8/pdisk_1.dat 2025-04-17T18:09:54.158240Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:54.177561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:54.177642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:54.179053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29338, node 2 2025-04-17T18:09:54.254775Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:54.254796Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17 ... 4976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.625193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:57.704480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345976360899104:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.704627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.704937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345976360899109:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:57.708856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:57.721447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345976360899111:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:57.778477Z node 2 :TX_PROXY ERROR: Actor# [2:7494345976360899165:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:58.712406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.776102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.857272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.992725Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345959181027475:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:58.992792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17783, MsgBus: 13376 2025-04-17T18:10:01.967362Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345992240859862:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:01.968165Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7a/r3tmp/tmp1cnEaV/pdisk_1.dat 2025-04-17T18:10:02.144446Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:02.171062Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:02.171156Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:02.172687Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17783, node 3 2025-04-17T18:10:02.246722Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:02.246743Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:02.246752Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:02.246878Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13376 TClient is connected to server localhost:13376 WaitRootIsUp 'Root'... 
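[Editor's note] The records above show the workload service bootstrapping the default resource pool lazily: TPoolFetcherActor reports NOT_FOUND, an ESchemeOpCreateResourcePool suboperation is proposed, TPoolCreatorActor schedules a "doublechecking" retry, and a racing creator is absorbed with "path exist, request accepts it". A minimal self-contained C++ sketch of that ensure-then-verify shape follows; EnsureDefaultPool, FetchPool and CreatePool are hypothetical stand-ins for illustration, not the actors' real interfaces.

#include <iostream>
#include <set>
#include <string>

enum class EStatus { Success, NotFound, AlreadyExists };

// Toy scheme state standing in for schemeshard; the real flow is driven
// by TPoolFetcherActor / TPoolCreatorActor in the KQP workload service.
std::set<std::string> scheme;

EStatus FetchPool(const std::string& path) {
    return scheme.count(path) ? EStatus::Success : EStatus::NotFound;
}

EStatus CreatePool(const std::string& path) {
    // A second creator loses the race but is accepted, matching the log's
    // "error: path exist, request accepts it".
    return scheme.insert(path).second ? EStatus::Success : EStatus::AlreadyExists;
}

// Ensure-then-verify: create on NOT_FOUND, treat AlreadyExists as success,
// then re-fetch -- the "doublechecking" retry scheduled in the log above.
bool EnsureDefaultPool() {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    if (FetchPool(path) == EStatus::Success) return true;
    const EStatus st = CreatePool(path);
    if (st != EStatus::Success && st != EStatus::AlreadyExists) return false;
    return FetchPool(path) == EStatus::Success;
}

int main() {
    std::cout << EnsureDefaultPool() << ' ' << EnsureDefaultPool() << '\n'; // prints "1 1"
}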
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:10:02.759301Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.798151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:02.891265Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.073263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:03.157965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:05.638601Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346009420730801:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:05.638700Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:05.700511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.753063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.836443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.874458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.921351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.965615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.047453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.127795Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346013715698769:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.127924Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.128184Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346013715698775:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.132391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:06.143594Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494346013715698777:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:06.229022Z node 3 :TX_PROXY ERROR: Actor# [3:7494346013715698832:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:06.974286Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345992240859862:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.974365Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:07.504582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:07.557403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:10:07.653146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 18329, MsgBus: 9071 2025-04-17T18:09:45.654705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345926279693608:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:45.654769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7d/r3tmp/tmpfqPuq6/pdisk_1.dat 2025-04-17T18:09:46.079135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:46.079251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:46.082074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:46.110506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18329, node 1 2025-04-17T18:09:46.220791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:46.220811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:46.220824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:46.220964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9071 TClient is connected to server localhost:9071 WaitRootIsUp 'Root'... 
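[Editor's note] Every node registration in this log walks the same Hive chain: VolatileState: Unknown -> Disconnected -> Connecting -> Connected. A tiny illustrative C++ model of that transition table follows; the enum and Next function are assumptions for illustration, not Hive's actual code.

#include <iostream>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// The only chain observed in this log: each registration advances one step.
EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected;
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s;
}

int main() {
    for (EVolatileState s = EVolatileState::Unknown; s != EVolatileState::Connected;) {
        EVolatileState n = Next(s);
        std::cout << Name(s) << " -> " << Name(n) << '\n'; // mirrors the HIVE WARN lines
        s = n;
    }
}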
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:46.802644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:46.822454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:46.842042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:47.019683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.181813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.277243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.923948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345939164597261:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.924039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.250239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.278488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.302124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.327497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.370468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.445393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.526043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.632279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345943459565234:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.632406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.632721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345943459565239:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.636278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:49.648458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345943459565241:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:49.734521Z node 1 :TX_PROXY ERROR: Actor# [1:7494345943459565299:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:50.657721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345926279693608:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:50.658327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:50.712809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.769200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24832, MsgBus: 31544 2025-04-17T18:09:54.448962Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345963204232451:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:54.449021Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7d/r3tmp/tmpeIFRw4/pdisk_1.dat 2025-04-17T18:09:54.538116Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24832, node 2 2025-04-17T18:09:54.579773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:54.579857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:54.581101Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:54.626699Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:54.626723Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:54.626731Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:54.626851Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31544 TClient is connected to server localhost:31544 WaitRootIsUp 'Root'... 
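[Editor's note] The "WaitRootIsUp 'Root'..." exchanges above are a readiness poll: TClient::Ls is issued against Root until the response carries StatusCode: SUCCESS, then the test proceeds ("WaitRootIsUp 'Root' success."). A sketch of such a deadline-bounded poll follows; RootIsUp is a hypothetical stand-in for the Ls call, not TClient's real interface.

#include <chrono>
#include <iostream>
#include <thread>

// Hypothetical probe standing in for TClient::Ls("Root"); here it pretends
// the scheme becomes ready on the third attempt.
bool RootIsUp() {
    static int tries = 0;
    return ++tries >= 3;
}

// Poll until the root path answers or the deadline passes, as in the
// "WaitRootIsUp 'Root'..." records above.
bool WaitRootIsUp(std::chrono::seconds timeout) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (RootIsUp()) return true;   // "WaitRootIsUp 'Root' success."
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;                      // caller fails the test setup
}

int main() {
    std::cout << (WaitRootIsUp(std::chrono::seconds(5)) ? "up" : "timed out") << '\n';
}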
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:09:55.068064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:09:55.081496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, a ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.324100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.367879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:58.454044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345980384104070:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.454119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.454428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345980384104075:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.457630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:58.469125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345980384104077:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:58.544517Z node 2 :TX_PROXY ERROR: Actor# [2:7494345980384104136:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:59.450160Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345963204232451:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:59.450305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:59.632464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.701325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29613, MsgBus: 27017 2025-04-17T18:10:02.457856Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345996363548374:2075];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7d/r3tmp/tmpTBI5cK/pdisk_1.dat 2025-04-17T18:10:02.475848Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:02.626136Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:02.626429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:02.626502Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:02.635342Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29613, node 3 2025-04-17T18:10:02.695982Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:02.696011Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:02.696020Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:02.696140Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27017 TClient is connected to server localhost:27017 WaitRootIsUp 'Root'... 
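[Editor's note] The recurring METADATA_PROVIDER pair above ("TTableExistsActor;event=timeout" followed by "cannot detect path existence;...;error=timeout") reports a timed-out probe as indeterminate rather than "absent". A hedged sketch of such a three-valued existence check follows; ProbePath and CheckExists are invented names for illustration, not the code in table_exists.cpp.

#include <chrono>
#include <future>
#include <iostream>
#include <optional>
#include <thread>

// Hypothetical stand-in for the scheme-cache lookup behind table_exists.cpp;
// here it simply answers slower than the caller is willing to wait.
bool ProbePath() {
    std::this_thread::sleep_for(std::chrono::milliseconds(200));
    return true;
}

// Three-valued check: nullopt = "cannot detect path existence" (error=timeout),
// distinct from a definite true/false -- a timed-out probe is not "absent".
std::optional<bool> CheckExists(std::chrono::milliseconds deadline) {
    auto fut = std::async(std::launch::async, ProbePath);
    if (fut.wait_for(deadline) == std::future_status::timeout)
        return std::nullopt;   // what METADATA_PROVIDER logs above
    return fut.get();
}   // note: the std::async future's destructor waits for ProbePath to finish

int main() {
    auto r = CheckExists(std::chrono::milliseconds(50));
    std::cout << (r ? (*r ? "exists" : "absent") : "timeout: indeterminate") << '\n';
}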
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:10:03.231061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.242355Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:03.257329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:03.341025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:03.540759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:03.624694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.018219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346013543419315:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.018413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.075196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.111631Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.158246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.195389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.234955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.279718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.324780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.400224Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346013543419975:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.400316Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.400532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346013543419980:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.406097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:06.419371Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494346013543419982:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:06.517674Z node 3 :TX_PROXY ERROR: Actor# [3:7494346013543420041:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:07.458586Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345996363548374:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:07.458655Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:07.785124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:07.849603Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> AnalyzeColumnshard::AnalyzeTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 16953, MsgBus: 9008 2025-04-17T18:09:57.636785Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345977048665980:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:57.636993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5e/r3tmp/tmpCiqkMA/pdisk_1.dat 2025-04-17T18:09:58.266634Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:58.315608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:58.315687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:58.323201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16953, node 1 2025-04-17T18:09:58.507582Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:58.507607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:58.507613Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T18:09:58.507722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9008 TClient is connected to server localhost:9008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:59.144558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.170380Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:59.185155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.348199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.518343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:59.605725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.170618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345994228536727:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.170725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.466683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.497030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.568729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.597513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.627104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.671731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.725453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.776218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345994228537387:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.776290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.776662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345994228537392:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.780279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:01.795134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345994228537394:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:01.854002Z node 1 :TX_PROXY ERROR: Actor# [1:7494345994228537449:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:02.637026Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345977048665980:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:02.637089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:02.837155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 9984, MsgBus: 25725 2025-04-17T18:10:05.683626Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346012433970626:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:05.683803Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5e/r3tmp/tmpElJxcB/pdisk_1.dat 2025-04-17T18:10:05.800256Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9984, node 2 2025-04-17T18:10:05.824116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:05.824207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:05.827870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:05.862255Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:05.862278Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:05.862285Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:05.862399Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25725 TClient is connected to server localhost:25725 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:06.295291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.306975Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:06.318437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.407594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.589737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.676636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:09.058242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346029613841577:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.058358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.121678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.157259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.190280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.217455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.262484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.300571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.339910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.425348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346029613842239:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.425445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.425899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346029613842244:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.429368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:09.440576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346029613842246:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:09.539258Z node 2 :TX_PROXY ERROR: Actor# [2:7494346029613842303:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:10.445476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:10.686163Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346012433970626:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:10.686255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7191, MsgBus: 11048 2025-04-17T18:09:44.464572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345919355835859:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:44.475855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c80/r3tmp/tmpRDUp1G/pdisk_1.dat 2025-04-17T18:09:44.869578Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:44.898969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:44.899083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7191, node 1 2025-04-17T18:09:44.901572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:44.976513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:44.976534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:44.976540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:44.976635Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11048 TClient is connected to server localhost:11048 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:45.573612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.600210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:45.612129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.779409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.945955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:46.020403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:47.770125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932240739528:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.770234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.129461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.163070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.196138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.266781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.299827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.369971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.452754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.536873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345936535707504:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.536925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.536960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345936535707509:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.540689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:48.550524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345936535707511:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:48.632592Z node 1 :TX_PROXY ERROR: Actor# [1:7494345936535707566:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:49.464926Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345919355835859:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:49.465003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:49.587717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21890, MsgBus: 17691 2025-04-17T18:09:55.711885Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345966546746273:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:55.711935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c80/r3tmp/tmpvhVz35/pdisk_1.dat 2025-04-17T18:09:55.872609Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:55.908668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:55.908737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:55.912939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21890, node 2 2025-04-17T18:09:56.014560Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:56.014579Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:56.014585Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:56.014673Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17691 TClient is connected to server localhost:17691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:56.531412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.538827Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:56.548480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56. ... .342058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.378902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.424217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.467023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.526911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345983726617870:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.526998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.527235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345983726617875:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.530363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:59.543078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345983726617877:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:59.634501Z node 2 :TX_PROXY ERROR: Actor# [2:7494345983726617933:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:00.721194Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345966546746273:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:00.721258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:00.834936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26369, MsgBus: 9508 2025-04-17T18:10:04.649761Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346008160587952:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:04.649840Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c80/r3tmp/tmprQCR8W/pdisk_1.dat 2025-04-17T18:10:04.776859Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:04.808600Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:04.808726Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:04.810444Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26369, node 3 2025-04-17T18:10:04.872481Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:04.872503Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:04.872513Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:04.872633Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9508 TClient is connected to server localhost:9508 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:05.359814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:05.377736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:05.458883Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:05.682615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:05.765368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.500886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346025340458884:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:08.500989Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:08.595645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.641840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.682356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.712833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.756116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.846171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.887634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.974377Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346025340459550:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:08.974494Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:08.974777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346025340459555:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:08.980296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:08.991159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494346025340459557:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:09.089383Z node 3 :TX_PROXY ERROR: Actor# [3:7494346029635426910:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:09.650348Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494346008160587952:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:09.650420Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:10.248582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.043038Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:11.707321Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:11.737563Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] >> AnalyzeColumnshard::AnalyzeStatus >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> KqpIndexes::UpsertNoIndexColumns [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] Test command err: Trying to start YDB, gRPC: 21453, MsgBus: 23052 2025-04-17T18:09:55.351810Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345968955496147:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:55.352636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c6a/r3tmp/tmpt9bSEk/pdisk_1.dat 2025-04-17T18:09:55.853598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:55.853691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:55.856229Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:55.907820Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21453, node 1 2025-04-17T18:09:56.069538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:56.069570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:56.069578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:56.069687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23052 TClient is connected to server localhost:23052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:56.748029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.779443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.937644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.105243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:57.186814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.829398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345981840399810:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:58.829510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.179267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.211366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.242483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.276541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.316957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.403952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.454292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:59.533093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345986135367772:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.533179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.533333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345986135367777:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.537119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:59.549944Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:09:59.550803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345986135367779:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:59.625857Z node 1 :TX_PROXY ERROR: Actor# [1:7494345986135367837:3556] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:00.352581Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345968955496147:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:00.352787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:00.552807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 18236, MsgBus: 4777 2025-04-17T18:10:03.174723Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346002337846353:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:03.174784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c6a/r3tmp/tmpWvDlVK/pdisk_1.dat 2025-04-17T18:10:03.299923Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18236, node 2 2025-04-17T18:10:03.314655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:03.314733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:03.315692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:03.374771Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:03.374799Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:03.374806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:03.374922Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4777 TClient is connected to server localhost:4777 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:10:03.799494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.805273Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:03.820582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:03.905239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:04.076829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:04.152999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.173757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346015222750015:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.173848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.234487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.269117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.302064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.362523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.394701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.454815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.531706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.596803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346015222750679:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.596904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.597208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346015222750684:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:06.601413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:06.612558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346015222750686:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:06.705090Z node 2 :TX_PROXY ERROR: Actor# [2:7494346015222750744:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:07.664855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.175050Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346002337846353:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:08.175126Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:09.657770Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f38pzerv82bb7ntkmhfmr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:09.670236Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, ActorId: [2:7494346019517719173:2570], ActorState: ExecuteState, TraceId: 01js2f38pzerv82bb7ntkmhfmr, Create QueryResponse for error on request, msg: 2025-04-17T18:10:10.531049Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f39mx218xpzqatcyet29f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:10.531302Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, ActorId: [2:7494346019517719173:2570], ActorState: ExecuteState, TraceId: 01js2f39mx218xpzqatcyet29f, Create QueryResponse for error on request, msg: 2025-04-17T18:10:10.551229Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3afca5rj8cenyxxq7mns, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:10.551453Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, ActorId: [2:7494346019517719173:2570], ActorState: ExecuteState, TraceId: 01js2f3afca5rj8cenyxxq7mns, Create QueryResponse for error on request, msg: 2025-04-17T18:10:11.375548Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3afz7qc5chdp8yhqg35v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-17T18:10:11.375813Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMzMWEyOTAtYzY3MTM4YS1lMDBmODJmNS05NDZjNWU4YQ==, ActorId: [2:7494346019517719173:2570], ActorState: ExecuteState, TraceId: 01js2f3afz7qc5chdp8yhqg35v, Create QueryResponse for error on request, msg: >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD] >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 19644, MsgBus: 7368 2025-04-17T18:09:56.615023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345971697855038:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:56.615070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5f/r3tmp/tmpUiVerP/pdisk_1.dat 2025-04-17T18:09:57.207863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:57.212803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:57.212921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:57.215229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19644, node 1 2025-04-17T18:09:57.342513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:57.342549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:57.342563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:57.342679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7368 TClient is connected to server localhost:7368 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:58.141030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.166581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:58.196354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.383043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.580216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.671775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.358467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345988877726023:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.358582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.711123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.742809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.774264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.805967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.848803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.929705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.010042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.078078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345993172693981:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.078240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.078766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345993172693986:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.084071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:01.098065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345993172693989:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:01.194588Z node 1 :TX_PROXY ERROR: Actor# [1:7494345993172694047:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:01.617834Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345971697855038:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:01.617895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:02.233048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:03.945902Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f33dffdcgn8sna4k2pd25, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAwZjYyODEtZGJkN2FlMmYtOTAyMGVhYWMtMzRhN2Q1ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:03.956739Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjAwZjYyODEtZGJkN2FlMmYtOTAyMGVhYWMtMzRhN2Q1ZDc=, ActorId: [1:7494345997467662475:2572], ActorState: ExecuteState, TraceId: 01js2f33dffdcgn8sna4k2pd25, Create QueryResponse for error on request, msg: 2025-04-17T18:10:04.605666Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f341x5z6jwyv9mrrg9yav, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAwZjYyODEtZGJkN2FlMmYtOTAyMGVhYWMtMzRhN2Q1ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-17T18:10:04.605906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjAwZjYyODEtZGJkN2FlMmYtOTAyMGVhYWMtMzRhN2Q1ZDc=, ActorId: [1:7494345997467662475:2572], ActorState: ExecuteState, TraceId: 01js2f341x5z6jwyv9mrrg9yav, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2477, MsgBus: 22475 2025-04-17T18:10:05.520230Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346012142112511:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:05.520275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5f/r3tmp/tmpwNw77Q/pdisk_1.dat 2025-04-17T18:10:05.612051Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2477, node 2 2025-04-17T18:10:05.647740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:05.647855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:05.651400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:05.678605Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:05.678630Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:05.678639Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:05.678753Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22475 TClient is connected to server localhost:22475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:10:06.171338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.176453Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:06.185759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:06.249530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:06.410061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:10:06.541849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.578004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346025027016142:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:08.578090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:08.648574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.685225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.733771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.769094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.800072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.847574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.921758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.012009Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346029321984111:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.012081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.012249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346029321984116:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:09.015459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:09.030584Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346029321984118:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:09.126623Z node 2 :TX_PROXY ERROR: Actor# [2:7494346029321984174:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:10.152537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:10.520420Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346012142112511:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:10.520471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:12.017694Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3b5se8tatt3tx2axw5qf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFmYjIzNjktNDFmZDc4ZC1lMzA1NjAyNi1lNTM5ZTBhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:12.017945Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjFmYjIzNjktNDFmZDc4ZC1lMzA1NjAyNi1lNTM5ZTBhMA==, ActorId: [2:7494346033616952584:2570], ActorState: ExecuteState, TraceId: 01js2f3b5se8tatt3tx2axw5qf, Create QueryResponse for error on request, msg: 2025-04-17T18:10:12.943280Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3byabjpwcnw5akt1y8jm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFmYjIzNjktNDFmZDc4ZC1lMzA1NjAyNi1lNTM5ZTBhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-17T18:10:12.943551Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjFmYjIzNjktNDFmZDc4ZC1lMzA1NjAyNi1lNTM5ZTBhMA==, ActorId: [2:7494346033616952584:2570], ActorState: ExecuteState, TraceId: 01js2f3byabjpwcnw5akt1y8jm, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertNoIndexColumns [GOOD] Test command err: Trying to start YDB, gRPC: 14481, MsgBus: 62538 2025-04-17T18:09:56.590060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345970871339172:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:56.590543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c64/r3tmp/tmpEKMRZ0/pdisk_1.dat 2025-04-17T18:09:57.044123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:57.056666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:57.056781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:57.059117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14481, node 1 2025-04-17T18:09:57.227053Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:57.227086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:57.227093Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:57.227265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62538 TClient is connected to server localhost:62538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:57.968809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:57.988114Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:57.996845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.157151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.380477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.489634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.067341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345988051209974:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.067451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.409378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.440475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.477672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.506969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.542904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.589041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.646730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:00.701417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345988051210634:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.701503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.701852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345988051210639:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:00.705892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:00.717012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345988051210641:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:00.812788Z node 1 :TX_PROXY ERROR: Actor# [1:7494345988051210699:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:01.592084Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345970871339172:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:01.592174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:01.790744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.122391Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:03.146003Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:04.244098Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f339v3wwqt0q4g3j72whr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UzODlkM2MtZDMyY2JjZjgtZWNjNmI1YjAtMjFjMmY5Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:04.258599Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2UzODlkM2MtZDMyY2JjZjgtZWNjNmI1YjAtMjFjMmY5Mjc=, ActorId: [1:7494345992346178326:2513], ActorState: ExecuteState, TraceId: 01js2f339v3wwqt0q4g3j72whr, Create QueryResponse for error on request, msg: 2025-04-17T18:10:04.284858Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:04.328834Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:05.429965Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:05.452637Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 63620, MsgBus: 65351 2025-04-17T18:10:06.331465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346017120568465:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.331511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c64/r3tmp/tmpjsuiAA/pdisk_1.dat 2025-04-17T18:10:06.581509Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:06.584636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:06.584709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:06.586049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63620, node 2 2025-04-17T18:10:06.678722Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:06.678738Z node 
2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:06.678743Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:06.678818Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65351 TClient is connected to server localhost:65351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:07.364418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:07.371660Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:07.380971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:07.504709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:07.729498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:07.812440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.008932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346034300439415:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:10.009049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:10.058864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.088835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.122098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.159908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.192536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.266453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.354199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.415925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346034300440086:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:10.416005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:10.416353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346034300440091:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:10.419972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:10.431703Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346034300440093:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:10.497868Z node 2 :TX_PROXY ERROR: Actor# [2:7494346034300440149:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:11.332264Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346017120568465:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:11.332336Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:11.521291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.593250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.671551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> AnalyzeColumnshard::AnalyzeServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23336, MsgBus: 15853 2025-04-17T18:09:40.504935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345905708084802:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:40.505005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c90/r3tmp/tmpQoZYYs/pdisk_1.dat 2025-04-17T18:09:40.915417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:40.916801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:40.916903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:40.920368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23336, node 1 2025-04-17T18:09:41.084269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:41.084290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:41.084296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:41.084417Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:15853 TClient is connected to server localhost:15853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:41.557240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.575397Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.588517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:09:41.705371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.861412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:41.925861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:43.698514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345918592988471:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.698592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:43.983645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.013400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.052210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.083041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.112777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.148336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.187317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.251983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345922887956426:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.252039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.252199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345922887956431:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.255062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:44.265307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345922887956433:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:44.335806Z node 1 :TX_PROXY ERROR: Actor# [1:7494345922887956487:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:45.345417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.528090Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345905708084802:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:45.532398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:52.575839Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345957247699003:2855], TxId: 281474976710720, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODQ5ZjlkMzItZDllMTIyNGQtMWM3YzZjYTctNzhlMDQ5OGI=. TraceId : 01js2f2rjbbg5jn75xycder74w. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:09:52.576326Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345957247699005:2856], TxId: 281474976710720, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODQ5ZjlkMzItZDllMTIyNGQtMWM3YzZjYTctNzhlMDQ5OGI=. TraceId : 01js2f2rjbbg5jn75xycder74w. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494345957247699000:2512], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:09:52.576740Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODQ5ZjlkMzItZDllMTIyNGQtMWM3YzZjYTctNzhlMDQ5OGI=, ActorId: [1:7494345927182924116:2512], ActorState: ExecuteState, TraceId: 01js2f2rjbbg5jn75xycder74w, Create QueryResponse for error on request, msg: 2025-04-17T18:09:55.778066Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710742 at tablet 72075186224037934 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037934) | 2025-04-17T18:09:55.780114Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710742 at tablet 72075186224037935 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037935) | 2025-04-17T18:09:55.801219Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710742 at tablet 72075186224037936 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037936) | 2025-04-17T18:09:55.802409Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710742 at tablet 72075186224037937 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037937) | 2025-04-17T18:09:55.865657Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-17T18:09:55.881337Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-17T18:09:55.884668Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-04-17T18:09:55.884700Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-17T18:09:55.887916Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-17T18:09:55.916425Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-17T18:09:55.917089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabl ... 
node 1, TabletId: 72075186224037936 not found 2025-04-17T18:09:55.917107Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2025-04-17T18:09:55.917120Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-17T18:09:55.917136Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-17T18:09:55.922048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:09:55.922062Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 15086, MsgBus: 61980 2025-04-17T18:09:57.788480Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345977675128008:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:57.813358Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c90/r3tmp/tmpnKPoZ6/pdisk_1.dat 2025-04-17T18:09:57.936601Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:57.972649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:57.972761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:57.974372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15086, node 2 2025-04-17T18:09:58.082790Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:58.082819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:58.082827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:58.082979Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61980 TClient is connected to server localhost:61980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:58.670727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:09:58.682065Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:58.689538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.779079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:58.960260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.052831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.450338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345994854998800:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.450425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.514226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.549132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.616678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.651516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.685559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.736211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.785914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.885147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345994854999466:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.885228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.885282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345994854999471:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.890053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:01.914596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345994854999473:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:01.986533Z node 2 :TX_PROXY ERROR: Actor# [2:7494345994854999532:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:02.786135Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345977675128008:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:02.786210Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:03.180537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:11.540779Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346037804676700:2852], TxId: 281474976715732, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ODYzZDMwOWUtNDczNGFlMjItNDcyYjhlZTAtZGFjYTM2YzI=. TraceId : 01js2f3b3514e64djwwa0pygx7. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:10:11.541476Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346037804676701:2853], TxId: 281474976715732, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODYzZDMwOWUtNDczNGFlMjItNDcyYjhlZTAtZGFjYTM2YzI=. TraceId : 01js2f3b3514e64djwwa0pygx7. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346037804676697:2504], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:10:11.541822Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODYzZDMwOWUtNDczNGFlMjItNDcyYjhlZTAtZGFjYTM2YzI=, ActorId: [2:7494346003444934420:2504], ActorState: ExecuteState, TraceId: 01js2f3b3514e64djwwa0pygx7, Create QueryResponse for error on request, msg: 2025-04-17T18:10:12.913568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:12.913598Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:13.632413Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-17T18:10:13.639879Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-17T18:10:13.639918Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-17T18:10:13.661273Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-17T18:10:13.661327Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-17T18:10:13.662229Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-17T18:10:13.662265Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2025-04-17T18:10:13.662281Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037939 not found >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup >> TraverseDatashard::TraverseTwoTables >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 61869, MsgBus: 7819 2025-04-17T18:09:58.750198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345980516175897:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:58.752119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c52/r3tmp/tmpNo3rmu/pdisk_1.dat 2025-04-17T18:09:59.284525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:59.289723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:59.290105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:59.292011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 61869, node 1 2025-04-17T18:09:59.428457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:59.428481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:59.428491Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:59.428613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7819 TClient is connected to server localhost:7819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:00.125812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.148982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.294803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.534874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.617119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:02.303960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345997696046871:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.304055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.657387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.742356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.790154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.822074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.848956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.892773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.934077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.039566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346001991014838:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.039630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.040161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346001991014843:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.044691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:03.057186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346001991014845:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:03.158404Z node 1 :TX_PROXY ERROR: Actor# [1:7494346001991014903:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:03.750593Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345980516175897:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:03.750649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:04.085179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 1355, MsgBus: 24266 2025-04-17T18:10:07.607528Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346021570950466:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:07.608675Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c52/r3tmp/tmpp1CAab/pdisk_1.dat 2025-04-17T18:10:07.754606Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:07.772395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.772444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.776127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1355, node 2 2025-04-17T18:10:07.916231Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.916251Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.916260Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.916368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24266 TClient is connected to server localhost:24266 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.431238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.442874Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:08.472260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:08.550112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:10:08.709935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.771436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:10.690340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346034455854119:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:10.690437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:10.747135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.816963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.856176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.888839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.925882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.962125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:10.997659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.092756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346038750822079:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.092827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.092962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346038750822084:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.096430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:11.106327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346038750822086:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:11.212994Z node 2 :TX_PROXY ERROR: Actor# [2:7494346038750822142:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:12.148921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:12.598387Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346021570950466:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:12.598452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:17.823421Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3gk28gvs0esggbnah21d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTA1OGQ2Y2MtNjdlZGYwMzItNGE1YzkxZC1jNjEwZTViYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:17.834728Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTA1OGQ2Y2MtNjdlZGYwMzItNGE1YzkxZC1jNjEwZTViYQ==, ActorId: [2:7494346043045790548:2570], ActorState: ExecuteState, TraceId: 01js2f3gk28gvs0esggbnah21d, Create QueryResponse for error on request, msg: >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61629, MsgBus: 10067 2025-04-17T18:09:47.701189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345932963693912:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:47.701238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c78/r3tmp/tmpQhl0o3/pdisk_1.dat 2025-04-17T18:09:48.113242Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:48.145146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:48.145313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61629, node 1 2025-04-17T18:09:48.147151Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:48.270603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:48.270641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:48.270649Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:48.270779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10067 TClient is connected to server localhost:10067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:48.909267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.923855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:48.943583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:09:49.103015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.295835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.360477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:51.208387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345950143564827:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.208514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.568071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.646801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.717694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.748644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.793567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.868646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.904580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:51.963315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345950143565500:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.963388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.963673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345950143565505:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:51.967830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:51.977732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345950143565507:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:52.082865Z node 1 :TX_PROXY ERROR: Actor# [1:7494345954438532860:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:52.701551Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345932963693912:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:52.701618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:53.126978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62144, MsgBus: 13237 2025-04-17T18:09:55.434412Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345967549026179:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c78/r3tmp/tmpevJsl8/pdisk_1.dat 2025-04-17T18:09:55.519579Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:09:55.568842Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:55.575259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:55.575336Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:55.576992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62144, node 2 2025-04-17T18:09:55.638444Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:55.638465Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:55.638471Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:55.638565Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13237 TClient is connected to server localhost:13237 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:56.087377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:56.096173Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:09:56.109066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:5 ... iled to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.325409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494345984728897648:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:59.329233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:59.346084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494345984728897650:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:09:59.422256Z node 2 :TX_PROXY ERROR: Actor# [2:7494345984728897707:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:00.372227Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345967549026179:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:00.374679Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:00.471993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.226221Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:01.286305Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:03.362969Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:03.419251Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:04.153144Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:04.184013Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:04.556802Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:05.702991Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:05.725555Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 7532, MsgBus: 25618 2025-04-17T18:10:07.386969Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346017932069358:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:07.387054Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c78/r3tmp/tmpx23SDQ/pdisk_1.dat 2025-04-17T18:10:07.538698Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:07.576810Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.576915Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.583384Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7532, node 3 2025-04-17T18:10:07.686846Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.686885Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.686896Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.687029Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25618 TClient is connected to server 
localhost:25618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.310878Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.340139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.450939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.658502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.736007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:11.171197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346035111940279:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.171287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.228534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.269839Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.319040Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.353710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.386860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.424420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.487648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:11.560381Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346035111940943:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.560479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.560749Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346035111940948:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:11.564299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:11.578673Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494346035111940950:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:11.658004Z node 3 :TX_PROXY ERROR: Actor# [3:7494346035111941007:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:12.401042Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494346017932069358:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:12.401108Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:12.843511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:13.903222Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:13.958440Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:15.384954Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:17.178937Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:19.026282Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:19.054303Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:19.345271Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [FAIL] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2025-04-17T18:10:06.712243Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346013907514771:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.712310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00193f/r3tmp/tmpdNf28H/pdisk_1.dat 2025-04-17T18:10:07.388728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.388839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.403258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:07.453654Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22074, node 1 2025-04-17T18:10:07.630831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.630858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.630866Z node 
1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.630971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.183560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.351171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12108 2025-04-17T18:10:08.562628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:09.040751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.340887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:12.049652Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346039179773567:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:12.050681Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00193f/r3tmp/tmpkfEUPi/pdisk_1.dat 2025-04-17T18:10:12.387163Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:12.411852Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:12.411933Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:12.415018Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12486, node 4 2025-04-17T18:10:12.526568Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:12.526590Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:12.526596Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:12.526734Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20600 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:12.819970Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:12.884814Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20600 2025-04-17T18:10:13.127580Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2025-04-17T18:10:14.365206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480 2025-04-17T18:10:15.602210Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:15.735424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:15.901027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.216003Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:17.759449Z node 7 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494346064438167845:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:17.759498Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00193f/r3tmp/tmpwzNeil/pdisk_1.dat 2025-04-17T18:10:17.939006Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:17.970469Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:17.970558Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:17.979803Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18993, node 7 2025-04-17T18:10:18.087633Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:18.087655Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:18.088258Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:18.088454Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:18.355396Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:18.371034Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:18.458830Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17729 2025-04-17T18:10:18.654484Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:18.667106Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> DataStreams::TestShardPagination [GOOD] >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> YdbOlapStore::LogExistingUserId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] Test command err: Trying to start YDB, gRPC: 17509, MsgBus: 7497 2025-04-17T18:09:59.987504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345986944454305:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:59.988059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c43/r3tmp/tmp76sVNc/pdisk_1.dat 2025-04-17T18:10:00.511745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:00.534297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:00.534380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:00.536050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17509, node 1 2025-04-17T18:10:00.614733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:00.614757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:00.614764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:00.614891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7497 TClient is connected to server localhost:7497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:01.199790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:01.216644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:01.228759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.414614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.592762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.679126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:03.422682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346004124325119:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.422860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.745066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.780465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.844553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.888362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.926815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.013711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.084924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.174428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346008419293095:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:04.174500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:04.174624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346008419293100:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:04.178019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:04.191452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346008419293102:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:04.274878Z node 1 :TX_PROXY ERROR: Actor# [1:7494346008419293157:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:04.981644Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345986944454305:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:04.981708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:05.211643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:07.967547Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f375qd7tw7xejensf0v6d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:07.986522Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, ActorId: [1:7494346012714260784:2513], ActorState: ExecuteState, TraceId: 01js2f375qd7tw7xejensf0v6d, Create QueryResponse for error on request, msg: 2025-04-17T18:10:08.842503Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:09.717496Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f38v8dqxaxns80af8yprb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:09.717703Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, ActorId: [1:7494346012714260784:2513], ActorState: ExecuteState, TraceId: 01js2f38v8dqxaxns80af8yprb, Create QueryResponse for error on request, msg: 2025-04-17T18:10:09.762437Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:09.798980Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:10.130640Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346034189098132:2662], TxId: 281474976710702, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=. CustomerSuppliedId : . TraceId : 01js2f39rk46by52n5n2qkfnh4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:10:10.130898Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346034189098133:2663], TxId: 281474976710702, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=. TraceId : 01js2f39rk46by52n5n2qkfnh4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494346034189098129:2513], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:10:10.131157Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, ActorId: [1:7494346012714260784:2513], ActorState: ExecuteState, TraceId: 01js2f39rk46by52n5n2qkfnh4, Create QueryResponse for error on request, msg: 2025-04-17T18:10:11.026873Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3a2wcadsbfbm3m2yzc9h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:11.027133Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, ... ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3az7epdc8ebd6z5pvk4z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:12.077915Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODZjNTM2NGMtMTdlZjMxYmItMmZlYmNmMTgtYWFjZWMzYjE=, ActorId: [1:7494346012714260784:2513], ActorState: ExecuteState, TraceId: 01js2f3az7epdc8ebd6z5pvk4z, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20843, MsgBus: 29298 2025-04-17T18:10:12.893970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346042287175352:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:12.894024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c43/r3tmp/tmpQNeKmJ/pdisk_1.dat 2025-04-17T18:10:13.163572Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:13.166885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:13.166974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:13.169599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20843, node 2 2025-04-17T18:10:13.294334Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:13.294355Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:13.294363Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:13.294477Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29298 TClient is connected to server localhost:29298 WaitRootIsUp 'Root'... 
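Note on the repeated KQP_WORKLOAD_SERVICE warnings above: on a freshly bootstrapped database every query is scheduled through a workload-manager resource pool, and the built-in pool at /Root/.metadata/workload_manager/pools/default does not exist yet. The first fetch therefore reports NOT_FOUND, a TPoolCreatorActor creates the pool (retrying on the "Transaction ... completed, doublechecking" race), and a concurrent creation attempt lands on the benign "path exist, request accepts it" TX_PROXY error. A minimal YQL sketch of declaring such a pool explicitly is below; the CREATE RESOURCE POOL syntax and the specific settings are assumptions based on recent YDB workload-manager releases and may not match the exact version under test.

    -- Hypothetical sketch: declare a resource pool explicitly instead of
    -- relying on the implicitly created "default" pool.
    -- Syntax assumed from recent YDB workload-manager versions.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,   -- max queries executing at once
        QUEUE_SIZE = 100               -- max queries waiting for a slot
    );

Once the pool exists (whether created explicitly or by the TPoolCreatorActor), the NOT_FOUND warnings stop and subsequent pool fetches succeed, which is why they appear only during test startup.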
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:13.934827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:13.947453Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:13.960900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:14.030640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:14.227044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:14.301865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:16.464680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346059467046294:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:16.464806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:16.513167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.543920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.575825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.607314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.643000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.715390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.748473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.836573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346059467046960:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:16.836666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:16.836938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346059467046965:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:16.841131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:16.851751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346059467046967:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:16.908346Z node 2 :TX_PROXY ERROR: Actor# [2:7494346059467047022:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:17.873193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:17.902807Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346042287175352:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:17.902864Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:19.745511Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3jnqc1a6cfsxbys70q39, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDMyNDRlZjctOTNlNTRiNDEtNmY0NjIxNzAtMjFlYWMzNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:19.745687Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDMyNDRlZjctOTNlNTRiNDEtNmY0NjIxNzAtMjFlYWMzNjE=, ActorId: [2:7494346063762014656:2512], ActorState: ExecuteState, TraceId: 01js2f3jnqc1a6cfsxbys70q39, Create QueryResponse for error on request, msg: 2025-04-17T18:10:20.119709Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346076646917178:2598], TxId: 281474976710678, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDMyNDRlZjctOTNlNTRiNDEtNmY0NjIxNzAtMjFlYWMzNjE=. TraceId : 01js2f3kfb7hspt9ekzyahbxg0. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:10:20.119935Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346076646917180:2599], TxId: 281474976710678, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDMyNDRlZjctOTNlNTRiNDEtNmY0NjIxNzAtMjFlYWMzNjE=. TraceId : 01js2f3kfb7hspt9ekzyahbxg0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346076646917175:2512], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:10:20.120153Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDMyNDRlZjctOTNlNTRiNDEtNmY0NjIxNzAtMjFlYWMzNjE=, ActorId: [2:7494346063762014656:2512], ActorState: ExecuteState, TraceId: 01js2f3kfb7hspt9ekzyahbxg0, Create QueryResponse for error on request, msg: 2025-04-17T18:10:20.943894Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2f3kv03wgrh5w8mt4x000t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDMyNDRlZjctOTNlNTRiNDEtNmY0NjIxNzAtMjFlYWMzNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:10:20.944130Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDMyNDRlZjctOTNlNTRiNDEtNmY0NjIxNzAtMjFlYWMzNjE=, ActorId: [2:7494346063762014656:2512], ActorState: ExecuteState, TraceId: 01js2f3kv03wgrh5w8mt4x000t, Create QueryResponse for error on request, msg: 2025-04-17T18:10:20.961556Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:10:20.976426Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestCreateTabletChangeToExternal |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-04-17T18:10:06.716624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346015943373998:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.738456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197d/r3tmp/tmppsIUsM/pdisk_1.dat 2025-04-17T18:10:07.304688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:07.308053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.308147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.344356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22250, node 1 2025-04-17T18:10:07.718788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.718813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.718819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.718918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11967 WaitRootIsUp 'Root'... 
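The KIKIMR_CONSTRAINT_VIOLATION errors above ("Conflict with existing key., code: 2012", followed by TKqpEnsure failures and an error QueryResponse) are the expected negative path of these tests: a blind INSERT into a row whose primary key already exists is rejected with PRECONDITION_FAILED. A short YQL sketch of the failing versus the non-failing write is below; the table name is invented for illustration.

    -- Hypothetical table; only the write semantics matter here.
    CREATE TABLE demo (id Uint64, value Utf8, PRIMARY KEY (id));

    UPSERT INTO demo (id, value) VALUES (1u, "first");

    -- INSERT requires that the key not exist yet; a second write to the
    -- same key fails with KIKIMR_CONSTRAINT_VIOLATION, code 2012:
    INSERT INTO demo (id, value) VALUES (1u, "second");

    -- UPSERT (or REPLACE) performs a blind write and succeeds instead:
    UPSERT INTO demo (id, value) VALUES (1u, "second");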
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.189061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.327977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11967 2025-04-17T18:10:08.614506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-04-17T18:10:09.185244Z node 1 :TX_PROXY ERROR: Actor# [1:7494346028828277991:3398] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:11.833887Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346037630995682:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:11.833967Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197d/r3tmp/tmpVSmpuq/pdisk_1.dat 2025-04-17T18:10:12.010001Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:12.055315Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:12.055387Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:12.059041Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6961, node 4 2025-04-17T18:10:12.187399Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:12.187427Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:12.187434Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:12.187545Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:12.479547Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
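The "retention hours and storage megabytes must fit one of ..." error above is the DataStreams validation rule: a stream may keep either up to 24 hours of data with no reserved storage, or up to 168 hours when at least 51200 MB of storage is reserved, and the test deliberately submits 168 h / 40960 MB to fall into the gap between the two envelopes. On the underlying topic the equivalent knobs look roughly like the YQL sketch below; retention_period is documented, while retention_storage_mb is an assumption mapped onto the storage limit used in this check.

    -- Valid: short retention, no reserved storage.
    ALTER TOPIC `/Root/stream_TestStreamStorageRetention`
        SET (retention_period = Interval("PT24H"));

    -- Valid: long retention with enough reserved storage (>= 51200 MB).
    ALTER TOPIC `/Root/stream_TestStreamStorageRetention`
        SET (retention_period = Interval("P7D"), retention_storage_mb = 51200);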
2025-04-17T18:10:12.621340Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11098 2025-04-17T18:10:12.837728Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:16.831546Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494346037630995682:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:16.832619Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:18.904973Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494346068466343882:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:18.911367Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197d/r3tmp/tmp81EaHK/pdisk_1.dat 2025-04-17T18:10:19.081824Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:19.115637Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.115732Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.126121Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64007, node 7 2025-04-17T18:10:19.179027Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:19.179053Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:19.179061Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:19.179209Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:10:19.448391Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:19.531839Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:7491 2025-04-17T18:10:19.721304Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:19.730660Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 6905, MsgBus: 12674 2025-04-17T18:10:18.767540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346068584608727:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:18.768186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e58/r3tmp/tmpYOVoy6/pdisk_1.dat 2025-04-17T18:10:19.122326Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6905, node 1 2025-04-17T18:10:19.196607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.206855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.211386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:19.230557Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:19.230592Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:19.230600Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:19.230709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12674 TClient is connected to server localhost:12674 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:19.742499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:19.782915Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:19.800409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:19.924535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:20.080863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:20.160660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:21.656184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346081469512240:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:21.656320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.010743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.043438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.075827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.106437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.140306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.173572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.250531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.312888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346085764480204:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.312952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.313006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346085764480209:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.316110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:22.327106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346085764480211:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:22.431100Z node 1 :TX_PROXY ERROR: Actor# [1:7494346085764480268:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.307397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.333782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.357252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.416858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.440189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.465994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.758297Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346068584608727:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:23.758359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> YdbLogStore::LogTable [GOOD] >> YdbMonitoring::SelfCheck >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateTabletChangeToExternal [GOOD] Test command err: 2025-04-17T18:09:22.850635Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.853380Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.853541Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:22.853940Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} 
StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:22.855084Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:22.855151Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.856021Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2076] ControllerId# 72057594037932033 2025-04-17T18:09:22.856066Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.856188Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.856538Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.869562Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.869616Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.871668Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:81:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.871822Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.871966Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.872117Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.872260Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.872411Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.872556Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:87:2087] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.872581Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.872660Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:73:2076] 2025-04-17T18:09:22.872720Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:73:2076] 2025-04-17T18:09:22.872782Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.872820Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.873665Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.873749Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.876150Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.876273Z node 2 :BS_NODE DEBUG: 
{NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.877085Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:96:2074] ControllerId# 72057594037932033 2025-04-17T18:09:22.877120Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.877205Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.877409Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.879259Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.879297Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.880410Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:102:2078] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.880517Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:103:2079] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.880598Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:104:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.880685Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:105:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.880807Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:106:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.880940Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:107:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.881082Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:108:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.881106Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.881172Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:96:2074] 2025-04-17T18:09:22.881202Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:96:2074] 2025-04-17T18:09:22.881260Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.881301Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.881735Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.881803Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.884152Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.884271Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.885021Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:114:2074] ControllerId# 72057594037932033 2025-04-17T18:09:22.885066Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.885129Z node 3 :BS_NODE DEBUG: 
{NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.885301Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.886917Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.886947Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.888339Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:120:2078] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.888491Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:121:2079] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.888660Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:122:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.888789Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:123:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.888934Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:124:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.889031Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:125:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.889110Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:126:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.889130Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.889200Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:114:2074] 2025-04-17T18:09:22.889230Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:114:2074] 2025-04-17T18:09:22.889264Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.889305Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.889547Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.889744Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:114:2074] 2025-04-17T18:09:22.889774Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.889797Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.889950Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.905827Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:73:2076] 2025-04-17T18:09:22.905882Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.905910Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.907547Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.907706Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:96:2074] 2025-04-17T18:09:22.907740Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 
2025-04-17T18:09:22.907782Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.907938Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.908217Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeLis ... etId: 72075186224037888 followers: 0 2025-04-17T18:10:24.793033Z node 28 :TABLET_RESOLVER DEBUG: SelectForward node 28 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [28:321:2297] 2025-04-17T18:10:24.793161Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [28:385:2348] 2025-04-17T18:10:24.793231Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [28:385:2348] 2025-04-17T18:10:24.793365Z node 28 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [28:385:2348] 2025-04-17T18:10:24.793525Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [28:385:2348] 2025-04-17T18:10:24.793602Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [28:385:2348] 2025-04-17T18:10:24.793938Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [28:389:2351] 2025-04-17T18:10:24.793995Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [28:389:2351] 2025-04-17T18:10:24.794099Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:24.794208Z node 28 :TABLET_RESOLVER DEBUG: SelectForward node 28 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [28:270:2261] 2025-04-17T18:10:24.794332Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [28:389:2351] 2025-04-17T18:10:24.794394Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [28:389:2351] 2025-04-17T18:10:24.794453Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [28:389:2351] 2025-04-17T18:10:24.794521Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [28:389:2351] 2025-04-17T18:10:24.794743Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [28:389:2351] 2025-04-17T18:10:24.794908Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [28:389:2351] 2025-04-17T18:10:24.794974Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [28:389:2351] 2025-04-17T18:10:24.795023Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [28:389:2351] 2025-04-17T18:10:24.795095Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [28:389:2351] 2025-04-17T18:10:24.795151Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [28:389:2351] 2025-04-17T18:10:24.795241Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [28:388:2350] EventType# 268697601 2025-04-17T18:10:24.795486Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-04-17T18:10:24.795569Z node 28 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:10:24.795990Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{10, redo 442b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-17T18:10:24.796091Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:10:24.807644Z node 28 :BS_PROXY_PUT INFO: [efc53170c63234c6] bootstrap ActorId# [28:392:2354] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:8:0:0:240:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T18:10:24.807816Z node 28 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] Id# [72057594037927937:2:8:0:0:240:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T18:10:24.807902Z node 28 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] restore Id# [72057594037927937:2:8:0:0:240:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T18:10:24.808004Z node 28 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:8:0:0:240:1] Marker# BPG33 2025-04-17T18:10:24.808076Z node 28 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:8:0:0:240:1] Marker# BPG32 2025-04-17T18:10:24.808239Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:8:0:0:240:1] FDS# 240 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T18:10:24.809440Z node 28 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] received {EvVPutResult Status# OK ID# [72057594037927937:2:8:0:0:240:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 81889 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-17T18:10:24.809563Z node 28 :BS_PROXY_PUT DEBUG: [efc53170c63234c6] Result# TEvPutResult {Id# [72057594037927937:2:8:0:0:240:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-17T18:10:24.809633Z node 28 :BS_PROXY_PUT INFO: [efc53170c63234c6] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:8:0:0:240:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:10:24.809779Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.838 sample PartId# [72057594037927937:2:8:0:0:240:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 2.043 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-04-17T18:10:24.809969Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:240:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:10:24.810114Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-04-17T18:10:24.810285Z node 28 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [28:51:2092], reason = ReasonStop Marker# TSYS29 2025-04-17T18:10:24.810328Z node 28 
:PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-04-17T18:10:24.810520Z node 28 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-04-17T18:10:24.810560Z node 28 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-04-17T18:10:24.810724Z node 28 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2025-04-17T18:10:24.811394Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [28:52:2092] 2025-04-17T18:10:24.811467Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [28:52:2092] 2025-04-17T18:10:24.811555Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [28:51:2092] EventType# 268960257 2025-04-17T18:10:24.811634Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [28:385:2348] 2025-04-17T18:10:24.811686Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [28:385:2348] 2025-04-17T18:10:24.811916Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-17T18:10:24.812002Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:10:24.812133Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:10:24.812213Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:10:24.812390Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-17T18:10:24.812453Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:10:24.812554Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:10:24.812636Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:10:24.812932Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [28:394:2356] 2025-04-17T18:10:24.812980Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [28:394:2356] 2025-04-17T18:10:24.813107Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:24.813183Z node 28 :TABLET_RESOLVER DEBUG: SelectForward node 28 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [28:321:2297] 2025-04-17T18:10:24.813244Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [28:394:2356] 2025-04-17T18:10:24.813303Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [28:394:2356] 2025-04-17T18:10:24.813416Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [28:394:2356] 
2025-04-17T18:10:24.813474Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [28:394:2356] 2025-04-17T18:10:24.813595Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-17T18:10:24.813770Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:10:24.813894Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-17T18:10:24.813958Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-17T18:10:24.813989Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-17T18:10:24.814067Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [28:321:2297] CurrentLeaderTablet: [28:336:2309] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:10:24.814133Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [28:321:2297] CurrentLeaderTablet: [28:336:2309] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:10:24.814240Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [28:321:2297] CurrentLeaderTablet: [28:336:2309] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-17T18:10:24.814367Z node 28 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2025-04-17T18:08:18.395230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345552401966961:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.395287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1e/r3tmp/tmpdVXBN2/pdisk_1.dat 2025-04-17T18:08:19.081926Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:19.095550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:19.095611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:19.101259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32613, node 1 2025-04-17T18:08:19.397745Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:19.397762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:19.397770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:19.397871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:08:19.643252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:25932
2025-04-17T18:08:19.901017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:08:20.082324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:08:20.082599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:08:20.083237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:08:20.083395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:08:20.083502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:08:20.083673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:08:20.083771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:08:20.083928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:08:20.084059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:08:20.084201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:08:20.084315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:08:20.084428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494345560991902575:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-17T18:08:20.121548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:08:20.121645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:08:20.121932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:08:20.122053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:08:20.123681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:08:20.123903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:08:20.124019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:08:20.124111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:08:20.124254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:08:20.124368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:08:20.124495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:08:20.124600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494345556696935262:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-17T18:08:20.162018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:08:20.162092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:08:20.162361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:08:20.162505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:08:20.162599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:08:20.162708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:08:20.162825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:08:20.162946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:08:20.163058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494345560991902560:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=S ...
4976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 35, seqNo: [1]
2025-04-17T18:10:22.069540Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 36, seqNo: [1]
2025-04-17T18:10:22.069556Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 37, seqNo: [1]
2025-04-17T18:10:22.069573Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 38, seqNo: [1]
2025-04-17T18:10:22.069590Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 39, seqNo: [1]
2025-04-17T18:10:22.069606Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1]
2025-04-17T18:10:22.069622Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1]
2025-04-17T18:10:22.069640Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1]
2025-04-17T18:10:22.069657Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1]
2025-04-17T18:10:22.069673Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1]
2025-04-17T18:10:22.069690Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1]
2025-04-17T18:10:22.069707Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1]
2025-04-17T18:10:22.069724Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1]
2025-04-17T18:10:22.069740Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1]
2025-04-17T18:10:22.069756Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1]
2025-04-17T18:10:22.069773Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1]
2025-04-17T18:10:22.069790Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1]
2025-04-17T18:10:22.069807Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1]
2025-04-17T18:10:22.069823Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1]
2025-04-17T18:10:22.069839Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1]
2025-04-17T18:10:22.069857Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1]
2025-04-17T18:10:22.069873Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1]
2025-04-17T18:10:22.069890Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1]
2025-04-17T18:10:22.069907Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1]
2025-04-17T18:10:22.069926Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1]
2025-04-17T18:10:22.069944Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1]
2025-04-17T18:10:22.069962Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1]
2025-04-17T18:10:22.069978Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1]
2025-04-17T18:10:22.069995Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1]
2025-04-17T18:10:22.070013Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1]
2025-04-17T18:10:22.070036Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished
2025-04-17T18:10:22.070072Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7494346077714723336:3180], TxId: 281474976710670, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=. TraceId : 01js2f3m9k5mm6pkwby1jst5t1. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished
2025-04-17T18:10:22.070370Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. pass away
2025-04-17T18:10:22.070608Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED };
2025-04-17T18:10:22.071439Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494346077714723251:3111] TxId: 281474976710670. Ctx: { TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7494346077714723336:3180], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 9912 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 713 FinishTimeMs: 1744913422068 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 299 BuildCpuTimeUs: 414 HostName: "ghrun-gkdwnlcfnq" NodeId: 28 CreateTimeMs: 1744913421278 UpdateTimeMs: 1744913422070 } MaxMemoryUsage: 1048576 }
2025-04-17T18:10:22.071518Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. Ctx: { TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7494346077714723336:3180]
2025-04-17T18:10:22.071682Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494346077714723251:3111] TxId: 281474976710670. Ctx: { TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution.
2025-04-17T18:10:22.071776Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7494346077714723251:3111] TxId: 281474976710670. Ctx: { TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.138842s ReadRows: 50 ReadBytes: 16000 ru: 92 rate limiter was not found force flag: 1
2025-04-17T18:10:22.071883Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: ExecuteState, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS
2025-04-17T18:10:22.072272Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: ExecuteState, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 867.847 QueriesCount: 1
2025-04-17T18:10:22.072368Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: ExecuteState, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status
2025-04-17T18:10:22.072510Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: ExecuteState, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-04-17T18:10:22.072570Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: ExecuteState, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, EndCleanup, isFinal: 1
2025-04-17T18:10:22.072667Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: ExecuteState, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7494346026175111844:2245]
2025-04-17T18:10:22.072748Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: unknown state, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Cleanup temp tables: 0
2025-04-17T18:10:22.074301Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7494346034765047331:2326];fline=actor.cpp:33;event=skip_flush_writing;
RESULT: [[42000u;"nginx";"resource_6";"19";[2];["message"];["{\"auth\":{\"org_id\":7704,\"service\":{\"internal\":\"false\",\"ip\":\"258.258.258.258\"},\"type\":\"token\",\"user\":{\"id\":1000042,\"ip\":\"257.257.257.257\",\"is_cloud\":\"false\"}}}"]]]
---------------------
STATS: total CPU: 1146 duration: 862845 usec cpu: 239577 usec { name: "/Root/OlapStore/log1" reads { rows: 50 bytes: 16000 } }
2025-04-17T18:10:22.076942Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913421000, txId: 18446744073709551615] shutting down
2025-04-17T18:10:22.077120Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTM4OTUxOTgtNWZjZjZiZGEtMjBhMjUxOGItNDhlNThhNjA=, ActorId: [28:7494346073419755909:3111], ActorState: unknown state, TraceId: 01js2f3m9k5mm6pkwby1jst5t1, Session actor destroyed
2025-04-17T18:10:22.141144Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[28:7494346034765047386:2327];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-17T18:10:22.146330Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[28:7494346034765047326:2324];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-17T18:10:22.152684Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[28:7494346034765047329:2325];fline=actor.cpp:33;event=skip_flush_writing;
>> YdbOlapStore::LogWithUnionAllAscending [GOOD]
>> YdbOlapStore::LogWithUnionAllDescending
>> DataStreams::ListStreamsValidation [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD]
|88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate
>> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution
------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD]
Test command err:
2025-04-17T18:10:06.714295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346013595920410:2075];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:10:06.714417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001951/r3tmp/tmpEG2Mky/pdisk_1.dat
2025-04-17T18:10:07.328257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:10:07.328351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:10:07.355257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:10:07.371238Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21780, node 1
2025-04-17T18:10:07.453654Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T18:10:07.453672Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-17T18:10:07.799129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:10:07.799156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:10:07.799177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:10:07.799301Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23010
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:10:08.193013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:10:08.331068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:23010
2025-04-17T18:10:08.712436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:10:08.735333Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480
2025-04-17T18:10:10.407941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T18:10:10.661968Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037890:1][1:7494346030775791270:2368] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }
2025-04-17T18:10:10.885330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:10:11.110912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:10:11.141134Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found
2025-04-17T18:10:11.141170Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found
2025-04-17T18:10:11.141182Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found
2025-04-17T18:10:11.141192Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found
2025-04-17T18:10:11.141201Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found
2025-04-17T18:10:11.141211Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found
2025-04-17T18:10:11.141225Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found
2025-04-17T18:10:11.141242Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found
2025-04-17T18:10:11.141255Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found
2025-04-17T18:10:11.141265Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found
2025-04-17T18:10:11.141276Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found
2025-04-17T18:10:11.143216Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found
2025-04-17T18:10:11.143235Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found
2025-04-17T18:10:11.143250Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found
2025-04-17T18:10:11.144364Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found
2025-04-17T18:10:11.144379Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found
2025-04-17T18:10:11.156989Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found
2025-04-17T18:10:11.157059Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found
2025-04-17T18:10:11.157138Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found
2025-04-17T18:10:11.157166Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found
2025-04-17T18:10:11.157218Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,15) wasn't found
2025-04-17T18:10:11.157240Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,12) wasn't found
2025-04-17T18:10:11.157275Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found
2025-04-17T18:10:11.157365Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found
2025-04-17T18:10:11.157396Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found
2025-04-17T18:10:11.157441Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,17) wasn't found
2025-04-17T18:10:11.163927Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001951/r3tmp/tmpR8Fcym/pdisk_1.dat
2025-04-17T18:10:13.221115Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T18:10:13.303361Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:10:13.333189Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:10:13.333269Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:10:13.336084Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3076, node 4
2025-04-17T18:10:13.508450Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:10:13.508474Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:10:13.508482Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:10:13.508666Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:25515
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:10:13.755513Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:10:13.871129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:25515
2025-04-17T18:10:14.103731Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:10:14.391984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T18:10:14.495902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:10:14.624514Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:10:17.517813Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494346064274961221:2073];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:10:17.518510Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001951/r3tmp/tmpMthsZo/pdisk_1.dat
2025-04-17T18:10:17.660303Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:10:17.691938Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:10:17.692019Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:10:17.703824Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 17514, node 7
2025-04-17T18:10:17.785537Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:10:17.785559Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:10:17.785565Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:10:17.785697Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6435
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:10:18.027976Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:10:18.115751Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:6435
2025-04-17T18:10:18.319283Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:10:18.338572Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480
2025-04-17T18:10:18.577200Z node 7 :TX_PROXY ERROR: Actor# [7:7494346068569930579:3398] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:10:22.112540Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494346084308432761:2076];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:10:22.112647Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001951/r3tmp/tmphTGipC/pdisk_1.dat
2025-04-17T18:10:22.353957Z node 10 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14456, node 10
2025-04-17T18:10:22.458994Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:10:22.459156Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:10:22.484407Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:10:22.499950Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:10:22.499988Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:10:22.499996Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:10:22.500165Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2826
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:10:22.841895Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:10:22.923304Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:2826
2025-04-17T18:10:23.159143Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD]
Test command err:
2025-04-17T18:09:22.175596Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap
2025-04-17T18:09:22.178377Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 }
2025-04-17T18:09:22.178503Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0
2025-04-17T18:09:22.179038Z node 6 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:149:2076] ControllerId# 72057594037932033
2025-04-17T18:09:22.179063Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode
2025-04-17T18:09:22.179124Z node 6 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295
2025-04-17T18:09:22.179324Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler
2025-04-17T18:09:22.180559Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap
2025-04-17T18:09:22.182470Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 }
2025-04-17T18:09:22.182553Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0
2025-04-17T18:09:22.183038Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:158:2076] ControllerId# 72057594037932033
2025-04-17T18:09:22.183061Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode
2025-04-17T18:09:22.183095Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295
2025-04-17T18:09:22.183180Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler
2025-04-17T18:09:22.184456Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02
2025-04-17T18:09:22.184493Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58
2025-04-17T18:09:22.186875Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:164:2080] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.187044Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:165:2081] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.187191Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:166:2082] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.187317Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:167:2083] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.187448Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:168:2084] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.187570Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:169:2085] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.187738Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:170:2086] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.187766Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03
2025-04-17T18:09:22.187832Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:158:2076]
2025-04-17T18:09:22.187861Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:158:2076]
2025-04-17T18:09:22.187905Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42
2025-04-17T18:09:22.187951Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap
2025-04-17T18:09:22.189599Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# []
2025-04-17T18:09:22.189726Z node 4 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap
2025-04-17T18:09:22.191973Z node 4 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 }
2025-04-17T18:09:22.192083Z node 4 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0
2025-04-17T18:09:22.192948Z node 4 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [4:178:2077] ControllerId# 72057594037932033
2025-04-17T18:09:22.192986Z node 4 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode
2025-04-17T18:09:22.193053Z node 4 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295
2025-04-17T18:09:22.193236Z node 4 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler
2025-04-17T18:09:22.194966Z node 4 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02
2025-04-17T18:09:22.195005Z node 4 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58
2025-04-17T18:09:22.196544Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:184:2081] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.196739Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:185:2082] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.196866Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:186:2083] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.197001Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:187:2084] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.197161Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:188:2085] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.197316Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:189:2086] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.197470Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:190:2087] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.197504Z node 4 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03
2025-04-17T18:09:22.197555Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [4:178:2077]
2025-04-17T18:09:22.197578Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [4:178:2077]
2025-04-17T18:09:22.197611Z node 4 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42
2025-04-17T18:09:22.197640Z node 4 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap
2025-04-17T18:09:22.198078Z node 4 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# []
2025-04-17T18:09:22.198156Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap
2025-04-17T18:09:22.200084Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 }
2025-04-17T18:09:22.200152Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0
2025-04-17T18:09:22.200610Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:198:2077] ControllerId# 72057594037932033
2025-04-17T18:09:22.200638Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode
2025-04-17T18:09:22.200686Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295
2025-04-17T18:09:22.200795Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler
2025-04-17T18:09:22.202492Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02
2025-04-17T18:09:22.202526Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58
2025-04-17T18:09:22.203947Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:204:2081] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.204099Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:205:2082] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.204250Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:206:2083] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.204372Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:207:2084] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.204504Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:208:2085] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.204663Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:209:2086] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.204790Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:210:2087] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.204813Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03
2025-04-17T18:09:22.204857Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:198:2077]
2025-04-17T18:09:22.204882Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:198:2077]
2025-04-17T18:09:22.204911Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42
2025-04-17T18:09:22.204946Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap
2025-04-17T18:09:22.205413Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# []
2025-04-17T18:09:22.213422Z node 6 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02
2025-04-17T18:09:22.213462Z node 6 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58
2025-04-17T18:09:22.215021Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:215:2081] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.215169Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:216:2082] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.215315Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:217:2083] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.215455Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:218:2084] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.215590Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:219:2085] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.215728Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:220:2086] targetNodeId# 1 Marker# DSP01
2025-04-17T18:09:22.215873Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:221:20 ...
953Z node 52 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037892 CurrentLeader: [57:1947:2264] CurrentLeaderTablet: [57:1953:2267] CurrentGeneration: 3 CurrentStep: 0}
2025-04-17T18:10:24.978027Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037892 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037892 Cookie: 0 CurrentLeader: [57:1947:2264] CurrentLeaderTablet: [57:1953:2267] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}}
2025-04-17T18:10:24.978082Z node 52 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037892 followers: 0
2025-04-17T18:10:24.978136Z node 52 :TABLET_RESOLVER DEBUG: SelectForward node 52 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [57:1947:2264]
2025-04-17T18:10:24.978307Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037892] forward result remote node 57 [52:2076:2746]
2025-04-17T18:10:24.978512Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037892] remote node connected [52:2076:2746]
2025-04-17T18:10:24.978564Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037892]::SendEvent [52:2076:2746]
2025-04-17T18:10:24.978866Z node 57 :PIPE_SERVER DEBUG: [72075186224037892] Accept Connect Originator# [52:2076:2746]
2025-04-17T18:10:24.979264Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037892] connected with status OK role: Leader [52:2076:2746]
2025-04-17T18:10:24.979320Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037892] send queued [52:2076:2746]
2025-04-17T18:10:24.980558Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [52:2080:2748]
2025-04-17T18:10:24.980604Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [52:2080:2748]
2025-04-17T18:10:24.980667Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0}
2025-04-17T18:10:24.980727Z node 52 :TABLET_RESOLVER DEBUG: SelectForward node 52 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [57:1295:2098]
2025-04-17T18:10:24.980845Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result remote node 57 [52:2080:2748]
2025-04-17T18:10:24.980951Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037893] remote node connected [52:2080:2748]
2025-04-17T18:10:24.980993Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [52:2080:2748]
2025-04-17T18:10:24.981242Z node 57 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [52:2080:2748]
2025-04-17T18:10:24.981694Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [52:2080:2748]
2025-04-17T18:10:24.981742Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [52:2080:2748]
2025-04-17T18:10:24.982903Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [52:2083:2750]
2025-04-17T18:10:24.982945Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [52:2083:2750]
2025-04-17T18:10:24.983008Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0}
2025-04-17T18:10:24.983056Z node 52 :TABLET_RESOLVER DEBUG: SelectForward node 52 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [56:1300:2099]
2025-04-17T18:10:24.983175Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 56 [52:2083:2750]
2025-04-17T18:10:24.983286Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [52:2083:2750]
2025-04-17T18:10:24.983333Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [52:2083:2750]
2025-04-17T18:10:24.983789Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connect request undelivered [52:2083:2750]
2025-04-17T18:10:24.983844Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] immediate retry [52:2083:2750]
2025-04-17T18:10:24.983878Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [52:2083:2750]
2025-04-17T18:10:24.983935Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal
2025-04-17T18:10:24.984168Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0}
2025-04-17T18:10:24.984296Z node 52 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone}
2025-04-17T18:10:24.984447Z node 52 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0}
2025-04-17T18:10:24.984511Z node 52 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1}
2025-04-17T18:10:24.984557Z node 52 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2}
2025-04-17T18:10:24.984610Z node 52 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [57:1949:2265] CurrentLeaderTablet: [57:1954:2268] CurrentGeneration: 3 CurrentStep: 0}
2025-04-17T18:10:24.984743Z node 52 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [57:1949:2265] CurrentLeaderTablet: [57:1954:2268] CurrentGeneration: 3 CurrentStep: 0}
2025-04-17T18:10:24.984831Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [57:1949:2265] CurrentLeaderTablet: [57:1954:2268] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}}
2025-04-17T18:10:24.984874Z node 52 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0
2025-04-17T18:10:24.984924Z node 52 :TABLET_RESOLVER DEBUG: SelectForward node 52 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [57:1949:2265]
2025-04-17T18:10:24.985049Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 57 [52:2083:2750]
2025-04-17T18:10:24.985191Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [52:2083:2750]
2025-04-17T18:10:24.985241Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [52:2083:2750]
2025-04-17T18:10:24.985513Z node 57 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [52:2083:2750]
2025-04-17T18:10:24.985972Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK
role: Leader [52:2083:2750] 2025-04-17T18:10:24.986021Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [52:2083:2750] 2025-04-17T18:10:24.987182Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [52:2087:2752] 2025-04-17T18:10:24.987222Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [52:2087:2752] 2025-04-17T18:10:24.987278Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:24.987321Z node 52 :TABLET_RESOLVER DEBUG: SelectForward node 52 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [57:1793:2192] 2025-04-17T18:10:24.987388Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result remote node 57 [52:2087:2752] 2025-04-17T18:10:24.987506Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037895] remote node connected [52:2087:2752] 2025-04-17T18:10:24.987558Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [52:2087:2752] 2025-04-17T18:10:24.987814Z node 57 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [52:2087:2752] 2025-04-17T18:10:24.988198Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [52:2087:2752] 2025-04-17T18:10:24.988237Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [52:2087:2752] 2025-04-17T18:10:24.989273Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [52:2090:2754] 2025-04-17T18:10:24.989310Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [52:2090:2754] 2025-04-17T18:10:24.989365Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:24.989409Z node 52 :TABLET_RESOLVER DEBUG: SelectForward node 52 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [57:1796:2194] 2025-04-17T18:10:24.989496Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result remote node 57 [52:2090:2754] 2025-04-17T18:10:24.989615Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037896] remote node connected [52:2090:2754] 2025-04-17T18:10:24.989657Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [52:2090:2754] 2025-04-17T18:10:24.989828Z node 57 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [52:2090:2754] 2025-04-17T18:10:24.990090Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [52:2090:2754] 2025-04-17T18:10:24.990129Z node 52 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [52:2090:2754] 2025-04-17T18:10:24.991261Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [52:2092:2755] 2025-04-17T18:10:24.991339Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [52:2092:2755] 2025-04-17T18:10:24.991445Z node 52 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:24.991519Z node 52 :TABLET_RESOLVER DEBUG: SelectForward node 52 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [52:595:2274] 
2025-04-17T18:10:24.991634Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [52:2092:2755] 2025-04-17T18:10:24.991727Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [52:2092:2755] 2025-04-17T18:10:24.991804Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [52:2092:2755] 2025-04-17T18:10:24.991881Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [52:2092:2755] 2025-04-17T18:10:24.992033Z node 52 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [52:2092:2755] 2025-04-17T18:10:24.992295Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [52:2092:2755] 2025-04-17T18:10:24.992375Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [52:2092:2755] 2025-04-17T18:10:24.992449Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [52:2092:2755] 2025-04-17T18:10:24.992537Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [52:2092:2755] 2025-04-17T18:10:24.992614Z node 52 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [52:2092:2755] 2025-04-17T18:10:24.992719Z node 52 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [52:567:2269] EventType# 268697616 |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> DataStreams::TestPutRecords [GOOD] >> TraverseDatashard::TraverseTwoTablesServerless >> AnalyzeColumnshard::AnalyzeTable [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2025-04-17T18:10:06.718946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346015795566893:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.719009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001961/r3tmp/tmp5gg0tf/pdisk_1.dat 2025-04-17T18:10:07.294593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.298338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.298899Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:07.315776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5763, node 1 2025-04-17T18:10:07.693991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.694014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:10:07.694022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.694130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.197151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.234976Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:08.370914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10584 2025-04-17T18:10:08.598405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:11.621232Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346037272500804:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:11.621279Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001961/r3tmp/tmp6jgZ7V/pdisk_1.dat 2025-04-17T18:10:11.895911Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3260, node 4 2025-04-17T18:10:11.995677Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:11.995769Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:12.020271Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:12.102169Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:12.102209Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:12.102216Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:12.102343Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:12.415543Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:12.434343Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:12.572109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15027 2025-04-17T18:10:12.811168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:13.083052Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2025-04-17T18:10:13.183925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:13.272603Z :INFO: [/Root/] [/Root/] [1d4f22ed-d4fadbf3-3c2f23b-2212dbfa] Starting read session 2025-04-17T18:10:13.273745Z :DEBUG: [/Root/] [/Root/] [1d4f22ed-d4fadbf3-3c2f23b-2212dbfa] Starting session to cluster null (localhost:3260) 2025-04-17T18:10:13.278467Z :DEBUG: [/Root/] [/Root/] [1d4f22ed-d4fadbf3-3c2f23b-2212dbfa] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:10:13.278510Z :DEBUG: [/Root/] [/Root/] [1d4f22ed-d4fadbf3-3c2f23b-2212dbfa] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:10:13.278664Z :DEBUG: [/Root/] [/Root/] [1d4f22ed-d4fadbf3-3c2f23b-2212dbfa] [null] Reconnecting session to cluster null in 0.000000s 2025-04-17T18:10:13.310347Z :DEBUG: [/Root/] [/Root/] [1d4f22ed-d4fadbf3-3c2f23b-2212dbfa] [null] Successfully connected. 
Initializing session 2025-04-17T18:10:13.315017Z node 4 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-17T18:10:13.315046Z node 4 :PQ_READ_PROXY DEBUG: new session created cookie 1 2025-04-17T18:10:13.331248Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-04-17T18:10:13.331661Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_17971562761657169562_v1 read init: from# ipv6:[::1]:34924, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-04-17T18:10:13.333561Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_17971562761657169562_v1 auth for : user1 2025-04-17T18:10:13.338083Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_17971562761657169562_v1 Handle describe topics response 2025-04-17T18:10:13.339092Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_17971562761657169562_v1 auth is DEAD 2025-04-17T18:10:13.340266Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_17971562761657169562_v1 auth ok: topics# 1, initDone# 0 2025-04-17T18:10:13.341434Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_17971562761657169562_v1 register session: topic# /Root/stream_TestPutRe ... nsumer user1 session user1_7_1_16651406154208981574_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-17T18:10:22.383002Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:10:22.383277Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2025-04-17T18:10:22.383364Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-17T18:10:22.383418Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2025-04-17T18:10:22.384273Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2025-04-17T18:10:22.384368Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] The application data is transferred to the client. Number of messages 2, size 1048579 bytes 2025-04-17T18:10:22.384433Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2025-04-17T18:10:22.384498Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2025-04-17T18:10:22.384551Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2025-04-17T18:10:22.384583Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2025-04-17T18:10:22.384609Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2025-04-17T18:10:22.384420Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2025-04-17T18:10:22.386254Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2025-04-17T18:10:22.388507Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2025-04-17T18:10:22.391815Z :DEBUG: [/Root/] Take Data. Partition 1. 
Read: {4, 0} (4-4) 2025-04-17T18:10:22.392962Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2025-04-17T18:10:22.402028Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2025-04-17T18:10:22.403059Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2025-04-17T18:10:22.404086Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2025-04-17T18:10:22.404145Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] The application data is transferred to the client. Number of messages 7, size 7340032 bytes 2025-04-17T18:10:22.409332Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:10:22.409473Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2025-04-17T18:10:22.409498Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2025-04-17T18:10:22.409559Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2025-04-17T18:10:22.409613Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2025-04-17T18:10:22.409666Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-17T18:10:22.409716Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-17T18:10:22.409755Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-17T18:10:22.409971Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2025-04-17T18:10:22.410012Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2025-04-17T18:10:22.410035Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-17T18:10:22.410154Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2025-04-17T18:10:22.410207Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2025-04-17T18:10:22.411185Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2025-04-17T18:10:22.411239Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] The application data is transferred to the client. Number of messages 3, size 1049088 bytes 2025-04-17T18:10:22.412152Z :INFO: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] Closing read session. Close timeout: 0.000000s 2025-04-17T18:10:22.412231Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:3:5:3:0 null:stream_TestPutRecordsCornerCases:2:4:0:0 null:stream_TestPutRecordsCornerCases:4:3:1:0 null:stream_TestPutRecordsCornerCases:0:2:1:0 null:stream_TestPutRecordsCornerCases:1:1:8:0 2025-04-17T18:10:22.412276Z :INFO: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] Counters: { Errors: 0 CurrentSessionLifetimeMs: 169 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 1 } 2025-04-17T18:10:22.412373Z :NOTICE: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-04-17T18:10:22.412718Z :DEBUG: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] [null] Abort session to cluster 2025-04-17T18:10:22.413630Z :NOTICE: [/Root/] [/Root/] [daab1542-67d44d49-4906b027-64fe6573] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-04-17T18:10:22.414254Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_16651406154208981574_v1 grpc read failed 2025-04-17T18:10:22.414297Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_16651406154208981574_v1 grpc closed 2025-04-17T18:10:22.414343Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_16651406154208981574_v1 is DEAD 2025-04-17T18:10:23.808072Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494346086493325476:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:23.808145Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001961/r3tmp/tmpEnQ2Gi/pdisk_1.dat 2025-04-17T18:10:23.923169Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:23.950864Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:23.950955Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:23.953738Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27915, node 10 2025-04-17T18:10:24.025050Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:24.025081Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:24.025089Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:24.025267Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:24.344713Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:24.450302Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2614 2025-04-17T18:10:24.628967Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:24.896543Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480
<main>: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2025-04-17T18:10:25.002939Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2025-04-17T18:10:16.609794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:16.609926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:16.610053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c2d/r3tmp/tmpXPh2Tk/pdisk_1.dat 2025-04-17T18:10:17.003500Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62299, node 1 2025-04-17T18:10:17.260334Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:17.260402Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:17.260436Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:17.261083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:17.262780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:17.348659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:17.348820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:17.372469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30931 2025-04-17T18:10:18.001632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:21.156812Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:21.196631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:21.196758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:21.234296Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:21.236532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:21.459893Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.460354Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.460894Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.461212Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.461290Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.461374Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.461450Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.461515Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.461592Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.627255Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:21.627362Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:21.643824Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:21.813389Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:21.876812Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:21.876944Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:21.904786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:21.908686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:21.908945Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:21.909025Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:21.909094Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:21.909147Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:21.909214Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:21.909278Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:21.910883Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:21.956197Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:21.956309Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:21.965249Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:21.970107Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:21.970291Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:21.986339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:22.007289Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:22.007369Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:22.007451Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:22.018830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:22.026785Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:22.026930Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:22.270372Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:22.448824Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:22.519531Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:23.290469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:23.290644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:23.310551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:23.420388Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:23.420656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:23.421000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:23.421155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:23.421280Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:23.421444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:23.421594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:23.421772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:23.421916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:23.422048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:23.422206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:23.422333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:23.452667Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:23.452779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:10:23.452911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:10:23.452956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:10:23.453100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:10:23.453146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:10:23.453239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:10:23.453271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:10:23.453322Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:10:23.453348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:10:23.453395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:10:23.453420Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:10:23.454072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:10:23.454129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:10:23.454338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:10:23.454373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:10:23.454533Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:10:23.454582Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:10:23.454739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:10:23.454773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:10:23.454862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:10:23.454895Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:10:23.571880Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:10:24.344115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2594:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:24.344271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:24.347601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-17T18:10:24.448021Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:10:25.156914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2692:3172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.167219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.170063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-17T18:10:25.199432Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000014s FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TraverseDatashard::TraverseOneTableServerless |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> TraverseDatashard::TraverseTwoTables [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> YdbMonitoring::SelfCheck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2025-04-17T18:10:21.857385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:21.857526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:21.857673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bf2/r3tmp/tmprghUUC/pdisk_1.dat 2025-04-17T18:10:22.236749Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6892, node 1 2025-04-17T18:10:22.466773Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:22.466835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:22.466865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:22.467618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:22.474222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.564811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:22.564964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:22.578984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5459 2025-04-17T18:10:23.114542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.914073Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:25.950764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:25.950872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:25.988920Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:25.991397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:26.225449Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.226008Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.226560Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.226911Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.226985Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.227042Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.227110Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.227182Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.227277Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:26.393196Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:26.393303Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:26.406447Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:26.546771Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:26.591902Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:26.592010Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:26.623360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:26.626512Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:26.626726Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:26.626806Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:26.626868Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:26.626919Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:26.626968Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:26.627019Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:26.628262Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:26.663575Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:26.663676Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:26.671390Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:26.675891Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:26.676032Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:26.697130Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:26.722479Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:26.722533Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:26.722606Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:26.735197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:26.742696Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:26.742845Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:26.967526Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:27.125665Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:27.185673Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:27.904219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:27.904395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:27.921447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:28.339519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2545:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:28.339677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:28.341072Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2550:3128]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:28.341268Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:28.341353Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2552:3130] 2025-04-17T18:10:28.341416Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2552:3130] 2025-04-17T18:10:28.341923Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2553:2992] 2025-04-17T18:10:28.342227Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2552:3130], server id = [2:2553:2992], tablet id = 72075186224037894, status = OK 2025-04-17T18:10:28.342470Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2553:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:10:28.342531Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:28.342720Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:28.342825Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2550:3128], StatRequests.size() = 1 2025-04-17T18:10:28.358792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2557:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:28.358960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:28.359389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3139], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:28.364058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:10:28.509234Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:10:28.509314Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:10:28.607239Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2552:3130], schemeshard count = 1 2025-04-17T18:10:28.912085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2564:3141], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:10:29.057801Z node 1 :TX_PROXY ERROR: Actor# [1:2690:3216] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:29.066016Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2713:3232]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:29.066164Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:29.066226Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2713:3232], StatRequests.size() = 1 2025-04-17T18:10:29.106284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f3vt56chp43bagwbh6tjt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg3ZGEyZTQtNDMyMWNkMDctZjMyMGJkMzAtYTVhMWMzNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:10:29.184739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897 2025-04-17T18:10:29.567477Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3057:3296]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:29.567694Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:10:29.567758Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3057:3296], StatRequests.size() = 1 2025-04-17T18:10:29.591575Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3066:3305]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:29.591814Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-17T18:10:29.591841Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3066:3305], StatRequests.size() = 1 2025-04-17T18:10:29.625374Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2f3x1feme3qdw3d7kjx29p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkyMzI2NjgtYTZjZWQwYWItMzk2OGRhYi1jZGFiNGY2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:10:29.703477Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3116:3264]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:29.705867Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:29.705918Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:29.706219Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:29.706283Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:10:29.706335Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:29.738294Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-17T18:10:29.739489Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-17T18:10:29.739797Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3141:3277]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:29.741822Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:29.741866Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:29.742091Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:29.742131Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:10:29.742171Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:29.744070Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-17T18:10:29.744359Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbMonitoring::SelfCheck [GOOD] Test command err: 2025-04-17T18:08:18.421367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345551197844405:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.421445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1b/r3tmp/tmpZLKdxQ/pdisk_1.dat 2025-04-17T18:08:18.866456Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:18.884427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.884520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.890835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 8194, node 1 2025-04-17T18:08:19.103454Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:19.103471Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:19.103478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:19.103566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.479884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:19.666126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54064" , at schemeshard: 72057594046644480 2025-04-17T18:08:19.666663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:19.669415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:08:19.669456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T18:08:19.669527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for 
pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:08:19.669565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:08:19.669625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:08:19.669689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-17T18:08:19.669932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-17T18:08:19.671640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-17T18:08:19.671848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:08:19.671863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:19.671965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:08:19.671994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-17T18:08:19.675127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-17T18:08:19.675277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-17T18:08:19.675438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:08:19.675450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:08:19.675575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:08:19.675636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:08:19.675649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494345551197845007:2388], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-17T18:08:19.675659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494345551197845007:2388], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-17T18:08:19.675691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:19.675718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-17T18:08:19.676390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:08:19.676847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { 
StoragePoolName: "hdd2" } Binded ... 80 2025-04-17T18:10:21.382360Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:21.382406Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:21.382466Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:21.382497Z node 64 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 281474976710667:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:10:21.382542Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-17T18:10:21.382718Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710667:0 progress is 1/1 2025-04-17T18:10:21.382736Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-17T18:10:21.382755Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710667:0 progress is 1/1 2025-04-17T18:10:21.382770Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-17T18:10:21.382788Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710667, ready parts: 1/1, is published: true 2025-04-17T18:10:21.382826Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [64:7494346079776466852:2370] message: TxId: 281474976710667 2025-04-17T18:10:21.382844Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-17T18:10:21.382860Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710667:0 2025-04-17T18:10:21.382868Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710667:0 2025-04-17T18:10:21.382993Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-17T18:10:21.384255Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:10:21.384282Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:10:21.384297Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:10:21.384312Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:10:21.387227Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:10:21.387863Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[64:7494346075481498887:2327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-04-17T18:10:21.388219Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-17T18:10:21.388465Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:10:21.388815Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037888 not found 2025-04-17T18:10:21.388851Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037889 not found 2025-04-17T18:10:21.388869Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037891 not found 2025-04-17T18:10:21.388890Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037890 not found 2025-04-17T18:10:21.389396Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:10:21.389603Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:21.390459Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:10:21.390645Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:10:21.391372Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:10:21.391606Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:10:21.391635Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:10:21.391677Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:10:21.392569Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[64:7494346075481498879:2324];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-04-17T18:10:21.396361Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[64:7494346075481498881:2325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-04-17T18:10:21.400370Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[64:7494346075481498886:2326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-04-17T18:10:21.412182Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:10:21.412227Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:10:21.415066Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:10:21.415102Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:10:21.415843Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:10:21.415871Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:10:21.415915Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:10:21.415934Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:10:21.415971Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:10:26.184601Z node 67 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[67:7494346100957127962:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:26.184666Z node 67 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a1b/r3tmp/tmperxaM8/pdisk_1.dat 2025-04-17T18:10:26.299670Z node 67 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:26.341147Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:26.341251Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:26.344069Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16042, node 67 2025-04-17T18:10:26.407679Z node 67 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:26.407708Z node 67 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:26.407717Z node 67 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:26.407890Z node 67 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:26.717320Z node 67 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-67" reason: "YELLOW-e9e2-1231c6b1-68" reason: "YELLOW-e9e2-1231c6b1-69" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-67" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 67 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-68" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 68 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-69" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 69 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 67 host: "::1" port: 12001 } >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> DataStreams::TestReservedConsumersMetering [GOOD] >> TraverseDatashard::TraverseOneTable >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2025-04-17T18:10:06.704564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346016530075816:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.704637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001971/r3tmp/tmpbS3Pbi/pdisk_1.dat 2025-04-17T18:10:07.382218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.382320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.422255Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:07.425088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12454, node 1 2025-04-17T18:10:07.626553Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.626576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.626591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.626694Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21343 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.196694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.315540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21343 2025-04-17T18:10:08.570867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:09.070965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } 
records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: "shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { 
sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } 2025-04-17T18:10:11.705305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346016530075816:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:11.705384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: "shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000009" } records { sequence_number: "18" shard_id: "shard-000001" } records { sequence_number: "19" shard_id: "shard-000009" } records { sequence_number: "19" shard_id: "shard-000004" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "20" shard_id: "shard-000001" } records { sequence_number: "20" shard_id: "shard-000009" } records { sequence_number: "20" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000005" } records { sequence_number: "8" shard_id: "shard-000008" } records { sequence_number: "21" shard_id: "shard-000004" } r ... 
older_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913426335-170","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1744913426,"finish":1744913426},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913426}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913426335-171","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1744913426,"finish":1744913426},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913426}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744913426335-172","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744913426,"finish":1744913426},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744913426}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744913426376-173","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744913426,"finish":1744913427},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913427}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913426376-174","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744913426,"finish":1744913427},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913427}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913426376-175","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744913426,"finish":1744913427},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913427}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744913426376-176","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744913426,"finish":1744913427},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744913427}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744913427408-177","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744913427,"finish":1744913428},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913428}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913427408-178","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744913427,"finish":1744913428},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913428}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913427408-179","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744913427,"finish":1744913428},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913428}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744913427408-180","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744913427,"finish":1744913428},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744913428}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744913428424-181","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744913428,"finish":1744913429},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913429}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913428424-182","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744913428,"finish":1744913429},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913429}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913428424-183","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744913428,"finish":1744913429},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913429}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744913428424-184","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744913428,"finish":1744913429},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744913429}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744913429473-185","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744913429,"finish":1744913430},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913430}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913429473-186","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744913429,"finish":1744913430},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913430}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913429473-187","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744913429,"finish":1744913430},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913430}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744913429473-188","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744913429,"finish":1744913430},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744913430}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744913430487-189","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744913430,"finish":1744913431},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913431}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913430487-190","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744913430,"finish":1744913431},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913431}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744913430487-191","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744913430,"finish":1744913431},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744913431}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744913430487-192","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744913430,"finish":1744913431},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744913431}' |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2025-04-17T18:10:06.735471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346015805545309:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.735551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001967/r3tmp/tmp6sbUGo/pdisk_1.dat 2025-04-17T18:10:07.359001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.359096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.366626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:07.373750Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3280, node 1 2025-04-17T18:10:07.726952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.726986Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.726995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.727097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.242236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.363360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20128 2025-04-17T18:10:08.694463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:08.719589Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T18:10:08.995290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:09.029171Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:10:09.029198Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-17T18:10:09.029208Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T18:10:09.029222Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:10:11.878409Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346035462076492:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:11.878505Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001967/r3tmp/tmpPwGJu5/pdisk_1.dat 2025-04-17T18:10:12.195307Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:12.241604Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:12.241693Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19774, node 4 2025-04-17T18:10:12.254529Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:12.474943Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:12.474964Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:12.474970Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:12.475108Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:10:12.715703Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:12.816940Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21732 2025-04-17T18:10:13.015528Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:13.264790Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:13.347573Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:13.376490Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-04-17T18:10:13.376508Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-04-17T18:10:13.376519Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-04-17T18:10:13.376562Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-17T18:10:16.649958Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494346058173275263:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:16.650104Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001967/r3tmp/tmpTmgXly/pdisk_1.dat 2025-04-17T18:10:16.810633Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:16.855183Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.855277Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.861472Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21031, node 7 2025-04-17T18:10:16.964155Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:16.964182Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:16.964189Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:16.964367Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15678 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:17.241061Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:17.330399Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15678 2025-04-17T18:10:17.547259Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:17.781551Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:17.864956Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:17.951639Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:17.981353Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-04-17T18:10:17.981432Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-04-17T18:10:17.982536Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-04-17T18:10:17.982565Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-04-17T18:10:21.577154Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494346079714453873:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:21.578349Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001967/r3tmp/tmpg6MZPU/pdisk_1.dat 2025-04-17T18:10:21.748679Z node 10 
:IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:21.785702Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:21.785804Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:21.790751Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8626, node 10 2025-04-17T18:10:21.863123Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:21.863150Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:21.863159Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:21.863315Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:22.183119Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:22.252283Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:7745 2025-04-17T18:10:22.487869Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:26.577070Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494346079714453873:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:26.577160Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: 2025-04-17T18:10:22.656347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:22.656481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:22.656616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bec/r3tmp/tmpIBglu3/pdisk_1.dat 2025-04-17T18:10:23.008112Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12371, node 1 2025-04-17T18:10:23.239488Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:23.239543Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:23.239587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:23.240298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:23.250510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.333074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:23.333197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:23.346853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3942 2025-04-17T18:10:23.876028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:26.730735Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:26.768104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:26.768232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:26.807140Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:26.811455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:27.034221Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.034706Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.035245Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.035516Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.035612Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.035698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.035766Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.035813Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.035886Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:27.201688Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:27.201788Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:27.214891Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:27.348200Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:27.390770Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:27.390850Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:27.421745Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:27.423366Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:27.423582Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:27.423662Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:27.423718Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:27.423781Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:27.423823Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:27.423873Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:27.425947Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:27.463095Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:27.463177Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:27.469960Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:27.473916Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:27.474075Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:27.486222Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:10:27.509145Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:27.509197Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:27.509253Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:10:27.518572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:27.526620Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:27.526752Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:27.695837Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:27.849152Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:27.947455Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:28.431885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:10:28.964569Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:29.126151Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:10:29.126233Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:29.126319Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:29.127178Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2599:2951] 2025-04-17T18:10:29.127352Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2599:2951], schemeshard id = 72075186224037899 2025-04-17T18:10:29.970437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:30.418773Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:30.650392Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-17T18:10:30.650453Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-17T18:10:30.650536Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3089:3156], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-04-17T18:10:30.651679Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3090:3157] 2025-04-17T18:10:30.651932Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3090:3157], schemeshard id = 72075186224037905 2025-04-17T18:10:31.731566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3215:3414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:31.731729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:31.746922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-17T18:10:32.048510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3520:3462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:32.048670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:32.092991Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3525:3466]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:32.093222Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:32.093404Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-17T18:10:32.093471Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3528:3469] 2025-04-17T18:10:32.093554Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3528:3469] 2025-04-17T18:10:32.094165Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3529:3394] 2025-04-17T18:10:32.094490Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3528:3469], server id = [2:3529:3394], tablet id = 72075186224037894, status = OK 2025-04-17T18:10:32.094714Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3529:3394], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:10:32.094776Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:32.094994Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:32.095066Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3525:3466], StatRequests.size() = 1 2025-04-17T18:10:32.112457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3533:3473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:32.112656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:32.113182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3538:3478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:32.119928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-17T18:10:32.334600Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:10:32.334659Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:10:32.446028Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3528:3469], schemeshard count = 1 2025-04-17T18:10:32.732516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3540:3480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:10:32.927600Z node 1 :TX_PROXY ERROR: Actor# [1:3670:3557] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:32.938650Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3693:3573]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:32.938900Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:32.938947Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3693:3573], StatRequests.size() = 1 2025-04-17T18:10:33.000468Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2f3zff5jwxhzpweh5fgjgh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg3NzVjNjMtMjlhZmFjNTAtYzJkMGEyYzctZGY0Mjk0ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:10:33.079562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905 2025-04-17T18:10:33.479386Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4052:3638]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:33.479659Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:10:33.480182Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-04-17T18:10:33.480231Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:33.480544Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:33.480607Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:4052:3638], StatRequests.size() = 1 2025-04-17T18:10:33.505332Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4061:3647]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:33.505537Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-17T18:10:33.505571Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:4061:3647], StatRequests.size() = 1 2025-04-17T18:10:33.545391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2f40vsc17zas8k7pzyf665, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODMzZTlkYzktYWYyYzdlNGYtYTI5MjAyYzgtYzg0ZmI2ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:10:33.584504Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4105:3663]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:33.586790Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:33.586855Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:33.587125Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:33.587171Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:10:33.587228Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:33.600521Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-17T18:10:33.600804Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-17T18:10:33.601311Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4130:3676]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:33.604352Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:33.604405Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:33.604918Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:33.604967Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:10:33.605013Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:33.607368Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-17T18:10:33.607661Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 >> TraverseColumnShard::TraverseColumnTable >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> AnalyzeColumnshard::AnalyzeMultiOperationId >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 32035, msgbus: 15407 2025-04-17T18:06:58.757844Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345208732254802:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:06:58.757897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001348/r3tmp/tmpBw9Mn5/pdisk_1.dat 2025-04-17T18:06:59.155049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:06:59.155156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:06:59.169609Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:06:59.176079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32035, node 1 2025-04-17T18:06:59.196300Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:59.196430Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:06:59.246666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:06:59.246684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:06:59.246689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:06:59.246786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15407 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:06:59.460344Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] Handle TEvNavigate describe path dc-1 2025-04-17T18:06:59.460397Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222842:2438] HANDLE EvNavigateScheme dc-1 2025-04-17T18:06:59.462315Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222842:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:59.498666Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222842:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:06:59.506892Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222842:2438] Handle TEvDescribeSchemeResult Forward to# [1:7494345213027222841:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:06:59.538699Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] Handle TEvProposeTransaction 2025-04-17T18:06:59.538741Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:06:59.538876Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494345213027222857:2446] 2025-04-17T18:06:59.616212Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:59.616301Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:06:59.616319Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:59.616392Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:59.616688Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:59.616804Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:06:59.616847Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:06:59.616983Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:06:59.617586Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.619800Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:06:59.619855Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222857:2446] txid# 281474976710657 SEND to# [1:7494345213027222856:2445] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-04-17T18:06:59.632815Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] Handle TEvProposeTransaction 2025-04-17T18:06:59.632838Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:06:59.632871Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494345213027222895:2480] 2025-04-17T18:06:59.635063Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:06:59.635114Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:06:59.635148Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:06:59.635207Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:06:59.635415Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:06:59.635493Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:06:59.635525Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:06:59.635616Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:06:59.636031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:06:59.638000Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:06:59.638035Z node 1 :TX_PROXY DEBUG: Actor# [1:7494345213027222895:2480] txid# 281474976710658 
SEND to# [1:7494345213027222894:2479] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:07:01.602133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345221617157584:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.602140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345221617157596:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.602316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:07:01.602582Z node 1 :TX_PROXY DEBUG: actor# [1:7494345208732255044:2116] H ... 5 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:10:24.019886Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] txid# 281474976710665 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:10:24.019977Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:10:24.020399Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:10:24.020582Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-04-17T18:10:24.020678Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] txid# 281474976710665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72075186224037891} 2025-04-17T18:10:24.021370Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] txid# 281474976710665 HANDLE EvClientConnected 2025-04-17T18:10:24.025704Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976710665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:42488" , at schemeshard: 72075186224037891 2025-04-17T18:10:24.026143Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-17T18:10:24.026425Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T18:10:24.026474Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T18:10:24.026739Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-04-17T18:10:24.026783Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-17T18:10:24.026901Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-17T18:10:24.026933Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T18:10:24.026974Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-17T18:10:24.026993Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710665 ready parts: 1/1 2025-04-17T18:10:24.027058Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-04-17T18:10:24.027137Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-04-17T18:10:24.027176Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-04-17T18:10:24.027200Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T18:10:24.027220Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-17T18:10:24.027242Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-04-17T18:10:24.027258Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-04-17T18:10:24.031646Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-04-17T18:10:24.032000Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-04-17T18:10:24.032243Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-04-17T18:10:24.032340Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380614:2865] txid# 281474976710665 SEND to# [59:7494346091003380613:2359] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} 2025-04-17T18:10:24.032380Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-04-17T18:10:24.032420Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T18:10:24.032842Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T18:10:24.032996Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-04-17T18:10:24.033032Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7494346067881575776:2304], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-17T18:10:24.033050Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7494346067881575776:2304], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-17T18:10:24.034114Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-17T18:10:24.034281Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 
72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-17T18:10:24.034313Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710665 2025-04-17T18:10:24.034344Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-04-17T18:10:24.034376Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-04-17T18:10:24.034507Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710665, subscribers: 0 TEST clusteradmin triggers auth on tenant 2025-04-17T18:10:24.037406Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 TClient is connected to server localhost:24896 TClient::Ls request: /dc-1/tenant-db 2025-04-17T18:10:24.432488Z node 59 :TX_PROXY DEBUG: actor# [59:7494346056643641039:2113] Handle TEvNavigate describe path /dc-1/tenant-db 2025-04-17T18:10:24.432545Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380626:2870] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-04-17T18:10:24.433007Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380626:2870] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:10:24.433206Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380626:2870] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-04-17T18:10:24.435582Z node 59 :TX_PROXY DEBUG: Actor# [59:7494346091003380626:2870] Handle TEvDescribeSchemeResult Forward to# [59:7494346091003380625:2869] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { 
Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-04-17T18:10:24.478475Z node 59 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-04-17T18:10:24.479035Z node 59 :HIVE WARN: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:10:24.482962Z node 60 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 4874, MsgBus: 26597 2025-04-17T18:09:25.358262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345840556370613:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:25.358304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca8/r3tmp/tmp1AtnNi/pdisk_1.dat 2025-04-17T18:09:25.889767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:25.893152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:25.893234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:25.904037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4874, node 1 2025-04-17T18:09:26.057337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:26.057362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:26.057369Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:26.057508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26597 TClient is connected to server localhost:26597 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:26.732959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.756370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:26.916416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:27.050571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:27.143313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:28.920300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345853441274270:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:28.920430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:29.247382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.277752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.310728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.356365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.388435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.447431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.491080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:29.561792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345857736242228:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:29.561859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:29.561919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345857736242233:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:29.564937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:29.578025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345857736242235:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:29.656014Z node 1 :TX_PROXY ERROR: Actor# [1:7494345857736242291:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:30.358764Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345840556370613:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:30.358848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:30.547651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:30.809090Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-17T18:09:30.809740Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:30.809796Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345862031210145:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:30.809873Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:09:30.809910Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345862031210145:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, 
SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:30.810115Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:30.810149Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345862031210145:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281 ... :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-17T18:10:02.451895Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:02.451936Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:02.452024Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715674, txId# 281474976710765 2025-04-17T18:10:02.452059Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload 
rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:02.452214Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:02.452237Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:02.452844Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715674, cookie: 281474976715674, txId: 281474976710765, status: StatusAccepted 2025-04-17T18:10:02.452942Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710765 SchemeshardId: 72057594046644480 PathId: 17 2025-04-17T18:10:02.453352Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:02.453383Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: 
StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:02.459064Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710765, buildInfoId: 281474976715674 2025-04-17T18:10:02.459153Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710765, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:02.459836Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:02.459899Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:02.459927Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:10:02.460274Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:02.460315Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: 
TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494345997559796467:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:02.460328Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715674, subscribers count# 1 2025-04-17T18:10:02.460491Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715674 2025-04-17T18:10:02.460675Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715674 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:10:02.462690Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-17T18:10:02.462708Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-17T18:10:02.462724Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-17T18:10:11.329499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:11.329521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:12.295497Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715674 |89.0%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> TraverseColumnShard::TraverseServerlessColumnTable >> AnalyzeColumnshard::AnalyzeSameOperationId >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2025-04-17T18:10:30.525628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:30.525763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:30.525906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b93/r3tmp/tmpXdbJBR/pdisk_1.dat 2025-04-17T18:10:30.880864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2177, node 1 2025-04-17T18:10:31.101945Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:31.102019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:31.102048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:31.102653Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:31.104763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:31.197796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:31.197937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:31.212136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23293 2025-04-17T18:10:31.757938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:34.464200Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:34.504446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:34.504553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:34.541983Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:34.544307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:34.766760Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.767207Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.767754Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.768058Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.768143Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.768224Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.768315Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.768404Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.768469Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.930809Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:34.930910Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:34.943723Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:35.061027Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:35.096341Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:35.096411Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:35.125011Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:35.126677Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:35.126875Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:35.126938Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:35.127006Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:35.127056Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:35.127104Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:35.127143Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:35.128302Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:35.163415Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:35.163524Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:35.171006Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:35.174785Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:35.174897Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:35.184334Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:10:35.201709Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:35.201757Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:35.201803Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:10:35.210951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:35.217020Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:35.217169Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:35.397717Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:35.608673Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:35.708409Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:36.214927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:10:36.768316Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:36.919270Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:10:36.919336Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:36.919444Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:36.920541Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2599:2951] 2025-04-17T18:10:36.920708Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2599:2951], schemeshard id = 72075186224037899 2025-04-17T18:10:38.033919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2734:3250], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.034078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.050638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T18:10:38.342126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3040:3301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.342252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.379619Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3045:3305]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:38.379746Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:38.379843Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-17T18:10:38.379888Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3048:3308] 2025-04-17T18:10:38.379975Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3048:3308] 2025-04-17T18:10:38.380537Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3049:3189] 2025-04-17T18:10:38.380944Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3048:3308], server id = [2:3049:3189], tablet id = 72075186224037894, status = OK 2025-04-17T18:10:38.381180Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3049:3189], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:10:38.381239Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:38.381430Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:38.381490Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3045:3305], StatRequests.size() = 1 2025-04-17T18:10:38.400981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3053:3312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.401176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.401582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3058:3317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.409464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-17T18:10:38.569193Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:10:38.569278Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:10:38.590634Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3048:3308], schemeshard count = 1 2025-04-17T18:10:38.854943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3060:3319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-17T18:10:39.075275Z node 1 :TX_PROXY ERROR: Actor# [1:3185:3391] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:39.085046Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3208:3407]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:39.085216Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:39.085256Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3208:3407], StatRequests.size() = 1 2025-04-17T18:10:39.130261Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2f45m44ke1s1xw7qfmb99p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ZWFiZmItOTBjYjAwYjktZmExNWI0OTYtN2Q4NmFhNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:10:39.226822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2025-04-17T18:10:39.556533Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3539:3471]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:39.556752Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:10:39.556794Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3539:3471], StatRequests.size() = 1 2025-04-17T18:10:39.576909Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3548:3480]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:39.577030Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-17T18:10:39.577054Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3548:3480], StatRequests.size() = 1 2025-04-17T18:10:39.612885Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2f46spbgxfgba2ensryg2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmUzMzk0Ny1kMDQ4MzA3Yy01MDZkYjZmMy1jMGNkM2MyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:10:39.696037Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3592:3442]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:39.698208Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:39.698254Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:39.698481Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:39.698517Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:10:39.698554Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:39.715632Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-17T18:10:39.715946Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-17T18:10:39.716317Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3617:3455]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:39.719337Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:39.719395Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:39.719872Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:39.719925Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:10:39.719977Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:39.722275Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-17T18:10:39.722683Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TraverseDatashard::TraverseOneTableServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] Test command err: 2025-04-17T18:09:22.161779Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.167255Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.169466Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:22.172045Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 
PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:22.174476Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:22.174524Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.175785Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:48:2075] ControllerId# 72057594037932033 2025-04-17T18:09:22.175819Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.176950Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.177291Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.189235Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.189267Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.190684Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:56:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.190781Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:57:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.190877Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:58:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.190991Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:59:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.191060Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:60:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.191168Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:61:2085] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.191254Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:62:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.191280Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.191339Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:48:2075] 2025-04-17T18:09:22.191368Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:48:2075] 2025-04-17T18:09:22.191414Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.191443Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.193496Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.193558Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.195624Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.195694Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.196247Z node 2 :BS_NODE 
DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:71:2073] ControllerId# 72057594037932033 2025-04-17T18:09:22.196267Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.196302Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.196427Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.197639Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.197669Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.198896Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:77:2077] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.199018Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:78:2078] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.199104Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:79:2079] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.199189Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:80:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.199274Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:81:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.199336Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:82:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.199420Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:83:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.199437Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.199475Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:71:2073] 2025-04-17T18:09:22.199491Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:71:2073] 2025-04-17T18:09:22.199524Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.199545Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.199748Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.200054Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.200814Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:71:2073] 2025-04-17T18:09:22.200857Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.200886Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.201005Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.223704Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-04-17T18:09:22.223759Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.223792Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.223925Z node 1 :BS_NODE 
DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.223958Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:22.230226Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:22.232593Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:22.232837Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.232896Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:22.233015Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:22.233443Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:22.241732Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-04-17T18:09:22.241785Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.243927Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.244169Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.244599Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.244667Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.244823Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-17T18:09:22.244862Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-17T18:09:22.244884Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-17T18:09:22.244917Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.244972Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:52:2064] 2025-04-17T18:09:22.244995Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:52:2064] 2025-04-17T18:09:22.245110Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:75:2064] 2025-04-17T18:09:22.245144Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:75:2064] 2025-04-17T18:09:22.245192Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} 
StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-17T18:09:22.245218Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [2:95:2087] 2025-04-17T18:09:22.245234Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [2:95:20 ... 0Z node 25 :TABLET_RESOLVER DEBUG: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [25:932:2270] 2025-04-17T18:10:40.865615Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result local node, try to connect [25:1078:2356] 2025-04-17T18:10:40.865647Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [25:1078:2356] 2025-04-17T18:10:40.865734Z node 25 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [25:1078:2356] 2025-04-17T18:10:40.865827Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [25:1078:2356] 2025-04-17T18:10:40.865865Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [25:1078:2356] 2025-04-17T18:10:40.866067Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [25:1082:2359] 2025-04-17T18:10:40.866101Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [25:1082:2359] 2025-04-17T18:10:40.866148Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StInit ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:40.866282Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:10:40.866548Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-04-17T18:10:40.866583Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-04-17T18:10:40.866611Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-04-17T18:10:40.866758Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [25:678:2178] CurrentLeaderTablet: [25:684:2181] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:10:40.866817Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [25:678:2178] CurrentLeaderTablet: [25:684:2181] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:10:40.866899Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [25:678:2178] CurrentLeaderTablet: [25:684:2181] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:10:40.866926Z node 25 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-04-17T18:10:40.866967Z node 25 :TABLET_RESOLVER DEBUG: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [25:678:2178] 2025-04-17T18:10:40.867031Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result local node, try to connect [25:1082:2359] 2025-04-17T18:10:40.867064Z node 25 :PIPE_CLIENT DEBUG: 
TClient[72075186224037894]::SendEvent [25:1082:2359] 2025-04-17T18:10:40.867153Z node 25 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [25:1082:2359] 2025-04-17T18:10:40.867251Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [25:1082:2359] 2025-04-17T18:10:40.867278Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [25:1082:2359] 2025-04-17T18:10:40.867483Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [25:1086:2362] 2025-04-17T18:10:40.867508Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [25:1086:2362] 2025-04-17T18:10:40.867555Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StInit ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:40.867635Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037895 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:10:40.867874Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 0} 2025-04-17T18:10:40.867914Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 1} 2025-04-17T18:10:40.867949Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 2} 2025-04-17T18:10:40.868115Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [25:844:2222] CurrentLeaderTablet: [25:846:2223] CurrentGeneration: 2 CurrentStep: 0} 2025-04-17T18:10:40.868176Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [25:844:2222] CurrentLeaderTablet: [25:846:2223] CurrentGeneration: 2 CurrentStep: 0} 2025-04-17T18:10:40.868271Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037895 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037895 Cookie: 0 CurrentLeader: [25:844:2222] CurrentLeaderTablet: [25:846:2223] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:10:40.868296Z node 25 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037895 followers: 0 2025-04-17T18:10:40.868333Z node 25 :TABLET_RESOLVER DEBUG: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [25:844:2222] 2025-04-17T18:10:40.868395Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result local node, try to connect [25:1086:2362] 2025-04-17T18:10:40.868425Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [25:1086:2362] 2025-04-17T18:10:40.868510Z node 25 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [25:1086:2362] 2025-04-17T18:10:40.868632Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [25:1086:2362] 2025-04-17T18:10:40.868663Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [25:1086:2362] 2025-04-17T18:10:40.868873Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [25:1090:2365] 2025-04-17T18:10:40.868909Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [25:1090:2365] 2025-04-17T18:10:40.868974Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StInit ev: 
{EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:40.869082Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037896 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:10:40.869335Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 0} 2025-04-17T18:10:40.869389Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 1} 2025-04-17T18:10:40.869424Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 2} 2025-04-17T18:10:40.869591Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [25:760:2200] CurrentLeaderTablet: [25:766:2203] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:10:40.869666Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [25:760:2200] CurrentLeaderTablet: [25:766:2203] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:10:40.869759Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037896 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037896 Cookie: 0 CurrentLeader: [25:760:2200] CurrentLeaderTablet: [25:766:2203] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:10:40.869784Z node 25 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037896 followers: 0 2025-04-17T18:10:40.869829Z node 25 :TABLET_RESOLVER DEBUG: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [25:760:2200] 2025-04-17T18:10:40.869894Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result local node, try to connect [25:1090:2365] 2025-04-17T18:10:40.869926Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [25:1090:2365] 2025-04-17T18:10:40.870019Z node 25 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [25:1090:2365] 2025-04-17T18:10:40.870124Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [25:1090:2365] 2025-04-17T18:10:40.870160Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [25:1090:2365] 2025-04-17T18:10:40.870383Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037897] ::Bootstrap [25:1094:2368] 2025-04-17T18:10:40.870408Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037897] lookup [25:1094:2368] 2025-04-17T18:10:40.870463Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037897 entry.State: StInit ev: {EvForward TabletID: 72075186224037897 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:10:40.870567Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037897 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:10:40.870836Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 0} 2025-04-17T18:10:40.870877Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 1} 2025-04-17T18:10:40.870908Z node 24 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 2} 2025-04-17T18:10:40.871078Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 
CurrentLeader: [25:1018:2318] CurrentLeaderTablet: [25:1020:2319] CurrentGeneration: 2 CurrentStep: 0} 2025-04-17T18:10:40.871156Z node 25 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [25:1018:2318] CurrentLeaderTablet: [25:1020:2319] CurrentGeneration: 2 CurrentStep: 0} 2025-04-17T18:10:40.871229Z node 25 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037897 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037897 Cookie: 0 CurrentLeader: [25:1018:2318] CurrentLeaderTablet: [25:1020:2319] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:10:40.871255Z node 25 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037897 followers: 0 2025-04-17T18:10:40.871290Z node 25 :TABLET_RESOLVER DEBUG: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037897 followers: 0 countLeader 1 allowFollowers 0 winner: [25:1018:2318] 2025-04-17T18:10:40.871383Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037897] forward result local node, try to connect [25:1094:2368] 2025-04-17T18:10:40.871413Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037897]::SendEvent [25:1094:2368] 2025-04-17T18:10:40.871480Z node 25 :PIPE_SERVER DEBUG: [72075186224037897] Accept Connect Originator# [25:1094:2368] 2025-04-17T18:10:40.871593Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037897] connected with status OK role: Leader [25:1094:2368] 2025-04-17T18:10:40.871631Z node 25 :PIPE_CLIENT DEBUG: TClient[72075186224037897] send queued [25:1094:2368] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> KqpIndexes::SecondaryIndexInsert1 >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2025-04-17T18:10:32.204567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:32.204749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:32.204902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b71/r3tmp/tmpbm1uDW/pdisk_1.dat 2025-04-17T18:10:32.588818Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12472, node 1 2025-04-17T18:10:32.815780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:32.815843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:32.815874Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:32.816563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:32.818735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:32.905709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:32.905806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:32.919413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13540 2025-04-17T18:10:33.438248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:36.303511Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:36.348590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:36.348729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:36.386892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:36.389409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:36.617135Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.617702Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.618271Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.618630Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.618714Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.618796Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.618873Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.618943Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.619037Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:36.784183Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:36.784287Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:36.798687Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:36.922599Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:36.963854Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:36.963968Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:36.994895Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:36.997836Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:36.998049Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:36.998131Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:36.998203Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:36.998257Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:36.998319Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:36.998393Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:36.999616Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:37.035303Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:37.035413Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:37.041914Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:37.045772Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:37.045908Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:37.056737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:10:37.075969Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:37.076018Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:37.076090Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:10:37.086477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:37.093122Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:37.093278Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:37.253752Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:37.410554Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:37.478501Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:38.001188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:10:38.551073Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:38.719308Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:10:38.719364Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:38.719455Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2600:2952], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:38.720836Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2602:2954] 2025-04-17T18:10:38.721379Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2602:2954], schemeshard id = 72075186224037899 2025-04-17T18:10:39.717701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2728:3246], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:39.717847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:39.733699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T18:10:40.066653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3030:3293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:40.066795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:40.067891Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3035:3297]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:40.068074Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:40.068172Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-17T18:10:40.068219Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3038:3300] 2025-04-17T18:10:40.068275Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3038:3300] 2025-04-17T18:10:40.068775Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3039:3188] 2025-04-17T18:10:40.069014Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3038:3300], server id = [2:3039:3188], tablet id = 72075186224037894, status = OK 2025-04-17T18:10:40.069159Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3039:3188], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:10:40.069198Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:40.069337Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:40.069384Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3035:3297], StatRequests.size() = 1 2025-04-17T18:10:40.084473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3043:3304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:40.084582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:40.084968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3048:3309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:40.090435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-17T18:10:40.246351Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:10:40.246427Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:10:40.267668Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3038:3300], schemeshard count = 1 2025-04-17T18:10:40.519602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3050:3311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-17T18:10:40.786398Z node 1 :TX_PROXY ERROR: Actor# [1:3177:3381] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:40.796989Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3200:3397]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:40.797182Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:40.797222Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3200:3397], StatRequests.size() = 1 2025-04-17T18:10:40.839327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2f478kc0sjpa4qa3stk18h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU2Y2Y0ODItZTkxZmY4MC0zYzI1YzRhYi04ZjRkNDM0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:10:40.882955Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3242:3245]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:40.885373Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:40.885438Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:40.885882Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:40.885939Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:10:40.885987Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:40.898834Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-17T18:10:40.899176Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] Test command err: Trying to start YDB, gRPC: 21351, MsgBus: 65427 2025-04-17T18:09:23.234622Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345830409940499:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:23.235269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cd8/r3tmp/tmpyP7kjP/pdisk_1.dat 2025-04-17T18:09:23.719222Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:23.743099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:23.743184Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:23.747414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21351, node 1 2025-04-17T18:09:23.878759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:23.878781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:23.878793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:23.878914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65427 TClient is connected to server localhost:65427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:24.605993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.623109Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:24.641017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.790419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:24.981942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:25.064603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:27.022584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847589811323:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.022702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.346278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.380977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.426829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.459743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.498976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.561528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.608193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:27.668077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847589811981:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.668153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.668399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345847589811986:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:27.672068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:27.688135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345847589811988:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:27.767827Z node 1 :TX_PROXY ERROR: Actor# [1:7494345847589812043:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:28.231912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345830409940499:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:28.231977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:28.716602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:29.852035Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_SCHEME_CHANGED;details=Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940;tx_id=281474976710697; 2025-04-17T18:09:29.852063Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. at tablet# 72075186224037922;tx_id=281474976710697; 2025-04-17T18:09:29.860214Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. 2025-04-17T18:09:29.860479Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. 2025-04-17T18:09:29.860812Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. at tablet# 72075186224037930;tx_id=281474976710697; 2025-04-17T18:09:29.860835Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. 2025-04-17T18:09:29.861130Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345856179748635:2573], Table: `/Root/MultiShardIndexed` ([72057594046644480:17:1]), SessionActorId: [1:7494345851884780474:2573]Got SCHEME CHANGED for table `/Root/MultiShardIndexed`. ShardID=72075186224037940, Sink=[1:7494345856179748635:2573].{
: Error: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940, code: 2034 } 2025-04-17T18:09:29.861658Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494345856179748558:2573], SessionActorId: [1:7494345851884780474:2573], statusCode=SCHEME_ERROR. Issue=
: Error: Scheme changed. Table: `/Root/MultiShardIndexed`., code: 2028
: Error: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940, code: 2034 . sessionActorId=[1:7494345851884780474:2573]. isRollback=0 2025-04-17T18:09:29.861969Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkxMjFkNTQtZWM5NTA1ODQtZTkwMDA3MTAtOTc4ZGI0Nzc=, ActorId: [1:7494345851884780474:2573], ActorState: ExecuteState, TraceId: 01js2f22py6jfrsrkc66tb5vrg, got TEvKqpBuffer::TEvError in ExecuteState, status: SCHEME_ERROR send to: [1:7494345856179748594:2573] from: [1:7494345856179748558:2573] 2025-04-17T18:09:29.862065Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494345856179748594:2573] TxId: 281474976710697. Ctx: { TraceId: 01js2f22py6jfrsrkc66tb5vrg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzkxMjFkNTQtZWM5NTA1ODQtZTkwMDA3MTAtOTc4ZGI0Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. SCHEME_ERROR: {
: Error: Scheme changed. Table: `/Root/MultiShardIndexed`., code: 2028 subissue: {
: Error: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940, code: 2034 } } 2025-04-17T18:09:29.862349Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkxMjFkNTQtZWM5NTA1ODQtZTkwMDA3MTAtOTc4ZGI0Nzc=, ActorId: [1:7494345851884780474:2573], ActorState: ExecuteState, TraceId: 01js2f22py6jfrsrkc66tb5vrg, Create QueryResponse fo ... @ 0x195B3027 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x195B3027 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19EB1315 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19EB1315 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19EB1315 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x19E80CA8 12. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: Execute @ 0x195B21F3 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19E82575 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19EAB88C 15. ??:0: ?? @ 0x7F6D981A4D8F 16. ??:0: ?? @ 0x7F6D981A4E3F 17. ??:0: ?? @ 0x166C7028 Trying to start YDB, gRPC: 26821, MsgBus: 12313 2025-04-17T18:10:22.140296Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346084844073256:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:22.141150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cd8/r3tmp/tmpU3TJxa/pdisk_1.dat 2025-04-17T18:10:22.293926Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:22.310233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:22.310330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:22.312236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26821, node 2 2025-04-17T18:10:22.368498Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:22.368525Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:22.368533Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:22.368730Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12313 TClient is connected to server localhost:12313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:22.784121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:22.801917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:22.878131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:23.037679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:23.113451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:24.974651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346093434009557:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:24.974719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.023234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.049034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.075357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.117274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.147250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.215060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.245255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:25.293927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346097728977516:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.294029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.294147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346097728977521:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.297666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:25.306318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346097728977523:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:25.399336Z node 2 :TX_PROXY ERROR: Actor# [2:7494346097728977578:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:26.488891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:27.139472Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346084844073256:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:27.139535Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:36.818788Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-17T18:10:36.818821Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-17T18:10:36.818838Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-17T18:10:36.820531Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-17T18:10:37.249985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:37.250013Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:38.064986Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037939:1][72075186224037922][2:7494346149268601226:2647] Apply status: status# 2, reason# 7 2025-04-17T18:10:38.099627Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037939:1][72075186224037922][2:7494346153563569834:2647] Handshake status: status# 2, reason# 7 2025-04-17T18:10:38.103907Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-17T18:10:38.103951Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-17T18:10:38.107414Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-17T18:10:38.107451Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found 2025-04-17T18:10:38.107462Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-17T18:10:38.107472Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2025-04-17T18:10:38.108004Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037936 not found 2025-04-17T18:10:38.121583Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-17T18:10:38.121612Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-17T18:10:38.738226Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037938:1][72075186224037922][2:7494346153563571040:2646] Handshake status: status# 2, reason# 7 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 >> TraverseDatashard::TraverseOneTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: 2025-04-17T18:10:36.096170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:36.096315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:36.096465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b37/r3tmp/tmpSS5zjp/pdisk_1.dat 2025-04-17T18:10:36.451259Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5072, node 1 2025-04-17T18:10:36.669550Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:36.669612Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:36.669644Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:36.670330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:36.675749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:36.764914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:36.765012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:36.778493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14294 2025-04-17T18:10:37.298137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:40.316030Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:40.345210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:40.345298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:40.382281Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:40.384457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:40.614926Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.615356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.615908Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.616213Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.616286Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.616362Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.616425Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.616483Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.616583Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:40.779263Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:40.779369Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:40.792595Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:40.938320Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:40.981550Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:40.981627Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:41.009606Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:41.012810Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:41.013021Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:41.013093Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:41.013141Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:41.013221Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:41.013272Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:41.013321Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:41.014456Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:41.048353Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:41.048435Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:41.055169Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:41.058472Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:41.058572Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:41.069239Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:41.089447Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:41.089506Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:41.089578Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:41.098612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:41.105213Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:41.105349Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:41.290836Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:41.433447Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:41.531896Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:42.176780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.176926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.191010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:42.589784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2545:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.589950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.591350Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2550:3128]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:42.591566Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:42.591659Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2552:3130] 2025-04-17T18:10:42.591728Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2552:3130] 2025-04-17T18:10:42.592271Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2553:2992] 2025-04-17T18:10:42.592560Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2552:3130], server id = [2:2553:2992], tablet id = 72075186224037894, status = OK 2025-04-17T18:10:42.592733Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2553:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:10:42.592786Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:42.592985Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:42.593053Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2550:3128], StatRequests.size() = 1 2025-04-17T18:10:42.611077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2557:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.611249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.611640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3139], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.618357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:10:42.789018Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:10:42.789084Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:10:42.863122Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2552:3130], schemeshard count = 1 2025-04-17T18:10:43.158318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2564:3141], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:10:43.295428Z node 1 :TX_PROXY ERROR: Actor# [1:2690:3216] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:43.303832Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2713:3232]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:43.303961Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:10:43.303985Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2713:3232], StatRequests.size() = 1 2025-04-17T18:10:43.343675Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f49qg0n7wwey29cmwm9ad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjU2MzU0NDgtMzYzM2ViNGMtNGFhYjE4OGEtODQ1NmVjYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:10:43.411692Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2762:3048]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:43.414016Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:43.414062Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:10:43.414482Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:43.414539Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:10:43.414585Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:10:43.426392Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-17T18:10:43.426623Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> KqpIndexes::UpdateOnReadColumns [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 16159, MsgBus: 4799 2025-04-17T18:09:58.074353Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345982935969766:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:58.074427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001c58/r3tmp/tmpwjbj75/pdisk_1.dat 2025-04-17T18:09:58.597893Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:58.599118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:58.599203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:58.603910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16159, node 1 2025-04-17T18:09:58.858763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:58.858787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:58.858795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:58.858901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4799 TClient is connected to server localhost:4799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:59.598817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.622419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:59.635941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.784818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:59.967972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.049863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:01.535871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345995820873416:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.535988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:01.867732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.905471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.939164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:01.969869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.000833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.080063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.135277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:02.234431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346000115841386:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.234535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.236394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346000115841391:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.240178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:02.254958Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:10:02.255188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346000115841395:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:02.325670Z node 1 :TX_PROXY ERROR: Actor# [1:7494346000115841451:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:03.102286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345982935969766:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:03.102352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:03.425233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15722, MsgBus: 12335 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c58/r3tmp/tmpXN81d3/pdisk_1.dat 2025-04-17T18:10:05.996270Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:06.041472Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15722, node 2 2025-04-17T18:10:06.080239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:06.080315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:06.082234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:06.115074Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:06.115102Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:06.115110Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:06.115230Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12335 TClient is connected to server localhost:12335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:10:06.607301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.619811Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:06.635219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:06.722005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Ope ... 4976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:33.597168Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:33.663257Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494346131628364465:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:33.663347Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494346131628364470:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:33.663361Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:33.667347Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:33.677941Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494346131628364472:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:33.769769Z node 5 :TX_PROXY ERROR: Actor# [5:7494346131628364529:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:34.947360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:34.984105Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:10:35.019521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:10:35.110863Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494346118743460130:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:35.110935Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19711, MsgBus: 18236 2025-04-17T18:10:38.422417Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346153176545701:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:38.422484Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c58/r3tmp/tmpvmwKxK/pdisk_1.dat 2025-04-17T18:10:38.577866Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:38.599496Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:38.599615Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:38.601610Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19711, node 6 2025-04-17T18:10:38.646601Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:38.646638Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:38.646651Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:38.646791Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18236 TClient is connected to server localhost:18236 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:39.214074Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:39.232067Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:39.301948Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:39.482900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:39.542244Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:41.673345Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494346166061449374:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:41.673454Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:41.723420Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.756689Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.787217Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.816886Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.846035Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.878075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.944303Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.997417Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494346166061450040:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:41.997498Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:41.997525Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494346166061450045:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:42.001546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:42.011404Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494346166061450047:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:42.068757Z node 6 :TX_PROXY ERROR: Actor# [6:7494346170356417397:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:43.309671Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:43.350819Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:10:43.422738Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494346153176545701:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:43.422856Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:43.450251Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexInsert1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 1049, MsgBus: 7975 2025-04-17T18:09:43.717965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345915055568537:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:43.718162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c84/r3tmp/tmpk6OtsF/pdisk_1.dat 2025-04-17T18:09:44.037634Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1049, node 1 2025-04-17T18:09:44.106745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:44.106921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:44.113093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:44.122865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:44.122895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:44.122911Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:44.123017Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7975 TClient is connected to server localhost:7975 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:44.685724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.712201Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:44.724406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.856050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.009276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:45.073491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:46.634458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345927940472200:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:46.634545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:46.950314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.977924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.003083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.036769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.065796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.100694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.143827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:47.227226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932235440160:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.227315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.227597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345932235440165:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:47.232375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:47.243809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345932235440167:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:47.305482Z node 1 :TX_PROXY ERROR: Actor# [1:7494345932235440222:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:48.323188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.718461Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345915055568537:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:48.718536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:48.770598Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-17T18:09:48.771508Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:48.771592Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345936530408099:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:48.771712Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:09:48.771767Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345936530408099:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, 
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:48.772037Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:48.772074Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, ... SS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346054013971409:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:16.264898Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:10:16.265189Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:16.265241Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346054013971409:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:16.265254Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715674, subscribers count# 1 2025-04-17T18:10:16.265989Z node 2 
:BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715674 2025-04-17T18:10:16.266193Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715674 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:10:25.153089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:25.153115Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:26.003126Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715674 Trying to start YDB, gRPC: 22126, MsgBus: 31889 2025-04-17T18:10:42.254833Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346168762760764:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:42.254921Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c84/r3tmp/tmpds4kFr/pdisk_1.dat 2025-04-17T18:10:42.372288Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:42.405595Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:42.405686Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:42.407355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22126, node 3 2025-04-17T18:10:42.439830Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:42.439865Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:42.439879Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:42.440029Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31889 TClient is connected to server localhost:31889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:10:42.886790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:42.904280Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:42.971949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:43.182816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:43.262598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:45.614918Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346181647664442:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.614996Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.656450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.682549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.706226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.770444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.797196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.826915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.856466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.934504Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346181647665106:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.934563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.934651Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346181647665111:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.938114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:45.947339Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494346181647665113:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:10:46.049006Z node 3 :TX_PROXY ERROR: Actor# [3:7494346185942632467:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:46.951492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:47.255103Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494346168762760764:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.255167Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:47.358594Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2025-04-17T18:10:06.713894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346016683010184:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.716490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001959/r3tmp/tmpvT7w2A/pdisk_1.dat 2025-04-17T18:10:07.437650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.437750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.459861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:07.551942Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6156, node 1 2025-04-17T18:10:07.707583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.707613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.707620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:07.715346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14676 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.314021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.527730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14676 2025-04-17T18:10:08.783029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:09.231568Z node 1 :PERSQUEUE ERROR: [PQ: 72075186224037888, Partition: 0, State: StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2025-04-17T18:10:11.867323Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346036060824356:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:11.867372Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001959/r3tmp/tmpDjQRQI/pdisk_1.dat 2025-04-17T18:10:12.076252Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8723, node 4 2025-04-17T18:10:12.226303Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:12.226450Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:12.246887Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:12.274575Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:12.274598Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:12.274625Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:12.274758Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27311 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:12.562541Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:12.703409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:27311 2025-04-17T18:10:12.975429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:16.867847Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494346036060824356:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:16.867922Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:27.068378Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:27.068404Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:42.519473Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494346168417681431:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:42.519551Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001959/r3tmp/tmpbJypaF/pdisk_1.dat 2025-04-17T18:10:42.646622Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:42.680771Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:42.680889Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:42.683390Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2104, node 7 2025-04-17T18:10:42.731205Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:42.731243Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:42.731253Z node 7 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:42.731499Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:42.951861Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:43.004389Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:26547 2025-04-17T18:10:43.190858Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:46.258632Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494346189056436132:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:46.258692Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001959/r3tmp/tmpdZ38po/pdisk_1.dat 2025-04-17T18:10:46.350276Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:46.373246Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:46.373313Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:46.375380Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62181, node 10 2025-04-17T18:10:46.418432Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:46.418463Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:46.418472Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:46.418643Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27966 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:46.616168Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:46.665138Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:27966 2025-04-17T18:10:46.879187Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 13447, MsgBus: 3664 2025-04-17T18:09:45.315487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345923670686225:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:45.315544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c7f/r3tmp/tmpY4mjAz/pdisk_1.dat 2025-04-17T18:09:45.711392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:45.750450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:45.750664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13447, node 1 2025-04-17T18:09:45.755100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:45.810732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:45.810753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:45.810760Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:45.810853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3664 TClient is connected to server localhost:3664 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:46.485229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:46.501214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:46.519051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:46.656126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:46.828653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:46.904748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:48.482853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345936555589880:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.483060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:48.800921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.877722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.911281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.953333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:48.988739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.065952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.139617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:49.224662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345940850557845:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.224931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.225210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345940850557850:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:49.228342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:49.239581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345940850557852:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:49.341051Z node 1 :TX_PROXY ERROR: Actor# [1:7494345940850557910:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:50.327326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345923670686225:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:50.327396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:50.437523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:50.690934Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-17T18:09:50.691744Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:50.691809Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345945145525792:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:50.691919Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:09:50.691986Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345945145525792:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, 
SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:50.692250Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:50.692288Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , Subscrib ... :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-17T18:10:21.240910Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:21.240962Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:21.241394Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715674, txId# 281474976710765 2025-04-17T18:10:21.241456Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-04-17T18:10:21.242326Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:21.242380Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:21.243628Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715674, cookie: 281474976715674, txId: 281474976710765, status: StatusAccepted 2025-04-17T18:10:21.243802Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710765 SchemeshardId: 72057594046644480 PathId: 17 2025-04-17T18:10:21.244792Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-17T18:10:21.244832Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-17T18:10:21.245755Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:21.245847Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: 
Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:21.248075Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-17T18:10:21.249336Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710765, buildInfoId: 281474976715674 2025-04-17T18:10:21.249404Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710765, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:21.249912Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:21.249971Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:21.249991Z node 2 :BUILD_INDEX INFO: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:10:21.250320Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:21.250396Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346078408563069:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:21.250419Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715674, subscribers count# 1 2025-04-17T18:10:21.250666Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715674 2025-04-17T18:10:21.250854Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715674 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:10:31.003820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:31.003848Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:31.139606Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715674 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:79:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:78:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:78:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> DataShardReadTableSnapshots::ReadTableSnapshot >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T18:10:47.562923Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346192385108553:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.563038Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:47.602123Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346190403221052:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.602189Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001985/r3tmp/tmpupLlH6/pdisk_1.dat 2025-04-17T18:10:47.734661Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.735984Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.938303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.938760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.941693Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:47.942478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.946344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.946407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.983859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.988627Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26155, node 1 2025-04-17T18:10:47.994682Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:47.994713Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:48.106072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001985/r3tmp/yandexezDTMQ.tmp 2025-04-17T18:10:48.106098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001985/r3tmp/yandexezDTMQ.tmp 2025-04-17T18:10:48.107336Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001985/r3tmp/yandexezDTMQ.tmp 2025-04-17T18:10:48.107518Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:48.275207Z INFO: TTestServer started on Port 3538 GrpcPort 26155 TClient is connected to server localhost:3538 PQClient connected to localhost:26155 === TenantModeEnabled() = 1 === Init PQ - start server on port 26155 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:48.597398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:10:48.597572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.597748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:10:48.598029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:48.598066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.599712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:48.599807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:10:48.599927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.599957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:10:48.599971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:10:48.599981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-17T18:10:48.601628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.601669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:10:48.601686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
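A reading aid for the schemeshard trace above: the bare numeric transitions logged by "Change state for txid" (2 -> 3, 3 -> 128, and 128 -> 240 a few records later) line up with the named sub-operation phases in the surrounding messages. A minimal sketch of that mapping, inferred from this log alone; the enum and its identifiers below are illustrative, not the real definition in the YDB schemeshard sources:

    // Hypothetical names; the values are the ones printed by "Change state for txid".
    enum ETxStateSketch {
        CreateParts    = 2,    // TCreateParts ProgressState ("no shards to create, do next state")
        ConfigureParts = 3,    // NSubDomainState::TConfigureParts ProgressState
        Propose        = 128,  // NSubDomainState::TPropose, waits for TEvOperationPlan from the coordinator
        Done           = 240,  // TDone ProgressState, parts reported 1/1 and the tx state removed
    };

Each test block in this report replays the same ESchemeOpAlterSubDomain bootstrap, so this sequence recurs below with different timestamps and transaction ids.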
2025-04-17T18:10:48.602627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:48.602656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:10:48.602702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:48.604092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.604119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.604135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.604191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.617846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:10:48.619744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:10:48.619994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:10:48.622714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913448670, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:48.622882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913448670 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:48.622947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.623307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:10:48.623357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.623586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:10:48.623648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:10:48.625845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:48.625877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:10:48.626125Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:48.626147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494346196680076583:2441], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T18:10:48.626206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.626235Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T18:10:48.626315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:48.626341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.626370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:48.626380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.626397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T18:10:48.626414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.626445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T18:10:48.626456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for tx ... E `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:10:55.828821Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494346224850756573:2381] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:10:55.828838Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:10:55.829232Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-17T18:10:55.829333Z node 3 :PERSQUEUE INFO: new Cookie 12345678|8596c62c-91af8fa3-2216afd7-7130a96e_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-17T18:10:55.829673Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|8596c62c-91af8fa3-2216afd7-7130a96e_0 2025-04-17T18:10:55.830465Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|8596c62c-91af8fa3-2216afd7-7130a96e_0 grpc read done: success: 0 data: 2025-04-17T18:10:55.830481Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8596c62c-91af8fa3-2216afd7-7130a96e_0 grpc read failed 2025-04-17T18:10:55.830623Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|8596c62c-91af8fa3-2216afd7-7130a96e_0 2025-04-17T18:10:55.830641Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8596c62c-91af8fa3-2216afd7-7130a96e_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-04-17T18:10:55.830892Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-04-17T18:10:55.844888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976710665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:50768" , at schemeshard: 72057594046644480 2025-04-17T18:10:55.845053Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:55.845228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-17T18:10:55.845252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-17T18:10:55.845436Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:55.845468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:55.845534Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-17T18:10:55.845545Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T18:10:55.845570Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 
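The TTableHelper statements that the partition chooser logs are single-line; the tail of the UPDATE is visible just above, and the full SELECT/UPSERT/UPDATE set is printed verbatim in the blocks further down. Reformatted copies of those three queries, content unchanged (YQL):

    --!syntax_v1
    DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

    --!syntax_v1
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
    UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
        (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    --!syntax_v1
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;
    UPDATE `//Root/.metadata/TopicPartitionsMapping`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition;

Read together with the ReplyResult records (Partition=0, SeqNo=0), this shows the chooser persisting a ProducerId-to-partition mapping keyed by hash and topic, so a returning producer is routed back to the partition it wrote to before.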
2025-04-17T18:10:55.845589Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T18:10:55.845627Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-17T18:10:55.845678Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-04-17T18:10:55.845709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-17T18:10:55.845724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T18:10:55.845739Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-17T18:10:55.845751Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-04-17T18:10:55.845762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-17T18:10:55.847318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:55.847493Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-04-17T18:10:55.847623Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:55.847638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-17T18:10:55.847800Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:55.847819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7494346211965853650:2385], at schemeshard: 72057594046644480, txId: 281474976710665, path id: 10 2025-04-17T18:10:55.848156Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-04-17T18:10:55.848258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-04-17T18:10:55.848276Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2025-04-17T18:10:55.848290Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-17T18:10:55.848311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046644480, LocalPathId: 10] was 4 2025-04-17T18:10:55.848389Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 2025-04-17T18:10:55.849659Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-17T18:10:55.849684Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-17T18:10:55.849737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710665 2025-04-17T18:10:55.849989Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-04-17T18:10:55.850046Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:50750 2025-04-17T18:10:55.850058Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:50750 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-17T18:10:55.850064Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:10:55.850728Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-17T18:10:55.850838Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T18:10:55.850862Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:10:55.850871Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:10:55.850917Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494346224850756606:2392] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:10:55.850942Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:10:55.851287Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-17T18:10:55.851371Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|f6a6f44e-559a05be-4eb6cc6e-db2ece02_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-17T18:10:55.851645Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|f6a6f44e-559a05be-4eb6cc6e-db2ece02_0 2025-04-17T18:10:55.852557Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|f6a6f44e-559a05be-4eb6cc6e-db2ece02_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-17T18:10:55.852695Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|f6a6f44e-559a05be-4eb6cc6e-db2ece02_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-17T18:10:55.852747Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|f6a6f44e-559a05be-4eb6cc6e-db2ece02_0 2025-04-17T18:10:55.852932Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|f6a6f44e-559a05be-4eb6cc6e-db2ece02_0 is DEAD 2025-04-17T18:10:55.853194Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T18:10:47.566149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346192804364270:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.566256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:47.599573Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346189639444138:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.599669Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001999/r3tmp/tmpNYWinz/pdisk_1.dat 2025-04-17T18:10:47.762294Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.762554Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.945927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.946024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.949790Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:47.950476Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-17T18:10:47.951569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.984832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.984881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.989668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63035, node 1 2025-04-17T18:10:48.106836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001999/r3tmp/yandexcYpZEl.tmp 2025-04-17T18:10:48.106865Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001999/r3tmp/yandexcYpZEl.tmp 2025-04-17T18:10:48.108950Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001999/r3tmp/yandexcYpZEl.tmp 2025-04-17T18:10:48.109118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:48.275302Z INFO: TTestServer started on Port 2240 GrpcPort 63035 TClient is connected to server localhost:2240 PQClient connected to localhost:63035 === TenantModeEnabled() = 1 === Init PQ - start server on port 63035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
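Back in the Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken block above, the session is closed with "got another 'update_token_request' while previous still in progress, only single token update is allowed at a time": the write session tolerates at most one in-flight token update, and a second concurrent one is fatal to the session. A client that refreshes tokens therefore has to serialize its updates. A generic illustration of that discipline, plain C++ and deliberately not the YDB SDK API:

    #include <mutex>
    #include <string>

    // Serializes token updates: at most one update_token_request in flight;
    // a refresh that arrives meanwhile is parked and sent after the response.
    class TokenUpdateGate {
    public:
        // Returns true if the caller may send the update now.
        bool TryBegin(const std::string& token) {
            std::lock_guard<std::mutex> lock(Mu);
            if (InFlight) {
                Pending = token;   // overwrite: only the newest token matters
                HasPending = true;
                return false;
            }
            InFlight = true;
            return true;
        }

        // Call on update_token_response; yields a parked token to send next, if any.
        bool Finish(std::string* next) {
            std::lock_guard<std::mutex> lock(Mu);
            if (!HasPending) {
                InFlight = false;
                return false;
            }
            *next = Pending;
            HasPending = false;
            return true;  // InFlight stays true for the follow-up request
        }

    private:
        std::mutex Mu;
        bool InFlight = false;
        bool HasPending = false;
        std::string Pending;
    };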
2025-04-17T18:10:48.613878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:10:48.614068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.614302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:10:48.614512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:48.614581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.616451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:48.616604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:10:48.616751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.616813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:10:48.616833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:10:48.616853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
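The records that follow are the same propose/plan handshake the first block went through above: the schemeshard hands the accepted proposal to coordinator 72057594046316545, the coordinator assigns a global plan step, and the plan comes back as TEvOperationPlan carrying MediatorID 72057594046382081, at which point the operation moves 128 -> 240. This appears to be the usual YDB coordinator/mediator planning path, with the tablet ids being the well-known system tablets of the test cluster. As a flow sketch, annotated with the matching log phrases:

    // Propose accepted (StatusAccepted)
    //   -> send to coordinator 72057594046316545     ("DoPropose send propose to coordinator", msg type 269090816)
    //   -> coordinator plans the step                ("TTxOperationPlanStep Execute, stepId: 1744913448670")
    //   -> TEvOperationPlan back to the schemeshard  ("NSubDomainState::TPropose HandleReply TEvOperationPlan")
    //   -> state 128 -> 240, publish to scheme board ("TTxPublishToSchemeBoard DoExecute")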
2025-04-17T18:10:48.618374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:48.618422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:10:48.618447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:48.618612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.618647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:10:48.618667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T18:10:48.620051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.620086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.620105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.620141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.623422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:10:48.624907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:10:48.625076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:10:48.627425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913448670, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:48.627805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913448670 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:48.627904Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.628313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:10:48.628363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.628726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:10:48.628789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:10:48.630813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:48.630836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:10:48.631098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:48.631128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494346192804364935:2404], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T18:10:48.631178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.631224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T18:10:48.631348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:48.631410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.631438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:48.631448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.631459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T18:10:48.631471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.631482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T18:10:48.631490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-17T18:10:48.631543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046 ... 8:10:56.290218Z :INFO: [/Root] [/Root] [9aced20-abdfe6b5-5806d08a-dc1f729e] [null] Closing session to cluster: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500032 " } 2025-04-17T18:10:56.290361Z :NOTICE: [/Root] [/Root] [9aced20-abdfe6b5-5806d08a-dc1f729e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:10:56.290405Z :DEBUG: [/Root] [/Root] [9aced20-abdfe6b5-5806d08a-dc1f729e] [null] Abort session to cluster Got new read session event: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500032 " } 2025-04-17T18:10:56.290484Z :INFO: [/Root] [/Root] [9aced20-abdfe6b5-5806d08a-dc1f729e] Closing read session. Close timeout: 0.000000s 2025-04-17T18:10:56.290544Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:10:56.290582Z :INFO: [/Root] [/Root] [9aced20-abdfe6b5-5806d08a-dc1f729e] Counters: { Errors: 1 CurrentSessionLifetimeMs: 12 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:10:56.290682Z :NOTICE: [/Root] [/Root] [9aced20-abdfe6b5-5806d08a-dc1f729e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:10:56.290941Z :INFO: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] Starting read session 2025-04-17T18:10:56.291008Z :DEBUG: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] Starting session to cluster null (localhost:21408) 2025-04-17T18:10:56.291173Z :DEBUG: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:10:56.291204Z :DEBUG: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:10:56.291239Z :DEBUG: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] [null] Reconnecting session to cluster null in 0.000000s 2025-04-17T18:10:56.293651Z :DEBUG: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] [null] Successfully connected. Initializing session 2025-04-17T18:10:56.294036Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-17T18:10:56.294052Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2025-04-17T18:10:56.294406Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-17T18:10:56.294498Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 read init: from# ipv6:[::1]:47052, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-17T18:10:56.294564Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 auth for : consumer_aba 2025-04-17T18:10:56.294861Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 Handle describe topics response 2025-04-17T18:10:56.294957Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 auth is DEAD 2025-04-17T18:10:56.294977Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 auth ok: topics# 1, initDone# 0 2025-04-17T18:10:56.295897Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 register session: topic# /Root/account1/write_topic 2025-04-17T18:10:56.296075Z :INFO: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] [null] Server session id: consumer_aba_3_2_6169166508415372947_v1 2025-04-17T18:10:56.296331Z :DEBUG: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:10:56.296490Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7494346230390145579:2415] connected; active server actors: 1 2025-04-17T18:10:56.296539Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 grpc read done: success# 1, data# { read { } } 2025-04-17T18:10:56.296703Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 got read request: guid# 80217bf4-697fd56e-a29544b9-458632f3 2025-04-17T18:10:56.296809Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: 
[72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7494346230390145579:2415] session consumer_aba_3_2_6169166508415372947_v1 2025-04-17T18:10:56.296860Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-04-17T18:10:56.296903Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-04-17T18:10:56.296941Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_6169166508415372947_v1" (Sender=[3:7494346230390145576:2415], Pipe=[3:7494346230390145579:2415], Partitions=[], ActiveFamilyCount=0) 2025-04-17T18:10:56.296974Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-04-17T18:10:56.297021Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-17T18:10:56.297066Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_6169166508415372947_v1" (Sender=[3:7494346230390145576:2415], Pipe=[3:7494346230390145579:2415], Partitions=[], ActiveFamilyCount=0) 2025-04-17T18:10:56.297133Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_6169166508415372947_v1" sender [3:7494346230390145576:2415] lock partition 0 for ReadingSession "consumer_aba_3_2_6169166508415372947_v1" (Sender=[3:7494346230390145576:2415], Pipe=[3:7494346230390145579:2415], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-17T18:10:56.297236Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-17T18:10:56.297261Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000218s 2025-04-17T18:10:56.298818Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_6169166508415372947_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7494346230390145579 RawX2: 4503612512274799 } Path: "/Root/account1/write_topic" } 2025-04-17T18:10:56.299555Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-04-17T18:10:56.299782Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7494346230390145581:2418] 2025-04-17T18:10:56.299810Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_6169166508415372947_v1:1 with generation 1 2025-04-17T18:10:56.306419Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1744913456178 CreateTimestampMS: 1744913456176 SizeLag: 165 WriteTimestampEstimateMS: 1744913456178 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-17T18:10:56.306462Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-04-17T18:10:56.306556Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-04-17T18:10:56.307415Z :INFO: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] Closing read session. Close timeout: 0.000000s 2025-04-17T18:10:56.307469Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-04-17T18:10:56.307507Z :INFO: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:10:56.307594Z :NOTICE: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T18:10:56.307635Z :DEBUG: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] [null] Abort session to cluster 2025-04-17T18:10:56.308363Z :NOTICE: [/Root] [/Root] [e7947bf3-94274ae5-f8bbb8f-872d943d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:10:56.308476Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 grpc read done: success# 0, data# { } 2025-04-17T18:10:56.308497Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 grpc read failed 2025-04-17T18:10:56.308530Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 grpc closed 2025-04-17T18:10:56.308560Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6169166508415372947_v1 is DEAD 2025-04-17T18:10:56.308797Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_6169166508415372947_v1 2025-04-17T18:10:56.309676Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7494346230390145579:2415] disconnected; active server actors: 1 2025-04-17T18:10:56.309740Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7494346230390145579:2415] client consumer_aba disconnected session consumer_aba_3_2_6169166508415372947_v1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T18:10:47.565112Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346191962374664:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.565414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:47.616121Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346191779988368:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.616348Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:47.773149Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.773231Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001976/r3tmp/tmpE0fs4K/pdisk_1.dat 2025-04-17T18:10:47.938590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.938710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.941556Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:47.942651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.968912Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:47.990783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.990851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11245, node 
1 2025-04-17T18:10:47.996289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.997422Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:47.999212Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:48.107250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001976/r3tmp/yandexK1OSDB.tmp 2025-04-17T18:10:48.107290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001976/r3tmp/yandexK1OSDB.tmp 2025-04-17T18:10:48.107435Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001976/r3tmp/yandexK1OSDB.tmp 2025-04-17T18:10:48.107574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:48.275255Z INFO: TTestServer started on Port 17128 GrpcPort 11245 TClient is connected to server localhost:17128 PQClient connected to localhost:11245 === TenantModeEnabled() = 1 === Init PQ - start server on port 11245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
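Each block prints the same one-line TClient::Ls response (this is its third occurrence). The same content reflowed, nothing added or repaired, ending exactly where the harness truncated it:

    Status: 1  StatusCode: SUCCESS  SchemeStatus: 0
    PathDescription {
      Self {
        Name: "Root"  PathId: 1  SchemeshardId: 72057594046644480
        PathType: EPathTypeDir  CreateFinished: true  CreateTxId: 1  CreateStep: 0
        ParentPathId: 1  PathState: EPathStateNoChanges  Owner: "root@builtin"
        ACL: ""  EffectiveACL: ""  PathVersion: 2  PathSubType: EPathSubTypeEmpty
        Version {
          GeneralVersion: 2  ACLVersion: 0  EffectiveACLVersion: 0
          UserAttrsVersion: 1  ChildrenVersion: 1  SubDomainVersion: 0
          SecurityStateVersion: 0
        }
        ChildrenExist: false
      }
      Children {
        Name: ".sys"  PathId: 18446744073709551615  SchemeshardId: 72057594046644480
        PathType: EPathTypeDir  CreateFinished: true  CreateTxId: 0  CreateStep: 0
        ParentPathId: 18446744073709551615
      }
      DomainDescription {
        SchemeShardId_Depricated: 72057594046644480  PathId_Depricated: 1
        ProcessingParams { Version: 0  Pl... (TRUNCATED)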
2025-04-17T18:10:48.594337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:10:48.594549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.594690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:10:48.594875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:48.594902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.596410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:48.596545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:10:48.596697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.596737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:10:48.596753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:10:48.596763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-17T18:10:48.597582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:48.597607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:10:48.597629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:48.598383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.598429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:10:48.598442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T18:10:48.599735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.599761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.599805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.599830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.603997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:10:48.608946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:10:48.609093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:10:48.611144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913448656, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:48.611248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913448656 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:48.611280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.611565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:10:48.611603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:48.611798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:10:48.611846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:10:48.613415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:48.613483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:10:48.613648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:48.613675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494346191962375359:2415], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T18:10:48.613713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.613731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T18:10:48.613816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:48.613832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.613849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:48.613865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.613881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T18:10:48.613908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:48.613921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T18:10:48.613929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for ... 
nnection 2025-04-17T18:10:55.815471Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-17T18:10:55.815669Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-04-17T18:10:55.815727Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:48858 2025-04-17T18:10:55.815739Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:48858 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-17T18:10:55.815744Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:10:55.815846Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-04-17T18:10:55.816292Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-17T18:10:55.816397Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T18:10:55.816408Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:10:55.816413Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:10:55.816434Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494346225689209807:2392] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:10:55.816458Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:10:55.816816Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-17T18:10:55.816856Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|8e18c1c6-3cf9c5d2-aa08f507-b44d4f03_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-17T18:10:55.817105Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|8e18c1c6-3cf9c5d2-aa08f507-b44d4f03_0 ===Assert streaming op1 ===Assert streaming op2 2025-04-17T18:10:55.817832Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|8e18c1c6-3cf9c5d2-aa08f507-b44d4f03_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:10:55.818085Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-17T18:10:55.818314Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:10:55.821878Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2025-04-17T18:10:55.828482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:48882" , at schemeshard: 72057594046644480 2025-04-17T18:10:55.828603Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:55.828692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-17T18:10:55.828706Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-17T18:10:55.828814Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:55.828838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:55.828897Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-04-17T18:10:55.828915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-17T18:10:55.828930Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-04-17T18:10:55.828938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-17T18:10:55.829017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-17T18:10:55.829065Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, 
ready parts: 1/1, is published: false 2025-04-17T18:10:55.829084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-17T18:10:55.829093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-17T18:10:55.829103Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-04-17T18:10:55.829112Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2025-04-17T18:10:55.829122Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2025-04-17T18:10:55.830537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:55.830680Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-04-17T18:10:55.830793Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:55.830813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-17T18:10:55.830934Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:55.830955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7494346212804306828:2365], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 10 2025-04-17T18:10:55.831357Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-04-17T18:10:55.831404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-04-17T18:10:55.831411Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2025-04-17T18:10:55.831422Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2025-04-17T18:10:55.831430Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-17T18:10:55.831503Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 ===Wait for session created with token with removed ACE to die2025-04-17T18:10:55.832651Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 2025-04-17T18:10:56.553451Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7494346229984177137:2401], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:10:56.553615Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTI3MGM2MDgtNGY1YWViYzMtYTc3MjMxNjEtYjM4YjhjYjQ=, ActorId: [3:7494346229984177130:2397], ActorState: ExecuteState, TraceId: 01js2f4qcm17taza7041tncgn8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:10:56.553970Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:10:56.817617Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:10:56.818481Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|8e18c1c6-3cf9c5d2-aa08f507-b44d4f03_0 describe result for acl check 2025-04-17T18:10:56.818617Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|8e18c1c6-3cf9c5d2-aa08f507-b44d4f03_0 2025-04-17T18:10:56.819002Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|8e18c1c6-3cf9c5d2-aa08f507-b44d4f03_0 is DEAD status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2025-04-17T18:10:56.819363Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> DataShardReadTableSnapshots::ReadTableSplitBefore >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> DataShardReadTableSnapshots::ReadTableDropColumn >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 8245, MsgBus: 23722 2025-04-17T18:10:00.391814Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345991529576702:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:00.392673Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c3b/r3tmp/tmpJ14WQK/pdisk_1.dat 2025-04-17T18:10:00.814260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:00.818323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:00.818418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:00.821009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8245, node 1 2025-04-17T18:10:00.915954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:00.915980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:00.915987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:00.916122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23722 TClient is connected to server localhost:23722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:01.516171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.564729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.723418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.873495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.963353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
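
The index that this test builds is described by the Settings block logged further below: source_path /Root/TestTable, index column emb, metric SIMILARITY_COSINE, vector_type VECTOR_TYPE_UINT8, vector_dimension 2, clusters 2, levels 1. As a rough YQL sketch of an equivalent DDL statement — the ALTER TABLE ... ADD INDEX form and the WITH-parameter names are assumptions for illustration, not taken from this log:

    -- hypothetical sketch; parameter names assumed, values taken from the logged Settings block
    ALTER TABLE `/Root/TestTable` ADD INDEX index
        GLOBAL USING vector_kmeans_tree
        ON (emb)
        WITH (similarity=cosine, vector_type="uint8", vector_dimension=2, clusters=2, levels=1);

The BUILD_INDEX records that follow (State: Locking -> ... -> Unlocking -> Done) trace the background build that such a statement kicks off.
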
2025-04-17T18:10:03.675443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346004414480220:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.675562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:04.040138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.073805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.107074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.140636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.212887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.282567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.370018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.444802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346008709448190:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:04.444898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:04.445152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346008709448195:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:04.449479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:04.460777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346008709448197:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:04.515944Z node 1 :TX_PROXY ERROR: Actor# [1:7494346008709448249:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:05.390440Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345991529576702:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:05.390519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:05.556434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:05.819358Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-17T18:10:05.821212Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:05.821284Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494346013004416102:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:05.821411Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:10:05.821462Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494346013004416102:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, 
SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:05.821767Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:05.821804Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494346013004416102:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281 ... 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.060239Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715674, txId# 281474976710764 2025-04-17T18:10:33.060298Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.060564Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.060615Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: 
Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.061929Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715674, cookie: 281474976715674, txId: 281474976710764, status: StatusAccepted 2025-04-17T18:10:33.062053Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710764 SchemeshardId: 72057594046644480 PathId: 17 2025-04-17T18:10:33.062439Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.062493Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, 
Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.063910Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710764, buildInfoId: 281474976715674 2025-04-17T18:10:33.063979Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710764, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.064235Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.064305Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.064338Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:10:33.064569Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.064637Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346126471913945:2535], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.064659Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715674, subscribers count# 1 2025-04-17T18:10:33.064912Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715674 2025-04-17T18:10:33.065092Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715674 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:10:33.067962Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-17T18:10:33.067996Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-17T18:10:33.068013Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-17T18:10:33.604248Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346109292041691:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:33.604319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:42.965731Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715674 2025-04-17T18:10:43.694745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:43.694774Z node 2 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T18:10:52.735000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346214029727161:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:52.735195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:52.757799Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346212511918325:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:52.757848Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:52.906647Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:52.909742Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001966/r3tmp/tmpqSljUG/pdisk_1.dat 2025-04-17T18:10:53.066495Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:53.076947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:53.077093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:53.103238Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:53.104197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21672, node 1 2025-04-17T18:10:53.135801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:53.135885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:53.139908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:53.162125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001966/r3tmp/yandexnFdIup.tmp 2025-04-17T18:10:53.162149Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001966/r3tmp/yandexnFdIup.tmp 2025-04-17T18:10:53.162276Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001966/r3tmp/yandexnFdIup.tmp 2025-04-17T18:10:53.162387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:53.211023Z INFO: TTestServer started on Port 24262 GrpcPort 21672 TClient is connected to server localhost:24262 PQClient connected to localhost:21672 === TenantModeEnabled() = 1 === Init PQ - start server on port 21672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
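
The TTableHelper statements that the partition chooser prints later in this test's log all target the internal table `//Root/.metadata/TopicPartitionsMapping`. The same three statements, reformatted here for readability (statement text exactly as logged below; the UPSERT and UPDATE are each preceded in the log by their own DECLARE list, following the same pattern as the SELECT):

    --!syntax_v1
    DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

    UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
        (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    UPDATE `//Root/.metadata/TopicPartitionsMapping`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition;
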
2025-04-17T18:10:53.554027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:10:53.554284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:53.554463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:10:53.554665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:53.554731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:53.557256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:53.557434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 2025-04-17T18:10:53.557632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:53.557702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:10:53.557723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:10:53.557753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-17T18:10:53.558629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:53.558664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:10:53.558693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:53.559671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:53.559712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:10:53.559727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T18:10:53.561484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:53.561517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:53.561535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 
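
The MODIFY ACL audit record further below (add access: +W:test_user@builtin on /Root/acc/topic1) is applied by the test through ESchemeOpModifyACL with a serialized DiffACL blob. A hypothetical YQL equivalent of the same grant — the GRANT statement form and the permission name are assumptions, not taken from this log:

    -- hypothetical sketch; the test itself issues ESchemeOpModifyACL directly, not YQL
    GRANT 'ydb.generic.write' ON `/Root/acc/topic1` TO `test_user@builtin`;
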
2025-04-17T18:10:53.561567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:53.564772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:10:53.566544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:10:53.566712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:10:53.569177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913453612, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:53.569323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913453612 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:53.569363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:53.569638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:10:53.569680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:53.569837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:10:53.569903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:10:53.571607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:53.571644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:10:53.571842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:53.571869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494346218324695103:2394], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T18:10:53.571928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:53.571953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T18:10:53.572059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:53.572081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:53.572137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:53.572148Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:53.572170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T18:10:53.572191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:53.572216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T18:10:53.572225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-17T18:10:53.572281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... ER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:11:00.654132Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494346249316911577:2380] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:11:00.654149Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:11:00.654645Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-17T18:11:00.654728Z node 3 :PERSQUEUE INFO: new Cookie 12345678|b1c6975c-1bbae907-413ec6db-4747c644_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-17T18:11:00.655095Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|b1c6975c-1bbae907-413ec6db-4747c644_0 2025-04-17T18:11:00.656064Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|b1c6975c-1bbae907-413ec6db-4747c644_0 grpc read done: success: 0 data: 2025-04-17T18:11:00.656082Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|b1c6975c-1bbae907-413ec6db-4747c644_0 grpc read failed 2025-04-17T18:11:00.656226Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|b1c6975c-1bbae907-413ec6db-4747c644_0 2025-04-17T18:11:00.656238Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|b1c6975c-1bbae907-413ec6db-4747c644_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-04-17T18:11:00.656513Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-04-17T18:11:00.671726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:47050" , at schemeshard: 72057594046644480 2025-04-17T18:11:00.671828Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:00.671935Z 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-17T18:11:00.671950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-17T18:11:00.672057Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-17T18:11:00.672079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:00.672156Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-17T18:11:00.672172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-17T18:11:00.672190Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-17T18:11:00.672198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-17T18:11:00.672234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-17T18:11:00.672304Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-04-17T18:11:00.672328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-17T18:11:00.672339Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-17T18:11:00.672349Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-17T18:11:00.672358Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-04-17T18:11:00.672368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-17T18:11:00.673904Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:11:00.674093Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-04-17T18:11:00.674259Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:11:00.674283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-17T18:11:00.674465Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:11:00.674484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7494346236432008643:2381], at schemeshard: 72057594046644480, txId: 
281474976715665, path id: 10 2025-04-17T18:11:00.674887Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-17T18:11:00.674968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-17T18:11:00.674993Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-17T18:11:00.675007Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-17T18:11:00.675023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-17T18:11:00.675092Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-04-17T18:11:00.676510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-04-17T18:11:00.676812Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-17T18:11:00.676841Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-17T18:11:00.677138Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-04-17T18:11:00.677265Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:47026 2025-04-17T18:11:00.677283Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:47026 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-17T18:11:00.677289Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:11:00.678101Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-17T18:11:00.678299Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T18:11:00.678313Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:11:00.678321Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND 
ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:11:00.678350Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494346249316911609:2392] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:11:00.678366Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:11:00.678804Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-17T18:11:00.678866Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|af861c06-5fb2dfac-aee875b2-13471e5e_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-04-17T18:11:00.679176Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|af861c06-5fb2dfac-aee875b2-13471e5e_0 2025-04-17T18:11:00.680005Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|af861c06-5fb2dfac-aee875b2-13471e5e_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-17T18:11:00.680237Z node 3 :PQ_WRITE_PROXY INFO: updating token 2025-04-17T18:11:00.680295Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:11:00.680894Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|af861c06-5fb2dfac-aee875b2-13471e5e_0 describe result for acl check 2025-04-17T18:11:00.680979Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|af861c06-5fb2dfac-aee875b2-13471e5e_0 2025-04-17T18:11:00.681217Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|af861c06-5fb2dfac-aee875b2-13471e5e_0 is DEAD 2025-04-17T18:11:00.681514Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T18:10:54.002161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346222734810360:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:54.002393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:54.021291Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346221643375369:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:54.021457Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:54.134080Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001965/r3tmp/tmpPwj9Pz/pdisk_1.dat 2025-04-17T18:10:54.148859Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:54.287666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:54.287789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:54.290577Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:54.291330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21664, node 1 2025-04-17T18:10:54.322928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:54.326608Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:54.326675Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:54.352437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:54.352495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:54.355594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:54.361130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001965/r3tmp/yandexVCQ7qa.tmp 2025-04-17T18:10:54.361196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001965/r3tmp/yandexVCQ7qa.tmp 2025-04-17T18:10:54.361351Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001965/r3tmp/yandexVCQ7qa.tmp 2025-04-17T18:10:54.361501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:54.412955Z INFO: TTestServer started on Port 25213 GrpcPort 21664 TClient is connected to server localhost:25213 PQClient connected to localhost:21664 === TenantModeEnabled() = 1 === Init PQ - start server on port 21664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:10:54.750775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:10:54.750976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:54.751128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:10:54.751327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:54.751356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:54.753165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:54.753298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:10:54.753426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:54.753458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:10:54.753500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-17T18:10:54.753515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-17T18:10:54.754452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:54.754469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-17T18:10:54.754487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:10:54.755034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:54.755066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:10:54.755074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-17T18:10:54.756446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:54.756498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:54.756514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-17T18:10:54.756551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-17T18:10:54.760465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:10:54.762007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-17T18:10:54.762240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:10:54.764116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913454809, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:54.764211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913454809 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:54.764232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-17T18:10:54.764401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-17T18:10:54.764428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-17T18:10:54.764585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:10:54.764630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:10:54.765841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:54.765872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:10:54.766041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:54.766066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494346222734811078:2435], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-17T18:10:54.766102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:54.766132Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-17T18:10:54.766249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-17T18:10:54.766273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-17T18:10:54.766291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-17T18:10:54.766303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-17T18:10:54.766314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-17T18:10:54.766329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-17T18:10:54.766338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-17T18:10:54.766351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for ... 
ready parts: 2/2 2025-04-17T18:11:01.978614Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2025-04-17T18:11:01.978624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:0 2025-04-17T18:11:01.978651Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-17T18:11:01.978660Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:1 2025-04-17T18:11:01.978664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:1 2025-04-17T18:11:01.978726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 5 2025-04-17T18:11:01.978738Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 2, subscribers: 1 2025-04-17T18:11:01.978746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715664, [OwnerId: 72057594046644480, LocalPathId: 11], 5 2025-04-17T18:11:01.978750Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715664, [OwnerId: 72057594046644480, LocalPathId: 12], 2 2025-04-17T18:11:01.979104Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-17T18:11:01.979169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-17T18:11:01.979187Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-17T18:11:01.979198Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-04-17T18:11:01.979208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2025-04-17T18:11:01.979335Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-17T18:11:01.979376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-17T18:11:01.979387Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-17T18:11:01.979395Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2025-04-17T18:11:01.979402Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2025-04-17T18:11:01.979429Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 
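
The `TEvUpdateAck` / "Publication in-flight" lines above trace a simple counting protocol: for each transaction the schemeshard publishes some number of path versions ("publications: 2, subscribers: 1"), decrements an in-flight counter as each ack arrives, and fires subscriber notifications once the counter reaches zero ("Publication complete, notify & remove"). A minimal sketch of that bookkeeping follows; the type and method names are hypothetical illustrations, not the actual NKikimr schemeshard code.

```cpp
#include <cstdint>
#include <functional>
#include <map>
#include <utility>
#include <vector>

// Hypothetical sketch of the publication bookkeeping visible in the log:
// one in-flight counter per txId, decremented on every TEvUpdateAck, with
// subscriber notification once all path versions are acknowledged.
struct TPublication {
    int InFlight = 0;                               // "Publication in-flight, count: N"
    std::vector<std::function<void()>> Subscribers; // notified on completion
};

class TPublicationTracker {
public:
    void StartPublication(uint64_t txId, int pathCount) {
        Publications[txId].InFlight = pathCount;    // "publications: 2"
    }
    void Subscribe(uint64_t txId, std::function<void()> cb) {
        Publications[txId].Subscribers.push_back(std::move(cb));
    }
    // Corresponds to "Handle TEvUpdateAck ... AckPublish ... pathId ..., version: V".
    void AckPublish(uint64_t txId) {
        auto it = Publications.find(txId);
        if (it == Publications.end())
            return;                                 // stale ack for a finished tx
        if (--it->second.InFlight > 0)
            return;                                 // still "Publication in-flight"
        for (auto& cb : it->second.Subscribers)     // "Publication complete, notify & remove"
            cb();
        Publications.erase(it);
    }
private:
    std::map<uint64_t, TPublication> Publications;
};
```

This also explains why the run with "subscribers: 0" earlier produces no notification step: the subscriber list is simply empty when the counter hits zero.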
2025-04-17T18:11:01.979437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7494346249536606352:2370] 2025-04-17T18:11:01.981040Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2025-04-17T18:11:01.981075Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-04-17T18:11:02.086207Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-17T18:11:02.086242Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-17T18:11:02.086574Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-04-17T18:11:02.086693Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:46126 2025-04-17T18:11:02.086716Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:46126 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-04-17T18:11:02.086725Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:11:02.088729Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-17T18:11:02.088866Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T18:11:02.088889Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:11:02.088897Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:11:02.088932Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494346253831573896:2381] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:11:02.088951Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:11:02.089418Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-17T18:11:02.089514Z node 3 :PERSQUEUE INFO: new Cookie 12345678|f44d0ca9-591729c4-dc187ce6-f52f54f4_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-17T18:11:02.089861Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|f44d0ca9-591729c4-dc187ce6-f52f54f4_0 2025-04-17T18:11:02.090721Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|f44d0ca9-591729c4-dc187ce6-f52f54f4_0 grpc read done: success: 0 data: 2025-04-17T18:11:02.090738Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|f44d0ca9-591729c4-dc187ce6-f52f54f4_0 grpc read failed 2025-04-17T18:11:02.090912Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|f44d0ca9-591729c4-dc187ce6-f52f54f4_0 2025-04-17T18:11:02.090931Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|f44d0ca9-591729c4-dc187ce6-f52f54f4_0 is DEAD Finish: 0 2025-04-17T18:11:02.091212Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializeWritePQService done === PersQueueClient === InitializePQ completed 2025-04-17T18:11:02.098880Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-17T18:11:02.098906Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-17T18:11:02.099206Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-04-17T18:11:02.099308Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:46126 2025-04-17T18:11:02.099326Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:46126 proto=v1 topic=topic1 durationSec=0 2025-04-17T18:11:02.099334Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:11:02.101068Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-17T18:11:02.101197Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-17T18:11:02.101213Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:11:02.101221Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic 
AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-17T18:11:02.101267Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494346253831573916:2390] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-17T18:11:02.101296Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:11:02.101765Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-17T18:11:02.101829Z node 3 :PERSQUEUE INFO: new Cookie 12345678|996fa596-9dadf5e2-f99bd04c-46728edc_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-17T18:11:02.102204Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|996fa596-9dadf5e2-f99bd04c-46728edc_0 2025-04-17T18:11:02.103421Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|996fa596-9dadf5e2-f99bd04c-46728edc_0 grpc read done: success: 0 data: 2025-04-17T18:11:02.103452Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|996fa596-9dadf5e2-f99bd04c-46728edc_0 grpc read failed 2025-04-17T18:11:02.103944Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 2 sessionId: 12345678|996fa596-9dadf5e2-f99bd04c-46728edc_0 2025-04-17T18:11:02.103965Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|996fa596-9dadf5e2-f99bd04c-46728edc_0 is DEAD 2025-04-17T18:11:02.104294Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogExistingRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2025-04-17T18:10:06.835219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346015269542879:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:06.845921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197f/r3tmp/tmpPpTqJI/pdisk_1.dat 2025-04-17T18:10:07.368569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:07.368685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:07.400292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20184, node 1 2025-04-17T18:10:07.438136Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:07.438157Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:07.453338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:07.815193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:07.815211Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:07.815218Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T18:10:07.815303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:08.187740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:08.315302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22556 2025-04-17T18:10:08.509161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:11.698323Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346035960066154:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:11.700599Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197f/r3tmp/tmpbIr5jI/pdisk_1.dat 2025-04-17T18:10:11.916621Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18229, node 4 2025-04-17T18:10:12.008774Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:12.008809Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:12.008815Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:12.008955Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:12.017779Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:12.017902Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:12.020799Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:12.318208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:12.423701Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8797 2025-04-17T18:10:12.698458Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:12.963544Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:10:12.997713Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7494346040255034612:2825], for# user2@builtin, access# DescribeSchema 2025-04-17T18:10:13.015264Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7494346044550001911:2826], for# user2@builtin, access# DescribeSchema 2025-04-17T18:10:13.033639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.677463Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494346059094277043:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:16.677618Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197f/r3tmp/tmpL3xVml/pdisk_1.dat 2025-04-17T18:10:16.872816Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:16.925688Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.926605Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.932768Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21234, node 7 2025-04-17T18:10:16.994837Z node 7 :NET_CLASSIFIER WARN: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:16.994862Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:16.994869Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:16.995019Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:17.271479Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:17.343108Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8484 2025-04-17T18:10:17.566810Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:17.586967Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-17T18:10:21.677629Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494346059094277043:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:21.677687Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:31.851491Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:31.851522Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:59.859869Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494346244868247363:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:59.859992Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00197f/r3tmp/tmpdeH3e8/pdisk_1.dat 2025-04-17T18:10:59.991962Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:00.036664Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:00.036760Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:00.040013Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11819, node 10 2025-04-17T18:11:00.094035Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:00.094062Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:00.094069Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:00.094262Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:00.341337Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:00.398012Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11271 2025-04-17T18:11:00.605927Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
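
These retention errors, together with the write_speed errors in the two lines that follow, spell out the validation rules the test exercises. Below is a sketch of a checker implementing exactly those rules; the limits are taken verbatim from the error messages, while the function and type names are hypothetical and not the actual YDB DataStreams code.

```cpp
#include <array>
#include <cstdint>

// Sketch of the limits quoted in the errors above (retention) and below
// (write_speed). Names and types are illustrative assumptions.
struct TRetentionLimits {
    uint32_t MaxHours;
    uint64_t MinStorageMb;
    uint64_t MaxStorageMb;
};

constexpr std::array<TRetentionLimits, 2> AllowedRetention = {{
    {24, 0, 0},            // { hours : [0, 24], storage : [0, 0] }
    {168, 51200, 1048576}, // { hours : [0, 168], storage : [51200, 1048576] }
}};

bool RetentionIsValid(uint32_t hours, uint64_t storageMb) {
    for (const auto& limit : AllowedRetention) {
        if (hours <= limit.MaxHours &&
            storageMb >= limit.MinStorageMb &&
            storageMb <= limit.MaxStorageMb)
            return true;
    }
    return false; // e.g. hours 168, storage 10 fits neither range -> code 500080
}

bool WriteSpeedIsValid(uint64_t bytesPerSec) {
    // "write_speed per second in partition must have values from set
    //  {131072,524288,1048576}" — 130048 and 1049600 are rejected below.
    return bytesPerSec == 131072 || bytesPerSec == 524288 || bytesPerSec == 1048576;
}
```

Checking the logged inputs against this sketch reproduces all four rejections: (168 h, 10 MB) misses the storage floor, (144 h, 0 MB) exceeds the zero-storage hour cap, and 130048/1049600 are not members of the allowed write-speed set.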
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 14422, MsgBus: 3069 2025-04-17T18:09:59.397276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345985562018300:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:59.412967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c4c/r3tmp/tmpqlX6wi/pdisk_1.dat 2025-04-17T18:09:59.945389Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:59.956122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:59.956208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:09:59.963551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14422, node 1 2025-04-17T18:10:00.074860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:00.074886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:00.074893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:00.075019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3069 TClient is connected to server localhost:3069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:00.680584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.705440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:10:00.824002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:00.988346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:01.062164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:10:02.768896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345998446921891:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:02.769064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.063573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.099148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.124965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.152460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.232942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.274060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.310107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:10:03.402153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346002741889854:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.402317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.402662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346002741889859:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:03.407105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:10:03.423633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346002741889861:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:10:03.496031Z node 1 :TX_PROXY ERROR: Actor# [1:7494346002741889921:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:04.399148Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345985562018300:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:04.399218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:04.468576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:10:04.756169Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-17T18:10:04.756778Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:04.756826Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494346007036857788:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:04.756923Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:10:04.756965Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494346007036857788:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, 
SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:04.757190Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:04.757221Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494346007036857788:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474 ... :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-17T18:10:33.903378Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.903421Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.903499Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715674, txId# 281474976710765 2025-04-17T18:10:33.903542Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload 
rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.903748Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.903785Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.904872Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715674, cookie: 281474976715674, txId: 281474976710765, status: StatusAccepted 2025-04-17T18:10:33.904981Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710765 SchemeshardId: 72057594046644480 PathId: 17 2025-04-17T18:10:33.905580Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.905634Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: 
StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.908221Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-17T18:10:33.908250Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-17T18:10:33.908265Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-17T18:10:33.908321Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710765, buildInfoId: 281474976715674 2025-04-17T18:10:33.908358Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710765, buildInfo: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.909152Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.909212Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload 
rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.909236Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:10:33.909466Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715674 2025-04-17T18:10:33.909518Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7494346129803292781:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:33.909535Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715674, subscribers count# 1 2025-04-17T18:10:33.909735Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715674 2025-04-17T18:10:33.909882Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715674 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:10:43.831299Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715674 2025-04-17T18:10:43.859988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:43.860016Z node 2 :IMPORT WARN: Table profiles were not loaded |89.1%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T18:10:51.103138Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346209479933907:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:51.103297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:51.125111Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346207305162167:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:51.125165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:51.280038Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:51.280036Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00196a/r3tmp/tmpYwGgZC/pdisk_1.dat 2025-04-17T18:10:51.454535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:51.461999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:51.462078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:51.466102Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:51.466998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:51.487834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:51.487916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:51.490412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29498, node 1 2025-04-17T18:10:51.525022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00196a/r3tmp/yandex3pb641.tmp 2025-04-17T18:10:51.525054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00196a/r3tmp/yandex3pb641.tmp 2025-04-17T18:10:51.525229Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00196a/r3tmp/yandex3pb641.tmp 2025-04-17T18:10:51.525404Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:51.563778Z INFO: TTestServer started on Port 24823 GrpcPort 29498 TClient is connected to server localhost:24823 PQClient connected to localhost:29498 === TenantModeEnabled() = 1 === Init PQ - start server on port 29498 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:51.843257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:10:51.843420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:51.843598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:10:51.843831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:10:51.843878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:51.845944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:51.846096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:10:51.846268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:51.846308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:10:51.846317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:10:51.846325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-17T18:10:51.847249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:51.847278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:10:51.847291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:10:51.848101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:51.848136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:10:51.848149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T18:10:51.849685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:51.849722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:51.849748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:51.849783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:51.853129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:10:51.854631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:10:51.854799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:10:51.856811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913451904, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:10:51.856929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913451904 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:10:51.856957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:51.857123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:10:51.857172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:10:51.857302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:10:51.857355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:10:51.858840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:10:51.858874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:10:51.859026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:10:51.859056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494346209479934550:2393], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T18:10:51.859095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:51.859118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T18:10:51.859197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:51.859226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:51.859246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:10:51.859259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:51.859276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T18:10:51.859295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:10:51.859307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T18:10:51.859323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-17T18:10:51.859368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... n established. Init response: session_id: "1236|4f682063-622a0dc7-1af157eb-da168a81_0" topic: "PQ/account3/folder1/folder2/topic" 2025-04-17T18:11:02.770675Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write 1 messages with Id from 1 to 1 2025-04-17T18:11:02.771565Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session: try to update token 2025-04-17T18:11:02.771601Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-17T18:11:02.776912Z :INFO: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session: close. 
Timeout = 10000 ms 2025-04-17T18:11:02.777472Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|4f682063-622a0dc7-1af157eb-da168a81_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:11:02.778559Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-17T18:11:02.778936Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T18:11:02.778969Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-17T18:11:02.779069Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-17T18:11:02.779229Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::IEventHandle 2025-04-17T18:11:02.782592Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-17T18:11:02.782638Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-17T18:11:02.783131Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2025-04-17T18:11:02.783594Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2025-04-17T18:11:02.783798Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2025-04-17T18:11:02.783835Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2025-04-17T18:11:02.783964Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2025-04-17T18:11:03.001130Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346256724577320:2594], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:11:03.001329Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGU3NmRhNjktNGUxMTdlZDktMTQ3MDUwYjItOTE5MzZhZTM=, ActorId: [1:7494346256724577318:2593], ActorState: ExecuteState, TraceId: 01js2f4xp666s8c85tdknz2d9y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:11:03.001705Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:11:03.560695Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0: Cookie: 7 2025-04-17T18:11:03.560814Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2025-04-17T18:11:03.560843Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2025-04-17T18:11:03.560856Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2025-04-17T18:11:03.565361Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2025-04-17T18:11:03.565948Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1744913463565 2025-04-17T18:11:03.566717Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:11:03.566737Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:11:03.566748Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T18:11:03.566757Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:11:03.566766Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] m0000000000p1236 2025-04-17T18:11:03.566774Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] d0000000000_00000000000000000006_00000_0000000001_00002| 2025-04-17T18:11:03.566781Z node 2 :PERSQUEUE 
DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:11:03.566789Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:11:03.566799Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:11:03.566854Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:11:03.566908Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 6 partNo 0 count 1 size 1200275 2025-04-17T18:11:03.573014Z node 2 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 6 count 1 size 1200275 actorID [2:7494346245959868723:2360] 2025-04-17T18:11:03.573112Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:11:03.573112Z node 2 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037899' partition 0 offset 6 partno 0 count 1 parts 2 size 1200275 2025-04-17T18:11:03.573144Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:11:03.573187Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2025-04-17T18:11:03.573221Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:11:03.573251Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2025-04-17T18:11:03.573288Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T18:11:03.573319Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2025-04-17T18:11:03.573324Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-17T18:11:03.573528Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::IEventHandle 2025-04-17T18:11:03.574212Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 12 queued_in_partition_duration_ms: 776 throttled_on_partition_duration_ms: 776 } 2025-04-17T18:11:03.574247Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session: acknoledged message 1 2025-04-17T18:11:03.577552Z :INFO: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session will now close 2025-04-17T18:11:03.577586Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session: aborting 2025-04-17T18:11:03.578067Z :INFO: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:11:03.578087Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session is aborting and will not restart 2025-04-17T18:11:03.578135Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4f682063-622a0dc7-1af157eb-da168a81_0] Write session: destroy 2025-04-17T18:11:03.578321Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|4f682063-622a0dc7-1af157eb-da168a81_0 grpc read done: success: 0 data: 2025-04-17T18:11:03.578347Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|4f682063-622a0dc7-1af157eb-da168a81_0 grpc read failed 2025-04-17T18:11:03.578372Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|4f682063-622a0dc7-1af157eb-da168a81_0 grpc closed 2025-04-17T18:11:03.578383Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|4f682063-622a0dc7-1af157eb-da168a81_0 is DEAD 2025-04-17T18:11:03.579104Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison DURATION 2.957838s 2025-04-17T18:11:03.579459Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7494346256724577314:2591] destroyed 2025-04-17T18:11:03.579481Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T18:11:03.877464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 8] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2025-04-17T18:10:58.472070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:58.472474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:58.472521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019e2/r3tmp/tmpg5hNn3/pdisk_1.dat 2025-04-17T18:10:58.945673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:58.996941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:59.045525Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:10:59.047501Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:10:59.047772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:59.048514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:59.061503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:59.146467Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:10:59.146543Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:10:59.147537Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:10:59.247183Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:10:59.247286Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:10:59.247881Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:10:59.247990Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:10:59.248289Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:10:59.248506Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:10:59.248604Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:10:59.251878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:59.252842Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:10:59.253530Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:10:59.253599Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:10:59.301938Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:10:59.303139Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:10:59.304621Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:10:59.304975Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:10:59.353460Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:10:59.354360Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:10:59.354471Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:10:59.356274Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:10:59.356364Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:10:59.356430Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:10:59.358213Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:10:59.358400Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:10:59.358512Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:10:59.369353Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:10:59.419108Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:10:59.420449Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:10:59.420622Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:10:59.420662Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:10:59.420693Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:10:59.420729Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:10:59.420968Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T18:10:59.421023Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:10:59.422109Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:10:59.422233Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:10:59.422365Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:10:59.422463Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:10:59.422536Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:10:59.422600Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:10:59.422636Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:10:59.422670Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:10:59.422731Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:10:59.422877Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:10:59.422924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:10:59.422971Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:10:59.424408Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:10:59.424482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:10:59.424586Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:10:59.424866Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:10:59.424939Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:10:59.425369Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:10:59.425440Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:10:59.425482Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:10:59.425519Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:10:59.425552Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:10:59.425829Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:10:59.425877Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:10:59.425913Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:10:59.425954Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T18:10:59.426007Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:10:59.426036Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:10:59.426067Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:10:59.426098Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:10:59.426123Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:10:59.427636Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:10:59.427685Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:10:59.438484Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 90 has no attached operations 2025-04-17T18:11:04.764411Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T18:11:04.764466Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:11:04.764785Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T18:11:04.764815Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T18:11:04.764844Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-17T18:11:04.764914Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:04.765127Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1001:2799], Recipient [2:904:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:04.765157Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:04.765266Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:11:04.765378Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-04-17T18:11:04.765409Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-17T18:11:04.765434Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1001:2799] ShardId# 72075186224037890 
2025-04-17T18:11:04.765509Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-17T18:11:04.765567Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T18:11:04.765593Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T18:11:04.765863Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T18:11:04.765892Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:04.765916Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-17T18:11:04.765964Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:04.766040Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:11:04.766169Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-17T18:11:04.777168Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-17T18:11:04.777239Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1001:2799] ShardId# 72075186224037890 2025-04-17T18:11:04.777381Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-17T18:11:04.777532Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T18:11:04.777569Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-17T18:11:04.777927Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T18:11:04.777963Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:04.777991Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-17T18:11:04.778039Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2025-04-17T18:11:04.778113Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:04.778334Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-04-17T18:11:04.778372Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-04-17T18:11:04.778398Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-04-17T18:11:04.778451Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-17T18:11:04.778480Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2025-04-17T18:11:04.778682Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:904:2724], Recipient [2:904:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:04.778719Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:04.778783Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:11:04.778818Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:04.778855Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-04-17T18:11:04.778884Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-04-17T18:11:04.778917Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-04-17T18:11:04.778956Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-17T18:11:04.778984Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-04-17T18:11:04.779012Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-04-17T18:11:04.779037Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-17T18:11:04.779068Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-04-17T18:11:04.779094Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-04-17T18:11:04.779116Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T18:11:04.779139Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-04-17T18:11:04.779184Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-17T18:11:04.779207Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T18:11:04.779228Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-04-17T18:11:04.779256Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:11:04.779280Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T18:11:04.779305Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T18:11:04.779329Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T18:11:04.779394Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:11:04.779425Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-17T18:11:04.779460Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T18:11:04.779525Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:11:04.779762Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:904:2724], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 290 } } 2025-04-17T18:11:04.779794Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-04-17T18:11:04.779847Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013118s execute time: 0.318484s total time: 0.331602s 2025-04-17T18:11:04.780216Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-17T18:11:04.780448Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:899:2722]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-17T18:11:04.780665Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:904:2724]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues 
[GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-04-17T18:11:02.043764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:11:02.044090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:11:02.044127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019b2/r3tmp/tmpnxP41Z/pdisk_1.dat 2025-04-17T18:11:02.352417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:11:02.384495Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:02.421506Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:11:02.422261Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:11:02.422403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:02.422482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:02.433550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:02.510156Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:11:02.510227Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:11:02.510409Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:11:02.596569Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:11:02.596660Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:11:02.597043Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:11:02.597116Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:11:02.597327Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:11:02.597462Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:11:02.597529Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:11:02.599070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:11:02.599386Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:11:02.600021Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:11:02.600081Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:11:02.623901Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:11:02.624651Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:11:02.624957Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:11:02.625139Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:02.657317Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:11:02.658061Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:02.658169Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:11:02.659640Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:11:02.659722Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:11:02.659789Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:11:02.660174Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:11:02.660291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:11:02.660378Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:11:02.671059Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:11:02.704009Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:11:02.704185Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:11:02.704282Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:11:02.704306Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:11:02.704343Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:11:02.704369Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:11:02.704541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T18:11:02.704574Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:02.704783Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:11:02.704849Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:11:02.704954Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:11:02.704986Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:11:02.705016Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:11:02.705055Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:11:02.705091Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:11:02.705113Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:11:02.705160Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:11:02.705262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:02.705290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:02.705321Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:11:02.705619Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:11:02.705655Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:11:02.705773Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:11:02.705956Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:11:02.705996Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:11:02.706067Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:11:02.706110Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:11:02.706142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:11:02.706170Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:11:02.706229Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:11:02.706438Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:11:02.706459Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:11:02.706487Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:11:02.706528Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T18:11:02.706567Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:11:02.706590Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:11:02.706618Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:11:02.706642Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:11:02.706663Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:11:02.707994Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:11:02.708050Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:11:02.718629Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 281474976715659] at 72075186224037888 is Executed 2025-04-17T18:11:06.930760Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-17T18:11:06.930789Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-17T18:11:06.930800Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T18:11:06.930844Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:745:2626] for [0:281474976715659] at 72075186224037888 2025-04-17T18:11:06.930863Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-17T18:11:06.930893Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:11:06.930984Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2025-04-17T18:11:06.931011Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2025-04-17T18:11:06.931063Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:666:2570] ShardId# 72075186224037888 2025-04-17T18:11:06.931192Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2025-04-17T18:11:06.931216Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-17T18:11:06.931281Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-04-17T18:11:06.931308Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T18:11:06.931370Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient 
[2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:06.931386Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:06.931412Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:11:06.931442Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:06.931476Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:11:06.931509Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T18:11:06.931534Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-04-17T18:11:06.931556Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-17T18:11:06.931580Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-17T18:11:06.931605Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-04-17T18:11:06.931626Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-17T18:11:06.931787Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-17T18:11:06.931813Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:06.931833Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T18:11:06.931850Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:11:06.931865Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:11:06.931897Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:11:06.932268Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:779:2647], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:06.932295Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:06.932381Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-04-17T18:11:06.932404Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T18:11:06.932651Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:744:2626], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T18:11:06.932693Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:06.932761Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T18:11:06.932808Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-17T18:11:06.932884Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2025-04-17T18:11:06.932918Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-17T18:11:06.933027Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:11:06.933050Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-04-17T18:11:06.933136Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-04-17T18:11:06.933163Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-17T18:11:06.933201Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-17T18:11:06.933292Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:06.933320Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:06.933369Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:11:06.933399Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:06.933443Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-04-17T18:11:06.933468Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-17T18:11:06.933493Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-04-17T18:11:06.933543Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-17T18:11:06.933570Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-04-17T18:11:06.933600Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:11:06.933636Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-17T18:11:06.933657Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-04-17T18:11:06.933689Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:11:06.933717Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:11:06.933734Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:11:06.933765Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-17T18:11:06.933817Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
2025-04-17T18:11:06.933844Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-04-17T18:11:06.933874Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:11:06.933894Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T18:11:06.933917Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:11:06.933938Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:11:06.933976Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:11:06.934007Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-17T18:11:06.934040Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-04-17T18:11:06.934074Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-04-17T18:11:06.934139Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:11:06.934340Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 332 } } 2025-04-17T18:11:06.934367Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-04-17T18:11:06.934403Z node 2 :TX_PROXY ERROR: [ReadTable [2:745:2626] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-04-17T18:11:06.934639Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658 >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: 2025-04-17T18:11:01.853838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:11:01.854228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:11:01.854273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019c6/r3tmp/tmpNjPBMr/pdisk_1.dat 2025-04-17T18:11:02.156796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:11:02.199874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:02.237989Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:11:02.238707Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:11:02.238867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:02.238970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:02.250010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:02.326882Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:11:02.326943Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:11:02.327062Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:11:02.401089Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:11:02.401165Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:11:02.401664Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:11:02.401770Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:11:02.401990Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:11:02.402147Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:11:02.402231Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:11:02.403626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:11:02.403950Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:11:02.404473Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:11:02.404531Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:11:02.430519Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:11:02.431513Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:11:02.431932Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:11:02.432137Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:02.463992Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:11:02.464574Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:02.464648Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:11:02.465849Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:11:02.465902Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:11:02.465940Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:11:02.466201Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:11:02.466293Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:11:02.466340Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:11:02.476963Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:11:02.495354Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:11:02.495543Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:11:02.495679Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:11:02.495707Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:11:02.495744Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:11:02.495775Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:11:02.495954Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T18:11:02.495986Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:02.496256Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:11:02.496325Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:11:02.496422Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:11:02.496457Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:11:02.496495Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:11:02.496538Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:11:02.496564Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:11:02.496600Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:11:02.496656Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:11:02.496762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:02.496786Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:02.496821Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:11:02.497148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:11:02.497198Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:11:02.497300Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:11:02.497502Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:11:02.497555Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:11:02.497653Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:11:02.497697Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:11:02.497734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:11:02.497767Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:11:02.497789Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:11:02.497985Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:11:02.498023Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:11:02.498048Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:11:02.498080Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T18:11:02.498129Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:11:02.498151Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:11:02.498202Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:11:02.498232Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:11:02.498250Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:11:02.499392Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:11:02.499432Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:11:02.510115Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:07.457263Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2025-04-17T18:11:07.457303Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-17T18:11:07.457334Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-17T18:11:07.457366Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-17T18:11:07.457614Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-17T18:11:07.457632Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-17T18:11:07.457649Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-17T18:11:07.457701Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1364:3077], Recipient [2:1262:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:07.457717Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:07.457762Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-17T18:11:07.457933Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:11:07.458023Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-04-17T18:11:07.458044Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-04-17T18:11:07.458067Z node 2 :TX_PROXY TRACE: [ReadTable [2:1084:2857] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1364:3077] ShardId# 72075186224037896 2025-04-17T18:11:07.458120Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-04-17T18:11:07.458155Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-17T18:11:07.458172Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-17T18:11:07.458420Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1083:2857], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-17T18:11:07.458443Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:07.458457Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-17T18:11:07.458481Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-17T18:11:07.458511Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-17T18:11:07.458589Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-04-17T18:11:07.458604Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-04-17T18:11:07.458618Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-04-17T18:11:07.458642Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2025-04-17T18:11:07.458655Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2025-04-17T18:11:07.458728Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1262:2998], Recipient [2:1262:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:07.458743Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:07.458766Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-17T18:11:07.458782Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:07.458811Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-04-17T18:11:07.458826Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-04-17T18:11:07.458850Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-04-17T18:11:07.458873Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2025-04-17T18:11:07.458888Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-04-17T18:11:07.458903Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-04-17T18:11:07.458918Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-17T18:11:07.458985Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-04-17T18:11:07.459009Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-04-17T18:11:07.459027Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-04-17T18:11:07.459041Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-04-17T18:11:07.459063Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-04-17T18:11:07.459079Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-04-17T18:11:07.459092Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-04-17T18:11:07.459106Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:11:07.459118Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2025-04-17T18:11:07.459131Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-17T18:11:07.459144Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-17T18:11:07.459175Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-17T18:11:07.459201Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-17T18:11:07.459222Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T18:11:07.459264Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-17T18:11:07.459385Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1262:2998], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 203 } } 2025-04-17T18:11:07.459402Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-04-17T18:11:07.459438Z node 2 :TX_PROXY INFO: [ReadTable [2:1084:2857] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.015761s execute time: 0.523339s total time: 0.539100s 2025-04-17T18:11:07.459691Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:886:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2025-04-17T18:11:07.459838Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:994:2791]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T18:11:07.460053Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:999:2793]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T18:11:07.460191Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1260:2996]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T18:11:07.460412Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1367:3080], Recipient [2:1149:2914]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:07.460432Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:07.460453Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1365:3078], serverId# [2:1367:3080], sessionId# [0:0:0] 2025-04-17T18:11:07.460490Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1262:2998]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T18:11:07.460619Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1149:2914]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-17T18:11:07.460699Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1368:3081], Recipient [2:1153:2916]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:07.460715Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:07.460730Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1366:3079], serverId# [2:1368:3081], sessionId# [0:0:0] 2025-04-17T18:11:07.460785Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1153:2916]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 12071, MsgBus: 21702 2025-04-17T18:09:41.518203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345910030289897:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:41.518373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c8a/r3tmp/tmpCIowr9/pdisk_1.dat 2025-04-17T18:09:41.863843Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:09:41.892605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:41.892711Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12071, node 1 2025-04-17T18:09:41.894353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:41.964333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:41.964362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:41.964373Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:41.964518Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21702 TClient is connected to server localhost:21702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:42.480332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.500429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.642322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.825058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:42.909593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:44.600746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345922915193584:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.600860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:44.890365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.925049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:44.959942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.027412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.057826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.099942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.143444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:45.248936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345927210161548:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:45.249025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:45.249282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345927210161553:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:45.252367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:45.260097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345927210161555:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:45.325853Z node 1 :TX_PROXY ERROR: Actor# [1:7494345927210161612:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:46.248160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:46.522318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345910030289897:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:46.522381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:46.644859Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-17T18:09:46.645772Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:46.645859Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345931505129479:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:46.645980Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:09:46.646031Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345931505129479:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, 
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:46.647360Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:46.647426Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345931505129479:2537], AlterMainTableTxId: 0 ... ildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:41.716313Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715766 2025-04-17T18:10:41.716382Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:41.716661Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:41.716711Z node 3 
:BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:41.718034Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710674, cookie: 281474976710674, txId: 281474976715766, status: StatusAccepted 2025-04-17T18:10:41.718164Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 17 2025-04-17T18:10:41.718863Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:41.718922Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: 
StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:41.721296Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037932 not found 2025-04-17T18:10:41.721331Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037931 not found 2025-04-17T18:10:41.721345Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037930 not found 2025-04-17T18:10:41.721360Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-04-17T18:10:41.723025Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfoId: 281474976710674 2025-04-17T18:10:41.723108Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:41.723352Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:41.723411Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-04-17T18:10:41.723439Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:10:41.723747Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:41.723804Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346166793803428:2536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:41.723824Z node 3 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710674, subscribers count# 1 2025-04-17T18:10:41.724019Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710674 2025-04-17T18:10:41.724207Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710674 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:10:51.111326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:51.111360Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:51.500523Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710674 |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: 2025-04-17T18:11:03.359637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:11:03.359979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:11:03.360023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001980/r3tmp/tmpbakpk4/pdisk_1.dat 2025-04-17T18:11:03.639253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:11:03.667241Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:03.703023Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:11:03.703686Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:11:03.703835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:03.703931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:03.714998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:03.791463Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:11:03.791519Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:11:03.791644Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:11:03.874791Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:11:03.874856Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:11:03.875235Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:11:03.875310Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:11:03.875495Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:11:03.875613Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:11:03.875680Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:11:03.876833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:11:03.877162Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:11:03.877651Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:11:03.877712Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:11:03.901575Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:11:03.902326Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:11:03.902658Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:11:03.902835Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:03.934005Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:11:03.934774Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:03.934859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:11:03.935960Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:11:03.936010Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:11:03.936042Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:11:03.936312Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:11:03.936417Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:11:03.936466Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:11:03.947027Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:11:03.979153Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:11:03.979295Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:11:03.979379Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:11:03.979402Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:11:03.979425Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:11:03.979446Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:11:03.979601Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T18:11:03.979634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:03.979831Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:11:03.979888Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:11:03.979970Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:11:03.979995Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:11:03.980025Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:11:03.980072Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:11:03.980109Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:11:03.980135Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:11:03.980168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:11:03.980251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:03.980271Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:11:03.980296Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:11:03.980576Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:11:03.980629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:11:03.980715Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:11:03.980891Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:11:03.980959Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:11:03.981054Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:11:03.981103Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:11:03.981135Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:11:03.981185Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:11:03.981222Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:11:03.981456Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:11:03.981486Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:11:03.981509Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:11:03.981539Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T18:11:03.981574Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:11:03.981592Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:11:03.981614Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:11:03.981636Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:11:03.981654Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:11:03.983026Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:11:03.983075Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:11:03.993814Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... _DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-17T18:11:08.747346Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715662 Cleared: true 2025-04-17T18:11:08.747388Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T18:11:08.747480Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:08.747503Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:08.747547Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:11:08.747583Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:08.747624Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-17T18:11:08.747663Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit WaitForStreamClearance 2025-04-17T18:11:08.747703Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715662] at 72075186224037888 2025-04-17T18:11:08.747742Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-17T18:11:08.747776Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-17T18:11:08.747804Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ReadTableScan 2025-04-17T18:11:08.747834Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-17T18:11:08.747977Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2025-04-17T18:11:08.748013Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:08.748035Z node 2 :TX_DATASHARD TRACE: Check 
candidate unit PlanQueue at 72075186224037888 2025-04-17T18:11:08.748059Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:11:08.748080Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:11:08.748120Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:11:08.748537Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:876:2704], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:08.748565Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T18:11:08.748607Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T18:11:08.748817Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:08.748862Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T18:11:08.748925Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:08.749055Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:11:08.749118Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-17T18:11:08.749173Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-17T18:11:08.749214Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T18:11:08.749552Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:08.749586Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T18:11:08.749625Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:08.749686Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:11:08.749740Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-17T18:11:08.749784Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-17T18:11:08.749820Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T18:11:08.749984Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:08.750002Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T18:11:08.750026Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:08.750079Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-17T18:11:08.750144Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-17T18:11:08.750189Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-17T18:11:08.750214Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-17T18:11:08.750394Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-17T18:11:08.750437Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-17T18:11:08.750478Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:08.750522Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-17T18:11:08.750649Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-17T18:11:08.750686Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-17T18:11:08.750741Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-17T18:11:08.750767Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-17T18:11:08.750867Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:08.750902Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:11:08.750945Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:11:08.750977Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-17T18:11:08.751012Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-17T18:11:08.751041Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-17T18:11:08.751073Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2025-04-17T18:11:08.751110Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-17T18:11:08.751146Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2025-04-17T18:11:08.751177Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:11:08.751206Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-17T18:11:08.751232Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 
2025-04-17T18:11:08.751270Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:11:08.751299Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:11:08.751323Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:11:08.751355Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-17T18:11:08.751372Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:11:08.751395Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-04-17T18:11:08.751430Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:11:08.751459Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T18:11:08.751487Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:11:08.751514Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:11:08.751563Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:11:08.751592Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-17T18:11:08.751624Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T18:11:08.751681Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:11:08.751804Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2025-04-17T18:11:08.751855Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013273s execute time: 0.103635s total time: 0.116908s 2025-04-17T18:11:08.752148Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |89.2%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:104:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:107:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (a ... 594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:140:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:50:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:88:2057] recipient: [25:86:2116] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:90:2057] recipient: [25:86:2116] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:89:2117] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:143:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:88:2057] recipient: [26:86:2116] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:90:2057] recipient: [26:86:2116] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:89:2117] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:143:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:85:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:89:2057] recipient: [27:87:2116] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:91:2057] recipient: [27:87:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! 
new actor is[27:90:2117] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:144:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:91:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:92:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:94:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:93:2120] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:147:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:91:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:92:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:94:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:93:2120] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:147:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:89:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:92:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:93:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:95:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! 
new actor is[30:94:2120] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:148:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:91:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:95:2057] recipient: [31:93:2121] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:97:2057] recipient: [31:93:2121] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:96:2122] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:150:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:91:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:95:2057] recipient: [32:93:2121] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:97:2057] recipient: [32:93:2121] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:96:2122] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:150:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:94:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:95:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:95:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! 
new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:151:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] >> TSchemeShardUserAttrsTest::SpecialAttributes >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TSchemeShardUserAttrsTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-04-17T18:10:47.563502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346190497325755:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.563636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:47.601328Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346189502728735:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.601447Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:47.750795Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.750864Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00198e/r3tmp/tmpsGIWpc/pdisk_1.dat 2025-04-17T18:10:47.963976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.964051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.966100Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:47.966849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.967238Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:47.976075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.976143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:48.002871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17794, node 1 2025-04-17T18:10:48.106064Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00198e/r3tmp/yandexMLcQnI.tmp 2025-04-17T18:10:48.106094Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/6873/00198e/r3tmp/yandexMLcQnI.tmp 2025-04-17T18:10:48.107351Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00198e/r3tmp/yandexMLcQnI.tmp 2025-04-17T18:10:48.107509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:48.275294Z INFO: TTestServer started on Port 10021 GrpcPort 17794 TClient is connected to server localhost:10021 PQClient connected to localhost:17794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:48.536875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:48.593443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:10:50.265259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346203382228826:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.265260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346203382228816:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.265369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.268698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:10:50.273211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346203382228882:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.273276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.283021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346203382228845:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:10:50.348941Z node 1 :TX_PROXY ERROR: Actor# [1:7494346203382228925:2818] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:50.670523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:50.671105Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346203382228943:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:10:50.671386Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWY4NzEzNWMtODMyMjY0Mi1jM2JmN2Y5NS0yZmRhMWU0NQ==, ActorId: [1:7494346203382228813:2340], ActorState: ExecuteState, TraceId: 01js2f4h8ea500zmwnszqgyq7r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:10:50.673557Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:10:50.748908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:50.830257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:10:51.126939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2f4hwm7j6zbgyh50dg0pm3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM0YTA2MDgtN2YwOWFkNWItYTRkMWQ1ZmMtM2IyOTVkZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494346207677196623:3107] 2025-04-17T18:10:52.563244Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346190497325755:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:52.563313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:52.600585Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346189502728735:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:52.600660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-04-17T18:10:56.936283Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494346190497326073:2143], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:10:56.936531Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494346190497326073:2143], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-17T18:10:56.936637Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494346190497326073:2143], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494346194792293822:2458] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913448607 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:10:56.936756Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494346190497326073:2143], cacheItem# { Subscriber: { Subscriber: [1:7494346194792293822:2458] DomainOwnerId: 72057594046644480 Type: 2 ... sourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:09.540437Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494346242452493166:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:09.540504Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346242452493166:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346255337395965:2736] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913462334 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:09.540579Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494346242452493166:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:09.540625Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346242452493166:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346255337396187:2893] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913462460 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:09.540645Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346285402169858:4883], recipient# [3:7494346242452493150:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:09.540730Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494346242452493166:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:09.540750Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346285402169859:4884], recipient# [3:7494346242452493150:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:09.540830Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346242452493166:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346242452493653:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913459884 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 
1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:09.540962Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346285402169860:4885], recipient# [3:7494346242452493150:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:09.779757Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7494346242452493166:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:11:09.779845Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7494346242452493166:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346255337396187:2893] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913462460 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:09.779879Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7494346242452493166:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346255337395965:2736] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913462334 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:09.780089Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346285402169871:4891], recipient# [3:7494346285402169870:2601], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-17T18:11:09.780778Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494346242452493166:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:09.780862Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346242452493166:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346242452493653:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913459884 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:09.780956Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346285402169874:4892], recipient# [3:7494346285402169873:2607], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:10.783604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-04-17T18:11:10.783728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:10.783779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:10.783820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:10.784674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:10.784726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:10.784779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:10.784840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:10.785942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:10.854047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:10.854102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:10.866699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:10.866938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:10.867041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:10.874310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:10.874476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:10.877840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:10.878619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:10.884435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:10.890012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:10.890162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:10.890817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:10.890866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:10.890925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:10.891027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.896266Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:11.018032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:11.018263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.018466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:11.018713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:11.018766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.021047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.021161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:11.021339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.021380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:11.021412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:11.021460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:11.023277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.023343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:11.023380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:11.024860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.024902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.024945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.024989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.033043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:11.035025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:11.035163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:11.035899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.036001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.036055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.036247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:11.036289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.036436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:11.036508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:11.038140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.038196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.038339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.038382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:11.038678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.038716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:11.038787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.038816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.038846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.038901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.038949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:11.038979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.039009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:11.039031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:11.039076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.039104Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:11.039126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:11.040356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:11.040450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:11.040483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 5 msg type: 269090816 2025-04-17T18:11:11.141972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2025-04-17T18:11:11.142538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.142621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.142661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2025-04-17T18:11:11.142724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.142767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:11:11.142798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:11:11.142831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:11:11.142854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:11:11.142910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:11:11.142956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T18:11:11.142982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-17T18:11:11.143016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:11:11.143042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:11:11.143065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T18:11:11.143132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T18:11:11.143160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-04-17T18:11:11.143184Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T18:11:11.143204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-17T18:11:11.143915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:11:11.144288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:11:11.145877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.145927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.146084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T18:11:11.146237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.146271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-17T18:11:11.146309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-04-17T18:11:11.146905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:11:11.146998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:11:11.147041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:11:11.147079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T18:11:11.147120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T18:11:11.147490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:11:11.147587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:11:11.147618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:11:11.147644Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T18:11:11.147673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing 
for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T18:11:11.147743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:11:11.148096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:11:11.148144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T18:11:11.148251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:11:11.150381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:11:11.151365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:11:11.151455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:11:11.151751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:11:11.151795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:11:11.152391Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:11:11.152487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:11:11.152555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:399:2390] TestWaitNotification: OK eventTxId 105 2025-04-17T18:11:11.153234Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:11:11.153449Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 218us result status StatusPathDoesNotExist 2025-04-17T18:11:11.153630Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:11:11.154275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:11:11.154476Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 179us result status StatusSuccess 2025-04-17T18:11:11.154929Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:10.783573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:10.783800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:10.783855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:10.783894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:10.784701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:10.784768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:10.784847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:10.784923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:10.786028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:10.854045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:10.854095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:10.865457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:10.865747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:10.865852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:10.874318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:10.874469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:10.877841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:10.878666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:10.884054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:10.890223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:10.890309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:10.890817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:10.890860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:10.890927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:10.891035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.896193Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:10.984831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:10.986291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.987719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:10.988585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, 
opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:10.988636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.991187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:10.991289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:10.991421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.991460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:10.991486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:10.991535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:10.992932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.992971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:10.992996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:10.994279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.994320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:10.994366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:10.994428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:10.997858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:10.999612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:11.000417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:11.001347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.001480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-17T18:11:11.001518Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.002522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:11.002583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.003449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:11.003524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:11.005762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.005810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.006045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.006088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:11.006497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.006625Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:11.006702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.006742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.006781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.006818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.006848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:11.006876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.006912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:11.006935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:11.006978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.007003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:11.007042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:11.008311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:11.008413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-17T18:11:11.008439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ationSubscriber for txId 102: satisfy waiter [1:320:2311] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-17T18:11:11.073562Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:11:11.073684Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 133us result status StatusSuccess 2025-04-17T18:11:11.075128Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-17T18:11:11.080548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:11.080651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.080711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:11.080810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:11.080849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.082410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.082504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-04-17T18:11:11.082618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.082651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.082688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-17T18:11:11.082778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:11.084006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-17T18:11:11.084164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-17T18:11:11.084393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.084471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.084507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-17T18:11:11.084652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:11:11.084690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:11:11.084720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:11:11.084746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:11:11.084799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.084844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-17T18:11:11.084895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:11.084955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:11:11.084982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:11:11.085004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:11:11.085038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:11:11.085068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-17T18:11:11.085092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 
72057594046678944, LocalPathId: 1], 6 FAKE_COORDINATOR: Erasing txId 103 2025-04-17T18:11:11.086393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.086435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.086590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.086631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-17T18:11:11.086978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:11:11.087054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:11:11.087079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:11:11.087105Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-17T18:11:11.087132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.087211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T18:11:11.088312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:11:11.088523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T18:11:11.088558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T18:11:11.088892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T18:11:11.088958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:11:11.088981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 103 2025-04-17T18:11:11.089323Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:11:11.089472Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 130us result status StatusSuccess 2025-04-17T18:11:11.089773Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 
1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD]
>> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD]
>> TSchemeShardUserAttrsTest::VariousUse
>> TSchemeShardUserAttrsTest::Boot [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:11.567521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:11.567579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:11.567605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:11.567627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:11.567659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:11.567690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:11.567743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:11.567805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s,
IsManualStartup# false 2025-04-17T18:11:11.568037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:11.625174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:11.625223Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:11.635571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:11.635903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:11.636018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:11.642675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:11.642835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:11.643265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.643438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:11.645574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.646526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.646565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.646719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:11.646753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.646787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:11.646872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.651818Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:11.748921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:11.749158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.749324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:11.749507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:11.749543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.751960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-17T18:11:11.752123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:11.752343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.752412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:11.752446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:11.752494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:11.754483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.754543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:11.754578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:11.756284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.756344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.756397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.756432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.758990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:11.760342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:11.760479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:11.761197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.761311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.761352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.761613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:11.761661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.761826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:11.761962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:11.763815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.763849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.763982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.764012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:11.764285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.764314Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:11.764392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.764418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.764443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.764481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.764506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:11.764536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.764557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:11.764575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:11.764618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.764657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:11.764676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:11.770519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:11.770632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:11.770662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4-17T18:11:11.805397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T18:11:11.805449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T18:11:11.805521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.805559Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:11.805608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T18:11:11.805711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:11.807188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.807282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.807332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:11:11.807369Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-17T18:11:11.807404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.808172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.808270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.808303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:11:11.808342Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T18:11:11.808379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:11:11.808446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T18:11:11.809885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T18:11:11.809996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: 
Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2025-04-17T18:11:11.810772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.810872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.810938Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-04-17T18:11:11.811065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-17T18:11:11.811217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:11.811271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:11:11.811637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:11:11.813072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:11:11.814128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.814164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.814319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:11:11.814425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.814456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T18:11:11.814493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T18:11:11.814740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.814776Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:11:11.814865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:11:11.814897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:11:11.814951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:11:11.814981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:11:11.815023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T18:11:11.815069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:11:11.815102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:11:11.815130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:11:11.815186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:11:11.815218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T18:11:11.815250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-17T18:11:11.815275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-17T18:11:11.816045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.816123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.816150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:11:11.816186Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:11:11.816231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.817303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.817373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:11:11.817409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:11:11.817442Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:11:11.817471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:11:11.817529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T18:11:11.819411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:11:11.820474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-17T18:11:11.823137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { 
Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:11.823387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.823491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2025-04-17T18:11:11.825325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.825473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:11.774059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:11.774135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:11.774165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:11.774217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:11.774260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:11.774303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:11.774363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:11.774418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:11.774746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:11.832256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:11.832298Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:11:11.843587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:11.843905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:11.844046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:11.850168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:11.850334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:11.850831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.850991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:11.852981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.853879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.853918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.854091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:11.854124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.854152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:11.854251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.859092Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:11.941577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:11.941832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.942002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:11.942235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:11.942281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.944185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.944360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:11.944559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.944634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:11.944677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:11.944709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:11.946641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.946692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:11.946728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:11.948469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.948517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.948571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.948619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.951602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:11.953457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:11.953607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:11.954455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.954563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:11.954603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.954822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:11.954868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:11.955009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:11.955084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:11.956866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.956902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.957044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.957079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:11.957425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.957460Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:11.957538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.957575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.957611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:11.957652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.957713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:11.957747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:11.957774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:11.957797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:11.957840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:11.957874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:11.957900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:11.959387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:11.959474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:11.959501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:11:12.021148Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:11:12.021226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:11:12.021266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:330:2321] TestWaitNotification: OK eventTxId 102 2025-04-17T18:11:12.021681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:11:12.021832Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 159us result status StatusSuccess 2025-04-17T18:11:12.022020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-17T18:11:12.024058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:12.024190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.024290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-17T18:11:12.024441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:12.024488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.026497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-04-17T18:11:12.026630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-04-17T18:11:12.026837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.026882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.026932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-17T18:11:12.027047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:12.028494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-17T18:11:12.028598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-17T18:11:12.028799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:12.028868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:12.028907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-17T18:11:12.029054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:11:12.029081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:11:12.029110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:11:12.029133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:11:12.029172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:11:12.029222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-17T18:11:12.029267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:11:12.029296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:11:12.029319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:11:12.029352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:11:12.029387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:11:12.029418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-17T18:11:12.029456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T18:11:12.030956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:12.031003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:11:12.031142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:12.031177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-04-17T18:11:12.031567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:11:12.031662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:11:12.031699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:11:12.031734Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:11:12.031790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:11:12.031867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T18:11:12.033336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-17T18:11:12.033589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-17T18:11:12.033630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-17T18:11:12.033955Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-17T18:11:12.034009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:11:12.034033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:347:2338] TestWaitNotification: OK eventTxId 103 2025-04-17T18:11:12.034588Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:11:12.034761Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
path "/MyRoot/DirA" took 180us result status StatusSuccess 2025-04-17T18:11:12.035095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:11.842328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:11.842396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:11.842426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:11.842452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:11.842485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:11.842514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:11.842559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:11.842612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:11.842888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:11.903990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:11.904055Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T18:11:11.916905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:11.917249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:11.917382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:11.925894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:11.926144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:11.926886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:11.927103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:11.929322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.930289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:11.930327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:11.930486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:11.930517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:11.930547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:11.930621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:11.937142Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:12.064031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:12.064274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.064487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:12.064714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:12.064765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.067049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:12.067186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:12.067377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.067429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:12.067460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:12.067489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:12.069483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.069546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:12.069582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:12.071265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.071315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.071372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:12.071420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.075111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:12.077182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:12.077384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:12.078362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:12.078524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:12.078579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:12.078852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:12.078906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:12.079071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:12.079158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:12.081315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:12.081373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:12.081547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:12.081587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:12.081932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.081970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:12.082050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:12.082084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.082117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:12.082159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.082211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:12.082264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.082312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:12.082341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:12.082401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:12.082434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:12.082465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:12.084150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:12.084243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:12.084274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:11:12.084305Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:11:12.084357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:12.084445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:11:12.087364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1 2025-04-17T18:11:12.087846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 >> TSchemeShardUserAttrsTest::VariousUse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:12.576560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:12.576624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:12.576649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:12.576674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:12.576708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:12.576739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:12.576794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:12.576847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:12.577097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:12.633732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:12.633777Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:12.645150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:12.645479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:12.645605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:12.651980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:12.652149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:12.652672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:12.652837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:12.654933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:12.655873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:12.655914Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:12.656076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:12.656121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:12.656160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:12.656246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.660902Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:12.752438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:12.752683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.752878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:12.753099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:12.753142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.755399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:12.755603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:12.755815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.755891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:12.755924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:12.755968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:12.758093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.758151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:12.758206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:12.760000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.760048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T18:11:12.760104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:12.760162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.763761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:12.765601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:12.765793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:12.766729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:12.766859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:12.766907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:12.767201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:12.767257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:12.767420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:12.767544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:12.769477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:12.769524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:12.769715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:12.769764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:12.770195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.770246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:12.770330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:12.770368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.770405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:12.770450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.770488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:12.770548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:12.770589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:12.770618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:12.770673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:12.770710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:12.770741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:12.772461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:12.772561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:12.772599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1/1 2025-04-17T18:11:12.952092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T18:11:12.952153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:12.952213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:11:12.952276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T18:11:12.952332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-04-17T18:11:12.952390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-17T18:11:12.952425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-17T18:11:12.952480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-17T18:11:12.952550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T18:11:12.952594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 3, subscribers: 0 2025-04-17T18:11:12.952640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-17T18:11:12.952713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2025-04-17T18:11:12.952748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, 
LocalPathId: 4], 18446744073709551615 2025-04-17T18:11:12.953730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T18:11:12.954434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T18:11:12.955451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:12.955489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:12.955598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:11:12.955632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T18:11:12.955714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:12.955754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-17T18:11:12.955789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-04-17T18:11:12.955803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 4 FAKE_COORDINATOR: Erasing txId 112 2025-04-17T18:11:12.956325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T18:11:12.956414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T18:11:12.956460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2025-04-17T18:11:12.956518Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T18:11:12.956556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:11:12.957019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T18:11:12.957081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T18:11:12.957104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-04-17T18:11:12.957128Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 
2025-04-17T18:11:12.957172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:11:12.957851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T18:11:12.957933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-17T18:11:12.957959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-04-17T18:11:12.957982Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T18:11:12.958012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T18:11:12.958108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-04-17T18:11:12.958690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:11:12.958736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T18:11:12.958799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:11:12.960154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T18:11:12.960524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T18:11:12.961920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-17T18:11:12.962008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-17T18:11:12.962385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-17T18:11:12.962440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-17T18:11:12.963082Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-17T18:11:12.963165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-17T18:11:12.963201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:495:2486] TestWaitNotification: OK eventTxId 112 2025-04-17T18:11:12.963973Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:11:12.964173Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 226us result status StatusSuccess 2025-04-17T18:11:12.964518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-04-17T18:11:12.968647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:12.968844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-04-17T18:11:12.968966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T18:11:12.971196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:12.971351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 |89.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotRead::TestReadOnly-withSink >> KqpTx::TooManyTx >> KqpLocks::TwoPhaseTx >> KqpSinkLocks::EmptyRange >> KqpSinkTx::OlapDeferredEffects >> KqpSinkMvcc::OlapNamedStatement >> KqpSnapshotIsolation::TSimpleOltpNoSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> KqpTx::DeferredEffects >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-04-17T18:10:47.562940Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346192984374366:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.563026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:47.599170Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346191539336802:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:47.600076Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00199e/r3tmp/tmpORXvzF/pdisk_1.dat 2025-04-17T18:10:47.741810Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.743101Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:47.938749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.938843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.939681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:47.939733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:47.941900Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:47.942069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.942456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:47.983051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18435, node 1 2025-04-17T18:10:47.993918Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:47.993954Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:48.106115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00199e/r3tmp/yandexKrKcnz.tmp 2025-04-17T18:10:48.106200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00199e/r3tmp/yandexKrKcnz.tmp 2025-04-17T18:10:48.107408Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00199e/r3tmp/yandexKrKcnz.tmp 2025-04-17T18:10:48.107622Z node 
1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:48.276540Z INFO: TTestServer started on Port 28393 GrpcPort 18435 TClient is connected to server localhost:28393 PQClient connected to localhost:18435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:48.536832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:48.593584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:10:50.288818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346205869277419:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.288936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346205869277397:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.289030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.292526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:10:50.295676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346205869277459:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.295762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:50.308565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346205869277425:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:10:50.565193Z node 1 :TX_PROXY ERROR: Actor# [1:7494346205869277505:2799] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:50.653430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:50.669116Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346205869277525:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:10:50.669339Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ0NGZjZjgtNWMzYzE5ODAtMmM5YmI2ZDMtNmRiZWNiOWY=, ActorId: [1:7494346205869277393:2340], ActorState: ExecuteState, TraceId: 01js2f4h980cypdr4yjpqc0pm0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:10:50.671304Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:10:50.735373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:50.824934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:10:51.126870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2f4hwmdxzst7tsfbqe3n54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNmODMwNi03MmZiMjk2Yy1hNjkyN2I2My0xNmRlMGQyMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494346210164245202:3087] 2025-04-17T18:10:52.563129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346192984374366:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:52.563199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:52.598997Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346191539336802:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:52.599079Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-04-17T18:10:56.914062Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494346192984374622:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:10:56.914276Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494346192984374622:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-17T18:10:56.914358Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494346192984374622:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494346197279342411:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913448607 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:10:56.914423Z node 1 :TX_P ... false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 23 IsSync: true Partial: 0 } 2025-04-17T18:11:15.273179Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494346253236081625:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Cluster PathId: Partial: 0 } 2025-04-17T18:11:15.273230Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494346253236081625:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Cluster PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7494346261826017142:2749] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913464070 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-04-17T18:11:15.273292Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346253236081625:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346261826017142:2749] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913464070 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Cluster TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 24 IsSync: true Partial: 0 } 2025-04-17T18:11:15.273358Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494346253236081625:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 } 2025-04-17T18:11:15.273403Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494346253236081625:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: 
/Root/PQ/Config/V2/Versions PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7494346266120984585:2840] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913464189 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-04-17T18:11:15.273453Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346253236081625:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346266120984585:2840] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913464189 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Versions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 24 IsSync: true Partial: 0 } 2025-04-17T18:11:15.273637Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346313365628674:5474], recipient# [3:7494346313365628672:2877], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:15.273771Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346313365628675:5475], recipient# [3:7494346313365628673:2878], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: 
[OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:15.274008Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494346253236081625:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:15.274093Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346253236081625:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346261826017142:2749] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913464070 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:15.274188Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494346253236081625:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:15.274244Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346253236081625:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346266120984585:2840] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744913464189 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:15.274338Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346313365628678:5476], recipient# [3:7494346253236081609:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:15.274362Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346313365628679:5477], 
recipient# [3:7494346253236081609:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:11:15.274442Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494346253236081625:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:15.274501Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494346253236081625:2127], cacheItem# { Subscriber: { Subscriber: [3:7494346253236082115:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913461648 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:15.274619Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494346313365628680:5478], recipient# [3:7494346253236081609:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 20947, MsgBus: 15993 2025-04-17T18:09:48.722520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345939648359121:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:48.734647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c74/r3tmp/tmp2oZupK/pdisk_1.dat 2025-04-17T18:09:49.223598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:09:49.223735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T18:09:49.231391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:09:49.238325Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20947, node 1 2025-04-17T18:09:49.366870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:09:49.366896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:09:49.366907Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:09:49.367017Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15993 TClient is connected to server localhost:15993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:09:49.961834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:49.994527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:09:50.016457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.160478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.312178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:50.404092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:09:52.007075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345952533262777:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.007198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.328950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.397425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.424311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.448569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.481110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.527278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.597392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:09:52.656005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345956828230740:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.656076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.656391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345956828230745:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:09:52.660108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:09:52.670219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494345956828230747:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:09:52.774139Z node 1 :TX_PROXY ERROR: Actor# [1:7494345956828230804:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:09:53.707298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:09:53.725829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345939648359121:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:09:53.725893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:09:54.059323Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710674 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-17T18:09:54.092137Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:54.092252Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345965418165964:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:54.092501Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715757 2025-04-17T18:09:54.092568Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7494345965418165964:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, 
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:09:54.092955Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:09:54.092998Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: u ... ildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:49.850635Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710674, txId# 281474976715766 2025-04-17T18:10:49.850689Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:49.850893Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:49.850941Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 
72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:49.851812Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710674, cookie: 281474976710674, txId: 281474976715766, status: StatusAccepted 2025-04-17T18:10:49.851913Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 17 2025-04-17T18:10:49.852194Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:49.852257Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 
102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:49.854136Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfoId: 281474976710674 2025-04-17T18:10:49.854226Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfo: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:49.854541Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:49.854598Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:49.854628Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:10:49.854911Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710674 2025-04-17T18:10:49.854977Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710674, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 17], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7494346200805522045:2537], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:10:49.855001Z node 3 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710674, subscribers count# 1 2025-04-17T18:10:49.855930Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710674 2025-04-17T18:10:49.856071Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710674 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-17T18:10:49.857079Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-04-17T18:10:49.857097Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037931 not found 2025-04-17T18:10:49.857112Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037932 not found 2025-04-17T18:10:49.857128Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037930 not found 2025-04-17T18:10:59.642792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:59.642828Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:59.682264Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710674 >> KqpTx::RollbackTx >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive2 >> KqpTx::DeferredEffects [GOOD] >> KqpTx::CommitStats >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail-useSink >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 17340, MsgBus: 29610 2025-04-17T18:11:14.635305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346307775380664:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:14.635418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
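[editor's note] The BUILD_INDEX entries further up trace the index-build state machine from Locking through Unlocking to Done for a global vector kmeans-tree index over (user, emb) with metric SIMILARITY_COSINE, uint8 vectors of dimension 2, clusters: 2 and levels: 1. A rough sketch only: the WITH(...) parameter names follow YDB's documented vector-index YQL and are an assumption, since the log records only the resulting protobuf; endpoint, database and table path are placeholders.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/stream/output.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    // Placeholder endpoint/database; not taken from the log above.
    TDriver driver(TDriverConfig()
        .SetEndpoint("grpc://localhost:2136")
        .SetDatabase("/Root"));
    TTableClient client(driver);
    auto session = client.CreateSession().GetValueSync().GetSession();

    // Mirrors the settings logged by TIndexBuilder above; parameter names
    // (similarity, vector_type, ...) are assumed from the documented syntax.
    auto status = session.ExecuteSchemeQuery(R"(
        ALTER TABLE `/Root/TestTable` ADD INDEX index
        GLOBAL USING vector_kmeans_tree ON (user, emb)
        WITH (similarity=cosine, vector_type="uint8",
              vector_dimension=2, clusters=2, levels=1);
    )").GetValueSync();
    if (!status.IsSuccess()) {
        Cerr << status.GetIssues().ToString() << Endl;
    }
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}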
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001820/r3tmp/tmpRoRnyy/pdisk_1.dat 2025-04-17T18:11:14.960111Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17340, node 1 2025-04-17T18:11:15.006323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:15.006442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:15.008062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:15.071260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.071286Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.071291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.071391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29610 TClient is connected to server localhost:29610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:15.662000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.692362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.818152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.928681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.983730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.878914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346316365317030:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:16.878985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.250867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.273107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.293542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.315636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.340149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.372644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.439674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.482765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346320660285000:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.482810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.482826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346320660285005:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.485415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:17.492149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346320660285007:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:17.582261Z node 1 :TX_PROXY ERROR: Actor# [1:7494346320660285061:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> KqpSinkTx::SnapshotRO >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink >> KqpTx::CommitStats [GOOD] >> KqpTx::SnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 22040, MsgBus: 2689 2025-04-17T18:11:15.239958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346309802954769:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:15.240104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00180e/r3tmp/tmpLXvzOb/pdisk_1.dat 2025-04-17T18:11:15.522938Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22040, node 1 2025-04-17T18:11:15.533972Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:11:15.554276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.554305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.554311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.554443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:15.589076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:15.589145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:15.591028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2689 TClient is connected to server localhost:2689 WaitRootIsUp 'Root'... 
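[editor's note] KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink, reported [GOOD] above, checks that a read-only snapshot transaction commits even when a concurrent write lands. A minimal sketch of that transaction mode with the C++ SDK, assuming a placeholder table `/Root/KV` and a session obtained as in the earlier sketch:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

// The SELECT runs against one consistent snapshot, so a concurrent UPSERT
// from another session cannot invalidate this transaction's commit.
TDataQueryResult SnapshotRead(TSession& session) {
    return session.ExecuteDataQuery(R"(
        SELECT Value FROM `/Root/KV` WHERE Key = 1u;
    )", TTxControl::BeginTx(TTxSettings::SnapshotRO()).CommitTx())
        .GetValueSync();
}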
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:15.937144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.952269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.055600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.179370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.227478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:17.427308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318392891143:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.427400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.608053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.632992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.658980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.681473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.706274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.734661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.764547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.841880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318392891802:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.841951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.841978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318392891807:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.845073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:17.852835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346318392891809:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:17.924268Z node 1 :TX_PROXY ERROR: Actor# [1:7494346318392891864:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21446, MsgBus: 19899 2025-04-17T18:11:19.448502Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346327177772622:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:19.448578Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00180e/r3tmp/tmpHLiLaW/pdisk_1.dat 2025-04-17T18:11:19.533243Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21446, node 2 2025-04-17T18:11:19.580382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:19.580478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:19.581721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:19.586036Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:19.586055Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:19.586061Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:19.586128Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19899 TClient is connected to server localhost:19899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:19.925921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.934042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
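[editor's note] KqpTx::CommitStats, whose startup is logged around this point, verifies that query statistics are returned together with the commit. A hedged sketch of requesting them via the C++ SDK; the table, literals, and the use of ToString() on the stats object are assumptions, not taken from the test source:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/stream/output.h>

using namespace NYdb;
using namespace NYdb::NTable;

void UpsertWithStats(TSession& session) {
    // Ask the server to attach per-query statistics to the result.
    TExecDataQuerySettings settings;
    settings.CollectQueryStats(ECollectQueryStatsMode::Basic);

    auto result = session.ExecuteDataQuery(R"(
        UPSERT INTO `/Root/KV` (Key, Value) VALUES (2u, "two");
    )", TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx(),
        settings).GetValueSync();

    if (result.IsSuccess() && result.GetStats()) {
        Cout << result.GetStats()->ToString() << Endl;  // per-phase access stats
    }
}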
2025-04-17T18:11:20.041861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:20.192371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:20.256706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:22.166806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346340062676281:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.166929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.195679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.223576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.250374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.279856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.310474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.340650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.371792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.423363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346340062676938:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.423435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346340062676944:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.423450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.426270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:22.433758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346340062676946:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:22.501399Z node 2 :TX_PROXY ERROR: Actor# [2:7494346340062677001:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpLocks::MixedTxFail-useSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 19437, MsgBus: 26543 2025-04-17T18:11:14.635427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346305556712406:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:14.635522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001816/r3tmp/tmpEIXBLn/pdisk_1.dat 2025-04-17T18:11:14.958309Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19437, node 1 2025-04-17T18:11:14.993945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:14.996027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:14.998091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:15.071264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.071319Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.071327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.071442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26543 TClient is connected to server localhost:26543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:15.662209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
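[editor's note] KqpTx::SnapshotROInteractive2, whose startup is logged around this point, drives an interactive snapshot read-only transaction: several statements share one snapshot before the commit. A minimal sketch, again assuming the placeholder table `/Root/KV` and a session set up as in the first sketch:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

void SnapshotROInteractive(TSession& session) {
    // Open the snapshot transaction with the first read; no CommitTx() yet,
    // so the transaction stays open on the server.
    auto first = session.ExecuteDataQuery(R"(
        SELECT Value FROM `/Root/KV` WHERE Key = 1u;
    )", TTxControl::BeginTx(TTxSettings::SnapshotRO())).GetValueSync();

    auto tx = first.GetTransaction();  // TMaybe<TTransaction>

    // The second read reuses the same snapshot and commits the transaction.
    auto second = session.ExecuteDataQuery(R"(
        SELECT Value FROM `/Root/KV` WHERE Key = 2u;
    )", TTxControl::Tx(*tx).CommitTx()).GetValueSync();
}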
2025-04-17T18:11:15.689991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.829312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.937446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.983104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:17.052022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318441616064:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.052108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.256562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.278927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.300669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.322202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.345875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.372422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.399547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.458578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318441616716:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.458627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.458730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318441616721:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.461165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:17.467306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346318441616723:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:17.525984Z node 1 :TX_PROXY ERROR: Actor# [1:7494346318441616777:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:18.405612Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjgxNjJiNmYtM2FjZWY0NWMtZDIxMmNlZTItMjcxNGNiYjM=, ActorId: [1:7494346322736584405:2511], ActorState: ReadyState, TraceId: 01js2f5cqtdamwsny4q33x1mmc, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:861: Too many transactions, current active: 2 MaxTxPerSession: 2 Trying to start YDB, gRPC: 25869, MsgBus: 29980 2025-04-17T18:11:19.078993Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346327163518575:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:19.079090Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001816/r3tmp/tmpUzOcFb/pdisk_1.dat 2025-04-17T18:11:19.173125Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25869, node 2 2025-04-17T18:11:19.212712Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:19.212816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:19.214434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:19.252174Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:19.252205Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:19.252214Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:19.252355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29980 TClient is connected to server localhost:29980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:11:19.608640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.624500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.694070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.810748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.866511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:21.815093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346335753454938:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.815173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.869548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.894558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.923310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.949650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.975992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.006670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.036305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.087776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346340048422892:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.087867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.088023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346340048422897:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.092736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:22.100283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346340048422899:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:22.172712Z node 2 :TX_PROXY ERROR: Actor# [2:7494346340048422956:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogCountByResource >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] >> KqpSinkLocks::EmptyRangeOlap >> KqpTx::RollbackByIdle >> KqpTx::RollbackTx2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2025-04-17T18:05:39.232187Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:05:39.386080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:05:39.412428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:05:39.413299Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:05:39.425691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:05:39.425951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:05:39.426213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:05:39.426326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:05:39.426424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:05:39.426545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:05:39.426646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:05:39.426793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:05:39.426930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:05:39.427046Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.427145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:05:39.427236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:05:39.459269Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:05:39.459453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:05:39.459510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:05:39.459704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:39.461368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:05:39.461465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:05:39.461521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:05:39.461641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:05:39.461740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:05:39.461791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:05:39.461824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:05:39.462032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:05:39.462106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:05:39.462147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:05:39.462200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:05:39.462297Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:05:39.462353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:05:39.462415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:05:39.462446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:05:39.462516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:05:39.462565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:05:39.462613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:05:39.462671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:05:39.462713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:05:39.462742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:05:39.463184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-17T18:05:39.463283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:05:39.463379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-17T18:05:39.463499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=72; 2025-04-17T18:05:39.463655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:05:39.463708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:05:39.463742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:05:39.464004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T18:05:39.464070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.464107Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:05:39.464263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:05:39.464310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:05:39.464342Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:05:39.464546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:05:39.464593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:05:39.464624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:05:39.464760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:05:39.464801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:05:39.464851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:106;method=PutObject;id=[9437184:2:100:255:135:2848:0]; 2025-04-17T18:11:22.844585Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:136:2848:0]; 2025-04-17T18:11:22.844633Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:137:2848:0]; 2025-04-17T18:11:22.844671Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:138:2848:0]; 2025-04-17T18:11:22.844710Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:139:2840:0]; 2025-04-17T18:11:22.844748Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:140:2848:0]; 2025-04-17T18:11:22.844787Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:141:2840:0]; 2025-04-17T18:11:22.844825Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:142:2848:0]; 2025-04-17T18:11:22.844861Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:143:2840:0]; 2025-04-17T18:11:22.844896Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:144:2848:0]; 2025-04-17T18:11:22.844934Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:145:2784:0]; 2025-04-17T18:11:22.844971Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:146:2784:0]; 2025-04-17T18:11:22.845013Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:147:2792:0]; 2025-04-17T18:11:22.845095Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:148:2792:0]; 2025-04-17T18:11:22.845166Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:149:2768:0]; 2025-04-17T18:11:22.845223Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:150:2784:0]; 2025-04-17T18:11:22.845262Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:151:2776:0]; 2025-04-17T18:11:22.845306Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:100:255:152:9504:0]; 2025-04-17T18:11:23.557890Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:11:23.558580Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[304] (CS::INDEXATION) apply at tablet 9437184 2025-04-17T18:11:23.597850Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:124 Blob count: 699 2025-04-17T18:11:23.603788Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=8632904;raw_bytes=13115202;count=6;records=133336} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5183268;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=208144648;raw_bytes=316096877;count=140;records=3215388} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:11:23.860796Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:11:23.861378Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[306] (CS::INDEXATION) apply at tablet 9437184 2025-04-17T18:11:23.891837Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:125 Blob count: 549 2025-04-17T18:11:23.896146Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted 
{blob_bytes=8632904;raw_bytes=13115202;count=6;records=133336} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5183268;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=208144648;raw_bytes=316096877;count=140;records=3215388} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:11:24.199016Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:11:24.199740Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[308] (CS::GENERAL) apply at tablet 9437184 2025-04-17T18:11:24.296533Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:126 Blob count: 422 2025-04-17T18:11:24.299149Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=8632904;raw_bytes=13115202;count=6;records=133336} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5183268;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=208144648;raw_bytes=316096877;count=140;records=3215388} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:11:25.724232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-17T18:11:25.724313Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;fline=with_appended.cpp:65;portions=150,151,;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8; 2025-04-17T18:11:25.726469Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::5a96a7f2-1bb711f0-bee4ab36-4b548be8; 2025-04-17T18:11:25.726529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:11:25.726590Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=columnshard_impl.cpp:768;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3059560;indexing_debug={task_ids=5a96f176-1bb711f0-8a49de03-47144181,;}; 2025-04-17T18:11:25.726627Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=24; 2025-04-17T18:11:25.726690Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912845274;tx_id=18446744073709551615;;current_snapshot_ts=1744913141432; 2025-04-17T18:11:25.726724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=24;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:11:25.726762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:11:25.726793Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:11:25.726848Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.104500s; 2025-04-17T18:11:25.726887Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:11:25.728593Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:124 Blob count: 699 2025-04-17T18:11:25.757599Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=98;external_task_id=5a96a7f2-1bb711f0-bee4ab36-4b548be8;mem=3571882;cpu=0; 2025-04-17T18:11:25.758384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:768;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3059560;indexing_debug={task_ids=5a96f176-1bb711f0-8a49de03-47144181,;}; 2025-04-17T18:11:25.758492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-17T18:11:25.758535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;fline=with_appended.cpp:65;portions=152,153,;task_id=5a96f176-1bb711f0-8a49de03-47144181; 2025-04-17T18:11:25.760585Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::5a96f176-1bb711f0-8a49de03-47144181; 2025-04-17T18:11:25.760644Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:11:25.760704Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:11:25.760749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=24; 2025-04-17T18:11:25.760799Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744912845274;tx_id=18446744073709551615;;current_snapshot_ts=1744913141432; 2025-04-17T18:11:25.760840Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=24;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:11:25.760877Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:11:25.760915Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:11:25.760988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.104500s; 2025-04-17T18:11:25.761027Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5a96f176-1bb711f0-8a49de03-47144181;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:11:25.761099Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:125 Blob count: 549 2025-04-17T18:11:25.764913Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=99;external_task_id=5a96f176-1bb711f0-8a49de03-47144181;mem=2799958;cpu=0; 2025-04-17T18:11:25.765387Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 17538, MsgBus: 8775 2025-04-17T18:11:18.756698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346325913920199:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:18.756812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001807/r3tmp/tmpVykYwg/pdisk_1.dat 2025-04-17T18:11:19.018409Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17538, node 1 2025-04-17T18:11:19.093382Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:19.093408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:19.093418Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:19.093590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:19.117470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:19.117600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:19.119723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8775 TClient is connected to server localhost:8775 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:19.493283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.506898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.620246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.748092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.820216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:21.331085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346338798823864:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.331167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.596604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.621948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.647139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.673057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.735980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.759154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.825739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.866396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346338798824528:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.866459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.866506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346338798824533:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.869605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:21.877353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346338798824535:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:21.972460Z node 1 :TX_PROXY ERROR: Actor# [1:7494346338798824590:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:22.829272Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWE3NWNiYWYtZWMwOGFmY2EtMWZjMDg3MWEtN2ExMGJiZDI=, ActorId: [1:7494346343093792205:2511], ActorState: ReadyState, TraceId: 01js2f5h276c6wkq752e493z5h, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21625, MsgBus: 24120 2025-04-17T18:11:23.378804Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346344633963330:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:23.378880Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001807/r3tmp/tmpr33wJU/pdisk_1.dat 2025-04-17T18:11:23.472343Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21625, node 2 2025-04-17T18:11:23.522825Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:23.522848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:23.522856Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:23.522946Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:23.531704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:23.531764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:23.533341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24120 TClient is connected to server localhost:24120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:23.896633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:23.910369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:23.979721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:24.088734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:24.143419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:25.886883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346353223899685:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:25.886984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:25.932492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:25.954259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:25.975737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.001577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.027302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.057361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.083932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.159813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346357518867642:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.159900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.160004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346357518867647:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.162671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:26.170424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346357518867649:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:26.232443Z node 2 :TX_PROXY ERROR: Actor# [2:7494346357518867704:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:27.080411Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjM2ZWQ0OTgtNTFhYzNmYzAtYWQwMjI4MjItOTFlNWUxZWU=, ActorId: [2:7494346357518868022:2510], ActorState: ReadyState, TraceId: 01js2f5n78fqt71k7at5598yd5, Create QueryResponse for error on request, msg: >> KqpSinkMvcc::OltpNamedStatementNoSink >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::OlapExplicitTcl >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> KqpTx::RollbackByIdle [GOOD] >> KqpTx::RollbackInvalidated >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> KqpSinkMvcc::OlapMultiSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 9786, MsgBus: 64691 2025-04-17T18:11:18.887801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346325418959487:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:18.888070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001806/r3tmp/tmp9hQl7t/pdisk_1.dat 2025-04-17T18:11:19.132664Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9786, node 1 2025-04-17T18:11:19.190463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:19.190505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:19.190512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:19.190633Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:19.230504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:19.230640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:19.232292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64691 TClient is connected to server localhost:64691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:19.641034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.662315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.791137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.908974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:19.982912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:21.197279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346338303863153:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.197396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.407195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.433275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.460695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.488412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.513573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.579775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.646250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.693784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346338303863819:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.693834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346338303863824:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.693843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.697220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:21.706981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346338303863826:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:21.789457Z node 1 :TX_PROXY ERROR: Actor# [1:7494346338303863880:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:23.071239Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjJhMzViYTUtYTVhNTU4MGYtYzExNzYxZTMtN2FlYzBlNzc=, ActorId: [1:7494346342598831473:2502], ActorState: ExecuteState, TraceId: 01js2f5h7j9pw9y5sb1jatxc47, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16802, MsgBus: 6116 2025-04-17T18:11:23.728930Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346344814752572:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:23.728995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001806/r3tmp/tmpLJPYNx/pdisk_1.dat 2025-04-17T18:11:23.806917Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16802, node 2 2025-04-17T18:11:23.854227Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:23.854253Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:23.854259Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:23.854346Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:23.858091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:23.858171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:23.859561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6116 TClient is connected to server localhost:6116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:24.150401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:24.168277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:24.211707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:24.325720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:24.378366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:26.191334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346357699656239:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.191463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.213504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.236295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.255189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.275055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.296501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.320116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.345932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.418057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346357699656900:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.418149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.418219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346357699656905:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:26.420532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:26.427606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346357699656907:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:26.512935Z node 2 :TX_PROXY ERROR: Actor# [2:7494346357699656965:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:28.729107Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346344814752572:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:28.729184Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:11:29.536678Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715674; 2025-04-17T18:11:29.537868Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346370584559432:2511], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [2:7494346361994624584:2511]Got LOCKS BROKEN for table `/Root/EightShard`. ShardID=72075186224037891, Sink=[2:7494346370584559432:2511].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:29.538319Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346370584559412:2511], SessionActorId: [2:7494346361994624584:2511], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7494346361994624584:2511]. isRollback=0 2025-04-17T18:11:29.538495Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGEzYmNjN2QtZWYwNjQyYjEtY2M1ZTg1OTctYjQ4MDI1MTk=, ActorId: [2:7494346361994624584:2511], ActorState: ExecuteState, TraceId: 01js2f5qfmc9ys7jnxkr8b6971, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494346370584559413:2511] from: [2:7494346370584559412:2511] 2025-04-17T18:11:29.538571Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346370584559413:2511] TxId: 281474976715674. Ctx: { TraceId: 01js2f5qfmc9ys7jnxkr8b6971, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGEzYmNjN2QtZWYwNjQyYjEtY2M1ZTg1OTctYjQ4MDI1MTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:29.538725Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGEzYmNjN2QtZWYwNjQyYjEtY2M1ZTg1OTctYjQ4MDI1MTk=, ActorId: [2:7494346361994624584:2511], ActorState: ExecuteState, TraceId: 01js2f5qfmc9ys7jnxkr8b6971, Create QueryResponse for error on request, msg: 2025-04-17T18:11:29.538923Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715674; 2025-04-17T18:11:29.539080Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1744913489585 : 281474976715674] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> KqpTx::RollbackInvalidated [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-04-17T18:10:49.627428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346201645024548:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:49.627643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:49.654871Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346198229695885:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:49.655100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:10:49.784708Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001972/r3tmp/tmpOOuGKE/pdisk_1.dat 2025-04-17T18:10:49.807240Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:10:49.970372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:49.970461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:49.972563Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:49.973264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:49.994875Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11779, 
node 1 2025-04-17T18:10:50.008164Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:50.008199Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:10:50.021142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:50.021213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:50.027742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:50.036855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001972/r3tmp/yandexuEtpwI.tmp 2025-04-17T18:10:50.036877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001972/r3tmp/yandexuEtpwI.tmp 2025-04-17T18:10:50.037038Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001972/r3tmp/yandexuEtpwI.tmp 2025-04-17T18:10:50.037152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:50.084553Z INFO: TTestServer started on Port 13499 GrpcPort 11779 TClient is connected to server localhost:13499 PQClient connected to localhost:11779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:10:50.299521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:10:50.343174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:10:52.344337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346214529927597:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:52.344411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346214529927606:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:52.344457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:52.347632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:10:52.351162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346214529927645:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:52.351528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:52.362923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346214529927611:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:10:52.426545Z node 1 :TX_PROXY ERROR: Actor# [1:7494346214529927685:2804] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:52.603383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:10:52.604108Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346214529927703:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:10:52.604325Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY4OGE5ZTQtYmE0NTY1MWQtNGVhMGVjM2MtMWFmYmNmZTY=, ActorId: [1:7494346214529927594:2340], ActorState: ExecuteState, TraceId: 01js2f4k9h34bpfsqaw4kwcxcw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:10:52.606121Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:10:52.688898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:10:52.770497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:10:52.939537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2f4ks81jpgvnb6mym6gsrb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmEyZGJmYTItOWExOTcwNy1hMzk2MDBjYy03ZDY4MWZmZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494346214529928076:3085] 2025-04-17T18:10:54.627352Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346201645024548:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:54.627427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:10:54.654951Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346198229695885:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:10:54.655016Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-17T18:10:58.753156Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494346201645024835:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:10:58.753352Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494346201645024835:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-17T18:10:58.753455Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494346201645024835:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494346205939992587:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744913450364 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:10:58.753557Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494346201 ... .write.partition_throttled_milliseconds","bin":"2500"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"5"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"50"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"500"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"5000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"999999"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.uncompressed_bytes"},"value":56,"kind":"RATE"}]} ===Request counters with query: /counters/counters=datastreams/database=%2FRoot/cloud_id=somecloud/folder_id=somefolder/database_id=root/topic=account2%2Ftopic2/consumer=some@random@consumer/json counters: {"sensors":[{"labels":{"name":"api.grpc.topic.stream_read.bytes"},"value":488,"kind":"RATE"},{"labels":{"name":"api.grpc.topic.stream_read.messages"},"value":4,"kind":"RATE"},{"labels":{"name":"topic.read.bytes"},"value":488,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"100"},"value":4,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"1000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"10000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"180000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"200"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"2000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"30000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"500"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"5000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"60000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"999999"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.messages"},"value":4,"kind":"RATE"}]} ===Request 
counters with query: /counters/counters=pqproxy/subsystem=userAgents/json counters: {"sensors":[{"labels":{"sensor":"BytesReadByUserAgent","consumer":"some@random@consumer","sdk_build_info":"ydb-cpp-sdk\/dev","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":511,"kind":"RATE"},{"labels":{"topic":"\/Root\/account2\/topic2","sensor":"BytesWrittenByUserAgent","sdk_build_info":"ydb-cpp-sdk\/dev","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":460,"kind":"RATE"}]} ===Request counters with query: /counters/counters=pqproxy/subsystem=userAgents/json counters: {"sensors":[{"labels":{"sensor":"BytesReadByUserAgent","consumer":"some@random@consumer","sdk_build_info":"ydb-cpp-sdk\/dev","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":511,"kind":"RATE"},{"labels":{"topic":"\/Root\/account2\/topic2","sensor":"BytesWrittenByUserAgent","sdk_build_info":"ydb-cpp-sdk\/dev","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":460,"kind":"RATE"}]} 2025-04-17T18:11:32.910896Z :INFO: [/Root] [/Root] [6fd9219-c0e61eba-94e534b9-5d3096a3] Closing read session. Close timeout: 0.000000s 2025-04-17T18:11:32.910969Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:account2/topic2:1:5:0:0 -:account2/topic2:2:4:0:0 -:account2/topic2:3:3:0:0 -:account2/topic2:4:2:0:0 -:account2/topic2:0:1:3:0 2025-04-17T18:11:32.911016Z :INFO: [/Root] [/Root] [6fd9219-c0e61eba-94e534b9-5d3096a3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 30 BytesRead: 40 MessagesRead: 4 BytesReadCompressed: 40 BytesInflightUncompressed: 30 BytesInflightCompressed: 0 BytesInflightTotal: 30 MessagesInflight: 3 } 2025-04-17T18:11:32.911117Z :NOTICE: [/Root] [/Root] [6fd9219-c0e61eba-94e534b9-5d3096a3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T18:11:32.911188Z :DEBUG: [/Root] [/Root] [6fd9219-c0e61eba-94e534b9-5d3096a3] [] Returning serverBytesSize = 0 to budget 2025-04-17T18:11:32.911353Z :DEBUG: [/Root] [/Root] [6fd9219-c0e61eba-94e534b9-5d3096a3] [] Abort session to cluster 2025-04-17T18:11:32.912258Z :NOTICE: [/Root] [/Root] [6fd9219-c0e61eba-94e534b9-5d3096a3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:11:32.912376Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11422502460355313487_v1 grpc read done: success# 0, data# { } 2025-04-17T18:11:32.912402Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11422502460355313487_v1 grpc read failed 2025-04-17T18:11:32.912421Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11422502460355313487_v1 grpc closed 2025-04-17T18:11:32.912483Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11422502460355313487_v1 is DEAD 2025-04-17T18:11:32.912757Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912786Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7494346383947818514:2548] destroyed 2025-04-17T18:11:32.912802Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912831Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7494346383947818509:2545] destroyed 2025-04-17T18:11:32.912845Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912862Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7494346383947818508:2544] destroyed 2025-04-17T18:11:32.912900Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912831Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912915Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912931Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912909Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7494346383947818513:2547] destroyed 2025-04-17T18:11:32.912933Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912945Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7494346383947818511:2546] destroyed 2025-04-17T18:11:32.912964Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.912988Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.913654Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [7:7494346383947818498:2541] disconnected; active server actors: 1 2025-04-17T18:11:32.913691Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [7:7494346383947818498:2541] client some@random@consumer disconnected session some@random@consumer_7_1_11422502460355313487_v1 2025-04-17T18:11:32.914577Z :INFO: [/Root] TraceId [] SessionId [123|d2d7345a-ead90346-bf9c549f-8d062782_0] MessageGroupId [123] Write session: close. 
Timeout 0.000000s 2025-04-17T18:11:32.915147Z :INFO: [/Root] TraceId [] SessionId [123|d2d7345a-ead90346-bf9c549f-8d062782_0] MessageGroupId [123] Write session will now close 2025-04-17T18:11:32.915179Z :DEBUG: [/Root] TraceId [] SessionId [123|d2d7345a-ead90346-bf9c549f-8d062782_0] MessageGroupId [123] Write session: aborting 2025-04-17T18:11:32.915817Z :INFO: [/Root] TraceId [] SessionId [123|d2d7345a-ead90346-bf9c549f-8d062782_0] MessageGroupId [123] Write session: gracefully shut down, all writes complete 2025-04-17T18:11:32.915876Z :DEBUG: [/Root] TraceId [] SessionId [123|d2d7345a-ead90346-bf9c549f-8d062782_0] MessageGroupId [123] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-17T18:11:32.915937Z :DEBUG: [/Root] TraceId [] SessionId [123|d2d7345a-ead90346-bf9c549f-8d062782_0] MessageGroupId [123] Write session is aborting and will not restart 2025-04-17T18:11:32.916014Z :DEBUG: [/Root] TraceId [] SessionId [123|d2d7345a-ead90346-bf9c549f-8d062782_0] MessageGroupId [123] Write session: destroy 2025-04-17T18:11:32.916040Z node 7 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 123|d2d7345a-ead90346-bf9c549f-8d062782_0 grpc read done: success: 0 data: 2025-04-17T18:11:32.916075Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|d2d7345a-ead90346-bf9c549f-8d062782_0 grpc read failed 2025-04-17T18:11:32.916122Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|d2d7345a-ead90346-bf9c549f-8d062782_0 grpc closed 2025-04-17T18:11:32.916148Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|d2d7345a-ead90346-bf9c549f-8d062782_0 is DEAD 2025-04-17T18:11:32.917402Z node 7 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:11:32.917557Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7494346383947818488:2534] destroyed 2025-04-17T18:11:32.917600Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-17T18:11:32.925202Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7494346339802021446:2106], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:11:32.925358Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7494346339802021446:2106], cacheItem# { Subscriber: { Subscriber: [8:7494346352686923491:2185] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:11:32.925492Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7494346382751694922:2430], recipient# [8:7494346382751694921:2352], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] |89.3%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 24114, MsgBus: 15784 2025-04-17T18:11:27.088096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346363971043356:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:27.088168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017f6/r3tmp/tmpPcZ9Pt/pdisk_1.dat 2025-04-17T18:11:27.339041Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24114, node 1 2025-04-17T18:11:27.392746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:27.392771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:27.392790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:27.392934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:27.423913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:27.424055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:27.425876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15784 TClient is connected to server localhost:15784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:27.764510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:27.784893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:27.906905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:28.031653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:28.115214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:29.155964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346372560979730:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:29.156156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:29.400767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.423926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.447493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.473481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.501770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.529486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.557975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.598724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346372560980385:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:29.598778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:29.598913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346372560980390:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:29.601246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:29.607587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346372560980392:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:29.685556Z node 1 :TX_PROXY ERROR: Actor# [1:7494346372560980446:3538] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:30.390136Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzVkODk0N2QtYmI2YzYyZGItNzllZGY4ZGItMzZjNDc3NjE=, ActorId: [1:7494346376855948066:2511], ActorState: ReadyState, TraceId: 01js2f5rd02d7yhyjqnemgmqp1, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 23821, MsgBus: 2249 2025-04-17T18:11:30.905381Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346375605340402:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:30.905437Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017f6/r3tmp/tmphnWh59/pdisk_1.dat 2025-04-17T18:11:30.986607Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23821, node 2 2025-04-17T18:11:31.031738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:31.031818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:31.033659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:31.040351Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:31.040375Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:31.040384Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:31.040472Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2249 TClient is connected to server localhost:2249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:31.378586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:31.386616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:31.427726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:31.550471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:31.594752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:33.335621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346388490244056:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:33.335715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:33.376105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.401534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.425802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.447709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.470509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.499857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.528869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.605090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346388490244715:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:33.605173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:33.605184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346388490244720:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:33.607885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:33.615805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346388490244722:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:33.688196Z node 2 :TX_PROXY ERROR: Actor# [2:7494346388490244777:3537] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:34.434426Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494346392785212421:2521], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:11:34.434595Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTIzNmQxYzYtYjk0YjY0YzQtNjA5MzcxN2MtNjJkYzAwNzQ=, ActorId: [2:7494346392785212399:2511], ActorState: ExecuteState, TraceId: 01js2f5wcac0jtmxvdba8j0n75, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01js2f5wc3623hs1tp9evshb4d 2025-04-17T18:11:34.440363Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTIzNmQxYzYtYjk0YjY0YzQtNjA5MzcxN2MtNjJkYzAwNzQ=, ActorId: [2:7494346392785212399:2511], ActorState: ReadyState, TraceId: 01js2f5wd8d8wwb78rrhqnhekf, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 29927, MsgBus: 24307 2025-04-17T18:11:14.635758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346305310228785:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:14.635886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00181f/r3tmp/tmpLvRpV7/pdisk_1.dat 2025-04-17T18:11:14.928028Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29927, node 1 2025-04-17T18:11:15.010438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:15.010534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:15.012089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:15.071416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.071447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.071454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.071605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24307 TClient is connected to server localhost:24307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:11:15.674315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.700182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.805974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.941891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.014844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.987186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346313900165159:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:16.987264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.250748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.274150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.338770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.361487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.425051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.451366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.482864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.525057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318195133130:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.525178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.525385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346318195133135:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.528448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:17.536848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346318195133137:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:17.621634Z node 1 :TX_PROXY ERROR: Actor# [1:7494346318195133192:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:18.821613Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710676; 2025-04-17T18:11:18.830215Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346322490100872:2511], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7494346322490100817:2511]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7494346322490100872:2511].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:18.830727Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346322490100865:2511], SessionActorId: [1:7494346322490100817:2511], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346322490100817:2511]. isRollback=0 2025-04-17T18:11:18.830967Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjZhYjRjMGItOTFhNjI1LWE2OWUzNjRlLTQwYjkxMjlj, ActorId: [1:7494346322490100817:2511], ActorState: ExecuteState, TraceId: 01js2f5d53c92vsytn62w2jeyr, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346322490100943:2511] from: [1:7494346322490100865:2511] 2025-04-17T18:11:18.831067Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346322490100943:2511] TxId: 281474976710676. Ctx: { TraceId: 01js2f5d53c92vsytn62w2jeyr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjZhYjRjMGItOTFhNjI1LWE2OWUzNjRlLTQwYjkxMjlj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:18.831250Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjZhYjRjMGItOTFhNjI1LWE2OWUzNjRlLTQwYjkxMjlj, ActorId: [1:7494346322490100817:2511], ActorState: ExecuteState, TraceId: 01js2f5d53c92vsytn62w2jeyr, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 6941, MsgBus: 21928 2025-04-17T18:11:19.504903Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346329763415048:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:19.504962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00181f/r3tmp/tmp1c23Pl/pdisk_1.dat 2025-04-17T18:11:19.601306Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6941, node 2 2025-04-17T18:11:19.635584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:19.635677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:19.637294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:19.646624Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:19.646647Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:19.646653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:19.646748Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21928 TClient is connected to server localhost:21928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinish ... 
214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:11:23.848982Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:11:23.849355Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:11:23.849486Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[2:7494346342648318422:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037903; 2025-04-17T18:11:23.849556Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[2:7494346342648318422:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037899; 2025-04-17T18:11:23.849627Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[2:7494346342648318422:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037899; 2025-04-17T18:11:23.849686Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[2:7494346342648318422:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037903; 2025-04-17T18:11:23.850072Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:11:24.219465Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQ4YTU2YmUtN2NlNGI0ZDItNDMxM2MyN2EtMmJkNTkxNDg=, ActorId: [2:7494346346943287919:2808], ActorState: ExecuteState, TraceId: 01js2f5jc0a0pncvydv9f1d57k, Create QueryResponse for error on request, msg: 2025-04-17T18:11:24.220185Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715670;tx_id=281474976715670;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715670; 2025-04-17T18:11:24.220764Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;self_id=[2:7494346342648318526:2402];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715665;problem=finished; 2025-04-17T18:11:24.221520Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; 2025-04-17T18:11:24.221626Z node 2 :TX_COLUMNSHARD_TX WARN: fline=manager.cpp:134;event=abort;tx_id=281474976715665;problem=finished; Trying to start YDB, gRPC: 28697, MsgBus: 10736 2025-04-17T18:11:27.622664Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:11:27.622813Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:11:27.622913Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00181f/r3tmp/tmpOs40ba/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28697, node 3 2025-04-17T18:11:27.954991Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:27.955657Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:27.955695Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:27.955720Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:27.956009Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:27.991709Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:27.991857Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:28.003078Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10736 TClient is connected to server localhost:10736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:28.247779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:28.290253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:28.542495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:28.906633Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:29.220005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:29.688702Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1806:3402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:29.688889Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:29.709954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.959435Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:30.209805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:30.451698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:30.708184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:30.980385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:31.299624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:31.627940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2549:3971], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:31.628077Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:31.628466Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2554:3976], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:31.633999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:31.805275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2556:3978], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:31.850608Z node 3 :TX_PROXY ERROR: Actor# [3:2625:4028] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:32.772102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.040818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:11:33.395101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 62741, MsgBus: 14739 2025-04-17T18:11:14.635375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346307200535957:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:14.635493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00180f/r3tmp/tmpuZmCS2/pdisk_1.dat 2025-04-17T18:11:14.921351Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62741, node 1 2025-04-17T18:11:14.991946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:14.992041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:14.993846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:15.071395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.071427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.071439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.071597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14739 TClient is connected to server localhost:14739 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:15.669425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:17.089091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346320085438507:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.089167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346320085438518:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.089211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.092141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:17.100032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346320085438521:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:17.194015Z node 1 :TX_PROXY ERROR: Actor# [1:7494346320085438572:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:17.419824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.525190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:18.279276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:19.543472Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-17T18:11:19.553867Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:11:19.554037Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:11:19.554217Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346328675381955:2968], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7494346328675381620:2968]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7494346328675381955:2968].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:19.554710Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346328675381948:2968], SessionActorId: [1:7494346328675381620:2968], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346328675381620:2968]. isRollback=0 2025-04-17T18:11:19.554953Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWViZTI5YzItOGVmODQ2YjctMTFlMjA5YmYtN2UzODhmZTg=, ActorId: [1:7494346328675381620:2968], ActorState: ExecuteState, TraceId: 01js2f5dt4f23frcd5fdvg45yn, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346328675381949:2968] from: [1:7494346328675381948:2968] 2025-04-17T18:11:19.555054Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346328675381949:2968] TxId: 281474976715667. Ctx: { TraceId: 01js2f5dt4f23frcd5fdvg45yn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWViZTI5YzItOGVmODQ2YjctMTFlMjA5YmYtN2UzODhmZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:19.555247Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWViZTI5YzItOGVmODQ2YjctMTFlMjA5YmYtN2UzODhmZTg=, ActorId: [1:7494346328675381620:2968], ActorState: ExecuteState, TraceId: 01js2f5dt4f23frcd5fdvg45yn, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 2025-04-17T18:11:19.635175Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346307200535957:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:19.635222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 6876, MsgBus: 61715 2025-04-17T18:11:25.267237Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346352891267587:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:25.267317Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00180f/r3tmp/tmp5fTAqp/pdisk_1.dat 2025-04-17T18:11:25.348239Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6876, node 2 2025-04-17T18:11:25.394657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:25.394755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:25.395086Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:25.395104Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:25.395111Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:25.395199Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:25.396290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61715 TClient is connected to server localhost:61715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:25.707341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
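The ABORTED commits above are YDB's optimistic locking at work: at commit time the engine found that a concurrent write had invalidated this transaction's read set (issue code 2001, "Transaction locks invalidated"), so the whole transaction body has to be rerun from its first read. A schematic retry loop in plain Python; TransactionAborted and run_with_retries are illustrative names, not a real SDK API:

    import random
    import time

    class TransactionAborted(Exception):
        """Stand-in for a commit that failed with ABORTED / broken locks."""

    def run_with_retries(tx_body, attempts=5):
        # The full body is rerun so each attempt re-reads and takes fresh
        # locks; resuming a half-done transaction would reuse stale reads.
        for attempt in range(attempts):
            try:
                return tx_body()
            except TransactionAborted:
                if attempt == attempts - 1:
                    raise
                # Backoff with jitter so two conflicting sessions do not
                # collide again on the same schedule.
                time.sleep(min(1.0, 0.01 * 2 ** attempt) * random.random())

    state = {"calls": 0}

    def flaky_tx():
        state["calls"] += 1
        if state["calls"] < 3:
            raise TransactionAborted()
        return "committed"

    assert run_with_retries(flaky_tx) == "committed"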
2025-04-17T18:11:27.550304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346361481202819:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:27.550365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346361481202838:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:27.550451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:27.553056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:27.560339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346361481202848:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:27.626462Z node 2 :TX_PROXY ERROR: Actor# [2:7494346361481202899:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:27.665713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:27.700424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:28.409784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.591353Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjFmNjY1ZDctNTE4NjU5ZS00ZmEwZTMwMy01NGRkYjQ0OA==, ActorId: [2:7494346370071145900:2967], ActorState: ExecuteState, TraceId: 01js2f5qjxc826rqx1jb8jts3f, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 2025-04-17T18:11:30.267402Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346352891267587:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:30.267465Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkTx::LocksAbortOnCommit >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> KqpTx::CommitRequired ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 6811, MsgBus: 11115 2025-04-17T18:11:14.635441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346308844508738:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:14.635522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001817/r3tmp/tmpaFmH4l/pdisk_1.dat 2025-04-17T18:11:14.929768Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6811, node 1 2025-04-17T18:11:15.006660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:15.006765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:15.008489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:15.071281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.071302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.071312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.071422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11115 TClient is connected to server localhost:11115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
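"tx has deferred effects, but locks are broken" earlier in this block is the buffered-write variant of the same conflict: the transaction's writes were deferred until commit, and by then a concurrent commit had invalidated the locks covering the rows those writes depended on, so none of the effects were applied. A toy simulation in plain Python (no YDB code; Tx and the version-checked store are invented for illustration) of why the later committer loses:

    # Toy optimistic store: each key carries a version. A transaction
    # records the versions it read and buffers its writes ("deferred
    # effects"); commit applies the buffer only if no read version moved.
    store = {"k": (0, "old")}  # key -> (version, value)

    class Tx:
        def __init__(self):
            self.read_set = {}  # key -> version observed
            self.deferred = {}  # buffered writes

        def read(self, key):
            version, value = store[key]
            self.read_set[key] = version
            return value

        def write(self, key, value):
            self.deferred[key] = value  # nothing hits the store yet

        def commit(self):
            for key, seen in self.read_set.items():
                if store[key][0] != seen:  # lock broken by another commit
                    raise RuntimeError("deferred effects dropped: locks broken")
            for key, value in self.deferred.items():
                store[key] = (store[key][0] + 1, value)

    t1, t2 = Tx(), Tx()
    t1.read("k"); t2.read("k")        # both pin version 0
    t1.write("k", "t1"); t1.commit()  # first committer wins, version -> 1
    t2.write("k", "t2")
    try:
        t2.commit()                   # version moved: abort, buffer discarded
    except RuntimeError as err:
        print(err)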
2025-04-17T18:11:15.662762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.692490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.835852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:15.980111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.054056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:17.034478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346321729412398:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.034574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.252722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.276757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.299444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.321045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.346029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.376383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.405109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.449701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346321729413061:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.449799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.449915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346321729413066:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:17.452187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:17.458356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346321729413068:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:17.524574Z node 1 :TX_PROXY ERROR: Actor# [1:7494346321729413122:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9484, MsgBus: 12314 2025-04-17T18:11:19.689626Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346329355231629:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:19.689684Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001817/r3tmp/tmp0jD5LI/pdisk_1.dat 2025-04-17T18:11:19.754533Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9484, node 2 2025-04-17T18:11:19.804604Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:19.804625Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:19.804631Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:19.804736Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:19.813320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:19.813424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:19.814825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12314 TClient is connected to server localhost:12314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:20.091645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:20.099826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:20.165689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:20.264154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:20.313506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:21.867611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346337945167987:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.867696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:21.914246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.934442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.960303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:21.981006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.003488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.029995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.056556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:22.104080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346342240135939:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.104139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346342240135944:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.104141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:22.106775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:22.115127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346342240135946:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:22.214644Z node 2 :TX_PROXY ERROR: Actor# [2:7494346342240136002:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:24.689724Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346329355231629:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:24.689832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:11:34.741784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:11:34.741818Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:36.029450Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346402369679196:2675], TxId: 281474976715684, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjZkYTU0MDctODdlMWE4YjQtOWMwNmM4MmYtMTBkOTc5ZQ==. TraceId : 01js2f5xsa3jy44wjxay8975tz. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744913483000/18446744073709551615 shard 72075186224037888 with lowWatermark v1744913483131/18446744073709551615 (node# 2 state# Ready) } } 2025-04-17T18:11:36.030223Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346402369679196:2675], TxId: 281474976715684, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjZkYTU0MDctODdlMWE4YjQtOWMwNmM4MmYtMTBkOTc5ZQ==. TraceId : 01js2f5xsa3jy44wjxay8975tz. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744913483000/18446744073709551615 shard 72075186224037888 with lowWatermark v1744913483131/18446744073709551615 (node# 2 state# Ready) } }. 2025-04-17T18:11:36.030591Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346402369679197:2676], TxId: 281474976715684, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjZkYTU0MDctODdlMWE4YjQtOWMwNmM4MmYtMTBkOTc5ZQ==. TraceId : 01js2f5xsa3jy44wjxay8975tz. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346402369679192:2511], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:11:36.031263Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjZkYTU0MDctODdlMWE4YjQtOWMwNmM4MmYtMTBkOTc5ZQ==, ActorId: [2:7494346342240136326:2511], ActorState: ExecuteState, TraceId: 01js2f5xsa3jy44wjxay8975tz, Create QueryResponse for error on request, msg: >> KqpSinkTx::OlapLocksAbortOnCommit >> KqpTx::ExplicitTcl >> KqpTx::RollbackManyTx >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> KqpSinkMvcc::OltpNamedStatement >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitPrepared >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 4106, MsgBus: 27531 2025-04-17T18:11:30.031378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346376435319878:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:30.031574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ed/r3tmp/tmprkesb9/pdisk_1.dat 2025-04-17T18:11:30.287252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:30.301518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:30.301615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:30.306664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4106, node 1 2025-04-17T18:11:30.365169Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:30.365197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:30.365202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:30.365298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27531 TClient is connected to server localhost:27531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
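The "Table id 2 has no snapshot at v1744913483000/... with lowWatermark v1744913483131/..." abort above is MVCC garbage collection: the session held its snapshot across a long pause, the shard's low watermark advanced past the pinned read version, and the old snapshot data was compacted away, so the read must restart on a fresh snapshot. The versions print as v<step>/<txId> and order as (step, txId) pairs; a small sketch of that comparison in plain Python, with parse_version invented here and the format assumed from the log line:

    def parse_version(text: str):
        """Parse 'v<step>/<txId>' as printed in the log into a sortable pair."""
        step, tx_id = text.lstrip("v").split("/")
        return (int(step), int(tx_id))

    read_at = parse_version("v1744913483000/18446744073709551615")
    low_watermark = parse_version("v1744913483131/18446744073709551615")

    # The pinned read version sorts below the low watermark, so the shard
    # no longer keeps that snapshot and the read request is aborted.
    assert read_at < low_watermark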
2025-04-17T18:11:30.762068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:30.781359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:30.901146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:31.003798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:31.059347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:32.332319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346385025256255:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:32.332472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:32.562821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.588723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.611049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.636697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.661831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.688457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.753527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.831075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346385025256922:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:32.831131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:32.831320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346385025256927:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:32.834714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:32.843568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346385025256929:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:32.924597Z node 1 :TX_PROXY ERROR: Actor# [1:7494346385025256985:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:35.031550Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346376435319878:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:35.031621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:11:36.313186Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzQ5YWQyNmUtMTBiMTI1ZGItOTU3MzkxZTQtYjlkMmUwNDY=, ActorId: [1:7494346389320224573:2502], ActorState: ExecuteState, TraceId: 01js2f5y1a2very29f6q03bt3y, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 14482, MsgBus: 17119 2025-04-17T18:11:36.901576Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346402306661534:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:36.901814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ed/r3tmp/tmpTJnTT8/pdisk_1.dat 2025-04-17T18:11:36.977809Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14482, node 2 2025-04-17T18:11:37.030858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:37.030941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:37.032192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:37.032214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:37.032222Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:37.032338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:37.032380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17119 TClient is connected to server localhost:17119 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:37.367582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:37.385329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:37.427104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:37.550077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:37.621419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:39.046808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346415191565201:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.046922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.083877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.107539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.129489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.152446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.175451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.203372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.230582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.272109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346415191565852:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.272160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.272222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346415191565857:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.274682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:39.281269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346415191565859:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:39.370805Z node 2 :TX_PROXY ERROR: Actor# [2:7494346415191565915:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:40.479401Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGJjYzI0NmYtNDY0ZDZhOWMtNGZkYjNlOTktODcwNDk1N2Y=, ActorId: [2:7494346419486533531:2510], ActorState: ExecuteState, TraceId: 01js2f628w22b87g2twb3hc0ve, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::EmptyTxOnCommit >> YdbOlapStore::LogTsRangeDescending [GOOD] >> YdbQueryService::TestAttachTwice >> KqpService::PatternCache [GOOD] >> KqpService::RangeCache+UseCache >> KqpSinkLocks::TInvalidateOlap >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 22948, MsgBus: 13456 2025-04-17T18:11:22.246397Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346343070898397:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:22.246581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ff/r3tmp/tmp2xX2dv/pdisk_1.dat 2025-04-17T18:11:22.514423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22948, node 1 2025-04-17T18:11:22.564177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:22.564202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:22.564214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:22.564349Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:22.601155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:22.601233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:22.603039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13456 TClient is connected to server localhost:13456 WaitRootIsUp 'Root'... 
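[editor's note] The session warning "tx has deferred effects, but locks are broken" logged above is YDB's optimistic locking at work: writes are buffered as deferred effects until commit, and if a concurrent transaction invalidates the locks taken at read time, the commit is rejected and the whole transaction must be rerun. A minimal sketch of that pattern under simplified assumptions (one key guarded by a version counter; real YDB tracks per-shard lock validity) — everything here is illustrative, not YDB code:

#include <atomic>
#include <cstdint>
#include <iostream>

std::atomic<uint64_t> keyVersion{1};  // bumped by every committed write

bool RunOptimisticTx(bool simulateConflict) {
    const uint64_t observed = keyVersion.load();  // "lock" taken at read time
    const int newValue = 42;                      // deferred effect, buffered

    if (simulateConflict) {
        keyVersion.fetch_add(1);  // a concurrent writer breaks the lock
    }

    // Commit: deferred effects may only be applied if the lock still holds.
    uint64_t expected = observed;
    if (!keyVersion.compare_exchange_strong(expected, observed + 1)) {
        std::cout << "tx has deferred effects, but locks are broken\n";
        return false;  // caller must rerun the whole transaction
    }
    std::cout << "committed value " << newValue << "\n";
    return true;
}

int main() {
    RunOptimisticTx(/*simulateConflict=*/true);   // fails, as in the test above
    RunOptimisticTx(/*simulateConflict=*/false);  // a clean retry succeeds
}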
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:22.979003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:24.347455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346351660833634:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:24.347526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346351660833653:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:24.347559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:24.350842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:24.358219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346351660833663:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:24.442851Z node 1 :TX_PROXY ERROR: Actor# [1:7494346351660833714:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:24.701049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:24.802577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:25.526832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:26.990246Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDY0ZWRiYmUtYmM4ZjVmOTctZjE0YTExZjktM2U5ZDViNzk=, ActorId: [1:7494346360250776737:2968], ActorState: ExecuteState, TraceId: 01js2f5n2y822vmek8ey71sdb7, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 2025-04-17T18:11:27.245974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346343070898397:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:27.246035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 23498, MsgBus: 4349 2025-04-17T18:11:32.712532Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346386310135948:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:32.712609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ff/r3tmp/tmp0YBGWO/pdisk_1.dat 2025-04-17T18:11:32.786082Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23498, node 2 2025-04-17T18:11:32.836034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:32.836141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:32.837467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:32.845479Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:32.845501Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:32.845508Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:32.845639Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4349 TClient is connected to server localhost:4349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:33.187589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
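[editor's note] The error ":3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008" logged above states the rule the SnapshotROInteractive1 test provokes: a transaction opened in a read-only (snapshot) mode must reject write operations at prepare time. A minimal sketch of that validation, with hypothetical enums and a Validate() helper introduced only for illustration (not YDB internals):

#include <iostream>
#include <optional>
#include <string>

enum class ETxMode { SerializableRW, SnapshotRO, OnlineRO, StaleRO };
enum class EOp { Select, Upsert, Insert, Delete };

bool IsReadOnly(ETxMode mode) { return mode != ETxMode::SerializableRW; }
bool IsWrite(EOp op) { return op != EOp::Select; }

// Reject writes inside read-only transactions, as the prepare step does.
std::optional<std::string> Validate(ETxMode mode, EOp op) {
    if (IsReadOnly(mode) && IsWrite(op)) {
        return "Error: Operation 'Upsert' can't be performed "
               "in read only transaction, code: 2008";
    }
    return std::nullopt;
}

int main() {
    // An interactive snapshot read-only transaction that then tries to UPSERT.
    if (auto err = Validate(ETxMode::SnapshotRO, EOp::Upsert)) {
        std::cout << *err << "\n";  // the session answers with this issue
    }
}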
2025-04-17T18:11:35.401232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346399195038487:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:35.401398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346399195038467:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:35.401642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:35.404209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:35.410833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346399195038499:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:35.492105Z node 2 :TX_PROXY ERROR: Actor# [2:7494346399195038550:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:35.540004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:35.574277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:36.474158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-17T18:11:37.712881Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346386310135948:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:37.712955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpTx::CommitPrepared [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 30001, MsgBus: 10182 2025-04-17T18:11:37.476209Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346406295463010:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:37.476322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017d2/r3tmp/tmp60mJRC/pdisk_1.dat 2025-04-17T18:11:37.756532Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30001, node 1 2025-04-17T18:11:37.792796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:37.792821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:37.792831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:37.793001Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:37.837477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:37.837599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:37.839313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10182 TClient is connected to server localhost:10182 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:38.162972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.185403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.297337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.417741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.475337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:39.559337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346414885399382:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.559433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.737531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.758745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.779173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.802893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.826287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.876162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.899501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.971434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346414885400041:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.971516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.971568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346414885400046:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.974367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:39.980679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346414885400048:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:40.082562Z node 1 :TX_PROXY ERROR: Actor# [1:7494346419180367400:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5603, MsgBus: 30160 2025-04-17T18:11:41.236484Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346422985732065:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:41.236580Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017d2/r3tmp/tmplUZivf/pdisk_1.dat 2025-04-17T18:11:41.317490Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5603, node 2 2025-04-17T18:11:41.351417Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:41.351439Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:41.351444Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:41.351517Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:41.371586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:41.371649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:41.373136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30160 TClient is connected to server localhost:30160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:41.613495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:41.619801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:41.653894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:41.770869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:41.830200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:43.078141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346431575668440:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:43.078226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:43.113370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:43.134871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:43.156284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:43.178504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:43.205572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:43.248526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:43.275159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:43.318265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346431575669099:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:43.318338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:43.318373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346431575669104:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:43.320966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:43.328026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346431575669106:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:43.414701Z node 2 :TX_PROXY ERROR: Actor# [2:7494346431575669162:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [GOOD] >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] >> KqpTx::EmptyTxOnCommit [GOOD] >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 17413, MsgBus: 15003 2025-04-17T18:11:38.049406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346411482355740:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:38.049465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017d0/r3tmp/tmpYBq3zl/pdisk_1.dat 2025-04-17T18:11:38.318130Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17413, node 1 2025-04-17T18:11:38.368900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:38.368916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:38.368927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:38.369040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:38.421042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:38.421183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:38.422992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15003 TClient is connected to server localhost:15003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:11:38.758788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.777959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.867803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.978234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:39.026016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:40.194541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346420072292115:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:40.194623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:40.407615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.433096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.458566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.481859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.503930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.538404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.561254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.594517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346420072292769:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:40.594601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346420072292774:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:40.594604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:40.596746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:40.602273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346420072292776:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:40.684748Z node 1 :TX_PROXY ERROR: Actor# [1:7494346420072292831:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:41.485190Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjEyNzQyMi03MzQzZWMyOS1hN2QwM2Q1OC00ZTdlOTY0NQ==, ActorId: [1:7494346424367260443:2511], ActorState: ReadyState, TraceId: 01js2f6397a09p54y3ekee806r, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5403, MsgBus: 21567 2025-04-17T18:11:42.091867Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346428371313244:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:42.091920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017d0/r3tmp/tmpJMZprv/pdisk_1.dat 2025-04-17T18:11:42.181828Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5403, node 2 2025-04-17T18:11:42.214675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:42.214759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:42.215274Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:42.215288Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:42.215292Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:42.215362Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:42.216396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21567 TClient is connected to server localhost:21567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:42.510315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:42.518827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:42.562778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:42.681515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:42.750607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:44.113672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346436961249606:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:44.113743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:44.155433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:44.177039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:44.198336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:44.218907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:44.239243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:44.264027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:44.332995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:44.370499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346436961250262:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:44.370570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346436961250267:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:44.370593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:44.372637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:44.378427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346436961250269:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:44.464166Z node 2 :TX_PROXY ERROR: Actor# [2:7494346436961250324:3535] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSinkTx::InvalidateOnError >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> KqpSinkLocks::DifferentKeyUpdate >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 28232, MsgBus: 21377 2025-04-17T18:11:14.635310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346308866869846:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:14.635413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00182f/r3tmp/tmpiErmtp/pdisk_1.dat 2025-04-17T18:11:14.929235Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28232, node 1 2025-04-17T18:11:15.020947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:15.021047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:15.022781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:15.071294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.071320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.071326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.071433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21377 TClient is connected to server localhost:21377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
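[editor's note] The WARN/ERROR triple that recurs in every test startup above — "Resource pool default not found" → "Scheduled retry for error: Transaction ... completed, doublechecking" → "Check failed: path ... error: path exist, request accepts it" — is a benign create-if-missing race: several actors see NOT_FOUND, all try to create the default pool, and the losers accept "already exists" as success. A minimal sketch of that idempotent pattern, with hypothetical types (not YDB's workload manager API):

#include <iostream>
#include <mutex>
#include <set>
#include <string>

std::mutex g_m;
std::set<std::string> g_paths;  // stands in for the scheme board

enum class EResult { Created, AlreadyExists };

// Create-if-missing: concurrent creators converge on the same final state.
EResult EnsurePool(const std::string& path) {
    std::lock_guard<std::mutex> lock(g_m);
    if (!g_paths.insert(path).second) {
        return EResult::AlreadyExists;  // "path exist, request accepts it"
    }
    return EResult::Created;
}

int main() {
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    // Two racing creators: both end up with a usable pool, neither fails.
    for (int actor = 1; actor <= 2; ++actor) {
        const EResult r = EnsurePool(pool);
        std::cout << "actor " << actor << ": "
                  << (r == EResult::Created ? "created" : "already exists, ok")
                  << "\n";
    }
}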
2025-04-17T18:11:15.665530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:17.246340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346321751772399:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:17.246428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346321751772380:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:17.246653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:17.250264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:11:17.259556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346321751772409:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:17.332463Z node 1 :TX_PROXY ERROR: Actor# [1:7494346321751772460:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:17.586719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.725611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:17.725611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:17.725824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:17.726151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:17.726283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:17.726324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:17.726460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:17.726548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:17.726609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:17.726668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:17.726747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:11:17.726793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:17.726891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:17.726913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:17.727011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:17.727015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:17.727146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:17.727155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:17.727282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:17.727290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:17.727389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346321751772658:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:17.727397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:17.727521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:17.727628Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494346321751772661:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:17.764933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346321751772665:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:17.764941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346321751772692:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:17.764981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:74943463 ... 86224038015;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.724667Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037954;self_id=[2:7494346387043838802:2463];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037954;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.724869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7494346399928745346:3275];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.725038Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346399928745601:3335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.725270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346399928745601:3335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.725389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346399928745330:3269];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.725625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346399928745330:3269];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.725758Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7494346399928745336:3271];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.725964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7494346399928745336:3271];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.726367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346399928745340:3272];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.726407Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038033;self_id=[2:7494346399928745305:3258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.726686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346399928745305:3258];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.726698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346399928745340:3272];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.726810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346399928745295:3252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.727028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346399928745295:3252];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.915856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7494346387043838906:2490];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7494346387043838906:2490];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7494346387043838982:2494];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037944;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916329Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7494346387043838982:2494];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037944;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7494346382748871169:2421];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037902;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916633Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7494346382748871169:2421];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037902;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7494346387043838751:2444];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037957;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7494346387043838751:2444];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.916967Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037959;self_id=[2:7494346387043838743:2441];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037959;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.917073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037959;self_id=[2:7494346387043838743:2441];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037959;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.917235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:7494346387043838919:2492];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037970;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.917495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:7494346387043838919:2492];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037970;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.918515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7494346387043838879:2482];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037922;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.918661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7494346387043838879:2482];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037922;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.919771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7494346387043838891:2487];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037938;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.919971Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7494346387043838891:2487];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037938;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.920121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;self_id=[2:7494346387043838881:2483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037945;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.920227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;self_id=[2:7494346387043838881:2483];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037945;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:39.983376Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=281474976710667;tx_id=281474976710667;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710667; 2025-04-17T18:11:39.984686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.042885Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7494346382748871153:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037914;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.043204Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037914;self_id=[2:7494346382748871153:2417];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.202128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7494346387043839068:2512];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037915;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.202334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7494346387043839068:2512];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037915;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.251187Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7494346387043838860:2473];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037969;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.251293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7494346387043838769:2447];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037986;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.251411Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7494346387043838860:2473];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037969;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.251450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7494346387043838769:2447];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037986;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:40.281861Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Njk3OWEyODQtMTY3MGZlYTEtNWZiNjdiZTktY2M0YjkzMQ==, ActorId: [2:7494346412813650318:3865], ActorState: ReadyState, TraceId: 01js2f623gcjn9jyczev2n08dk, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:11:44.949317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:11:44.949348Z node 2 :IMPORT WARN: Table profiles were not loaded >> YdbQueryService::TestAttachTwice [GOOD] >> KqpTx::SnapshotRO >> KqpTx::RollbackRoTx [GOOD] >> YdbOlapStore::LogCountByResource [GOOD] >> KqpSinkMvcc::OltpNamedStatement [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 6091, MsgBus: 27288 2025-04-17T18:11:39.121367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346413255643697:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:39.121458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017b9/r3tmp/tmpystdvz/pdisk_1.dat 2025-04-17T18:11:39.373635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:39.373707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T18:11:39.375614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:39.382058Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6091, node 1 2025-04-17T18:11:39.435790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:39.435807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:39.435812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:39.435914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27288 TClient is connected to server localhost:27288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:39.797952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:39.813469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:39.921102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:40.029897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:40.103624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:41.097738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346421845580058:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:41.097816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:41.347182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:11:41.410324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:11:41.429265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:11:41.449427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:11:41.486192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:11:41.510720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:11:41.535729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:11:41.609197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346421845580727:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:41.609252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346421845580732:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:41.609257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:41.611531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:11:41.619606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346421845580734:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:41.691016Z node 1 :TX_PROXY ERROR: Actor# [1:7494346421845580789:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:44.148056Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346413255643697:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:44.148274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23178, MsgBus: 18560 2025-04-17T18:11:45.926135Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346440612920492:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:45.926276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017b9/r3tmp/tmp6EFb88/pdisk_1.dat 2025-04-17T18:11:45.999117Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23178, node 2 2025-04-17T18:11:46.045638Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:46.045663Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:46.045671Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:46.045812Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:46.049027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:46.049113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:46.050865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18560 TClient is connected to server localhost:18560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:11:46.324164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:46.332305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:46.378453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:46.472087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:46.540449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:48.051956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346453497824153:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:48.052057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:48.076015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:11:48.098116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:11:48.121098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:11:48.144142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:11:48.167497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:11:48.232192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:11:48.262165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:11:48.306030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346453497824811:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:48.306098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:48.306141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346453497824816:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:11:48.308518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:11:48.316082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346453497824818:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:48.409331Z node 2 :TX_PROXY ERROR: Actor# [2:7494346453497824873:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:49.123681Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTgwODg2MjAtMjVjYjJkN2YtOTM1YmNkODItYzhmMzA5MmU=, ActorId: [2:7494346457792792488:2510], ActorState: ReadyState, TraceId: 01js2f6aqs9k9dbgepx939tabm, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbQueryService::TestAttachTwice [GOOD] Test command err: 2025-04-17T18:08:49.258309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345683316489712:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:49.258419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ab/r3tmp/tmpxukao4/pdisk_1.dat 2025-04-17T18:08:49.718841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:49.741525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:49.741627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:49.755993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4107, node 1 2025-04-17T18:08:49.901056Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:49.901076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:49.901082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:49.901191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:08:50.248870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12443 2025-04-17T18:08:50.628755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:08:50.629160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:50.629630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:08:50.629677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T18:08:50.629745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:08:50.629814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:08:50.629848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:08:50.629898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-17T18:08:50.630166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-17T18:08:50.632186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-17T18:08:50.632408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:08:50.632427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:50.632543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:08:50.632572Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-17T18:08:50.642931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-17T18:08:50.643109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-04-17T18:08:50.643371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:08:50.643385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:08:50.643528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:08:50.643618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:08:50.643647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494345683316490289:2377], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-17T18:08:50.643668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494345683316490289:2377], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-17T18:08:50.643703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:50.643732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-17T18:08:50.644355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { 
StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:08:50.645013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChanne ... 2025-04-17T18:11:40.690480Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2025-04-17T18:11:40.690492Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2025-04-17T18:11:40.690504Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2025-04-17T18:11:40.690516Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2025-04-17T18:11:40.690527Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2025-04-17T18:11:40.690539Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2025-04-17T18:11:40.690552Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2025-04-17T18:11:40.690564Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2025-04-17T18:11:40.690575Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2025-04-17T18:11:40.690586Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2025-04-17T18:11:40.690598Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2025-04-17T18:11:40.690609Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2025-04-17T18:11:40.690621Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2025-04-17T18:11:40.690632Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2025-04-17T18:11:40.690646Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2025-04-17T18:11:40.690657Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2025-04-17T18:11:40.690671Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2025-04-17T18:11:40.690683Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2025-04-17T18:11:40.690695Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2025-04-17T18:11:40.690717Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished 2025-04-17T18:11:40.690749Z node 22 :KQP_COMPUTE DEBUG: SelfId: [22:7494346420784604961:3178], TxId: 281474976710670, task: 65. Ctx: { TraceId : 01js2f61vt1wydzsns7qmhhppf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-04-17T18:11:40.690906Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. pass away 2025-04-17T18:11:40.690992Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7494346420784604872:3107] TxId: 281474976710670. Ctx: { TraceId: 01js2f61vt1wydzsns7qmhhppf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [22:7494346420784604961:3178], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 6607 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 365 FinishTimeMs: 1744913500689 ComputeCpuTimeUs: 97 BuildCpuTimeUs: 268 HostName: "ghrun-gkdwnlcfnq" NodeId: 22 CreateTimeMs: 1744913500629 UpdateTimeMs: 1744913500690 } MaxMemoryUsage: 1048576 } 2025-04-17T18:11:40.691047Z node 22 :KQP_EXECUTER INFO: TxId: 281474976710670. 
Ctx: { TraceId: 01js2f61vt1wydzsns7qmhhppf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [22:7494346420784604961:3178] 2025-04-17T18:11:40.691060Z node 22 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:11:40.691158Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7494346420784604872:3107] TxId: 281474976710670. Ctx: { TraceId: 01js2f61vt1wydzsns7qmhhppf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:11:40.691226Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7494346420784604872:3107] TxId: 281474976710670. Ctx: { TraceId: 01js2f61vt1wydzsns7qmhhppf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.074037s ReadRows: 0 ReadBytes: 0 ru: 49 rate limiter was not found force flag: 1 2025-04-17T18:11:40.691303Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: ExecuteState, TraceId: 01js2f61vt1wydzsns7qmhhppf, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-17T18:11:40.691674Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: ExecuteState, TraceId: 01js2f61vt1wydzsns7qmhhppf, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 126.119 QueriesCount: 1 2025-04-17T18:11:40.691762Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: ExecuteState, TraceId: 01js2f61vt1wydzsns7qmhhppf, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:11:40.691885Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: ExecuteState, TraceId: 01js2f61vt1wydzsns7qmhhppf, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:11:40.691930Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: ExecuteState, TraceId: 01js2f61vt1wydzsns7qmhhppf, EndCleanup, isFinal: 1 2025-04-17T18:11:40.692003Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: ExecuteState, TraceId: 01js2f61vt1wydzsns7qmhhppf, Sent query response back to proxy, proxyRequestId: 5, proxyId: [22:7494346382129895430:2280] 2025-04-17T18:11:40.692054Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: unknown state, TraceId: 01js2f61vt1wydzsns7qmhhppf, Cleanup temp tables: 0 RESULT: [] --------------------- STATS: total CPU: 2217 duration: 1059 usec cpu: 1059 usec duration: 120404 usec cpu: 137519 usec { name: "/Root/OlapStore/log1" } 2025-04-17T18:11:40.694813Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913500057, txId: 18446744073709551615] shutting down 2025-04-17T18:11:40.694941Z node 22 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=22&id=M2EyN2Y5NjYtOGMxNzAyMGYtOGJlYjE0ZjItODg3YTBiNzk=, ActorId: [22:7494346420784604828:3107], ActorState: unknown state, TraceId: 01js2f61vt1wydzsns7qmhhppf, Session actor destroyed 2025-04-17T18:11:42.549492Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7494346426263278492:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:42.549585Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ab/r3tmp/tmpHopCm9/pdisk_1.dat 2025-04-17T18:11:42.765736Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:42.813902Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:42.814038Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:42.816295Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3358, node 25 2025-04-17T18:11:42.880699Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:42.880743Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:42.880765Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:42.880965Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:43.367157Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
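The dumps above and below all follow the same shape: bootstrap a single-node test server, wait for the Root scheme entry, create the default resource pool (the "Resource pool default not found" warnings and the "doublechecking" retry are part of that bootstrap race), then drive queries through KQP and tear the session down. For orientation, here is a minimal sketch of the kind of client call whose lifecycle the KQP_SESSION/KQP_EXECUTER lines trace, assuming the in-tree YDB C++ SDK; the endpoint, port, query text, and include paths are illustrative assumptions, not values taken from this run:

#include <ydb/public/sdk/cpp/client/ydb_driver/ydb_driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

int main() {
    // Connect to a local single-node server, as these tests do.
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("localhost:2135")   // illustrative port, not from this log
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NTable::TTableClient client(driver);

    // RetryOperationSync re-runs the lambda on retryable statuses such as
    // ABORTED ("Transaction locks invalidated"), which later dumps in this
    // log provoke deliberately to exercise MVCC conflict handling.
    NYdb::TStatus status = client.RetryOperationSync(
        [](NYdb::NTable::TSession session) {
            auto result = session.ExecuteDataQuery(
                "SELECT COUNT(*) FROM `/Root/OlapStore/log1`;",
                NYdb::NTable::TTxControl::BeginTx(
                    NYdb::NTable::TTxSettings::OnlineRO()).CommitTx())
                .GetValueSync();
            return NYdb::TStatus(result);  // hand the status to the retry loop
        });

    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}

A read-only transaction like this is what produces the "Kind: ReadOnly" txInfo entry and the KqpSnapshotManager snapshot-discard warning seen above once the query completes.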
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 28339, MsgBus: 61352 2025-04-17T18:11:29.118213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346369790597565:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:29.118314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ee/r3tmp/tmpFBcWN3/pdisk_1.dat 2025-04-17T18:11:29.379840Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28339, node 1 2025-04-17T18:11:29.436071Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:29.436092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:29.436102Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:29.436227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:29.476755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:29.476921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:29.478634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61352 TClient is connected to server localhost:61352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:29.815680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:31.306266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346378380532830:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:31.306347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346378380532798:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:31.306676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:31.310539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:31.319244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346378380532835:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:31.413217Z node 1 :TX_PROXY ERROR: Actor# [1:7494346378380532886:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:31.647557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:31.746645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:32.448563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:34.118390Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346369790597565:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:34.118459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 15251, MsgBus: 63941 2025-04-17T18:11:39.875533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346416198796119:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:39.875597Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ee/r3tmp/tmpgIj2fB/pdisk_1.dat 2025-04-17T18:11:39.968875Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15251, node 2 2025-04-17T18:11:40.004646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:40.004706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:40.005742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:40.006740Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:40.006756Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:40.006765Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:40.006884Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63941 TClient is connected to server localhost:63941 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:40.275302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:41.792949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346424788731362:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:41.792989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346424788731342:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:41.793017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:41.795887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:41.801973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346424788731379:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:41.885684Z node 2 :TX_PROXY ERROR: Actor# [2:7494346424788731433:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:41.915343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:41.943205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:42.592633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-17T18:11:44.876012Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346416198796119:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:44.876114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkMvcc::OlapMultiSinks [FAIL] >> KqpSinkTx::ExplicitTcl >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpSnapshotRead::TestSnapshotExpiration-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2025-04-17T18:08:20.089147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345558859557867:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:20.090661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:20.133572Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494345560077684567:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:20.133699Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:08:20.160178Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494345557886904059:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:20.160288Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a14/r3tmp/tmpQ71dBs/pdisk_1.dat 2025-04-17T18:08:20.746911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.747012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-17T18:08:20.750763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.750832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:20.751393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:20.751443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:20.865478Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:08:20.865508Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:08:20.865685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:20.875949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:20.876713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:20.903117Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17619, node 1 2025-04-17T18:08:21.066685Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:21.066719Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:08:21.179051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:21.180706Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:21.231349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:08:21.255985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:21.256010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:21.256020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:21.256139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23410 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:21.629612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:25.087616Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494345558859557867:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:25.087715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:25.138279Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494345560077684567:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:25.138358Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:08:25.158076Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494345557886904059:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:25.158157Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Killing node 1 Killing node 2 2025-04-17T18:08:35.901933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:08:35.901965Z node 1 :IMPORT WARN: Table profiles were not loaded Killing node 3 2025-04-17T18:08:44.082302Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494345662439416736:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:44.082382Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a14/r3tmp/tmpHpNLdQ/pdisk_1.dat 2025-04-17T18:08:44.286968Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:44.316432Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:44.316521Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:44.320455Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10913, node 5 2025-04-17T18:08:44.494907Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:44.494925Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:44.494932Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:44.495064Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2031 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:44.707590Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2031 2025-04-17T18:08:44.972023Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Root" DiffACL: "\n\033\010\000\022\027\010\001\020\200\200\002\032\ralice@builtin \003" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:08:44.972144Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:44.972285Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 1] name: Root type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:08:44.972305Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:08:44.972500Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-17T18:08:44.972527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:08:44.972595Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-17T18:08:44.972611Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-17T18:08:44.972632Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-17T18:08:44.972641Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-17T18:08:44.972685Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:08:44.97 ... . Sending channels info to compute actor: [50:7494346455014649180:3389], channels: 1 2025-04-17T18:11:48.323656Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. 
Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7494346455014649180:3389], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T18:11:48.323687Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [50:7494346455014649180:3389], 2025-04-17T18:11:48.323806Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7494346455014649180:3389], TxId: 281474976710674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=. TraceId : 01js2f68p2ee9vedehgzdaym7h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646926 2025-04-17T18:11:48.323931Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7494346455014649180:3389], TxId: 281474976710674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=. TraceId : 01js2f68p2ee9vedehgzdaym7h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7494346455014649180 RawX2: 4503814375738685 } } DstEndpoint { ActorId { RawX1: 7494346455014649177 RawX2: 4503814375738404 } } InMemory: true } 2025-04-17T18:11:48.324106Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7494346455014649180:3389], TxId: 281474976710674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=. TraceId : 01js2f68p2ee9vedehgzdaym7h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-17T18:11:48.324250Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7494346455014649180:3389], TxId: 281474976710674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=. TraceId : 01js2f68p2ee9vedehgzdaym7h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [50:7494346455014649177:3108] TaskId: 1 Stats: CpuTimeUs: 338 Tasks { TaskId: 1 CpuTimeUs: 147 FinishTimeMs: 1744913508323 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 48 BuildCpuTimeUs: 99 HostName: "ghrun-gkdwnlcfnq" NodeId: 50 CreateTimeMs: 1744913508323 CurrentWaitOutputTimeUs: 52 UpdateTimeMs: 1744913508324 } MaxMemoryUsage: 1048576 2025-04-17T18:11:48.324277Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Send TEvStreamData to [50:7494346450719681079:3108], seqNo: 1, nRows: 1 2025-04-17T18:11:48.324310Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7494346455014649180:3389], TxId: 281474976710674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=. TraceId : 01js2f68p2ee9vedehgzdaym7h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:11:48.324359Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-17T18:11:48.324445Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7494346455014649180:3389], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 338 Tasks { TaskId: 1 CpuTimeUs: 147 FinishTimeMs: 1744913508323 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 48 BuildCpuTimeUs: 99 HostName: "ghrun-gkdwnlcfnq" NodeId: 50 CreateTimeMs: 1744913508323 CurrentWaitOutputTimeUs: 52 UpdateTimeMs: 1744913508324 } MaxMemoryUsage: 1048576 } 2025-04-17T18:11:48.324531Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [50:7494346455014649180:3389], 2025-04-17T18:11:48.324584Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: ExecuteState, TraceId: 01js2f68p2ee9vedehgzdaym7h, Forwarded TEvStreamData to [50:7494346450719681077:3107] 2025-04-17T18:11:48.325004Z node 50 :KQP_EXECUTER DEBUG: TxId: 281474976710674, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 8388572, to: [50:7494346455014649181:3389] 2025-04-17T18:11:48.325061Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7494346455014649180:3389], TxId: 281474976710674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=. TraceId : 01js2f68p2ee9vedehgzdaym7h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-17T18:11:48.325091Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. Tasks execution finished 2025-04-17T18:11:48.325130Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7494346455014649180:3389], TxId: 281474976710674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=. TraceId : 01js2f68p2ee9vedehgzdaym7h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-17T18:11:48.325226Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. pass away 2025-04-17T18:11:48.325295Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. 
Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7494346455014649180:3389], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1006 Tasks { TaskId: 1 CpuTimeUs: 150 FinishTimeMs: 1744913508325 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 51 BuildCpuTimeUs: 99 HostName: "ghrun-gkdwnlcfnq" NodeId: 50 CreateTimeMs: 1744913508323 UpdateTimeMs: 1744913508325 } MaxMemoryUsage: 1048576 } 2025-04-17T18:11:48.325317Z node 50 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-17T18:11:48.325337Z node 50 :KQP_EXECUTER INFO: TxId: 281474976710674. Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [50:7494346455014649180:3389] 2025-04-17T18:11:48.325438Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T18:11:48.325483Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7494346455014649177:3108] TxId: 281474976710674. Ctx: { TraceId: 01js2f68p2ee9vedehgzdaym7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001006s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-17T18:11:48.325540Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: ExecuteState, TraceId: 01js2f68p2ee9vedehgzdaym7h, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-17T18:11:48.325798Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: ExecuteState, TraceId: 01js2f68p2ee9vedehgzdaym7h, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 721.461 QueriesCount: 1 2025-04-17T18:11:48.325843Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: ExecuteState, TraceId: 01js2f68p2ee9vedehgzdaym7h, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:11:48.325940Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: ExecuteState, TraceId: 01js2f68p2ee9vedehgzdaym7h, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:11:48.325968Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: ExecuteState, TraceId: 01js2f68p2ee9vedehgzdaym7h, EndCleanup, isFinal: 1 2025-04-17T18:11:48.326019Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: ExecuteState, TraceId: 01js2f68p2ee9vedehgzdaym7h, Sent query response back to proxy, proxyRequestId: 5, proxyId: [50:7494346412064971636:2280] 2025-04-17T18:11:48.326055Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: unknown state, TraceId: 01js2f68p2ee9vedehgzdaym7h, Cleanup temp tables: 0 2025-04-17T18:11:48.326640Z node 50 
:KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913507000, txId: 18446744073709551615] shutting down 2025-04-17T18:11:48.326741Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MjZlODEwNTAtYmVjNGY1NGItYjgwNjc5OGQtZDcwNzAwZjA=, ActorId: [50:7494346450719681079:3108], ActorState: unknown state, TraceId: 01js2f68p2ee9vedehgzdaym7h, Session actor destroyed >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpTx::SnapshotRO [GOOD] >> KqpTx::SnapshotROInteractive1 >> KqpSnapshotIsolation::TConflictWriteOltp [GOOD] >> KqpSnapshotIsolation::TConflictWriteOlap [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 20841, MsgBus: 27714 2025-04-17T18:11:32.766869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346383776986717:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:32.766986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017e4/r3tmp/tmpYFviqP/pdisk_1.dat 2025-04-17T18:11:33.044084Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20841, node 1 2025-04-17T18:11:33.117534Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:33.117562Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:33.117577Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:33.117732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:33.131294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:33.131397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:33.133155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27714 TClient is connected to server localhost:27714 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:33.531847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:35.169680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346396661889279:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:35.169867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346396661889269:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:35.170229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:35.173620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:35.183883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346396661889284:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:35.253246Z node 1 :TX_PROXY ERROR: Actor# [1:7494346396661889335:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:35.506006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:35.611582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:36.397081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:37.766896Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346383776986717:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:37.767650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 4750, MsgBus: 2517 2025-04-17T18:11:43.582865Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346430145687351:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:43.582920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017e4/r3tmp/tmpeUqCVy/pdisk_1.dat 2025-04-17T18:11:43.647293Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4750, node 2 2025-04-17T18:11:43.696078Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:43.696097Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:43.696102Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:43.696182Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:43.705899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:43.705967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:43.707443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2517 TClient is connected to server localhost:2517 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:43.980555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:45.662016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346438735622603:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:45.662051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346438735622584:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:45.662091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:45.664640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:45.670961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346438735622613:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:45.762608Z node 2 :TX_PROXY ERROR: Actor# [2:7494346438735622664:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:45.795648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:45.825151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.483965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:47.500576Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2025-04-17T18:11:47.501726Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346447325565921:2966], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7494346447325565606:2966]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7494346447325565921:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:47.502069Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346447325565914:2966], SessionActorId: [2:7494346447325565606:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7494346447325565606:2966]. isRollback=0 2025-04-17T18:11:47.502252Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTc3NDhlYjQtMzJjZjdmZmQtYzNhYTQyZmItODlmNWUyYg==, ActorId: [2:7494346447325565606:2966], ActorState: ExecuteState, TraceId: 01js2f6948d6f402x1d52ff43x, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494346447325565915:2966] from: [2:7494346447325565914:2966] 2025-04-17T18:11:47.502321Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346447325565915:2966] TxId: 281474976715666. Ctx: { TraceId: 01js2f6948d6f402x1d52ff43x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTc3NDhlYjQtMzJjZjdmZmQtYzNhYTQyZmItODlmNWUyYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:47.502446Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTc3NDhlYjQtMzJjZjdmZmQtYzNhYTQyZmItODlmNWUyYg==, ActorId: [2:7494346447325565606:2966], ActorState: ExecuteState, TraceId: 01js2f6948d6f402x1d52ff43x, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:11:48.583321Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346430145687351:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:48.583384Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [FAIL] Test command err: Trying to start YDB, gRPC: 13692, MsgBus: 23333 2025-04-17T18:11:14.635338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346305866201477:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:14.635475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001818/r3tmp/tmpZPRaOI/pdisk_1.dat 2025-04-17T18:11:14.960115Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13692, node 1 2025-04-17T18:11:14.965311Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:11:14.997523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:14.997633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:14.999339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:15.071265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:15.071286Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:15.071298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:15.071424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23333 TClient is connected to server localhost:23333 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:15.663081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:16.859707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346314456136740:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:16.859722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346314456136729:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:16.859783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:16.864502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:16.872470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346314456136743:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:16.941827Z node 1 :TX_PROXY ERROR: Actor# [1:7494346314456136794:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:17.265958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:17.380429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:17.380435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:17.380582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:17.380765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:17.380835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:17.380891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:17.380949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:17.381045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:17.381053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:17.381155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:17.381285Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:17.381286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:17.381348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:17.381359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:17.381432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:17.381437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:17.381524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:17.381526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494346318751104273:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:17.381612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:17.381693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:17.381805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:17.381922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:17.382012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:17.382105Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494346318751104285:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:17.407998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346318751104291:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:17.408049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346318751104291:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2 ... vNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.227783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346411915977523:3304];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.227981Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346411915977523:3304];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.228790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346411915977551:3308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.229000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346411915977551:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.229754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7494346411915977624:3315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.229890Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7494346411915977624:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.231850Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7494346411915977611:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.232068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7494346411915977611:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.241553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7494346411915977436:3280];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.241553Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038026;self_id=[2:7494346411915977494:3294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.241747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7494346411915977494:3294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.241763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7494346411915977436:3280];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.244588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346411915977430:3277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.244746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346411915977430:3277];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.245112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7494346411915977525:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.245113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7494346411915977596:3311];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.245259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7494346411915977596:3311];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.245273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7494346411915977525:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.245623Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7494346411915977594:3310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.245756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7494346411915977594:3310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.246604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7494346411915977751:3325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.246620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346411915977490:3292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.246710Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038011;self_id=[2:7494346411915977751:3325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.246818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346411915977490:3292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.247124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7494346399031071277:2522];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037947;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.247202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7494346394736103657:2472];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037969;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.247224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7494346399031071277:2522];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037947;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.247390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7494346394736103657:2472];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037969;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.247650Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7494346411915976741:3134];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.247766Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7494346411915976741:3134];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.248199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;self_id=[2:7494346394736103513:2450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037965;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.248308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;self_id=[2:7494346394736103513:2450];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037965;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.250258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7494346411915977501:3297];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.250379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7494346411915977501:3297];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.263045Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037959;self_id=[2:7494346394736103723:2481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037959;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:50.263207Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037959;self_id=[2:7494346394736103723:2481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037959;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:588, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]"
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F
2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:588: CompareYson @ 0x4900DB47
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x1900EF4A
4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18FDD558
6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18FE36B7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FE36B7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FE36B7
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955
12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955
13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8
15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18FE2883
16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5
17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC
18. ??:0: ?? @ 0x7F15F4F5FD8F
19. ??:0: ?? @ 0x7F15F4F5FE3F
20. ??:0: ??
@ 0x16619028 >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapSnapshotRO >> KqpSinkTx::OlapSnapshotROInteractive1 >> KqpSinkLocks::TInvalidate >> KqpLocks::Invalidate >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpTx::SnapshotROInteractive1 [GOOD] >> KqpService::RangeCache+UseCache [GOOD] >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 19184, MsgBus: 5017 2025-04-17T18:11:36.680316Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346402061205721:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:36.680614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017db/r3tmp/tmpEsqYWW/pdisk_1.dat 2025-04-17T18:11:36.974381Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19184, node 1 2025-04-17T18:11:37.013792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:37.013951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:37.015668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:37.024509Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:37.024557Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:37.024567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:37.024652Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5017 TClient is connected to server localhost:5017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:37.411067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:38.792330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346410651140984:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:38.792405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346410651140976:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:38.792659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:38.796285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:38.805159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346410651140990:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:38.884280Z node 1 :TX_PROXY ERROR: Actor# [1:7494346410651141041:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:39.074953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.168814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:39.752346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.881446Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710667; 2025-04-17T18:11:40.892758Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346419241084700:2966], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7494346419241083968:2966]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[1:7494346419241084700:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:40.893118Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346419241084689:2966], SessionActorId: [1:7494346419241083968:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346419241083968:2966]. isRollback=0 2025-04-17T18:11:40.893967Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNjZGM1YTgtNmU2NTZmMTctNjdmZWMyMjMtMTUyNGU1Yjc=, ActorId: [1:7494346419241083968:2966], ActorState: ExecuteState, TraceId: 01js2f62ny02qfzy12vmc1vh6w, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346419241084690:2966] from: [1:7494346419241084689:2966] 2025-04-17T18:11:40.894042Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346419241084690:2966] TxId: 281474976710667. Ctx: { TraceId: 01js2f62ny02qfzy12vmc1vh6w, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNjZGM1YTgtNmU2NTZmMTctNjdmZWMyMjMtMTUyNGU1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:40.894251Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNjZGM1YTgtNmU2NTZmMTctNjdmZWMyMjMtMTUyNGU1Yjc=, ActorId: [1:7494346419241083968:2966], ActorState: ExecuteState, TraceId: 01js2f62ny02qfzy12vmc1vh6w, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2025-04-17T18:11:41.679693Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346402061205721:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:41.679760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 5440, MsgBus: 63942 2025-04-17T18:11:46.674670Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346444782532664:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:46.674716Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017db/r3tmp/tmps3yEai/pdisk_1.dat 2025-04-17T18:11:46.737514Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5440, node 2 2025-04-17T18:11:46.779394Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:46.779411Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:46.779418Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:46.779516Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:46.800036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:46.800119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:46.801580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63942 TClient is connected to server localhost:63942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:11:47.102264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:48.565574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346453372467914:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:48.565615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346453372467909:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:48.565685Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:48.568832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:48.575876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346453372467923:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:48.673421Z node 2 :TX_PROXY ERROR: Actor# [2:7494346453372467974:2332] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:48.704071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:48.730872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:49.441157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.377159Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:11:50.377318Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:11:50.377421Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:11:50.377561Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346461962411081:2966], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7494346461962410974:2966]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7494346461962411081:2966].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:11:50.377652Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346461962411074:2966], SessionActorId: [2:7494346461962410974:2966], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7494346461962410974:2966]. isRollback=0 2025-04-17T18:11:50.377836Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDI2NThlZWQtYTFhOGNkMjUtMmE0ZDAyM2EtZjRiMzBhY2U=, ActorId: [2:7494346461962410974:2966], ActorState: ExecuteState, TraceId: 01js2f6by1a4kwbg2pwm0gdzv7, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7494346461962411075:2966] from: [2:7494346461962411074:2966] 2025-04-17T18:11:50.377896Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346461962411075:2966] TxId: 281474976715664. Ctx: { TraceId: 01js2f6by1a4kwbg2pwm0gdzv7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDI2NThlZWQtYTFhOGNkMjUtMmE0ZDAyM2EtZjRiMzBhY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:11:50.378117Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDI2NThlZWQtYTFhOGNkMjUtMmE0ZDAyM2EtZjRiMzBhY2U=, ActorId: [2:7494346461962410974:2966], ActorState: ExecuteState, TraceId: 01js2f6by1a4kwbg2pwm0gdzv7, Create QueryResponse for error on request, msg:
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-04-17T18:11:50.420328Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDI2NThlZWQtYTFhOGNkMjUtMmE0ZDAyM2EtZjRiMzBhY2U=, ActorId: [2:7494346461962410974:2966], ActorState: ExecuteState, TraceId: 01js2f6bze1kwy8fvv3e41jwc7, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01js2f6bxyb9kypg8d69tn7ejc, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:11:51.674912Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346444782532664:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:51.674964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 3136, MsgBus: 14934 2025-04-17T18:11:49.282125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346456780491065:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:49.283688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a7/r3tmp/tmp6jEmdh/pdisk_1.dat 2025-04-17T18:11:49.514920Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3136, node 1 2025-04-17T18:11:49.573920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:49.573943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:49.573971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:49.574081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:49.630531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:49.630613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:49.632529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14934 TClient is connected to server localhost:14934 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:49.950090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:49.970954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:50.104090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:50.233107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:50.288640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:51.286255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346465370427364:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:51.286534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:51.459495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.521653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.542351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.563613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.584329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.608271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.652039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.688483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346465370428022:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:51.688530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346465370428027:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:51.688536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:51.690763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:51.696445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346465370428029:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:51.777913Z node 1 :TX_PROXY ERROR: Actor# [1:7494346465370428085:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:52.808200Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ3NGVhNjktMmEwNDk4ZTgtNDFlNjAxNjUtMmI3MmQ5YmQ=, ActorId: [1:7494346469665395702:2511], ActorState: ExecuteState, TraceId: 01js2f6ea0bb03zbcwf7vm08zn, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 21735, MsgBus: 2179 2025-04-17T18:11:53.297539Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346475634658616:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:53.297617Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a7/r3tmp/tmppwPy6W/pdisk_1.dat 2025-04-17T18:11:53.360546Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21735, node 2 2025-04-17T18:11:53.407207Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:53.407228Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:53.407233Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:53.407331Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:53.420830Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:53.420887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:53.422534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2179 TClient is connected to server localhost:2179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:53.701597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:53.710348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:53.741976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:53.828623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:53.897132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:55.208341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346484224594971:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.208395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.244491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.263498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.284880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.308107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.329172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.355708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.419417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.460224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346484224595633:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.460270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.460334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346484224595638:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.462726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:55.469236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346484224595640:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:55.528853Z node 2 :TX_PROXY ERROR: Actor# [2:7494346484224595695:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 28028, MsgBus: 32274 2025-04-17T18:11:47.780987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346447164557705:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:47.781065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ac/r3tmp/tmp9PEcOe/pdisk_1.dat 2025-04-17T18:11:48.003584Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28028, node 1 2025-04-17T18:11:48.058122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:48.058145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:48.058152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:48.058252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:48.110502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:48.110577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:48.112199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32274 TClient is connected to server localhost:32274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:48.364695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:11:48.388630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:48.471542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:48.569373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:48.635963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:49.850030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346455754494068:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:49.850136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:50.068041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.089644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.151889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.172638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.194970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.241400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.262704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.303330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346460049462021:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:50.303407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:50.303411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346460049462026:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:50.305459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:50.311137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346460049462028:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:11:50.411141Z node 1 :TX_PROXY ERROR: Actor# [1:7494346460049462084:3535] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:51.080117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.102883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:11:51.124715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:11:52.780918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346447164557705:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:52.780983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62469, MsgBus: 19669 2025-04-17T18:11:53.606219Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346473586994875:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:53.606275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ac/r3tmp/tmpswje4O/pdisk_1.dat 2025-04-17T18:11:53.674763Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62469, node 2 2025-04-17T18:11:53.719401Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:53.719438Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:53.719448Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:53.719538Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:53.728179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:53.728287Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:53.729684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19669 TClient is connected to server localhost:19669 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:54.000063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:54.008514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:54.052539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:54.179597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:54.246136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:55.599307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346482176931251:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.599436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.621735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.644756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.667880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.688776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.709394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.731367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.757455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.801589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346482176931905:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.801638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.801651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346482176931910:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:55.803615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:55.809245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346482176931912:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:55.882291Z node 2 :TX_PROXY ERROR: Actor# [2:7494346482176931967:3535] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:56.691894Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715674; 2025-04-17T18:11:56.700087Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346486471899675:2509], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7494346486471899574:2509]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7494346486471899675:2509].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:56.700627Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346486471899668:2509], SessionActorId: [2:7494346486471899574:2509], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7494346486471899574:2509]. isRollback=0 2025-04-17T18:11:56.700831Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjNjNjhhNmUtZmQ3OWE2MDMtZWY0NWMyZGEtNzliZWFmMGE=, ActorId: [2:7494346486471899574:2509], ActorState: ExecuteState, TraceId: 01js2f6j3af40d36wkzsn71sxs, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494346486471899669:2509] from: [2:7494346486471899668:2509] 2025-04-17T18:11:56.700900Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346486471899669:2509] TxId: 281474976715674. Ctx: { TraceId: 01js2f6j3af40d36wkzsn71sxs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjNjNjhhNmUtZmQ3OWE2MDMtZWY0NWMyZGEtNzliZWFmMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:56.701071Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjNjNjhhNmUtZmQ3OWE2MDMtZWY0NWMyZGEtNzliZWFmMGE=, ActorId: [2:7494346486471899574:2509], ActorState: ExecuteState, TraceId: 01js2f6j3af40d36wkzsn71sxs, Create QueryResponse for error on request, msg: >> KqpSinkTx::SnapshotROInteractive2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 18718, MsgBus: 12097 2025-04-17T18:05:05.504173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494344722676068746:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:05:05.504390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b85/r3tmp/tmpy1vni6/pdisk_1.dat 2025-04-17T18:05:05.874670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:05:05.913875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:05:05.913970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:05:05.915489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18718, node 1 2025-04-17T18:05:05.976756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:05:05.976776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:05:05.976786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:05:05.976882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12097 TClient is connected to server localhost:12097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:05:06.646708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:05:06.665366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:06.837744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:07.017905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:07.089270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:05:08.543194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344735560972404:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.543286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:08.849736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.880073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.959035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:05:08.992860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.064747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.113652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.155189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:05:09.254126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344739855940372:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.254207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.254278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494344739855940377:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:05:09.258644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:05:09.269001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494344739855940379:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:05:09.353985Z node 1 :TX_PROXY ERROR: Actor# [1:7494344739855940436:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:05:10.311168Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=Zjc2NTNkOWItOThmOGM2YzQtM2FlNmU5ZGYtYjI4NmY3NA==, ActorId: [1:7494344735560972379:2403], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:05:10.311217Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=Zjc2NTNkOWItOThmOGM2YzQtM2FlNmU5ZGYtYjI4NmY3NA==, ActorId: [1:7494344735560972379:2403], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:05:10.311239Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Zjc2NTNkOWItOThmOGM2YzQtM2FlNmU5ZGYtYjI4NmY3NA==, ActorId: [1:7494344735560972379:2403], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:05:10.311265Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Zjc2NTNkOWItOThmOGM2YzQtM2FlNmU5ZGYtYjI4NmY3NA==, ActorId: [1:7494344735560972379:2403], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:05:10.311364Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Zjc2NTNkOWItOThmOGM2YzQtM2FlNmU5ZGYtYjI4NmY3NA==, ActorId: [1:7494344735560972379:2403], ActorState: unknown state, Session actor destroyed 2025-04-17T18:05:10.312452Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzdkZjY0Y2UtZGQ3MWQ2ZjgtOGFkZjRlYzktMjRjY2E5ZmU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzdkZjY0Y2UtZGQ3MWQ2ZjgtOGFkZjRlYzktMjRjY2E5ZmU= 2025-04-17T18:05:10.312729Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzdkZjY0Y2UtZGQ3MWQ2ZjgtOGFkZjRlYzktMjRjY2E5ZmU=, ActorId: [1:7494344744150908064:2512], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:05:10.329908Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzkwZTc3OC05N2ZlMDJhMC05ZDcxMDZiLTVlNjU2NjMw, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzkwZTc3OC05N2ZlMDJhMC05ZDcxMDZiLTVlNjU2NjMw 2025-04-17T18:05:10.330100Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzkwZTc3OC05N2ZlMDJhMC05ZDcxMDZiLTVlNjU2NjMw, ActorId: [1:7494344744150908066:2514], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:05:10.339159Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmM2OGMxMTgtODgzZDgxNjMtMTBjNmYwMDMtZjkwZjIxYjM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmM2OGMxMTgtODgzZDgxNjMtMTBjNmYwMDMtZjkwZjIxYjM= 2025-04-17T18:05:10.339376Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmM2OGMxMTgtODgzZDgxNjMtMTBjNmYwMDMtZjkwZjIxYjM=, ActorId: [1:7494344744150908068:2516], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:05:10.348623Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2NiZDU3LTJmYjIzOGU0LWNlYzBhMDctMzEzZjkxZQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2NiZDU3LTJmYjIzOGU0LWNlYzBhMDctMzEzZjkxZQ== 2025-04-17T18:05:10.348821Z node 1 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=1&id=M2NiZDU3LTJmYjIzOGU0LWNlYzBhMDctMzEzZjkxZQ==, ActorId: [1:7494344744150908070:2518], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:05:10.357833Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzNjNTViMzQtYjZjNjAyNzYtOTNlZmMzMWMtZGUyNDUwODM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzNjNTViMzQtYjZjNjAyNzYtOTNlZmMzMWMtZGUyNDUwODM= 2025-04-17T18:05:10.362264Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzNjNTViMzQtYjZjNjAyNzYtOTNlZmMzMWMtZGUyNDUwODM=, ActorId: [1:7494344744150908072:2520], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:05:10.365310Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWFiNjI0OGEtNGVjM2E1NzctZjFiZGI5NDEtMjA3MGNiNzI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWFiNjI0OGEtNGVjM2E1NzctZjFiZGI5NDEtMjA3MGNiNzI= 2025-04-17T18:05:10.365451Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWFiNjI0OGEtNGVjM2E1NzctZjFiZGI5ND ... t\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.035615Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505359:2422] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.041034Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505382:2434] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.044994Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505390:2440] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.045512Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505392:2442] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.047126Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505402:2450] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.057472Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505414:2458] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.064499Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505422:2464] txid# 281474976715675, issues: { 
message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.064715Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505423:2465] txid# 281474976715676, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:23.075023Z node 7 :TX_PROXY ERROR: Actor# [7:7494346088942505438:2476] txid# 281474976715677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:10:33.144818Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:33.144860Z node 7 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 7480, MsgBus: 15738 2025-04-17T18:11:42.387726Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494346426618431432:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:42.387813Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b85/r3tmp/tmpEvkTeE/pdisk_1.dat 2025-04-17T18:11:42.483697Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7480, node 8 2025-04-17T18:11:42.520622Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:42.520706Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:42.521809Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:42.555052Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:42.555076Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:42.555083Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:42.555231Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15738 TClient is connected to server localhost:15738 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:43.023329Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:43.037119Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:43.112555Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:43.294041Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:43.360942Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:46.340656Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494346443798302407:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:46.340756Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:46.398309Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.429779Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.458988Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.488548Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.518354Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.586378Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.664565Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:46.721560Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494346443798303077:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:46.721653Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:46.721748Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494346443798303082:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:46.725210Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:46.734679Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7494346443798303084:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:46.833649Z node 8 :TX_PROXY ERROR: Actor# [8:7494346443798303142:3560] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:47.387942Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7494346426618431432:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:47.388064Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 7.968562s took: 7.969769s took: 7.970540s took: 7.971106s took: 7.971194s took: 7.971590s took: 7.971657s took: 7.972434s took: 7.972910s took: 7.973072s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 4271, MsgBus: 7562 2025-04-17T18:11:26.569104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346359109151589:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:26.569616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017fa/r3tmp/tmpIQ6fp5/pdisk_1.dat 2025-04-17T18:11:26.819011Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4271, node 1 2025-04-17T18:11:26.884269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:26.884289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:26.884294Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:26.884401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:26.912760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:26.912851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:26.914520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7562 TClient is connected to server localhost:7562 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:27.316634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:28.675239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346367699086837:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:28.675336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346367699086843:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:28.675432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:28.678253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:28.684525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346367699086851:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:28.774928Z node 1 :TX_PROXY ERROR: Actor# [1:7494346367699086902:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:28.982797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:29.095830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:29.095999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:29.095998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:29.096073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:29.096237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:29.096293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:29.096351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:29.096376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:29.096519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:29.096550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:29.096648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-17T18:11:29.096669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:29.096727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:29.096780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:29.096806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:29.096889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:29.096919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:29.096957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:29.097038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:29.097038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:29.097122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346371994054401:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:29.097156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:29.097297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:29.097402Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494346371994054389:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:29.126147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346371994054409:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:29.126196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346371994054391:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:29.126209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346371994 ... imr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.399885Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7494346451100438051:3077];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.399981Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7494346451100438087:3093];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.400093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7494346451100438087:3093];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.400212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7494346451100438057:3080];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038084;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.400308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7494346451100438057:3080];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.400455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7494346451100438059:3081];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.400570Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7494346451100438059:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.401059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7494346451100438042:3072];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.401208Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7494346451100438042:3072];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.401758Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038090;self_id=[2:7494346451100437838:3047];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.401921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7494346451100437838:3047];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.402051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7494346451100438093:3096];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.402162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7494346451100438093:3096];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.409105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7494346451100438003:3060];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038080;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.409284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7494346451100438003:3060];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.410216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7494346451100438061:3082];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.410354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7494346451100438061:3082];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.411023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7494346451100438006:3061];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.411220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7494346451100438006:3061];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.413272Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7494346451100437999:3058];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.413408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7494346451100437999:3058];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.413511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7494346451100437903:3055];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.413660Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038072;self_id=[2:7494346451100437903:3055];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.415604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7494346451100438085:3092];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.415750Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7494346451100438085:3092];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.419243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7494346451100438188:3120];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.419420Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7494346451100438188:3120];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.422827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7494346451100438017:3063];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.423028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7494346451100438017:3063];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.428888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7494346451100438055:3079];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7494346451100438055:3079];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7494346451100438081:3090];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429321Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7494346451100438038:3070];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7494346451100438081:3090];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7494346451100438047:3075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429467Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7494346451100438038:3070];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7494346451100438167:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429631Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7494346451100438047:3075];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.429701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7494346451100438167:3110];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.430089Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7494346451100437996:3057];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.430237Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7494346451100437996:3057];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.431951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7494346451100438106:3103];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.432121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7494346451100438106:3103];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpTx::LocksAbortOnCommit >> KqpLocksTricky::TestNoLocksIssue-withSink >> KqpLocks::Invalidate [GOOD] >> KqpLocks::InvalidateOnCommit >> KqpTx::InteractiveTx >> KqpSinkLocks::InvalidateOnCommit |89.4%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkTx::ExplicitTcl [GOOD] >> KqpSinkTx::Interactive |89.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::MixEnginesOldNew >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::MixedTxFail+useSink >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 >> THeavyPerfTest::TTestLoadEverything [GOOD] >> THiveImplTest::BootQueueSpeed >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::OlapUncommittedRead >> KqpTx::MixEnginesOldNew [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> KqpTx::InvalidateOnError [GOOD] >> KqpSinkLocks::UncommittedRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 28033, MsgBus: 25299 2025-04-17T18:11:58.869590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346496820261396:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:58.869663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001790/r3tmp/tmpBqA2i9/pdisk_1.dat 2025-04-17T18:11:59.085032Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28033, node 1 2025-04-17T18:11:59.151495Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:59.151517Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:59.151524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:59.151655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:59.197367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:59.197485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:59.199017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25299 TClient is connected to server localhost:25299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:59.495249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:59.522655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:59.630118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:59.731201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:59.775359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:00.768579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346505410197768:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:00.768668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:00.988113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.008115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.027848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.049760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.072548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.118015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.138775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.209865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346509705165728:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.209927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.209978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346509705165733:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.212238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:01.219015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346509705165735:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:01.317930Z node 1 :TX_PROXY ERROR: Actor# [1:7494346509705165792:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:02.364361Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Y1Mzg2ZTgtYjQ4Y2NkYjgtNDlmMTJiODQtYjY3MzljMmM=, ActorId: [1:7494346509705166073:2502], ActorState: ExecuteState, TraceId: 01js2f6qjg9p0hsncxsr823069, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-17T18:12:02.370858Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Y1Mzg2ZTgtYjQ4Y2NkYjgtNDlmMTJiODQtYjY3MzljMmM=, ActorId: [1:7494346509705166073:2502], ActorState: ReadyState, TraceId: 01js2f6qp246526z9b3ssq3vdc, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 31168, MsgBus: 12668 2025-04-17T18:12:02.840240Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346512225346718:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:02.840280Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001790/r3tmp/tmplrUBbl/pdisk_1.dat 2025-04-17T18:12:02.898621Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31168, node 2 2025-04-17T18:12:02.940517Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:02.940535Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:02.940541Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:02.940618Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:02.958902Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:02.958991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:02.960574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12668 TClient is connected to server localhost:12668 WaitRootIsUp 'Root'... 
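------- [NOTE] Annotation, not part of the log: the KqpTx::MixEnginesOldNew run above ends with a session aborting on commit with "tx has deferred effects, but locks are broken". In YDB's optimistic locking model, the writes of an interactive transaction are deferred until commit; if a concurrent transaction invalidates the read locks first, the commit is rejected and the whole transaction body must be rerun. A minimal retry sketch with the YDB Python SDK (endpoint, database, table, and values are hypothetical placeholders — the tests above run an in-process server, not this client path):

    import ydb

    # Hypothetical connection settings for illustration only.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def body(session):
        # The whole read-modify-write reruns on every retry attempt.
        tx = session.transaction(ydb.SerializableReadWrite()).begin()
        tx.execute('SELECT Value FROM `/Root/KeyValue` WHERE Key = 1u;')
        tx.execute('UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "v");')
        tx.commit()  # rejected here if another tx broke our locks meanwhile

    # retry_operation_sync reruns the callable on retryable statuses such as ABORTED.
    pool.retry_operation_sync(body)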
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:03.197154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:03.205423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:03.242622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:03.328722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:03.369758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:04.759222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346520815283091:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:04.759283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:04.783637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.803291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.821740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.840926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.862442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.884877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.948599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.986421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346520815283749:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:04.986481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:04.986483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346520815283754:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:04.988384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:04.993954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346520815283756:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:05.076930Z node 2 :TX_PROXY ERROR: Actor# [2:7494346525110251107:3533] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpLocks::MixedTxFail+useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 7015, MsgBus: 15412 2025-04-17T18:11:59.891298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346498533270053:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:59.891383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00178a/r3tmp/tmpbWGp0H/pdisk_1.dat 2025-04-17T18:12:00.151816Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7015, node 1 2025-04-17T18:12:00.206621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:00.206649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:00.206653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:00.206769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:00.248677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:00.248779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:00.250679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15412 TClient is connected to server localhost:15412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:00.544553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
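------- [NOTE] Annotation, not part of the log: the repeated KQP_WORKLOAD_SERVICE warnings ("Resource pool default not found or you don't have access permissions") are a startup race, not failures. The workload service tries to fetch the default pool before the TPoolCreatorActor has created it, schedules a retry after the "doublechecking" message, and the later TX_PROXY "path exist, request accepts it" error shows a concurrent creation resolving idempotently. A small helper (my own, not part of the test suite) that counts such records using the log format visible above:

    import re
    import sys
    from collections import Counter

    # Matches records like:
    # 2025-04-17T18:12:00.768579Z node 1 :KQP_WORKLOAD_SERVICE WARN: ...
    RECORD = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (?P<node>\d+) "
        r":(?P<component>[A-Z_]+) (?P<level>[A-Z]+):"
    )

    counts = Counter()
    for line in sys.stdin:
        # Several records may share one wrapped line, so scan each line fully.
        for m in RECORD.finditer(line):
            counts[(m["component"], m["level"])] += 1

    for (component, level), n in counts.most_common():
        print(f"{component} {level}: {n}")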
2025-04-17T18:12:00.557713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:00.667742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:00.765122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:00.837005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:01.804190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346507123206423:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.804270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.986453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.006409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.025290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.045577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.065686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.093188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.121193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.193543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346511418174384:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:02.193599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:02.193669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346511418174389:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:02.196294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:02.203245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346511418174391:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:02.301557Z node 1 :TX_PROXY ERROR: Actor# [1:7494346511418174448:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15848, MsgBus: 2170 2025-04-17T18:12:03.459210Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346518797687140:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:03.459273Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00178a/r3tmp/tmpDFaePz/pdisk_1.dat 2025-04-17T18:12:03.529385Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15848, node 2 2025-04-17T18:12:03.572092Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:03.572110Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:03.572117Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:03.572198Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:03.583230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:03.583290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:03.585053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2170 TClient is connected to server localhost:2170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:03.833335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:03.841934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
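------- [NOTE] Annotation, not part of the log: the FLAT_TX_SCHEMESHARD lines "Operation part proposed ok, but propose itself is undo unsafe" appear once per ESchemeOpCreateTable suboperation while the fixtures create their tables. They warn that the scheme operation cannot be rolled back once proposed; they are not errors, which is why each run proceeds to "waiting..." and continues. A hedged sketch of the kind of scheme call that produces one such record, reusing the `pool` from the first sketch above (the schema is illustrative, loosely modeled on the `/Root/KeyValue` table used later in this log):

    def create_tables(session):
        # Each CREATE TABLE becomes an ESchemeOpCreateTable suboperation in schemeshard.
        session.execute_scheme(
            "CREATE TABLE `/Root/KeyValue` ("
            "  Key Uint64,"
            "  Value String,"
            "  PRIMARY KEY (Key)"
            ");"
        )

    pool.retry_operation_sync(create_tables)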
2025-04-17T18:12:03.909010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:04.010723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:04.078334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:05.327223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346527387623507:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.327282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.373762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.413176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.432674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.452760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.475486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.499653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.525069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.594934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346527387624170:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.595015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.595095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346527387624175:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.597680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:05.604143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346527387624177:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:05.680729Z node 2 :TX_PROXY ERROR: Actor# [2:7494346527387624233:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:06.255219Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:12:06.261143Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037911 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:12:06.261296Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037911 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:12:06.261433Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346531682591870:2509], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [2:7494346531682591841:2509]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[2:7494346531682591870:2509].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:12:06.261901Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346531682591863:2509], SessionActorId: [2:7494346531682591841:2509], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7494346531682591841:2509]. isRollback=0 2025-04-17T18:12:06.262076Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDU4MmRkOWMtOTMyZGUxNDMtZjkxMjIyYWUtMjI1M2UxN2E=, ActorId: [2:7494346531682591841:2509], ActorState: ExecuteState, TraceId: 01js2f6ve976egedapdtgh4hjn, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7494346531682591864:2509] from: [2:7494346531682591863:2509] 2025-04-17T18:12:06.262144Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346531682591864:2509] TxId: 281474976715672. Ctx: { TraceId: 01js2f6ve976egedapdtgh4hjn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDU4MmRkOWMtOTMyZGUxNDMtZjkxMjIyYWUtMjI1M2UxN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:12:06.262298Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDU4MmRkOWMtOTMyZGUxNDMtZjkxMjIyYWUtMjI1M2UxN2E=, ActorId: [2:7494346531682591841:2509], ActorState: ExecuteState, TraceId: 01js2f6ve976egedapdtgh4hjn, Create QueryResponse for error on request, msg: 2025-04-17T18:12:06.313178Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDU4MmRkOWMtOTMyZGUxNDMtZjkxMjIyYWUtMjI1M2UxN2E=, ActorId: [2:7494346531682591841:2509], ActorState: ExecuteState, TraceId: 01js2f6vfvd477e1jzsq0aktkt, Create QueryResponse for error on request, msg: >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 19457, MsgBus: 32099 2025-04-17T18:11:43.367481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346430227272372:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:43.367726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017b7/r3tmp/tmpsKJdcK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19457, node 1 2025-04-17T18:11:43.635587Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:11:43.635905Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:11:43.637861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:43.662978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:43.663001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:43.663008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:43.663152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:43.718145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:43.718286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:43.719904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32099 TClient is connected to server localhost:32099 WaitRootIsUp 'Root'... 
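------- [NOTE] Annotation, not part of the log: in the KqpTx::InvalidateOnError output above, the datashard rejects tx 3 with STATUS_CONSTRAINT_VIOLATION ("Duplicate keys have been found.", issue code 2012), and KQP surfaces it to the session as PRECONDITION_FAILED. INSERT requires the key to be absent, while UPSERT is a blind write that would have succeeded. A sketch of that distinction with the Python SDK (the exception class name reflects my understanding of how the SDK maps PRECONDITION_FAILED; values are illustrative):

    def insert_or_overwrite(session):
        try:
            session.transaction(ydb.SerializableReadWrite()).execute(
                'INSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "a");',
                commit_tx=True,
            )
        except ydb.issues.PreconditionFailed:
            # Key already present: INSERT fails with code 2012; UPSERT overwrites.
            session.transaction(ydb.SerializableReadWrite()).execute(
                'UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "a");',
                commit_tx=True,
            )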
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:44.027451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:45.178206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346438817207604:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:45.178262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346438817207623:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:45.178384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:45.182875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:45.192920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346438817207640:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:45.271267Z node 1 :TX_PROXY ERROR: Actor# [1:7494346438817207691:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:45.517362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:45.620491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:45.620631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:45.620835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:45.620932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:45.621017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:45.621110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:45.621182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:45.621284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:45.621351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:45.621419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:45.621499Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:45.621561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:45.622664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:45.622761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:45.622901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:45.623046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:45.623185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:45.623294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:45.623388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:45.623466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:45.623593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:45.623717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:45.623848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:45.623954Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:45.646664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346438817207888:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:45.646714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346438817207888:2345];tablet_id=72075186224037896;process=TTxInitSchema::Exe ... node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDYyMzdmZDctZGIzYWU3ZjItNjc0YmU5NTktMjM3M2Y5ZTQ=, ActorId: [1:7494346460292053498:3913], ActorState: ExecuteState, TraceId: 01js2f6cch7qfkznkfpzvegmy9, Create QueryResponse for error on request, msg: 2025-04-17T18:11:50.930991Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037890 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931037Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[1:7494346438817207894:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:11:50.931106Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037891 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931151Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[1:7494346438817207902:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:11:50.931297Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037892 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931340Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[1:7494346438817207890:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:11:50.931542Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[1:7494346438817207896:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:11:50.931563Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037893 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931604Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is 
supported;tx_id=0; 2025-04-17T18:11:50.931650Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931675Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931706Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[1:7494346438817207900:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:11:50.931728Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[1:7494346438817207884:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:11:50.931777Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037895 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931785Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037896 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-17T18:11:50.931840Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[1:7494346438817207888:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:11:50.931842Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[1:7494346438817207898:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 2025-04-17T18:11:51.256144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:11:51.256598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346438817207886:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 7067, MsgBus: 29981 2025-04-17T18:11:57.144857Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346490703473722:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:57.144891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017b7/r3tmp/tmpH6AFr1/pdisk_1.dat 2025-04-17T18:11:57.213651Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7067, node 2 2025-04-17T18:11:57.263955Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:57.263984Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:57.263990Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:57.264107Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:57.265581Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:57.265742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:57.267425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29981 TClient is connected to server localhost:29981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:57.582222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
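------- [NOTE] Annotation, not part of the log: the pair "Transaction locks invalidated. Table: `/Root/Test`., code: 2001" / "tablet lock have another internal generation counter: 18446744073709551615 != 0" above is the column shard's optimistic lock check failing at commit: the lock generation recorded by the transaction no longer matches the tablet's, so the commit aborts. Code 2001 maps to the retryable ABORTED status; below is a sketch of catching it explicitly when managing the transaction by hand instead of through retry_operation_sync (exception class per my understanding of the SDK; the `/Root/Test` columns are illustrative):

    def commit_once(session):
        tx = session.transaction(ydb.SerializableReadWrite()).begin()
        tx.execute('SELECT Amount FROM `/Root/Test` WHERE Group = 1u;')
        tx.execute('UPSERT INTO `/Root/Test` (Group, Comment) VALUES (1u, "updated");')
        try:
            tx.commit()
            return True
        except ydb.issues.Aborted:
            # Lock invalidated (code 2001): discard results, rerun the whole body.
            return False

    with pool.checkout() as session:
        while not commit_once(session):
            pass  # naive retry; real code should bound attempts and back off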
2025-04-17T18:11:59.240484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346499293408954:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:59.240525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346499293408966:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:59.240562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:59.242947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:59.249597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346499293408977:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:59.345390Z node 2 :TX_PROXY ERROR: Actor# [2:7494346499293409028:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:59.383801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:59.453103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:00.155783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-17T18:12:02.144979Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346490703473722:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:02.145029Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 18032, MsgBus: 2713 2025-04-17T18:11:55.463087Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346483389440827:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:55.463183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001795/r3tmp/tmpnsae7Z/pdisk_1.dat 2025-04-17T18:11:55.664548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:55.665968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:55.666025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:55.668403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18032, node 1 2025-04-17T18:11:55.712683Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:55.712701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:55.712716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:55.712799Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:2713 TClient is connected to server localhost:2713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:56.048885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:56.065214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:56.152416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:56.241532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:56.310318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:57.221778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346491979377183:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.221880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.447214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.469704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.489987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.509601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.529654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.553010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.580844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.654904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346491979377856:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.654974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346491979377861:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.654980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.657392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:57.664152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346491979377863:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:57.748684Z node 1 :TX_PROXY ERROR: Actor# [1:7494346491979377920:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:58.679825Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-17T18:11:58.679997Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037914 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:11:58.680112Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037914 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:11:58.680258Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346496274345631:2510], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7494346496274345535:2510]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7494346496274345631:2510].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:58.680698Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346496274345617:2510], SessionActorId: [1:7494346496274345535:2510], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346496274345535:2510]. isRollback=0 2025-04-17T18:11:58.680923Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTI2ZTZjMWQtODM2MTg1NjUtMzExYWY3OTItZmJmYjgwNjU=, ActorId: [1:7494346496274345535:2510], ActorState: ExecuteState, TraceId: 01js2f6m0zdqe0c2kjat563x17, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346496274345618:2510] from: [1:7494346496274345617:2510] 2025-04-17T18:11:58.680993Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346496274345618:2510] TxId: 281474976715673. Ctx: { TraceId: 01js2f6m0zdqe0c2kjat563x17, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI2ZTZjMWQtODM2MTg1NjUtMzExYWY3OTItZmJmYjgwNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:58.681243Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTI2ZTZjMWQtODM2MTg1NjUtMzExYWY3OTItZmJmYjgwNjU=, ActorId: [1:7494346496274345535:2510], ActorState: ExecuteState, TraceId: 01js2f6m0zdqe0c2kjat563x17, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 28157, MsgBus: 29200 2025-04-17T18:11:59.486970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346501492853978:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:59.487040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001795/r3tmp/tmpEcJEfd/pdisk_1.dat 2025-04-17T18:11:59.556513Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28157, node 2 2025-04-17T18:11:59.597216Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:59.597233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:59.597239Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:59.597311Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:59.609317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:59.609394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileS ... 976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.472186Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.474919Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.475337Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.478205Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.478597Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.481360Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.481677Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.484595Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.484856Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 
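
The STATUS_LOCKS_BROKEN / "Transaction locks invalidated" (issue code 2001) errors in this test's output are YDB's optimistic locking: a transaction records read locks for the rows it touched, and if another transaction commits a write to the same rows first, the original transaction's commit is rejected with ABORTED. A minimal client-side sketch of the same conflict, assuming the in-tree YDB C++ SDK; the endpoint, database, and table KV (k Uint64 PRIMARY KEY, v Uint64) are placeholders, not taken from the log:

    // Hypothetical reproduction of the "Transaction locks invalidated"
    // (code 2001) abort seen in the log; endpoint, database, and table
    // are placeholders.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("grpc://localhost:2136")  // placeholder endpoint
            .SetDatabase("/Root"));                // placeholder database
        TTableClient client(driver);

        auto s1 = client.CreateSession().GetValueSync().GetSession();
        auto s2 = client.CreateSession().GetValueSync().GetSession();

        // Tx1: read a row and keep the transaction open (read lock recorded).
        auto r1 = s1.ExecuteDataQuery(
            "SELECT v FROM KV WHERE k = 1u;",
            TTxControl::BeginTx(TTxSettings::SerializableRW())).GetValueSync();
        auto tx1 = r1.GetTransaction();

        // Tx2: write the same row and commit, which breaks Tx1's read lock.
        s2.ExecuteDataQuery(
            "UPSERT INTO KV (k, v) VALUES (1u, 100u);",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx())
            .GetValueSync();

        // Tx1: write and commit; expected ABORTED with issue code 2001.
        auto r2 = s1.ExecuteDataQuery(
            "UPSERT INTO KV (k, v) VALUES (1u, 200u);",
            TTxControl::Tx(*tx1).CommitTx()).GetValueSync();
        std::cout << (r2.GetStatus() == EStatus::ABORTED ? "ABORTED: " : "OK? ")
                  << r2.GetIssues().ToString() << std::endl;

        driver.Stop(true);
        return 0;
    }
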
2025-04-17T18:12:06.487717Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.488032Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.490989Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.491196Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.494321Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.494629Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.497410Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.497700Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.500517Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.500887Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.503635Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.504023Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.506704Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.507010Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.509668Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.510338Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.512860Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.513528Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.516037Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.516669Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.519208Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:12:06.674961Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:12:06.674961Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:12:06.675205Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7494346526844698294:2405];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037903; 2025-04-17T18:12:06.675241Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7494346526844698294:2405];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037899; 2025-04-17T18:12:06.675274Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7494346526844698294:2405];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037903; 2025-04-17T18:12:06.675333Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7494346526844698294:2405];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037899; 2025-04-17T18:12:06.675402Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:12:06.675534Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-04-17T18:12:06.839780Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2025-04-17T18:12:06.840336Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494346531139667646:2807], SessionActorId: [3:7494346531139667591:2807], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[3:7494346531139667646:2807].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:12:06.840466Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494346531139667646:2807], SessionActorId: [3:7494346531139667591:2807], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7494346531139667591:2807]. isRollback=0 2025-04-17T18:12:06.840616Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmYzZDlmZTgtYThhZWJhMDItZTBjMGE3MDctNzYzNGY0ZDM=, ActorId: [3:7494346531139667591:2807], ActorState: ExecuteState, TraceId: 01js2f6w084hcwfa38b5tvx3a8, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7494346531139667672:2807] from: [3:7494346531139667646:2807] 2025-04-17T18:12:06.840640Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037936;self_id=[3:7494346526844698078:2367];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:06.840682Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494346531139667672:2807] TxId: 281474976715668. Ctx: { TraceId: 01js2f6w084hcwfa38b5tvx3a8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmYzZDlmZTgtYThhZWJhMDItZTBjMGE3MDctNzYzNGY0ZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:12:06.840788Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmYzZDlmZTgtYThhZWJhMDItZTBjMGE3MDctNzYzNGY0ZDM=, ActorId: [3:7494346531139667591:2807], ActorState: ExecuteState, TraceId: 01js2f6w084hcwfa38b5tvx3a8, Create QueryResponse for error on request, msg: 2025-04-17T18:12:06.842082Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715668; 2025-04-17T18:12:06.843554Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715668; >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode >> KqpSinkTx::OlapInvalidateOnError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] Test command err: Trying to start YDB, gRPC: 14139, MsgBus: 6631 2025-04-17T18:11:58.357245Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346496673307518:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:58.357399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001791/r3tmp/tmpHEDHOF/pdisk_1.dat 2025-04-17T18:11:58.581044Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14139, node 1 2025-04-17T18:11:58.635662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:58.635696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:58.635703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:58.635835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:58.692562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:58.692751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:58.694226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6631 TClient is connected to server localhost:6631 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:59.009331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:00.267616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346505263242769:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:00.267704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346505263242777:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:00.267759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:00.271295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:00.278131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346505263242783:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:12:00.363061Z node 1 :TX_PROXY ERROR: Actor# [1:7494346505263242834:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:00.542273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:00.628728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:01.260320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:12:03.357249Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346496673307518:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:03.357292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit >> KqpSnapshotRead::TestReadOnly+withSink >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpLocks::DifferentKeyUpdate >> KqpSnapshotIsolation::TReadOnlyOltp [GOOD] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [GOOD] >> KqpSinkTx::Interactive [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 13833, MsgBus: 27854 2025-04-17T18:11:37.968150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346406455816123:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:37.968215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017bf/r3tmp/tmp27IoOx/pdisk_1.dat 2025-04-17T18:11:38.218996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13833, node 1 2025-04-17T18:11:38.268286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:38.268311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:38.268316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:38.268430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:38.310799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:38.310909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T18:11:38.312560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27854 TClient is connected to server localhost:27854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:38.656798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:39.951241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346415045751359:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.951361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346415045751378:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.951466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:39.955007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:39.963395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346415045751389:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:40.066761Z node 1 :TX_PROXY ERROR: Actor# [1:7494346419340718738:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:40.309601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:40.407677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:40.407683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:40.407814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:40.408030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:40.408118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:40.408185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:40.408266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:40.408338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:40.408345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:40.408438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:40.408525Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:40.408533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:40.408613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:40.408613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:40.408671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:40.408686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:40.408764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:40.408788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494346419340718927:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:40.408857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:40.408973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:40.409061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:40.409134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:40.409227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:40.409308Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7494346419340718934:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:40.433021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346419340718943:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:40.433091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346419340718943:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:40.433252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;sel ... indexation;reason=disabled; 2025-04-17T18:12:02.841145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7494346504018012343:3116];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.841348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7494346504018012343:3116];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.842167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346504018012308:3109];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.842425Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346504018012308:3109];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.843213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7494346504018012389:3141];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.843450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7494346504018012389:3141];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.844337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346504018012640:3199];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.844558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346504018012640:3199];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.846241Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7494346504018012452:3173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.846478Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038025;self_id=[2:7494346504018012452:3173];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.846753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7494346504018012316:3112];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.847055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7494346504018012316:3112];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.848803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7494346504018012405:3150];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.849061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7494346504018012405:3150];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.850403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7494346504018012367:3129];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.850641Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7494346504018012367:3129];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.850867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7494346504018012457:3176];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.851084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7494346504018012457:3176];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.852484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7494346504018012561:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.852735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7494346504018012561:3192];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.852859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346504018012590:3198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.853102Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346504018012590:3198];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.855126Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038003;self_id=[2:7494346504018012448:3170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.855360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7494346504018012448:3170];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.856009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7494346491133106548:2531];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.856156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7494346491133106548:2531];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.858568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7494346504018012363:3126];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.858802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7494346504018012363:3126];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.860595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7494346504018012228:3100];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.860821Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7494346504018012228:3100];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.861110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7494346504018012306:3108];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.861439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7494346504018012306:3108];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.861661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[2:7494346491133106444:2503];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037956;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.861776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[2:7494346491133106444:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037956;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.862823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[2:7494346499723044559:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038087;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.863077Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038087;self_id=[2:7494346499723044559:3054];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038087;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.863360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7494346504018012487:3181];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:02.863604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7494346504018012487:3181];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:03.247494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[2:7494346491133106358:2481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037973;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:03.247687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[2:7494346491133106358:2481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037973;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:03.247787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7494346491133106477:2519];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037923;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:03.247944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7494346491133106477:2519];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037923;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:03.355376Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjVkY2U0MWUtOTU1YzRjNjQtNzUxZjlmYjQtY2EwZWYwZmM=, ActorId: [2:7494346512607950469:3861], ActorState: ExecuteState, TraceId: 01js2f6rk58jdvpekpe7vkcey5, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 64048, MsgBus: 28924 2025-04-17T18:11:51.945083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346468336741847:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:51.945158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a6/r3tmp/tmpV4qlMD/pdisk_1.dat 2025-04-17T18:11:52.201490Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64048, node 1 2025-04-17T18:11:52.253370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:52.253397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:52.253405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:52.253524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:52.314823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:52.314968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:52.316629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28924 TClient is connected to server localhost:28924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:52.598236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
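
For the code-2008 failure above ("Operation 'Upsert' can't be performed in read only transaction"): a write statement inside a transaction begun with snapshot read-only settings is rejected by the query compiler. A minimal sketch under the same assumptions as the previous snippet (in-tree YDB C++ SDK; KV is a hypothetical placeholder table):

    // Hypothetical reproduction of the code-2008 error: an UPSERT inside a
    // snapshot read-only transaction. Same placeholder endpoint, database,
    // and table as in the earlier sketch.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("grpc://localhost:2136")  // placeholder
            .SetDatabase("/Root"));                // placeholder
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // Writes are not allowed in a read-only transaction; this is expected
        // to fail with "Operation 'Upsert' can't be performed in read only
        // transaction, code: 2008", matching the log line above.
        auto res = session.ExecuteDataQuery(
            "UPSERT INTO KV (k, v) VALUES (1u, 1u);",
            TTxControl::BeginTx(TTxSettings::SnapshotRO()).CommitTx())
            .GetValueSync();
        std::cout << res.GetIssues().ToString() << std::endl;

        driver.Stop(true);
        return 0;
    }
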
2025-04-17T18:11:53.660048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346476926677073:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:53.660077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346476926677107:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:53.660182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:53.663303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:53.671292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346476926677111:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:53.768064Z node 1 :TX_PROXY ERROR: Actor# [1:7494346476926677163:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:53.955247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.050971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.670882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:55.605304Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWYxMzk4NjUtMzY5OGRkZjUtYTdjN2U0MTItODE0OGJhZjc=, ActorId: [1:7494346485516620125:2966], ActorState: ReadyState, TraceId: 01js2f6h2edbstb38jq1qzmg2c, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:11:56.945176Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346468336741847:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:56.945243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 15334, MsgBus: 25381 2025-04-17T18:12:01.345183Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346507713594986:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:01.345263Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a6/r3tmp/tmp8udD7J/pdisk_1.dat 2025-04-17T18:12:01.404013Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15334, node 2 2025-04-17T18:12:01.443539Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:01.443564Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:01.443570Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:01.443644Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:01.467458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:01.467537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:01.468586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25381 TClient is connected to server localhost:25381 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:01.710597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:03.152259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346516303530245:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:03.152313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346516303530219:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:03.152573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:03.155261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:03.162883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346516303530248:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:12:03.225924Z node 2 :TX_PROXY ERROR: Actor# [2:7494346516303530299:2332] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:03.255563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:03.285105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:03.935629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:12:06.345489Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346507713594986:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:06.345555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkMvcc::SnapshotExpiration >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 26157, MsgBus: 10232 2025-04-17T18:11:47.420262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346448333137017:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:47.420404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ad/r3tmp/tmp4Yi9Da/pdisk_1.dat 2025-04-17T18:11:47.672987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26157, node 1 2025-04-17T18:11:47.733276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:47.733305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:47.733314Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:47.733427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:47.774170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:47.774287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:47.775700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10232 TClient is connected to server localhost:10232 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:48.100049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:49.414476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346456923072254:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:49.414589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:49.414654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346456923072277:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:49.418047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:49.428832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346456923072282:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:49.499488Z node 1 :TX_PROXY ERROR: Actor# [1:7494346456923072335:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:49.719963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:49.809581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:50.480506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:11:52.420427Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346448333137017:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:52.420496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 16639, MsgBus: 23145 2025-04-17T18:11:57.242980Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346492631159413:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:57.243051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ad/r3tmp/tmpCSpWzp/pdisk_1.dat 2025-04-17T18:11:57.332194Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16639, node 2 2025-04-17T18:11:57.376469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:57.376539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:57.378072Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:57.378100Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:57.378103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:57.378105Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:57.378211Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23145 TClient is connected to server localhost:23145 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:57.656658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:59.331528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346501221094669:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:59.331579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346501221094646:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:59.331609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:59.334135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:59.340676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346501221094675:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:11:59.432423Z node 2 :TX_PROXY ERROR: Actor# [2:7494346501221094726:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:59.468987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:59.578069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7494346501221094934:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:59.578068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7494346501221094920:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:59.578219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7494346501221094920:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:59.578442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7494346501221094920:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:59.578532Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7494346501221094920:2345];tablet_i ... 
imr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037901;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.690823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7494346505516064084:2609];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037901;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.690996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7494346505516063997:2601];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037925;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7494346505516063997:2601];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037925;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691189Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7494346505516063903:2572];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037924;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7494346505516063903:2572];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037924;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7494346505516064017:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037935;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7494346505516064017:2606];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037935;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691562Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[2:7494346505516063944:2587];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037926;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[2:7494346505516063944:2587];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037926;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[2:7494346505516063954:2590];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037931;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.691872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[2:7494346505516063954:2590];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037931;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.692042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7494346505516064011:2605];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037913;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.692257Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037913;self_id=[2:7494346505516064011:2605];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037913;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.692359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[2:7494346505516063858:2560];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037950;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.692462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[2:7494346505516063858:2560];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037950;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.692594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[2:7494346505516063305:2472];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037995;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.692716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[2:7494346505516063305:2472];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037995;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.692897Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7494346505516063848:2554];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037938;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.693016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7494346505516063848:2554];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037938;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.693168Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:7494346505516063765:2533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037966;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.693291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:7494346505516063765:2533];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037966;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.693687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7494346505516063981:2598];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.693848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7494346505516063981:2598];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.693941Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;self_id=[2:7494346505516064009:2604];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037912;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.694072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;self_id=[2:7494346505516064009:2604];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037912;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.694218Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037940;self_id=[2:7494346505516063929:2585];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037940;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.694347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7494346505516063929:2585];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037940;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.694458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7494346505516063959:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.694624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7494346505516063959:2591];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.694760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7494346505516064081:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.694880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7494346505516064081:2608];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.696139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7494346505516063911:2576];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037944;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.696278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7494346505516063911:2576];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037944;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.696674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7494346505516063920:2579];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037915;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.696803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7494346505516063920:2579];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037915;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.697161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;self_id=[2:7494346505516063878:2567];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037942;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.697325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;self_id=[2:7494346505516063878:2567];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037942;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.697418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7494346505516064071:2607];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037910;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.697501Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037910;self_id=[2:7494346505516064071:2607];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.697680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7494346505516063502:2492];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.697823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7494346505516063502:2492];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.718683Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7494346514106001213:3086];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037998;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:04.719039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7494346514106001213:3086];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037998;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOltp [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::BeginTransactionBadMode [GOOD] Test command err: Trying to start YDB, gRPC: 13007, MsgBus: 24689 2025-04-17T18:11:52.193251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346468943767156:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:52.193365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a0/r3tmp/tmpixWloD/pdisk_1.dat 2025-04-17T18:11:52.416735Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13007, node 1 2025-04-17T18:11:52.459698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:52.459715Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:52.459723Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:52.459810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:52.519748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:52.519840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:52.521500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24689 TClient is connected to server localhost:24689 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:52.786440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:52.807120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:52.886911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:52.982601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:53.051241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:54.023776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346477533703522:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:54.023871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:54.213734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.238652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.260740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.282504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.305752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.357386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.419717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.456514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346477533704187:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:54.456574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:54.456667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346477533704192:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:54.459543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:11:54.466249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346477533704194:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:11:54.567141Z node 1 :TX_PROXY ERROR: Actor# [1:7494346477533704248:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:57.193361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346468943767156:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:57.193431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:12:07.412829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:12:07.412848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:07.986872Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346533368280103:2661], TxId: 281474976715684, task: 1. Ctx: { TraceId : 01js2f6x2149xbkyqkqgj3xy04. SessionId : ydb://session/3?node_id=1&id=OTA0ODY5NWEtYmYzM2E5ZDMtOTU4ZDI4ZTctMmFiNmRjNDc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744913515212/18446744073709551615 shard 72075186224037888 with lowWatermark v1744913515408/18446744073709551615 (node# 1 state# Ready) } } 2025-04-17T18:12:07.987201Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346533368280103:2661], TxId: 281474976715684, task: 1. Ctx: { TraceId : 01js2f6x2149xbkyqkqgj3xy04. SessionId : ydb://session/3?node_id=1&id=OTA0ODY5NWEtYmYzM2E5ZDMtOTU4ZDI4ZTctMmFiNmRjNDc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744913515212/18446744073709551615 shard 72075186224037888 with lowWatermark v1744913515408/18446744073709551615 (node# 1 state# Ready) } }. 2025-04-17T18:12:07.987455Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346533368280105:2662], TxId: 281474976715684, task: 2. Ctx: { TraceId : 01js2f6x2149xbkyqkqgj3xy04. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTA0ODY5NWEtYmYzM2E5ZDMtOTU4ZDI4ZTctMmFiNmRjNDc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494346533368280099:2501], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:07.987787Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTA0ODY5NWEtYmYzM2E5ZDMtOTU4ZDI4ZTctMmFiNmRjNDc=, ActorId: [1:7494346481828671824:2501], ActorState: ExecuteState, TraceId: 01js2f6x2149xbkyqkqgj3xy04, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 64024, MsgBus: 28973 2025-04-17T18:12:08.605115Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346539034649148:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:08.605205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a0/r3tmp/tmpAPFxin/pdisk_1.dat 2025-04-17T18:12:08.673447Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64024, node 2 2025-04-17T18:12:08.720615Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:08.720642Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:08.720647Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:08.720737Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:08.727042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:08.727113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:08.728676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28973 TClient is connected to server localhost:28973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:09.012141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:09.020305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:09.089061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:09.208623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:09.256380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.562076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346547624585510:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.562148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.595389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.617689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.639632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.660057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.680937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.702787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.726745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.798988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346547624586171:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.799035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.799103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346547624586176:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.801299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:10.815236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346547624586178:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:10.868923Z node 2 :TX_PROXY ERROR: Actor# [2:7494346547624586234:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink >> KqpSnapshotIsolation::TSimpleOltp [GOOD] >> KqpSnapshotIsolation::TSimpleOlap [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2025-04-17T18:10:37.145173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:37.145286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:37.145385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b34/r3tmp/tmpJCjMCX/pdisk_1.dat 2025-04-17T18:10:37.441477Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6519, node 1 2025-04-17T18:10:37.637170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:37.637214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:37.637238Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:37.637755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:37.639255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:37.721520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:37.721664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:37.735762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12815 2025-04-17T18:10:38.270576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:41.158427Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:41.196385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:41.196477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:41.233007Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:41.234881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:41.456833Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.457191Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.457607Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.457800Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.457853Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.457897Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.457942Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.457987Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.458046Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:41.616285Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:41.616395Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:41.629354Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:41.754388Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:41.796747Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:41.796853Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:41.823558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:41.826722Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:41.826921Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:41.826970Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:41.827009Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:41.827053Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:41.827098Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:41.827154Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:41.828363Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:41.859528Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:41.859655Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:41.866095Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:41.869863Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:41.869979Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:41.880379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:41.898301Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:41.898344Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:41.898390Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:41.906312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:41.915339Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:41.915441Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:42.095868Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:42.260088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:42.317597Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:43.029463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:43.029648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:43.048830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:43.391917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2545:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:43.392027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:43.393052Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2550:3128]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:43.393193Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:43.393268Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2552:3130] 2025-04-17T18:10:43.393332Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2552:3130] 2025-04-17T18:10:43.393774Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2553:2992] 2025-04-17T18:10:43.393992Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2552:3130], server id = [2:2553:2992], tablet id = 72075186224037894, status = OK 2025-04-17T18:10:43.394143Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2553:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:10:43.394227Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:43.394380Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:43.394432Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2550:3128], StatRequests.size() = 1 2025-04-17T18:10:43.409001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2557:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:43.409122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:43.409410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3139], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:43.414292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:10:43.554632Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:10:43.554696Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:10:43.651962Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2552:3130], schemeshard count = 1 2025-04-17T18:10:43.945901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorA ... 04-17T18:11:31.310008Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:32.847584Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:32.847805Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:34.816829Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:36.427153Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:36.427542Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:38.471027Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:39.929900Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:39.930303Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:42.013547Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:43.337265Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:43.337510Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:45.351821Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:46.964491Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:46.964715Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:48.920391Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:50.324469Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:50.324681Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:52.223651Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:53.881469Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:53.881686Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:56.120719Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:57.869257Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:57.869621Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:00.212894Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:01.755697Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 
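
The "Scheduled retry for error" and "Retry LookupError" entries above reflect a recurring pattern in these tests: an operation that races with metadata creation (the default resource pool, the migrations table) is simply re-attempted after a delay until the object becomes visible. Below is a minimal, self-contained C++ sketch of such a retry loop; every name in it is hypothetical, and it stands in for, rather than reproduces, YDB's actor-based retry scheduling.

#include <chrono>
#include <functional>
#include <thread>

enum class EStatus { Ok, NotFound, Error };

// Re-runs `attempt` with exponential backoff until it succeeds or the
// attempt budget is exhausted; returns the last observed status.
EStatus RetryWithBackoff(const std::function<EStatus()>& attempt,
                         int maxAttempts = 5,
                         std::chrono::milliseconds delay = std::chrono::milliseconds(50)) {
    EStatus status = EStatus::Error;
    for (int i = 0; i < maxAttempts; ++i) {
        status = attempt();
        if (status == EStatus::Ok) {
            break;  // the object became visible; stop retrying
        }
        std::this_thread::sleep_for(delay);
        delay *= 2;  // back off before the next doublecheck
    }
    return status;
}
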
2025-04-17T18:12:01.755909Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:04.118529Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:05.655279Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:05.655528Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:07.942406Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:08.832222Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:12:08.832307Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:08.832344Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:08.832377Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:12:09.864151Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:09.864343Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:09.906606Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-17T18:12:09.906663Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 226.000000s, at schemeshard: 72075186224037897 2025-04-17T18:12:09.906886Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-17T18:12:09.920205Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:12:10.685486Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:10.685576Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:10.685617Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:12:10.685677Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:12:10.685728Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:10.686141Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:10.690964Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:12:10.694963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6699:4738], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.695032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6709:4743], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.695125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.703211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:12:10.741063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6713:4746], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:12:10.891097Z node 2 :TX_PROXY ERROR: Actor# [2:6809:4792] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:10.939802Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6838:4807]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:10.940067Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:12:10.940217Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6840:4809] 2025-04-17T18:12:10.940297Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6840:4809] 2025-04-17T18:12:10.940666Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6841:4810] 2025-04-17T18:12:10.940749Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6841:4810], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:12:10.940808Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:12:10.940918Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6840:4809], server id = [2:6841:4810], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:10.940978Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:10.941043Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6838:4807], StatRequests.size() = 1 2025-04-17T18:12:11.109753Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjkxZGViMDQtMThlNjJmOGUtNTliZjllZS1mZjc4ZjdiNw==, TxId: 2025-04-17T18:12:11.109850Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjkxZGViMDQtMThlNjJmOGUtNTliZjllZS1mZjc4ZjdiNw==, TxId: 2025-04-17T18:12:11.111458Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:11.124128Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:11.124184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:11.145204Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:12:11.145272Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:12:11.208382Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6840:4809], schemeshard count = 1 2025-04-17T18:12:11.880658Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:11.880743Z node 2 :STATISTICS ERROR: [72075186224037894] IsColumnTable. traversal path [OwnerId: 72075186224037897, LocalPathId: 4] is not known to schemeshard 2025-04-17T18:12:11.881011Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:12:11.882978Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:12:11.890073Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTk0YWRlNjUtZjllZGQ2NzMtZDRiOTQ4YTMtYzZiYmI0Nzk=, TxId: 2025-04-17T18:12:11.890119Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTk0YWRlNjUtZjllZGQ2NzMtZDRiOTQ4YTMtYzZiYmI0Nzk=, TxId: 2025-04-17T18:12:11.890402Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:11.903269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:11.903329Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2868:3277] 2025-04-17T18:12:11.903736Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6921:4863]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:11.905525Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:11.905564Z node 2 :STATISTICS ERROR: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-04-17T18:12:11.905592Z node 2 :STATISTICS DEBUG: ReplyFailed(), request id = 2 >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 22981, MsgBus: 10938 2025-04-17T18:11:52.163576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346469972522101:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:52.163645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a1/r3tmp/tmpoAcKcO/pdisk_1.dat 2025-04-17T18:11:52.396168Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22981, node 1 2025-04-17T18:11:52.442774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:52.442809Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:52.442816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:52.442930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:52.499185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:52.499339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:52.500769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to 
server localhost:10938 TClient is connected to server localhost:10938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:52.775545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:53.989531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346474267490066:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:53.989599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346474267490058:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:53.989871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:53.992897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:53.999378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346474267490072:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:54.079882Z node 1 :TX_PROXY ERROR: Actor# [1:7494346478562457419:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:54.260936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.357527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:54.924070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.163411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346469972522101:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:57.163472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:11:58.078806Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-04-17T18:11:58.079686Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346495742335603:2966], SessionActorId: [1:7494346482857433056:2966], Got LOCKS BROKEN for table. ShardID=72075186224037989, Sink=[1:7494346495742335603:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:58.080086Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2025-04-17T18:11:58.080172Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346495742335603:2966], SessionActorId: [1:7494346482857433056:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346482857433056:2966]. isRollback=0 2025-04-17T18:11:58.080242Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744913518124 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-04-17T18:11:58.080356Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYyN2UxNTUtYTdhMmRlY2YtYWRiNGMwNS0xMjg4YmJiNg==, ActorId: [1:7494346482857433056:2966], ActorState: ExecuteState, TraceId: 01js2f6kaz7mv8n6ay4dg02x2b, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346495742335604:2966] from: [1:7494346495742335603:2966] 2025-04-17T18:11:58.080434Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346495742335604:2966] TxId: 281474976710666. Ctx: { TraceId: 01js2f6kaz7mv8n6ay4dg02x2b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYyN2UxNTUtYTdhMmRlY2YtYWRiNGMwNS0xMjg4YmJiNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:58.080618Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYyN2UxNTUtYTdhMmRlY2YtYWRiNGMwNS0xMjg4YmJiNg==, ActorId: [1:7494346482857433056:2966], ActorState: ExecuteState, TraceId: 01js2f6kaz7mv8n6ay4dg02x2b, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 28936, MsgBus: 17625 2025-04-17T18:12:03.712158Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346516274063539:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:03.712206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017a1/r3tmp/tmphFzOiE/pdisk_1.dat 2025-04-17T18:12:03.782324Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28936, node 2 2025-04-17T18:12:03.826562Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:03.826584Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:03.826591Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:03.826683Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:03.837633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:03.837701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:03.838800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17625 TClient is connected to server localhost:17625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:04.115005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:05.592625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346524863998788:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.592662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346524863998762:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.592830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.595045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:05.601142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346524863998791:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:12:05.687189Z node 2 :TX_PROXY ERROR: Actor# [2:7494346524863998842:2332] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:05.721705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.752142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:06.424755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:07.578994Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWMwZDNjNjktZGI4Mjk0MzAtYmI1NjFkMzAtZDY1OTUwYTA=, ActorId: [2:7494346533453941744:2966], ActorState: ExecuteState, TraceId: 01js2f6wqjdn0begg16yb7xy2n, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:12:08.712634Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346516274063539:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:08.712713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::TestCheckSubHiveForwarding |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] |89.5%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> KqpSinkMvcc::OltpMultiSinks >> KqpLocks::EmptyRange [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 31949, MsgBus: 17250 2025-04-17T18:12:09.440955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346543862913200:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:09.441016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00177f/r3tmp/tmp77ywH2/pdisk_1.dat 2025-04-17T18:12:09.640913Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:09.643218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:09.643326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:09.645235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31949, node 1 2025-04-17T18:12:09.684602Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:09.684622Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:09.684627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:09.684712Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17250 TClient is connected to server localhost:17250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:10.003707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.016780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
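
The LOCKS BROKEN / "Transaction locks invalidated ... code: 2001" errors earlier in this log are the outcome the MVCC tests assert: under optimistic locking, a concurrent write invalidates the transaction's read locks, the commit is aborted, and the client is expected to re-run the whole transaction body. A short sketch of that client-side contract follows, using hypothetical types rather than the YDB SDK API.

#include <functional>

struct TTxResult {
    bool Ok = false;
    bool LocksInvalidated = false;  // corresponds to issue code 2001 above
};

// Re-runs the full transaction body when its locks were broken by a
// concurrent writer; any other failure is returned to the caller as-is.
TTxResult RunWithLockRetries(const std::function<TTxResult()>& txBody,
                             int maxAttempts = 3) {
    TTxResult result;
    for (int i = 0; i < maxAttempts; ++i) {
        result = txBody();  // reads, writes and commit in one attempt
        if (result.Ok || !result.LocksInvalidated) {
            break;  // success, or a non-retryable error
        }
        // The aborted attempt's effects were rolled back, so the body can
        // safely start over from the first read.
    }
    return result;
}
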
2025-04-17T18:12:10.112589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.220490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.273152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:11.243841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346552452849553:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:11.243919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:11.426297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.452616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.474043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.495585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.516379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.545017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.569982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.643575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346552452850215:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:11.643650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:11.643695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346552452850220:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:11.646795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:11.653707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346552452850222:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:11.734409Z node 1 :TX_PROXY ERROR: Actor# [1:7494346552452850277:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31196, MsgBus: 4283 2025-04-17T18:12:13.239153Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346561415805237:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:13.239200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00177f/r3tmp/tmpC2K2zh/pdisk_1.dat 2025-04-17T18:12:13.294915Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31196, node 2 2025-04-17T18:12:13.339813Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:13.339828Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:13.339834Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:13.339918Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:13.362461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:13.362539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:13.364089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4283 TClient is connected to server localhost:4283 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:13.631592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:13.643405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:13.674572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:13.753999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:13.791671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:15.203397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346570005741590:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:15.203470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:15.240397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.269773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.292413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.312488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.332075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.361211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.389985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.430133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346570005742245:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:15.430210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:15.430283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346570005742250:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:15.433048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:15.439418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346570005742252:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:15.527901Z node 2 :TX_PROXY ERROR: Actor# [2:7494346570005742309:3533] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:16.654268Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OThkNTZmNWQtNDYyYWJhZDUtMzgzNjJmYTktYzIxZDUwNDU=, ActorId: [2:7494346574300709889:2501], ActorState: ExecuteState, TraceId: 01js2f75jyes4t7tznz89rd1sv, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TVPatchTests::PatchPartOk >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::PatchPartFastXorDiffDisorder >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TVPatchTests::PatchPartOk [GOOD] >> TVPatchTests::FindingPartsWhenError [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> KqpSinkTx::OlapInteractive >> TVPatchTests::FindingPartsWithTimeout [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2025-04-17T18:12:18.759532Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:18.761330Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T18:12:18.761376Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:18.761520Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-17T18:12:18.762031Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 
2025-04-17T18:12:18.762215Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-17T18:12:18.762935Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-17T18:12:18.762983Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-17T18:12:18.763125Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2025-04-17T18:12:18.763158Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-17T18:12:18.763204Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2025-04-17T18:12:18.759532Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:18.761426Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2025-04-17T18:12:18.761482Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-17T18:12:18.761551Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 26193, MsgBus: 27317 2025-04-17T18:11:46.823364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346446276486880:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:46.823416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/0017af/r3tmp/tmpKAToMy/pdisk_1.dat 2025-04-17T18:11:47.055516Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26193, node 1 2025-04-17T18:11:47.111327Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:47.111354Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:47.111365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:47.111463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:47.140961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:47.141051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:47.142728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27317 TClient is connected to server localhost:27317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:47.485825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:48.686845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346454866422139:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:48.686898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346454866422134:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:48.686984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:48.690245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:48.698768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346454866422148:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:48.795808Z node 1 :TX_PROXY ERROR: Actor# [1:7494346454866422199:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:49.023314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:49.142895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:49.143076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:49.143294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:49.143393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:49.143558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:49.143667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:49.143744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:49.143853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:49.143936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:49.144043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:49.144129Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:49.144259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346459161389686:2343];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:49.146000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:49.146053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:49.146208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:49.146277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:49.146376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:49.146467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:49.146549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:49.146660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:49.146810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:49.146950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:49.147073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:49.147146Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494346459161389688:2344];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:49.171208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346459161389698:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:49.171255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346459161389698:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:49.171384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;sel ... node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037954;self_id=[2:7494346525740016533:2492];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037954;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.076973Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7494346525740016511:2482];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077012Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037968;self_id=[2:7494346525740016501:2477];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077030Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037965;self_id=[2:7494346525740016451:2463];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037965;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077069Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037960;self_id=[2:7494346525740016404:2440];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037960;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077097Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7494346525740016495:2475];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077114Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7494346525740016517:2485];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077158Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037961;self_id=[2:7494346525740016492:2474];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037961;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077181Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037971;self_id=[2:7494346525740016488:2472];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077207Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7494346525740016246:2426];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077239Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7494346530034983846:2500];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077257Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7494346525740016294:2435];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077286Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7494346525740016526:2489];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077302Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7494346525740016259:2429];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077331Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037974;self_id=[2:7494346525740016484:2470];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077338Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7494346525740016239:2424];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077375Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7494346525740016257:2428];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077377Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7494346525740016297:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077416Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7494346525740016121:2422];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-04-17T18:12:12.077418Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7494346525740016446:2461];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077458Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7494346525740016266:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077458Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7494346525740016367:2439];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077497Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7494346525740016301:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077498Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7494346525740016244:2425];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077533Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7494346525740016277:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077540Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7494346525740016421:2445];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077576Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7494346525740016292:2434];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077585Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7494346525740016123:2423];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077622Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7494346525740016413:2442];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077635Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037976;self_id=[2:7494346525740016482:2469];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077668Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7494346525740016364:2438];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077680Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7494346525740016255:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077710Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7494346525740016264:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077714Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7494346525740016119:2421];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077753Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7494346525740016274:2432];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.077800Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7494346525740016112:2420];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-04-17T18:12:18.759580Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:18.761150Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-17T18:12:18.761198Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-17T18:12:18.761377Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no 
ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-17T18:12:18.762071Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:18.762256Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:08.087657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:08.087757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:08.087800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:08.087833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:08.087894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:08.087923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:08.087991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:08.088088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:08.088468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:08.146058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:08.146105Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:08.156794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:08.157083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:08.157201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:08.163278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:08.163414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState 
with owners number: 0 2025-04-17T18:11:08.163832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:08.163964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:08.165944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:08.167029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:08.167081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:08.167236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:08.167276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:08.167306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:08.167394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.172492Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:08.265182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:08.265362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.265567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:08.265786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:08.265833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.267569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:08.267703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:08.267884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.267963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:08.268001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:08.268025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:08.269533Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.269586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:08.269621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:08.271167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.271200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.271235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:08.271276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.273540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:08.274784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:08.274920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:08.275568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:08.275660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:08.275696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:08.275890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:08.275925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:08.276034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:08.276080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:08.277555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:08.277582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:08.277712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-17T18:11:08.277748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:08.278042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.278087Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:08.278200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:08.278232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.278269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:08.278295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.278337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:08.278364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.278386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:08.278404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:08.278445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:08.278469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:08.278489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:08.279601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:08.279666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:08.279687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
seconds 2025-04-17T18:12:17.919074Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-17T18:12:17.920137Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:302:2291], Recipient [3:312:2299]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:12:17.931583Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.153000Z 2025-04-17T18:12:17.982971Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:17.983031Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:17.983053Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-17T18:12:17.983167Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T18:12:17.983196Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T18:12:17.983297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-17T18:12:17.983351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-17T18:12:17.983415Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:18.000000Z at schemeshard 72057594046678944 2025-04-17T18:12:17.983473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-04-17T18:12:17.983538Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:12:17.993893Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:17.993946Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:17.993985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:12:18.334669Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:18.334734Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:18.334798Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:312:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:12:18.334915Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:18.334945Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:18.376225Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-17T18:12:18.376317Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:18.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-17T18:12:18.376410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2025-04-17T18:12:18.376611Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [3:129:2154], Recipient [3:312:2299]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-04-17T18:12:18.376728Z node 3 :TX_DATASHARD INFO: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:129:2154], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-17T18:12:18.377588Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.153000Z 2025-04-17T18:12:18.377640Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 7 2025-04-17T18:12:18.380772Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1268:3205], Recipient [3:312:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-17T18:12:18.380862Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-17T18:12:18.381449Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:312:2299], Recipient [3:129:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 13776 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-17T18:12:18.381478Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:12:18.381523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 1.3776 2025-04-17T18:12:18.381599Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from 
shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:12:18.381638Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T18:12:18.383708Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:302:2291], Recipient [3:312:2299]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:12:18.386885Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.153000Z 2025-04-17T18:12:18.386931Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2025-04-17T18:12:18.386956Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:129:2154]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:12:18.387306Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:312:2299], Recipient [3:129:2154]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-04-17T18:12:18.387332Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-04-17T18:12:18.387386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-04-17T18:12:18.387424Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-17T18:12:18.388680Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:302:2291], Recipient [3:312:2299]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:12:18.400035Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.153000Z 2025-04-17T18:12:18.451433Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:18.451485Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:18.451506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-17T18:12:18.451557Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T18:12:18.451580Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T18:12:18.451661Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row 
count 100 2025-04-17T18:12:18.451727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-17T18:12:18.451779Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:19.000000Z at schemeshard 72057594046678944 2025-04-17T18:12:18.451825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-04-17T18:12:18.451888Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:12:18.462340Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:18.462401Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:18.462428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-04-17T18:12:18.759533Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:18.761420Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T18:12:18.761464Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-17T18:12:18.761523Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-17T18:12:19.003793Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:19.003967Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T18:12:19.004007Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:19.004128Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 
2025-04-17T18:12:19.004178Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-17T18:12:19.004225Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2025-04-17T18:12:19.062716Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:19.063434Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-17T18:12:19.063473Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-17T18:12:19.063626Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:19.063726Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2025-04-17T18:12:19.063772Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2025-04-17T18:12:18.759632Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:18.761348Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T18:12:18.761415Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:18.761658Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] 
OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-17T18:12:18.762047Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-17T18:12:18.762098Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-17T18:12:19.020250Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2025-04-17T18:12:19.030662Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2025-04-17T18:12:19.030729Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-17T18:12:19.030798Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 18380, MsgBus: 18197 2025-04-17T18:11:55.452015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346483819572674:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:55.452181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001797/r3tmp/tmpMMrFjp/pdisk_1.dat 2025-04-17T18:11:55.674022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:55.674106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:55.676019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:11:55.689030Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18380, node 1 2025-04-17T18:11:55.733335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:55.733360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:55.733374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:55.733515Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18197 TClient is connected to server localhost:18197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:56.065531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:57.274535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346492409507924:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.274553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346492409507932:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.274626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:57.277357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:57.284553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346492409507938:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:57.343813Z node 1 :TX_PROXY ERROR: Actor# [1:7494346492409507989:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:57.544141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.658034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:11:58.283533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:11:59.355732Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-17T18:11:59.355885Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:11:59.356008Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:11:59.356114Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346500999451295:2966], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7494346500999450921:2966]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7494346500999451295:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:11:59.356431Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346500999451279:2966], SessionActorId: [1:7494346500999450921:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346500999450921:2966]. isRollback=0 2025-04-17T18:11:59.356618Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjM5NTFhMDgtNDgwNzU2MzktNTRhYjQxMTYtNzJhM2Nl, ActorId: [1:7494346500999450921:2966], ActorState: ExecuteState, TraceId: 01js2f6mpgfjdymtxcx2g4yadj, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346500999451280:2966] from: [1:7494346500999451279:2966] 2025-04-17T18:11:59.356680Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346500999451280:2966] TxId: 281474976710665. Ctx: { TraceId: 01js2f6mpgfjdymtxcx2g4yadj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM5NTFhMDgtNDgwNzU2MzktNTRhYjQxMTYtNzJhM2Nl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:11:59.356838Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjM5NTFhMDgtNDgwNzU2MzktNTRhYjQxMTYtNzJhM2Nl, ActorId: [1:7494346500999450921:2966], ActorState: ExecuteState, TraceId: 01js2f6mpgfjdymtxcx2g4yadj, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 2025-04-17T18:12:00.452162Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346483819572674:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:00.452224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 31440, MsgBus: 14560 2025-04-17T18:12:05.225982Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346524902861161:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:05.226058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001797/r3tmp/tmpPnsbcw/pdisk_1.dat 2025-04-17T18:12:05.304719Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31440, node 2 2025-04-17T18:12:05.334054Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:05.334071Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:05.334076Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:05.334151Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:05.347158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:05.347217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:05.348289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14560 TClient is connected to server localhost:14560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:05.615371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
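The ABORTED status above carries issue code 2001 ("Transaction locks invalidated"): a concurrent commit broke the optimistic locks held by the open transaction, so the server discards the transaction's effects and the client is expected to retry the whole transaction. As a minimal sketch of such client-side handling — assuming the public YDB C++ SDK's TTableClient::RetryOperationSync, whose default policy retries retryable statuses such as ABORTED; the include path, endpoint, and query below are illustrative, not taken from this run:

// Hedged sketch: retrying a transaction that fails with ABORTED / issue 2001
// ("Transaction locks invalidated"), as seen in the test output above.
// The include path and connection parameters are assumptions; adjust to your SDK layout.
#include <ydb-cpp-sdk/client/table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    TDriver driver(TDriverConfig().SetEndpoint("localhost:2136").SetDatabase("/Root"));
    TTableClient client(driver);

    // RetryOperationSync re-runs the callback while the returned status is
    // retryable; ABORTED (broken locks) is retried by the default policy.
    TStatus status = client.RetryOperationSync([](TSession session) -> TStatus {
        auto result = session.ExecuteDataQuery(
            "UPSERT INTO `/Root/Test` (Group, Name, Amount) VALUES (1u, \"Anna\", 3500ul);",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
        return result;  // TDataQueryResult derives from TStatus
    });

    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}

Without a retry wrapper of this kind, the session surfaces the ABORTED QueryResponse recorded above and the transaction's writes are lost.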
2025-04-17T18:12:07.177837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346533492796398:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T1 ... akeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.712560Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7494346546377703862:3135];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.712736Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7494346546377703961:3145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.713024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7494346546377703961:3145];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.714584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7494346546377703839:3126];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.714901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346546377704016:3162];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.714984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7494346546377703839:3126];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.715144Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346546377704016:3162];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.716372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7494346546377704282:3210];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.716630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7494346546377704282:3210];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.716817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7494346546377703825:3119];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.717014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7494346546377703825:3119];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.718032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7494346546377703966:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.718299Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038041;self_id=[2:7494346546377703966:3146];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.719643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7494346546377703845:3129];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.719909Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7494346546377703845:3129];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.723273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7494346546377703856:3133];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.723550Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7494346546377703856:3133];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.723761Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7494346546377703956:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.724024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7494346546377703956:3143];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.724174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7494346546377703931:3141];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.724428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7494346546377703931:3141];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.727762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7494346546377703984:3157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.728043Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7494346546377703984:3157];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.729318Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7494346546377704163:3179];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.729726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7494346546377704163:3179];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.729978Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038012;self_id=[2:7494346546377704024:3164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.730332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346546377704024:3164];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.730577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7494346546377704103:3168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.730875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7494346546377704103:3168];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.732062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7494346546377704148:3178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:12.732126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7494346546377703982:3156];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-17T18:12:12.768610Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[2:7494346533492796652:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768696Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7494346533492796654:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768727Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7494346533492796694:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768756Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7494346533492796675:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768767Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[2:7494346533492796775:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768795Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037893;self_id=[2:7494346533492796691:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768811Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[2:7494346533492796689:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768825Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7494346533492796665:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768844Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7494346533492796679:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:12.768873Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7494346533492796677:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TVPatchTests::PatchPartPutError >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::PatchPartPutError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2025-04-17T18:12:20.366040Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:20.366998Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-17T18:12:20.367052Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-17T18:12:20.367229Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-17T18:12:20.367318Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send 
NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:20.367468Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:08.134978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:08.135038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:08.135071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:08.135098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:08.135136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:08.135157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:08.135193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:08.135267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:08.135481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:08.192459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:08.192501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:08.203280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:08.203595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:08.203743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:08.210012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:08.210144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:08.210580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:08.210725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
at schemeshard: 72057594046678944 2025-04-17T18:11:08.212800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:08.213764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:08.213812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:08.213987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:08.214024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:08.214061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:08.214164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.219182Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:08.305209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:08.305380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.305530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:08.305705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:08.305772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.307507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:08.307596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:08.307722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.307775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:08.307798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:08.307826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:08.309115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.309151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:08.309178Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:08.310567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.310619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:08.310660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:08.310700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.313383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:08.314885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:08.315014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:08.315712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:08.315793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:08.315828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:08.316031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:08.316075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:08.316231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:08.316285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:08.318127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:08.318165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:08.318306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:08.318341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:08.318619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T18:11:08.318661Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:08.318734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:08.318758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.318786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:08.318836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.318862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:08.318891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:08.318917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:08.318937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:08.318973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:08.318999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:08.319019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:08.320282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:08.320378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:08.320406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T18:12:19.963477Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T18:12:20.015748Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:352:2333]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:20.015812Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:20.015891Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:352:2333], Recipient [3:352:2333]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:20.015908Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:20.037084Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [3:352:2333]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-17T18:12:20.037144Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-17T18:12:20.037217Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:352:2333]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-17T18:12:20.037246Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-17T18:12:20.037267Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-17T18:12:20.037314Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-17T18:12:20.037349Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-17T18:12:20.142364Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:770:2656]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.142436Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.142508Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-04-17T18:12:20.142571Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:12:20.142597Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409552 2025-04-17T18:12:20.142619Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-04-17T18:12:20.142639Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409552 2025-04-17T18:12:20.142737Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:770:2656]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:12:20.142831Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-04-17T18:12:20.143112Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:770:2656], Recipient [3:899:2758]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 22 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 122 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-17T18:12:20.143146Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:12:20.143181Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0022 2025-04-17T18:12:20.143242Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:12:20.143265Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T18:12:20.153548Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:772:2657]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.153594Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.153659Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-04-17T18:12:20.153709Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:12:20.153740Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409553 2025-04-17T18:12:20.153764Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-04-17T18:12:20.153783Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409553 2025-04-17T18:12:20.153873Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:772:2657]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:12:20.153953Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-04-17T18:12:20.154237Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:772:2657], Recipient [3:899:2758]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 
RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 20 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 122 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-17T18:12:20.154303Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:12:20.154346Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.002 2025-04-17T18:12:20.154404Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:12:20.165467Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:899:2758]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:20.165513Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:20.165582Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:899:2758], Recipient [3:899:2758]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:20.165613Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:20.175905Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [3:899:2758]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-17T18:12:20.175975Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-17T18:12:20.176201Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:899:2758]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-17T18:12:20.176247Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-17T18:12:20.176279Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-17T18:12:20.176331Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-17T18:12:20.176374Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-17T18:12:20.176484Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269746180, Sender [3:2041:3859], Recipient [3:899:2758]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-17T18:12:20.176508Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-17T18:12:20.197696Z node 3 :TX_DATASHARD TRACE: 
StateWork, received event# 269877761, Sender [3:2044:3862], Recipient [3:770:2656]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.197758Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.197802Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409552, clientId# [3:2043:3861], serverId# [3:2044:3862], sessionId# [0:0:0] 2025-04-17T18:12:20.197932Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2042:3860], Recipient [3:770:2656]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-04-17T18:12:20.198453Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2047:3865], Recipient [3:772:2657]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.198487Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.198512Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409553, clientId# [3:2046:3864], serverId# [3:2047:3865], sessionId# [0:0:0] 2025-04-17T18:12:20.198601Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2045:3863], Recipient [3:772:2657]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } >> TVPatchTests::PatchPartGetError >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2025-04-17T18:12:20.394883Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:20.395581Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T18:12:20.395634Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:20.395767Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-17T18:12:20.395811Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:20.395942Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-17T18:12:20.395992Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] 
OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-17T18:12:20.396037Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-17T18:12:20.396160Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2025-04-17T18:12:20.396192Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-17T18:12:20.396246Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> THiveTest::TestBootProgress >> TVPatchTests::PatchPartGetError [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:07.567546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:07.567629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:07.567671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:07.567708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:07.567765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:07.567795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:07.567845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:07.567942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:07.568297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:07.652455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe 
to console configs 2025-04-17T18:11:07.652509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:07.667705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:07.668089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:07.668237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:07.675944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:07.676135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:07.676794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:07.676984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:07.679638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:07.680929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:07.680988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:07.681174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:07.681225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:07.681264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:07.681412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.687712Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:07.817580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:07.817766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.817948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:07.818125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:07.818162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.820149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:07.820251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-17T18:11:07.820393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.820442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:07.820480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:07.820505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:07.821791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.821832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:07.821854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:07.822989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.823027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.823064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.823103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.825406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:07.826712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:07.826834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:07.827479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:07.827589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:07.827635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.827896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:07.827952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.828085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:07.828195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:07.829604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:07.829647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:07.829793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:07.829824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:07.830097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.830128Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:07.830218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:07.830244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.830285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:07.830310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.830335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:07.830366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.830391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:07.830411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:07.830461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:07.830497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:07.830522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:07.831817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:07.831921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:07.831961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-17T18:12:20.590894Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-17T18:12:20.590917Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-17T18:12:20.590947Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-17T18:12:20.591027Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T18:12:20.591095Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T18:12:20.643998Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:321:2305]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.644071Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.644145Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-17T18:12:20.644204Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:12:20.644257Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-17T18:12:20.644284Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-17T18:12:20.644305Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-04-17T18:12:20.644425Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:321:2305]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:12:20.644525Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-17T18:12:20.644771Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:321:2305], Recipient [3:129:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-17T18:12:20.644803Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:12:20.644863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0027 2025-04-17T18:12:20.644948Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:12:20.644981Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T18:12:20.655279Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:325:2308]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.655346Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:20.655421Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2025-04-17T18:12:20.655462Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:12:20.655487Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409547 2025-04-17T18:12:20.655521Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-04-17T18:12:20.655547Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409547 2025-04-17T18:12:20.655671Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:325:2308]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:12:20.655766Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-04-17T18:12:20.656026Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:325:2308], Recipient [3:129:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 22 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-17T18:12:20.656065Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:12:20.656101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0022 2025-04-17T18:12:20.656173Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 
InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:12:20.697743Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:20.697799Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:20.697830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-17T18:12:20.697883Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-17T18:12:20.697908Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-17T18:12:20.697995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-17T18:12:20.698063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-17T18:12:20.698129Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-04-17T18:12:20.698195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 by size, its table already has 2 out of 2 partitions 2025-04-17T18:12:20.698241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-17T18:12:20.698272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:12:20.698296Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-04-17T18:12:20.698316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 by size, its table already has 2 out of 2 partitions 2025-04-17T18:12:20.698368Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:12:20.708743Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:20.708819Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:20.708854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:12:20.741173Z node 3 :TX_DATASHARD TRACE: StateWork, received 
event# 269877761, Sender [3:1336:3258], Recipient [3:321:2305]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.741230Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.741272Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409546, clientId# [3:1335:3257], serverId# [3:1336:3258], sessionId# [0:0:0] 2025-04-17T18:12:20.741426Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1334:3256], Recipient [3:321:2305]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-04-17T18:12:20.742788Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1339:3261], Recipient [3:325:2308]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.742814Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:12:20.742837Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409547, clientId# [3:1338:3260], serverId# [3:1339:3261], sessionId# [0:0:0] 2025-04-17T18:12:20.742891Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1337:3259], Recipient [3:325:2308]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-04-17T18:12:20.978411Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:20.979308Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-17T18:12:20.979349Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-17T18:12:20.979483Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-17T18:12:20.979543Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-17T18:12:20.979685Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-17T18:12:20.979737Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |89.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStorageBalanceTest::TestScenario3 [GOOD] >> THiveTest::TestBootProgress [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 64518, MsgBus: 63139 2025-04-17T18:12:09.876014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346541459359875:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:09.876144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00177a/r3tmp/tmpNl6UVG/pdisk_1.dat 2025-04-17T18:12:10.105141Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64518, node 1 2025-04-17T18:12:10.154376Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:10.154431Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:10.154440Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:10.154594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:10.189949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:10.190049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:10.191682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63139 TClient is connected to server localhost:63139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:10.543057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.561656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:10.676139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.777317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.819998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:11.752981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346550049296227:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:11.753074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:11.980707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:12.002561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:12.065757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:12.087848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:12.109460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:12.173598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:12.209280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:12.281003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346554344264205:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:12.281057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:12.281153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346554344264210:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:12.283966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:12.291337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346554344264212:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:12.381765Z node 1 :TX_PROXY ERROR: Actor# [1:7494346554344264268:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10602, MsgBus: 32709 2025-04-17T18:12:13.850197Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346559751104666:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:13.850269Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00177a/r3tmp/tmpZmqSi6/pdisk_1.dat 2025-04-17T18:12:13.908509Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10602, node 2 2025-04-17T18:12:13.946795Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:13.946810Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:13.946815Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:13.946889Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:13.971269Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:13.971337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:13.972554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32709 TClient is connected to server localhost:32709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:14.218940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:14.226913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:14.265405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:14.376212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:14.443689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:15.786947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346568341041031:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool ... ermissions } 2025-04-17T18:12:16.027688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:16.035025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346572636008994:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:16.121826Z node 2 :TX_PROXY ERROR: Actor# [2:7494346572636009051:3535] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:16.966989Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-17T18:12:16.977149Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037914 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:12:16.977317Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037914 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:12:16.977519Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346572636009471:2509], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [2:7494346572636009365:2509]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[2:7494346572636009471:2509].{
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:12:16.978047Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346572636009464:2509], SessionActorId: [2:7494346572636009365:2509], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7494346572636009365:2509]. isRollback=0 2025-04-17T18:12:16.978282Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmY0M2RjZjUtN2EwODUyNzktZDQ2MTE3MjItNWE3MzZjMDc=, ActorId: [2:7494346572636009365:2509], ActorState: ExecuteState, TraceId: 01js2f75x15yt9vrx8qevrwcw8, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7494346572636009465:2509] from: [2:7494346572636009464:2509] 2025-04-17T18:12:16.978358Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346572636009465:2509] TxId: 281474976715675. Ctx: { TraceId: 01js2f75x15yt9vrx8qevrwcw8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmY0M2RjZjUtN2EwODUyNzktZDQ2MTE3MjItNWE3MzZjMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:12:16.978530Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmY0M2RjZjUtN2EwODUyNzktZDQ2MTE3MjItNWE3MzZjMDc=, ActorId: [2:7494346572636009365:2509], ActorState: ExecuteState, TraceId: 01js2f75x15yt9vrx8qevrwcw8, Create QueryResponse for error on request, msg:
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 5327, MsgBus: 22952 2025-04-17T18:12:17.687802Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346575837970610:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:17.687887Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00177a/r3tmp/tmpky3DFk/pdisk_1.dat 2025-04-17T18:12:17.765290Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5327, node 3 2025-04-17T18:12:17.807618Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:17.807635Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:17.807640Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:17.807740Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:17.820603Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:17.820670Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:17.821706Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22952 TClient is connected to server localhost:22952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:18.097637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:18.106371Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:18.169981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:18.247216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:18.283687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:19.655351Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346584427906971:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.655419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.694623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.717242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.761817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.783794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.806536Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.835472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.858150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.929244Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346584427907634:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.929311Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.929361Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494346584427907639:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.931685Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:19.937578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494346584427907641:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:20.010465Z node 3 :TX_PROXY ERROR: Actor# [3:7494346588722874992:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:20.935193Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGM5ZjlmZWQtZWRlNzFlMjgtOGYzYjllZjctZjYyMzhkMDg=, ActorId: [3:7494346588722875306:2509], ActorState: ExecuteState, TraceId: 01js2f79pp3m593c3y5w77j7ve, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken >> AnalyzeDatashard::AnalyzeOneTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: c[def1] ---------------------------------------------------------------------------------------------------- (0) 2025-04-17T18:09:22.888651Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.892056Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.892281Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:22.892808Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:22.893860Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:22.893908Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.894755Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-17T18:09:22.894797Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.894917Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.895206Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.905244Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.905304Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.907121Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.907294Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.907432Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.907588Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.907741Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.907883Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.908004Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.908028Z node 1 :BS_PROXY INFO: Group# 0 
SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.908105Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-04-17T18:09:22.908149Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-04-17T18:09:22.908207Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.908252Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.908946Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.909107Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.932185Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.932274Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.932319Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.933799Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-04-17T18:09:22.933839Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-04-17T18:09:22.933921Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.933981Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:22.939204Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:22.939418Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.939695Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.940100Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:22.940224Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:22.940296Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.940525Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.940802Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.940847Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-17T18:09:22.940936Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 
Cookie: 1} 2025-04-17T18:09:22.940968Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-17T18:09:22.941060Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2025-04-17T18:09:22.941089Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:52:2092] 2025-04-17T18:09:22.941604Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-17T18:09:22.941672Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.941862Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.941914Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.941993Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:52:2092] 2025-04-17T18:09:22.944917Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.945019Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:22.945050Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-17T18:09:22.946033Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.946095Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:22.946452Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-17T18:09:22.946512Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-17T18:09:22.946554Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-17T18:09:22.947219Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-04-17T18:09:22.947260Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-04-17T18:09:22.947468Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.947773Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.947950Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.948078Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.948141Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:22.948187Z node 1 
:TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-17T18:09:22.948305Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-04-17T18:09:22.948338Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-04-17T18:09:22.948411Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.948530Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-17T18:09:22.948557Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.948664Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-17T18:09:22.948780Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-17T18:09:22.948817Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-17T18:09:22.948843Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-17T18:09:22.948875Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-17T18:09:22.949007Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 ... 56Z node 12 :BS_PROXY_PUT DEBUG: [0971fc86f746b69b] Result# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-17T18:12:19.308793Z node 12 :BS_PROXY_PUT INFO: [0971fc86f746b69b] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:12:19.308862Z node 12 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.54 sample PartId# [72057594037927937:2:492:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 12 } TEvVPutResult{ TimestampMs# 2.513 VDiskId# [0:1:0:0:0] NodeId# 12 Status# OK } ] } 2025-04-17T18:12:19.309242Z node 12 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:12:19.309554Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} commited cookie 1 for step 492 2025-04-17T18:12:19.310522Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-17T18:12:19.310560Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:12:19.310731Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{996, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-17T18:12:19.310765Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} release 
4194304b of static, Memory{0 dyn 0} 2025-04-17T18:12:19.310841Z node 12 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [12:1303:2642] 2025-04-17T18:12:19.310862Z node 12 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [12:1303:2642] 2025-04-17T18:12:19.310891Z node 12 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [12:1245:2604] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.058) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) 2025-04-17T18:12:19.412796Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-17T18:12:19.412853Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:12:19.412936Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003103648}: tablet 72075186224037890 wasn't changed 2025-04-17T18:12:19.412961Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003103648}: tablet 72075186224037890 skipped channel 0 2025-04-17T18:12:19.413019Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003103648}: tablet 72075186224037890 skipped channel 1 2025-04-17T18:12:19.413041Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003103648}: tablet 72075186224037890 skipped channel 2 2025-04-17T18:12:19.413092Z node 12 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923003103648}(72075186224037890)::Execute - TryToBoot was not successfull 2025-04-17T18:12:19.413143Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{997, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-17T18:12:19.413193Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:12:19.437856Z node 12 :BS_PROXY_PUT INFO: [ab0c98c298510cec] bootstrap ActorId# [12:11616:6240] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:493:0:0:244:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T18:12:19.437938Z node 12 :BS_PROXY_PUT DEBUG: 
[ab0c98c298510cec] Id# [72057594037927937:2:493:0:0:244:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T18:12:19.437964Z node 12 :BS_PROXY_PUT DEBUG: [ab0c98c298510cec] restore Id# [72057594037927937:2:493:0:0:244:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T18:12:19.437996Z node 12 :BS_PROXY_PUT DEBUG: [ab0c98c298510cec] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:493:0:0:244:1] Marker# BPG33 2025-04-17T18:12:19.438018Z node 12 :BS_PROXY_PUT DEBUG: [ab0c98c298510cec] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:493:0:0:244:1] Marker# BPG32 2025-04-17T18:12:19.438103Z node 12 :BS_PROXY DEBUG: Send to queueActorId# [12:299:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:493:0:0:244:1] FDS# 244 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T18:12:19.440548Z node 12 :BS_PROXY_PUT DEBUG: [ab0c98c298510cec] received {EvVPutResult Status# OK ID# [72057594037927937:2:493:0:0:244:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 509 } Cost# 81921 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 510 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-17T18:12:19.440611Z node 12 :BS_PROXY_PUT DEBUG: [ab0c98c298510cec] Result# TEvPutResult {Id# [72057594037927937:2:493:0:0:244:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-17T18:12:19.440640Z node 12 :BS_PROXY_PUT INFO: [ab0c98c298510cec] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:493:0:0:244:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:12:19.440706Z node 12 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.565 sample PartId# [72057594037927937:2:493:0:0:244:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 12 } TEvVPutResult{ TimestampMs# 3.027 VDiskId# [0:1:0:0:0] NodeId# 12 Status# OK } ] } 2025-04-17T18:12:19.441400Z node 12 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:493:0:0:244:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:12:19.442331Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} commited cookie 1 for step 493 2025-04-17T18:12:19.444213Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-17T18:12:19.444257Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:12:19.444404Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{998, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-17T18:12:19.444437Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:12:19.444517Z node 12 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [12:1303:2642] 2025-04-17T18:12:19.444541Z node 12 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server 
[12:1303:2642] 2025-04-17T18:12:19.444573Z node 12 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [12:1245:2604] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.058) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) 2025-04-17T18:12:19.546778Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-17T18:12:19.546831Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:12:19.546914Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005769024}: tablet 72075186224037959 wasn't changed 2025-04-17T18:12:19.546940Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005769024}: tablet 72075186224037959 skipped channel 0 2025-04-17T18:12:19.546996Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005769024}: tablet 72075186224037959 skipped channel 1 2025-04-17T18:12:19.547018Z node 12 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005769024}: tablet 72075186224037959 skipped channel 2 2025-04-17T18:12:19.547072Z node 12 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005769024}(72075186224037959)::Execute - TryToBoot was not successfull 2025-04-17T18:12:19.547116Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{999, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-17T18:12:19.547147Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> KqpYql::FlexibleTypes >> KqpPragma::OrderedColumns >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpScripting::LimitOnShard >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpScripting::ScanQuery >> KqpYql::EvaluateIf >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> KqpYql::TestUuidDefaultColumn >> KqpYql::ScriptUdf >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2025-04-17T18:10:15.017114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:15.017291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:15.017447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c3a/r3tmp/tmpYsCOcT/pdisk_1.dat 2025-04-17T18:10:15.428357Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1752, node 1 2025-04-17T18:10:15.917254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:15.917309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:15.917345Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:15.918008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:15.927818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.030702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.030831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.048287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3282 2025-04-17T18:10:16.622027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.844578Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:19.887097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.887207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.924874Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:19.928670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.171638Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172003Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172456Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172697Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172764Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172824Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172872Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172914Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.172967Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.339442Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:20.339542Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.353064Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.492170Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:20.544901Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:20.545001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:20.578091Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:20.581846Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:20.582083Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:20.582145Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:20.582230Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:20.582300Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:20.582368Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:20.582430Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:20.583733Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:20.627505Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.627611Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.635410Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:20.639983Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:20.640131Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:20.651819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:20.673724Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:20.673779Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:20.673852Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:20.688751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:20.699990Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:20.700135Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:20.912198Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:21.073782Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:21.131781Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:22.024175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.024387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.108581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:22.589442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2545:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.589613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.591074Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2550:3128]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:10:22.591280Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:10:22.591408Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2552:3130] 2025-04-17T18:10:22.592399Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2552:3130] 2025-04-17T18:10:22.593140Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2553:2992] 2025-04-17T18:10:22.593493Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2552:3130], server id = [2:2553:2992], tablet id = 72075186224037894, status = OK 2025-04-17T18:10:22.593708Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2553:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:10:22.593794Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:10:22.594797Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:10:22.594906Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2550:3128], StatRequests.size() = 1 2025-04-17T18:10:22.653429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2557:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.653575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.653901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3139], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.660074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:10:22.805823Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:10:22.805904Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:10:22.908345Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2552:3130], schemeshard count = 1 2025-04-17T18:10:23.222769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorAc ... 24037894] EvPropagateTimeout 2025-04-17T18:11:42.897078Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:42.897310Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:46.112769Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:48.315838Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:48.316137Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:51.613956Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:11:54.159801Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:11:54.160051Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:11:57.712062Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:00.319993Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:00.320248Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:03.909511Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:06.369986Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:06.370241Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:09.899020Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:12.064507Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:12.064774Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:15.613010Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:16.873043Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:12:16.873126Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:16.873170Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:16.873217Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:12:18.236857Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:18.237159Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:12:18.290113Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path 
count: 2, at schemeshard: 72075186224037897 2025-04-17T18:12:18.290196Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 227.000000s, at schemeshard: 72075186224037897 2025-04-17T18:12:18.290418Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-17T18:12:18.302920Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:12:19.331073Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:19.331154Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:19.331191Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:12:19.331236Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:12:19.331277Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:19.331716Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:19.341722Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:12:19.345003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6612:4677], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.345109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6621:4682], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.345280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.353976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:12:19.393501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6626:4685], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:12:19.572832Z node 2 :TX_PROXY ERROR: Actor# [2:6724:4733] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:19.614060Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6753:4748]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:19.614270Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:12:19.614375Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6755:4750] 2025-04-17T18:12:19.614449Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6755:4750] 2025-04-17T18:12:19.614785Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6756:4751] 2025-04-17T18:12:19.614940Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6756:4751], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:12:19.614987Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:12:19.615122Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6755:4750], server id = [2:6756:4751], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:19.615195Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:19.615260Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6753:4748], StatRequests.size() = 1 2025-04-17T18:12:19.715246Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWE1NDhkOGMtMjc3NDVlNi1lNzkwODI0MC0zZmUyYzU4Mw==, TxId: 2025-04-17T18:12:19.715317Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWE1NDhkOGMtMjc3NDVlNi1lNzkwODI0MC0zZmUyYzU4Mw==, TxId: 2025-04-17T18:12:19.715642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:19.728579Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:19.728642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:19.781104Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:12:19.781181Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:12:19.854886Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6755:4750], schemeshard count = 1 2025-04-17T18:12:20.826817Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:20.826891Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T18:12:20.826923Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:22.017750Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:12:22.038684Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:22.038796Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T18:12:22.038832Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:22.039044Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:22.040713Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:12:22.048601Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjFhMjBhYWItZGExZThhYjUtZDQ2MDVjN2QtZjRjNmQ2NDM=, TxId: 2025-04-17T18:12:22.048636Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjFhMjBhYWItZGExZThhYjUtZDQ2MDVjN2QtZjRjNmQ2NDM=, TxId: 2025-04-17T18:12:22.048902Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:22.061051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:22.061090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2766:3247] 2025-04-17T18:12:22.061428Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6873:4822]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:22.063440Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:22.063478Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:12:22.066123Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:22.066172Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:12:22.066228Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:12:22.067730Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-17T18:12:22.067940Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpScripting::SelectNullType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 20527, MsgBus: 27400 2025-04-17T18:11:59.999153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346502045916895:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:59.999198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001785/r3tmp/tmpjOwMnJ/pdisk_1.dat 2025-04-17T18:12:00.214255Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20527, node 1 2025-04-17T18:12:00.272317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:00.272332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:00.272349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:00.272427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:00.325119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:00.325202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:00.326926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27400 TClient is connected to server localhost:27400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:00.578582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:01.845260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346510635852143:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.845313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346510635852148:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.845346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:01.848686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:01.856875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346510635852157:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:12:01.917334Z node 1 :TX_PROXY ERROR: Actor# [1:7494346510635852208:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:02.111821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.197240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:02.733044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:03.668353Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-17T18:12:03.668496Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:12:03.668596Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:12:03.668725Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346519225795532:2966], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7494346519225795178:2966]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7494346519225795532:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:12:03.669020Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346519225795521:2966], SessionActorId: [1:7494346519225795178:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346519225795178:2966]. isRollback=0 2025-04-17T18:12:03.669160Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjM1ZWJmNWMtMzZjYjkwNGQtYTg3MWQ2NzctYTc4MTQ4MWE=, ActorId: [1:7494346519225795178:2966], ActorState: ExecuteState, TraceId: 01js2f6ry9e2ebed4tq2kv86cz, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346519225795522:2966] from: [1:7494346519225795521:2966] 2025-04-17T18:12:03.669207Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346519225795522:2966] TxId: 281474976715665. Ctx: { TraceId: 01js2f6ry9e2ebed4tq2kv86cz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM1ZWJmNWMtMzZjYjkwNGQtYTg3MWQ2NzctYTc4MTQ4MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:12:03.669326Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjM1ZWJmNWMtMzZjYjkwNGQtYTg3MWQ2NzctYTc4MTQ4MWE=, ActorId: [1:7494346519225795178:2966], ActorState: ExecuteState, TraceId: 01js2f6ry9e2ebed4tq2kv86cz, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:12:04.999339Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346502045916895:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:04.999410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 7606, MsgBus: 23714 2025-04-17T18:12:09.270011Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346543278389019:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:09.270114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001785/r3tmp/tmpJmCQz1/pdisk_1.dat 2025-04-17T18:12:09.338744Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7606, node 2 2025-04-17T18:12:09.378881Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:09.378898Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:09.378904Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:09.378976Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:09.393157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:09.393223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:09.394705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23714 TClient is connected to server localhost:23714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:09.646540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
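Editor's note: the `Transaction locks invalidated` / `Operation is aborting because locks are not valid` (issue code 2001) errors earlier in this test log are the expected outcome of the scenario: a concurrent commit broke the optimistic locks, the datashard rejected the transaction with STATUS_LOCKS_BROKEN, and the session replied ABORTED. The usual client-side reaction is to rerun the whole transaction body. Below is a minimal sketch of that control flow only; `TransactionLocksInvalidated`, `run_with_retries`, and `tx_body` are hypothetical placeholder names, not the YDB SDK API (real SDKs ship their own retry helpers).

    import random
    import time

    class TransactionLocksInvalidated(Exception):
        """Placeholder for a client error carrying status ABORTED / issue code 2001."""

    def run_with_retries(tx_body, max_attempts=5):
        # The whole body must be re-executed: once locks are invalidated,
        # every read made inside the transaction is stale.
        for attempt in range(max_attempts):
            try:
                return tx_body()
            except TransactionLocksInvalidated:
                if attempt == max_attempts - 1:
                    raise
                # Exponential backoff with jitter before re-reading and re-committing.
                time.sleep((2 ** attempt) * 0.05 + random.uniform(0, 0.05))

The backoff matters: retrying immediately tends to collide with the same competing writer again, while a short randomized delay lets it finish first.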
2025-04-17T18:12:11.251527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346551868324266:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 20 ... imr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.776568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7494346564753231783:3146];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.777373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7494346564753231954:3186];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.777779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346564753231933:3179];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.778556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7494346564753231954:3186];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.778688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346564753231933:3179];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.780241Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7494346564753231760:3136];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.780656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7494346564753231760:3136];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.780818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7494346564753231945:3183];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7494346564753231945:3183];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781447Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7494346564753231848:3163];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7494346564753231848:3163];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7494346564753231810:3147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781977Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7494346564753231810:3147];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7494346564753231819:3151];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7494346564753231819:3151];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7494346564753231750:3131];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7494346564753231750:3131];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.786839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7494346564753231662:3112];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.787086Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7494346564753231662:3112];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.787342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7494346564753231935:3180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.787567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7494346564753231935:3180];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.788418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7494346564753231928:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.788647Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7494346564753231928:3177];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.788794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7494346564753231947:3184];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.789007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7494346564753231947:3184];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.789863Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346564753232028:3193];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.790084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346564753232028:3193];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.791523Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7494346564753231843:3161];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.791927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7494346564753231843:3161];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.792147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346564753231962:3191];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.792489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346564753231962:3191];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.794802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7494346564753231825:3153];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.795050Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7494346564753231825:3153];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.795900Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346564753232098:3194];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.796163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346564753232098:3194];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.796503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7494346564753231837:3158];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.796732Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7494346564753231837:3158];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.798730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7494346564753232025:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.798988Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7494346564753232025:3192];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.801723Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7494346564753231850:3164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.801998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7494346564753231850:3164];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.808637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346564753231841:3160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.808901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346564753231841:3160];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 25894, MsgBus: 12873 2025-04-17T18:11:55.106547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346483385456653:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:11:55.106675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00179b/r3tmp/tmptiAy4m/pdisk_1.dat 2025-04-17T18:11:55.335052Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25894, node 1 2025-04-17T18:11:55.379967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:11:55.380006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:11:55.380016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:11:55.380169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:11:55.441992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:11:55.442105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:11:55.443927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12873 TClient is connected to server localhost:12873 WaitRootIsUp 'Root'... 
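Editor's note: the long run of `skip_indexation;reason=disabled` warnings and the repeated `WAIT_INDEXATION: 0` lines above suggest the test harness polls an indexation counter until it settles at the expected value before finishing. A minimal sketch of such a settle-poll loop follows; `wait_for_value` and `read_counter` are hypothetical names, and the stable-read count is an assumption inferred from the five repeated log lines, not confirmed behavior of the harness.

    import time

    def wait_for_value(read_counter, expected=0, stable_reads=5,
                       interval=1.0, timeout=60.0):
        # Re-read the counter until it holds `expected` for `stable_reads`
        # consecutive polls, mirroring the repeated WAIT_INDEXATION: 0 output.
        deadline = time.monotonic() + timeout
        seen = 0
        while time.monotonic() < deadline:
            value = read_counter()
            print(f"WAIT_INDEXATION: {value}")
            seen = seen + 1 if value == expected else 0
            if seen >= stable_reads:
                return
            time.sleep(interval)
        raise TimeoutError("counter did not settle at the expected value")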
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:11:55.747482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:11:56.835205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346487680424611:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:56.835301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346487680424621:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:56.835349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:11:56.838820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:11:56.846537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346487680424625:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:11:56.909334Z node 1 :TX_PROXY ERROR: Actor# [1:7494346487680424677:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:11:57.096036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:11:57.203859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:57.203864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:57.204018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:57.204215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:57.204305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:57.204388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:57.204477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:57.204484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:57.204608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:57.204660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:11:57.204696Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:57.204743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:11:57.204824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:57.204826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:11:57.204914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:57.204938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:11:57.205016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:11:57.205049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:57.205144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494346491975392171:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:57.205163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:11:57.205250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:11:57.205344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:11:57.205438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:11:57.205518Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:7494346491975392165:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:11:57.230567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346491975392175:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:11:57.230625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494346491975392175:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:11:57.230781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;sel ... 17T18:12:16.779525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346562787376662:3103];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.779869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7494346562787376662:3103];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.780078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7494346562787376782:3121];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.780414Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7494346562787376782:3121];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.780672Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346562787376805:3128];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7494346562787376805:3128];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7494346562787376787:3123];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7494346562787376787:3123];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.781749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346562787376840:3133];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.782083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346562787376840:3133];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 
2025-04-17T18:12:16.782309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7494346562787376709:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.782678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7494346562787376709:3110];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.782905Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7494346562787376725:3114];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7494346562787376725:3114];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7494346562787376789:3124];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7494346562787376719:3113];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783709Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7494346562787376789:3124];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783807Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7494346562787376719:3113];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.783851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7494346562787376736:3118];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784021Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7494346562787376715:3112];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7494346562787376736:3118];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346562787376728:3115];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784369Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038027;self_id=[2:7494346562787376715:3112];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7494346562787376728:3115];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7494346562787376540:3090];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7494346562787376746:3119];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7494346562787376540:3090];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7494346562787376780:3120];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.784932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7494346562787376746:3119];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.785075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7494346562787376780:3120];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.785160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7494346562787376521:3082];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.785206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7494346562787376686:3107];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.785418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7494346562787376686:3107];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.785545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7494346562787376521:3082];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.788520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7494346562787376684:3106];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.788919Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038044;self_id=[2:7494346562787376684:3106];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.789151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7494346562787376982:3139];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.789447Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7494346562787376845:3134];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.789518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7494346562787376982:3139];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.789680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7494346562787376845:3134];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:16.981833Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715667; 2025-04-17T18:12:16.983239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:17.252552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[2:7494346554197437064:2407];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037898;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:17.252732Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[2:7494346554197437064:2407];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge >> KqpYql::InsertCVList+useSink >> KqpScripting::UnsafeTimestampCast >> KqpYql::TestUuidDefaultColumn [GOOD] >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] >> KqpSinkMvcc::OltpMultiSinks [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::FromBytes >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> KqpYql::EvaluateIf [GOOD] >> KqpYql::EvaluateFor >> KqpYql::ScriptUdf [GOOD] >> KqpYql::SelectNoAsciiValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 10990, MsgBus: 11055 2025-04-17T18:12:23.199233Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346605519721715:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001444/r3tmp/tmpIJBRez/pdisk_1.dat 2025-04-17T18:12:23.499663Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10990, node 1 2025-04-17T18:12:23.578639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.578767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.580531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587262Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11055 TClient is connected to server localhost:11055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.110391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.214580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346614109656948:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.214676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.599356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.707432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346614109657050:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.707504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.707543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346614109657055:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.712086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.719734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346614109657057:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:12:25.814138Z node 1 :TX_PROXY ERROR: Actor# [1:7494346614109657108:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 26843, MsgBus: 5836 2025-04-17T18:12:23.198442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346604037050948:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001427/r3tmp/tmpYFI24Z/pdisk_1.dat 2025-04-17T18:12:23.485024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26843, node 1 2025-04-17T18:12:23.547587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.547683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.549481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5836 TClient is connected to server localhost:5836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:24.068833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.317387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612626986192:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.317494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.599399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.730153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612626986294:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.730233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.730249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612626986299:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.732865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.740129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346612626986301:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:12:25.820303Z node 1 :TX_PROXY ERROR: Actor# [1:7494346612626986352:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestBootProgress [GOOD] Test command err: 2025-04-17T18:09:23.573991Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:23.577910Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:23.578139Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:23.578716Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:23.579810Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:23.579874Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:23.580791Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:48:2075] ControllerId# 72057594037932033 2025-04-17T18:09:23.580827Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:23.580962Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:23.581170Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:23.592710Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:23.592758Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:23.594945Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:56:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.595116Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:57:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.595260Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:58:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.595434Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:59:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.595575Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:60:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.595717Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:61:2085] targetNodeId# 1 Marker# DSP01 
2025-04-17T18:09:23.595871Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:62:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.595900Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:23.595995Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:48:2075] 2025-04-17T18:09:23.596028Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:48:2075] 2025-04-17T18:09:23.596082Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:23.596122Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:23.596980Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:23.597063Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:23.599617Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:23.599726Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:23.600589Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:71:2073] ControllerId# 72057594037932033 2025-04-17T18:09:23.600619Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:23.600682Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:23.600848Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:23.606405Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:23.606474Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:23.608186Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:77:2077] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.608320Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:78:2078] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.608459Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:79:2079] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.608580Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:80:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.608717Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:81:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.608850Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:82:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.608993Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:83:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:23.609018Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:23.609070Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:71:2073] 2025-04-17T18:09:23.609096Z node 2 :PIPE_CLIENT 
DEBUG: TClient[72057594037932033] lookup [2:71:2073] 2025-04-17T18:09:23.609139Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:23.609192Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:23.609564Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:23.609976Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:23.610226Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:71:2073] 2025-04-17T18:09:23.618387Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:23.626302Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:23.626571Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:23.678416Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-04-17T18:09:23.678483Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:23.678533Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:23.678720Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:23.678750Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:23.683773Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:23.685554Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:23.690359Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:23.690422Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:23.690549Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:23.690997Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:23.697486Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-04-17T18:09:23.697560Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:23.697809Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:23.698095Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation 
{ NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:23.700626Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:23.700726Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:23.700925Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-17T18:09:23.700987Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-17T18:09:23.701016Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-17T18:09:23.701069Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:23.701142Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:52:2064] 2025-04-17T18:09:23.701184Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:52:2064] 2025-04-17T18:09:23.701326Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:75:2064] 2025-04-17T18:09:23.701346Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:75:2064] 2025-04-17T18:09:23.701381Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-17T18:09:23.701424Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [2:95:2087] 2025-04-17T18:09:23.701460Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [2:95:20 ... 
.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:12:21.707308Z node 13 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2147483651 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.546 sample PartId# [72075186224037893:1:1:1:28672:35:1] QueryCount# 1 VDiskId# [80000003:1:0:0:0] NodeId# 13 } TEvVPutResult{ TimestampMs# 4.766 VDiskId# [80000003:1:0:0:0] NodeId# 13 Status# OK } ] } 2025-04-17T18:12:21.707424Z node 13 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72075186224037893:1:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:12:21.707501Z node 13 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] received {EvVPutResult Status# OK ID# [72075186224037893:1:2:0:0:71:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80559 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [80000001:1:0:0:0] Marker# BPP01 2025-04-17T18:12:21.707549Z node 13 :BS_PROXY_PUT DEBUG: [ebee40e0668be2ee] Result# TEvPutResult {Id# [72075186224037893:1:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483649 Marker# BPP12 2025-04-17T18:12:21.707591Z node 13 :BS_PROXY_PUT INFO: [ebee40e0668be2ee] SendReply putResult# TEvPutResult {Id# [72075186224037893:1:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:12:21.707648Z node 13 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2147483649 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.629 sample PartId# [72075186224037893:1:2:0:0:71:1] QueryCount# 1 VDiskId# [80000001:1:0:0:0] NodeId# 13 } TEvVPutResult{ TimestampMs# 3.896 VDiskId# [80000001:1:0:0:0] NodeId# 13 Status# OK } ] } 2025-04-17T18:12:21.707765Z node 13 :TABLET_MAIN DEBUG: Tablet: 72075186224037893 GcCollect 0 channel, tablet:gen:step => 1:0 Marker# TSYS28 2025-04-17T18:12:21.707831Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} commited cookie 2 for step 1 2025-04-17T18:12:21.707907Z node 13 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72075186224037893:1:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:12:21.708069Z node 13 :BS_PROXY_PUT DEBUG: [8d3ab1a908111ee8] received {EvVPutResult Status# OK ID# [72075186224037893:1:2:1:8192:289:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 82275 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [80000003:1:0:0:0] Marker# BPP01 2025-04-17T18:12:21.708126Z node 13 :BS_PROXY_PUT DEBUG: [8d3ab1a908111ee8] Result# TEvPutResult {Id# [72075186224037893:1:2:1:8192:289:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483651 Marker# BPP12 2025-04-17T18:12:21.708153Z node 13 :BS_PROXY_PUT INFO: [8d3ab1a908111ee8] SendReply putResult# TEvPutResult {Id# [72075186224037893:1:2:1:8192:289:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:12:21.708205Z node 13 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2147483651 HandleClass# TabletLog 
Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.729 sample PartId# [72075186224037893:1:2:1:8192:289:1] QueryCount# 1 VDiskId# [80000003:1:0:0:0] NodeId# 13 } TEvVPutResult{ TimestampMs# 4.38 VDiskId# [80000003:1:0:0:0] NodeId# 13 Status# OK } ] } 2025-04-17T18:12:21.708358Z node 13 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72075186224037893:1:2:1:8192:289:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:12:21.708419Z node 13 :BS_PROXY_COLLECT INFO: [fb0feab7af645c61] bootstrap ActorId# [13:637:2561] Group# 2147483649 TabletId# 72075186224037893 Channel# 0 RecordGeneration# 1 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 1 CollectStep# 0 Collect# true Hard# false RestartCounter# 0 Marker# DSPC03 2025-04-17T18:12:21.708506Z node 13 :BS_PROXY DEBUG: Send to queueActorId# [13:584:2518] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037893:1:1:0] collect=[1:0] cookie# 0 2025-04-17T18:12:21.708624Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} commited cookie 1 for step 2 2025-04-17T18:12:21.708706Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} Tx{2, NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} queued, type NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit 2025-04-17T18:12:21.708757Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} Tx{2, NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:12:21.709200Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} Tx{2, NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:12:21.709257Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} Tx{2, NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:12:21.709394Z node 13 :TABLET_MAIN INFO: Tablet: 72075186224037893 Active! 
Generation: 1, Type: Dummy started in 9msec Marker# TSYS24 2025-04-17T18:12:21.709425Z node 13 :PIPE_SERVER DEBUG: [72075186224037893] Activate 2025-04-17T18:12:21.709464Z node 13 :BS_PROXY_COLLECT INFO: [22c0b25a062593d6] bootstrap ActorId# [13:638:2562] Group# 2147483651 TabletId# 72075186224037893 Channel# 1 RecordGeneration# 1 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 1 CollectStep# 0 Collect# true Hard# false RestartCounter# 0 Marker# DSPC03 2025-04-17T18:12:21.709503Z node 13 :BS_PROXY DEBUG: Send to queueActorId# [13:602:2531] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037893:1:1:1] collect=[1:0] cookie# 0 2025-04-17T18:12:21.709766Z node 13 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [13:51:2092] 2025-04-17T18:12:21.709791Z node 13 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [13:51:2092] 2025-04-17T18:12:21.710063Z node 13 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [13:50:2092] EventType# 268960257 2025-04-17T18:12:21.710131Z node 13 :BS_PROXY_COLLECT DEBUG: [fb0feab7af645c61] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037893 RecordGeneration# 1 Channel# 0 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2025-04-17T18:12:21.710196Z node 13 :BS_PROXY_COLLECT INFO: [fb0feab7af645c61] Result# TEvCollectGarbageResult {TabletId# 72075186224037893 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-04-17T18:12:21.710438Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{36, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-17T18:12:21.710490Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{36, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:12:21.710737Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{36, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{25, redo 162b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-17T18:12:21.710801Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{36, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:12:21.710943Z node 13 :BS_PROXY_COLLECT DEBUG: [22c0b25a062593d6] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037893 RecordGeneration# 1 Channel# 1 VDisk# [80000003:1:0:0:0]} Marker# DSPC01 2025-04-17T18:12:21.710977Z node 13 :BS_PROXY_COLLECT INFO: [22c0b25a062593d6] Result# TEvCollectGarbageResult {TabletId# 72075186224037893 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-04-17T18:12:21.711080Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{37, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-17T18:12:21.711109Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{37, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:12:21.711168Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{37, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{26, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:12:21.711202Z node 13 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{37, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 
2025-04-17T18:12:21.711573Z node 13 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [13:640:2564] 2025-04-17T18:12:21.711609Z node 13 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [13:640:2564] 2025-04-17T18:12:21.711697Z node 13 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037893 entry.State: StInit ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:12:21.711792Z node 13 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037893 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:12:21.711895Z node 13 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037893 Cookie: 0} 2025-04-17T18:12:21.711950Z node 13 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037893 Cookie: 1} 2025-04-17T18:12:21.711972Z node 13 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037893 Cookie: 2} 2025-04-17T18:12:21.712023Z node 13 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037893 CurrentLeader: [13:578:2515] CurrentLeaderTablet: [13:593:2527] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:12:21.712091Z node 13 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037893 CurrentLeader: [13:578:2515] CurrentLeaderTablet: [13:593:2527] CurrentGeneration: 1 CurrentStep: 0} 2025-04-17T18:12:21.712163Z node 13 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037893 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037893 Cookie: 0 CurrentLeader: [13:578:2515] CurrentLeaderTablet: [13:593:2527] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-17T18:12:21.712211Z node 13 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037893 followers: 0 2025-04-17T18:12:21.712272Z node 13 :TABLET_RESOLVER DEBUG: SelectForward node 13 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [13:578:2515] 2025-04-17T18:12:21.712361Z node 13 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result local node, try to connect [13:640:2564] 2025-04-17T18:12:21.712402Z node 13 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [13:640:2564] 2025-04-17T18:12:21.712490Z node 13 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [13:640:2564] 2025-04-17T18:12:21.712614Z node 13 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [13:640:2564] 2025-04-17T18:12:21.712657Z node 13 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [13:640:2564] >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 25236, MsgBus: 32058 2025-04-17T18:12:08.315900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346539194179319:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:08.315958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001784/r3tmp/tmpiFhGoq/pdisk_1.dat 2025-04-17T18:12:08.544657Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25236, node 1 2025-04-17T18:12:08.593301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:08.593318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:08.593327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:08.593448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:08.663660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:08.663788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:08.665381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32058 TClient is connected to server localhost:32058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:08.925137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.055480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346547784114571:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.055532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346547784114555:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.055838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.060246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:10.071430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346547784114584:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:12:10.135989Z node 1 :TX_PROXY ERROR: Actor# [1:7494346547784114635:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:10.323959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:10.427835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.064798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:12:13.315772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346539194179319:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:13.315849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 24897, MsgBus: 4006 2025-04-17T18:12:17.647836Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346579148623378:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:17.647881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001784/r3tmp/tmpLRVWUX/pdisk_1.dat 2025-04-17T18:12:17.705763Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24897, node 2 2025-04-17T18:12:17.752745Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:17.752760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:17.752767Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:17.752837Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:17.767456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:17.767515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:17.769117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4006 TClient is connected to server localhost:4006 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:18.026407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:19.484935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346587738558600:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.485069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.485199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346587738558634:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:19.487661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:19.494669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346587738558638:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:12:19.559093Z node 2 :TX_PROXY ERROR: Actor# [2:7494346587738558689:2332] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:19.588093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:19.613318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:20.244578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:12:22.648036Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346579148623378:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:22.648143Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 20100, MsgBus: 27557 2025-04-17T18:12:01.133280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:12:01.133673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:12:01.133725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00178b/r3tmp/tmppDM9on/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20100, node 1 2025-04-17T18:12:01.496367Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:01.499328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:01.499372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:01.499400Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:01.499778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:01.534787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:01.534946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:01.546471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27557 TClient is connected to server localhost:27557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:01.741598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:01.861093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:02.188374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:02.524493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:02.790606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:03.390461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1809:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:03.390669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:03.411438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:03.622912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:03.855244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.090939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.319863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.628547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:04.871778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:05.175095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3978], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.175201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.175437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2563:3983], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:05.179060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:05.320355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2565:3985], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:05.366719Z node 1 :TX_PROXY ERROR: Actor# [1:2634:4035] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:06.127023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:06.357350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:06.655781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12428, MsgBus: 22582 2025-04-17T18:12:10.339081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:12:10.339280Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:12:10.339399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00178b/r3tmp/tmptz5Tnr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12428, node 2 2025-04-17T18:12:10.649068Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:10.649772Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:10.649809Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:10.649837Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:10.650019Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:10.684112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:10.684229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:10.695286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22582 TClient is connected to server localhost:22582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:10.870055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18 ... 
94046644480 2025-04-17T18:12:12.998074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:13.242595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:13.510193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:13.819919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:14.109308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2552:3971], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:14.109402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:14.109650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2557:3976], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:14.113934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:14.282883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2559:3978], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:14.328362Z node 2 :TX_PROXY ERROR: Actor# [2:2628:4028] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:15.014064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.245282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:15.586319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14747, MsgBus: 23668 2025-04-17T18:12:19.521644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:12:19.521772Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:12:19.521860Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00178b/r3tmp/tmpeZJqTi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14747, node 3 2025-04-17T18:12:19.808172Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:19.808968Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:19.809009Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:19.809047Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:19.809369Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:19.844233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:19.844398Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:19.855641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23668 TClient is connected to server localhost:23668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:20.065761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:20.084486Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:20.319459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:20.665477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:20.917730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:21.341726Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1807:3402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:21.341903Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:21.358420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:21.554722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:21.792419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:22.018125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:22.257305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:22.502469Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:22.847717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:23.126733Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2550:3971], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:23.126923Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:23.127266Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2555:3976], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:23.132226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:23.303020Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2557:3978], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:23.348986Z node 3 :TX_PROXY ERROR: Actor# [3:2626:4028] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:24.569565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:24.823195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.186423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
>> KqpScripting::SelectNullType [GOOD]
>> KqpScripting::StreamDdlAndDml
>> KqpScripting::ScriptExplainCreatedTable
>> KqpScripting::StreamExecuteYqlScriptMixed [GOOD]
>> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan
>> KqpScripting::StreamExecuteYqlScriptScan
>> KqpYql::UuidPrimaryKeyBulkUpsert
>> KqpYql::UuidPrimaryKey
>> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced
>> KqpYql::InsertCVList+useSink [GOOD]
>> KqpYql::InsertCVList-useSink
>> KqpScripting::NoAstSizeLimit [GOOD]
>> KqpScripting::UnsafeTimestampCast [GOOD]
>> KqpScripting::SystemTables
>> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD]
>> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce
>> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD]
>> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD]
Test command err:
Trying to start YDB, gRPC: 22495, MsgBus: 1612 2025-04-17T18:12:23.206806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346603429176821:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.206899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001450/r3tmp/tmpHQw8U7/pdisk_1.dat 2025-04-17T18:12:23.475988Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22495, node 1 2025-04-17T18:12:23.553614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.553709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.555303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587385Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1612 TClient is connected to server localhost:1612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.130814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.159055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.289181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.405715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.451299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.384908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612019113192:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.384994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.653052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.675026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.695619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.716820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.740657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.804023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.828356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.868445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612019113867:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.868505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.868568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612019113872:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.871341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.878883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346612019113874:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:25.945475Z node 1 :TX_PROXY ERROR: Actor# [1:7494346612019113927:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:26.702367Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913546733, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 3603, MsgBus: 21262 2025-04-17T18:12:27.248992Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346619765463876:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.249051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001450/r3tmp/tmpR17Pqg/pdisk_1.dat 2025-04-17T18:12:27.319533Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3603, node 2 2025-04-17T18:12:27.370912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:27.370985Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:27.371995Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:27.372013Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:27.372019Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:27.372139Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:27.372611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21262 TClient is connected to server localhost:21262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:27.674078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:29.272915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346628355399118:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.272964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.291278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.324794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346628355399220:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.324866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.350417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346628355399231:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.350485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.350487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346628355399236:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.352945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:12:29.360301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346628355399238:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:12:29.439257Z node 2 :TX_PROXY ERROR: Actor# [2:7494346628355399289:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpYql::FromBytes [GOOD]
>> KqpYql::EvaluateFor [GOOD]
>> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD]
>> KqpYql::SelectNoAsciiValue [GOOD]
>> KqpScripting::ScanQueryDisable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD]
Test command err:
Trying to start YDB, gRPC: 22568, MsgBus: 2446 2025-04-17T18:12:23.198236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346602212666493:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001425/r3tmp/tmpwEvo2j/pdisk_1.dat 2025-04-17T18:12:23.455309Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22568, node 1 2025-04-17T18:12:23.537201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.537410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.538799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587699Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2446 TClient is connected to server localhost:2446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-17T18:12:24.124486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.147096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.262602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.392831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.464370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.352820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346610802602853:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.352884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.598946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.623876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.646501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.665935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.687647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.714028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.741347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.780974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346610802603516:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.781019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.781128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346610802603521:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.783926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.791565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346610802603523:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:25.866252Z node 1 :TX_PROXY ERROR: Actor# [1:7494346610802603578:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 63494, MsgBus: 12163 2025-04-17T18:12:27.200420Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346619150226134:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.200494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001425/r3tmp/tmpDqjcfy/pdisk_1.dat 2025-04-17T18:12:27.288191Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63494, node 2 2025-04-17T18:12:27.326280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:27.326342Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:27.327643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:27.328780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:27.328797Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:27.328801Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:27.328871Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12163 TClient is connected to server localhost:12163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:27.693301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.701346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:27.738877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.885566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.941170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.342627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346627740162500:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.342705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.387669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.411183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.434646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.459699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.484156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.512089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.540618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.614452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346627740163160:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.614532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.614628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346627740163166:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.617352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:29.624217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346627740163168:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:29.706481Z node 2 :TX_PROXY ERROR: Actor# [2:7494346627740163225:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::JoinIndexLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 29675, MsgBus: 4363 2025-04-17T18:12:23.198412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346603427638736:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001430/r3tmp/tmplCM8Sa/pdisk_1.dat 2025-04-17T18:12:23.476137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29675, node 1 2025-04-17T18:12:23.558380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.558479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.560358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4363 TClient is connected to server localhost:4363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.074503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:24.099200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.205388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.328869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.399554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.383673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612017575085:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.383778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.605476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.629098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.654012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.676598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.698363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.739532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.764253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.835709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612017575748:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.835761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.835804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612017575753:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.838959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.847308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346612017575755:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:25.922819Z node 1 :TX_PROXY ERROR: Actor# [1:7494346612017575810:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2572, MsgBus: 19866 2025-04-17T18:12:27.217781Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346620107832411:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.217892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001430/r3tmp/tmprxebO0/pdisk_1.dat 2025-04-17T18:12:27.289812Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2572, node 2 2025-04-17T18:12:27.341610Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:27.341710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:27.343182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:27.350097Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:27.350127Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:27.350135Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:27.350256Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19866 TClient is connected to server localhost:19866 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:27.667461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.675479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
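[Editorial note] The test reported as KqpYql::EvaluateFor [GOOD] above exercises YQL's compile-time EVALUATE FOR loop. For readers unfamiliar with the construct, a minimal sketch follows; the syntax is assumed from YQL's documented conventions, and the list and loop body are illustrative, not the test's actual query:

    -- A minimal sketch, assuming YQL's EVALUATE FOR construct;
    -- the list and body are illustrative, not taken from this test.
    EVALUATE FOR $i IN AsList(1, 2, 3) DO BEGIN
        SELECT $i AS iteration;  -- expanded once per list element during compilation
    END DO;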
2025-04-17T18:12:27.717153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.814273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.883991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.440714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346628697768766:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.440784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.481330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.504942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.528922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.555493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.581713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.610121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.686006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.757210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346628697769434:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.757287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.757297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346628697769439:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.759725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:29.766580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346628697769441:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:29.850841Z node 2 :TX_PROXY ERROR: Actor# [2:7494346628697769497:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 21513, MsgBus: 20158 2025-04-17T18:12:23.198556Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346603637287321:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001428/r3tmp/tmp9vw3Wi/pdisk_1.dat 2025-04-17T18:12:23.468043Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21513, node 1 2025-04-17T18:12:23.557976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.558101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.559646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587609Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20158 TClient is connected to server localhost:20158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.082302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
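[Editorial note] The warning/error cluster that recurs throughout this output — KQP_WORKLOAD_SERVICE failing to fetch pool `default` with NOT_FOUND, TPoolCreatorActor scheduling a retry after "Transaction ... completed, doublechecking", and TX_PROXY reporting "path exist, request accepts it" for /Root/.metadata/workload_manager/pools/default — is the workload service lazily creating the default resource pool (the ESchemeOpCreateResourcePool suboperation) on first query. Several actors race to create it; the losers find the path already present and the request is accepted, so these messages are expected bootstrap noise rather than test failures. For contrast, a minimal YQL sketch of creating a resource pool explicitly; the pool name and limits here are illustrative assumptions, not values taken from this run:

    -- A minimal sketch, assuming YDB's YQL resource pool DDL;
    -- the name and limits below are illustrative, not from this log.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to execute at once
        QUEUE_SIZE = 100              -- queries allowed to wait beyond that limit
    );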
2025-04-17T18:12:24.105968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.224649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.341320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.389393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.301711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612227223669:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.301827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.599212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.622126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.643628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.665179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.687663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.714496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.756816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.796872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612227224338:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.796955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.797000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612227224343:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.799475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.805668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346612227224345:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:25.890209Z node 1 :TX_PROXY ERROR: Actor# [1:7494346612227224401:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26032, MsgBus: 27376 2025-04-17T18:12:27.204796Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346622188758558:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.204894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001428/r3tmp/tmp7I7H4r/pdisk_1.dat 2025-04-17T18:12:27.282827Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26032, node 2 2025-04-17T18:12:27.332541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:27.332593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:27.333814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:27.340860Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:27.340883Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:27.340890Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:27.340986Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27376 TClient is connected to server localhost:27376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:27.688970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.696454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:27.764120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.900004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.970234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.359183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346630778694919:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.359263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.409404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.434822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.459808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.485259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.511374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.576534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.614214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.653414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346630778695580:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.653467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.653470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346630778695585:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.655662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:29.662535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346630778695587:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:29.737242Z node 2 :TX_PROXY ERROR: Actor# [2:7494346630778695639:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] >> KqpYql::TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 13378, MsgBus: 30952 2025-04-17T18:12:23.198294Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346604849147120:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00141f/r3tmp/tmpBeVrnk/pdisk_1.dat 2025-04-17T18:12:23.510267Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13378, node 1 2025-04-17T18:12:23.519212Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:12:23.587285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587322Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:23.594004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.594072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.595753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30952 TClient is connected to server localhost:30952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.087623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
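[Editorial note] The line `Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]]` earlier in this block appears to be five single-row result sets streamed back from one YQL script, matching the test name StreamExecuteYqlScriptSeveralQueriesComplex. A minimal sketch of a script with that result shape; the literals come from the result above, while the test's actual script is presumably more involved:

    -- A minimal sketch: in a streamed YQL script, each top-level
    -- statement produces its own result set.
    SELECT 101u;
    SELECT 102u;
    SELECT 103u;
    SELECT 104u;
    SELECT 105u;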
2025-04-17T18:12:24.105994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.226544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.356275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.415836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.412691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346613439083484:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.412770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.674385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.696187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.717034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.739821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.763559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.826894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.853395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.891800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346613439084143:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.891867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.891876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346613439084148:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.893829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.900389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346613439084150:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:25.998865Z node 1 :TX_PROXY ERROR: Actor# [1:7494346613439084205:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 Trying to start YDB, gRPC: 62552, MsgBus: 5140 2025-04-17T18:12:27.324513Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346622944908076:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.324586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00141f/r3tmp/tmpNOEiYa/pdisk_1.dat 2025-04-17T18:12:27.401677Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62552, node 2 2025-04-17T18:12:27.451397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:27.451496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:27.453150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:27.461992Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:27.462023Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:27.462030Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:27.462144Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5140 TClient is connected to server localhost:5140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:27.852941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.865998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.907292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
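[Editorial note] The diagnostic stack above ending in "Module not loaded for script type: Python3" shows a query failing type annotation (code 1030) because it calls a script UDF while this test binary does not link the Python3 UDF module. A minimal sketch of the kind of query that reaches ScriptUdf, assuming YQL's documented inline script UDF syntax; the function name and body are illustrative:

    -- A minimal sketch, assuming YQL's Python3 script UDF syntax;
    -- without the Python3 UDF module loaded, compilation fails with
    -- "Module not loaded for script type: Python3".
    $script = @@
    def inc(x):
        return x + 1
    @@;
    $inc = Python3::inc(Callable<(Int64?)->Int64?>, $script);
    SELECT $inc(41);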
2025-04-17T18:12:28.047423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.116568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.628103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346631534844426:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.628196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.666532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.690400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.711838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.734284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.757902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.785801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.812063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.886779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346631534845088:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.886853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.886919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346631534845093:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.889533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:29.895864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346631534845095:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:29.966944Z node 2 :TX_PROXY ERROR: Actor# [2:7494346631534845150:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:30.664411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.894709Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913550933, txId: 281474976715674] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 18099, MsgBus: 64627 2025-04-17T18:12:23.198441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346605630839384:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.198882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001422/r3tmp/tmpyg1CKk/pdisk_1.dat 2025-04-17T18:12:23.460712Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18099, node 1 2025-04-17T18:12:23.535612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.535679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.537511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64627 TClient is connected to server localhost:64627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:24.084153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.105713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.207423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.331492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.404507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.288905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346614220775722:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.289001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.598949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.619937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.640097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.660107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.682877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.747499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.776330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.816774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346614220776390:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.816844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.816913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346614220776395:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.819069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.825922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346614220776397:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:25.917073Z node 1 :TX_PROXY ERROR: Actor# [1:7494346614220776452:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:26.892853Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913546915, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 6797, MsgBus: 27270 2025-04-17T18:12:27.427998Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346622003933166:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.428079Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001422/r3tmp/tmph8NLKJ/pdisk_1.dat 2025-04-17T18:12:27.493098Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6797, node 2 2025-04-17T18:12:27.541200Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:27.541216Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:27.541221Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:27.541286Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:27.554102Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:27.554154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:27.555545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27270 TClient is connected to server localhost:27270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:27.912751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:27.928891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.969982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.086852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.159439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.561955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346630593869516:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.562031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.579865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.601981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.623906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.645857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.670411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.735091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.761843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.834805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346630593870183:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.834884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.834899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346630593870188:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.836950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:29.843336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346630593870190:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:29.910255Z node 2 :TX_PROXY ERROR: Actor# [2:7494346630593870243:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:30.970843Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913551010, txId: 281474976715672] shutting down >> KqpScripting::StreamDdlAndDml [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 9550, MsgBus: 63784 2025-04-17T18:12:29.205871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346630109476485:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:29.205994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001408/r3tmp/tmpBWheIq/pdisk_1.dat 2025-04-17T18:12:29.462098Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9550, node 1 2025-04-17T18:12:29.521841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:29.521861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:29.521867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:29.522002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:29.557265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:29.557394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:29.559228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63784 TClient is connected to server localhost:63784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:29.871190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:31.281693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638699411730:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.281824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.505384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.628229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638699411839:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.628310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.628385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638699411844:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.631393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:12:31.639228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346638699411846:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:12:31.723897Z node 1 :TX_PROXY ERROR: Actor# [1:7494346638699411897:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 21405, MsgBus: 15243 2025-04-17T18:12:23.198297Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346604166382849:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.198391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00141e/r3tmp/tmp4wjvgQ/pdisk_1.dat 2025-04-17T18:12:23.505580Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21405, node 1 2025-04-17T18:12:23.587385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587431Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:23.589917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.590032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.591721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15243 TClient is connected to server localhost:15243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.133335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:24.153077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.272708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.384637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.440953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.316457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612756319198:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.316537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.599042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.619122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.640231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.660399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.721802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.744083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.784101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.853736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612756319871:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.853782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.853914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612756319876:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.856439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.862065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346612756319878:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:25.925290Z node 1 :TX_PROXY ERROR: Actor# [1:7494346612756319932:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:27.158880Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547167, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 21081, MsgBus: 25391 2025-04-17T18:12:27.882026Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346620585206620:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.882078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00141e/r3tmp/tmpQW2Vdz/pdisk_1.dat 2025-04-17T18:12:28.014224Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:28.027914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:28.028006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:28.029897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21081, node 2 2025-04-17T18:12:28.068610Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:28.068635Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:28.068649Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:28.068788Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25391 TClient is connected to server localhost:25391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:28.402693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:28.418770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.486987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.598029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.666211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.982194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346629175142982:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.982271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.021792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.044068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.065197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.084911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.108679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.145715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.171331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.213628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346633470110931:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.213711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.213722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346633470110936:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.216640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:30.224673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346633470110938:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:30.277051Z node 2 :TX_PROXY ERROR: Actor# [2:7494346633470110992:3535] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 25055, MsgBus: 16771 2025-04-17T18:12:23.198255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346603828927192:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00144a/r3tmp/tmpcoopPk/pdisk_1.dat 2025-04-17T18:12:23.500885Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25055, node 1 2025-04-17T18:12:23.590686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.590798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.591459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.591478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.591492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.591627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:23.592360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16771 TClient is connected to server localhost:16771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.127776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:24.153338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.277763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.391141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.463526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.348599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612418863545:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.348704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.598943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.620092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.639860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.660453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.683045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.745708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.808837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.881420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612418864225:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.881486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.881494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612418864230:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.883778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.890065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346612418864232:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:25.963856Z node 1 :TX_PROXY ERROR: Actor# [1:7494346612418864286:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:26.706872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.848626Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913546887, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 10179, MsgBus: 12462 2025-04-17T18:12:27.497444Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346622569254384:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:27.497533Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00144a/r3tmp/tmpY7aVz9/pdisk_1.dat 2025-04-17T18:12:27.586416Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10179, node 2 2025-04-17T18:12:27.629656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:27.629727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:27.631286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:27.632845Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:27.632864Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:27.632869Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:27.632943Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12462 TClient is connected to server localhost:12462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:28.008140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.024773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.094161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.225595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.294030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.851159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346631159190758:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.851224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:29.913626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.935597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.957909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:29.979961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.002912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.030910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.095745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.169364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346635454158719:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.169430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.169480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346635454158724:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.172147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:30.179408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346635454158726:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:30.259796Z node 2 :TX_PROXY ERROR: Actor# [2:7494346635454158779:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:31.061999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.617139Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913551647, txId: 281474976710674] shutting down >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> KqpYql::EvaluateExprPgNull >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> KqpYql::UuidPrimaryKey [GOOD] >> KqpYql::EvaluateExpr1 >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpYql::InsertIgnore >> KqpScripting::StreamExecuteYqlScriptData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 5813, MsgBus: 24073 2025-04-17T18:12:24.357702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346607749675791:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:24.357825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001419/r3tmp/tmpAMu75Y/pdisk_1.dat 2025-04-17T18:12:24.674240Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5813, node 1 2025-04-17T18:12:24.701265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:24.701290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:24.701299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:24.701412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:24.725894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:24.726016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:24.727637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24073 TClient is connected to server localhost:24073 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:25.056200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.072226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.178456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.300583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.373048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:26.430829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346616339612170:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.430927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.660420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.682857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.705268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.725846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.745725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.781351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.803674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.875803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346616339612832:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.875870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.875893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346616339612837:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.878277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:26.885709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346616339612839:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:26.971126Z node 1 :TX_PROXY ERROR: Actor# [1:7494346616339612896:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:27.670562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.935201Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547972, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 11894, MsgBus: 31292 2025-04-17T18:12:28.607066Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346626825178311:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:28.607123Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001419/r3tmp/tmp4gkYS7/pdisk_1.dat 2025-04-17T18:12:28.665745Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11894, node 2 2025-04-17T18:12:28.707992Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:28.708014Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:28.708019Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:28.708105Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:28.732646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:28.732727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:28.734238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31292 TClient is connected to server localhost:31292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:28.974161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.981671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.067363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.193437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.265805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.873622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346635415114699:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.873691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.912090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.935725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.960756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:30.986673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.013066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.038017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.065359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.107432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346639710082649:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.107486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.107499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346639710082654:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.110758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:31.119008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346639710082656:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:31.215436Z node 2 :TX_PROXY ERROR: Actor# [2:7494346639710082712:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:31.787333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.177458Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913552214, txId: 281474976715674] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 7726, MsgBus: 11830 2025-04-17T18:12:23.823733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346603495944062:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.823817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00141c/r3tmp/tmpkKRXen/pdisk_1.dat 2025-04-17T18:12:24.052040Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7726, node 1 2025-04-17T18:12:24.115422Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:24.115454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:24.115465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:24.115551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:24.153030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:24.153110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:24.155211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11830 TClient is connected to server localhost:11830 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:24.518291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.541113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.655865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.761571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.833410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.850800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346612085880426:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.850894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.034781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.056591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.080952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.104734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.128072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.156678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.196796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:26.270125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346616380848385:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.270213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.270281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346616380848390:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.273245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:26.280637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346616380848392:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:26.341021Z node 1 :TX_PROXY ERROR: Actor# [1:7494346616380848446:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:27.522198Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547552, txId: 281474976710672] shutting down 2025-04-17T18:12:27.810405Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547846, txId: 281474976710674] shutting down 2025-04-17T18:12:28.307182Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913548343, txId: 281474976710678] shutting down Trying to start YDB, gRPC: 3301, MsgBus: 25246 2025-04-17T18:12:28.834571Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346624601474502:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:28.834618Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00141c/r3tmp/tmpaHkSSl/pdisk_1.dat 2025-04-17T18:12:28.914555Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3301, node 2 2025-04-17T18:12:28.957720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:28.957787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:28.959358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:28.971163Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:28.971183Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:28.971191Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:28.971317Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25246 TClient is connected to server localhost:25246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:29.293346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.301289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.370659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.500800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.550717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.999893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346633191410863:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.999978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.060998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.085812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.108895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.130599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.194092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.259251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.288586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.333048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346637486378821:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.333149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.333209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346637486378826:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.336080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:31.343069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346637486378828:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:31.407147Z node 2 :TX_PROXY ERROR: Actor# [2:7494346637486378880:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:32.177057Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913552214, txId: 281474976715672] shutting down 2025-04-17T18:12:32.422961Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913552438, txId: 281474976715674] shutting down >> KqpYql::InsertCVList-useSink [GOOD] >> KqpScripting::ScriptValidate >> KqpScripting::EndOfQueryCommit >> KqpYql::BinaryJsonOffsetNormal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 61391, MsgBus: 18252 2025-04-17T18:12:29.523015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346631380955805:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:29.523136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001405/r3tmp/tmpJM3fFQ/pdisk_1.dat 2025-04-17T18:12:29.757541Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61391, node 1 2025-04-17T18:12:29.808942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:29.808963Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:29.808969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:29.809076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:29.876607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:29.876763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:29.878242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18252 TClient is connected to server localhost:18252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:30.196585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:31.585381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346639970891058:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.585464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.751894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.861257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346639970891160:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.861322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346639970891165:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.861359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.864696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:12:31.872443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346639970891167:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:12:31.957904Z node 1 :TX_PROXY ERROR: Actor# [1:7494346639970891218:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:32.227740Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346644265858612:2366], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2025-04-17T18:12:32.227949Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2FkMzc0YzQtYjBhODFmNC00NGVhNWM3NC0xYTllNDU5Nw==, ActorId: [1:7494346639970891040:2328], ActorState: ExecuteState, TraceId: 01js2f7mty40tsd72dbfq8vf09, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> KqpScripting::SystemTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 13771, MsgBus: 21719 2025-04-17T18:12:26.106631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346616435827353:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:26.106757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001414/r3tmp/tmpoK2RoU/pdisk_1.dat 2025-04-17T18:12:26.329972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13771, node 1 2025-04-17T18:12:26.392642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:26.392665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:26.392675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:26.392816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:26.430878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:26.430958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:26.432639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21719 TClient is connected to server localhost:21719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:26.750896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:26.774218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:26.886744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.017199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.088945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.128743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346625025763730:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.128893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.348388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.368740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.387844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.408644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.430862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.482020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.504486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.542654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346625025764388:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.542712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.542754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346625025764393:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.545619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:28.552833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346625025764395:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:28.626882Z node 1 :TX_PROXY ERROR: Actor# [1:7494346625025764450:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:29.282423Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:12:29.291486Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:12:29.291631Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:12:29.291824Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346629320732097:2516], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7494346629320732079:2516]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7494346629320732097:2516].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:12:29.292370Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346629320732090:2516], SessionActorId: [1:7494346629320732079:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494346629320732079:2516]. isRollback=0 2025-04-17T18:12:29.292556Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjNhNzA3ZC00OTJkMTkyMy03Nzg3NzRiYi1jZDBmM2EzYQ==, ActorId: [1:7494346629320732079:2516], ActorState: ExecuteState, TraceId: 01js2f7hwk7471h5d0jbx05qfk, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494346629320732091:2516] from: [1:7494346629320732090:2516] 2025-04-17T18:12:29.292638Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346629320732091:2516] TxId: 281474976710672. Ctx: { TraceId: 01js2f7hwk7471h5d0jbx05qfk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNhNzA3ZC00OTJkMTkyMy03Nzg3NzRiYi1jZDBmM2EzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:12:29.292803Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjNhNzA3ZC00OTJkMTkyMy03Nzg3NzRiYi1jZDBmM2EzYQ==, ActorId: [1:7494346629320732079:2516], ActorState: ExecuteState, TraceId: 01js2f7hwk7471h5d0jbx05qfk, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 17432, MsgBus: 61550 2025-04-17T18:12:29.905950Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346629985707386:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:29.906011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001414/r3tmp/tmpnnfjTo/pdisk_1.dat 2025-04-17T18:12:29.966436Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17432, node 2 2025-04-17T18:12:30.027113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:30.027178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:30.028733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:30.029785Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:30.029809Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:30.029817Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:30.029926Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61550 TClient is connected to server localhost:61550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:30.341745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.349424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.419217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:30.577382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.632476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:32.122186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346642870611043:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.122277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.156758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.178432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.202271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.225979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.247824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.271298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.296915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.373050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346642870611703:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.373119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.373160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346642870611708:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.376058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:32.383276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346642870611710:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:32.435925Z node 2 :TX_PROXY ERROR: Actor# [2:7494346642870611765:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:33.523965Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346647165579424:2516], TxId: 281474976715673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2f7nt45p8g59bymjsbvfg6. SessionId : ydb://session/3?node_id=2&id=Y2YyNDRlMzAtNDgyODM0YzEtNjFlYjY3NjYtN2Q4NjQzZjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-17T18:12:33.524258Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346647165579426:2517], TxId: 281474976715673, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2f7nt45p8g59bymjsbvfg6. SessionId : ydb://session/3?node_id=2&id=Y2YyNDRlMzAtNDgyODM0YzEtNjFlYjY3NjYtN2Q4NjQzZjI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346647165579421:2507], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:33.524584Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2YyNDRlMzAtNDgyODM0YzEtNjFlYjY3NjYtN2Q4NjQzZjI=, ActorId: [2:7494346647165579368:2507], ActorState: ExecuteState, TraceId: 01js2f7nt45p8g59bymjsbvfg6, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> KqpYql::UpdatePk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2025-04-17T18:10:16.843136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:16.843259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:16.843381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c25/r3tmp/tmphjShDA/pdisk_1.dat 2025-04-17T18:10:17.188999Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30209, node 1 2025-04-17T18:10:17.441836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:17.441887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:17.441923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:17.442645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:17.447871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:17.559126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:17.559242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:17.579392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12168 2025-04-17T18:10:18.193192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:21.152994Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:21.186512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:21.186585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:21.223327Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:21.225509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:21.444237Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.444625Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.445119Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.445408Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.445480Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.445555Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.445646Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.445711Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.445778Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:21.605594Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:21.605701Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:21.619075Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:21.780130Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:21.836544Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:21.836652Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:21.872103Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:21.874050Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:21.874339Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:21.874403Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:21.874474Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:21.874546Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:21.874610Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:21.874673Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:21.876094Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:21.914734Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:21.914834Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:21.922603Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:21.927879Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:21.928023Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:21.941660Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:21.961411Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:21.961487Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:21.961557Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:21.971417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:21.980190Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:21.980337Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:22.175057Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:22.351387Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:22.460917Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:23.135142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:23.135265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:23.149366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:23.254055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:23.254333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:23.254638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:23.254789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:23.254936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:23.255076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:23.255209Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:23.255404Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:23.255549Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:23.255676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:23.255795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:23.255926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:23.284558Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:23.284644Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... nt = 0, need schemeshards count = 1 2025-04-17T18:12:28.052271Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:12:28.052427Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7131:5238], server id = [2:7132:5239], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:28.052476Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:28.052537Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7129:5236], StatRequests.size() = 1 2025-04-17T18:12:28.163841Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTM5Nzk4YzAtZjZkMWNlNDctODA1OTJiYmMtNzBlM2Q4MDM=, TxId: 2025-04-17T18:12:28.163904Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTM5Nzk4YzAtZjZkMWNlNDctODA1OTJiYmMtNzBlM2Q4MDM=, TxId: 2025-04-17T18:12:28.164354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:28.177489Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:28.177540Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:28.219827Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:12:28.219902Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:12:28.304948Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7131:5238], schemeshard count = 1 2025-04-17T18:12:29.340644Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:29.340746Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:29.343634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:29.359484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:29.359921Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:29.359975Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-17T18:12:29.372407Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:30.675722Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:30.675770Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:30.675793Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:12:30.675823Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-04-17T18:12:30.675865Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:30.676488Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:30.689395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-17T18:12:30.689483Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:30.689865Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:30.689945Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:30.690656Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-17T18:12:30.690757Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-17T18:12:30.691357Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:30.704448Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:12:30.704528Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:30.704883Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:12:30.705403Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7256:5313], server id = [2:7257:5314], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:30.705515Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7256:5313], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:30.711322Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:30.711401Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:30.711594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:30.711749Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:30.711948Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7256:5313], server id = [2:7257:5314], tablet id = 72075186224037899 2025-04-17T18:12:30.711977Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:30.712126Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:12:30.714874Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:30.745990Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7277:5333]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:30.746165Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:30.746231Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7277:5333], StatRequests.size() = 1 2025-04-17T18:12:30.855923Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmJjMjY2YjMtMmYyMjE1MzItODg3NTJmZDEtOTRmNzMyMmE=, TxId: 2025-04-17T18:12:30.855985Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmJjMjY2YjMtMmYyMjE1MzItODg3NTJmZDEtOTRmNzMyMmE=, TxId: 2025-04-17T18:12:30.856418Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:30.873399Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:30.873457Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:31.427775Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:12:31.427864Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:32.150279Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:32.150345Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:32.150400Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:12:33.364127Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:33.364315Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:33.385785Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:33.385906Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:33.385943Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:33.386522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:33.399882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:33.400237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:33.400293Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:33.400660Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:33.424623Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:33.424851Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:33.425335Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7371:5383], server id = [2:7372:5384], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:33.425427Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7371:5383], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:33.426721Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:33.426807Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:33.427006Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:33.427169Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:33.427339Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7371:5383], server id = [2:7372:5384], tablet id = 72075186224037899 2025-04-17T18:12:33.427370Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:33.427523Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:33.430283Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:33.450906Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWZjOWU1OTYtNTI5MzgzMTMtMzU1OWI3YjItNWE0ZjZiMWY=, TxId: 2025-04-17T18:12:33.450970Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWZjOWU1OTYtNTI5MzgzMTMtMzU1OWI3YjItNWE0ZjZiMWY=, TxId: 2025-04-17T18:12:33.451453Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:33.465167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:33.465234Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2802:3223] >> KqpYql::TableConcat >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkTx::DeferredEffects >> KqpScripting::ScriptingCreateAndAlterTableTest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 62859, MsgBus: 7455 2025-04-17T18:12:26.106149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346617622890397:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:26.106253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140e/r3tmp/tmpw0QvN6/pdisk_1.dat 2025-04-17T18:12:26.334236Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62859, node 1 2025-04-17T18:12:26.391364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:26.391383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:26.391387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:26.391500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:26.422538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:26.422612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:26.424222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7455 TClient is connected to server localhost:7455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:26.757544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:26.772493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:26.883185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:26.985855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:27.052923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:28.217916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346626212826794:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.218016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.414216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.436381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.459603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.481136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.501586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.524946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.553605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:28.626806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346626212827455:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.626889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.626981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346626212827460:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.630206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:28.638647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346626212827462:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:28.739124Z node 1 :TX_PROXY ERROR: Actor# [1:7494346626212827519:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:29.409255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63799, MsgBus: 16799 2025-04-17T18:12:30.098370Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346632371103265:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:30.098453Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140e/r3tmp/tmpDlTTYr/pdisk_1.dat 2025-04-17T18:12:30.187504Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63799, node 2 2025-04-17T18:12:30.228344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:30.228422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:30.229677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:30.233442Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:30.233457Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:30.233474Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:30.233561Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16799 TClient is connected to server localhost:16799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:30.507319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:30.515217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.583910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.709391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.779305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:32.367633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346640961039633:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.367699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.402230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.425452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.449190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.472915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.497120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.525794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.571829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.617089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346640961040288:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.617191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.617224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346640961040293:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.619808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:32.626742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346640961040295:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:32.722712Z node 2 :TX_PROXY ERROR: Actor# [2:7494346640961040350:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:33.689398Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913553679, txId: 281474976715672] shutting down 2025-04-17T18:12:33.773981Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913553766, txId: 281474976715674] shutting down 2025-04-17T18:12:34.468875Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554496, txId: 281474976715676] shutting down >> KqpYql::TableRange [GOOD] >> KqpPragma::ResetPerQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: 2025-04-17T18:10:14.857622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:14.857762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:14.857910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c53/r3tmp/tmpuX17xW/pdisk_1.dat 2025-04-17T18:10:15.379682Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23023, node 1 2025-04-17T18:10:15.917709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:15.917764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:15.917795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:15.918570Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:15.930654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.031219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.031354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.048548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23146 2025-04-17T18:10:16.622028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.897204Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:19.939213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.939323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.979905Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:19.987143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.248021Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.248516Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.249166Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.249518Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.249604Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.249693Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.249769Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.249845Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.249929Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.419731Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:20.419840Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.435778Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.571253Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:20.618056Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:20.618200Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:20.652692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:20.654339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:20.654542Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:20.654596Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:20.654668Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:20.654730Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:20.654780Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:20.654823Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:20.656047Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:20.695383Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.695503Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.703529Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:20.707792Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:20.707935Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:20.720178Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:20.741068Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:20.741133Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:20.741211Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:20.752246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:20.807053Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:20.807214Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:20.996315Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:21.177113Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:21.234597Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:22.033702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.033850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.108842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:22.233649Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:22.233862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:22.234163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:22.234326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:22.234475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:22.234611Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:22.234723Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:22.234898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:22.235028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:22.235193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:22.235315Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:22.235420Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:22.291669Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:22.291757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ble, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-17T18:10:24.100808Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000014s ... waiting for TEvAnalyzeTableResponse 2025-04-17T18:10:25.973677Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2806:3166] 2025-04-17T18:10:25.976182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2804:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:10:25.976256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=operationId 2025-04-17T18:10:25.976332Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=operationId , PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:10:26.003457Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:10:32.683357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:32.683456Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:33.671756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:10:33.671850Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:25.582944Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:12:25.583022Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:25.583059Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:25.583090Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:12:27.063182Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T18:12:27.063252Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 239.000000s, at schemeshard: 72075186224037897 2025-04-17T18:12:27.063515Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-17T18:12:27.076077Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:12:28.200504Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:28.200592Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:28.200641Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:12:28.200699Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 
2025-04-17T18:12:28.200744Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:28.201200Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:28.205430Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:12:28.209797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6987:5166], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.209899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6997:5171], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.209979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:28.224840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:12:28.274516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7001:5174], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:12:28.488976Z node 2 :TX_PROXY ERROR: Actor# [2:7094:5220] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:28.549621Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7123:5235]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:28.549801Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:12:28.549866Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7125:5237] 2025-04-17T18:12:28.549921Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7125:5237] 2025-04-17T18:12:28.550127Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7126:5238] 2025-04-17T18:12:28.550288Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7125:5237], server id = [2:7126:5238], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:28.550339Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7126:5238], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:12:28.550383Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:12:28.550471Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:28.550527Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7123:5235], StatRequests.size() = 1 2025-04-17T18:12:28.657781Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTJjOTI3ODAtY2E2ZmY3YTMtOGMyNThlYTMtNzJhZDlmZjk=, TxId: 2025-04-17T18:12:28.657839Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTJjOTI3ODAtY2E2ZmY3YTMtOGMyNThlYTMtNzJhZDlmZjk=, TxId: 2025-04-17T18:12:28.658248Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:28.671367Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:28.671419Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:28.714614Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:12:28.714681Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:12:28.778495Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7125:5237], schemeshard count = 1 2025-04-17T18:12:29.754639Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:29.754731Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:29.757512Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:29.773055Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:29.773554Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:29.773612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-17T18:12:29.786548Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:29.797386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-17T18:12:30.228851Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:30.228924Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:30.228966Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:12:30.229012Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:30.229063Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:30.232378Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:30.246297Z node 2 :STATISTICS ERROR: [72075186224037894] Delete long analyze operation, OperationId=operationId 2025-04-17T18:12:30.515725Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:30.515905Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:32.384131Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:12:32.384226Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:32.384284Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:12:32.384327Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:12:34.562601Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T18:12:34.562679Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 209.000000s, at schemeshard: 72075186224037897 2025-04-17T18:12:34.562869Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-17T18:12:34.703174Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:34.703371Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeDeadline::Complete. Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:2804:3224] 2025-04-17T18:12:34.703414Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:12:34.703686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:34.703723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:34.704196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 >> KqpYql::UpdateBadType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 16660, MsgBus: 10183 2025-04-17T18:12:25.014661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346610174134260:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:25.014818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001416/r3tmp/tmps9ZNSI/pdisk_1.dat 2025-04-17T18:12:25.245481Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16660, node 1 2025-04-17T18:12:25.310778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:25.310819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:25.310833Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:25.310955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:25.359113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:25.359226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:25.361038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10183 TClient is connected to server localhost:10183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:25.653082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.674150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.774896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:25.875660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.918168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:26.929213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346614469103335:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:26.929284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:27.129075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.152864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.177267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.200466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.222163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.286610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.328003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:27.365806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346618764071299:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:27.365873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:27.365909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346618764071304:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:27.368760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:27.375089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346618764071306:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:27.477798Z node 1 :TX_PROXY ERROR: Actor# [1:7494346618764071360:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:29.176026Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346623059038982:2510] 2025-04-17T18:12:29.176696Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346623059039055:2516] TxId: 281474976710673. Ctx: { TraceId: 01js2f7gwf4cmb5m1yytxpk443, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:12:29.176939Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=, ActorId: [1:7494346623059039001:2516], ActorState: ExecuteState, TraceId: 01js2f7gwf4cmb5m1yytxpk443, Create QueryResponse for error on request, msg: 2025-04-17T18:12:29.182402Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346623059039064:2523], TxId: 281474976710673, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=. CustomerSuppliedId : . TraceId : 01js2f7gwf4cmb5m1yytxpk443. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494346623059039055:2516], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:29.182786Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346623059039065:2524], TxId: 281474976710673, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=. TraceId : 01js2f7gwf4cmb5m1yytxpk443. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494346623059039055:2516], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:29.182928Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346623059039066:2525], TxId: 281474976710673, task: 6. Ctx: { CustomerSuppliedId : . TraceId : 01js2f7gwf4cmb5m1yytxpk443. SessionId : ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494346623059039055:2516], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:29.183042Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346623059039067:2526], TxId: 281474976710673, task: 7. Ctx: { TraceId : 01js2f7gwf4cmb5m1yytxpk443. SessionId : ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494346623059039055:2516], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:29.183077Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913548406, txId: 281474976710672] shutting down 2025-04-17T18:12:29.183164Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346623059039068:2527], TxId: 281474976710673, task: 8. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=. TraceId : 01js2f7gwf4cmb5m1yytxpk443. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494346623059039055:2516], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:29.183263Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346623059039069:2528], TxId: 281474976710673, task: 9. Ctx: { TraceId : 01js2f7gwf4cmb5m1yytxpk443. SessionId : ydb://session/3?node_id=1&id=YjliOGRkYjItNzY1NTAxZDYtMmU4OTQ5NGYtMWVkMTQxZWQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494346623059039055:2516], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:29.183313Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346623059039060:2520], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2f7gwf4cmb5m1yytxpk443. SessionId : ... schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:31.222620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:31.339633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:31.408779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:32.677936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346641951761696:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.678000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.713453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.742106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.764820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.787262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.808625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.830577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.855775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.898967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346641951762351:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.899019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.899114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346641951762356:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.901899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:32.910634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346641951762358:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:32.982408Z node 2 :TX_PROXY ERROR: Actor# [2:7494346641951762413:3535] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:33.580634Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730024:2508] 2025-04-17T18:12:33.598427Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730030:2511] 2025-04-17T18:12:33.613166Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730046:2517] 2025-04-17T18:12:33.624106Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730053:2520] 2025-04-17T18:12:33.642099Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730068:2525] 2025-04-17T18:12:33.676638Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730078:2529] 2025-04-17T18:12:33.692379Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730088:2534] 2025-04-17T18:12:33.714101Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730100:2538] 2025-04-17T18:12:33.745403Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730122:2546] 2025-04-17T18:12:33.777222Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730153:2553] 2025-04-17T18:12:33.813229Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730168:2559] 2025-04-17T18:12:33.852013Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730189:2567] 2025-04-17T18:12:33.893934Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730202:2571] 2025-04-17T18:12:33.939242Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730230:2577] 2025-04-17T18:12:33.986914Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730258:2588] 2025-04-17T18:12:34.038142Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346646246730275:2594] 2025-04-17T18:12:34.092183Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697613:2600] 2025-04-17T18:12:34.155808Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697665:2609] 2025-04-17T18:12:34.211417Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697677:2613] 2025-04-17T18:12:34.274321Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697707:2624] 2025-04-17T18:12:34.340257Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697737:2630] 2025-04-17T18:12:34.409103Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697760:2639] 2025-04-17T18:12:34.481140Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697790:2645] 2025-04-17T18:12:34.549652Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554587, txId: 281474976715673] shutting down 2025-04-17T18:12:34.633064Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697923:2669] 2025-04-17T18:12:34.714146Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697948:2678] 2025-04-17T18:12:34.799166Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541697979:2689] 2025-04-17T18:12:34.886575Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346650541698038:2703] TxId: 281474976715676. 
Ctx: { TraceId: 01js2f7qbkb5dbcrxnar7zxjbt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTZjYjIyODktNGI5NjNlZTktZTllZWNmM2ItZTVkZDI3NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:12:34.886629Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541698001:2698] 2025-04-17T18:12:34.886787Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTZjYjIyODktNGI5NjNlZTktZTllZWNmM2ItZTVkZDI3NDM=, ActorId: [2:7494346650541698011:2703], ActorState: ExecuteState, TraceId: 01js2f7qbkb5dbcrxnar7zxjbt, Create QueryResponse for error on request, msg: 2025-04-17T18:12:34.887142Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554923, txId: 281474976715675] shutting down 2025-04-17T18:12:34.887663Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346650541698048:2708], TxId: 281474976715676, task: 5. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTZjYjIyODktNGI5NjNlZTktZTllZWNmM2ItZTVkZDI3NDM=. TraceId : 01js2f7qbkb5dbcrxnar7zxjbt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346650541698038:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:34.888484Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346650541698043:2704], TxId: 281474976715676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTZjYjIyODktNGI5NjNlZTktZTllZWNmM2ItZTVkZDI3NDM=. TraceId : 01js2f7qbkb5dbcrxnar7zxjbt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346650541698038:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:34.888827Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346650541698045:2705], TxId: 281474976715676, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTZjYjIyODktNGI5NjNlZTktZTllZWNmM2ItZTVkZDI3NDM=. TraceId : 01js2f7qbkb5dbcrxnar7zxjbt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346650541698038:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:34.889468Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346650541698046:2706], TxId: 281474976715676, task: 3. Ctx: { TraceId : 01js2f7qbkb5dbcrxnar7zxjbt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTZjYjIyODktNGI5NjNlZTktZTllZWNmM2ItZTVkZDI3NDM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346650541698038:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:34.889793Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346650541698047:2707], TxId: 281474976715676, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTZjYjIyODktNGI5NjNlZTktZTllZWNmM2ItZTVkZDI3NDM=. TraceId : 01js2f7qbkb5dbcrxnar7zxjbt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346650541698038:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:34.900464Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715676. Snapshot is not valid, tabletId: 72075186224037894, step: 1744913554923 2025-04-17T18:12:34.901656Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715676. Snapshot is not valid, tabletId: 72075186224037895, step: 1744913554923 2025-04-17T18:12:34.977510Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346650541698088:2713] 2025-04-17T18:12:35.047369Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555084, txId: 281474976715678] shutting down 2025-04-17T18:12:35.146943Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346654836665510:2737] 2025-04-17T18:12:35.246448Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7494346654836665531:2746] 2025-04-17T18:12:35.342063Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555378, txId: 281474976715680] shutting down >> AnalyzeColumnshard::AnalyzeStatus [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 21656, MsgBus: 16380 2025-04-17T18:12:32.393619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346640485992545:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:32.393679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001401/r3tmp/tmpbHBzyS/pdisk_1.dat 2025-04-17T18:12:32.637481Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21656, node 1 2025-04-17T18:12:32.700552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:32.700574Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:32.700579Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:32.700702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:32.743622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:32.743699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:32.745421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16380 TClient is connected to server localhost:16380 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:33.070189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:33.088276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:33.201033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:33.336775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:33.388611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.548443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346649075928909:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:34.548525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:34.797422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:34.824205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:34.848902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:34.874038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:34.900147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:34.929149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:34.957283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.001665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346653370896859:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.001738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.001792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346653370896864:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.004121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:35.010247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346653370896866:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:35.073436Z node 1 :TX_PROXY ERROR: Actor# [1:7494346653370896920:3536] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. >> KqpSinkTx::OlapInteractive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] Test command err: 2025-04-17T18:10:14.946748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:14.946908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:14.947064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c47/r3tmp/tmpWCpZ3m/pdisk_1.dat 2025-04-17T18:10:15.378118Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20013, node 1 2025-04-17T18:10:15.919234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:15.919289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:15.919324Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:15.919835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:15.933815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.031425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.031548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.049135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11879 2025-04-17T18:10:16.645577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.926557Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:19.981809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.981921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.023750Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:20.026270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.282219Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.282687Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.283260Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.283604Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.283696Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.283779Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.283861Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.283932Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.284017Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.452454Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:20.452577Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.467894Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.618537Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:20.666143Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:20.666282Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:20.701862Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:20.718289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:20.718530Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:20.718615Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:20.718670Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:20.718720Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:20.718785Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:20.718842Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:20.720372Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:20.758365Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.758476Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.766973Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:20.772310Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:20.772465Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:20.788072Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:20.809508Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:20.809574Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:20.809652Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:20.822711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:20.830724Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:20.830885Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:21.050110Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:21.200209Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:21.258847Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:22.058359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.058530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.114027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:22.541379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:22.541665Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:22.542042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:22.542311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:22.542511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:22.542713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:22.542926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:22.543113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:22.543343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:22.543543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:22.543735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:22.543920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2391:2888];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:22.597402Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2892];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:22.597485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2892];tablet_id=72075186224037900;process= ... ATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8797:6638] 2025-04-17T18:12:33.010583Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8797:6638], schemeshard id = 72075186224037897 2025-04-17T18:12:33.023503Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:33.023566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:33.089660Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8801:6641] 2025-04-17T18:12:33.090481Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4084:3320] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:12:33.090564Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:4084:3320] 2025-04-17T18:12:33.090632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:12:33.695059Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:12:33.695139Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:33.705843Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-17T18:12:33.705893Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:34.179022Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:34.179126Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:34.179163Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:12:35.278529Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:35.278689Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:35.278751Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:35.279442Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:35.292768Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:35.293219Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:35.293302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:35.293707Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:35.306928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:35.307064Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-17T18:12:35.307936Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8882:6682], server id = [2:8887:6687], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:35.308048Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8882:6682], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.308248Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8883:6683], server id = [2:8888:6688], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:35.308294Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8883:6683], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.309251Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8884:6684], server id = [2:8889:6689], tablet id = 72075186224037901, status = OK 2025-04-17T18:12:35.309309Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8884:6684], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.310031Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8885:6685], server id = [2:8890:6690], tablet id = 72075186224037902, status = OK 2025-04-17T18:12:35.310095Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8885:6685], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.310902Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8886:6686], server id = [2:8891:6691], tablet id = 72075186224037903, status = OK 2025-04-17T18:12:35.310956Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8886:6686], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.311320Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:35.338669Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:35.339441Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8882:6682], server id = [2:8887:6687], tablet id = 72075186224037899 2025-04-17T18:12:35.339490Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.339874Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:12:35.340481Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8883:6683], server id = [2:8888:6688], tablet id = 72075186224037900 2025-04-17T18:12:35.340513Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.340824Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:12:35.340943Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8884:6684], server id = [2:8889:6689], tablet id = 72075186224037901 2025-04-17T18:12:35.340964Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.341200Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:12:35.341532Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8895:6695], server id = [2:8900:6700], tablet id = 72075186224037904, status = OK 2025-04-17T18:12:35.341619Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8895:6695], 
path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.341853Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8898:6698], server id = [2:8901:6701], tablet id = 72075186224037905, status = OK 2025-04-17T18:12:35.341906Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8898:6698], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.342699Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8885:6685], server id = [2:8890:6690], tablet id = 72075186224037902 2025-04-17T18:12:35.342729Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.342864Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8899:6699], server id = [2:8903:6703], tablet id = 72075186224037906, status = OK 2025-04-17T18:12:35.342916Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8899:6699], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.343691Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8886:6686], server id = [2:8891:6691], tablet id = 72075186224037903 2025-04-17T18:12:35.343719Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.343849Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8902:6702], server id = [2:8905:6705], tablet id = 72075186224037907, status = OK 2025-04-17T18:12:35.343897Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8902:6702], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.344612Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8904:6704], server id = [2:8906:6706], tablet id = 72075186224037908, status = OK 2025-04-17T18:12:35.344664Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8904:6704], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.345174Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:12:35.345828Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:12:35.346289Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8895:6695], server id = [2:8900:6700], tablet id = 72075186224037904 2025-04-17T18:12:35.346330Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.346586Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:12:35.346698Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8898:6698], server id = [2:8901:6701], tablet id = 72075186224037905 2025-04-17T18:12:35.346713Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.346817Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:12:35.346898Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:12:35.346926Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:35.347124Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8899:6699], server id = [2:8903:6703], tablet id = 72075186224037906 2025-04-17T18:12:35.347142Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.347245Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:35.347374Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 
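
The trace above shows the per-shard statistics protocol end to end: the earlier TTxAnalyze request asked for TYPE_COUNT_MIN_SKETCH statistics, the aggregator fanned TEvStatisticsRequest out to each column-shard tablet, and TTxAggregateStatisticsResponse folds the per-tablet answers together before the merged result is upserted into `.metadata/_statistics` just below. A count-min sketch suits this pattern because it merges by element-wise addition, so shards can be sampled independently and combined in any order. The following is a minimal illustrative sketch of that property in C++; it is not YDB's actual TCountMinSketch implementation, and every name in it is hypothetical.

```cpp
// Illustrative only: a minimal mergeable count-min sketch, approximating the
// kind of TYPE_COUNT_MIN_SKETCH statistic aggregated in the log above.
// This is NOT YDB's TCountMinSketch; all names here are hypothetical.
#include <algorithm>
#include <array>
#include <cstdint>
#include <cstdio>
#include <string_view>

class TMiniCountMinSketch {
    static constexpr size_t Depth = 4;   // independent hash rows
    static constexpr size_t Width = 256; // counters per row

    std::array<std::array<uint64_t, Width>, Depth> Counters{};

    // FNV-1a, reseeded per row so the rows act as independent hash functions.
    static uint64_t Hash(std::string_view key, uint64_t seed) {
        uint64_t h = 14695981039346656037ull ^ seed;
        for (unsigned char c : key) {
            h ^= c;
            h *= 1099511628211ull;
        }
        return h;
    }

public:
    // Record one occurrence of a value (one counter bumped per row).
    void Count(std::string_view key) {
        for (size_t row = 0; row < Depth; ++row) {
            Counters[row][Hash(key, row) % Width] += 1;
        }
    }

    // Estimate the frequency of a value; never underestimates.
    uint64_t Probe(std::string_view key) const {
        uint64_t best = UINT64_MAX;
        for (size_t row = 0; row < Depth; ++row) {
            best = std::min(best, Counters[row][Hash(key, row) % Width]);
        }
        return best;
    }

    // Merging is element-wise addition, which is why each shard can build its
    // own sketch and an aggregator can combine the responses in any order.
    void Merge(const TMiniCountMinSketch& other) {
        for (size_t row = 0; row < Depth; ++row) {
            for (size_t col = 0; col < Width; ++col) {
                Counters[row][col] += other.Counters[row][col];
            }
        }
    }
};

int main() {
    // Two "shards" observe the same key independently; the aggregator merges.
    TMiniCountMinSketch shard1, shard2;
    for (int i = 0; i < 3; ++i) shard1.Count("42");
    for (int i = 0; i < 5; ++i) shard2.Count("42");
    shard1.Merge(shard2); // aggregator-side combine
    std::printf("~count(42) = %llu\n",
                (unsigned long long)shard1.Probe("42")); // prints 8
}
```
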
2025-04-17T18:12:35.347605Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:35.349625Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8902:6702], server id = [2:8905:6705], tablet id = 72075186224037907 2025-04-17T18:12:35.349649Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.349896Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8904:6704], server id = [2:8906:6706], tablet id = 72075186224037908 2025-04-17T18:12:35.349912Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.350097Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:35.369253Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODYxZGIxNTEtZmI5Y2RhMjAtOWI4OTQ0ZGYtMzZmZjlmMjE=, TxId: 2025-04-17T18:12:35.369314Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODYxZGIxNTEtZmI5Y2RhMjAtOWI4OTQ0ZGYtMzZmZjlmMjE=, TxId: 2025-04-17T18:12:35.369980Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:35.382824Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:35.382886Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:4084:3320] >> KqpYql::EvaluateExpr2 >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExprYsonAndType >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::Discard >> KqpScripting::ScriptExplain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2025-04-17T18:10:17.954766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:17.954918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:17.955100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c1b/r3tmp/tmpV3EjSU/pdisk_1.dat 2025-04-17T18:10:18.336098Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7153, node 1 2025-04-17T18:10:18.594898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:18.594967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:18.595002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:18.595720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:18.597913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:18.706682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:18.706832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:18.735825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2402 2025-04-17T18:10:19.326126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.410667Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:22.446376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:22.446489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:22.484498Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:22.490851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:22.734045Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.734627Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.735139Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.735265Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.735491Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.735566Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.735634Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.735747Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.735813Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:22.906023Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:22.906165Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:22.920138Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:23.067241Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:23.104560Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:23.104668Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:23.144237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:23.145016Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:23.145237Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:23.145295Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:23.145341Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:23.145394Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:23.145450Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:23.145503Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:23.146323Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:23.179412Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:23.179510Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:23.185327Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2607] 2025-04-17T18:10:23.189552Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2616] 2025-04-17T18:10:23.189915Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2616], schemeshard id = 72075186224037897 2025-04-17T18:10:23.196527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:23.217978Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:23.218039Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:23.218105Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:23.232737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:23.239731Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:23.239870Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:23.440565Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:23.593108Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:23.651231Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:24.404574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2242:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:24.404705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:24.420021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:24.530951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:24.531224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:24.531554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:24.531735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:24.531883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:24.532016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:24.532157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:24.532294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:24.532436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:24.532571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:24.532718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:24.532855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:24.566206Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:24.566319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descri ... ase 2025-04-17T18:12:33.101251Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:12:33.103950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6986:5165], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:33.104034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6996:5170], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:33.104097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:33.114357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:12:33.158367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7000:5173], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:12:33.338674Z node 2 :TX_PROXY ERROR: Actor# [2:7093:5219] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:33.401942Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7122:5234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:33.402093Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:12:33.402163Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7124:5236] 2025-04-17T18:12:33.402233Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7124:5236] 2025-04-17T18:12:33.402455Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7125:5237] 2025-04-17T18:12:33.402543Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7125:5237], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:12:33.402584Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:12:33.402685Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7124:5236], server id = [2:7125:5237], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:33.402731Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:33.402780Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7122:5234], StatRequests.size() = 1 2025-04-17T18:12:33.512100Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmZmYTAyNTctZTY4NGRmMDgtYjEwNTNjNi05MDM1ZDYxZg==, TxId: 2025-04-17T18:12:33.512170Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmZmYTAyNTctZTY4NGRmMDgtYjEwNTNjNi05MDM1ZDYxZg==, TxId: 2025-04-17T18:12:33.512623Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:33.526357Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:33.526412Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:33.569878Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:12:33.569926Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:12:33.623039Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7124:5236], schemeshard count = 1 2025-04-17T18:12:34.607320Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:34.607430Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:34.610692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:34.626799Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:34.627268Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:34.627335Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-17T18:12:34.640649Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:34.651647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-17T18:12:34.653628Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7206:5285] 2025-04-17T18:12:34.654154Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_ENQUEUED 2025-04-17T18:12:34.655279Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7208:5286]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 4], [OwnerId: 72075186224037897, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:05Z
NavigateType: Analyze
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 1970-01-01T00:00:00Z
TraversalPathId: 
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
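[editor's note] The dump above is the StatisticsAggregator's internal state as printed by the test harness between the STATUS_ENQUEUED and STATUS_NO_OPERATION checks: one "Key: value" counter per line, with indented detail lines (schemeshard ids, path ids) under their parent keys. A minimal sketch of a parser for this dump, useful if custom CI tooling wants to assert on individual counters — this helper is hypothetical and not part of the YDB test framework:

# Hypothetical helper: turn the "---- StatisticsAggregator ----" dump
# into a dict keyed by counter name. Indented detail lines (bare tablet
# ids, path lists) carry no "Key:" prefix and are skipped here.
def parse_aggregator_dump(text: str) -> dict:
    state = {}
    for line in text.splitlines():
        line = line.strip()
        if line.startswith("----") or ":" not in line:
            continue  # banner line or indented detail line
        key, _, value = line.partition(":")  # split on the first colon only
        state[key.strip()] = value.strip()
    return state

# Example against a fragment of the dump above:
dump = """\
FastCounter: 3
GlobalTraversalRound: 1
TraversalRound: 0"""
assert parse_aggregator_dump(dump)["FastCounter"] == "3"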
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-17T18:12:34.656365Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-17T18:12:34.656495Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-17T18:12:34.670207Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:12:35.843149Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:35.843284Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:35.843378Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:35.843941Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:35.857151Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:35.857527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:35.857605Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:35.858501Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:35.871795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:35.871999Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:12:35.872599Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7258:5315], server id = [2:7259:5316], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:35.872716Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7258:5315], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.876688Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:35.876797Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:35.877034Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:35.877217Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:35.877391Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7258:5315], server id = [2:7259:5316], tablet id = 72075186224037899 2025-04-17T18:12:35.877428Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.877596Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:12:35.881032Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:35.917933Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7279:5335]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:35.918246Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:35.918299Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7279:5335], StatRequests.size() = 1 2025-04-17T18:12:36.033016Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2MwMjdhMTctYmYyYzdlMmItM2U3ZWI3NzQtZTViZGIyM2E=, TxId: 2025-04-17T18:12:36.033078Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2MwMjdhMTctYmYyYzdlMmItM2U3ZWI3NzQtZTViZGIyM2E=, TxId: 2025-04-17T18:12:36.033452Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:36.047016Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:36.047095Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:950:2754] 2025-04-17T18:12:36.048338Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7296:5343] 2025-04-17T18:12:36.048902Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION >> KqpYql::InsertCV+useSink >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> KqpYql::RefSelect >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] >> KqpScripting::StreamScanQuery >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 23485, MsgBus: 28638 2025-04-17T18:12:29.181738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346630327936746:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:29.181851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140b/r3tmp/tmp1Nuo7e/pdisk_1.dat 2025-04-17T18:12:29.421973Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23485, node 1 2025-04-17T18:12:29.469781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:29.469806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:29.469811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:29.469924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:29.518442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:29.518564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:29.520119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28638 TClient is connected to server localhost:28638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:29.893788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
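[editor's note] The recurring sequence in these test logs — TPoolFetcherActor reporting NOT_FOUND for the default resource pool, TPoolCreatorActor scheduling a retry with "Transaction ... completed, doublechecking", and TX_PROXY later answering "path exist, request accepts it" — is the expected on-demand creation of the default workload-manager pool: the first query finds no pool, the service creates it, and concurrent creators tolerate the already-existing path. A minimal client-side sketch of the same fetch-or-create idiom, with hypothetical callables standing in for the actual actor messages:

import time

def ensure_default_pool(fetch_pool, create_pool, attempts: int = 5):
    """Fetch-or-create with tolerance for a concurrent creator, mirroring
    the TPoolFetcherActor/TPoolCreatorActor exchange seen in the log.
    fetch_pool() returning None models the NOT_FOUND reply."""
    for attempt in range(attempts):
        pool = fetch_pool()
        if pool is not None:
            return pool
        try:
            create_pool()          # another session may win the race
        except FileExistsError:
            pass                   # "path exist, request accepts it"
        time.sleep(0.1 * (attempt + 1))  # scheduled retry, as in the log
    raise TimeoutError("default pool did not appear")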
2025-04-17T18:12:29.910432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.010561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.127690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.180575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:31.207024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638917873123:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.207113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.447412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.468569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.488053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.508157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.529767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.552624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.581670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.653913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638917873782:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.653978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638917873787:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.653982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.656567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:31.662552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346638917873789:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:31.722405Z node 1 :TX_PROXY ERROR: Actor# [1:7494346638917873844:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:32.820829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913552851, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 7761, MsgBus: 3820 2025-04-17T18:12:33.407564Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346646366768576:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:33.407622Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140b/r3tmp/tmpO5DdPa/pdisk_1.dat 2025-04-17T18:12:33.483527Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7761, node 2 2025-04-17T18:12:33.530956Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:33.530990Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:33.530999Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:33.531106Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:33.538650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:33.538731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:33.540193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3820 TClient is connected to server localhost:3820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:33.930262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:33.943918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.010850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.111646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.184695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.673341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346654956704937:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.673422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.695820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.722724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.745775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.767567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.790155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.817282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.844603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:35.896662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346654956705593:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.896732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346654956705598:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.896733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.899704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:35.907853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346654956705600:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:35.990908Z node 2 :TX_PROXY ERROR: Actor# [2:7494346654956705656:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:36.839739Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556876, txId: 281474976715672] shutting down 2025-04-17T18:12:37.014558Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557023, txId: 281474976715674] shutting down 2025-04-17T18:12:37.169193Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557198, txId: 281474976715676] shutting down >> KqpScripting::QueryStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 21893, MsgBus: 10809 2025-04-17T18:12:28.726925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346625786167259:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:28.727041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140d/r3tmp/tmpovSl5B/pdisk_1.dat 2025-04-17T18:12:28.931130Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21893, node 1 2025-04-17T18:12:28.994112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:28.994134Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:28.994145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:28.994291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:29.047792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:29.047865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:29.049482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10809 TClient is connected to server localhost:10809 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:29.359025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.383662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.486805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.613616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:29.683690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.791774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346634376103631:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.791850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:30.991599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.013625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.033642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.052149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.072373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.095530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.132259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:31.167212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638671071583:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.167273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.167341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346638671071588:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:31.169570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:31.176573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346638671071590:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:31.274510Z node 1 :TX_PROXY ERROR: Actor# [1:7494346638671071647:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:31.838629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6897, MsgBus: 28589 2025-04-17T18:12:33.550736Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346644775611514:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:33.550875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00140d/r3tmp/tmptDlp4H/pdisk_1.dat 2025-04-17T18:12:33.624830Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6897, node 2 2025-04-17T18:12:33.674695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:33.674793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:33.676419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:33.680547Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:33.680567Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:33.680571Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:33.680643Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28589 TClient is connected to server localhost:28589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:33.994052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:34.010878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.065569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.244870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.316906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.996463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346653365547876:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.996525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.065401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.091781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.114858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.138013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.161576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.191042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.219163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.267685Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346657660515824:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.267752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346657660515829:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.267752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.270261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:36.276888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346657660515831:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:36.360669Z node 2 :TX_PROXY ERROR: Actor# [2:7494346657660515887:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:37.611808Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494346661955483581:2517], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:12:37.612089Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmQ3NjcyYTUtMzU2YzI3ZWMtZjk3ZDE4ZjItYzg0NjMzZg==, ActorId: [2:7494346661955483579:2516], ActorState: ExecuteState, TraceId: 01js2f7t2z6d3cr01pm2e2wswm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 16485, MsgBus: 22955 2025-04-17T18:12:23.198259Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346602238372784:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:23.199296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00143e/r3tmp/tmp7sTqSY/pdisk_1.dat 2025-04-17T18:12:23.462976Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16485, node 1 2025-04-17T18:12:23.552667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:23.552743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:23.554409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:23.587295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:23.587319Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:23.587326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:23.587456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22955 TClient is connected to server localhost:22955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:24.068638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:24.099188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.202895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.315474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:24.385894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:25.510380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346610828309147:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.510471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.708946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.730294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.749327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.770408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.789852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.811960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.851090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:25.891648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346610828309800:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.891691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.891775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346610828309805:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:25.894937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:25.901842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346610828309807:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:25.999987Z node 1 :TX_PROXY ERROR: Actor# [1:7494346610828309864:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:26.938451Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913546915, txId: 281474976715674] shutting down 2025-04-17T18:12:26.938896Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913546915, txId: 281474976715672] shutting down 2025-04-17T18:12:26.943952Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913546915, txId: 281474976715673] shutting down 2025-04-17T18:12:27.087842Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547041, txId: 281474976715679] shutting down 2025-04-17T18:12:27.088248Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547041, txId: 281474976715678] shutting down 2025-04-17T18:12:27.126980Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547132, txId: 281474976715682] shutting down 2025-04-17T18:12:27.127963Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547132, txId: 281474976715683] shutting down 2025-04-17T18:12:27.246126Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547251, txId: 281474976715686] shutting down 2025-04-17T18:12:27.278305Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547251, txId: 281474976715687] shutting down 2025-04-17T18:12:27.316989Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547335, txId: 281474976715690] shutting down 2025-04-17T18:12:27.318510Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547335, txId: 281474976715691] shutting down 2025-04-17T18:12:27.400623Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547433, txId: 281474976715695] shutting down 2025-04-17T18:12:27.400750Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547433, txId: 281474976715694] shutting down 2025-04-17T18:12:27.576990Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547601, txId: 281474976715698] shutting down 2025-04-17T18:12:27.581458Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547608, txId: 281474976715699] shutting down 2025-04-17T18:12:27.735415Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547734, txId: 281474976715703] shutting down 2025-04-17T18:12:27.752746Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547734, txId: 281474976715702] shutting down 2025-04-17T18:12:27.878640Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1744913547902, txId: 281474976715707] shutting down 2025-04-17T18:12:27.879287Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913547902, txId: 281474976715706] shutting down 2025-04-17T18:12:28.038137Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913548063, txId: 281474976715711] shutting down 2025-04-17T18:12:28.040853Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913548063, txId: 281474976715710] shutting down 2025-04-17T18:12:28.171975Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913548189, txId: 281474976715714] shutting down 2025-04-17T18:12:28.198519Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346602238372784:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:28.203810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snaps ... PoolFetcherActor] ActorId: [2:7494346643413233018:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.779570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.810330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.831306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.851438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.871654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.892244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.914496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.938317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.977814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346643413233669:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.977886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346643413233674:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.977890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.980213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:32.986318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346643413233676:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:33.070223Z node 2 :TX_PROXY ERROR: Actor# [2:7494346647708201027:3535] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:34.073043Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554034, txId: 281474976715673] shutting down 2025-04-17T18:12:34.073680Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554027, txId: 281474976715672] shutting down 2025-04-17T18:12:34.074259Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554027, txId: 281474976715674] shutting down 2025-04-17T18:12:34.076339Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554034, txId: 281474976715675] shutting down 2025-04-17T18:12:34.220230Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554153, txId: 281474976715681] shutting down 2025-04-17T18:12:34.220533Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554153, txId: 281474976715680] shutting down 2025-04-17T18:12:34.261979Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554272, txId: 281474976715684] shutting down 2025-04-17T18:12:34.296038Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554272, txId: 281474976715686] shutting down 2025-04-17T18:12:34.299261Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554272, txId: 281474976715685] shutting down 2025-04-17T18:12:34.416746Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554433, txId: 281474976715692] shutting down 2025-04-17T18:12:34.428688Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554433, txId: 281474976715690] shutting down 2025-04-17T18:12:34.430170Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554433, txId: 281474976715691] shutting down 2025-04-17T18:12:34.539774Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554566, txId: 281474976715696] shutting down 2025-04-17T18:12:34.568769Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554566, txId: 281474976715697] shutting down 2025-04-17T18:12:34.670131Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554692, txId: 281474976715700] shutting down 2025-04-17T18:12:34.670994Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554692, txId: 281474976715701] shutting down 2025-04-17T18:12:34.851215Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554818, txId: 281474976715704] shutting down 2025-04-17T18:12:34.892835Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1744913554902, txId: 281474976715707] shutting down 2025-04-17T18:12:34.893447Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554902, txId: 281474976715706] shutting down 2025-04-17T18:12:35.049836Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555070, txId: 281474976715710] shutting down 2025-04-17T18:12:35.051036Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555070, txId: 281474976715711] shutting down 2025-04-17T18:12:35.246934Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555266, txId: 281474976715715] shutting down 2025-04-17T18:12:35.247567Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555266, txId: 281474976715714] shutting down 2025-04-17T18:12:35.360427Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555385, txId: 281474976715718] shutting down 2025-04-17T18:12:35.517481Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555539, txId: 281474976715720] shutting down 2025-04-17T18:12:35.518233Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555539, txId: 281474976715721] shutting down 2025-04-17T18:12:35.623442Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346634823296643:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:35.623491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:12:35.679305Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555707, txId: 281474976715724] shutting down 2025-04-17T18:12:35.770292Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555798, txId: 281474976715726] shutting down 2025-04-17T18:12:35.911864Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555917, txId: 281474976715728] shutting down 2025-04-17T18:12:35.959041Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555994, txId: 281474976715730] shutting down 2025-04-17T18:12:36.103662Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556134, txId: 281474976715732] shutting down 2025-04-17T18:12:36.262646Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556260, txId: 281474976715734] shutting down 2025-04-17T18:12:36.320845Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556351, txId: 281474976715736] shutting down 2025-04-17T18:12:36.444213Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556477, txId: 281474976715738] shutting down 2025-04-17T18:12:36.575683Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556610, txId: 281474976715740] shutting down 2025-04-17T18:12:36.689597Z node 2 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556729, txId: 281474976715742] shutting down 2025-04-17T18:12:36.855823Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556890, txId: 281474976715744] shutting down 2025-04-17T18:12:36.945579Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556981, txId: 281474976715746] shutting down 2025-04-17T18:12:37.146484Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557177, txId: 281474976715748] shutting down 2025-04-17T18:12:37.248227Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557282, txId: 281474976715750] shutting down 2025-04-17T18:12:37.384160Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557415, txId: 281474976715752] shutting down 2025-04-17T18:12:37.532027Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557569, txId: 281474976715754] shutting down 2025-04-17T18:12:37.653151Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557681, txId: 281474976715756] shutting down 2025-04-17T18:12:37.791642Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557828, txId: 281474976715758] shutting down >> KqpYql::TableConcat [GOOD] >> KqpYql::TableNameConflict >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> KqpYql::UpdatePk [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 16835, MsgBus: 63169 2025-04-17T18:12:09.054923Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346542608281553:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:09.055011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001780/r3tmp/tmpp22BmF/pdisk_1.dat 2025-04-17T18:12:09.310972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16835, node 1 2025-04-17T18:12:09.359112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:09.359138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:09.359144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:09.359241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:09.419835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:09.419937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:09.421604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63169 TClient is connected to server localhost:63169 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:09.705987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:10.852576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346546903249510:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.852576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346546903249521:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.852644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:10.855295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:10.862059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346546903249524:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:12:10.927861Z node 1 :TX_PROXY ERROR: Actor# [1:7494346546903249575:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:11.127795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:11.248625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:12:11.248626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:12:11.248734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:12:11.248914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:12:11.249002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:12:11.249070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:12:11.249197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:12:11.249270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:12:11.249308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:12:11.249365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:12:11.249452Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:12:11.249505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:12:11.249519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:12:11.249590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:12:11.249604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:12:11.249692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494346551198217079:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:12:11.249707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:12:11.249777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:12:11.249863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:12:11.249922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:12:11.250059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:12:11.250227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:12:11.250385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:12:11.250464Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494346551198217077:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:12:11.272598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346551198217062:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:12:11.272646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494346551198217062:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:12:11.272787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;sel ... 3442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7494346608437446112:3218];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.573731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7494346608437446123:3225];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.573884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7494346608437446123:3225];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.574249Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7494346608437446103:3211];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.574409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7494346608437446103:3211];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.574576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7494346608437446100:3210];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.574714Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7494346608437446100:3210];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.574859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7494346608437446350:3260];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.575036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7494346608437446350:3260];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.578314Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7494346608437446396:3267];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.578488Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7494346608437446396:3267];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.578816Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7494346608437446393:3266];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.578969Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7494346608437446393:3266];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.579117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7494346608437446118:3222];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.579214Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7494346608437446118:3222];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.580415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7494346608437446402:3270];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.580596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7494346608437446402:3270];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.580708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346608437446339:3256];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.580813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7494346608437446339:3256];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.582481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7494346608437446286:3252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.582634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7494346608437446286:3252];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.584504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7494346608437446334:3255];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.584627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7494346608437446334:3255];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.585016Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7494346608437446202:3243];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.585181Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7494346608437446202:3243];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.587571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346608437446410:3272];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.587694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7494346608437446410:3272];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.588058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7494346608437446249:3244];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.588191Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7494346608437446249:3244];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.589070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346608437446428:3273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.589188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7494346608437446428:3273];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.590107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7494346608437446471:3274];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.590308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7494346608437446471:3274];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.591104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7494346608437446398:3268];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.591245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7494346608437446398:3268];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.593704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7494346608437446391:3265];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 
2025-04-17T18:12:30.593859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7494346608437446391:3265];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.597258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7494346608437445431:3055];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038086;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.597417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7494346608437445431:3055];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038086;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.598281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7494346608437446162:3241];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.598443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7494346608437446162:3241];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.648025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7494346608437446389:3264];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T18:12:30.648338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7494346608437446389:3264];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-17T18:12:34.205405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:12:34.205450Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] Test command err: 2025-04-17T18:10:14.821670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:14.821827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:14.821994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c45/r3tmp/tmplgnLbF/pdisk_1.dat 2025-04-17T18:10:15.376084Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7803, node 1 2025-04-17T18:10:15.916741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:15.916821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:15.916864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:15.917630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:15.929714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.028410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.028520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.047137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26345 2025-04-17T18:10:16.648735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.887224Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:19.931081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.931176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.978003Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:19.980797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.234779Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.235298Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.235885Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.236236Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.236347Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.236440Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.236546Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.236636Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.236740Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.407141Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:20.407248Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.420550Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.566227Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:20.611351Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:20.611463Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:20.644001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:20.645731Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:20.645959Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:20.646015Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:20.646078Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:20.646163Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:20.646288Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:20.646346Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:20.647501Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:20.689123Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.689231Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.699351Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:20.704657Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:20.704829Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:20.717302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:20.780291Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:20.780352Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:20.780423Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:20.790912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:20.798465Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:20.798599Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:20.979979Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:21.159287Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:21.218533Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:22.024827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.025013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.109877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:22.234808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:22.235053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:22.235411Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:22.235565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:22.235701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:22.235836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:22.236006Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:22.236153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:22.236273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:22.236405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:22.236522Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:22.236646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:22.308072Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:22.308191Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... S INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:12:35.654362Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:12:35.654988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:12:35.655664Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:12:35.656002Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-17T18:12:35.656056Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-17T18:12:35.656102Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-17T18:12:35.656139Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-17T18:12:35.656178Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744913555600825 2025-04-17T18:12:35.656217Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-17T18:12:35.656320Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-17T18:12:35.656403Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:12:35.656495Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-17T18:12:35.656583Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-17T18:12:35.656677Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-17T18:12:35.656740Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:12:35.656897Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:35.657705Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:12:35.658583Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:35.658654Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:35.658783Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-17T18:12:35.722288Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7299:5341] 2025-04-17T18:12:35.722488Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7299:5341], schemeshard id = 72075186224037897 2025-04-17T18:12:35.722562Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7253:5312], server id = [2:7300:5342], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:35.722593Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7300:5342] 2025-04-17T18:12:35.722652Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7300:5342], node id = 2, have schemeshards count = 1, need schemeshards count = 0 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... 
unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-17T18:12:35.819259Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:35.819367Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:35.820760Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:35.821078Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7304:5345] 2025-04-17T18:12:35.821768Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2804:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:12:35.821820Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2804:3224] 2025-04-17T18:12:35.834622Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:35.834708Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:12:35.834798Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:12:35.835156Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7307:5348], server id = [2:7308:5349], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:35.835234Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7307:5348], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:35.837915Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:35.838014Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:35.838218Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:35.838378Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:35.838574Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:12:35.840888Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7307:5348], server id = [2:7308:5349], tablet id = 72075186224037899 2025-04-17T18:12:35.840922Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:35.841331Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:35.866745Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7328:5368]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:35.866933Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:35.866967Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7328:5368], StatRequests.size() = 1 2025-04-17T18:12:35.946782Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmYyMDE5YTAtNThlM2FjOGQtOGE1M2FlMTEtNzU0M2Y0YTE=, TxId: 2025-04-17T18:12:35.946837Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmYyMDE5YTAtNThlM2FjOGQtOGE1M2FlMTEtNzU0M2Y0YTE=, TxId: 2025-04-17T18:12:35.947249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:35.960368Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:35.960446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:36.510556Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:12:36.510626Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:37.173150Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:37.173221Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:37.173266Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:12:38.399771Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:38.399919Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:38.399984Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:38.400723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:38.413996Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:38.414415Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:38.414475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:38.414887Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:38.428026Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:38.428198Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:38.428733Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7422:5418], server id = [2:7423:5419], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:38.428824Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7422:5418], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:38.429942Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:38.430035Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:38.430164Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:38.430329Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:38.430530Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:38.432647Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7422:5418], server id = [2:7423:5419], tablet id = 72075186224037899 2025-04-17T18:12:38.432680Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:38.433080Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:38.453639Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzZmZjZlNjktMTAxMzExYmUtYTQ3NzcwZC1mZWU5NzE2OQ==, TxId: 2025-04-17T18:12:38.453700Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzZmZjZlNjktMTAxMzExYmUtYTQ3NzcwZC1mZWU5NzE2OQ==, TxId: 2025-04-17T18:12:38.454114Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-04-17T18:12:36.000000Z 2025-04-17T18:12:38.454377Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:38.467953Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:38.468043Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2804:3224] >> KqpScripting::ScanQueryInvalid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 4270, MsgBus: 17420 2025-04-17T18:12:35.568772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346656369617572:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:35.568850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013cf/r3tmp/tmpNh6F7N/pdisk_1.dat 2025-04-17T18:12:35.791294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4270, node 1 2025-04-17T18:12:35.856809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:35.856834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:35.856839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:35.856927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:35.890787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:35.890893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:35.892555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17420 TClient is connected to server localhost:17420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:36.245095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.276233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.402473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:36.520765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.568875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:37.649337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346664959553964:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.649435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.876609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.924965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.946646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.968104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.005640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.028466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.069064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346669254521914:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.069120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.069182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346669254521919:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.071836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:38.079615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346669254521921:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:38.177889Z node 1 :TX_PROXY ERROR: Actor# [1:7494346669254521977:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
<main>: Error: Type annotation, code: 1030
<main>:3:20: Warning: At function: AsStruct
<main>:4:31: Warning: At function: +
<main>:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
<main>:5:27: Error: At function: KiUpdateTable!
<main>:5:27: Error: Cannot update primary key column: Group >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] >> KqpYql::DdlDmlMix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command err: 2025-04-17T18:10:18.972085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:18.972232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:18.972369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c01/r3tmp/tmpxzYfNT/pdisk_1.dat 2025-04-17T18:10:19.336080Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29080, node 1 2025-04-17T18:10:19.566122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:19.566165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:19.566220Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:19.566758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:19.568420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.651271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.651385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.668299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9808 2025-04-17T18:10:20.203884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.321627Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:23.356802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:23.356893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:23.394459Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:23.397078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:23.626139Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.626616Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.627230Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.627543Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.627621Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.627724Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.627777Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.627839Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.627902Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.795316Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:23.795432Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:23.808928Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:23.964499Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:24.012340Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:24.012446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:24.044822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:24.048368Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:24.048607Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:24.048703Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:24.048769Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:24.048826Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:24.048891Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:24.048965Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:24.050479Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:24.089277Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:24.089380Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:24.097914Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:24.102542Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:24.102696Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:24.115473Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:24.182778Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:24.182838Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:24.182920Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:24.194857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:24.202544Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:24.202691Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:24.382820Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:24.558846Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:24.616799Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:25.305778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.305933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.322739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:25.442022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:25.442355Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:25.442717Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:25.442909Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:25.443066Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:25.443246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:25.443494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:25.443699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:25.443869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:25.444022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:25.444171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:25.444316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:25.483857Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:25.483983Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 3655Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7125:5237], server id = [2:7126:5238], tablet id = 72075186224037894 2025-04-17T18:12:36.083733Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7256:5315] 2025-04-17T18:12:36.083791Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7256:5315] 2025-04-17T18:12:36.118087Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:12:36.118161Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:12:36.118712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:12:36.119351Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:12:36.119613Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-17T18:12:36.119657Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-17T18:12:36.119690Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-17T18:12:36.119727Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-17T18:12:36.119761Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744913556052757 2025-04-17T18:12:36.119789Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-17T18:12:36.119833Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-17T18:12:36.119895Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-17T18:12:36.119952Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:12:36.120023Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-17T18:12:36.120091Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-17T18:12:36.120158Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-17T18:12:36.120219Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:12:36.120340Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:36.120952Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:12:36.121591Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:36.121647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:36.121740Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:12:36.122689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:36.122752Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:36.124162Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:36.187196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:36.187405Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:36.187998Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7303:5346], server id = [2:7304:5347], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:36.188104Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7303:5346], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:36.191924Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:36.192053Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:36.192227Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:36.192440Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:36.192737Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:36.195803Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7303:5346], server id = [2:7304:5347], tablet id = 72075186224037899 2025-04-17T18:12:36.195850Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:36.196419Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:36.233393Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7324:5366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:36.233619Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:36.233668Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7324:5366], StatRequests.size() = 1 2025-04-17T18:12:36.345841Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTUwNGUyYjctOTYwMDA4MWYtMzhjMzM4ODItYjkwOTNmZGE=, TxId: 2025-04-17T18:12:36.345913Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTUwNGUyYjctOTYwMDA4MWYtMzhjMzM4ODItYjkwOTNmZGE=, TxId: 2025-04-17T18:12:36.346363Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:36.358519Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7339:5372] 2025-04-17T18:12:36.358700Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7339:5372], schemeshard id = 72075186224037897 2025-04-17T18:12:36.358798Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7256:5315], server id = [2:7340:5373], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:36.358828Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7340:5373] 2025-04-17T18:12:36.358881Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7340:5373], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-04-17T18:12:36.371759Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:36.371844Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:36.458206Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7344:5376] 2025-04-17T18:12:36.458742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2804:3223] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:12:36.458797Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2804:3223] 2025-04-17T18:12:36.458857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:12:36.958436Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-17T18:12:36.958520Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:37.687511Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:37.687603Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:37.687650Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:12:38.945631Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:38.945768Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:38.945814Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:38.946415Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:38.959496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:38.959867Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:38.959952Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:38.960313Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:38.973486Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:38.973707Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-17T18:12:38.974267Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7423:5419], server id = [2:7424:5420], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:38.974381Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7423:5419], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:38.975724Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:38.975822Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:38.975990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:38.976143Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:38.976368Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:38.978970Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7423:5419], server id = [2:7424:5420], tablet id = 72075186224037899 2025-04-17T18:12:38.979011Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:38.979451Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:39.000025Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjI3NDczN2QtMzIxNGY2YjUtMWU4MWFhZWYtY2VmOTJiYTI=, TxId: 2025-04-17T18:12:39.000102Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjI3NDczN2QtMzIxNGY2YjUtMWU4MWFhZWYtY2VmOTJiYTI=, TxId: 2025-04-17T18:12:39.000543Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:39.024781Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:39.024850Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2804:3223] >> KqpYql::UpdateBadType [GOOD] >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2025-04-17T18:10:14.823196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:14.823357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:14.823534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c5d/r3tmp/tmpHcrgFE/pdisk_1.dat 2025-04-17T18:10:15.373756Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21752, node 1 2025-04-17T18:10:15.919986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:15.920027Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:15.920065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:15.920730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:15.929063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.025512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.026057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.054834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65209 2025-04-17T18:10:16.675779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.840248Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:19.892421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.892530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.932616Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:19.936948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.195524Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.195945Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.196457Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.196709Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.196799Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.196850Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.196905Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.197017Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.197077Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.367930Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:20.368040Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.392000Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.538640Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:20.587946Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:20.588048Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:20.626323Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:20.632146Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:20.632396Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:20.632456Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:20.632515Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:20.632566Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:20.632626Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:20.632676Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:20.634057Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:20.669046Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.669142Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.681608Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:20.685359Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:20.685517Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:20.695614Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:20.713740Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:20.713792Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:20.713875Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:20.724706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:20.732212Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:20.732349Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:20.953756Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:21.117903Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:21.176143Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:22.031675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.031847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.111987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:22.232701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:22.232999Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:22.233377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:22.233581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:22.233801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:22.233994Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:22.234266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:22.234451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:22.234658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:22.234846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:22.235042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:22.235187Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:22.265215Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:22.265332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 15] 2025-04-17T18:12:33.719451Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7210:5288], server id = [2:7255:5316], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:33.719490Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7255:5316] 2025-04-17T18:12:33.719555Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7254:5315], schemeshard id = 72075186224037897 2025-04-17T18:12:33.719632Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7255:5316], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-17T18:12:33.796637Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7264:5317] 2025-04-17T18:12:33.797410Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2806:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:12:33.797464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2806:3224] 2025-04-17T18:12:33.797530Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:12:34.945035Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:34.945120Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:34.945171Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:12:34.945220Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:34.945268Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:34.946020Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:34.959450Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:34.959835Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:34.959909Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:34.960821Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:34.974192Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:34.974419Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:12:34.974953Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7305:5344], server id = [2:7306:5345], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:34.975066Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7305:5344], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:34.978824Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:34.978939Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:34.979173Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:34.979355Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:34.979640Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:34.982568Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7305:5344], server id = [2:7306:5345], tablet id = 72075186224037899 2025-04-17T18:12:34.982617Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:34.983209Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:35.018829Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7326:5364]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:35.019109Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:35.019158Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7326:5364], StatRequests.size() = 1 2025-04-17T18:12:35.138947Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTA1M2Y4MTUtOGQwYzkxZjctM2ZjMWU5Y2EtMWIzNzNjZmU=, TxId: 2025-04-17T18:12:35.139025Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTA1M2Y4MTUtOGQwYzkxZjctM2ZjMWU5Y2EtMWIzNzNjZmU=, TxId: 2025-04-17T18:12:35.139564Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:35.153118Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:35.153188Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:35.638329Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:12:35.638416Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:36.319784Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:36.319876Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-04-17T18:12:36.320587Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:36.333918Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:36.334208Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:36.334252Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-17T18:12:36.357863Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:37.487436Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:37.487522Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:37.487561Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:12:37.487846Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-17T18:12:37.488452Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-17T18:12:37.488553Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-17T18:12:37.501861Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:12:38.599209Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:38.599319Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:38.599371Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:12:39.721814Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:39.722060Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:39.732955Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:39.733087Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:39.733123Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:39.733719Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:39.747348Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:39.747651Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:39.747703Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:39.748074Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:39.771593Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:39.771717Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:39.772170Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7494:5454], server id = [2:7495:5455], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:39.772264Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7494:5454], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:39.773368Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:39.773456Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:39.773566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:39.773726Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:39.773938Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:39.776168Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7494:5454], server id = [2:7495:5455], tablet id = 72075186224037899 2025-04-17T18:12:39.776205Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:39.776778Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:39.795927Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTcxMDRkZTEtNjZjZTBkMGEtYzg2YmI2YWQtODJlNjVmMGE=, TxId: 2025-04-17T18:12:39.795974Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTcxMDRkZTEtNjZjZTBkMGEtYzg2YmI2YWQtODJlNjVmMGE=, TxId: 2025-04-17T18:12:39.796328Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:39.809938Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:39.809997Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete.
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2806:3224] >> KqpYql::JsonCast [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 10389, MsgBus: 26065 2025-04-17T18:12:36.934239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346660724214884:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:36.934305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013be/r3tmp/tmpW8Cg3W/pdisk_1.dat 2025-04-17T18:12:37.203581Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10389, node 1 2025-04-17T18:12:37.259966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:37.259990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:37.259999Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:37.260099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:37.280624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:37.280716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:37.282474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26065 TClient is connected to server localhost:26065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:37.698563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:37.725330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:37.827407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:37.961096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.024562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.214821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346673609118552:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:39.214937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:39.524139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.550973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.575852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.602077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.626946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.655477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.681629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.760653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346673609119211:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:39.760715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:39.760720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346673609119216:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:39.763646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:39.771769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346673609119218:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:39.841944Z node 1 :TX_PROXY ERROR: Actor# [1:7494346673609119273:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
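The trace that follows is the expected failure for KqpYql::UpdateBadType: the query attempts to write a String into the Uint64 column Amount of /Root/Test, and type annotation rejects the row. As a hedged illustration only (the exact statement lives in the test source and is not shown in this log), a statement of this shape produces such a trace:

    -- Hypothetical reproduction; the table path and column name are taken from the
    -- trace below, the literal value is invented for illustration.
    UPDATE `/Root/Test` SET Amount = "not a number";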
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional<String> to Optional<Uint64>
:3:20: Error: Row type mismatch for table: db.[/Root/Test] >> KqpYql::Discard [GOOD] >> KqpYql::EvaluateExprYsonAndType [GOOD] >> KqpYql::EvaluateExpr2 [GOOD] >> KqpYql::EvaluateExpr3 >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::InsertCV-useSink >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpYql::RefSelect [GOOD] >> KqpYql::PgIntPrimaryKey >> KqpYql::Closure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 32500, MsgBus: 16963 2025-04-17T18:12:33.719753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346646957101876:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:33.719847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ef/r3tmp/tmpuHyHDy/pdisk_1.dat 2025-04-17T18:12:33.992098Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32500, node 1 2025-04-17T18:12:34.050361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:34.050381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:34.050386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:34.050464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:34.079217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:34.079309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:34.080964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16963 TClient is connected to server localhost:16963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:34.459283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.481590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:34.570049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.683337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.740723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.041725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659842005537:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.041845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.275485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.302629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.324385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.346028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.371118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.397248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.423584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.467964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659842006195:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.468029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.468043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659842006200:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.471134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:36.479243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346659842006202:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:36.562511Z node 1 :TX_PROXY ERROR: Actor# [1:7494346659842006256:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. Trying to start YDB, gRPC: 24212, MsgBus: 2590 2025-04-17T18:12:37.807982Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346663579676018:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:37.808054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ef/r3tmp/tmpiXpITn/pdisk_1.dat 2025-04-17T18:12:37.883648Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24212, node 2 2025-04-17T18:12:37.921127Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:37.921151Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:37.921158Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:37.921251Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:37.924647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:37.924706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:37.926509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2590 TClient is connected to server localhost:2590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:38.248079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.258958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.295314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:38.397538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.467340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.257075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346676464579680:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.257141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.284370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.305496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.329426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.353444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.378993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.408305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.438119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.490001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346676464580336:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.490077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.490100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346676464580341:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.492915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:40.501343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346676464580343:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:40.603207Z node 2 :TX_PROXY ERROR: Actor# [2:7494346676464580401:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 61887, MsgBus: 65309 2025-04-17T18:12:33.619570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346644812637102:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:33.619716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013f7/r3tmp/tmpCGYpdN/pdisk_1.dat 2025-04-17T18:12:33.843521Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61887, node 1 2025-04-17T18:12:33.916931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:33.916964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:33.916971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:33.917083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:33.966158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:33.966293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:33.967924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65309 TClient is connected to server localhost:65309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:34.304080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:34.323145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.443392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.556941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.611570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.898684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346653402573473:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.898788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.135497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.160866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.184503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.208623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.231771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.260090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.325103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.400305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346657697541436:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.400366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.400372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346657697541441:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.403294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:36.412519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346657697541443:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:36.488610Z node 1 :TX_PROXY ERROR: Actor# [1:7494346657697541496:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13734, MsgBus: 11194 2025-04-17T18:12:37.897594Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346661792497402:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:37.897671Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013f7/r3tmp/tmpvwWsrE/pdisk_1.dat 2025-04-17T18:12:37.993001Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13734, node 2 2025-04-17T18:12:38.022514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:38.022586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:38.024152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:38.027579Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:38.027594Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:38.027598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:38.027664Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11194 TClient is connected to server localhost:11194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:38.316331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.324807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:38.392179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.526539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.577719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.448747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346674677401060:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.448813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.495544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.518993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.542169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.562933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.585406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.633569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.697414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.771311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346674677401725:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.771402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.771522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346674677401730:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.774642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:40.783746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346674677401732:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:40.845767Z node 2 :TX_PROXY ERROR: Actor# [2:7494346674677401785:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:41.536574Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494346678972369412:2515], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2025-04-17T18:12:41.536780Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjRmMTIxY2QtY2M2ZmI4NjktY2QzNDA0Y2EtZTRkYTkyZjg=, ActorId: [2:7494346678972369405:2511], ActorState: ExecuteState, TraceId: 01js2f7xw94e6g8pjcq1cfw7g0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 21395, MsgBus: 31151 2025-04-17T18:12:33.398542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346644670991294:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:33.398615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ff/r3tmp/tmp8G9TBZ/pdisk_1.dat 2025-04-17T18:12:33.642820Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21395, node 1 2025-04-17T18:12:33.715728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:33.715751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:33.715763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:33.715895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:33.741077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:33.741197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:33.742847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31151 TClient is connected to server localhost:31151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:34.108696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.126762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:34.248168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.364329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.420674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.760883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346653260927663:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:35.761011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.002052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.029289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.057021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.079769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.120011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.143875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.171314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.219643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346657555895612:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.219770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.219845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346657555895618:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.222952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:36.232206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346657555895620:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:36.294207Z node 1 :TX_PROXY ERROR: Actor# [1:7494346657555895673:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21655, MsgBus: 31691 2025-04-17T18:12:37.849727Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346662634347995:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:37.849821Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ff/r3tmp/tmpo4noVt/pdisk_1.dat 2025-04-17T18:12:37.922940Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21655, node 2 2025-04-17T18:12:37.973952Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:37.973976Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:37.973982Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:37.974088Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:37.974091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:37.974140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:37.975835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31691 TClient is connected to server localhost:31691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:38.367548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.384411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:38.434831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.551846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.625736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.577394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346675519251669:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.577494Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.626616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.652880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.673724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.695801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.717355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.782744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.811124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.859789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346675519252331:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.859844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346675519252336:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.859854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.862928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:40.871384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346675519252338:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:40.925095Z node 2 :TX_PROXY ERROR: Actor# [2:7494346675519252390:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> KqpScripting::ScriptStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 2960, MsgBus: 64751 2025-04-17T18:12:12.128136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346558140095493:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:12.128240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001773/r3tmp/tmpPfkurm/pdisk_1.dat 2025-04-17T18:12:12.353856Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2960, node 1 2025-04-17T18:12:12.401074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:12.401098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:12.401103Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:12.401211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:12.452943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:12.453046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:12.454747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64751 TClient is connected to server localhost:64751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:12.794591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:13.869429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346562435063457:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:13.869490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346562435063449:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:13.869600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:13.872224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:12:13.878993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346562435063463:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:12:13.942345Z node 1 :TX_PROXY ERROR: Actor# [1:7494346562435063514:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:14.129460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:14.223191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:12:14.223191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:12:14.223361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:12:14.223536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:12:14.223608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:12:14.223674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:12:14.223730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:12:14.223777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:12:14.223812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:12:14.223901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:12:14.223907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:12:14.223978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:12:14.223997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:12:14.224056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:12:14.224116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:12:14.224120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:12:14.224196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:12:14.224224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:12:14.224287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:12:14.224342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494346566730030999:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:12:14.224352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:12:14.224417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:12:14.224492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:12:14.224564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494346566730030995:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:12:14.249190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346566730031025:2350];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:12:14.249257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494346566730031025:2350];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:12:14.249409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_ ... node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037961;self_id=[2:7494346634285197149:2495];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037961;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439145Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037964;self_id=[2:7494346634285197096:2468];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037964;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439229Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037962;self_id=[2:7494346629990229434:2422];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037962;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439230Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7494346629990229720:2455];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439303Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7494346629990229699:2447];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439307Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7494346634285197107:2473];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439362Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037963;self_id=[2:7494346634285197099:2469];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439364Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7494346629990229724:2457];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
2025-04-17T18:12:36.439417Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037968;self_id=[2:7494346634285197133:2487];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439420Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7494346629990229622:2439];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439471Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7494346634285197139:2490];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439471Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7494346629990229503:2429];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439522Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037976;self_id=[2:7494346629990229761:2459];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439526Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7494346629990229683:2445];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439579Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7494346629990229659:2443];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439579Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7494346629990229567:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439628Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7494346629990229628:2440];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439637Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7494346629990229791:2465];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
2025-04-17T18:12:36.439682Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7494346629990229431:2421];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439688Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7494346629990229630:2441];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439753Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7494346629990229494:2425];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439761Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7494346629990229711:2451];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439822Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7494346629990229617:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439824Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7494346629990229563:2432];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439878Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7494346629990229490:2423];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439879Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037974;self_id=[2:7494346629990229706:2449];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439951Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7494346629990229588:2434];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.439959Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7494346629990229593:2435];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.440009Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7494346629990229546:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
2025-04-17T18:12:36.440012Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7494346629990229500:2428];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.440059Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7494346629990229498:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.440066Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7494346629990229609:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.440121Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7494346629990229561:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.440181Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7494346629990229492:2424];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.440238Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7494346629990229496:2426];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-17T18:12:36.440297Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7494346629990229404:2418];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 62514, MsgBus: 11072 2025-04-17T18:12:34.569235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346651651624765:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:34.569339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013de/r3tmp/tmpWjWP2p/pdisk_1.dat 2025-04-17T18:12:34.803939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62514, node 1 2025-04-17T18:12:34.867024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:34.867046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:34.867051Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:34.867130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:34.893349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:34.893447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:34.895010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11072 TClient is connected to server localhost:11072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:35.260335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.284720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.408480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.515541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.576065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.923055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346660241561145:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.923150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.230269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.256020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.279101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.302342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.325528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.355038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.379057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.421337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346664536529095:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.421399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.421449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346664536529100:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.424393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:37.431794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346664536529102:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:37.487546Z node 1 :TX_PROXY ERROR: Actor# [1:7494346664536529155:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17823, MsgBus: 6318 2025-04-17T18:12:38.836953Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346667068114968:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:38.837152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013de/r3tmp/tmpgesL7E/pdisk_1.dat 2025-04-17T18:12:38.902957Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17823, node 2 2025-04-17T18:12:38.955307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:38.955384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:38.956920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:38.957592Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:38.957613Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:38.957620Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:38.957697Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6318 TClient is connected to server localhost:6318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:39.329748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.346652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:39.416144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.563911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.633013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.243734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346679953018634:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.243833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.281530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.306772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.330907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.355996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.379647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.408408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.473087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.519329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346679953019296:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.519401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.519469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346679953019301:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.522032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:41.529038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346679953019304:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:41.588445Z node 2 :TX_PROXY ERROR: Actor# [2:7494346679953019358:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::TableNameConflict [GOOD] >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::Replace-QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> KqpQueryPerf::RangeLimitRead-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 62220, MsgBus: 61871 2025-04-17T18:12:34.091998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346650891695210:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:34.092114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013eb/r3tmp/tmpVvpYWH/pdisk_1.dat 2025-04-17T18:12:34.380085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:34.380200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:34.382348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:34.383521Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62220, node 1 2025-04-17T18:12:34.441427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:34.441455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:34.441462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:34.441600Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61871 TClient is connected to server localhost:61871 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:34.841244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.862842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:34.996642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.112407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.179049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.259329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659481631581:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.259436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.517936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.542697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.566871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.590774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.615043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.665823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.690625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.763375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659481632239:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.763447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.763482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659481632244:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.765983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:36.772502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346659481632246:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:36.842590Z node 1 :TX_PROXY ERROR: Actor# [1:7494346659481632300:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:37.841608Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557870, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 2134, MsgBus: 23390 2025-04-17T18:12:38.536586Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346668454790368:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:38.536669Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013eb/r3tmp/tmpIivUzd/pdisk_1.dat 2025-04-17T18:12:38.647082Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2134, node 2 2025-04-17T18:12:38.675793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:38.675874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:38.677554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:38.687473Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:38.687497Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:38.687505Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:38.687627Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23390 TClient is connected to server localhost:23390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:39.100403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:39.116185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.185905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.309515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.382928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.529260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346681339694041:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.529343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.579614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.606868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.638331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.661220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.687724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.712642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.740986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.786935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346681339694698:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.786995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346681339694703:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.787003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.789401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:41.796822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346681339694705:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:41.889591Z node 2 :TX_PROXY ERROR: Actor# [2:7494346681339694761:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:42.711927Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913562749, txId: 281474976715672] shutting down 2025-04-17T18:12:42.823000Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913562861, txId: 281474976715674] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 21554, MsgBus: 11520 2025-04-17T18:12:34.456335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346651559147326:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:34.456442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013df/r3tmp/tmp5YVpSA/pdisk_1.dat 2025-04-17T18:12:34.738569Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21554, node 1 2025-04-17T18:12:34.809887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:34.809905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:34.809910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:34.809995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:34.835359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:34.835455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:34.837000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11520 TClient is connected to server localhost:11520 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:35.204945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.219670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.328610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.451313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.506771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.809578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346660149083698:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.809679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.021453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.045539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.066346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.088607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.115497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.144840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.210516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.254817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346664444051657:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.254889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.254910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346664444051662:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.257610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:37.265459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346664444051664:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:37.336532Z node 1 :TX_PROXY ERROR: Actor# [1:7494346664444051718:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:37.987132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.366432Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913558402, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 63039, MsgBus: 19341 2025-04-17T18:12:38.915667Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346668430254028:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:38.915720Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013df/r3tmp/tmp6c5S5B/pdisk_1.dat 2025-04-17T18:12:39.008565Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63039, node 2 2025-04-17T18:12:39.051325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:39.051402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:39.053095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:39.064123Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:39.064145Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:39.064153Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:39.064250Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19341 TClient is connected to server localhost:19341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:39.456325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.468250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.538052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.656005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.712746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.781823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346681315157686:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.781912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.815843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.837705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.861437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.886303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.911032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.941203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.007206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.080727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346685610125650:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.080788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346685610125655:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.080790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.083133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:42.090069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346685610125657:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:42.171471Z node 2 :TX_PROXY ERROR: Actor# [2:7494346685610125712:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 2398, MsgBus: 28292 2025-04-17T18:12:35.615882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346654973759893:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:35.615974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ce/r3tmp/tmphhN61S/pdisk_1.dat 2025-04-17T18:12:35.856333Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2398, node 1 2025-04-17T18:12:35.917544Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:35.917581Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:35.917589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:35.917756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:35.946759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:35.946846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:35.948600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28292 TClient is connected to server localhost:28292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:36.328091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:36.352887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.472841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.582124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.633205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:37.785895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346663563696276:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.785990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.995578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.017118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.039767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.065460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.090526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.181394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.205969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.279357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346667858664242:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.279422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.279433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346667858664247:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.282414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:38.289682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346667858664249:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:38.344492Z node 1 :TX_PROXY ERROR: Actor# [1:7494346667858664302:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. Trying to start YDB, gRPC: 13616, MsgBus: 9138 2025-04-17T18:12:39.749945Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346671912174957:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:39.750067Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013ce/r3tmp/tmpREzDLf/pdisk_1.dat 2025-04-17T18:12:39.815062Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13616, node 2 2025-04-17T18:12:39.874420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:39.874498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:39.875939Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:39.875961Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:39.875970Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:39.875984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:39.876072Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9138 TClient is connected to server localhost:9138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:40.187800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.203474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.272045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
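Note: the ":3:27: Error: CONCAT is not supported on Kikimr clusters." diagnostic recorded in this block is an expected failure, not a test bug: CONCAT() is the YQL table expression that unions several tables given by path, and KQP rejects it during table intent determination (code 1040) when the target is a Kikimr (YDB) cluster. A minimal sketch of a script that would draw this diagnostic (an assumption; the actual test script is not shown in the log, and the table paths are placeholders):

    -- hypothetical YQL rejected by KQP on a Kikimr cluster
    SELECT Key, Value
    FROM CONCAT(`/Root/Test1`, `/Root/Test2`);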
2025-04-17T18:12:40.401403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.457485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.033027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346684797078610:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.033082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.067490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.089159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.109480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.131145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.154295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.183342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.220856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.262344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346684797079264:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.262414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.262425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346684797079269:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.265278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:42.274405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346684797079271:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:42.373796Z node 2 :TX_PROXY ERROR: Actor# [2:7494346684797079327:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 21779, MsgBus: 5536 2025-04-17T18:12:34.291546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346651361752761:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:34.291760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013e3/r3tmp/tmp1K2hFK/pdisk_1.dat 2025-04-17T18:12:34.557302Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21779, node 1 2025-04-17T18:12:34.613328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:34.613352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:34.613360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:34.613445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:34.649709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:34.649800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:34.651602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5536 TClient is connected to server localhost:5536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:34.997043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.019100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.141123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
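Note: the ":12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables." error is the failure KqpYql::TableNameConflict exists to assert: type annotation fails (code 1030) at KiCreateTable! because the same path resolves to more than one table object inside a single query. A sketch of one pattern that can produce such a conflict (an assumption; the log does not include the test's script):

    -- hypothetical YQL: `/Root/Test` is referenced both as an existing table
    -- and as the target of CREATE TABLE within the same script
    SELECT * FROM `/Root/Test`;
    CREATE TABLE `/Root/Test` (
        Id Uint64,
        PRIMARY KEY (Id)
    );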
2025-04-17T18:12:35.251575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.327357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.534210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659951689132:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.534342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.785052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.806086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.829138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.851702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.877486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.906728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.932932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:36.978081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659951689791:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.978132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.978281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346659951689796:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.981377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:36.989719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346659951689798:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:37.087577Z node 1 :TX_PROXY ERROR: Actor# [1:7494346664246657150:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6518, MsgBus: 11177 2025-04-17T18:12:38.331488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346666088400034:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:38.331566Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013e3/r3tmp/tmpErSbZC/pdisk_1.dat 2025-04-17T18:12:38.399244Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6518, node 2 2025-04-17T18:12:38.445780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:38.445798Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:38.445803Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:38.445871Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:38.453937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:38.454012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:38.455549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11177 TClient is connected to server localhost:11177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:38.841534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.858641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:38.926488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.062756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.134839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.742020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346674678336410:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.742076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.789479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.811874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.833813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.858563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.883501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.909295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.934963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.977364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346674678337064:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.977429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346674678337069:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.977477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.980053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:40.987761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346674678337071:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:41.063090Z node 2 :TX_PROXY ERROR: Actor# [2:7494346678973304421:3537] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:42.080837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.399678Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913562420, txId: 281474976715675] shutting down 2025-04-17T18:12:42.805572Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913562819, txId: 281474976715679] shutting down 2025-04-17T18:12:43.052412Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563048, txId: 281474976715683] shutting down 2025-04-17T18:12:43.331792Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346666088400034:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:43.331882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpQueryPerf::Upsert-QueryService-UseSink >> KqpQueryPerf::RangeRead-QueryService >> KqpPragma::Warning [GOOD] >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: 2025-04-17T18:10:14.954623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:14.954800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:14.954962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c69/r3tmp/tmpKK57HI/pdisk_1.dat 2025-04-17T18:10:15.406382Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14811, node 1 2025-04-17T18:10:15.917111Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:15.917166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:15.917193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:15.917855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:15.933567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.040367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.040498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.057354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28385 2025-04-17T18:10:16.631307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.802252Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:19.869850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.869972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.913067Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:19.915572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.174377Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.174917Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.175485Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.175637Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.175743Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.175984Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.176070Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.176147Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.176238Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.342494Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:20.342573Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.355669Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.511916Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:20.553978Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:20.554085Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:20.595817Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:20.596983Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:20.597163Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:20.597208Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:20.597251Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:20.597300Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:20.597347Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:20.597386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:20.597782Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:20.637845Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.637971Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1867:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.648674Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-04-17T18:10:20.654828Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2616] 2025-04-17T18:10:20.655061Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1903:2616], schemeshard id = 72075186224037897 2025-04-17T18:10:20.668174Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:20.688828Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:20.688889Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:20.688968Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:20.698942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:20.751612Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-04-17T18:10:20.751771Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-04-17T18:10:20.955671Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-04-17T18:10:21.104731Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-04-17T18:10:21.150563Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-04-17T18:10:22.023468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:10:22.023655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:10:22.108546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897
2025-04-17T18:10:22.591294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2540:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:10:22.591462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:10:22.592446Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2545:3126]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-04-17T18:10:22.592715Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-04-17T18:10:22.592807Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2547:3128]
2025-04-17T18:10:22.592876Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2547:3128]
2025-04-17T18:10:22.593521Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2548:2993]
2025-04-17T18:10:22.593846Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2547:3128], server id = [2:2548:2993], tablet id = 72075186224037894, status = OK
2025-04-17T18:10:22.594060Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2548:2993], node id = 1, have schemeshards count = 0, need schemeshards count = 1
2025-04-17T18:10:22.594118Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1
2025-04-17T18:10:22.594817Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1
2025-04-17T18:10:22.594922Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2545:3126], StatRequests.size() = 1
2025-04-17T18:10:22.653322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2552:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:10:22.653417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:10:22.653761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2557:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:10:22.659126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-04-17T18:10:22.819281Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck
2025-04-17T18:10:22.819354Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0
2025-04-17T18:10:22.889618Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2547:3128], schemeshard count = 1
2025-04-17T18:10:23.209474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... ry for error: {
: Error: Transaction 281474976720658 completed, doublechecking }
2025-04-17T18:12:36.225462Z node 2 :TX_PROXY ERROR: Actor# [2:7214:5026] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:12:36.261567Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7243:5041]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-04-17T18:12:36.261809Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-04-17T18:12:36.261897Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7245:5043]
2025-04-17T18:12:36.261950Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7245:5043]
2025-04-17T18:12:36.262324Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7246:5044]
2025-04-17T18:12:36.262431Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7245:5043], server id = [2:7246:5044], tablet id = 72075186224037894, status = OK
2025-04-17T18:12:36.262514Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7246:5044], node id = 2, have schemeshards count = 0, need schemeshards count = 1
2025-04-17T18:12:36.262572Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1
2025-04-17T18:12:36.262696Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-04-17T18:12:36.262768Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7243:5041], StatRequests.size() = 1
2025-04-17T18:12:36.373824Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzM2YjEyYmEtZGJhYzAwY2UtMzc0NjdhZGMtZjFmZWIyZmQ=, TxId:
2025-04-17T18:12:36.373883Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzM2YjEyYmEtZGJhYzAwY2UtMzc0NjdhZGMtZjFmZWIyZmQ=, TxId:
2025-04-17T18:12:36.374521Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute
2025-04-17T18:12:36.387129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5]
2025-04-17T18:12:36.387183Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply.
2025-04-17T18:12:36.451014Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck
2025-04-17T18:12:36.451100Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0
2025-04-17T18:12:36.514805Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7245:5043], schemeshard count = 1
2025-04-17T18:12:37.623465Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-04-17T18:12:37.623561Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table.
2025-04-17T18:12:37.623613Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4]
2025-04-17T18:12:38.872112Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-04-17T18:12:38.893509Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-04-17T18:12:38.893642Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table.
2025-04-17T18:12:38.893676Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4]
2025-04-17T18:12:38.893987Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-04-17T18:12:38.896350Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
2025-04-17T18:12:38.907838Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Njk4NWI0MmUtYWJmMGE2OTEtMTg1N2JlMTQtN2MxMTIyOWQ=, TxId:
2025-04-17T18:12:38.907893Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njk4NWI0MmUtYWJmMGE2OTEtMTg1N2JlMTQtN2MxMTIyOWQ=, TxId:
2025-04-17T18:12:38.908527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute
2025-04-17T18:12:38.921538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4]
2025-04-17T18:12:38.921598Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3114:3319]
2025-04-17T18:12:40.236207Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-04-17T18:12:40.236279Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table.
2025-04-17T18:12:40.236317Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 5]
2025-04-17T18:12:41.434477Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1
2025-04-17T18:12:41.435670Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-04-17T18:12:41.436030Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1
2025-04-17T18:12:41.457806Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-04-17T18:12:41.457881Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table.
2025-04-17T18:12:41.457916Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3]
2025-04-17T18:12:41.458299Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-04-17T18:12:41.460676Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
2025-04-17T18:12:41.492408Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjliNjAwMzItM2MxYzNkMmYtNWFiNWVmNWUtODkyZWMyOTg=, TxId:
2025-04-17T18:12:41.492472Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjliNjAwMzItM2MxYzNkMmYtNWFiNWVmNWUtODkyZWMyOTg=, TxId:
2025-04-17T18:12:41.493032Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute
2025-04-17T18:12:41.506008Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3]
2025-04-17T18:12:41.506050Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply.
2025-04-17T18:12:42.769427Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-04-17T18:12:42.769505Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-04-17T18:12:42.769547Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-04-17T18:12:43.984831Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-04-17T18:12:43.984937Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table.
2025-04-17T18:12:43.984963Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5]
2025-04-17T18:12:43.985223Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-04-17T18:12:43.987080Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
2025-04-17T18:12:43.994382Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODUwNjgzZmYtMmY5NzgzN2ItM2E1ZWI3NTUtMjA0ODNiYTc=, TxId:
2025-04-17T18:12:43.994425Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODUwNjgzZmYtMmY5NzgzN2ItM2E1ZWI3NTUtMjA0ODNiYTc=, TxId:
2025-04-17T18:12:43.994976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute
2025-04-17T18:12:44.007651Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5]
2025-04-17T18:12:44.007708Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3114:3319]
2025-04-17T18:12:44.008129Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7570:5226]], StatType[ 2 ], StatRequestsCount[ 1 ]
2025-04-17T18:12:44.010602Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ]
2025-04-17T18:12:44.010645Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ]
2025-04-17T18:12:44.014841Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ]
2025-04-17T18:12:44.014909Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ]
2025-04-17T18:12:44.014961Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ]
2025-04-17T18:12:44.017164Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ]
2025-04-17T18:12:44.017513Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2
2025-04-17T18:12:44.017913Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7600:5238]], StatType[ 2 ], StatRequestsCount[ 1 ]
2025-04-17T18:12:44.020698Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ]
2025-04-17T18:12:44.020753Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ]
2025-04-17T18:12:44.021370Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ]
2025-04-17T18:12:44.021423Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ]
2025-04-17T18:12:44.021472Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ]
2025-04-17T18:12:44.023718Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ]
2025-04-17T18:12:44.024028Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3
>> KqpQueryPerf::MultiRead-QueryService
>> KqpQueryPerf::UpdateOn+QueryService+UseSink
>> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService
>> KqpQueryPerf::IndexReplace+QueryService-UseSink
>> KqpSinkTx::DeferredEffects [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD]
Test command err: Trying to start YDB, gRPC: 28579, MsgBus: 30761
2025-04-17T18:12:36.677439Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346661613025216:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:36.677594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c0/r3tmp/tmpEmP50f/pdisk_1.dat
2025-04-17T18:12:36.929526Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28579, node 1
2025-04-17T18:12:36.979381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:12:36.979405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:12:36.979428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:36.979514Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:12:37.035465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:12:37.035582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:12:37.037322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:30761
TClient is connected to server localhost:30761
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:12:37.394740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:37.423900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:37.514145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:37.623684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:37.701104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:38.805129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346670202961596:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:38.805219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:39.064338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.089084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.112637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.136069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.159670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.186226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.230807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.275438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346674497929548:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:39.275517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:39.275664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346674497929553:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:39.278769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:12:39.288484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346674497929555:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:12:39.367490Z node 1 :TX_PROXY ERROR: Actor# [1:7494346674497929611:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:12:40.445152Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346678792897305:2522], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate
/lib/yql/aggregate.yqls:648:18: Error: At function: AggregationTraits
/lib/yql/aggregate.yqls:60:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id
2025-04-17T18:12:40.445323Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjM1OWU4MzctMzE4ZjAwOWQtYzE5YmNiZWUtYjE4ZjNhYTE=, ActorId: [1:7494346678792897239:2511], ActorState: ExecuteState, TraceId: 01js2f7wv4c6n16vrggvrhd5rz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
Trying to start YDB, gRPC: 26571, MsgBus: 64211
2025-04-17T18:12:41.195112Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346680100374834:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:41.195203Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c0/r3tmp/tmphiuOi2/pdisk_1.dat
2025-04-17T18:12:41.271839Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26571, node 2
2025-04-17T18:12:41.321855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:12:41.321929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:12:41.323460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:12:41.324045Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:12:41.324069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:12:41.324077Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:41.324190Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64211
TClient is connected to server localhost:64211
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:12:41.699130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:41.714309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
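For context, the RunDataQuery statement that the statistics aggregator repeats in the traversal log earlier in this section is an ordinary parameterized YQL data query against the hidden `.metadata/_statistics` table. A minimal standalone sketch; the parameters are declared exactly as in the logged query, and their values (not shown here) would come from the path being traversed:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    -- Drops any previously collected statistics rows for one table; the
    -- ($owner_id, $local_path_id) pair corresponds to the
    -- [OwnerId: ..., LocalPathId: ...] pairs printed by TTxFinishTraversal.
    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;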
2025-04-17T18:12:41.777708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:41.879958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:41.931295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:43.648957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346688690311195:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:43.649032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:43.676797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:12:43.699516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:12:43.726496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:12:43.749916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:12:43.773010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:12:43.799133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:12:43.822437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:12:43.861877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346688690311848:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:43.861931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:43.861998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346688690311853:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:43.864422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:12:43.870390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346688690311855:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:12:43.951169Z node 2 :TX_PROXY ERROR: Actor# [2:7494346688690311910:3533] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpYql::PgIntPrimaryKey [GOOD]
>> KqpScripting::SecondaryIndexes [GOOD]
>> KqpYql::EvaluateExpr3 [GOOD]
>> KqpYql::InsertCV-useSink [GOOD]
>> KqpQueryPerf::Delete+QueryService-UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD]
Test command err: Trying to start YDB, gRPC: 6455, MsgBus: 15086
2025-04-17T18:12:12.009007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346556509901506:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:12.009087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001774/r3tmp/tmpEnaXi5/pdisk_1.dat
2025-04-17T18:12:12.252493Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6455, node 1
2025-04-17T18:12:12.311037Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:12:12.311058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:12:12.311063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:12.311173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:12:12.360214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:12:12.360310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:12:12.361951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:15086
TClient is connected to server localhost:15086
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:12:12.662539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
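The recurring NOT_FOUND warnings, the "Scheduled retry for error" on the doublechecking transaction, and the TX_PROXY "path exist, request accepts it" message above are one benign sequence: on first use the workload service finds no default resource pool, creates it, and a concurrent creation attempt is accepted as already done. Assuming current YDB YQL syntax for resource pools, a sketch of creating such a pool explicitly; the WITH options and their values are illustrative and do not come from this log:

    -- Hypothetical example: materialize the pool the tests race to create at
    -- .metadata/workload_manager/pools/default (option values are illustrative).
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );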
2025-04-17T18:12:13.806018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346560804869463:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:13.806139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346560804869444:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:13.806239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:13.809256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:12:13.815849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346560804869473:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-17T18:12:13.916194Z node 1 :TX_PROXY ERROR: Actor# [1:7494346560804869524:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:12:14.116447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T18:12:14.209692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T18:12:14.806372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:12:17.009284Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346556509901506:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:17.009333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:12:27.252441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T18:12:27.252489Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:12:30.062044Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346633819323646:3272], TxId: 281474976710679, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2f7jqafg2cfdfh08qvqch0. SessionId : ydb://session/3?node_id=1&id=ZDQ5NjI5ZGUtMzBiZTZjYmMtYzAxOWY2OWQtYjc4MGMwOWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1744913535624/18446744073709551615 shard 72075186224037889 with lowWatermark v1744913535820/18446744073709551615 (node# 1 state# Ready) } }
2025-04-17T18:12:30.062722Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346633819323646:3272], TxId: 281474976710679, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2f7jqafg2cfdfh08qvqch0. SessionId : ydb://session/3?node_id=1&id=ZDQ5NjI5ZGUtMzBiZTZjYmMtYzAxOWY2OWQtYjc4MGMwOWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1744913535624/18446744073709551615 shard 72075186224037889 with lowWatermark v1744913535820/18446744073709551615 (node# 1 state# Ready) } }.
2025-04-17T18:12:30.063182Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346633819323649:3274], TxId: 281474976710679, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDQ5NjI5ZGUtMzBiZTZjYmMtYzAxOWY2OWQtYjc4MGMwOWE=. TraceId : 01js2f7jqafg2cfdfh08qvqch0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494346633819323642:2966], status: ABORTED, reason: {
: Error: Terminate execution }
2025-04-17T18:12:30.063585Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDQ5NjI5ZGUtMzBiZTZjYmMtYzAxOWY2OWQtYjc4MGMwOWE=, ActorId: [1:7494346569394812442:2966], ActorState: ExecuteState, TraceId: 01js2f7jqafg2cfdfh08qvqch0, Create QueryResponse for error on request, msg:
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
Trying to start YDB, gRPC: 22762, MsgBus: 6168
2025-04-17T18:12:35.866309Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346653634292849:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:35.866384Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001774/r3tmp/tmpkEGNoL/pdisk_1.dat
2025-04-17T18:12:35.970703Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:12:35.998758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:12:35.998851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 22762, node 2
2025-04-17T18:12:36.004081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:12:36.027692Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:12:36.027717Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:12:36.027725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:36.027843Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6168
TClient is connected to server localhost:6168
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:12:36.338687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:38.279411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346666519195378:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:38.279483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346666519195394:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:38.279516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:38.282728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T18:12:38.289162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346666519195407:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T18:12:38.377197Z node 2 :TX_PROXY ERROR: Actor# [2:7494346666519195458:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:12:38.419160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-04-17T18:12:38.449630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-04-17T18:12:39.307977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
WAIT_INDEXATION: 0
2025-04-17T18:12:40.866465Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346653634292849:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:40.866532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD]
Test command err: Trying to start YDB, gRPC: 23111, MsgBus: 3348
2025-04-17T18:12:38.526968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346668281191623:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:38.527103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a8/r3tmp/tmpldwzLp/pdisk_1.dat
2025-04-17T18:12:38.818403Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 23111, node 1
2025-04-17T18:12:38.880645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:12:38.880673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:12:38.880680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:38.880833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:12:38.920734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:12:38.920844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:12:38.922522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:3348
TClient is connected to server localhost:3348
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:12:39.288918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:39.303939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:39.420203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:39.531286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:39.603927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:40.822313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346676871127990:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:40.822430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:41.038957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:12:41.061365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:12:41.081512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:12:41.145626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:12:41.167895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:12:41.194288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:12:41.222041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:12:41.295724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346681166095950:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:41.295815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:41.295919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346681166095955:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:41.298823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:12:41.305849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346681166095957:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:12:41.359619Z node 1 :TX_PROXY ERROR: Actor# [1:7494346681166096010:3539] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr
Trying to start YDB, gRPC: 61978, MsgBus: 31177
2025-04-17T18:12:42.788130Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346686191353329:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:42.788187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a8/r3tmp/tmpNgYnPE/pdisk_1.dat
2025-04-17T18:12:42.862972Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 61978, node 2
2025-04-17T18:12:42.913193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:12:42.913255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:12:42.914933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:12:42.919757Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:12:42.919776Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:12:42.919781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:42.919872Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:31177
TClient is connected to server localhost:31177
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:12:43.271270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:12:45.221799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346699076255872:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:45.221878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:45.239768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:12:45.276292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346699076255972:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:45.276365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:45.276376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346699076255977:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:12:45.279860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:12:45.292404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346699076255979:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-17T18:12:45.371967Z node 2 :TX_PROXY ERROR: Actor# [2:7494346699076256030:2391] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD]
Test command err: Trying to start YDB, gRPC: 18312, MsgBus: 16168
2025-04-17T18:12:35.963412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346655652730392:2062];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:12:35.963516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c2/r3tmp/tmptxar8I/pdisk_1.dat
2025-04-17T18:12:36.239721Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18312, node 1
2025-04-17T18:12:36.309165Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:12:36.309189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:12:36.309201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:12:36.309357Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:12:36.338971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:12:36.339048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:12:36.340879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:16168
TClient is connected to server localhost:16168
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:12:36.729791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
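KqpScripting::SecondaryIndexes, whose setup begins above, exercises tables with global secondary indexes. A minimal sketch of the kind of schema such a test works against; the table, column, and index names here are invented for illustration and do not appear in this log:

    -- Hypothetical schema for a secondary-index test (names are illustrative).
    CREATE TABLE `/Root/TestTable` (
        Key Uint64,
        Fk Uint64,
        Value String,
        PRIMARY KEY (Key),
        INDEX ByFk GLOBAL ON (Fk)  -- global secondary index on the Fk column
    );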
2025-04-17T18:12:36.753780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.887539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:37.007317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:37.058258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.165713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346668537634052:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.165825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.501889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.526887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.552184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.576408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.599389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.647547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.712523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:38.788182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346668537634722:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.788269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.788339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346668537634727:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:38.791067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:38.799454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346668537634729:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:38.886898Z node 1 :TX_PROXY ERROR: Actor# [1:7494346668537634786:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:39.681080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.082283Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560124, txId: 281474976715674] shutting down 2025-04-17T18:12:40.111756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.148203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.155176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.163379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.207405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.216157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.534591Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560579, txId: 281474976715683] shutting down 2025-04-17T18:12:40.564920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.587405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.832473Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560873, txId: 281474976715688] shutting down Trying to start YDB, gRPC: 7301, MsgBus: 26214 2025-04-17T18:12:41.422666Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346681361642375:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:41.422765Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c2/r3tmp/tmpoJJJsN/pdisk_1.dat 
2025-04-17T18:12:41.515595Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7301, node 2 2025-04-17T18:12:41.547470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:41.547534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:41.549631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:41.568192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:41.568210Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:41.568215Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:41.568305Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26214 TClient is connected to server localhost:26214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:41.889876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.898206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.941303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.114957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.165482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:43.512075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346689951578751:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.512150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.553806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.576712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.597805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.621889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.646021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.712766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.744491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.818208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346689951579418:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.818282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.818349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346689951579423:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.821465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:43.829871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346689951579425:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:43.886570Z node 2 :TX_PROXY ERROR: Actor# [2:7494346689951579478:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:44.603788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:44.660207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:44.690228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.422911Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346681361642375:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:46.423005Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Update-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD] Test command err: Trying to start YDB, gRPC: 15437, MsgBus: 61136 2025-04-17T18:12:37.747331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346661970096679:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:37.747386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013b7/r3tmp/tmpfKiiGR/pdisk_1.dat 2025-04-17T18:12:37.979839Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15437, node 1 2025-04-17T18:12:38.046387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:38.046413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:38.046420Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:38.046567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:38.091906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:38.091971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:38.093857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61136 TClient is connected to server localhost:61136 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:38.439309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.460514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.581684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.695531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:38.752215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.249007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346674855000346:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.249130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.558246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.581480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.603504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.626598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.673133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.696269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.721341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.793541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346674855001008:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.793599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.793636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346674855001013:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.796394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:40.804308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346674855001015:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:40.906156Z node 1 :TX_PROXY ERROR: Actor# [1:7494346674855001071:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64287, MsgBus: 27680 2025-04-17T18:12:42.465588Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346686966740147:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:42.465683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013b7/r3tmp/tmpXqvrfh/pdisk_1.dat 2025-04-17T18:12:42.560575Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64287, node 2 2025-04-17T18:12:42.603082Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:42.603205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:42.604738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:42.617798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:42.617822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:42.617827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:42.617922Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27680 TClient is connected to server localhost:27680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:42.979730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.995842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:43.040739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:43.165886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:43.237080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.966162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346695556676512:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:44.966284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:44.995154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.021768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.051982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.078289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.104116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.171188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.203421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.287718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346699851644472:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:45.287807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:45.287849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346699851644477:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:45.291104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:45.301416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346699851644479:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:45.374876Z node 2 :TX_PROXY ERROR: Actor# [2:7494346699851644535:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCV-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16501, MsgBus: 65314 2025-04-17T18:12:38.305963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346668045221930:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:38.306113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013b2/r3tmp/tmpot9tx3/pdisk_1.dat 2025-04-17T18:12:38.587860Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16501, node 1 2025-04-17T18:12:38.648850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:38.648873Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:38.648895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:38.649054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:38.661962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:38.662070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:38.663803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65314 TClient is connected to server localhost:65314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:39.070771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:39.088813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.192106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.316354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.387823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.745712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346676635158316:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.745815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:40.967729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:40.990343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.012529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.034622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.070289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.094408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.119375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.193162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346680930126275:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.193244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.193265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346680930126280:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.196175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:41.203855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346680930126282:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:41.304110Z node 1 :TX_PROXY ERROR: Actor# [1:7494346680930126339:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:42.045084Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-17T18:12:42.052810Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:12:42.052976Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:12:42.053140Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346685225093992:2516], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7494346680930126674:2516]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7494346685225093992:2516].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:12:42.053638Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346685225093985:2516], SessionActorId: [1:7494346680930126674:2516], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7494346680930126674:2516]. isRollback=0 2025-04-17T18:12:42.053860Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzNjYTFjZTMtNDA1YjRkZDEtM2NiYmZlNDktN2MyYWYwZmI=, ActorId: [1:7494346680930126674:2516], ActorState: ExecuteState, TraceId: 01js2f7yba02khh7ndm8kkcnb3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7494346685225093986:2516] from: [1:7494346685225093985:2516] 2025-04-17T18:12:42.053949Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346685225093986:2516] TxId: 281474976710672. Ctx: { TraceId: 01js2f7yba02khh7ndm8kkcnb3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNjYTFjZTMtNDA1YjRkZDEtM2NiYmZlNDktN2MyYWYwZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:12:42.054149Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzNjYTFjZTMtNDA1YjRkZDEtM2NiYmZlNDktN2MyYWYwZmI=, ActorId: [1:7494346680930126674:2516], ActorState: ExecuteState, TraceId: 01js2f7yba02khh7ndm8kkcnb3, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 65337, MsgBus: 19644 2025-04-17T18:12:42.512278Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346687206748347:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:42.512360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013b2/r3tmp/tmp4iQHfY/pdisk_1.dat 2025-04-17T18:12:42.583981Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65337, node 2 2025-04-17T18:12:42.639006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:42.639068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:42.640679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:42.648595Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:42.648616Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:42.648623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:42.648732Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19644 TClient is connected to server localhost:19644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:42.932222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.949186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.988248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:43.102577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:43.170371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.848513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346695796684703:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:44.848591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:44.891855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:44.915626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:44.948013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:44.975384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.003777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.071792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.103283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.169875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346700091652659:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:45.169937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346700091652664:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:45.169960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:45.172700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:45.182899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346700091652666:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:45.243786Z node 2 :TX_PROXY ERROR: Actor# [2:7494346700091652719:3536] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:46.295039Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346704386620373:2516], TxId: 281474976715673, task: 1. Ctx: { TraceId : 01js2f82cfdh1c5zw103ss68m8. SessionId : ydb://session/3?node_id=2&id=OGVkODcwZWYtMjJlZWQzZjctNWE3YmE1NzktMmNkZDU3MjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-17T18:12:46.295250Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346704386620375:2517], TxId: 281474976715673, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGVkODcwZWYtMjJlZWQzZjctNWE3YmE1NzktMmNkZDU3MjI=. TraceId : 01js2f82cfdh1c5zw103ss68m8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346704386620370:2507], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:46.295481Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGVkODcwZWYtMjJlZWQzZjctNWE3YmE1NzktMmNkZDU3MjI=, ActorId: [2:7494346704386620321:2507], ActorState: ExecuteState, TraceId: 01js2f82cfdh1c5zw103ss68m8, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce >> KqpScripting::Pure [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> KqpQueryPerf::Insert-QueryService-UseSink >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> KqpYql::CreateUseTable [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService-UseSink >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 12832, MsgBus: 27212 2025-04-17T18:12:38.727844Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346669238890938:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:38.727925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a7/r3tmp/tmph8mQRR/pdisk_1.dat 2025-04-17T18:12:39.013185Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12832, node 1 2025-04-17T18:12:39.078886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:39.078910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:39.078915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:39.078996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:39.106837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:39.106916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:39.108653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27212 TClient is connected to server localhost:27212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:39.483131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.506653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.624030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:39.739392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:39.811568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.334939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346682123794599:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.335040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.594556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.618374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.641514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.665406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.690086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.755895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.823235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.865357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346682123795268:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.865433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.865447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346682123795273:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.868394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:41.876768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346682123795275:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:41.962900Z node 1 :TX_PROXY ERROR: Actor# [1:7494346682123795329:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:43.066856Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563092, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 24278, MsgBus: 2783 2025-04-17T18:12:43.617479Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346689181772000:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:43.617590Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a7/r3tmp/tmpTuKo1u/pdisk_1.dat 2025-04-17T18:12:43.694836Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24278, node 2 2025-04-17T18:12:43.749030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:43.749093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:43.750136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:43.755194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:43.755238Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:43.755251Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:43.755385Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2783 TClient is connected to server localhost:2783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:44.070872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
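The WaitRootIsUp 'Root' lines above come from the test client polling the scheme shard with Ls until the root path answers with StatusCode: SUCCESS. A minimal standalone sketch of that polling loop, assuming a hypothetical probe callback in place of the real TClient::Ls (this is not the actual YDB test-client API):

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Poll a probe until it reports the root path as up or the deadline expires,
// mirroring the "WaitRootIsUp 'Root'..." / "WaitRootIsUp 'Root' success."
// pair in the log. The probe is a hypothetical stand-in for TClient::Ls.
bool WaitRootIsUp(const std::function<bool()>& lsRootSucceeded,
                  std::chrono::milliseconds timeout,
                  std::chrono::milliseconds step = std::chrono::milliseconds{100}) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    std::cout << "WaitRootIsUp 'Root'..." << std::endl;
    while (std::chrono::steady_clock::now() < deadline) {
        if (lsRootSucceeded()) {
            std::cout << "WaitRootIsUp 'Root' success." << std::endl;
            return true;  // scheme shard answered SUCCESS for /Root
        }
        std::this_thread::sleep_for(step);  // back off before the next Ls
    }
    return false;  // root never came up within the timeout
}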
2025-04-17T18:12:44.077193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.143215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.264028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.335839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.239205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346702066675653:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.239274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.309513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.334002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.356048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.379686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.406467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.433287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.499390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.571819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346702066676321:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.571903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.572132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346702066676326:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.575056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:46.583204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346702066676328:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:46.676346Z node 2 :TX_PROXY ERROR: Actor# [2:7494346702066676386:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::UpdateOn+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 3077, MsgBus: 29965 2025-04-17T18:12:39.056401Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346671979030746:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:39.056561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a6/r3tmp/tmpaOu28E/pdisk_1.dat 2025-04-17T18:12:39.326888Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3077, node 1 2025-04-17T18:12:39.407385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:39.407408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:39.407415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:39.407597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:39.426342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:39.426433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:39.428219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29965 TClient is connected to server localhost:29965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:39.834825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
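The sequence just above — repeated "Resource pool default not found" NOT_FOUND warnings, an ESchemeOpCreateResourcePool proposal, a "completed, doublechecking" retry, and finally a TX_PROXY "path exist, request accepts it" message — is the default resource pool being created lazily on first use, with concurrent creators racing and the loser accepting the already-existing path as success. A generic sketch of that idempotent create-then-doublecheck loop, with hypothetical fetch/create callbacks standing in for the real WorkloadService actors:

#include <chrono>
#include <functional>
#include <thread>

enum class EStatus { Ok, NotFound, AlreadyExists, Error };

// Ensure a pool exists: fetch, create on NOT_FOUND, and treat a concurrent
// creator winning the race as success to be confirmed by the next fetch.
bool EnsureDefaultPool(const std::function<EStatus()>& fetchPool,
                       const std::function<EStatus()>& createPool,
                       int maxAttempts = 5) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        const EStatus fetched = fetchPool();
        if (fetched == EStatus::Ok)
            return true;                          // pool is visible, done
        if (fetched == EStatus::NotFound) {
            const EStatus created = createPool();
            // Ok and AlreadyExists ("path exist, request accepts it") both
            // mean some creator won the race; the next fetch doublechecks.
            if (created == EStatus::Ok || created == EStatus::AlreadyExists)
                continue;
        }
        // transient failure: scheduled retry with growing backoff
        std::this_thread::sleep_for(std::chrono::milliseconds{100 << attempt});
    }
    return false;
}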
2025-04-17T18:12:39.857323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:39.980094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.099300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.150219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.330019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346680568967123:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.330120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.552736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.574753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.596387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.620294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.640891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.669597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.733735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:41.807376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346680568967790:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.807458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.807550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346680568967795:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.810027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:41.817383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346680568967797:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:41.897412Z node 1 :TX_PROXY ERROR: Actor# [1:7494346680568967850:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:43.343583Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563379, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 16599, MsgBus: 24503 2025-04-17T18:12:43.914798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346689172208110:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:43.914844Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a6/r3tmp/tmpYQNkoN/pdisk_1.dat 2025-04-17T18:12:44.001812Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16599, node 2 2025-04-17T18:12:44.043907Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.043935Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.043943Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.044035Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:44.045934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.045994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.047628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24503 TClient is connected to server localhost:24503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:44.373832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:44.389345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.466868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.604410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:44.659072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.754007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346702057111779:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.754125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.780312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.809498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.836677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.864639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.895750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.927307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.992372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.036290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346706352079741:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.036344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.036420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346706352079746:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.038608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.044688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346706352079748:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:47.146272Z node 2 :TX_PROXY ERROR: Actor# [2:7494346706352079803:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::ScanQueryTruncate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9431, MsgBus: 61689 2025-04-17T18:12:44.359847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346695765950669:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.359983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016a1/r3tmp/tmpYaFc1T/pdisk_1.dat 2025-04-17T18:12:44.643118Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9431, node 1 2025-04-17T18:12:44.699765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.699873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.701722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:44.708429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.708454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.708460Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.708555Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61689 TClient is connected to server localhost:61689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.235074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
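Each server start above walks the same NET_CLASSIFIER fallback chain: the distributable config is empty, the file fallback is "(empty maybe)" too, and the classifier ends with "got bad distributable configuration" while the server proceeds anyway. A small sketch of that try-each-source-in-order pattern, with hypothetical loader callbacks standing in for the real config sources:

#include <functional>
#include <optional>
#include <string>
#include <vector>

// Try each configuration source in order (distributable config, then file,
// ...) and keep the first that yields a value; an empty result means every
// fallback failed, matching "got bad distributable configuration".
std::optional<std::string> LoadFirstAvailable(
        const std::vector<std::function<std::optional<std::string>()>>& sources) {
    for (const auto& load : sources) {
        if (auto cfg = load(); cfg.has_value()) {
            return cfg;  // first usable source wins
        }
    }
    return std::nullopt;  // no source produced a config
}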
2025-04-17T18:12:45.250722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.379932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.514428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.589080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.874323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346704355887036:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.874410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.156502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.184375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.212032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.240008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.266733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.300807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.328056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.402880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346708650854997:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.402935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.402981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346708650855003:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.406210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.429453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346708650855005:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.483418Z node 1 :TX_PROXY ERROR: Actor# [1:7494346708650855059:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2025-04-17T18:10:14.996878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:14.996974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:14.997064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c51/r3tmp/tmpJfj63u/pdisk_1.dat 2025-04-17T18:10:15.396199Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17463, node 1 2025-04-17T18:10:15.922894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:15.922943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:15.922983Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:15.924939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:15.932365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:16.025765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:16.026139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:16.045204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13780 2025-04-17T18:10:16.632710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.845352Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:19.890027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.890153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.928136Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:19.930414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.164067Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.166467Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.167059Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.167216Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.167314Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.167556Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.167653Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.167718Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.167789Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:20.339466Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:20.339576Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:20.352877Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:20.511916Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:20.554284Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:20.554371Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:20.586640Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:20.587652Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:20.587812Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:20.587864Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:20.587910Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:20.587958Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:20.588000Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:20.588040Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:20.588373Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:20.617575Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.617727Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1867:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:20.627220Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-04-17T18:10:20.634780Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2616] 2025-04-17T18:10:20.635056Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1903:2616], schemeshard id = 72075186224037897 2025-04-17T18:10:20.648516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:20.705755Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:20.705804Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:20.705882Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:20.715918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:20.723355Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:20.723517Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:20.924500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:21.082127Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:21.150375Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:22.043352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.043504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:22.114213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:22.455216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:22.455444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:22.455893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:22.456049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:22.456190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:22.456335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:22.456457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:22.456574Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:22.456700Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:22.456844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:22.457067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:22.457190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:22.506414Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2396:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:22.506492Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2396:2889];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-04-17T18:12:47.301750Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:47.301802Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:47.301928Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:12:47.302944Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:47.303011Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:47.304655Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:47.400483Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:47.400697Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:47.401418Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8650:6547], server id = [2:8655:6552], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:47.401530Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8650:6547], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.401783Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8651:6548], server id = [2:8656:6553], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:47.401817Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8651:6548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.402721Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8652:6549], server id = [2:8657:6554], tablet id = 72075186224037901, status = OK 2025-04-17T18:12:47.402766Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8652:6549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.403572Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8653:6550], server id = [2:8658:6555], tablet id = 72075186224037902, status = OK 2025-04-17T18:12:47.403643Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8653:6550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.405290Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8654:6551], server id = [2:8660:6557], tablet id = 72075186224037903, status = OK 2025-04-17T18:12:47.405333Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8654:6551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.405604Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:47.406113Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:47.406588Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:12:47.406890Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8650:6547], server id = [2:8655:6552], tablet id = 72075186224037899 2025-04-17T18:12:47.406920Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.407192Z node 
2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:12:47.407330Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8651:6548], server id = [2:8656:6553], tablet id = 72075186224037900 2025-04-17T18:12:47.407345Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.407509Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8652:6549], server id = [2:8657:6554], tablet id = 72075186224037901 2025-04-17T18:12:47.407530Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.407796Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8664:6561], server id = [2:8668:6565], tablet id = 72075186224037904, status = OK 2025-04-17T18:12:47.407866Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8664:6561], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.408081Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:12:47.408744Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8653:6550], server id = [2:8658:6555], tablet id = 72075186224037902 2025-04-17T18:12:47.408763Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.408859Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8665:6562], server id = [2:8669:6566], tablet id = 72075186224037905, status = OK 2025-04-17T18:12:47.408892Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8665:6562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.409128Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8667:6564], server id = [2:8671:6568], tablet id = 72075186224037906, status = OK 2025-04-17T18:12:47.409157Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8667:6564], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.409589Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8670:6567], server id = [2:8672:6569], tablet id = 72075186224037907, status = OK 2025-04-17T18:12:47.409622Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8670:6567], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.409780Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8654:6551], server id = [2:8660:6557], tablet id = 72075186224037903 2025-04-17T18:12:47.409801Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.410525Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:12:47.410730Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8673:6570], server id = [2:8675:6572], tablet id = 72075186224037908, status = OK 2025-04-17T18:12:47.410764Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8673:6570], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:47.411023Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:12:47.411226Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8664:6561], server id = [2:8668:6565], tablet id = 72075186224037904 2025-04-17T18:12:47.411246Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.411673Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:12:47.411939Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:8665:6562], server id = [2:8669:6566], tablet id = 72075186224037905 2025-04-17T18:12:47.411960Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.412085Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:12:47.412213Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8667:6564], server id = [2:8671:6568], tablet id = 72075186224037906 2025-04-17T18:12:47.412235Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.412310Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:12:47.412337Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:47.412537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:47.412710Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:47.412886Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:47.414998Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8670:6567], server id = [2:8672:6569], tablet id = 72075186224037907 2025-04-17T18:12:47.415026Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.415369Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8673:6570], server id = [2:8675:6572], tablet id = 72075186224037908 2025-04-17T18:12:47.415396Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:47.415617Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:47.444043Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8696:6593]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:47.444384Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:47.444427Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8696:6593], StatRequests.size() = 1 2025-04-17T18:12:47.568663Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWUyZGE3MTAtZTkzMmZkZjQtMmRkNWJhZDEtMWM3M2Q3ZjA=, TxId: 2025-04-17T18:12:47.568739Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWUyZGE3MTAtZTkzMmZkZjQtMmRkNWJhZDEtMWM3M2Q3ZjA=, TxId: 2025-04-17T18:12:47.569299Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:47.582082Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8711:6599] 2025-04-17T18:12:47.582219Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6515], server id = [2:8711:6599], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:47.582356Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8711:6599], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-17T18:12:47.582426Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8712:6600] 
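The TQueryBase actor above persists the aggregated column statistics with a parameterized YQL data query. In the captured log the element types of the two list parameters are missing (likely lost in capture); the sketch below restores them as List<Uint32> and List<String>, which is an assumption not confirmed by the log, and keeps the rest of the statement verbatim:

#include <cstdint>
#include <string>
#include <vector>

// Parameter set for the statistics upsert seen in the log. Field names
// mirror the DECLAREd query parameters; the struct itself is illustrative.
struct TStatisticsRow {
    uint64_t OwnerId;                    // $owner_id
    uint64_t LocalPathId;                // $local_path_id
    uint32_t StatType;                   // $stat_type
    std::vector<uint32_t> ColumnTags;    // $column_tags (assumed List<Uint32>)
    std::vector<std::string> Data;       // $data (assumed List<String>)
};

// The query text as the TQueryBase actor logged it, with the two list
// element types restored under the assumption stated above.
const char* StatisticsUpsertQuery() {
    return
        "DECLARE $owner_id AS Uint64; "
        "DECLARE $local_path_id AS Uint64; "
        "DECLARE $stat_type AS Uint32; "
        "DECLARE $column_tags AS List<Uint32>; "
        "DECLARE $data AS List<String>; "
        "UPSERT INTO `.metadata/_statistics` "
        "(owner_id, local_path_id, stat_type, column_tag, data) VALUES "
        "($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), "
        "($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);";
}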
2025-04-17T18:12:47.582523Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8712:6600], schemeshard id = 72075186224037897 2025-04-17T18:12:47.595611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:47.595669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:47.693287Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8715:6603]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:47.693696Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:47.693761Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:12:47.697020Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:47.697106Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:12:47.697164Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:12:47.702690Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3789, MsgBus: 29310 2025-04-17T18:12:44.187908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346695097462981:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.188013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016ba/r3tmp/tmpw1tJEo/pdisk_1.dat 2025-04-17T18:12:44.505979Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3789, node 1 2025-04-17T18:12:44.585632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.585771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.587844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:44.666427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.666453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.666466Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.666581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29310 TClient is connected to server localhost:29310 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.202570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.241097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.370728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.500693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.565017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.825690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346703687399360:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.825765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.097331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.123750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.147180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.199717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.224897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.251782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.279430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.354631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346707982367318:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.354701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.354714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346707982367323:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.357542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.365030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346707982367325:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.446585Z node 1 :TX_PROXY ERROR: Actor# [1:7494346707982367381:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21646, MsgBus: 10163 2025-04-17T18:12:44.669617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346691989906799:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.669670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00169b/r3tmp/tmp04f0OT/pdisk_1.dat 2025-04-17T18:12:44.959556Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21646, node 1 2025-04-17T18:12:45.042220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:45.042242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:45.042252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:45.042354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:45.058683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:45.058849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:45.060450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10163 TClient is connected to server localhost:10163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.514043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:45.533298Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.541585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:45.676169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.813377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.868307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.204992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346704874810468:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.205129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.448927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.475206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.499632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.523941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.545736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.585388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.608680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.684052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346704874811132:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.684114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.684135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346704874811137:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.686352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.693424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346704874811139:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.762087Z node 1 :TX_PROXY ERROR: Actor# [1:7494346704874811194:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::RangeRead-QueryService [GOOD] >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 20669, MsgBus: 16245 2025-04-17T18:12:41.023391Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346682277308344:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:41.023476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001394/r3tmp/tmpsTZ50I/pdisk_1.dat 2025-04-17T18:12:41.253570Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20669, node 1 2025-04-17T18:12:41.311716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:41.311732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:41.311739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:41.311811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:41.363633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:41.363735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:41.365351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16245 TClient is connected to server localhost:16245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:41.701070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:41.720420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.841181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.949841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.020718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:43.232512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346690867244724:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.232638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.419654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.443651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.467791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.491969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.513900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.541170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.580400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.617318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346690867245380:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.617382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.617426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346690867245385:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.620457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:43.627580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346690867245387:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:43.683201Z node 1 :TX_PROXY ERROR: Actor# [1:7494346690867245441:3536] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 20368, MsgBus: 22288 2025-04-17T18:12:45.085165Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346696911860219:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:45.085238Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001394/r3tmp/tmpyvx5zV/pdisk_1.dat 2025-04-17T18:12:45.177622Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20368, node 2 2025-04-17T18:12:45.217127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:45.217200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:45.218647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:45.235051Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:45.235071Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:45.235081Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:45.235172Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22288 TClient is connected to server localhost:22288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.607274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.613562Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:12:45.620849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:45.694778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.810403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.889195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.510350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346705501796591:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.510456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.555788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.577900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.600053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.621868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.643820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.672263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.702596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.761850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346705501797244:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.761935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.761963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346705501797249:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.764667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.771965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346705501797251:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:47.859122Z node 2 :TX_PROXY ERROR: Actor# [2:7494346705501797307:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:48.586541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.743570Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913568783, txId: 281474976715674] shutting down >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 4724, MsgBus: 8384 2025-04-17T18:12:40.732432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346675697819347:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:40.732574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001396/r3tmp/tmpt0gJ0C/pdisk_1.dat 2025-04-17T18:12:41.000677Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4724, node 1 2025-04-17T18:12:41.056458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:41.056531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:41.056544Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:41.056672Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:41.091777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:41.091899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:41.093800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8384 TClient is connected to server localhost:8384 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:41.459844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.481382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.566866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.696874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.773895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:43.010228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346688582723015:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.010332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.224129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.246336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.268099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.289290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.312525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.337435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.400009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.441462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346688582723673:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.441536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346688582723678:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.441576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.444766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:43.453511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346688582723680:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:43.549331Z node 1 :TX_PROXY ERROR: Actor# [1:7494346688582723735:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:44.460613Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346692877691386:2519], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-17T18:12:44.460887Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGIzNTM1MDUtZjY1YjZjMjQtMzkxNWU2NGMtNTY2NDU4ZTM=, ActorId: [1:7494346692877691384:2518], ActorState: ExecuteState, TraceId: 01js2f80rt6xv9p04k2jvkszvm, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-17T18:12:44.551445Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494346692877691430:2532], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 2025-04-17T18:12:44.551601Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmZhZTkxOWEtZDVhYTFjNjQtOGIwYjk4Y2EtNTYzNGZlYjA=, ActorId: [1:7494346692877691428:2531], ActorState: ExecuteState, TraceId: 01js2f80w29qst2jwwzeyr111m, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 2235, MsgBus: 11285 2025-04-17T18:12:45.292306Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346698255473192:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:45.292375Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001396/r3tmp/tmpoBSabS/pdisk_1.dat 2025-04-17T18:12:45.407143Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:45.426028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:45.426141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:45.428112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2235, node 2 2025-04-17T18:12:45.468239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:45.468267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:45.468275Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:45.468386Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11285 TClient is connected to server localhost:11285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.875106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.891646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.965570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:46.096895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.169392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.991858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346706845409570:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.991937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.055828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.081589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.107078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.132107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.159436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.189822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.257282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.312154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346711140377533:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.312250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346711140377538:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.312289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.315113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:48.323247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346711140377540:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:48.397993Z node 2 :TX_PROXY ERROR: Actor# [2:7494346711140377594:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:49.197497Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913569224, txId: 281474976715672] shutting down >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::Upsert+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32215, MsgBus: 22337 2025-04-17T18:12:44.713518Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346692201604904:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.713624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001696/r3tmp/tmpDvhBdp/pdisk_1.dat 2025-04-17T18:12:44.990632Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32215, node 1 2025-04-17T18:12:45.061887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:45.061941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:45.061952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:45.062081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:45.086664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:45.086769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:45.088404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22337 TClient is connected to server localhost:22337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:45.486432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.512361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.646968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.787352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.860558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.335037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346705086508576:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.335106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.627192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.652380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.675078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.701093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.748146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.773991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.799628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.874908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346705086509240:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.874990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346705086509245:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.874991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.877444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.884113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346705086509247:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.943679Z node 1 :TX_PROXY ERROR: Actor# [1:7494346705086509301:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21240, MsgBus: 8696 2025-04-17T18:12:45.536643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346700312313989:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:45.537899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00168f/r3tmp/tmpYVlAnG/pdisk_1.dat 2025-04-17T18:12:45.846859Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21240, node 1 2025-04-17T18:12:45.909613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:45.909730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:45.909934Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:45.909954Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:45.909963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:45.910094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:45.911940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8696 TClient is connected to server localhost:8696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:46.366893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:46.388313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.496913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.629774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.692626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.179928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713197217650:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.180040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.477363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.500844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.527075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.555272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.577820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.602039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.667973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.731339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713197218314:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.731376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713197218319:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.731434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.734369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:48.741171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346713197218321:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:48.836086Z node 1 :TX_PROXY ERROR: Actor# [1:7494346713197218378:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 30081, MsgBus: 12009 2025-04-17T18:12:45.642335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346698917624452:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:45.642564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00168a/r3tmp/tmpYRfuGJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30081, node 1 2025-04-17T18:12:45.936478Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:12:45.936524Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:12:45.953766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:45.966913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:45.966936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:45.966948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:45.967091Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:46.000293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:46.000397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:46.002055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12009 TClient is connected to server localhost:12009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:46.394415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:46.407817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.517851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.654229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.729664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.175621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346711802528134:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.175756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.412946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.436233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.459757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.482582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.522827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.548816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.576357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.625190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346711802528787:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.625242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.625282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346711802528792:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.628818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:48.638517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346711802528794:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:48.697057Z node 1 :TX_PROXY ERROR: Actor# [1:7494346711802528849:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Delete-QueryService-UseSink >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 64283, MsgBus: 62898 2025-04-17T18:12:39.735196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346671577888466:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:39.735371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00139f/r3tmp/tmp1H6bze/pdisk_1.dat 2025-04-17T18:12:40.052611Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64283, node 1 2025-04-17T18:12:40.093100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:40.093128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:40.093147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:40.093290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:40.104918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:40.105045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:40.106652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62898 TClient is connected to server localhost:62898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:40.480860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:40.508597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.615546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.750757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:40.802735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.894987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346680167824859:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:41.895098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.110030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.131910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.152052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.172539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.191908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.215028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.244058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:42.284273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346684462792809:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.284331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.284347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346684462792814:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.287652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:42.296324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346684462792816:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:42.350616Z node 1 :TX_PROXY ERROR: Actor# [1:7494346684462792870:3536] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:43.073150Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzUxMGQzNTAtOGViODJmNTYtZjNmMDk2ZmUtYjVhNDg5YzA=, ActorId: [1:7494346688757760489:2511], ActorState: ExecuteState, TraceId: 01js2f7zcc91r0r8a4cga0g95h, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.074165Z node 1 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 5 2025-04-17T18:12:43.089331Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTU5NWM5YjktOWJkMWQwOTQtMTEwYmRhZDYtNDZiMTM3NzE=, ActorId: [1:7494346688757760505:2516], ActorState: ExecuteState, TraceId: 01js2f7ze7b4amfg21b728fn9e, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.104293Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzA1ZjliMDUtYTgwYjIxYzgtZDgwNTU3NzYtNzg2ZmVmOWQ=, ActorId: [1:7494346688757760514:2520], ActorState: ExecuteState, TraceId: 01js2f7zep4pgphdprzjjk35jh, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.144263Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWVmYzFjMmEtNzgzYmIxMGYtN2FkYzgzNzctYzNmY2Q3ZmE=, ActorId: [1:7494346688757760526:2524], ActorState: ExecuteState, TraceId: 01js2f7zf51yyzq0kq590rsybd, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.179158Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGZlYWU3MjMtMmMxYjYyOGEtOGQ3NmE5OWItNGViOTkyNjY=, ActorId: [1:7494346688757760542:2532], ActorState: ExecuteState, TraceId: 01js2f7zgd81d8ajan9exketb5, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.215860Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTIzNzg2NTMtNTZhZGI5YjUtOGUwMTNkMGItNzAxYzE3Nzk=, ActorId: [1:7494346688757760571:2539], ActorState: ExecuteState, TraceId: 01js2f7zhg2tnxj53ksb23xeqx, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.262050Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzAxYmJiMzAtNDNkNjA4OWQtM2ZmZTlkMmItZTE1NTkwOTg=, ActorId: [1:7494346688757760586:2545], ActorState: ExecuteState, TraceId: 01js2f7zjp6xz4a49wgrswbpdf, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.286963Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563295, txId: 281474976710673] shutting down 2025-04-17T18:12:43.287211Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563295, txId: 281474976710674] shutting down 2025-04-17T18:12:43.288161Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563295, txId: 281474976710672] shutting down 2025-04-17T18:12:43.313659Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVlOGRkOTEtNDY0MGY0NDgtNjQ5ODgxYTQtMmY4M2E0NjQ=, ActorId: [1:7494346688757760799:2580], ActorState: ExecuteState, TraceId: 01js2f7zm41qv4t5rn4nbr44hg, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.359522Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=OWE5MTM5MzMtZWQ0NDA3MDItNTdkNDY1NTYtZGY5NDg2OTk=, ActorId: [1:7494346688757760845:2587], ActorState: ExecuteState, TraceId: 01js2f7znq388mkpe69za2fn0a, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.408630Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563407, txId: 281474976710679] shutting down 2025-04-17T18:12:43.408864Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODU4NDJjNDMtNDk3Yjk3NDQtYzhmMWI2N2YtYmU2NWE5MzI=, ActorId: [1:7494346688757760898:2594], ActorState: ExecuteState, TraceId: 01js2f7zq50h2tadra1rt4wj9y, Create QueryResponse for error on request, msg: 2025-04-17T18:12:43.412969Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913563407, txId: 281474976710678] shutting down 2025-04-17T18:12:43.447852Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDQyYWQ0NzAtYjVjOWJmMWUtOWU3ZmEzYTctNTA4NzcyOTg=, ActorId: [1:7494346688757761047:2620], ActorState: ExecuteState, TraceId: 01js2f7zrqff63gsgyx6p8j6 ... WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913564513, txId: 281474976710716] shutting down 2025-04-17T18:12:44.536976Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjcwOTcxZmUtYjI1YWQzYmYtYzAwNjYxYzgtNGMzMTE4MmI=, ActorId: [1:7494346693052729985:2898], ActorState: ExecuteState, TraceId: 01js2f80qh3jzf0xkq5xjggfzd, Create QueryResponse for error on request, msg: 2025-04-17T18:12:44.603324Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913564632, txId: 281474976710718] shutting down 2025-04-17T18:12:44.628975Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2QxMTFlMDktNWViMTA5NDMtNTA4NzE1MzQtYjNiNzA3ZjQ=, ActorId: [1:7494346693052730094:2916], ActorState: ExecuteState, TraceId: 01js2f80w07cdps699g9raejw2, Create QueryResponse for error on request, msg: 2025-04-17T18:12:44.698204Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913564730, txId: 281474976710720] shutting down 2025-04-17T18:12:44.723188Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDRmMGE5MjAtZTZiZjUzZTUtYjRmZTk5YWMtMTI0NzBmZDk=, ActorId: [1:7494346693052730202:2934], ActorState: ExecuteState, TraceId: 01js2f80yv8r9xjctmsvjw1f6w, Create QueryResponse for error on request, msg: 2025-04-17T18:12:44.742118Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346671577888466:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.742201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:12:44.811342Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913564842, txId: 281474976710722] shutting down 2025-04-17T18:12:44.811658Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913564842, txId: 281474976710723] shutting down 2025-04-17T18:12:44.913154Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjY2YzdlNDAtNDYwMThmYS0yZDZkNzRiOC1hZTQ2OTY5YQ==, ActorId: [1:7494346693052730476:2977], ActorState: ExecuteState, TraceId: 01js2f814mfcfg19ahaqaxz7ds, Create QueryResponse for error on request, msg: 2025-04-17T18:12:45.016668Z 
node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Y0ZTY2Mi05ZDIxZGI3YS1mMzE0NjRjYy1hMzQ5YTA5ZQ==, ActorId: [1:7494346693052730498:2987], ActorState: ExecuteState, TraceId: 01js2f817r9f1cpe5ze2v0xg2p, Create QueryResponse for error on request, msg: 2025-04-17T18:12:45.022574Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913565052, txId: 281474976710726] shutting down 2025-04-17T18:12:45.082564Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913565115, txId: 281474976710728] shutting down 2025-04-17T18:12:45.121482Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjhiYWM5Yi01MmIzMTM1OC01YjZhYzI2OC0xNjU1YzEyOQ==, ActorId: [1:7494346697347697902:3005], ActorState: ExecuteState, TraceId: 01js2f81aycemned3x5a8th3s0, Create QueryResponse for error on request, msg: 2025-04-17T18:12:45.184991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913565213, txId: 281474976710730] shutting down 2025-04-17T18:12:45.231561Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTZlYWZiMjYtOTdmMGUwMTItNWIwMzQ3OWYtODMxNzJkZGQ=, ActorId: [1:7494346697347698019:3023], ActorState: ExecuteState, TraceId: 01js2f81e900af8hgb29rkragn, Create QueryResponse for error on request, msg: 2025-04-17T18:12:45.323078Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913565353, txId: 281474976710733] shutting down 2025-04-17T18:12:45.323725Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913565353, txId: 281474976710732] shutting down Trying to start YDB, gRPC: 63151, MsgBus: 11594 2025-04-17T18:12:46.052891Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346701010323647:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:46.052981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00139f/r3tmp/tmpAgGOkP/pdisk_1.dat 2025-04-17T18:12:46.142712Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63151, node 2 2025-04-17T18:12:46.191318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:46.191409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:46.193088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:46.197707Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:46.197738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:46.197746Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:46.197858Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11594 TClient is connected to server localhost:11594 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:46.595008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.606038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.678919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.798292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.878305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.560540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346709600260004:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.560612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.587003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.610977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.633008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.657732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.682625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.709918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.776949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.823361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346709600260667:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.823425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.823454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346709600260672:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.826630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:48.835944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346709600260674:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:48.936156Z node 2 :TX_PROXY ERROR: Actor# [2:7494346709600260728:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> KqpQueryPerf::IndexUpsert+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23954, MsgBus: 28517 2025-04-17T18:12:46.333735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346703467658721:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:46.333915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001688/r3tmp/tmpXyJe7u/pdisk_1.dat 2025-04-17T18:12:46.600730Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23954, node 1 2025-04-17T18:12:46.656898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:46.656917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:46.656923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:46.657043Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:46.694129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:46.694271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:46.695352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28517 TClient is connected to server localhost:28517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:47.040769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.068346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.179640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.320817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.382279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.789729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346712057595105:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.789859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.987859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.010953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.035236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.060354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.097810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.121445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.148478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.227237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346716352563062:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.227310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346716352563067:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.227320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.229907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:49.237497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346716352563069:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:49.327780Z node 1 :TX_PROXY ERROR: Actor# [1:7494346716352563125:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpWorkload::KV ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16754, MsgBus: 19561 2025-04-17T18:12:46.410442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346701088328175:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:46.410521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001686/r3tmp/tmpXuFn9w/pdisk_1.dat 2025-04-17T18:12:46.644121Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16754, node 1 2025-04-17T18:12:46.714423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:46.714467Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:46.714476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:46.714594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:46.751132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:46.751242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:46.752906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19561 TClient is connected to server localhost:19561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:47.164784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:47.193173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.330843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.466682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.538250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.870207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346709678264551:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:48.870310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.155799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.181414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.206525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.229746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.306143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.334108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.363196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.442261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713973232514:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.442340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.442587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713973232519:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.445377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:49.456979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346713973232521:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:49.517409Z node 1 :TX_PROXY ERROR: Actor# [1:7494346713973232575:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> KqpQueryPerf::Update+QueryService+UseSink >> KqpWorkload::STOCK >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15194, MsgBus: 12421 2025-04-17T18:12:48.202883Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346712842566890:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:48.204030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00167c/r3tmp/tmpbgvwtl/pdisk_1.dat 2025-04-17T18:12:48.460989Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15194, node 1 2025-04-17T18:12:48.517499Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:48.517522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:48.517530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:48.517641Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:48.534318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:48.534450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:48.536205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12421 TClient is connected to server localhost:12421 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:48.973238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.995461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.136328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.289989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.341293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.634167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346721432503186:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:50.634277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:50.860138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.887583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.910510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.946070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.967126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.992695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.019966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.067968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346725727471134:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.068031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.068037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346725727471139:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.071234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:51.080251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346725727471141:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:51.167721Z node 1 :TX_PROXY ERROR: Actor# [1:7494346725727471197:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2025-04-17T18:10:18.869797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:18.869927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:18.870073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c0b/r3tmp/tmpTm9jfM/pdisk_1.dat 2025-04-17T18:10:19.277818Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21877, node 1 2025-04-17T18:10:19.521315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:19.521365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:19.521404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:19.522167Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:19.524356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.608337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.608487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.622337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2100 2025-04-17T18:10:20.177389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:23.167228Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:23.209287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:23.209415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:23.250163Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:23.252286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:23.484086Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.484599Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.485215Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.485548Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.485636Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.485712Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.485791Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.485857Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.485926Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.653328Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:23.653443Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:23.666964Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:23.800171Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:23.845736Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:23.845851Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:23.876031Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:23.878879Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:23.879057Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:23.879115Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:23.879154Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:23.879194Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:23.879236Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:23.879272Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:23.880390Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:23.914695Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:23.914806Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:23.922793Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:23.927798Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:23.927976Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:23.942144Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:23.963959Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:23.964024Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:23.964105Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:23.974585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:23.982167Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:23.982348Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:24.190263Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:24.357527Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:24.418550Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:25.182681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.182855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:25.202513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:25.541648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:25.541880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:25.542251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:25.542438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:25.542578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:25.542749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:25.542914Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:25.543076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:25.543219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:25.543399Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:25.543537Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:25.543671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:25.577180Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:25.577292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... status = OK 2025-04-17T18:12:50.896613Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8596:6509], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:50.896948Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:12:50.897950Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8576:6492], server id = [2:8581:6497], tablet id = 72075186224037902 2025-04-17T18:12:50.897977Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.898133Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:12:50.898885Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8598:6510], server id = [2:8601:6512], tablet id = 72075186224037905, status = OK 2025-04-17T18:12:50.898951Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8598:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:50.899289Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8575:6491], server id = [2:8580:6496], tablet id = 72075186224037901 2025-04-17T18:12:50.899314Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.899667Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8602:6513], server id = [2:8606:6517], tablet id = 72075186224037906, status = OK 2025-04-17T18:12:50.899719Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8602:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:50.900432Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:12:50.901577Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8577:6493], server id = [2:8584:6500], tablet id = 72075186224037903 2025-04-17T18:12:50.901605Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.902093Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8604:6515], server id = [2:8607:6518], tablet id = 72075186224037907, status = OK 2025-04-17T18:12:50.902151Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8604:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:50.903307Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8609:6520], server id = [2:8614:6524], tablet id = 72075186224037908, status = OK 2025-04-17T18:12:50.903360Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8609:6520], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:50.905560Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:12:50.905782Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8596:6509], server id = [2:8599:6511], tablet id = 72075186224037904 2025-04-17T18:12:50.905807Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.907603Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:12:50.907780Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8598:6510], server id = [2:8601:6512], tablet id 
= 72075186224037905 2025-04-17T18:12:50.907814Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.909184Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:12:50.909645Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8602:6513], server id = [2:8606:6517], tablet id = 72075186224037906 2025-04-17T18:12:50.909683Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.909993Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:12:50.910407Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8604:6515], server id = [2:8607:6518], tablet id = 72075186224037907 2025-04-17T18:12:50.910433Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.910567Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:12:50.910618Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:50.910786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:50.910966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:50.911343Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:50.913940Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8609:6520], server id = [2:8614:6524], tablet id = 72075186224037908 2025-04-17T18:12:50.913967Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:50.914474Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:50.944033Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8639:6545]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:50.944202Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:50.944235Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8639:6545], StatRequests.size() = 1 ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER ... 
waiting for 3rd TEvResolveKeySetResult (done) 2025-04-17T18:12:51.008319Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8450:6422], server id = [2:8451:6423], tablet id = 72075186224037894 2025-04-17T18:12:51.008414Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8645:6548] 2025-04-17T18:12:51.008467Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8645:6548] 2025-04-17T18:12:51.008673Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:8646:6549], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:12:51.086657Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:12:51.086725Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:12:51.087197Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:12:51.087798Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:12:51.088022Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-17T18:12:51.088058Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-17T18:12:51.088086Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-17T18:12:51.088116Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-17T18:12:51.088144Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744913570849115 2025-04-17T18:12:51.088178Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-17T18:12:51.088210Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-17T18:12:51.088262Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-17T18:12:51.088313Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:12:51.088375Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-17T18:12:51.088419Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:12:51.088468Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:12:51.088516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:12:51.088648Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:51.089244Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:12:51.089826Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:51.089871Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:51.089979Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-17T18:12:51.183149Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8694:6578] 2025-04-17T18:12:51.183274Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8645:6548], server id = [2:8694:6578], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:51.183457Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8694:6578], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-17T18:12:51.183640Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8695:6579] 2025-04-17T18:12:51.183728Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8695:6579], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-04-17T18:12:51.269520Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:51.269635Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:51.272143Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:51.290685Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDljNTJhYy01MmI5ZDQ4Yi1mNDQwYjc4Ny1kMDQ0ZDRlNg==, TxId: 2025-04-17T18:12:51.290757Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDljNTJhYy01MmI5ZDQ4Yi1mNDQwYjc4Ny1kMDQ0ZDRlNg==, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:12:51.291389Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8703:6583]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:51.291762Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:51.291804Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:12:51.293977Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:51.294022Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:12:51.294058Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:12:51.298852Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::ComputeLength-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28261, MsgBus: 14174 2025-04-17T18:12:44.210312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346694066562172:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.210399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016ad/r3tmp/tmppn2odT/pdisk_1.dat 2025-04-17T18:12:44.506633Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28261, node 1 2025-04-17T18:12:44.587539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.587799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.590235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:44.663879Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.663904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.663910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.664048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14174 TClient is connected to server localhost:14174 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.224906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.249579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.380169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.512644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:45.590384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:46.791169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346702656498546:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.791320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.097312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.123733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.150101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.175021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.230956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.259314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.325212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.370528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346706951466505:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.370609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.370713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346706951466510:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.373861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.382380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346706951466512:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.465299Z node 1 :TX_PROXY ERROR: Actor# [1:7494346706951466567:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62906, MsgBus: 1966 2025-04-17T18:12:49.334731Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346716464446500:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.334833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016ad/r3tmp/tmpNeCN7M/pdisk_1.dat 2025-04-17T18:12:49.423984Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62906, node 2 2025-04-17T18:12:49.466192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:49.466312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:49.468231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:49.482675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:49.482701Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:49.482708Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:49.482808Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1966 TClient is connected to server localhost:1966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:49.850766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.858214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:49.928164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.051935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.102605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.712944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346725054382881:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.713030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.762050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.787304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.815858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.841244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.865767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.892514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.920522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.001075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346729349350836:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.001128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346729349350841:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.001139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.004152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.017139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346729349350843:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:52.110692Z node 2 :TX_PROXY ERROR: Actor# [2:7494346729349350900:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> KqpQueryPerf::Replace+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 32046, MsgBus: 20889 2025-04-17T18:12:49.305028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346715638794568:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.305105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001675/r3tmp/tmp5N5bu3/pdisk_1.dat 2025-04-17T18:12:49.580968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32046, node 1 2025-04-17T18:12:49.651012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:49.651049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:49.651060Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:49.651249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:49.663075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:49.663193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:49.664956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20889 TClient is connected to server localhost:20889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:50.101037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:50.126140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.228721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.360024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.419308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.823147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346724228730943:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.823279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.143737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.172159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.197654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.221532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.268249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.295012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.332506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.429104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346728523698904:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.429185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.429366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346728523698909:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.432266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.443057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346728523698911:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:52.523396Z node 1 :TX_PROXY ERROR: Actor# [1:7494346728523698968:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 65141, MsgBus: 27829 2025-04-17T18:12:44.187770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346694469312174:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.187861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016ae/r3tmp/tmplJ8UfG/pdisk_1.dat 2025-04-17T18:12:44.505998Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65141, node 1 2025-04-17T18:12:44.573960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.574071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.576005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:44.664097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.664127Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.664137Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.664266Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27829 TClient is connected to server localhost:27829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.202773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:45.239559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.403464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.524602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.599679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.696116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346703059248543:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.696214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.097267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.124015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.148929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.175550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.203865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.230276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.295356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.338381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346707354216500:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.338429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.338509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346707354216505:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.341881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.350322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346707354216507:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.406493Z node 1 :TX_PROXY ERROR: Actor# [1:7494346707354216561:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2152, MsgBus: 65331 2025-04-17T18:12:49.415546Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346716684431076:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.415598Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016ae/r3tmp/tmpuyJjew/pdisk_1.dat 2025-04-17T18:12:49.497621Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2152, node 2 2025-04-17T18:12:49.544186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:49.544256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:49.545427Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:49.545435Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:49.545441Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:49.545532Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:49.545608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65331 TClient is connected to server localhost:65331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:49.875811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.886329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:49.955249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.079376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.132027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.941484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346725274367445:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.941549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.980783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.006522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.030146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.054770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.087157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.117827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.145322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.199848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346729569335396:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.199938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.200000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346729569335401:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.202715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.210097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346729569335403:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:52.273417Z node 2 :TX_PROXY ERROR: Actor# [2:7494346729569335456:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7823, MsgBus: 18748 2025-04-17T18:12:49.688976Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346716218024202:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.689035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001668/r3tmp/tmpC0Ok8r/pdisk_1.dat 2025-04-17T18:12:49.949488Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7823, node 1 2025-04-17T18:12:50.008364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:50.008390Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:50.008398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:50.008513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:50.011739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:50.011849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:50.013584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18748 TClient is connected to server localhost:18748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:50.430166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:50.451819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.565051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.685036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.746255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.142966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346729102927874:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.143055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.438146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.463539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.491539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.519041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.568669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.636465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.664200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.719056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346729102928536:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.719116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346729102928541:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.719126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.721722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.729536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346729102928543:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:52.827007Z node 1 :TX_PROXY ERROR: Actor# [1:7494346729102928599:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16340, MsgBus: 25394 2025-04-17T18:12:44.187864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346694020637001:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.187989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016a2/r3tmp/tmpKr74GX/pdisk_1.dat 2025-04-17T18:12:44.513713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16340, node 1 2025-04-17T18:12:44.569315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.569753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.571300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:44.664213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.664237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.664248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.664363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25394 TClient is connected to server localhost:25394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:45.236767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.261546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.390756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.516473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.573143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.688010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346702610573359:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.688111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.097255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.123114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.147265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.174769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.203431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.270644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.299114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.377439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346706905541326:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.377505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.377554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346706905541331:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.380438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.387510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346706905541333:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.464297Z node 1 :TX_PROXY ERROR: Actor# [1:7494346706905541389:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28067, MsgBus: 25803 2025-04-17T18:12:49.458502Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346717221365298:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.458590Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016a2/r3tmp/tmpr4tQ6e/pdisk_1.dat 2025-04-17T18:12:49.554139Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28067, node 2 2025-04-17T18:12:49.590304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:49.590379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:49.593074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:49.621143Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:49.621170Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:49.621179Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:49.621297Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25803 TClient is connected to server localhost:25803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:49.953924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.961346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:50.000296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.159176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.208392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.193391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346730106268957:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.193472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.258212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.282374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.312026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.340307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.367179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.397630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.434670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.495322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346730106269615:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.495414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.495468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346730106269620:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.498422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.507050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346730106269622:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:52.575515Z node 2 :TX_PROXY ERROR: Actor# [2:7494346730106269677:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:06.887496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:06.887575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:06.887614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:06.887657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:06.888634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:06.888664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:11:06.888759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:06.888816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:06.889751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:06.945493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:06.945533Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:06.957149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:06.957440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:06.957591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:06.966118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:06.967927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:06.970824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:06.971715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:06.977203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:06.985045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:06.985157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:06.985910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:06.985956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:06.986014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:06.986154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:06.991474Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:07.079308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:07.080803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.081639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:07.082698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:11:07.082763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.085368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:07.085493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:07.085680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.085730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:07.085766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:07.085809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:07.087606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.087657Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-17T18:11:07.087692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:07.089401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.089446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.089502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.089569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.098871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:07.100876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:07.101054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:07.101773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:07.101863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:07.101898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.102074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:07.102121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.102265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:07.102315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:07.103745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:07.103773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:07.103874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:07.103922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:07.104174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.104210Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:07.104281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:07.104303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.104327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:07.104374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.104397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:07.104423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.104463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:07.104484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:07.104518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:07.104539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:07.104559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:07.105642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:07.105711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:07.105734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
esponseTime 2025-04-17T18:12:52.158790Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:52.529157Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:52.529258Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:52.529362Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:52.529396Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:52.938715Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:52.938785Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:52.938869Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:52.938890Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:53.003058Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:312:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:53.003138Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-17T18:12:53.003228Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-17T18:12:53.003295Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:12:53.003331Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-17T18:12:53.003366Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-17T18:12:53.003394Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-04-17T18:12:53.003526Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:312:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:12:53.003776Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-17T18:12:53.004789Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:312:2299], Recipient [3:129:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 134 Memory: 124232 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 263 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 2025-04-17T18:12:53.004835Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:12:53.004888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0134 2025-04-17T18:12:53.005009Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:12:53.005048Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T18:12:53.006662Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1062:3006], Recipient [3:312:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-17T18:12:53.048855Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:53.048942Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:53.048977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-17T18:12:53.049050Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T18:12:53.049080Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T18:12:53.049220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-17T18:12:53.049300Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-17T18:12:53.049356Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-04-17T18:12:53.049419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-04-17T18:12:53.049496Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at 
tablet# 72057594046678944 2025-04-17T18:12:53.059967Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:53.060049Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:12:53.060083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:12:53.362294Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:53.362381Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:53.362513Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:53.362549Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:53.740703Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:53.740793Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:53.740891Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:53.740923Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:54.107744Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:54.107817Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:54.107891Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:54.107917Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:54.472611Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:54.472685Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:54.472796Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:54.472827Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:54.836043Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:54.836135Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:54.836237Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:54.836267Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:54.867235Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:312:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:12:55.199629Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:55.199711Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:12:55.199816Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:12:55.199848Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink >> KqpQueryPerf::Replace-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2025-04-17T18:10:32.863846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:32.863965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:32.864098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b62/r3tmp/tmpjx9mZU/pdisk_1.dat 2025-04-17T18:10:33.196722Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24624, node 1 2025-04-17T18:10:33.405425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:33.405471Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:33.405503Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:33.406088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:33.407656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:33.486800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:33.486935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:33.500728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28212 2025-04-17T18:10:34.017176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:36.787999Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:36.827981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:36.828102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:36.865435Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:36.867665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:37.100237Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.100798Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.101364Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.101748Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.101856Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.101969Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.102047Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.102114Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.102223Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.263289Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:37.263378Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:37.275910Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:37.389955Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:37.425245Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:37.425314Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:37.449454Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:37.452206Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:37.452366Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:37.452418Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:37.452453Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:37.452491Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:37.452525Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:37.452564Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:37.453603Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:37.485447Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:37.485522Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:37.491608Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:37.494903Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:37.495018Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:37.504353Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:37.521328Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:37.521372Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:37.521423Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:37.529467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:37.534765Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:37.534867Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:37.686101Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:37.884247Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:37.984024Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:38.649203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.649377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:38.669742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:38.942845Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:38.943032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:38.943279Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:38.943397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:38.943506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:38.943621Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:38.943695Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:38.943771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:38.943896Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:38.943994Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:38.944078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:38.944184Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:38.966664Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:38.966740Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-04-17T18:12:53.943090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:53.943137Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:53.943208Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:12:53.944133Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:53.944185Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:53.945348Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:54.025833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:54.025977Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:12:54.026784Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8617:6512], server id = [2:8622:6517], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:54.027224Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8617:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.028829Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8618:6513], server id = [2:8623:6518], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:54.028908Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8618:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.029494Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8619:6514], server id = [2:8624:6519], tablet id = 72075186224037901, status = OK 2025-04-17T18:12:54.029554Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8619:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.029884Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8620:6515], server id = [2:8625:6520], tablet id = 72075186224037902, status = OK 2025-04-17T18:12:54.029942Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8620:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.030879Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8621:6516], server id = [2:8626:6521], tablet id = 72075186224037903, status = OK 2025-04-17T18:12:54.030935Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8621:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.035484Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:54.036159Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8617:6512], server id = [2:8622:6517], tablet id = 72075186224037899 2025-04-17T18:12:54.036198Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.037361Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:54.037782Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8640:6532], server id = [2:8642:6533], tablet 
id = 72075186224037904, status = OK 2025-04-17T18:12:54.037852Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8640:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.038330Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8618:6513], server id = [2:8623:6518], tablet id = 72075186224037900 2025-04-17T18:12:54.038360Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.039662Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:12:54.039996Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8643:6534], server id = [2:8645:6535], tablet id = 72075186224037905, status = OK 2025-04-17T18:12:54.040069Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8643:6534], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.040829Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8619:6514], server id = [2:8624:6519], tablet id = 72075186224037901 2025-04-17T18:12:54.040856Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.041641Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:12:54.042296Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8620:6515], server id = [2:8625:6520], tablet id = 72075186224037902 2025-04-17T18:12:54.042336Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.042492Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:12:54.043013Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8647:6537], server id = [2:8649:6539], tablet id = 72075186224037906, status = OK 2025-04-17T18:12:54.043088Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8647:6537], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.043420Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8621:6516], server id = [2:8626:6521], tablet id = 72075186224037903 2025-04-17T18:12:54.043445Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.044115Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8651:6541], server id = [2:8655:6544], tablet id = 72075186224037907, status = OK 2025-04-17T18:12:54.044169Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8651:6541], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.044782Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8653:6543], server id = [2:8656:6545], tablet id = 72075186224037908, status = OK 2025-04-17T18:12:54.044838Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8653:6543], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.046298Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:12:54.046825Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8640:6532], server id = [2:8642:6533], tablet id = 72075186224037904 2025-04-17T18:12:54.046854Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.049216Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:12:54.049669Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8643:6534], server id = [2:8645:6535], tablet id = 72075186224037905 2025-04-17T18:12:54.049697Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.050276Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:12:54.050617Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8647:6537], server id = [2:8649:6539], tablet id = 72075186224037906 2025-04-17T18:12:54.050643Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.051110Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:12:54.051383Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8651:6541], server id = [2:8655:6544], tablet id = 72075186224037907 2025-04-17T18:12:54.051407Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.051672Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:12:54.051714Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:54.051951Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:54.052157Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:54.052419Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:54.055051Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8653:6543], server id = [2:8656:6545], tablet id = 72075186224037908 2025-04-17T18:12:54.055082Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:54.055664Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:54.092074Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8683:6568]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:54.092303Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:54.092350Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8683:6568], StatRequests.size() = 1 2025-04-17T18:12:54.202325Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8694:6571] 2025-04-17T18:12:54.202503Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8694:6571], schemeshard id = 72075186224037897 2025-04-17T18:12:54.202563Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8568:6481], server id = [2:8695:6572], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:54.202661Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8695:6572] 2025-04-17T18:12:54.202722Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8695:6572], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-17T18:12:54.218681Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmZjMWM5YzgtZmU5ODEzNTItOWEzYjljYS1mYWQyZDRlNg==, TxId: 2025-04-17T18:12:54.218756Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId:
ydb://session/3?node_id=2&id=MmZjMWM5YzgtZmU5ODEzNTItOWEzYjljYS1mYWQyZDRlNg==, TxId: 2025-04-17T18:12:54.219720Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:54.233418Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:54.233482Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:54.351442Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8702:6578]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:54.351765Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:54.351817Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:12:54.354648Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:54.354706Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:12:54.354754Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:12:54.360270Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService >> KqpQueryPerf::Update-QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::Upsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10980, MsgBus: 27633 2025-04-17T18:12:51.248304Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346723035881505:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.248407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001666/r3tmp/tmpz4unh1/pdisk_1.dat 2025-04-17T18:12:51.491085Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10980, node 1 2025-04-17T18:12:51.565357Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:51.565378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:51.565383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:51.565476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:51.598558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:51.598669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:51.600396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27633 TClient 
is connected to server localhost:27633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:52.072733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.095078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.201066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.345507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.420794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.816839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346731625817874:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.816941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.081595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.108052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.133547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.159614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.205296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.230863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.260944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.305269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735920785825:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.305346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.305401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735920785830:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.308675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:54.317641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346735920785832:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:54.401537Z node 1 :TX_PROXY ERROR: Actor# [1:7494346735920785889:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Update+QueryService-UseSink >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2025-04-17T18:10:18.361678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:18.361830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:18.361968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c13/r3tmp/tmpRcKJlK/pdisk_1.dat 2025-04-17T18:10:18.765434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27494, node 1 2025-04-17T18:10:19.046896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:19.046942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:19.046982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:19.047557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:19.049738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:19.162610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:19.162729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:19.179651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12134 2025-04-17T18:10:19.746110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:22.863044Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:22.909535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:22.909658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:22.949251Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:22.953443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:23.183897Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.184439Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.185073Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.185468Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.185567Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.185676Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.185776Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.185850Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.185947Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:23.352762Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:23.352869Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:23.366115Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:23.512626Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:23.554787Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:23.554884Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:23.580184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:23.583044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:23.583246Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:23.583311Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:23.583358Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:23.583398Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:23.583439Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:23.583479Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:23.584566Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:23.618951Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:23.619040Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:23.626864Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:23.631666Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:23.631815Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:23.644880Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:23.666644Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:23.666697Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:23.666766Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:23.675908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:23.683078Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:23.683223Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:23.884331Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:24.063340Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:24.133056Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:24.865257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:24.865469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:24.890638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:25.182136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:25.182336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:25.182604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:25.182725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:25.182808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:25.182895Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:25.182969Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:25.183071Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:25.183159Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:25.183265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:25.183343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:25.183416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:25.205022Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:25.205108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... 981217Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:54.981295Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:54.982205Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:54.995675Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:54.995883Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:12:54.996664Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8570:6488], server id = [2:8575:6493], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:54.997001Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8570:6488], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.997208Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8571:6489], server id = [2:8576:6494], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:54.997253Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8571:6489], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.998554Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8572:6490], server id = [2:8577:6495], tablet id = 72075186224037901, status = OK 2025-04-17T18:12:54.998609Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8572:6490], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:54.999124Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8573:6491], server id = [2:8578:6496], tablet id = 72075186224037902, status = OK 2025-04-17T18:12:54.999169Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8573:6491], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.000201Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8574:6492], server id = [2:8579:6497], tablet id = 72075186224037903, status = OK 2025-04-17T18:12:55.000252Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8574:6492], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.004706Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:55.005312Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8570:6488], server id = [2:8575:6493], tablet id = 72075186224037899 2025-04-17T18:12:55.005354Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.006805Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:55.007512Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8593:6508], server id = [2:8596:6509], tablet id = 72075186224037904, status = OK 2025-04-17T18:12:55.007585Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8593:6508], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.007835Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8571:6489], server id = [2:8576:6494], tablet id = 72075186224037900 
2025-04-17T18:12:55.007860Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.009018Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:12:55.009630Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8572:6490], server id = [2:8577:6495], tablet id = 72075186224037901 2025-04-17T18:12:55.009678Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.010032Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8597:6510], server id = [2:8598:6511], tablet id = 72075186224037905, status = OK 2025-04-17T18:12:55.010090Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8597:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.010475Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:12:55.011558Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8573:6491], server id = [2:8578:6496], tablet id = 72075186224037902 2025-04-17T18:12:55.011585Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.011835Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:12:55.012341Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6513], server id = [2:8601:6514], tablet id = 72075186224037906, status = OK 2025-04-17T18:12:55.012407Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8600:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.012781Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8574:6492], server id = [2:8579:6497], tablet id = 72075186224037903 2025-04-17T18:12:55.012808Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.012917Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8602:6515], server id = [2:8607:6520], tablet id = 72075186224037907, status = OK 2025-04-17T18:12:55.012960Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8602:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.014077Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8605:6518], server id = [2:8609:6521], tablet id = 72075186224037908, status = OK 2025-04-17T18:12:55.014127Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8605:6518], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.017219Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:12:55.017514Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8593:6508], server id = [2:8596:6509], tablet id = 72075186224037904 2025-04-17T18:12:55.017541Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.019020Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:12:55.019834Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8597:6510], server id = [2:8598:6511], tablet id = 72075186224037905 2025-04-17T18:12:55.019863Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.020230Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:12:55.020353Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:12:55.020774Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:8600:6513], server id = [2:8601:6514], tablet id = 72075186224037906 2025-04-17T18:12:55.020799Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.020989Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8602:6515], server id = [2:8607:6520], tablet id = 72075186224037907 2025-04-17T18:12:55.021010Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.021216Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:12:55.021254Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:55.021474Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:55.021594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:55.021775Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8605:6518], server id = [2:8609:6521], tablet id = 72075186224037908 2025-04-17T18:12:55.021799Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.022198Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:55.058789Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:55.058959Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:55.059510Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8624:6530], server id = [2:8625:6531], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:55.059595Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8624:6530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.060695Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:55.060765Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:55.060942Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:55.061088Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:55.061441Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:12:55.063380Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8624:6530], server id = [2:8625:6531], tablet id = 72075186224037900 2025-04-17T18:12:55.063411Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.064015Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:55.092902Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8643:6549]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:55.093155Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:55.093197Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8643:6549], StatRequests.size() = 1 2025-04-17T18:12:55.184823Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjA3NjZhZTUtMzdmN2E4MTgtNzVlM2Q0MDItMzkyZmIyZDM=, TxId: 2025-04-17T18:12:55.184864Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjA3NjZhZTUtMzdmN2E4MTgtNzVlM2Q0MDItMzkyZmIyZDM=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:12:55.185315Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8657:6555]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:55.185462Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:55.185827Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:55.185866Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:12:55.188347Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:55.188402Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:12:55.188435Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:12:55.194294Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService-UseSink >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2025-04-17T18:10:30.032881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:30.033027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:30.033156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b9d/r3tmp/tmpWImZUA/pdisk_1.dat 2025-04-17T18:10:30.427700Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7777, node 1 2025-04-17T18:10:30.661557Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:30.661610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:30.661657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:30.662487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:30.664908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:30.752862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:30.753027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:30.767087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16512 2025-04-17T18:10:31.275839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:34.031997Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:34.067333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:34.067419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:34.105079Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:34.108154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:34.328695Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.329089Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.329550Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.329767Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.329823Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.329872Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.329935Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.329997Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.330075Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.496065Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:34.496212Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:34.509323Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:34.647568Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:34.687239Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:34.687343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:34.711443Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:34.712862Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:34.713074Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:34.713122Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:34.713180Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:34.713227Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:34.713270Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:34.713312Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:34.714330Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:34.749797Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:34.749965Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:34.758167Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:34.762793Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:34.762945Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:34.775404Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:34.794667Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:34.794721Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:34.794783Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:34.805964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:34.813807Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:34.813977Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:34.988258Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:35.169349Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:35.260102Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:36.000644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:36.000822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:36.021725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:36.138847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:36.139103Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:36.139423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:36.139583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:36.139725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:36.139857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:36.139997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:36.140175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:36.140321Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:36.140445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:36.140667Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:36.140814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:36.173969Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:36.174084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:12:52.426954Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7132:5239], server id = [2:7133:5240], tablet id = 72075186224037894 2025-04-17T18:12:52.427056Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7258:5315] 2025-04-17T18:12:52.427110Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7258:5315] 2025-04-17T18:12:52.465227Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:12:52.465325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:12:52.465916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:12:52.466270Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:12:52.466615Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-17T18:12:52.466670Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-17T18:12:52.466713Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-17T18:12:52.466754Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-17T18:12:52.466794Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744913572407127 2025-04-17T18:12:52.466822Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-17T18:12:52.466894Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-17T18:12:52.466953Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:12:52.467052Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-17T18:12:52.467189Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-17T18:12:52.467256Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-17T18:12:52.467304Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:12:52.467426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:52.468430Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:12:52.468987Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:52.469057Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:52.469185Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:12:52.470376Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:52.470428Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:52.471566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:52.545216Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:52.545433Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:12:52.545958Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7305:5346], server id = [2:7306:5347], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:52.546069Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7305:5346], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:52.549841Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:52.549956Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:52.550194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:52.550389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:52.550671Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:52.553681Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7305:5346], server id = [2:7306:5347], tablet id = 72075186224037899 2025-04-17T18:12:52.553739Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:52.554339Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:52.591303Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7326:5366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:52.591547Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:52.591593Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7326:5366], StatRequests.size() = 1 2025-04-17T18:12:52.703812Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzM4OTE2NDktN2I1ODE4OTktNzM0N2MyZWYtOGJkNjU5NmQ=, TxId: 2025-04-17T18:12:52.703898Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzM4OTE2NDktN2I1ODE4OTktNzM0N2MyZWYtOGJkNjU5NmQ=, TxId: 2025-04-17T18:12:52.704476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:52.717250Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7341:5372] 2025-04-17T18:12:52.717469Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7341:5372], schemeshard id = 72075186224037897 2025-04-17T18:12:52.717580Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7258:5315], server id = [2:7342:5373], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:52.717615Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7342:5373] 2025-04-17T18:12:52.717728Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7342:5373], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-04-17T18:12:52.731664Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:52.731742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:52.820708Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7346:5376] 2025-04-17T18:12:52.821520Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2806:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:12:52.821584Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2806:3224] 2025-04-17T18:12:52.821672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:12:53.337317Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:12:53.337409Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:54.095563Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:54.095636Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:54.095681Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:12:55.350730Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:55.350876Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:55.350926Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:55.351615Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:55.364728Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:55.365131Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:55.365204Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:55.365590Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:55.378711Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:55.378900Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:55.379414Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7425:5419], server id = [2:7426:5420], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:55.379522Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7425:5419], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:55.380927Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:55.381034Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:55.381196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:55.381369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:55.381643Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:55.384488Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7425:5419], server id = [2:7426:5420], tablet id = 72075186224037899 2025-04-17T18:12:55.384529Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:55.385077Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:55.405922Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjQ1ZTNhNmEtY2IwY2IyMTEtY2RjMzdkMzYtNjMxOWNjOGQ=, TxId: 2025-04-17T18:12:55.405981Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjQ1ZTNhNmEtY2IwY2IyMTEtY2RjMzdkMzYtNjMxOWNjOGQ=, TxId: 2025-04-17T18:12:55.406380Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:55.430756Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:55.430839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2806:3224] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16200, MsgBus: 14122 2025-04-17T18:12:46.572967Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346701030377771:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:46.573038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001680/r3tmp/tmpBmlDU7/pdisk_1.dat 2025-04-17T18:12:46.826568Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16200, node 1 2025-04-17T18:12:46.900343Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:46.900372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:46.900378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:46.900528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:46.929351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:46.929475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:46.931146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14122 TClient is connected to server localhost:14122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:47.282606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.302819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:47.423568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.533784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.595879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.037927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713915281440:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.038035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.314255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.339747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.362987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.388847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.440640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.511461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.536718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.582213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713915282099:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.582279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.582315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346713915282105:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.585844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:49.594694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346713915282107:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:49.652050Z node 1 :TX_PROXY ERROR: Actor# [1:7494346713915282159:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1705, MsgBus: 28406 2025-04-17T18:12:51.774412Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346724175053146:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.774516Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001680/r3tmp/tmp534OHj/pdisk_1.dat 2025-04-17T18:12:51.865213Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1705, node 2 2025-04-17T18:12:51.908526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:51.908604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:51.909871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:51.917135Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:51.917161Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:51.917168Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:51.917260Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28406 TClient is connected to server localhost:28406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:52.251286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.260840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:52.306092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.430558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:52.499657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.549560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346737059956819:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.549639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.603862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.628037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.653205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.678787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.705832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.737128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.766262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.826823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346737059957478:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.826919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.826957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346737059957483:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.829961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:54.837575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346737059957485:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:54.932650Z node 2 :TX_PROXY ERROR: Actor# [2:7494346737059957542:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7638, MsgBus: 28810 2025-04-17T18:12:44.226436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346694819067187:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.226580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016b4/r3tmp/tmpyRKY28/pdisk_1.dat 2025-04-17T18:12:44.507655Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7638, node 1 2025-04-17T18:12:44.596053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.596158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.597992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:44.663942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.663970Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.663978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.664082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28810 TClient is connected to server localhost:28810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.244103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:45.264301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.401481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.534843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.609228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.852185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346703409003552:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.852310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.105440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.132187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.157577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.186163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.234220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.262091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.332383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.377982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346707703971510:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.378028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.378061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346707703971515:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.380415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.387300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346707703971517:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.444399Z node 1 :TX_PROXY ERROR: Actor# [1:7494346707703971571:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:48.369298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.410841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.478383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.226604Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346694819067187:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.226688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4493, MsgBus: 1660 2025-04-17T18:12:50.894553Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346721187763611:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:50.894615Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016b4/r3tmp/tmps2cP5s/pdisk_1.dat 2025-04-17T18:12:50.972225Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4493, node 2 2025-04-17T18:12:51.021526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:51.021605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:51.023359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:51.030664Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:51.030683Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:51.030690Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:51.030818Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1660 TClient is connected to server localhost:1660 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:51.398543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.416167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.485130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.617398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.689125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.430081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346734072667274:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.430155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.466757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.491481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.516375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.542334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.571050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.601991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.668377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.722936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346734072667933:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.723017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.723439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346734072667938:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.726542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:53.735645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346734072667940:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:53.825036Z node 2 :TX_PROXY ERROR: Actor# [2:7494346734072667995:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:54.643588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.675225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.734447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.894867Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346721187763611:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:55.894931Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16250, MsgBus: 9331 2025-04-17T18:12:52.853784Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346728210794051:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:52.853853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00163e/r3tmp/tmpLp1u4B/pdisk_1.dat 2025-04-17T18:12:53.115128Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16250, node 1 2025-04-17T18:12:53.170623Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:53.170647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:53.170658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:53.170776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:53.202836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:53.202931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:53.204557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9331 TClient is connected to server localhost:9331 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:53.609969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.625810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.728157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:53.871768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.946198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:55.389137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346741095697711:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.389253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.654629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.678652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.701862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.727120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.776193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.801679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.827451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.901779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346741095698370:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.901846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.901923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346741095698375:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.904891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:55.913643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346741095698377:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:56.013518Z node 1 :TX_PROXY ERROR: Actor# [1:7494346745390665730:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17222, MsgBus: 1933 2025-04-17T18:12:44.187728Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346695776636962:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:44.187856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016a8/r3tmp/tmpHdeGif/pdisk_1.dat 2025-04-17T18:12:44.523790Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17222, node 1 2025-04-17T18:12:44.569328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:44.569797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:44.571280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:44.663952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:44.663979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:44.663989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:44.664105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1933 TClient is connected to server localhost:1933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:45.239754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:45.261521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.374293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.505326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:45.581459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:46.678209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346704366573336:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:46.678281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.097248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.124932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.151320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.200173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.220716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.246128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.272153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:47.330737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346708661541294:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.330799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346708661541299:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.330812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:47.335127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:47.343997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346708661541301:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:47.421520Z node 1 :TX_PROXY ERROR: Actor# [1:7494346708661541356:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:48.459789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.509692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:48.536038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.187825Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346695776636962:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.197078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24726, MsgBus: 30204 2025-04-17T18:12:50.853666Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346718123858227:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:50.853749Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0016a8/r3tmp/tmpBl54PQ/pdisk_1.dat 2025-04-17T18:12:50.954441Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24726, node 2 2025-04-17T18:12:50.984440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:50.984515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:50.985977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:51.013896Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:51.013919Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:51.013927Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:51.014051Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30204 TClient is connected to server localhost:30204 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:51.373765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.381015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.425394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:51.548222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.617933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.539434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346731008761903:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.539520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.588718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.640086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.666760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.692886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.718791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.746298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.772183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.847588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346731008762563:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.847654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.847787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346731008762568:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.850562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:53.863484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346731008762570:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:53.930442Z node 2 :TX_PROXY ERROR: Actor# [2:7494346731008762627:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:54.758128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.792712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.863757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.854031Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346718123858227:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:55.854094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27575, MsgBus: 24000 2025-04-17T18:12:47.720682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346707046991649:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:47.720772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00167e/r3tmp/tmppzqIRg/pdisk_1.dat 2025-04-17T18:12:48.020487Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27575, node 1 2025-04-17T18:12:48.081760Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:48.081790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:48.081800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:48.081947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:48.092884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:48.093018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:48.094832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24000 TClient is connected to server localhost:24000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:48.508154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.530126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.647361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.773900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:48.844136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.260930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346719931895318:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:50.261038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:50.510736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.532756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.556094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.576907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.602009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.667102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.731199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.772853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346719931895988:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:50.772929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:50.772989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346719931895993:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:50.775608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:50.782536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346719931895995:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:50.864121Z node 1 :TX_PROXY ERROR: Actor# [1:7494346719931896048:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14980, MsgBus: 20827 2025-04-17T18:12:52.512432Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346727086244488:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:52.512477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00167e/r3tmp/tmphd9r83/pdisk_1.dat 2025-04-17T18:12:52.609671Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14980, node 2 2025-04-17T18:12:52.651231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:52.651307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:52.652748Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:52.668195Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:52.668217Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:52.668225Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:52.668325Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20827 TClient is connected to server localhost:20827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:53.022064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.029556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:53.100521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.280311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.352495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:55.300164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346739971148168:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.300262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.343475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.369078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.395741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.422002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.487073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.533743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.601871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.675805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346739971148841:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.675867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346739971148846:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.675882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.678657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:55.686129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346739971148848:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:55.765791Z node 2 :TX_PROXY ERROR: Actor# [2:7494346739971148906:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace >> TLocalTests::TestAddTenant >> TNodeBrokerTest::TestListNodes >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> KqpJoinOrder::TestJoinHint2-ColumnStore >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3700, MsgBus: 1815 2025-04-17T18:12:49.417970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346713468927541:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.418091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001674/r3tmp/tmpwtRkK4/pdisk_1.dat 2025-04-17T18:12:49.690118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:49.690234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:49.692408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:49.707820Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3700, node 1 2025-04-17T18:12:49.719186Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:12:49.762449Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:49.762470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:49.762476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:49.762569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1815 TClient is connected to server localhost:1815 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:50.183323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.204016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.328239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.447433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.512717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.719479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346722058863916:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.719608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:51.963618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.990338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.015681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.040652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.064013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.094548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.143908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.193500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346726353831872:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.193600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346726353831877:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.193616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.196576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.205994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346726353831879:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:52.300613Z node 1 :TX_PROXY ERROR: Actor# [1:7494346726353831934:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28428, MsgBus: 13594 2025-04-17T18:12:54.176724Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346735280372037:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:54.176829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001674/r3tmp/tmprjXLQb/pdisk_1.dat 2025-04-17T18:12:54.271087Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28428, node 2 2025-04-17T18:12:54.314930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:54.315028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:54.317311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:54.338281Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:54.338304Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:54.338312Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:54.338419Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13594 TClient is connected to server localhost:13594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:54.694116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.711618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:54.765345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.912367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.974677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.237971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346748165275692:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.238061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.293189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.334333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.365242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.396877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.427709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.456587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.489946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.544474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346748165276348:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.544573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.544641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346748165276353:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.547905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:57.556037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346748165276355:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:57.648003Z node 2 :TX_PROXY ERROR: Actor# [2:7494346748165276410:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink >> TLocalTests::TestAddTenant [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] >> TNodeBrokerTest::SingleDomainModeBannedIds >> TDynamicNameserverTest::CacheMissSimpleDeadline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-04-17T18:13:00.290811Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-04-17T18:13:00.290992Z node 1 :LOCAL ERROR: Unknown domain dc-3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2025-04-17T18:10:33.174955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:33.175053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:33.175152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b5e/r3tmp/tmpJomwTR/pdisk_1.dat 2025-04-17T18:10:33.489815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25928, node 1 2025-04-17T18:10:33.710497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:33.710544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:33.710567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:33.711065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:33.712883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:33.794045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:33.794143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:33.807645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29282 2025-04-17T18:10:34.311570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:37.089355Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:37.131438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:37.131535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:37.168913Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:37.171191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:37.387261Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.387787Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.388180Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.388383Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.388447Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.388525Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.388572Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.388649Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.388693Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:37.549354Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:37.549451Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:37.562492Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:37.708684Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:37.757027Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:37.757156Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:37.789599Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:37.793268Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:37.793513Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:37.793601Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:37.793657Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:37.793718Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:37.793776Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:37.793839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:37.795483Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:37.835060Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:37.835168Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:37.842318Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:37.846944Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:37.847092Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:37.859470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:37.880350Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:37.880410Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:37.880483Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:37.891799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:37.899741Z node 2 :STATISTICS DEBUG: Table _statistics updater.
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:37.899883Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:38.076699Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:38.242750Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:38.300323Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:39.005140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:39.005240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:39.022157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:39.127418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:39.127682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:39.128022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:39.128291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:39.128466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:39.128632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:39.128836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:39.128998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:39.129128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:39.129257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:39.129385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:39.129517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2331:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:39.156822Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:39.156949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... = 72075186224037899 2025-04-17T18:12:53.789828Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:53.789957Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:53.791750Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:53.819474Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7574:5551]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:53.819619Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:53.819646Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7574:5551], StatRequests.size() = 1 2025-04-17T18:12:53.907375Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmVkZGZhZDYtM2QyYzFjYzEtNTdiZDg1NDgtOTAxYzAxYTg=, TxId: 2025-04-17T18:12:53.907423Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmVkZGZhZDYtM2QyYzFjYzEtNTdiZDg1NDgtOTAxYzAxYTg=, TxId: 2025-04-17T18:12:53.907909Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:53.921847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:53.921915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3270:3372] 2025-04-17T18:12:54.411046Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:12:54.411132Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:55.122646Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:55.122741Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-17T18:12:55.125786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:55.141960Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:55.142525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:55.142584Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 5], AnalyzedShards 1 2025-04-17T18:12:55.167475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:55.178440Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events.
2025-04-17T18:12:55.179346Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-17T18:12:55.179432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-17T18:12:55.192567Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:12:56.401385Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:56.401470Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-17T18:12:56.401555Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T18:12:56.402172Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:56.433606Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:56.433953Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:56.434017Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:56.434435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:56.462800Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:56.462952Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:56.463504Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7694:5611], server id = [2:7695:5612], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:56.463611Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7694:5611], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-04-17T18:12:56.466758Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:56.466837Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:56.467039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:56.467205Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:56.467424Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap.
Database: /Root/Database 2025-04-17T18:12:56.469540Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7694:5611], server id = [2:7695:5612], tablet id = 72075186224037900 2025-04-17T18:12:56.469574Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:56.470169Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:56.493516Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:05.000000Z, event interval end# 2025-04-17T18:12:54.000000Z 2025-04-17T18:12:56.493938Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTRiMGFkNGItODFlZjgzNDktZjNkOTM1MjktODAxZjQxZGI=, TxId: 2025-04-17T18:12:56.493992Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTRiMGFkNGItODFlZjgzNDktZjNkOTM1MjktODAxZjQxZGI=, TxId: 2025-04-17T18:12:56.496065Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:56.523906Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T18:12:56.523965Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:57.020023Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-17T18:12:57.020085Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:57.678010Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:12:57.678223Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:57.689208Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:57.689291Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:12:57.689335Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:12:58.813516Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:58.813603Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-17T18:12:58.813651Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T18:12:58.814001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:58.826877Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:58.827186Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:58.827232Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:58.827570Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute.
Node count = 1 2025-04-17T18:12:58.840387Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:58.840535Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-17T18:12:58.841064Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7795:5669], server id = [2:7796:5670], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:58.841157Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7795:5669], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-04-17T18:12:58.842133Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:58.842259Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:58.842431Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:58.842612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:58.842789Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:58.845002Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7795:5669], server id = [2:7796:5670], tablet id = 72075186224037900 2025-04-17T18:12:58.845040Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.845875Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:58.867292Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjdlMjcwZDUtYjU0MDY2ZjAtODY5NzRiOGUtYWIxMWIyYjI=, TxId: 2025-04-17T18:12:58.867362Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjdlMjcwZDUtYjU0MDY2ZjAtODY5NzRiOGUtYWIxMWIyYjI=, TxId: 2025-04-17T18:12:58.867873Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:58.881653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T18:12:58.881725Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3270:3372] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2025-04-17T18:10:39.899523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:39.899631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:39.899728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1f/r3tmp/tmpMHlrde/pdisk_1.dat 2025-04-17T18:10:40.216026Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62281, node 1 2025-04-17T18:10:40.435519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:40.435567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:40.435605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:40.436272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:40.438452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:40.519909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:40.520033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:40.533771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20548 2025-04-17T18:10:41.076910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:43.882563Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:43.926427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:43.926561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:43.964450Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:43.966850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:44.194795Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.195299Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.195899Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.196252Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.196357Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.196474Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.196570Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.196642Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.196712Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:44.360203Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:44.360335Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:44.374084Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:44.507966Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:44.553093Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:44.553191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:44.585837Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:44.587153Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:44.587315Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:44.587364Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:44.587414Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:44.587456Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:44.587498Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:44.587536Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:44.588510Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:44.622130Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:44.622269Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:44.630161Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:44.634563Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:44.634699Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:44.647095Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:44.666561Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:44.666627Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:44.666707Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:44.677194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:44.684209Z node 2 :STATISTICS DEBUG: Table _statistics updater.
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:44.684332Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:44.847447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:44.998055Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:45.055955Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:45.787067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.787227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.807196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:46.078289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:46.078517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:46.078833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:46.078967Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:46.079086Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:46.079235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:46.079372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:46.079518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:46.079656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:46.079784Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:46.079932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:46.080064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:46.112585Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:46.112713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-04-17T18:12:58.504240Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:58.504300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:58.504402Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:12:58.505468Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:58.505522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:58.507120Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:58.560271Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:58.560416Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:58.561115Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8618:6515], server id = [2:8623:6520], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:58.561542Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8618:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.563212Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8619:6516], server id = [2:8624:6521], tablet id = 72075186224037900, status = OK 2025-04-17T18:12:58.563283Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8619:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.563786Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8620:6517], server id = [2:8625:6522], tablet id = 72075186224037901, status = OK 2025-04-17T18:12:58.563833Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8620:6517], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.564490Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8621:6518], server id = [2:8626:6523], tablet id = 72075186224037902, status = OK 2025-04-17T18:12:58.564537Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8621:6518], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.564775Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8622:6519], server id = [2:8629:6526], tablet id = 72075186224037903, status = OK 2025-04-17T18:12:58.564814Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8622:6519], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.569803Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:58.570583Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8618:6515], server id = [2:8623:6520], tablet id = 72075186224037899 2025-04-17T18:12:58.570626Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.570908Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:12:58.571805Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8619:6516], server id = [2:8624:6521], tablet
id = 72075186224037900 2025-04-17T18:12:58.571830Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.572489Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8641:6535], server id = [2:8644:6537], tablet id = 72075186224037904, status = OK 2025-04-17T18:12:58.572555Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8641:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.572894Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:12:58.573869Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8643:6536], server id = [2:8646:6538], tablet id = 72075186224037905, status = OK 2025-04-17T18:12:58.573934Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8643:6536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.574233Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8621:6518], server id = [2:8626:6523], tablet id = 72075186224037902 2025-04-17T18:12:58.574261Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.575196Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:12:58.575624Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:12:58.576061Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8620:6517], server id = [2:8625:6522], tablet id = 72075186224037901 2025-04-17T18:12:58.576088Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.576379Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8647:6539], server id = [2:8650:6542], tablet id = 72075186224037906, status = OK 2025-04-17T18:12:58.576429Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8647:6539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.576783Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8622:6519], server id = [2:8629:6526], tablet id = 72075186224037903 2025-04-17T18:12:58.576807Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.577023Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8652:6544], server id = [2:8655:6547], tablet id = 72075186224037907, status = OK 2025-04-17T18:12:58.577073Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8652:6544], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.577565Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8653:6545], server id = [2:8657:6548], tablet id = 72075186224037908, status = OK 2025-04-17T18:12:58.577613Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8653:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.580459Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:12:58.580969Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8641:6535], server id = [2:8644:6537], tablet id = 72075186224037904 2025-04-17T18:12:58.580995Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.581391Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:12:58.581721Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8643:6536], server id = [2:8646:6538], tablet id = 72075186224037905 2025-04-17T18:12:58.581744Z
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.582800Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:12:58.583268Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8647:6539], server id = [2:8650:6542], tablet id = 72075186224037906 2025-04-17T18:12:58.583294Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.583984Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:12:58.584405Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8652:6544], server id = [2:8655:6547], tablet id = 72075186224037907 2025-04-17T18:12:58.584427Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.584493Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:12:58.584541Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:58.584765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:58.584930Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:58.585150Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:58.587598Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8653:6545], server id = [2:8657:6548], tablet id = 72075186224037908 2025-04-17T18:12:58.587639Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.588237Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:58.622708Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8684:6571]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:58.622961Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:58.623023Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8684:6571], StatRequests.size() = 1 2025-04-17T18:12:58.750447Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWY3NGQzMzItNDc5NDQ2N2MtY2ZjNmNkZTQtYjczMDY5MjY=, TxId: 2025-04-17T18:12:58.750521Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWY3NGQzMzItNDc5NDQ2N2MtY2ZjNmNkZTQtYjczMDY5MjY=, TxId: 2025-04-17T18:12:58.751109Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:58.763690Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8699:6577] 2025-04-17T18:12:58.763785Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8568:6483], server id = [2:8699:6577], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:58.763918Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8699:6577], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-17T18:12:58.764050Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8700:6578]
2025-04-17T18:12:58.764109Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8700:6578], schemeshard id = 72075186224037897 2025-04-17T18:12:58.788104Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:58.788172Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:58.896126Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8703:6581]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:58.896344Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:58.896383Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:12:58.898611Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:12:58.898654Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:12:58.898688Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:12:58.903154Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7729, MsgBus: 13540 2025-04-17T18:12:55.408825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346740407235063:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:55.408958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00160d/r3tmp/tmpwQilcY/pdisk_1.dat 2025-04-17T18:12:55.701664Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7729, node 1 2025-04-17T18:12:55.763919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:55.763946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:55.763954Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:55.764075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:55.795542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:55.795639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:55.797267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13540 TClient is connected to server localhost:13540 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:56.172489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.194040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.303031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.438366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.520813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.066075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346753292138739:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.066200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.285979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.308422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.330553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.355932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.381279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.409938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.475139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.519523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346753292139404:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.519610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.519715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346753292139409:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.522668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:58.530214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346753292139411:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:58.606853Z node 1 :TX_PROXY ERROR: Actor# [1:7494346753292139465:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20681, MsgBus: 13419 2025-04-17T18:12:46.636457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346702822835328:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:46.636540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001682/r3tmp/tmpLkewSs/pdisk_1.dat 2025-04-17T18:12:46.930311Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20681, node 1 2025-04-17T18:12:46.983714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:46.983748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:46.983757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:46.983896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:47.015322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:47.015431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:47.017100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13419 TClient is connected to server localhost:13419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:47.416329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:12:47.439501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.567073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.694682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:47.768971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:49.028887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346715707738986:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.028988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.267211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.292830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.313951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.338006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.362231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.428188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.493555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:49.543375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346715707739651:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.543455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.544544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346715707739656:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:49.547690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:49.557392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346715707739658:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:49.614820Z node 1 :TX_PROXY ERROR: Actor# [1:7494346715707739711:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:50.536391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.564519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:50.591101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:51.636474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346702822835328:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.636555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9529, MsgBus: 63613 2025-04-17T18:12:53.354763Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346731313276850:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:53.354841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001682/r3tmp/tmptWiA1q/pdisk_1.dat 2025-04-17T18:12:53.449228Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9529, node 2 2025-04-17T18:12:53.485099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:53.485188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:53.486422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:53.490258Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:53.490279Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:53.490286Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:53.490394Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63613 TClient is connected to server localhost:63613 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:53.857788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.873132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.919872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.035301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.106069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.037134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346744198180499:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.037234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.092578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.120959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.151500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.176831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.203628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.273370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.304325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.359927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346744198181160:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.360017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.360243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346744198181165:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.363559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:56.372048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346744198181167:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:56.428241Z node 2 :TX_PROXY ERROR: Actor# [2:7494346744198181221:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:57.450629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.483294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.517930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.354877Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346731313276850:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.354924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29497, MsgBus: 12108 2025-04-17T18:12:55.258499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346742661798972:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:55.258911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001617/r3tmp/tmpu9kPCI/pdisk_1.dat 2025-04-17T18:12:55.541676Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29497, node 1 2025-04-17T18:12:55.612176Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:55.612197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:55.612202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:55.612291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:55.628260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:55.628375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:55.630130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12108 TClient is connected to server localhost:12108 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:56.038658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.070028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.203654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.328431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.387283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.966202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346751251735347:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.966324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.204266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.230208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.253925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.278670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.302193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.366037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.395807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.449300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346755546703307:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.449352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346755546703312:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.449411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.452224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:58.461071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346755546703314:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:58.547962Z node 1 :TX_PROXY ERROR: Actor# [1:7494346755546703368:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> TTenantPoolTests::TestStateStatic >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] >> TNodeBrokerTest::TestRandomActions |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] Test command err: 2025-04-17T18:13:01.215442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:01.215505Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25052, MsgBus: 30953 2025-04-17T18:12:51.552008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346725414986844:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.552071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00165b/r3tmp/tmpHqCO0W/pdisk_1.dat 2025-04-17T18:12:51.828776Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25052, node 1 2025-04-17T18:12:51.887761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:51.887784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:51.887791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:51.887917Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:51.921210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:51.921357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:51.922378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30953 TClient is connected to server localhost:30953 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:52.294793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:52.316704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.413244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.533244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.594231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.992917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346734004923209:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.992993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.205037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.231328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.258328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.284291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.309524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.357011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.421810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.468921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346738299891169:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.468988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.469149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346738299891174:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.472430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:54.480452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346738299891176:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:54.539716Z node 1 :TX_PROXY ERROR: Actor# [1:7494346738299891230:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24948, MsgBus: 15001 2025-04-17T18:12:56.395832Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346743466779899:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.397716Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00165b/r3tmp/tmpZLWkUl/pdisk_1.dat 2025-04-17T18:12:56.521662Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:56.546047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:56.546124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:56.547864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24948, node 2 2025-04-17T18:12:56.582590Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:56.582612Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:56.582620Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:56.582724Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15001 TClient is connected to server localhost:15001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:56.946575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.962944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:57.024750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.209615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.291193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.386016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346756351683482:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.386115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.437824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.463479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.489477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.513184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.541297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.608199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.637247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.689475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346756351684146:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.689562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.689568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346756351684151:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.692846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:59.700944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346756351684153:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:59.762591Z node 2 :TX_PROXY ERROR: Actor# [2:7494346756351684205:3544] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> TTenantPoolTests::TestStateStatic [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19253, MsgBus: 16315 2025-04-17T18:12:56.544491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346746220717724:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.544639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00160c/r3tmp/tmpgtXOib/pdisk_1.dat 2025-04-17T18:12:56.830008Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19253, node 1 2025-04-17T18:12:56.896731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:56.896759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:56.896772Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:56.896958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:56.910906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:56.911011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:56.912600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16315 TClient is connected to server localhost:16315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:57.386656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.407804Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:12:57.422805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.560793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.698556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:57.756708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.322330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346759105621383:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.322428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.562106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.589141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.611530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.635890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.660070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.690943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.717019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.763596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346759105622041:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.763674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.763824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346759105622046:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.767079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:59.775821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346759105622048:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:59.877018Z node 1 :TX_PROXY ERROR: Actor# [1:7494346759105622104:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-04-17T18:13:01.134010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:01.134075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:01.236766Z node 1 :NODE_BROKER ERROR: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2025-04-17T18:13:01.262630Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-17T18:13:01.276580Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2025-04-17T18:13:02.095927Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-17T18:13:02.115006Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22241, MsgBus: 7138 2025-04-17T18:12:57.122829Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346749657219865:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:57.122965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001602/r3tmp/tmpIj9Hy7/pdisk_1.dat 2025-04-17T18:12:57.404391Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22241, node 1 2025-04-17T18:12:57.480691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:57.480716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:57.480725Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:57.480831Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:57.498908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:57.498995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:57.500868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient 
is connected to server localhost:7138 TClient is connected to server localhost:7138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:57.931728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.963380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.106681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.234392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.305780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.747993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346758247156242:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.748144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.979793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.003748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.032283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.055523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.097841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.127963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.157111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.205704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346762542124199:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.205777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.205834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346762542124204:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.209457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:00.220890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346762542124206:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:13:00.280597Z node 1 :TX_PROXY ERROR: Actor# [1:7494346762542124259:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2025-04-17T18:10:28.539122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:28.539268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:28.539440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bba/r3tmp/tmpw050Kr/pdisk_1.dat 2025-04-17T18:10:28.896266Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24363, node 1 2025-04-17T18:10:29.113780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:29.113821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:29.113852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:29.114396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:29.119867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:29.209787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:29.209918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:29.224723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24497 2025-04-17T18:10:29.753239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:32.515216Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:32.553312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:32.553404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:32.593060Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:32.595721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:32.831759Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.832286Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.832885Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.833243Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.833346Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.833454Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.833526Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.833598Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:32.833666Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:33.000097Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:33.000244Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:33.013501Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:33.137517Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:33.176884Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:33.176992Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:33.207713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:33.210969Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:33.211187Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:33.211267Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:33.211317Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:33.211390Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:33.211443Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:33.211501Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:33.212907Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:33.251133Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:33.251239Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:33.259005Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:33.263277Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:33.263439Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:33.275475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:33.296380Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:33.296443Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:33.296519Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:33.307217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:33.313655Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:33.313772Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:33.474971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:33.638388Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:33.696594Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:34.421697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:34.421875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:34.446630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:34.740923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:34.741163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:34.741528Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:34.741720Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:34.741877Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:34.742021Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:34.742160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:34.742364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:34.742522Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:34.742687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:34.742831Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:34.743043Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:34.782641Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:34.782731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... 45:6417], schemeshard count = 1 2025-04-17T18:12:59.298960Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:12:59.299017Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:12:59.299061Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:59.299113Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:59.302428Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:59.319206Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:59.319730Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:59.319832Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:59.320632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:12:59.320705Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets do not exist in Hive anymore; tablet count = 3 2025-04-17T18:12:59.320757Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:00.428237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:00.428319Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:00.428721Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:13:00.441754Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:00.441952Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:13:00.442719Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8604:6501], server id = [2:8609:6506], tablet id = 72075186224037899, status = OK 2025-04-17T18:13:00.443075Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8604:6501], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.443394Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8605:6502], server id = [2:8610:6507], tablet id = 72075186224037900, status = OK 2025-04-17T18:13:00.443448Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8605:6502], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.444672Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8606:6503], server id = [2:8611:6508], tablet id = 72075186224037901, status = OK 2025-04-17T18:13:00.444728Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8606:6503], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.445836Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8607:6504], server id = [2:8612:6509], tablet id = 72075186224037902, status = OK 2025-04-17T18:13:00.445885Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8607:6504], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.446028Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8608:6505], server id = [2:8615:6512], tablet id = 72075186224037903, status = OK 2025-04-17T18:13:00.446067Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8608:6505], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.451160Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:13:00.452252Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8604:6501], server id = [2:8609:6506], tablet id = 72075186224037899 2025-04-17T18:13:00.452295Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.453133Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:13:00.453770Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8605:6502], server id = [2:8610:6507], tablet id = 72075186224037900 2025-04-17T18:13:00.453800Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.454125Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8626:6521], server id = [2:8630:6522], tablet id = 72075186224037904, status = OK 2025-04-17T18:13:00.454213Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8626:6521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.454464Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:13:00.455357Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8607:6504], server id = [2:8612:6509], tablet id = 72075186224037902 2025-04-17T18:13:00.455387Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.456023Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8631:6523], server id = [2:8633:6525], tablet id = 72075186224037905, status 
= OK 2025-04-17T18:13:00.456092Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8631:6523], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.457484Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:13:00.458049Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8606:6503], server id = [2:8611:6508], tablet id = 72075186224037901 2025-04-17T18:13:00.458073Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.458266Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:13:00.458484Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8632:6524], server id = [2:8636:6528], tablet id = 72075186224037906, status = OK 2025-04-17T18:13:00.458533Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8632:6524], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.458939Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8608:6505], server id = [2:8615:6512], tablet id = 72075186224037903 2025-04-17T18:13:00.458965Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.459617Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8638:6530], server id = [2:8642:6533], tablet id = 72075186224037907, status = OK 2025-04-17T18:13:00.459670Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8638:6530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.460072Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8640:6532], server id = [2:8643:6534], tablet id = 72075186224037908, status = OK 2025-04-17T18:13:00.460132Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8640:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.463355Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:13:00.463799Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8626:6521], server id = [2:8630:6522], tablet id = 72075186224037904 2025-04-17T18:13:00.463820Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.464336Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:13:00.464576Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8631:6523], server id = [2:8633:6525], tablet id = 72075186224037905 2025-04-17T18:13:00.464603Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.465074Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:13:00.465484Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8632:6524], server id = [2:8636:6528], tablet id = 72075186224037906 2025-04-17T18:13:00.465505Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.465965Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:13:00.466349Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8638:6530], server id = [2:8642:6533], tablet id = 72075186224037907 2025-04-17T18:13:00.466372Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.466562Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:13:00.466596Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
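
The [TQueryBase] RunDataQuery entry logged just below persists the aggregated column statistics with a single parameterized YQL statement. For readability, here is that statement laid out on its own. This is only a sketch of what the log shows: the log prints the two list parameters as bare List, so the element types below are assumptions (column tags as Uint32, serialized statistics payloads as String), and the actual parameter values are bound by the statistics actor at run time rather than appearing in the log.

    -- Sketch of the logged statistics-save query (element types assumed; values bound at run time)
    DECLARE $owner_id      AS Uint64;        -- OwnerId of the traversed path (72075186224037897 above)
    DECLARE $local_path_id AS Uint64;        -- LocalPathId of the traversed path (4 above)
    DECLARE $stat_type     AS Uint32;
    DECLARE $column_tags   AS List<Uint32>;  -- logged as bare List; element type is an assumption
    DECLARE $data          AS List<String>;  -- logged as bare List; element type is an assumption
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
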
2025-04-17T18:13:00.466737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:00.466860Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:00.467144Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:13:00.468758Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8640:6532], server id = [2:8643:6534], tablet id = 72075186224037908 2025-04-17T18:13:00.468785Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.469557Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:00.500000Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8670:6557]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:00.500208Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:00.500247Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8670:6557], StatRequests.size() = 1 2025-04-17T18:13:00.625199Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mjg2MTZmYzItMmY5ZGZiZTItNjU4ZGJkOS0yOGViNWYyOA==, TxId: 2025-04-17T18:13:00.625266Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mjg2MTZmYzItMmY5ZGZiZTItNjU4ZGJkOS0yOGViNWYyOA==, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:13:00.625865Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8683:6563]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:00.626069Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:00.626512Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:00.626572Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:13:00.629559Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:00.629616Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:13:00.629668Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:13:00.635639Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> TSlotIndexesPoolTest::Ranges [GOOD] >> TNodeBrokerTest::NodeNameReuseRestart >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63089, MsgBus: 12820 2025-04-17T18:12:57.218759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346751011028938:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:57.218830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015eb/r3tmp/tmp1F7QVZ/pdisk_1.dat 2025-04-17T18:12:57.517801Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63089, node 1 2025-04-17T18:12:57.582353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:57.582377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:57.582383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:57.582527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:57.606167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:57.606282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:57.608121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12820 TClient is connected to server localhost:12820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:58.017475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:12:58.047993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.165306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.294525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.347163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.725469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346759600965320:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.725562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.989248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.015642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.040409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.064773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.089988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.135818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.162913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.209993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346763895933277:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.210079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.210458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346763895933282:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.214570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:00.223231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346763895933284:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:13:00.291541Z node 1 :TX_PROXY ERROR: Actor# [1:7494346763895933339:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 12914, MsgBus: 11421 2025-04-17T18:12:51.544395Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346725961219470:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.544550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00165a/r3tmp/tmpRvorDQ/pdisk_1.dat 2025-04-17T18:12:51.832578Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12914, node 1 2025-04-17T18:12:51.902105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:51.902125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:51.902148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:51.902304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:51.921138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:51.921220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:51.922725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11421 TClient is connected to server localhost:11421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:52.350621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:52.374285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.512888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.634039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.705708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.258128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346738846123158:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.258281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.522564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.547618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.574450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.599403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.621266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.646863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.711908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.786850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346738846123826:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.786939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.787004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346738846123831:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.790138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:54.798486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346738846123833:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:54.902055Z node 1 :TX_PROXY ERROR: Actor# [1:7494346738846123889:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30832, MsgBus: 3225 2025-04-17T18:12:56.778155Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346744443981684:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.778268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00165a/r3tmp/tmpaFTfcI/pdisk_1.dat 2025-04-17T18:12:56.859021Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30832, node 2 2025-04-17T18:12:56.905992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:56.906069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:56.907173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:56.912957Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:56.912976Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:56.912982Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:56.913079Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3225 TClient is connected to server localhost:3225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:57.305427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.320490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:57.397764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.538041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.599141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.685580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757328885351:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.685672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.736641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.764870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.792957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.815804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.879200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.905743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.932765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.981611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757328886013:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.981693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.981714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757328886018:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.984696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:59.992839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346757328886020:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:00.088939Z node 2 :TX_PROXY ERROR: Actor# [2:7494346761623853370:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18781, MsgBus: 64258 2025-04-17T18:12:56.954405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346744984086416:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.954545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015fb/r3tmp/tmpt5buL2/pdisk_1.dat 2025-04-17T18:12:57.281502Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18781, node 1 2025-04-17T18:12:57.333006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:57.333092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:57.339122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:57.358784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:57.358816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:57.358848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:57.358972Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64258 TClient is connected to server localhost:64258 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:57.778689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:57.802929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.921298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.056401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.113582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.748593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346757868990073:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.748696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.003788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.032863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.055119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.077364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.105303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.135950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.203527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.268615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346762163958032:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.268709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346762163958037:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.268719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.271542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:00.279308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346762163958039:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:13:00.357322Z node 1 :TX_PROXY ERROR: Actor# [1:7494346762163958092:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> TLocalTests::TestAlterTenant >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31873, MsgBus: 32330 2025-04-17T18:12:53.225029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346733633404012:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:53.225136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00162b/r3tmp/tmpxPvHtL/pdisk_1.dat 2025-04-17T18:12:53.527132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31873, node 1 2025-04-17T18:12:53.602678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:53.602701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:53.602708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:53.602804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:53.615057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:53.615192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:53.616945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32330 TClient is connected to server localhost:32330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:54.028879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.048813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.192137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.324046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.383368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:55.744175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346742223340390:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.744254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.094080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.120651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.143535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.183724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.209417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.248368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.279677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.324355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346746518308348:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.324444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.324513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346746518308353:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.328044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:56.352923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346746518308355:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:56.442032Z node 1 :TX_PROXY ERROR: Actor# [1:7494346746518308410:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24185, MsgBus: 4243 2025-04-17T18:12:58.216359Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346752951524466:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.216563Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00162b/r3tmp/tmpEPlO37/pdisk_1.dat 2025-04-17T18:12:58.334098Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:58.356116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:58.356192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24185, node 2 2025-04-17T18:12:58.359347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:58.383453Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:58.383477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:58.383483Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:58.383622Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4243 TClient is connected to server localhost:4243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:58.741174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.748938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:58.789052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.931878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.986700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.779040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346761541460823:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.779097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.822864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.847899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.871554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.896046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.923315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.966626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.997227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.044850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346765836428776:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.044906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.044941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346765836428781:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.047997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:01.057241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346765836428783:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:01.119379Z node 2 :TX_PROXY ERROR: Actor# [2:7494346765836428836:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23190, MsgBus: 24610 2025-04-17T18:12:52.579948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346729614048329:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:52.581635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001640/r3tmp/tmpg68QKO/pdisk_1.dat 2025-04-17T18:12:52.835933Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23190, node 1 2025-04-17T18:12:52.894710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:52.894738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:52.894745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:52.894876Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:52.941720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:52.941824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:52.943501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24610 TClient is connected to server localhost:24610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:53.322015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:53.342671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.459428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.575082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.650071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:55.390659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346742498951801:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.390753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.681603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.706334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.727493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.759699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.783100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.847156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.912363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.989393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346742498952469:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.989469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.989549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346742498952474:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.992676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:56.001345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346742498952476:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:56.083654Z node 1 :TX_PROXY ERROR: Actor# [1:7494346746793919829:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4090, MsgBus: 25821 2025-04-17T18:12:57.963588Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346748193223365:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:57.963680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001640/r3tmp/tmpfgrH4h/pdisk_1.dat 2025-04-17T18:12:58.052239Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4090, node 2 2025-04-17T18:12:58.085612Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:58.085704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:58.087451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:58.109259Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:58.109286Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:58.109294Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:58.109402Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25821 TClient is connected to server localhost:25821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:58.461290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.468645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:58.509615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.667345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.737592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.648020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346761078127012:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.648100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.691283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.717402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.744955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.793136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.819665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.848111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.876889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.954267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346761078127671:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.954336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346761078127676:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.954354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.957529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:00.968028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346761078127678:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:01.022388Z node 2 :TX_PROXY ERROR: Actor# [2:7494346765373095031:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TNodeBrokerTest::ExtendLeasePipelining >> GracefulShutdown::TTxGracefulShutdown >> TDynamicNameserverTest::TestCacheUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] Test command err: 2025-04-17T18:10:38.782789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:38.782912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:38.783061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b2a/r3tmp/tmpKQmQNG/pdisk_1.dat 2025-04-17T18:10:39.083199Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11451, node 1 2025-04-17T18:10:39.303132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:39.303196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:39.303237Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:39.303981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:39.306292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:39.392741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:39.392840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:39.406322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10505 2025-04-17T18:10:39.894902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:42.478125Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:42.516557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:42.516666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:42.553338Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:42.555460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:42.769710Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.770147Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.770669Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.770993Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.771097Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.771178Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.771249Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.771313Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.771375Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.929026Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:42.929133Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:42.941782Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:43.057745Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:43.095101Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:43.095166Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:43.117246Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:43.120190Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:43.120354Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:43.120406Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:43.120446Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:43.120481Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:43.120530Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:43.120593Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:43.121628Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:43.154948Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:43.155021Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:43.160921Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:43.164121Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:43.164238Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:43.173395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:43.190988Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:43.191050Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:43.191112Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:43.200945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:43.207841Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:43.207989Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:43.404098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:43.567216Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:43.624243Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:44.318791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:44.318960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:44.336914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:44.450978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:44.451230Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:44.451526Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:44.451659Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:44.451798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:44.451944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:44.452085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:44.452261Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:44.452394Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:44.452499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:44.452661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:44.452792Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:44.482562Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:44.482682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7261:5319] 2025-04-17T18:12:58.808810Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7261:5319] 2025-04-17T18:12:58.842291Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:12:58.842378Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:12:58.842965Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:12:58.843818Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:12:58.844144Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-17T18:12:58.844192Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-17T18:12:58.844232Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-17T18:12:58.844265Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-17T18:12:58.844300Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744913578775381 2025-04-17T18:12:58.844334Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-17T18:12:58.844370Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-17T18:12:58.844440Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-17T18:12:58.844488Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:12:58.844590Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-17T18:12:58.844683Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-17T18:12:58.844755Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-17T18:12:58.844810Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:12:58.844954Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:58.845610Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:12:58.846321Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:58.846381Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:58.846503Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:12:58.847558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:58.847613Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:58.849225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:12:58.910755Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:12:58.910932Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:12:58.911511Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7308:5350], server id = [2:7309:5351], tablet id = 72075186224037899, status = OK 2025-04-17T18:12:58.911615Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7308:5350], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:12:58.913005Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:12:58.913101Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:12:58.913254Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:12:58.913432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:12:58.913667Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:58.916082Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7308:5350], server id = [2:7309:5351], tablet id = 72075186224037899 2025-04-17T18:12:58.916115Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:12:58.916604Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:12:58.943630Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7327:5369]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:58.943763Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:12:58.943792Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7327:5369], StatRequests.size() = 1 2025-04-17T18:12:59.027641Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTYyYmM4OS1mN2E2YzE0YS05ZWJlNTdiZC03ZGFmMDE1OQ==, TxId: 2025-04-17T18:12:59.027716Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTYyYmM4OS1mN2E2YzE0YS05ZWJlNTdiZC03ZGFmMDE1OQ==, TxId: 2025-04-17T18:12:59.028215Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:59.040443Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7342:5375] 2025-04-17T18:12:59.040661Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7342:5375], schemeshard id = 72075186224037897 2025-04-17T18:12:59.040761Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7261:5319], server id = [2:7343:5376], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:59.040794Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7343:5376] 2025-04-17T18:12:59.040860Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7343:5376], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-04-17T18:12:59.053882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:12:59.053938Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:59.139608Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7347:5379] 2025-04-17T18:12:59.140461Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2806:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:12:59.140535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2806:3224] 2025-04-17T18:12:59.140591Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:12:59.646457Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:12:59.646555Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:12:59.657280Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-17T18:12:59.657339Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:13:00.404781Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:13:00.404862Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:13:00.404900Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:13:01.696735Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:01.696896Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:13:01.696960Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:01.697591Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:01.711125Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:01.711545Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:01.711621Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:01.712012Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:13:01.725037Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:01.725231Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-17T18:13:01.725638Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7426:5422], server id = [2:7427:5423], tablet id = 72075186224037899, status = OK 2025-04-17T18:13:01.725740Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7426:5422], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:01.726992Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:13:01.727077Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:13:01.727236Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:01.727388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:01.727717Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:13:01.730253Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7426:5422], server id = [2:7427:5423], tablet id = 72075186224037899 2025-04-17T18:13:01.730307Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:01.730779Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:01.751842Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWI1NDRiNzQtOWY1YmEyY2YtZWU0NjA4YTAtYjZiOTNkNWY=, TxId: 2025-04-17T18:13:01.751918Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWI1NDRiNzQtOWY1YmEyY2YtZWU0NjA4YTAtYjZiOTNkNWY=, TxId: 2025-04-17T18:13:01.752507Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:01.765924Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:01.765986Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2806:3224] >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8793, MsgBus: 12931 2025-04-17T18:12:49.583347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346716259193085:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.583495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00166c/r3tmp/tmprIF63x/pdisk_1.dat 2025-04-17T18:12:49.869113Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8793, node 1 2025-04-17T18:12:49.920936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:49.920959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:49.920970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:49.921101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:49.943717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:49.943836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:49.945584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12931 TClient is connected to server localhost:12931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:50.359661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:50.375372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.499982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.617071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.690845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.117806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346729144096756:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.117897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.410454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.447665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.475298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.501820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.560136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.627277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.657900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.713256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346729144097418:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.713330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.713360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346729144097423:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.716812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.725670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346729144097425:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:52.783128Z node 1 :TX_PROXY ERROR: Actor# [1:7494346729144097478:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:53.620142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.677699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.706532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.583512Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346716259193085:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:54.583572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23515, MsgBus: 30242 2025-04-17T18:12:56.377014Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346745024400331:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.377129Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00166c/r3tmp/tmp1oVtyi/pdisk_1.dat 2025-04-17T18:12:56.457360Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23515, node 2 2025-04-17T18:12:56.525489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:56.525592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:56.527279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:56.542594Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:56.542619Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:56.542626Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:56.542729Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30242 TClient is connected to server localhost:30242 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:56.919359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.936696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.006262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.145863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.222334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.984801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346753614336706:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:58.984921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.035439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.061407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.086412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.108214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.130998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.157284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.223732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.279381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757909304662:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.279476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.279520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757909304667:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.283022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:59.293163Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346757909304669:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:59.381663Z node 2 :TX_PROXY ERROR: Actor# [2:7494346757909304723:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:00.160602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.195919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.267988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.377261Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346745024400331:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:01.377325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestRemoveTenantWhileResolving >> TLocalTests::TestAddTenantWhileResolving |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28833, MsgBus: 26554 2025-04-17T18:12:49.543220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346714427772511:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:49.543288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001673/r3tmp/tmpKnqBNl/pdisk_1.dat 2025-04-17T18:12:49.859425Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28833, node 1 2025-04-17T18:12:49.912567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:49.912748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:49.914307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:49.930902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:49.930924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:49.930930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:49.931033Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:26554 TClient is connected to server localhost:26554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:50.370781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.400091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.518208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.628517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:50.698418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.347841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346727312676167:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.347973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.632970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.658628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.681245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.703112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.725894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.749316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.774044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.817048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346727312676823:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.817103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.817223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346727312676828:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:52.820190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:52.828458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346727312676830:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:52.914779Z node 1 :TX_PROXY ERROR: Actor# [1:7494346727312676885:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:53.830855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.863444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:53.907883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.542997Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346714427772511:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:54.543060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15225, MsgBus: 12155 2025-04-17T18:12:56.481332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346745080849315:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.481936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001673/r3tmp/tmpJrNtAj/pdisk_1.dat 2025-04-17T18:12:56.559236Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15225, node 2 2025-04-17T18:12:56.607629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:56.607716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:56.609495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:56.618777Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:56.618801Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:56.618810Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:56.618914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12155 TClient is connected to server localhost:12155 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:57.004072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.028881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.073702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.185670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:57.244997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.340704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757965752909:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.340787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.394380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.420398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.447766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.473123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.509280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.543906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.576145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:59.653839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757965753571:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.653912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.653914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346757965753576:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:59.657133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:59.665065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346757965753578:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:59.764963Z node 2 :TX_PROXY ERROR: Actor# [2:7494346757965753635:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:00.654594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.682657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.714235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.481606Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346745080849315:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:01.481681Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2025-04-17T18:13:00.249912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:00.249992Z node 1 :IMPORT WARN: Table profiles were not loaded ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... sending extend lease request ... captured cache request ... waiting for response ... 
waiting for epoch update >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> TNodeBrokerTest::ConfigPipelining |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3800, MsgBus: 5875 2025-04-17T18:12:51.080592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346723317667868:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.081377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001667/r3tmp/tmpUXBLBY/pdisk_1.dat 2025-04-17T18:12:51.378221Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3800, node 1 2025-04-17T18:12:51.433155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:51.433178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:51.433183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:51.433308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:51.446573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:51.446672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:51.448282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5875 TClient is connected to server localhost:5875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:51.853604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:51.874710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:51.986120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.127386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.189931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.733501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346731907604237:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:53.733629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.005747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.033890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.060789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.088628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.117368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.152140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.177245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.256977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346736202572199:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.257114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.257214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346736202572204:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.260129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:54.267154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346736202572206:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:54.329242Z node 1 :TX_PROXY ERROR: Actor# [1:7494346736202572260:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:55.193425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.221898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.288970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.080820Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346723317667868:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.080951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32151, MsgBus: 22785 2025-04-17T18:12:57.648437Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346747809111671:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:57.648487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001667/r3tmp/tmpZyh1mU/pdisk_1.dat 2025-04-17T18:12:57.735107Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:57.792344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:57.792439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:57.794313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32151, node 2 2025-04-17T18:12:57.832291Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:57.832321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:57.832329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:57.832452Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22785 TClient is connected to server localhost:22785 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:58.187060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.194624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.235279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.369918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.431349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.413188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346760694015335:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.413270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.467356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.494815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.519622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.541622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.565847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.611751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.677627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.725057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346760694016001:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.725141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346760694016006:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.725151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.728092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:00.736334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346760694016008:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:00.831253Z node 2 :TX_PROXY ERROR: Actor# [2:7494346760694016063:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:01.649035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.697998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.730333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.648686Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346747809111671:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:02.648771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-04-17T18:13:03.753168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:03.753228Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:03.793739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining >> TSlotIndexesPoolTest::Basic [GOOD] >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline >> TDynamicNameserverTest::TestCacheUsage [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> TNodeBrokerTest::RegistrationPipelining ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 61580, MsgBus: 13199 2025-04-17T18:12:53.593534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346733535145602:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:53.593608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001622/r3tmp/tmpBjDQM1/pdisk_1.dat 2025-04-17T18:12:53.889778Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61580, node 1 2025-04-17T18:12:53.955408Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:53.955432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:53.955451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:53.955611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:53.972597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:53.972701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:53.975157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13199 TClient is connected to server localhost:13199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:54.354326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.374700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.486730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.610486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.669919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.495053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346746420049270:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.495192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.772278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.797054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.823176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.848459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.873722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.901166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.929025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.010240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346750715017226:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.010321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.010351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346750715017231:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.013729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:57.023084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346750715017233:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:57.089322Z node 1 :TX_PROXY ERROR: Actor# [1:7494346750715017288:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:58.609398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346733535145602:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.609653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22632, MsgBus: 19645 2025-04-17T18:12:59.278332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346759226537714:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:59.278427Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001622/r3tmp/tmpzL3cTc/pdisk_1.dat 2025-04-17T18:12:59.367867Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22632, node 2 2025-04-17T18:12:59.412583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:59.412689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:59.414322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:59.429644Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:59.429667Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:59.429676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:59.429786Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19645 TClient is connected to server localhost:19645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:12:59.841362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.848710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.892263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.013906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.085741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:01.746130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346767816474078:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.746229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.774008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.835841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.859564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.880816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.903265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.931166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.996375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.071828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346772111442039:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.071897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346772111442044:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.071912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.074464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:02.082791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346772111442046:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:02.162806Z node 2 :TX_PROXY ERROR: Actor# [2:7494346772111442101:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD] Test command err: 2025-04-17T18:13:04.775822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:04.775893Z node 1 :IMPORT WARN: Table profiles were not loaded |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2025-04-17T18:13:03.504175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:03.504247Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:03.525042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8401, MsgBus: 10568 2025-04-17T18:12:51.654105Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346722919755788:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.654975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001651/r3tmp/tmp4PuQl3/pdisk_1.dat 2025-04-17T18:12:51.966623Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8401, node 1 2025-04-17T18:12:52.024458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:52.024480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:52.024509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:52.024656Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:52.037360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:52.037454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:52.038960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10568 TClient is connected to server localhost:10568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:52.475039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.490846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.616326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.758639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.831031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.379479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735804659388:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.379632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.689285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.716413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.741776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.765541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.789946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.816956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.842884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.891424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735804660044:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.891490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.891523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735804660049:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.894360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:54.901904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346735804660051:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:54.958280Z node 1 :TX_PROXY ERROR: Actor# [1:7494346735804660105:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:55.896572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.926148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.964900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.653507Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346722919755788:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.653573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2733, MsgBus: 19475 2025-04-17T18:12:58.410218Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346752981873286:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.410276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001651/r3tmp/tmpQE5hhB/pdisk_1.dat 2025-04-17T18:12:58.520943Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:58.539794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:58.539872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 2733, node 2 2025-04-17T18:12:58.542990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:58.571591Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:58.571612Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:58.571619Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:58.571706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19475 TClient is connected to server localhost:19475 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:58.904254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.911497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.955711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.072678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.125628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:01.159841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346765866776939:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.159926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.204895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.229899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.253961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.278356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.303405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.367293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.394127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.433598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346765866777598:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.433673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346765866777603:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.433674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.436793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:01.443705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346765866777605:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:01.520480Z node 2 :TX_PROXY ERROR: Actor# [2:7494346765866777660:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:02.239191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.273289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.303264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.410354Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346752981873286:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:03.410419Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2025-04-17T18:13:04.165000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:04.165047Z node 1 :IMPORT WARN: Table profiles were not loaded |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] >> TNodeBrokerTest::FixedNodeId [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2025-04-17T18:13:05.098821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:05.098886Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:05.167931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:13:05.203351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest 
>> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28780, MsgBus: 18361 2025-04-17T18:12:51.918373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346722719522409:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:51.918611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00164c/r3tmp/tmp4yTuPL/pdisk_1.dat 2025-04-17T18:12:52.209035Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28780, node 1 2025-04-17T18:12:52.272439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:52.272467Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:52.272474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:52.272623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:52.283386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:52.283510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:52.285097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18361 TClient is connected to server localhost:18361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:52.704015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.715606Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:52.728440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:52.816068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:12:52.941450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.012660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.416750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735604426061:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.416982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.691760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.717223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.739238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.784713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.809535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.834791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.859746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:54.935434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735604426723:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.935512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.935525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735604426728:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.938778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:54.946834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346735604426730:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:12:55.067558Z node 1 :TX_PROXY ERROR: Actor# [1:7494346739899394083:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:56.052268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.106929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.134305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.918541Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346722719522409:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:56.955264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24579, MsgBus: 7319 2025-04-17T18:12:58.868034Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346755715605547:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.868132Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00164c/r3tmp/tmptLxy33/pdisk_1.dat 2025-04-17T18:12:58.947893Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24579, node 2 2025-04-17T18:12:58.996311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:58.996419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:58.997854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:59.001612Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:59.001632Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:59.001641Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:59.001768Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7319 TClient is connected to server localhost:7319 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:59.384490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.391978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.461686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.606112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:59.680343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:01.504211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346768600509213:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.504312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.559104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.585150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.606648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.633108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.657151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.686643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.715212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:01.793812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346768600509870:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.793893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346768600509875:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.793903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:01.797106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:01.805738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346768600509877:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:01.901605Z node 2 :TX_PROXY ERROR: Actor# [2:7494346768600509936:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:02.692730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.727675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.759379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.868446Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346755715605547:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:03.868521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TNodeBrokerTest::NodeNameWithDifferentTenants >> GracefulShutdown::TTxGracefulShutdown [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2025-04-17T18:13:00.247141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:00.247210Z node 1 :IMPORT WARN: Table profiles were not loaded |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-04-17T18:13:04.105050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:04.105124Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] Test command err: 2025-04-17T18:13:05.585627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:05.585685Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2025-04-17T18:13:04.413964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:04.414033Z node 1 :IMPORT WARN: Table profiles were not loaded |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16316, MsgBus: 22923 2025-04-17T18:12:53.074878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346734413846416:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:53.074959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001639/r3tmp/tmptJ1a7r/pdisk_1.dat 2025-04-17T18:12:53.343129Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16316, node 1 2025-04-17T18:12:53.412531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:53.412553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:53.412561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:53.412709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:53.426405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:53.426516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:53.428178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22923 TClient is connected to server localhost:22923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:53.851493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:53.872232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.002714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.125202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.180294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:55.786031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346743003782775:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.786154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.043150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.075993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.105262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.135517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.162006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.231004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.260644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.307327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346747298750738:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.307394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.307454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346747298750743:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.309804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:56.317340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346747298750745:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:56.382415Z node 1 :TX_PROXY ERROR: Actor# [1:7494346747298750797:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:57.345086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.374966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:57.410861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.074874Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346734413846416:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.074938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25324, MsgBus: 20121 2025-04-17T18:12:59.998787Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346759657144021:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:59.998859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001639/r3tmp/tmpD52jNc/pdisk_1.dat 2025-04-17T18:13:00.092477Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25324, node 2 2025-04-17T18:13:00.136725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:00.136826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:00.138582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:00.155740Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:00.155765Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:00.155774Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:00.155893Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20121 TClient is connected to server localhost:20121 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:00.499755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.506313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.574780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.691088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.762561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:02.921783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346772542047693:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.921976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.945244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.972151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.001445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.033804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.065951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.097116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.127353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.202735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346776837015653:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.202790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346776837015658:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.202806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.205722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:03.213888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346776837015660:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:03.267398Z node 2 :TX_PROXY ERROR: Actor# [2:7494346776837015714:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:03.994782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.037301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.108173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.999171Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346759657144021:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:04.999253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TNodeBrokerTest::BasicFunctionality >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder >> TNodeBrokerTest::ConfigPipelining [GOOD] >> TDynamicNameserverTest::BasicFunctionality >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] >> TNodeBrokerTest::NoEffectBeforeCommit |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> TKeyValueTest::TestBasicWriteRead >> TSlotIndexesPoolTest::Init [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2025-04-17T18:13:05.391770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:05.391831Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:05.462503Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: ERROR_TEMP: No free node IDs ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] Test command err: 2025-04-17T18:13:06.777618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:06.777667Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ... 
unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2025-04-17T18:10:33.558928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:33.559054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:33.559173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b56/r3tmp/tmpl0wjQX/pdisk_1.dat 2025-04-17T18:10:33.923817Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16310, node 1 2025-04-17T18:10:34.153926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:34.153975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:34.154028Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:34.154718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:34.156831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:34.242621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:34.242741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:34.256417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8774 2025-04-17T18:10:34.768150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:37.978364Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:38.017420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:38.017526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:38.054483Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:38.056857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:38.288462Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.288980Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.289551Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.289878Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.289973Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.290059Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.290133Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.290228Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.290328Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:38.452840Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:38.452949Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:38.466397Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:38.606620Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:38.650173Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:38.650302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:38.679692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:38.682751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:38.682947Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:38.683011Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:38.683062Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:38.683104Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:38.683158Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:38.683205Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:38.684430Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:38.718193Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:38.718301Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:38.725506Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:38.729633Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:38.729769Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:38.740469Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:38.759482Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:38.759532Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:38.759603Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:38.769309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:38.776376Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:38.776508Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:38.987708Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:39.130437Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:39.187473Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:39.891541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:39.891687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:39.913222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:40.191156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:40.191373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:40.191685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:40.191836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:40.191907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:40.191971Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:40.192072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:40.192211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:40.192383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:40.192523Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:40.192671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:40.192812Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:40.215205Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:40.215275Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... extTraversal 2025-04-17T18:13:04.647271Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:13:04.647316Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:13:04.647359Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:04.650923Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:04.666787Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:04.667237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:04.667316Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:04.667965Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:13:04.682102Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:04.682331Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:13:04.683008Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8566:6477], server id = [2:8571:6482], tablet id = 72075186224037899, status = OK 2025-04-17T18:13:04.683345Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8566:6477], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.685038Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8567:6478], server id = [2:8572:6483], tablet id = 72075186224037900, status = OK 2025-04-17T18:13:04.685122Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8567:6478], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.685764Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8568:6479], server id = [2:8573:6484], tablet id = 72075186224037901, status = OK 2025-04-17T18:13:04.685819Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8568:6479], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.686161Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8569:6480], server id = [2:8574:6485], tablet id = 72075186224037902, status = OK 2025-04-17T18:13:04.686243Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8569:6480], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.687310Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8570:6481], server id = [2:8577:6488], tablet id = 72075186224037903, status = OK 2025-04-17T18:13:04.687364Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8570:6481], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.692042Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:13:04.692551Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8566:6477], server id = [2:8571:6482], tablet id = 72075186224037899 2025-04-17T18:13:04.692604Z node 2 :STATISTICS 
DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.694010Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:13:04.694591Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8567:6478], server id = [2:8572:6483], tablet id = 72075186224037900 2025-04-17T18:13:04.694625Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.695251Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8589:6497], server id = [2:8591:6498], tablet id = 72075186224037904, status = OK 2025-04-17T18:13:04.695343Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8589:6497], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.695607Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:13:04.696781Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8568:6479], server id = [2:8573:6484], tablet id = 72075186224037901 2025-04-17T18:13:04.696812Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.697351Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:13:04.697812Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8592:6499], server id = [2:8594:6500], tablet id = 72075186224037905, status = OK 2025-04-17T18:13:04.697885Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8592:6499], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.698339Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8569:6480], server id = [2:8574:6485], tablet id = 72075186224037902 2025-04-17T18:13:04.698367Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.699064Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:13:04.699718Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8595:6501], server id = [2:8599:6505], tablet id = 72075186224037906, status = OK 2025-04-17T18:13:04.699780Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8595:6501], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.700288Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8570:6481], server id = [2:8577:6488], tablet id = 72075186224037903 2025-04-17T18:13:04.700316Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.700889Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8598:6504], server id = [2:8600:6506], tablet id = 72075186224037907, status = OK 2025-04-17T18:13:04.700944Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8598:6504], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.701516Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8602:6508], server id = [2:8607:6512], tablet id = 72075186224037908, status = OK 2025-04-17T18:13:04.701568Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8602:6508], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:04.703816Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:13:04.704552Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8589:6497], server id = [2:8591:6498], tablet id = 72075186224037904 2025-04-17T18:13:04.704582Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 
2025-04-17T18:13:04.705723Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:13:04.706140Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8592:6499], server id = [2:8594:6500], tablet id = 72075186224037905 2025-04-17T18:13:04.706169Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.706553Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:13:04.706711Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8595:6501], server id = [2:8599:6505], tablet id = 72075186224037906 2025-04-17T18:13:04.706736Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.707701Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:13:04.708060Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8598:6504], server id = [2:8600:6506], tablet id = 72075186224037907 2025-04-17T18:13:04.708085Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.708478Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:13:04.708524Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:13:04.708688Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:04.708884Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:04.709253Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:13:04.711291Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8602:6508], server id = [2:8607:6512], tablet id = 72075186224037908 2025-04-17T18:13:04.711332Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:04.711914Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:04.745830Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8632:6533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:04.746029Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:04.746069Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8632:6533], StatRequests.size() = 1 2025-04-17T18:13:04.853297Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2I2YTdlZjYtNDg2OTc1ZTItOWJiZmUxMzgtYTNmNWVjNjk=, TxId: 2025-04-17T18:13:04.853365Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2I2YTdlZjYtNDg2OTc1ZTItOWJiZmUxMzgtYTNmNWVjNjk=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:13:04.854141Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:04.855340Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-04-17T18:13:04.886353Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:04.886408Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:13:04.962738Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8655:6544];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-17T18:13:05.224691Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8765:6639]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:05.225035Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:05.225102Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:13:05.228167Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:05.228239Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:13:05.228299Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:13:05.234098Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2025-04-17T18:13:04.428228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:04.428307Z node 1 :IMPORT WARN: Table profiles were not loaded ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5282, MsgBus: 26914 2025-04-17T18:12:53.817013Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346730854269288:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:53.817131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001620/r3tmp/tmp41c1PV/pdisk_1.dat 2025-04-17T18:12:54.119913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5282, node 1 2025-04-17T18:12:54.182617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:54.182632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:54.182637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:54.182732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:54.192050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:54.192225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:54.194390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26914 TClient is connected to server localhost:26914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:54.602685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.627278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
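Note on the "waiting for commit ... blocking ... unblocking" traces in the ConfigPipelining and ExtendLeasePipelining dumps above: the harness deliberately holds back an event so the pipelining path runs before the commit lands, then releases it. A minimal sketch of that pattern, assuming the TBlockEvents helper and TTestActorRuntime::WaitFor from ydb/core/testlib; the trigger helper is hypothetical, not the actual test body:

    #include <ydb/core/testlib/actors/block_events.h>

    void CheckCommitPipelining(NActors::TTestActorRuntime& runtime) {
        // Intercept every TEvTablet::TEvCommit the executor sends; this is
        // what prints the "blocking ... from FLAT_EXECUTOR to TABLET_ACTOR"
        // lines above.
        NKikimr::TBlockEvents<NKikimr::TEvTablet::TEvCommit> blockedCommits(runtime);

        TriggerOperationUnderTest(runtime);  // hypothetical trigger helper
        // Prints "... waiting for commit" / "... waiting for commit (done)".
        runtime.WaitFor("commit", [&] { return !blockedCommits.empty(); });

        blockedCommits.Unblock();  // emits the "unblocking ..." lines above
        blockedCommits.Stop();     // stop intercepting further commits
    }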
2025-04-17T18:12:54.746449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.874804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.948170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:56.338105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346743739172967:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.338246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.610329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.675506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.702298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.729020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.775050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.800676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.835232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.881240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346743739173628:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.881324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.881364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346743739173633:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.884443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:56.892401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346743739173635:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:56.973457Z node 1 :TX_PROXY ERROR: Actor# [1:7494346743739173690:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:58.021414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.053737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.082194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:12:58.816684Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346730854269288:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.816759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22281, MsgBus: 22142 2025-04-17T18:13:00.580149Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346761139055093:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:00.580205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001620/r3tmp/tmptRm3dY/pdisk_1.dat 2025-04-17T18:13:00.652442Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22281, node 2 2025-04-17T18:13:00.699866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:00.699923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:00.700602Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:00.700617Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:00.700624Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:00.700741Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:00.701289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22142 TClient is connected to server localhost:22142 WaitRootIsUp 'Root'... 
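For readability, the save-statistics statement embedded in the RunDataQuery line of the TraverseColumnShard dump above, laid out as YQL; the List<Uint32>/List<String> element types are inferred from the column_tag and data columns rather than taken verbatim from the log:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed
    DECLARE $data AS List<String>;         -- element type assumed

    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);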
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:01.084444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:01.097108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:01.143401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:01.280325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:01.351160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:03.021871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346769728991458:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.022029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.049862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.077691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.101207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.125662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.149597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.179927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.210974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.253605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346774023959408:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.253666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346774023959413:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.253684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:03.256372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:03.263961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346774023959415:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:03.347932Z node 2 :TX_PROXY ERROR: Actor# [2:7494346774023959471:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:04.073447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.109197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.178598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.580682Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346761139055093:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:05.580738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19844, MsgBus: 13417 2025-04-17T18:12:57.600482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346748427917153:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:57.600657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d7/r3tmp/tmpMBqf8l/pdisk_1.dat 2025-04-17T18:12:57.897314Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19844, node 1 2025-04-17T18:12:57.980906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:57.981031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:57.982731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:57.984251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:57.984330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:57.984341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:57.984475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13417 TClient is connected to server localhost:13417 WaitRootIsUp 'Root'... 
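The recurring "Resource pool default not found or you don't have access permissions" warnings above are a first-use race, not a failure: the workload service looks the pool up before TPoolCreatorActor has created it, the ESchemeOpCreateResourcePool suboperation then materializes /Root/.metadata/workload_manager/pools/default, and the later "path exist, request accepts it" TX_PROXY message confirms the create is accepted idempotently. A pool can also be declared explicitly; a minimal sketch, assuming current YQL workload-manager DDL, with illustrative limit values:

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative value
        QUEUE_SIZE = 100              -- illustrative value
    );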
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:58.399242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.422209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.548174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.676774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:58.736757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:00.230848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346761312820819:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.230957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.503037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.528735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.552906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.577854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.629604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.653606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.677395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:00.720429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346761312821475:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.720489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.720544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346761312821480:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:00.723203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:00.730814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346761312821482:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:13:00.815714Z node 1 :TX_PROXY ERROR: Actor# [1:7494346761312821535:3538] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32079, MsgBus: 10857 2025-04-17T18:13:02.754851Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346772719074639:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:02.754941Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d7/r3tmp/tmpAzM3nu/pdisk_1.dat 2025-04-17T18:13:02.849093Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32079, node 2 2025-04-17T18:13:02.882250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:02.882350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:02.883989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:02.895632Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:02.895654Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:02.895662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:02.895765Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10857 TClient is connected to server localhost:10857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:03.239858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:03.247332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:13:03.309895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:03.409195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:03.480559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:05.099077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346785603978302:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:05.099161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:05.149288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.171749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.194832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.218883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.242753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.269281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.297520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:05.348878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346785603978957:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:05.348961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:05.348967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346785603978962:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:05.351568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:05.360515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346785603978964:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:05.452808Z node 2 :TX_PROXY ERROR: Actor# [2:7494346785603979020:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD] Test command err: 2025-04-17T18:13:07.082416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:07.082481Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2025-04-17T18:13:05.916240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:05.916299Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TNodeBrokerTest::LoadStateMoveEpoch >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] Test command err: 2025-04-17T18:13:07.396359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:07.396428Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2025-04-17T18:10:29.878801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:29.878913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:29.879022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ba1/r3tmp/tmpvhlHY1/pdisk_1.dat 2025-04-17T18:10:30.174060Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16486, node 1 2025-04-17T18:10:30.384898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:30.384937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:30.384964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:30.385482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:30.387149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:30.466582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:30.466686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:30.480060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26496 2025-04-17T18:10:31.006974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:33.800122Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:33.832264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:33.832385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:33.869204Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:33.871625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:34.091010Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.091448Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.091814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.092117Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.092244Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.092334Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.092411Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.092481Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.092553Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.255469Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:34.255579Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:34.268390Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:34.385190Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:34.420106Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:34.420192Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:34.443852Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:34.446870Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:34.447052Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:34.447118Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:34.447159Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:34.447207Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:34.447250Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:34.447307Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:34.448481Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:34.483538Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:34.483634Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:34.491756Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:34.496306Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:34.496447Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:34.507898Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:34.525113Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:34.525159Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:34.525219Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:34.534194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:34.540218Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:34.540341Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:34.740551Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:34.899087Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:34.968576Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:35.738774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:35.738972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:35.765241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:36.068614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:36.068866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:36.069197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:36.069347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:36.069493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:36.069642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:36.069780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:36.069902Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:36.070046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:36.070214Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:36.070372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:36.070505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:36.101049Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:36.101143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... 4-17T18:13:05.813298Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:13:05.814389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:05.814466Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:05.816704Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:13:05.893391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:05.893546Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-04-17T18:13:05.894333Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8569:6481], server id = [2:8576:6488], tablet id = 72075186224037903 2025-04-17T18:13:05.894412Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.894802Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8662:6547], server id = [2:8667:6552], tablet id = 72075186224037899, status = OK 2025-04-17T18:13:05.894921Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8662:6547], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.895407Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8663:6548], server id = [2:8668:6553], tablet id = 72075186224037900, status = OK 2025-04-17T18:13:05.895467Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8663:6548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.895998Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8664:6549], server id = [2:8669:6554], tablet id = 72075186224037901, status = OK 2025-04-17T18:13:05.896065Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8664:6549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.897240Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8665:6550], server id = [2:8670:6555], tablet id = 72075186224037902, status = OK 2025-04-17T18:13:05.897297Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8665:6550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.897536Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8666:6551], server id = [2:8671:6556], tablet id = 72075186224037903, status = OK 2025-04-17T18:13:05.897581Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8666:6551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.898768Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:13:05.899728Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8662:6547], server id = [2:8667:6552], tablet id = 72075186224037899 2025-04-17T18:13:05.899762Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.900167Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:13:05.901019Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:13:05.901498Z node 
2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8663:6548], server id = [2:8668:6553], tablet id = 72075186224037900 2025-04-17T18:13:05.901528Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.901849Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:13:05.902138Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8665:6550], server id = [2:8670:6555], tablet id = 72075186224037902 2025-04-17T18:13:05.902165Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.902689Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8676:6561], server id = [2:8680:6565], tablet id = 72075186224037904, status = OK 2025-04-17T18:13:05.902754Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8676:6561], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.902842Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8664:6549], server id = [2:8669:6554], tablet id = 72075186224037901 2025-04-17T18:13:05.902863Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.903803Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8678:6563], server id = [2:8682:6567], tablet id = 72075186224037905, status = OK 2025-04-17T18:13:05.903863Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8678:6563], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.904165Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8681:6566], server id = [2:8684:6569], tablet id = 72075186224037906, status = OK 2025-04-17T18:13:05.904215Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8681:6566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.905354Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8683:6568], server id = [2:8686:6570], tablet id = 72075186224037907, status = OK 2025-04-17T18:13:05.905408Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8683:6568], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.906025Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:13:05.906872Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8676:6561], server id = [2:8680:6565], tablet id = 72075186224037904 2025-04-17T18:13:05.906900Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.907293Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:13:05.907714Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:13:05.908140Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8666:6551], server id = [2:8671:6556], tablet id = 72075186224037903 2025-04-17T18:13:05.908179Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.908335Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:13:05.908482Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8678:6563], server id = [2:8682:6567], tablet id = 72075186224037905 2025-04-17T18:13:05.908504Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.908610Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8690:6574], server id = [2:8692:6576], tablet id = 72075186224037908, status = 
OK 2025-04-17T18:13:05.908662Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8690:6574], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:05.908746Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:13:05.908958Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8681:6566], server id = [2:8684:6569], tablet id = 72075186224037906 2025-04-17T18:13:05.908985Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.909482Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8683:6568], server id = [2:8686:6570], tablet id = 72075186224037907 2025-04-17T18:13:05.909508Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.909796Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:13:05.909845Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:13:05.910070Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:05.910284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:05.910569Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:13:05.913315Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8690:6574], server id = [2:8692:6576], tablet id = 72075186224037908 2025-04-17T18:13:05.913363Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:05.914059Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:05.951553Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8710:6594]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:05.951866Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:05.951921Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8710:6594], StatRequests.size() = 1 2025-04-17T18:13:06.086000Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjU2NTVkNzctYjlmMTE2MzctNjU1NmE4YTAtNWJjZGUwZmQ=, TxId: 2025-04-17T18:13:06.086099Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjU2NTVkNzctYjlmMTE2MzctNjU1NmE4YTAtNWJjZGUwZmQ=, TxId: 2025-04-17T18:13:06.086741Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:06.099729Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8725:6600] 2025-04-17T18:13:06.099855Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8586:6495], server id = [2:8725:6600], tablet id = 72075186224037894, status = OK 2025-04-17T18:13:06.100001Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8725:6600], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-17T18:13:06.100173Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8726:6601] 
2025-04-17T18:13:06.100263Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8726:6601], schemeshard id = 72075186224037897 2025-04-17T18:13:06.113717Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:06.113785Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:13:06.233597Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8729:6604]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:06.233957Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:06.234021Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:13:06.237290Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:06.237371Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:13:06.237429Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:13:06.243528Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TDynamicNameserverTest::CacheMissSameDeadline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-04-17T18:13:07.362983Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:13:07.363681Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0014c8/r3tmp/tmpCVEHWl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:13:07.364211Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0014c8/r3tmp/tmpCVEHWl/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0014c8/r3tmp/tmpCVEHWl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16009823212017464828 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:13:07.371595Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:13:07.372066Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0014c8/r3tmp/tmpCVEHWl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:13:07.372340Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0014c8/r3tmp/tmpCVEHWl/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0014c8/r3tmp/tmpCVEHWl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3820389646334608271 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2025-04-17T18:10:42.352216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:42.352376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:42.352511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b19/r3tmp/tmpmFvyK3/pdisk_1.dat 2025-04-17T18:10:42.687827Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62999, node 1 2025-04-17T18:10:42.898444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:42.898495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:42.898517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:42.898969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:42.905757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:42.984608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:42.984733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:42.998406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26464 2025-04-17T18:10:43.512028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:46.364484Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:46.397481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:46.397565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:46.434865Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:46.437004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:46.655806Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.656146Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.656595Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.656893Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.656975Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.657048Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.657127Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.657200Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.657248Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.815347Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:46.815463Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:46.828778Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:46.959645Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:47.006615Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:47.006729Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:47.032644Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:47.035784Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:47.035962Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:47.036012Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:47.036050Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:47.036091Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:47.036134Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:47.036171Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:47.037198Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:47.071090Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:47.071223Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:47.077756Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:47.080892Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:47.080996Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:47.090387Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:47.106968Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:47.107030Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:47.107089Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:47.116958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:47.123240Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:47.123388Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:47.274936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:47.428304Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:47.507500Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:48.199680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2242:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:48.199819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:48.215157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:48.310873Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:48.311070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:48.311364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:48.311495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:48.311624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:48.311765Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:48.311885Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:48.312012Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:48.312130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:48.312260Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:48.312383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:48.312497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:48.337272Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:48.337377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... p traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:57.486003Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:12:57.489212Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:12:57.492482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6991:5168], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.492560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7001:5173], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.492655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:57.502583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:12:57.561783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7005:5176], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:12:57.783201Z node 2 :TX_PROXY ERROR: Actor# [2:7098:5222] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:57.864824Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7127:5237]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:12:57.865092Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:12:57.865178Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7129:5239] 2025-04-17T18:12:57.865250Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7129:5239] 2025-04-17T18:12:57.865572Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7130:5240] 2025-04-17T18:12:57.865735Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7129:5239], server id = [2:7130:5240], tablet id = 72075186224037894, status = OK 2025-04-17T18:12:57.865820Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7130:5240], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:12:57.865881Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:12:57.866001Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:12:57.866076Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7127:5237], StatRequests.size() = 1 2025-04-17T18:12:57.980191Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzEwNGJiMDYtODVkNWRmZjAtOWVjMTliNDEtNGI3NDE1ZTg=, TxId: 2025-04-17T18:12:57.980251Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzEwNGJiMDYtODVkNWRmZjAtOWVjMTliNDEtNGI3NDE1ZTg=, TxId: 2025-04-17T18:12:57.980690Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:12:57.994061Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:12:57.994116Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:12:58.038793Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:12:58.038880Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:12:58.104464Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7129:5239], schemeshard count = 1 2025-04-17T18:12:59.166307Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:12:59.166376Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:12:59.169240Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:12:59.185222Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:12:59.185685Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:12:59.185744Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-17T18:12:59.198967Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:12:59.210001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) ... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-17T18:12:59.211203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-17T18:12:59.211342Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-17T18:12:59.212081Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2804:3223] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-17T18:12:59.212131Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2804:3223] 2025-04-17T18:12:59.225726Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:12:59.225815Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-17T18:13:00.497636Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:00.497749Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:13:00.497790Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:00.498276Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:00.512059Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:00.512477Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:00.512542Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:00.513269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:13:00.526769Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:00.526991Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:13:00.527560Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7259:5316], server id = [2:7260:5317], tablet id = 72075186224037899, status = OK 2025-04-17T18:13:00.527679Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7259:5316], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:00.531361Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:13:00.531472Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:13:00.531697Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:00.531897Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:00.532082Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7259:5316], server id = [2:7260:5317], tablet id = 72075186224037899 2025-04-17T18:13:00.532112Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:00.532298Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:13:00.535163Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:00.564410Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7280:5336]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:00.564672Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:00.564711Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7280:5336], StatRequests.size() = 1 2025-04-17T18:13:00.674456Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDExYzcwZWEtMjlhNjU5OGItODhlODY2NzgtNThkYTI2N2M=, TxId: 2025-04-17T18:13:00.674569Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDExYzcwZWEtMjlhNjU5OGItODhlODY2NzgtNThkYTI2N2M=, TxId: 2025-04-17T18:13:00.675184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:00.690007Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:00.690083Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2804:3223] 2025-04-17T18:13:01.251599Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:13:01.251687Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:13:03.165683Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:13:03.165821Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:13:03.187264Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:05.596109Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:05.596165Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:13:06.779158Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:13:06.800845Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:13:06.800931Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2025-04-17T18:10:30.741702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:30.741806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:30.741918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b88/r3tmp/tmpe7H2QX/pdisk_1.dat 2025-04-17T18:10:31.061285Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18059, node 1 2025-04-17T18:10:31.277880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:31.277938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:31.277986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:31.278713Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:31.280417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:31.368314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:31.368447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:31.382024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6730 2025-04-17T18:10:31.899381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:34.584645Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:34.625303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:34.625417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:34.662962Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:34.665361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:34.884996Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.885493Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.886052Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.886379Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.886484Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.886614Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.886682Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.886737Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:34.886792Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:35.048313Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:35.048420Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:35.061289Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:35.177836Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:35.221435Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:35.221528Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:35.250157Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:35.252906Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:35.253066Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:35.253131Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:35.253170Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:35.253204Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:35.253251Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:35.253297Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:35.254586Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:35.287471Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:35.287571Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:35.294548Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:35.298142Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:35.298271Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:35.308222Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:35.326281Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:35.326333Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:35.326419Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:35.334627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:35.344134Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:35.344283Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:35.493743Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:35.662526Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:35.760281Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:36.477079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:36.477228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:36.499182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:36.783343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:36.783584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:36.783804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:36.783886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:36.783959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:36.784033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:36.784098Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:36.784178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:36.784268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:36.784366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:36.784483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:36.784559Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:36.808252Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:36.808349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... ode 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:13:03.719070Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8441:6407], schemeshard count = 1 2025-04-17T18:13:05.520924Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:05.520968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:13:05.520997Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:13:05.521032Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:05.523253Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:05.537847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:05.538277Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:05.538344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:05.538948Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2025-04-17T18:13:05.538984Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-04-17T18:13:05.539035Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:06.541432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:13:06.554737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:06.554968Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:13:06.555727Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8599:6490], server id = [2:8604:6495], tablet id = 72075186224037899, status = OK 2025-04-17T18:13:06.556090Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8599:6490], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.556437Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6491], server id = [2:8605:6496], tablet id = 72075186224037900, status = OK 2025-04-17T18:13:06.556494Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8600:6491], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.557799Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8601:6492], server id = [2:8606:6497], tablet id = 72075186224037901, status = OK 2025-04-17T18:13:06.557857Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8601:6492], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.558066Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8602:6493], server id = [2:8607:6498], tablet id = 72075186224037902, status = OK 2025-04-17T18:13:06.558120Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8602:6493], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.559816Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8603:6494], server id = [2:8610:6501], tablet id = 72075186224037903, status = OK 2025-04-17T18:13:06.559875Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8603:6494], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.564803Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:13:06.565665Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8599:6490], server id = [2:8604:6495], tablet id = 72075186224037899 2025-04-17T18:13:06.565702Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.566235Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:13:06.566839Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8600:6491], server id = [2:8605:6496], tablet id = 72075186224037900 2025-04-17T18:13:06.566871Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.567479Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:13:06.567735Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8622:6510], server id = [2:8625:6512], tablet id = 72075186224037904, status = OK 2025-04-17T18:13:06.567808Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8622:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.568418Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8601:6492], server id = [2:8606:6497], tablet id = 72075186224037901 2025-04-17T18:13:06.568447Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.568912Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8624:6511], server id = [2:8628:6514], tablet id = 72075186224037905, status 
= OK 2025-04-17T18:13:06.568971Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8624:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.569992Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:13:06.571025Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8602:6493], server id = [2:8607:6498], tablet id = 72075186224037902 2025-04-17T18:13:06.571057Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.571242Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:13:06.571632Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8627:6513], server id = [2:8629:6515], tablet id = 72075186224037906, status = OK 2025-04-17T18:13:06.571692Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8627:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.572140Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8603:6494], server id = [2:8610:6501], tablet id = 72075186224037903 2025-04-17T18:13:06.572169Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.572922Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8631:6517], server id = [2:8635:6521], tablet id = 72075186224037907, status = OK 2025-04-17T18:13:06.572993Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8631:6517], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.573389Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8634:6520], server id = [2:8637:6523], tablet id = 72075186224037908, status = OK 2025-04-17T18:13:06.573442Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8634:6520], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:06.577588Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:13:06.578137Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8622:6510], server id = [2:8625:6512], tablet id = 72075186224037904 2025-04-17T18:13:06.578168Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.579599Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:13:06.579972Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:13:06.580576Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8624:6511], server id = [2:8628:6514], tablet id = 72075186224037905 2025-04-17T18:13:06.580607Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.581282Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8627:6513], server id = [2:8629:6515], tablet id = 72075186224037906 2025-04-17T18:13:06.581313Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.581526Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:13:06.581848Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8631:6517], server id = [2:8635:6521], tablet id = 72075186224037907 2025-04-17T18:13:06.581877Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.581994Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:13:06.582049Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-04-17T18:13:06.582235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:06.582416Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:06.582839Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:13:06.584933Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8634:6520], server id = [2:8637:6523], tablet id = 72075186224037908 2025-04-17T18:13:06.584965Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:06.585664Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:06.619075Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8665:6546]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:06.619270Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:06.619311Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8665:6546], StatRequests.size() = 1 2025-04-17T18:13:06.718049Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmJjZGMzYy1mYjc5ZTlhMC04YTI1NmNkZC1lZTc3ZDk0Mg==, TxId: 2025-04-17T18:13:06.718099Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmJjZGMzYy1mYjc5ZTlhMC04YTI1NmNkZC1lZTc3ZDk0Mg==, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:13:06.718581Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8678:6552]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:06.718787Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:06.719178Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:06.719230Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:13:06.721628Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:06.721676Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:13:06.721722Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:13:06.725853Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-04-17T18:13:06.803531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:06.803594Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:06.814874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:13:06.846870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-04-17T18:13:05.771575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:05.771640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:06.958904Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD] Test command err: 2025-04-17T18:13:08.604962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:08.605017Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2025-04-17T18:13:07.292624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:07.292675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:07.303389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:13:07.330635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] Test command err: 2025-04-17T18:13:07.744042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:07.744104Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-04-17T18:13:06.099700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:06.099768Z node 1 :IMPORT WARN: Table profiles were not loaded |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed >> BSCRestartPDisk::RestartNotAllowed >> BSCRestartPDisk::RestartOneByOneWithReconnects >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] >> TDynamicNameserverTest::BasicFunctionality [GOOD] |90.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> IndexBuildTest::RejectsCreate >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted >> IndexBuildTest::CheckLimitWithDroppedIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2025-04-17T18:13:06.805912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:06.805970Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:06.819220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> IndexBuildTest::WithFollowers >> IndexBuildTest::BaseCase >> IndexBuildTest::Lock >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> IndexBuildTest::ShadowDataNotAllowedByDefault >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> VectorIndexBuildTest::BaseCase >> IndexBuildTest::CancellationNotEnoughRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2025-04-17T18:13:07.506172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:07.506232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:07.575057Z node 1 :NODE_BROKER ERROR: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2025-04-17T18:13:07.586938Z node 1 :NODE_BROKER ERROR: [Committed] Configured lease duration (10.000000s) is too small. Using min. 
value: 300.000000s >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 14083991954608352086 2025-04-17T18:13:10.631317Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.631440Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.631488Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.631526Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.631562Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.631602Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.631641Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.631681Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632343Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632419Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632453Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632488Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632551Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632582Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# 
NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632613Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632647Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.632695Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632743Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632809Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632848Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632874Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632897Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632918Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.632942Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.634381Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.634452Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.634481Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.634507Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.634537Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, 
PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.634568Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.634601Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.634646Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 17504150050784811605 2025-04-17T18:13:10.634505Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.634605Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.634667Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.634712Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.634750Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.634798Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.634846Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.635524Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.635596Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.635631Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.635680Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error 
reason Marker# BSVSF03 2025-04-17T18:13:10.635711Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.635739Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.635769Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.635823Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.635879Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.635899Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.635934Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.635972Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.636000Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.636018Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-17T18:13:10.637210Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.637267Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.637305Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.637358Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.637396Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 
2025-04-17T18:13:10.637428Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-17T18:13:10.637464Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> TNodeBrokerTest::BasicFunctionality [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2025-04-17T18:13:08.555207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:08.555295Z node 1 :IMPORT WARN: Table profiles were not loaded |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-04-17T18:13:07.464709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:07.464779Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:08.724548Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001 2025-04-17T18:13:08.737880Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-17T18:13:08.738368Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-17T18:13:08.738770Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] >> TraverseColumnShard::TraverseColumnTable [GOOD] >> IndexBuildTest::WithFollowers [GOOD] >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted |90.6%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> TReplicaTest::Commit >> TReplicaTest::Handshake >> TReplicaTest::Subscribe >> TReplicaTest::Unsubscribe >> TReplicaTest::HandshakeWithStaleGeneration >> TReplicaTest::Update >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::Merge >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::DropIndex >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> TReplicaTest::Handshake [GOOD] >> TReplicaTest::DoubleUnsubscribe >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.328313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.328358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.339952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.340262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.340372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.345529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.345637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.346075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.346212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, 
path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.348084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.348878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.348923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.349045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.349086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.349125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.349202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.353318Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.453922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.454088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.454273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.454455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.454511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.456098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.456186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.456290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.456340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.456369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.456409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.457713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.457758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-04-17T18:13:11.457787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.459358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.459398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.459440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.459479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.462956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.464558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.464730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.465625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.465724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.465769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.465995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.466051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.466269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.466353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.468001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.468038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.468183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.468222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.468545Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.468583Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.468661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.468690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.468743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.468782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.468809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.468862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.468893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.468917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.468975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.469011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.469047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.470546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.470642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.470672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
-17T18:13:12.071005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-17T18:13:12.071036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2025-04-17T18:13:12.071079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-17T18:13:12.071114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2025-04-17T18:13:12.071712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.071778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.071802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:13:12.071827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:13:12.071873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:13:12.072483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.072539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.072569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:13:12.072593Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:13:12.072617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:12.073849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.073896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.073928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:13:12.073955Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-17T18:13:12.073973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:13:12.074526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.074630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.074652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:13:12.074999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-17T18:13:12.075032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:12.075225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T18:13:12.075299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-17T18:13:12.075320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T18:13:12.075350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-17T18:13:12.075374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T18:13:12.075394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: false 2025-04-17T18:13:12.075412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-17T18:13:12.075437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:13:12.075454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:13:12.075506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:13:12.075528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-17T18:13:12.075698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-17T18:13:12.075722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:13:12.075741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-17T18:13:12.075751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-17T18:13:12.075774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:13:12.075792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 1 2025-04-17T18:13:12.075812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-17T18:13:12.076169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.076211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:13:12.076226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:13:12.076275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T18:13:12.076302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T18:13:12.076346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-17T18:13:12.076369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:333:2312] 2025-04-17T18:13:12.077476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:13:12.078259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:13:12.078335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:13:12.078355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:13:12.079533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:13:12.079579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:13:12.079602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:696:2654] TestWaitNotification: OK eventTxId 104 2025-04-17T18:13:12.080088Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:13:12.080292Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 208us result status StatusSuccess 2025-04-17T18:13:12.080615Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "valueFloat" Type: 
"Float" TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> TReplicaTest::UpdateWithoutHandshake >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsDropIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2025-04-17T18:13:08.237890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:08.237970Z node 1 :IMPORT WARN: Table profiles were not loaded >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 >> IndexBuildTest::ShadowDataEdgeCases [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2025-04-17T18:10:38.677256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:38.677400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:38.677545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b2b/r3tmp/tmpG2FCOq/pdisk_1.dat 2025-04-17T18:10:39.043337Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22251, node 1 2025-04-17T18:10:39.248410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:39.248454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:39.248477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:39.248962Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:39.250585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:39.329748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:39.329885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:39.343278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26106 2025-04-17T18:10:39.838188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:42.575236Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:42.609616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:42.609702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:42.646935Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:42.649124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:42.882774Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.883266Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.883805Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.884116Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.884200Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.884279Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.884357Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.884426Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:42.884491Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.047054Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:43.047160Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:43.060667Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:43.181148Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:43.221539Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:43.221669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:43.246310Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:43.249210Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:43.249364Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:43.249418Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:43.249453Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:43.249489Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:43.249532Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:43.249566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:43.250675Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:43.287076Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:43.287186Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:43.296027Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:43.300978Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:43.301137Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:43.315248Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:43.338091Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:43.338167Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:43.338261Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:43.349541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:43.357121Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:43.357275Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:43.524715Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:43.698360Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:43.754891Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:44.480018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:44.480175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:44.498347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:44.725176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:44.725380Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:44.725630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:44.725744Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:44.725816Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:44.725894Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:44.725960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:44.726029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:44.726107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:44.726236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:44.726317Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:44.726382Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:44.747944Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:44.748017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... ode 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:13:08.672256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:13:08.758692Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:13:08.758768Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:13:08.832420Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8441:6414], schemeshard count = 1 2025-04-17T18:13:11.081110Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:11.081155Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:13:11.081183Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:13:11.081214Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:11.083594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:11.098490Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:11.099228Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:11.099330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:11.099977Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:13:11.113297Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:11.113451Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:13:11.113924Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8564:6481], server id = [2:8569:6486], tablet id = 72075186224037899, status = OK 2025-04-17T18:13:11.114225Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8564:6481], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.114446Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8565:6482], server id = [2:8570:6487], tablet id = 72075186224037900, status = OK 2025-04-17T18:13:11.114480Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8565:6482], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.114564Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8566:6483], server id = [2:8571:6488], tablet id = 72075186224037901, status = OK 2025-04-17T18:13:11.114589Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8566:6483], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.115595Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8567:6484], server id = [2:8572:6489], tablet id = 72075186224037902, status = OK 2025-04-17T18:13:11.115638Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8567:6484], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.117073Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8568:6485], server id = [2:8575:6492], tablet id = 72075186224037903, status = OK 2025-04-17T18:13:11.117113Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8568:6485], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.120147Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:13:11.120689Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8564:6481], server id = [2:8569:6486], tablet id = 72075186224037899 2025-04-17T18:13:11.120728Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.121206Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:13:11.121662Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8565:6482], server id = [2:8570:6487], tablet id = 72075186224037900 2025-04-17T18:13:11.121698Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.122233Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:13:11.122914Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8587:6501], server id = [2:8591:6503], tablet id = 72075186224037904, status = OK 2025-04-17T18:13:11.123003Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8587:6501], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.123502Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8566:6483], server id = [2:8571:6488], tablet id = 72075186224037901 2025-04-17T18:13:11.123527Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.124003Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:13:11.124274Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8590:6502], server id = [2:8593:6505], tablet id = 72075186224037905, status = OK 2025-04-17T18:13:11.124311Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8590:6502], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.124574Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8567:6484], server id = [2:8572:6489], tablet id = 72075186224037902 2025-04-17T18:13:11.124591Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.124896Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:13:11.125327Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8592:6504], server id = [2:8594:6506], tablet id = 72075186224037906, status = OK 2025-04-17T18:13:11.125365Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8592:6504], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.125570Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8568:6485], server id = [2:8575:6492], tablet id = 72075186224037903 2025-04-17T18:13:11.125599Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.126134Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8596:6508], server id = [2:8598:6510], tablet id = 72075186224037907, status = OK 2025-04-17T18:13:11.126170Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8596:6508], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.126428Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8599:6511], server id = [2:8602:6514], tablet id = 72075186224037908, status = OK 2025-04-17T18:13:11.126461Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8599:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:13:11.128576Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:13:11.129165Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8587:6501], server id = [2:8591:6503], tablet id = 72075186224037904 2025-04-17T18:13:11.129194Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.129633Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:13:11.129842Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8590:6502], server id = [2:8593:6505], tablet id = 72075186224037905 2025-04-17T18:13:11.129860Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.130203Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:13:11.130596Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8592:6504], server id = [2:8594:6506], tablet id = 72075186224037906 2025-04-17T18:13:11.130616Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.131217Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:13:11.131340Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8596:6508], server id = [2:8598:6510], tablet id = 72075186224037907 2025-04-17T18:13:11.131356Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.131541Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:13:11.131568Z node 2 :STATISTICS DEBUG: Send aggregate statistics response 
to node: 2 2025-04-17T18:13:11.131693Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:11.131825Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:11.132070Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:13:11.133663Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8599:6511], server id = [2:8602:6514], tablet id = 72075186224037908 2025-04-17T18:13:11.133701Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:11.134533Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:11.158611Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8630:6537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:11.158738Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:11.158763Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8630:6537], StatRequests.size() = 1 2025-04-17T18:13:11.275401Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2M2ZTdlMDEtZmU5Y2FiZDUtOTg4OTE5NjMtYThiNGY2ZjA=, TxId: 2025-04-17T18:13:11.275450Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2M2ZTdlMDEtZmU5Y2FiZDUtOTg4OTE5NjMtYThiNGY2ZjA=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:13:11.275952Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8645:6543]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:11.276109Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:11.276432Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:11.276466Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:13:11.278710Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:11.278756Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:13:11.278794Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:13:11.282823Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TReplicaTest::Delete [GOOD] >> TReplicaTest::SyncVersion [GOOD] >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> TReplicaTest::DoubleDelete [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2025-04-17T18:13:12.632307Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.632350Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.635334Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.635382Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.639454Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.639598Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-17T18:13:12.639674Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.639867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-04-17T18:13:12.640685Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-04-17T18:13:12.640735Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, 
LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-17T18:13:12.640786Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.640866Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-04-17T18:13:12.640895Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.895463Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-17T18:13:12.895529Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.895657Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.895692Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.895740Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.895803Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2025-04-17T18:13:12.633614Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.633672Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.633761Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-04-17T18:13:12.633824Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-17T18:13:12.636026Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.636142Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-17T18:13:12.636200Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.636330Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.636369Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.641112Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized 
size 30} 2025-04-17T18:13:12.642105Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-04-17T18:13:12.642135Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:8:2055], path# path 2025-04-17T18:13:12.642218Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:12.642243Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-17T18:13:12.642270Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.895492Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2025-04-17T18:13:12.633078Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.633146Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.879312Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-17T18:13:12.879358Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.879475Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.879502Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.883830Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.883952Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-17T18:13:12.884029Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.884101Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-17T18:13:12.884151Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-04-17T18:13:12.884185Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-17T18:13:13.149953Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:13.150008Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:13.150080Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-17T18:13:13.150100Z 
node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-17T18:13:13.150157Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.150284Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.150325Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.150381Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:13.150511Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:13.150544Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-17T18:13:13.150580Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:13.150686Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-17T18:13:13.150723Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.150779Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:13.150805Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2025-04-17T18:13:11.247836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.324550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.324585Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.334031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.334274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.334396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.339365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.339495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.339897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.340024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.341785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.342898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.343935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.343988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.344089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.348324Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.432279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.433886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.434952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.436112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-04-17T18:13:11.436192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.439083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.439180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.439338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.439419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.439443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.439485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.440881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.440916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.440938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.442115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.442142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.442192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.442244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.445259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.446526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.446657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.448317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.448422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.448457Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.449423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.449465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.449612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.449662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.451190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.451369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.451653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451687Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.451763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.451802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.451837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.451886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.451916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.451944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.451966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.451988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.452040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.452072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.452092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.453379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.453488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.453520Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Publication in-flight, co ... bletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-17T18:13:12.972292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 161500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 122 } } 2025-04-17T18:13:12.972399Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 161500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 122 } } 2025-04-17T18:13:12.972438Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-17T18:13:12.972542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.972582Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128 2025-04-17T18:13:12.974391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.974539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.974589Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:12.974688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2025-04-17T18:13:12.974828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:12.976170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2025-04-17T18:13:12.976278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2025-04-17T18:13:12.976814Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:12.976927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, 
at schemeshard: 72057594046678944 2025-04-17T18:13:12.976982Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2025-04-17T18:13:12.977216Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129 2025-04-17T18:13:12.977359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2025-04-17T18:13:12.981455Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:12.981504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-17T18:13:12.981723Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:12.981769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 109, path id: 4 2025-04-17T18:13:12.982548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.982600Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:13:12.983105Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-04-17T18:13:12.983175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-04-17T18:13:12.983200Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2025-04-17T18:13:12.983228Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-04-17T18:13:12.983273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T18:13:12.983337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 109 2025-04-17T18:13:12.988487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-04-17T18:13:12.989132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 792 } } 2025-04-17T18:13:12.989172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-17T18:13:12.989285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 
Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 792 } } 2025-04-17T18:13:12.989375Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 792 } } 2025-04-17T18:13:12.990042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 676 RawX2: 8589937222 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-17T18:13:12.990092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-17T18:13:12.990241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 676 RawX2: 8589937222 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-17T18:13:12.990292Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:13:12.990374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 676 RawX2: 8589937222 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-17T18:13:12.990442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:12.990493Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.990530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-17T18:13:12.990568Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240 2025-04-17T18:13:12.992586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.992970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.993224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:13:12.993275Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState 2025-04-17T18:13:12.993419Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-17T18:13:12.993456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-17T18:13:12.993508Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-17T18:13:12.993549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-17T18:13:12.993585Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2025-04-17T18:13:12.993650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 109 2025-04-17T18:13:12.993698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-17T18:13:12.993738Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-04-17T18:13:12.993773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0 2025-04-17T18:13:12.993884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:13:12.995247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-17T18:13:12.995291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:785:2730] TestWaitNotification: OK eventTxId 109 >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> IndexBuildTest::RejectsDropIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2025-04-17T18:13:12.928344Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.928387Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject update from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-04-17T18:13:12.928454Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-17T18:13:12.928475Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-17T18:13:12.928555Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.928625Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-04-17T18:13:12.928661Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-04-17T18:13:12.928702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-17T18:13:12.928739Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.928796Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.928849Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-04-17T18:13:12.928879Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:13.180803Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-17T18:13:13.180868Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:13.180983Z node 2 
:SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.181016Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject update from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-04-17T18:13:13.181089Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-17T18:13:13.181121Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-17T18:13:13.181175Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.181249Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-17T18:13:13.181283Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-04-17T18:13:13.181334Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-17T18:13:13.181389Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:13.181447Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.181519Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2025-04-17T18:13:13.181554Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2025-04-17T18:13:12.632050Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.632105Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.632232Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.632259Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 1, generation# 1 2025-04-17T18:13:12.632304Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-04-17T18:13:12.632327Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-17T18:13:12.895508Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-17T18:13:12.895580Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-17T18:13:12.895733Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-17T18:13:12.895850Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-17T18:13:12.895892Z node 2 :SCHEME_BOARD_REPLICA NOTICE: 
[2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.895988Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.896030Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.900139Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.900272Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:8:2055] 2025-04-17T18:13:12.900340Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:12.900361Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-17T18:13:12.900382Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.900439Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:8:2055] 2025-04-17T18:13:13.157421Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:13.157467Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:13.157570Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.157605Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.157649Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:13.157731Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-17T18:13:13.157779Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-17T18:13:13.157854Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.157882Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.157926Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 
2025-04-17T18:13:13.158068Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.158092Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.158111Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:13.158166Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-17T18:13:13.158230Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-17T18:13:13.158266Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:13.158378Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:8:2055] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2025-04-17T18:13:12.635572Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.635612Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject commit from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-04-17T18:13:12.635648Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.635672Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.907102Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-04-17T18:13:12.907146Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 0 2025-04-17T18:13:12.907208Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-04-17T18:13:12.907231Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.907280Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-04-17T18:13:12.907305Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Commit generation: owner# 1, generation# 1 2025-04-17T18:13:12.907350Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-04-17T18:13:12.907371Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject commit from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-04-17T18:13:12.907395Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:7:2054] 2025-04-17T18:13:12.907430Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-17T18:13:13.159208Z node 3 
:SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:13.159250Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:13.159369Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.159403Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.164209Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:13.164349Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-17T18:13:13.164428Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.164549Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-17T18:13:13.164581Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.164671Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:13.164693Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2025-04-17T18:13:13.164715Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2025-04-17T18:13:13.164808Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2025-04-17T18:13:13.164835Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.164899Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:11:2058] 2025-04-17T18:13:13.164926Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:11:2058], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.165001Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:12:2059] 2025-04-17T18:13:13.165028Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:12:2059], path# path, domainOwnerId# 0, capabilities# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-04-17T18:13:12.651913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-04-17T18:13:12.651971Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert 
description: path# path 2025-04-17T18:13:12.652059Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.652212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-17T18:13:12.652245Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.652279Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.652353Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.652383Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.652487Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.652514Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.656962Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.657172Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:12.657198Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-17T18:13:12.657217Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.913334Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-17T18:13:12.913430Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.913544Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-17T18:13:12.913578Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.913644Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.913779Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.913814Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.913862Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , 
PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.913985Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:12.914013Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-17T18:13:12.914044Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.914126Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2025-04-17T18:13:12.914193Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.914248Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.914279Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.914305Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.914376Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.914416Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-17T18:13:12.914490Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.914561Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-04-17T18:13:12.914605Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.178681Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:8:2055] 2025-04-17T18:13:13.178728Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-17T18:13:13.178791Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 1, capabilities# 2025-04-17T18:13:13.178906Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:13.178950Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:13.179032Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:13.179061Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 1, generation# 1 2025-04-17T18:13:13.179152Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle 
NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2025-04-17T18:13:12.631917Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-04-17T18:13:12.632001Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-17T18:13:12.632068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.632095Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject handshake from stale populator: sender# [1:7:2054], owner# 1, generation# 1, pending generation# 2 2025-04-17T18:13:12.907730Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-17T18:13:12.907792Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.907956Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-17T18:13:12.908017Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.908163Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.908341Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.908383Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.915295Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.915486Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:12.915529Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-17T18:13:12.915563Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.915660Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.915717Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.915762Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.915834Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 
2025-04-17T18:13:12.915896Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-17T18:13:12.915957Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.916069Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-04-17T18:13:12.916122Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.177059Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:13.177139Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:13.177252Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.177287Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.177330Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:13.177407Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.177433Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.177500Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:13.177552Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:13.177625Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:13.177658Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2025-04-17T18:13:13.177687Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2025-04-17T18:13:13.177741Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.177770Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.177795Z 
node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:13.177839Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:13.177865Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.177890Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2025-04-17T18:13:12.633394Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:12.633437Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:12.635340Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-17T18:13:12.635384Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:12.640331Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-17T18:13:12.640491Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-17T18:13:12.640586Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:12.640740Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-17T18:13:12.640807Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-17T18:13:12.640842Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-17T18:13:12.878068Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-17T18:13:12.878113Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-17T18:13:12.878158Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.131383Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:13.131425Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-17T18:13:13.131512Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 76 2025-04-17T18:13:13.131536Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] 
Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.131576Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2025-04-17T18:13:13.131627Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2025-04-17T18:13:13.131663Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-17T18:13:13.131740Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:7:2054], cookie# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.308969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.309007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.319787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.320143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.320321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.326994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.327861Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.330971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.332093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.337545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.342970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.343951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.343987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.344086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.349005Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.447876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.448083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.448289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.448510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.448595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.450804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.450916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.451117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.451221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.451268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 2 -> 3 2025-04-17T18:13:11.452963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.453018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.453057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.454783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.454835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.454876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.454957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.458698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.460463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.460645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.461659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.461781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.461833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.462207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.462271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.462443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.462522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.464321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.464367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.464507Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.464546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.464933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.464993Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.465096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.465144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.465189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.465230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.465286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.465326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.465361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.465404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.465461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.465500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.465532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.472828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.472970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.473011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
on: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:13.251390Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:13:13.251575Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 203us result status StatusSuccess 2025-04-17T18:13:13.252226Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 
PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:13.252800Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:13:13.253062Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 308us result status StatusSuccess 2025-04-17T18:13:13.253877Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" 
PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false 
IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IndexBuildTest::DropIndex [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2025-04-17T18:10:41.823029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:41.823172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:41.823309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b1e/r3tmp/tmpAUtPGJ/pdisk_1.dat 2025-04-17T18:10:42.150044Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1991, node 1 2025-04-17T18:10:42.369774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:42.369823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:42.369864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:42.370611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:42.372831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:42.459693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:42.459832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:42.473739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3725 2025-04-17T18:10:42.971735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:45.819218Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:45.852097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:45.852182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:45.889272Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:45.891055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:46.111820Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.112202Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.112649Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.112892Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.112949Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.113003Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.113056Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.113100Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.113140Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:46.273742Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:46.273826Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:46.287155Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:46.405049Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:46.438160Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:46.438249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:46.458811Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:46.461595Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:46.461763Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:46.461822Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:46.461865Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:46.461910Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:46.461947Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:46.461990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:46.463100Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:46.495071Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:46.495214Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:46.501477Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:46.505260Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:46.505358Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:46.514971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:10:46.532267Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:46.532324Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:46.532392Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:10:46.540761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:46.546106Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:46.546237Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:46.700068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:46.860656Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:46.918076Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:47.451504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:10:48.013775Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:48.161408Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:10:48.161477Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:48.161575Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:48.162370Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2599:2951] 2025-04-17T18:10:48.162504Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2599:2951], schemeshard id = 72075186224037899 2025-04-17T18:10:49.158869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2734:3250], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:49.159000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:49.178878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T18:10:49.433596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:49.433779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:49.434015Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:49.434359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:49.434487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:49.434588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:49.434665Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:49.434731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:49.434799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2882:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:1 ... 025-04-17T18:13:10.222794Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:9508:7169], schemeshard count = 1 2025-04-17T18:13:11.493356Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-17T18:13:11.493413Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 221.000000s, at schemeshard: 72075186224037899 2025-04-17T18:13:11.493603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-17T18:13:11.506910Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:13:12.232030Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:12.232076Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:13:12.232107Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-17T18:13:12.232151Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:13:12.235373Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:12.251074Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:12.251505Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:12.251569Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:12.252294Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:13:12.264992Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:12.265188Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:13:12.265737Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9651:7252], server id = [2:9656:7257], tablet id = 72075186224037905, status = OK 2025-04-17T18:13:12.266017Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9651:7252], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.267681Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9652:7253], server id = [2:9657:7258], tablet id = 72075186224037906, status = OK 2025-04-17T18:13:12.267737Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9652:7253], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.268385Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9653:7254], server id = [2:9658:7259], tablet id = 72075186224037907, status = OK 2025-04-17T18:13:12.268426Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9653:7254], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.268956Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9654:7255], server id = [2:9659:7260], tablet id = 72075186224037908, status = OK 2025-04-17T18:13:12.268998Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9654:7255], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.269736Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9655:7256], server id = [2:9660:7261], tablet id = 72075186224037909, status = OK 2025-04-17T18:13:12.269777Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9655:7256], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.272658Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:13:12.273285Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9651:7252], server id = [2:9656:7257], tablet id = 72075186224037905 2025-04-17T18:13:12.273313Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.274003Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:13:12.274646Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9652:7253], server id = [2:9657:7258], tablet id = 72075186224037906 2025-04-17T18:13:12.274668Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.274756Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9673:7272], server id = [2:9675:7273], tablet id = 
72075186224037910, status = OK 2025-04-17T18:13:12.274796Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9673:7272], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.275658Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:13:12.275862Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9654:7255], server id = [2:9659:7260], tablet id = 72075186224037908 2025-04-17T18:13:12.275880Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.276444Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9676:7274], server id = [2:9678:7275], tablet id = 72075186224037911, status = OK 2025-04-17T18:13:12.276486Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9676:7274], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.277264Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:13:12.277401Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9681:7277], server id = [2:9683:7279], tablet id = 72075186224037912, status = OK 2025-04-17T18:13:12.277448Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9681:7277], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.278554Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9653:7254], server id = [2:9658:7259], tablet id = 72075186224037907 2025-04-17T18:13:12.278592Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.279672Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-17T18:13:12.280547Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9655:7256], server id = [2:9660:7261], tablet id = 72075186224037909 2025-04-17T18:13:12.280569Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.280650Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9685:7281], server id = [2:9688:7283], tablet id = 72075186224037913, status = OK 2025-04-17T18:13:12.280686Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9685:7281], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.281346Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-17T18:13:12.281781Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9673:7272], server id = [2:9675:7273], tablet id = 72075186224037910 2025-04-17T18:13:12.281800Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.282284Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9689:7284], server id = [2:9692:7287], tablet id = 72075186224037914, status = OK 2025-04-17T18:13:12.282327Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9689:7284], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:12.283579Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-17T18:13:12.284195Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9676:7274], server id = [2:9678:7275], tablet id = 72075186224037911 2025-04-17T18:13:12.284216Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.284802Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-17T18:13:12.285136Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client 
id = [2:9681:7277], server id = [2:9683:7279], tablet id = 72075186224037912 2025-04-17T18:13:12.285155Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.285343Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-17T18:13:12.285644Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9685:7281], server id = [2:9688:7283], tablet id = 72075186224037913 2025-04-17T18:13:12.285662Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.286225Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-17T18:13:12.286276Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:13:12.286477Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:12.286629Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:12.286824Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:13:12.288292Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9689:7284], server id = [2:9692:7287], tablet id = 72075186224037914 2025-04-17T18:13:12.288312Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:12.288738Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:12.316094Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:9717:7308]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:12.316285Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:12.316315Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:9717:7308], StatRequests.size() = 1 2025-04-17T18:13:12.413542Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjQ3YjM2ZDUtMjJiMmM4NmEtZDMxYjY4ZTUtZDU3YTMyYmQ=, TxId: 2025-04-17T18:13:12.413605Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjQ3YjM2ZDUtMjJiMmM4NmEtZDMxYjY4ZTUtZDU3YTMyYmQ=, TxId: ...
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:13:12.414254Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:9730:7314]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:12.414461Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:12.414957Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:12.415009Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:13:12.418365Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:13:12.418430Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:13:12.418477Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:13:12.423829Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.307782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.307831Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.321668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.322042Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.322221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.329520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.329669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.331018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.332086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.337223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.344010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.344057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.344184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.348467Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.447356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.447515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.447695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.447840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.447878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.449609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.449701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.449821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.449862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.449887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.449919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.451243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.452581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.452621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.452646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.452688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.459870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.461212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.461338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.461908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.461978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.462014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.462253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.462288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.462415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.462468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.463700Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.463732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.463855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.463877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.464155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.464186Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.464252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.464291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.464318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.464341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.464366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.464400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.464433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.464453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.464490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.464512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.464532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.465648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.465725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.465747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004 2025-04-17T18:13:13.530421Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:13.530500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:13.530539Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId# 107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-04-17T18:13:13.530598Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136 2025-04-17T18:13:13.534310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.534381Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-17T18:13:13.534432Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation 2025-04-17T18:13:13.534462Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 107 2025-04-17T18:13:13.535558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 463 } } 2025-04-17T18:13:13.535599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-04-17T18:13:13.535712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 463 } } 2025-04-17T18:13:13.535797Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 463 } } 2025-04-17T18:13:13.536681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:13:13.536723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-04-17T18:13:13.536836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-17T18:13:13.536883Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-17T18:13:13.537218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.537259Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-17T18:13:13.537300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:13:13.537334Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-17T18:13:13.537408Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-17T18:13:13.537504Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-17T18:13:13.537596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:13.537640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:13:13.539395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.539783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.539943Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:13.539980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:13.540120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:13:13.540242Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:13.540276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-04-17T18:13:13.540311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-17T18:13:13.540355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.540395Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:13:13.540471Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.540531Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:13:13.540569Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-17T18:13:13.541504Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T18:13:13.541590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T18:13:13.541621Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-17T18:13:13.541662Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T18:13:13.541698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:13.542442Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T18:13:13.542512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-17T18:13:13.542538Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-17T18:13:13.542562Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:13:13.542583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:13:13.542635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-17T18:13:13.544080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.544122Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:13.544323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:13:13.544420Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-17T18:13:13.544446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T18:13:13.544481Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-17T18:13:13.544513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T18:13:13.544550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-17T18:13:13.544598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:379:2347] message: TxId: 107 2025-04-17T18:13:13.544633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-17T18:13:13.544667Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-17T18:13:13.544694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-17T18:13:13.544769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:13:13.545806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T18:13:13.546072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-17T18:13:13.546960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-17T18:13:13.547013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:584:2544] TestWaitNotification: OK eventTxId 107
>> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD]
>> TReplicaCombinationTest::MigratedPathRecreation [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.331203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.331263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.344583Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.344871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.345038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.351772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.351914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.352471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.352634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.354973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.356055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.356119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.356270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.356315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.356360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.356473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.361889Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.467242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.467408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.467592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.467768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.467824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.469313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.469415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.469531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-17T18:13:11.469578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.469599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.469628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.471018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.471050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.471072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.472311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.472343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.472372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.472425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.474793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.476115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.476223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.476886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.476964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.477008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.477208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.477243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.477361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.477413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-17T18:13:11.478736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.478768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.478869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.478895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.479107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.479135Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.479199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.479237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.479264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.479281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.479324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.479368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.479391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.479409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.479445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.479466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.479487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.480605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.480675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.480696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
09550, at schemeshard: 72057594046678944 2025-04-17T18:13:13.690934Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-04-17T18:13:13.691218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-17T18:13:13.691241Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-17T18:13:13.691309Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-04-17T18:13:13.691337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-04-17T18:13:13.691376Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-04-17T18:13:13.691395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-04-17T18:13:13.691418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2025-04-17T18:13:13.691555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-17T18:13:13.691576Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2025-04-17T18:13:13.691607Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-17T18:13:13.691622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-17T18:13:13.691638Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2025-04-17T18:13:13.691930Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.691995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.692016Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:13:13.692042Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-17T18:13:13.692066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-04-17T18:13:13.692349Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.692386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.692399Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:13:13.692412Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 
72057594046678944, LocalPathId: 8], version: 18446744073709551615 2025-04-17T18:13:13.692428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-17T18:13:13.693200Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.693243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.693258Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:13:13.693275Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-04-17T18:13:13.693291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:13.693837Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.693895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.693914Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:13:13.694210Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.694260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.694273Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:13:13.694738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:13:13.694770Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:13.694905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-04-17T18:13:13.694986Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-04-17T18:13:13.695012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-04-17T18:13:13.695036Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-04-17T18:13:13.695058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-04-17T18:13:13.695083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is 
published: false 2025-04-17T18:13:13.695409Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.695453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:13:13.695468Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:13:13.695486Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-04-17T18:13:13.695501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-04-17T18:13:13.695554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-04-17T18:13:13.696567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-17T18:13:13.696603Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:13.696737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-17T18:13:13.696818Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-04-17T18:13:13.696841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-17T18:13:13.696868Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-04-17T18:13:13.696889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-17T18:13:13.696911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-04-17T18:13:13.696961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:414:2371] message: TxId: 105 2025-04-17T18:13:13.696995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-17T18:13:13.697031Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:13:13.697064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T18:13:13.697137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-17T18:13:13.697168Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-17T18:13:13.697186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-17T18:13:13.697203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-04-17T18:13:13.697215Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2025-04-17T18:13:13.697226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2025-04-17T18:13:13.697246Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-17T18:13:13.697572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:13:13.697668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:13:13.698244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:13:13.698279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:13:13.698305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:13:13.699258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:13:13.699353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:13:13.699383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:938:2862] TestWaitNotification: OK eventTxId 105
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD]
Test command err:
2025-04-17T18:13:13.744177Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:13.744240Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-17T18:13:13.744311Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-04-17T18:13:13.744332Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-17T18:13:13.744383Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-04-17T18:13:13.744404Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-17T18:13:13.744446Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-04-17T18:13:13.744472Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-17T18:13:13.744612Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 103 2025-04-17T18:13:13.744665Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.748601Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-17T18:13:13.748760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { 
Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2025-04-17T18:13:13.748801Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.748836Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-17T18:13:13.748917Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-17T18:13:13.748982Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-17T18:13:13.775889Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-04-17T18:13:13.775923Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 800, generation# 1 2025-04-17T18:13:13.775970Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-04-17T18:13:13.775986Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 800, generation# 1 2025-04-17T18:13:13.776018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-04-17T18:13:13.776032Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 900, generation# 1 2025-04-17T18:13:13.776095Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-04-17T18:13:13.776114Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 900, generation# 1 2025-04-17T18:13:13.776174Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103 2025-04-17T18:13:13.776199Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.776246Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-17T18:13:13.776312Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2025-04-17T18:13:13.776332Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# 
[OwnerId: 900, LocalPathId: 1], deletion# false 2025-04-17T18:13:13.776358Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2025-04-17T18:13:13.776396Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-17T18:13:13.776472Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:13:2060] 2025-04-17T18:13:13.776502Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Subscribe: subscriber# [1:13:2060], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-04-17T18:13:13.776710Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-04-17T18:13:13.776727Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-04-17T18:13:13.776752Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-04-17T18:13:13.776766Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-04-17T18:13:13.776816Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-04-17T18:13:13.776831Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-04-17T18:13:13.776866Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-04-17T18:13:13.776881Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-04-17T18:13:13.776923Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event size# 103 2025-04-17T18:13:13.776947Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.776976Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-17T18:13:13.777015Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 
2025-04-17T18:13:13.777033Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.777057Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-17T18:13:13.777099Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:17:2064] 2025-04-17T18:13:13.777117Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Subscribe: subscriber# [1:17:2064], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-17T18:13:13.777281Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-04-17T18:13:13.777305Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 800, generation# 1 2025-04-17T18:13:13.777350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-04-17T18:13:13.777368Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 800, generation# 1 2025-04-17T18:13:13.777426Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-04-17T18:13:13.777439Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 900, generation# 1 2025-04-17T18:13:13.777461Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-04-17T18:13:13.777473Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 900, generation# 1 2025-04-17T18:13:13.777516Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:19:2066], cookie# 0, event size# 103 2025-04-17T18:13:13.777549Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-17T18:13:13.777576Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:18:2065] Upsert description: path# /Root/Te ... 
ble_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-04-17T18:13:14.125641Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:394:2441] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:397:2444] 2025-04-17T18:13:14.125654Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T18:13:14.125694Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Subscribe: subscriber# [2:397:2444], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-17T18:13:14.127316Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-04-17T18:13:14.127340Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-04-17T18:13:14.127366Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-04-17T18:13:14.127379Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-04-17T18:13:14.127407Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-04-17T18:13:14.127420Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-04-17T18:13:14.127443Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-04-17T18:13:14.127456Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-04-17T18:13:14.127497Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 64 2025-04-17T18:13:14.127512Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-17T18:13:14.127525Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-04-17T18:13:14.127597Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 130 2025-04-17T18:13:14.127613Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2025-04-17T18:13:14.127627Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-04-17T18:13:14.127658Z node 2 :SCHEME_BOARD_REPLICA 
DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:401:2448] 2025-04-17T18:13:14.127670Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T18:13:14.127700Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Subscribe: subscriber# [2:401:2448], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-17T18:13:14.129326Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-04-17T18:13:14.129348Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-04-17T18:13:14.129375Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-04-17T18:13:14.129388Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-04-17T18:13:14.129430Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-04-17T18:13:14.129452Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-04-17T18:13:14.129481Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-04-17T18:13:14.129493Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-04-17T18:13:14.129535Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64 2025-04-17T18:13:14.129552Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-17T18:13:14.129567Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-04-17T18:13:14.129601Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2025-04-17T18:13:14.129616Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-17T18:13:14.129644Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:405:2452] 2025-04-17T18:13:14.129660Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# /Root/Tenant/table_inside 2025-04-17T18:13:14.129682Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:402:2449] Subscribe: subscriber# [2:405:2452], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-17T18:13:14.270538Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:14.270603Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-17T18:13:14.270685Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-04-17T18:13:14.270719Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 800, generation# 1 2025-04-17T18:13:14.270798Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-04-17T18:13:14.270835Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-17T18:13:14.270897Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-04-17T18:13:14.270922Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-04-17T18:13:14.271036Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 118 2025-04-17T18:13:14.271092Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-04-17T18:13:14.271147Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-17T18:13:14.271248Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 117 2025-04-17T18:13:14.271294Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-04-17T18:13:14.271354Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2025-04-17T18:13:14.271388Z node 3 :SCHEME_BOARD_REPLICA 
INFO: [3:6:2053] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2025-04-17T18:13:14.271440Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-17T18:13:14.271547Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-17T18:13:14.271607Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.307820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.307867Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:13:11.319810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.320144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.320283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.327989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.328148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.330984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.332055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.336911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.342985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.343960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.343995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.344083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.348886Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.441120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.441295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.441480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.441657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.441700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.443486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.443600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.443722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.443766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.443794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.443818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.445025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.445066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.445091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.446280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.446321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.446352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.446387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.448908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.450271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.450441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.451387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.451820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.451885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.452124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.452222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.453852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.453892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.454005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.454034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.454317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.454352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.454436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.454460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.454506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.454530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.454568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.454596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.454622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.454644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.454696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.454725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.454750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.455918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.455994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.456020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athId: 3] was 2 2025-04-17T18:13:14.129669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:14.129891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.129947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:14.130925Z node 1 :BUILD_INDEX DEBUG: AddShardStatus id# 102 shard 72057594046678944:11 range { From: -inf, To: inf } 2025-04-17T18:13:14.130982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.131011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.131062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:13:14.134521Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:13:14.134590Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: by_embedding, IndexColumn: embedding, DataColumns: covered, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976720765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976720766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:14.134612Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-04-17T18:13:14.136855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:14.136891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-04-17T18:13:14.137243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:14.137272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:14.137295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:14.138804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:3104:4900] sender: [1:3162:2058] recipient: [1:15:2062] 2025-04-17T18:13:14.180822Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:13:14.181001Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 217us result status StatusSuccess 2025-04-17T18:13:14.181597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 
72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2025-04-17T18:10:20.581615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:20.581753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:20.581878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bfa/r3tmp/tmpiB1UDF/pdisk_1.dat 2025-04-17T18:10:20.928520Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25879, node 1 2025-04-17T18:10:21.153734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:21.153783Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:21.153813Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:21.154516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:21.159972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:21.248684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:21.248822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:21.262612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17327 2025-04-17T18:10:21.821396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:24.850815Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:24.885809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:24.885904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:24.923125Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:24.925380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:25.146988Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.147540Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.148076Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.148348Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.148409Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.148465Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.148515Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.148558Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.148610Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:25.311056Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:25.311174Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:25.325236Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:25.464059Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:25.505195Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:25.505306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:25.538373Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:25.540102Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:25.540320Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:25.540396Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:25.540457Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:25.540526Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:25.540590Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:25.540644Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:25.541859Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:25.582145Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:25.582357Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:25.590969Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:25.598395Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:25.598554Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:25.612816Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:10:25.635730Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:25.635793Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:25.635861Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:10:25.646656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:25.654542Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:25.654698Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:25.843182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:26.018917Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:26.076532Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:26.618114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:10:27.177648Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:27.322543Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:10:27.322608Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:27.322699Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:10:27.323593Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2599:2951] 2025-04-17T18:10:27.323738Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2599:2951], schemeshard id = 72075186224037899 2025-04-17T18:10:28.428491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2734:3250], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:28.428640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:28.447565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T18:10:28.550605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:28.550802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:28.551075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:28.551245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:28.551376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:28.551520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:28.551693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:28.551843Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:28.551993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2821:3055];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18 ... 06.655938Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:13:08.434419Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-17T18:13:08.434490Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037897 2025-04-17T18:13:08.434837Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-17T18:13:08.448278Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:13:09.785649Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:09.785707Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-04-17T18:13:09.785735Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:13:09.785774Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:13:09.785804Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:13:09.786052Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:13:09.788798Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:13:09.791583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8069:5956], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:09.791664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8080:5961], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:09.792080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:09.800669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:13:09.842662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8083:5964], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:13:10.061410Z node 2 :TX_PROXY ERROR: Actor# [2:8183:6013] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:10.109256Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:8212:6028]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:10.109430Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:13:10.109494Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8214:6030] 2025-04-17T18:13:10.109544Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8214:6030] 2025-04-17T18:13:10.109823Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8215:6031] 2025-04-17T18:13:10.109941Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8214:6030], server id = [2:8215:6031], tablet id = 72075186224037894, status = OK 2025-04-17T18:13:10.109986Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8215:6031], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:13:10.110034Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:13:10.110161Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:13:10.110235Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:8212:6028], StatRequests.size() = 1 2025-04-17T18:13:10.203487Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjgxMGJmYjQtOGVmMWEzNzUtZmY2NWZmYmEtZjgwMjQ1YjE=, TxId: 2025-04-17T18:13:10.203540Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjgxMGJmYjQtOGVmMWEzNzUtZmY2NWZmYmEtZjgwMjQ1YjE=, TxId: 2025-04-17T18:13:10.203990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:10.217051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:13:10.217104Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:13:10.260298Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:13:10.260363Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:13:10.345321Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8214:6030], schemeshard count = 1 2025-04-17T18:13:10.639722Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-17T18:13:10.639784Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 180.000000s, at schemeshard: 72075186224037899 2025-04-17T18:13:10.640074Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-17T18:13:10.653455Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:13:11.577351Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:13:11.577441Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-17T18:13:11.580751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:11.596459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:11.596883Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:11.596959Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037899, LocalPathId: 2], AnalyzedShards 1 2025-04-17T18:13:11.610004Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:11.620860Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-17T18:13:11.621653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-17T18:13:11.621731Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-17T18:13:11.634895Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:13:13.010916Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:13:13.011039Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-17T18:13:13.011081Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:13:13.011668Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:13:13.025252Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:13:13.025610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:13:13.025686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:13:13.026621Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:13:13.040137Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:13:13.040369Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:13:13.040941Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8351:6120], server id = [2:8352:6121], tablet id = 72075186224037905, status = OK 2025-04-17T18:13:13.041018Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8351:6120], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:13:13.043788Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:13:13.043863Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:13:13.044029Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:13:13.044158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:13:13.044366Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:13:13.045895Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8351:6120], server id = [2:8352:6121], tablet id = 72075186224037905 2025-04-17T18:13:13.045929Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:13:13.046360Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:13.073627Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8372:6140]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:13:13.073894Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:13:13.073936Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8372:6140], StatRequests.size() = 1 2025-04-17T18:13:13.164499Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDY3ZDg3NWMtN2ZmYzIwYjItZTI3NmE0YjUtNWYwNjg2NDU=, TxId: 2025-04-17T18:13:13.164546Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDY3ZDg3NWMtN2ZmYzIwYjItZTI3NmE0YjUtNWYwNjg2NDU=, TxId: 2025-04-17T18:13:13.164955Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:13.179098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:13:13.179143Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3324:3402] |90.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BSCRestartPDisk::RestartNotAllowed [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::NoTableQuery+useSink >> KqpPg::CreateTableSerialColumns+useSink >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> KqpPg::TypeCoercionBulkUpsert >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 18070385748290786950 >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest::LdapServerIsUnavailable >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] 
[GOOD] >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::CancelBuild >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout >> 
KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> TTabletPipeTest::TestPipeConnectToHint >> TTabletPipeTest::TestConnectReject >> TTabletPipeTest::TestSendAfterReboot >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> BootstrapperTest::LoneBootstrapper >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> TFlatMetrics::MaximumValue3 [GOOD] >> TTabletPipeTest::TestOpen >> TFlatMetrics::MaximumValue4 [GOOD] >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect >> TTabletLabeledCountersAggregator::SimpleAggregation >> TTabletPipeTest::TestSendWithoutWaitOpen >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] >> TTabletPipeTest::TestOpen [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> YdbTableSplit::SplitByLoadWithReads >> YdbTableSplit::RenameTablesAndSplit >> IndexBuildTest::CancelBuild [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor 
[GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:119:2147] sender: [1:121:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:160:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:119:2147] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: { LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:119:2147] sender: [1:121:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:161:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:119:2147] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] 
recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> VectorIndexBuildTest::BaseCase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: 2025-04-17T18:13:24.689769Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.307833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.307871Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.319190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.319460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.319595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.327815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.328030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-17T18:13:11.330987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.332106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.337415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.344009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.344058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.344218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.350318Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.482260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.482491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.482721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.482975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.483043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.485128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.485277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.485439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.485490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.485525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.485572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.487023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.487075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.487105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.488448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.488486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.488526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.488580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.492027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.493552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.493724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.494645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.494771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.494850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.495170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.495226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.495407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.495503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.497319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.497364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.497514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T18:13:11.497551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.497923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.497968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.498071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.498121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.498163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.498210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.498247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.498299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.498354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.498388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.498464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.498500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.498532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.500275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.500377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.500410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
on: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-17T18:13:24.640875Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:24.640974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:24.641023Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-17T18:13:24.641069Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-17T18:13:24.642878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-17T18:13:24.642920Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-17T18:13:24.642996Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:13:24.643024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:13:24.643054Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:13:24.643086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:13:24.643142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-17T18:13:24.643192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:125:2151] message: TxId: 281474976710760 2025-04-17T18:13:24.643226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:13:24.643254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-17T18:13:24.643293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-17T18:13:24.643359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-17T18:13:24.644996Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-17T18:13:24.645047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-17T18:13:24.645097Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-17T18:13:24.645162Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: 
[2:1166:3019], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:24.646738Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:13:24.646806Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1166:3019], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:24.646847Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-17T18:13:24.648320Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:13:24.648398Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1166:3019], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:24.648430Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-17T18:13:24.648553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:13:24.648596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [2:1262:3104] TestWaitNotification: OK eventTxId 102 2025-04-17T18:13:24.651400Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-17T18:13:24.651637Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 } 2025-04-17T18:13:24.654665Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:13:24.654872Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 229us result status StatusSuccess 2025-04-17T18:13:24.655261Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-17T18:13:24.657804Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:13:24.657996Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 205us result status StatusPathDoesNotExist 2025-04-17T18:13:24.658163Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> TPipeCacheTest::TestAutoConnect [GOOD] >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> TResourceBroker::TestCounters >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> TResourceBrokerInstant::Test [GOOD] >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> TResourceBroker::TestAutoTaskId [GOOD] >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2025-04-17T18:13:11.247805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.307772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.307814Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.319190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.319465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.319595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.326943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.327860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.330981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.332075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.336633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.342937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.342988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.343933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.343989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.344031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.344132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.348726Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.436202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.436653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.436897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.437093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-04-17T18:13:11.437149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.439271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.439400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.439553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.439601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.439628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.439656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.441100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.441140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.441168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.442695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.442732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.442767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.442804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.446491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.447922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.448077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.448881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.448994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.449040Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.449452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.449505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.449694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.449772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.451390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.451566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.451895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.451933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.452006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.452034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.452085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.452113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.452141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.452216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.452249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.452274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.452330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.452367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.452395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.453898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.453986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.454017Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Publication in-flight, co ... rId: [1:4770:6422] } 2025-04-17T18:13:24.905824Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:13:24.908612Z node 1 :TX_DATASHARD INFO: 72075186233409568 Reporting state Offline to schemeshard 72075186233409561 2025-04-17T18:13:24.908986Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [1:4549:6214], Recipient [1:4557:6220]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:13:24.909436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409561, message: Source { RawX1: 4557 RawX2: 4294973516 } TabletId: 72075186233409568 State: 4 2025-04-17T18:13:24.909509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409568, state: Offline, at schemeshard: 72075186233409561 2025-04-17T18:13:24.909964Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [1:4872:6524], Recipient [1:4557:6220]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72075186233409561 Status: OK ServerId: [1:4873:6525] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:13:24.910010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:13:24.912518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409561:8 hive 72057594037968897 at ss 72075186233409561 2025-04-17T18:13:24.912820Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269552133, Sender [1:3302:5047], Recipient [1:4557:6220]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72075186233409561 State: 4 2025-04-17T18:13:24.912861Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-04-17T18:13:24.912893Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409568 state Offline 2025-04-17T18:13:24.913260Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:4872:6524], Recipient [1:4557:6220]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409561 ClientId: [1:4872:6524] ServerId: [1:4873:6525] } 2025-04-17T18:13:24.913299Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:13:24.913613Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409561 ShardLocalIdx: 8 TxId_Deprecated: 8 TabletID: 72075186233409568 2025-04-17T18:13:24.913896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:4549:6214], Recipient [1:4557:6220]: NKikimr::TEvTablet::TEvTabletDead 2025-04-17T18:13:24.914151Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409568 2025-04-17T18:13:24.914274Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409568 Forgetting tablet 72075186233409568 2025-04-17T18:13:24.915845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72075186233409561 ShardLocalIdx: 8, at schemeshard: 72075186233409561 2025-04-17T18:13:24.916135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 7] was 1 2025-04-17T18:13:24.916939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409561 2025-04-17T18:13:24.916984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: 
PersistRemovePath for PathId# [OwnerId: 72075186233409561, LocalPathId: 7], at schemeshard: 72075186233409561 2025-04-17T18:13:24.917045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 4 2025-04-17T18:13:24.919805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409561:8 2025-04-17T18:13:24.919847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:8 tabletId 72075186233409568 2025-04-17T18:13:24.920704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409561 2025-04-17T18:13:24.978876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1900, transactions count in step: 1, at schemeshard: 72075186233409561 2025-04-17T18:13:24.979002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735762 AckTo { RawX1: 0 RawX2: 0 } } Step: 1900 MediatorID: 72075186233409563 TabletID: 72075186233409561, at schemeshard: 72075186233409561 2025-04-17T18:13:24.979048Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDropLock TPropose opId# 281474976735762:0 HandleReply TEvOperationPlan: step# 1900 2025-04-17T18:13:24.979100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735762:0 128 -> 240 2025-04-17T18:13:24.981400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735762:0, at schemeshard: 72075186233409561 2025-04-17T18:13:24.981441Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDone opId# 281474976735762:0 ProgressState 2025-04-17T18:13:24.981515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-04-17T18:13:24.981577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-17T18:13:24.981621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-04-17T18:13:24.981647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-17T18:13:24.981671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735762, ready parts: 1/1, is published: true 2025-04-17T18:13:24.981727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3302:5047] message: TxId: 281474976735762 2025-04-17T18:13:24.981764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-17T18:13:24.981791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735762:0 2025-04-17T18:13:24.981811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735762:0 2025-04-17T18:13:24.981872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409561, LocalPathId: 2] was 4 2025-04-17T18:13:24.984289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735762 2025-04-17T18:13:24.984336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735762 2025-04-17T18:13:24.984381Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfoId: 115 2025-04-17T18:13:24.984445Z node 1 :BUILD_INDEX DEBUG: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfo: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4176:5876], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:24.986601Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-17T18:13:24.986689Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4176:5876], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:24.986734Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:13:24.988716Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-17T18:13:24.988790Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4176:5876], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload 
rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:24.988818Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2025-04-17T18:13:24.988953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-04-17T18:13:24.988992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:4308:5986] TestWaitNotification: OK eventTxId 115 2025-04-17T18:13:24.995650Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2025-04-17T18:13:24.995855Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } >> BootstrapperTest::KeepExistingTablet >> TTabletPipeTest::TestConsumerSidePipeReset >> TResourceBroker::TestOverusage >> TTabletPipeTest::TestPipeWithVersionInfo >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::Test [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] >> TResourceBroker::TestChangeTaskType [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:82:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:83:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:83:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] 2025-04-17T18:13:26.086530Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2025-04-17T18:13:26.095337Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-04-17T18:13:26.095426Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TResourceBroker::TestNotifyActorDied [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> 
LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TTabletPipeTest::TestInterconnectSession [GOOD] >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> TTabletPipeTest::TestKillClientBeforServerIdKnown |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TTabletResolver::NodeProblem >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TPipeCacheTest::TestIdleRefresh >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> KqpScripting::StreamOperationTimeout [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TResourceBroker::TestErrors >> TResourceBrokerInstant::TestErrors >> BootstrapperTest::MultipleBootstrappers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] Test command err: 2025-04-17T18:13:28.067103Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach 2025-04-17T18:13:28.080173Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-04-17T18:13:28.087250Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-04-17T18:13:28.090115Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:128:2154] 2025-04-17T18:13:28.090163Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:128:2154] 2025-04-17T18:13:28.090490Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:128:2154] 2025-04-17T18:13:28.090561Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:128:2154] 2025-04-17T18:13:28.090649Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:128:2154] 2025-04-17T18:13:28.090683Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:128:2154] 2025-04-17T18:13:28.090758Z node 1 :PIPE_SERVER DEBUG: [9437185] 
Accept Connect Originator# [1:128:2154] 2025-04-17T18:13:28.090929Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:128:2154] Type# 269877249 Reason# ActorUnknown 2025-04-17T18:13:28.091051Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:131:2156] 2025-04-17T18:13:28.091089Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:131:2156] 2025-04-17T18:13:28.091208Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:131:2156] 2025-04-17T18:13:28.091243Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:131:2156] 2025-04-17T18:13:28.091297Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:131:2156] 2025-04-17T18:13:28.091320Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:131:2156] 2025-04-17T18:13:28.091369Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:131:2156] 2025-04-17T18:13:28.091487Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:131:2156] Type# 269877249 Reason# ActorUnknown 2025-04-17T18:13:28.091624Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:133:2158] 2025-04-17T18:13:28.091651Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:133:2158] 2025-04-17T18:13:28.091697Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:133:2158] 2025-04-17T18:13:28.091724Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:133:2158] 2025-04-17T18:13:28.091764Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:133:2158] 2025-04-17T18:13:28.091790Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:133:2158] 2025-04-17T18:13:28.091845Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:133:2158] 2025-04-17T18:13:28.091939Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:133:2158] Type# 269877249 Reason# ActorUnknown >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TTabletResolver::NodeProblem [GOOD] >> BootstrapperTest::DuplicateNodes [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> TPipeCacheTest::TestTabletNode [GOOD] >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge >> TTabletPipeTest::TestTwoNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2025-04-17T18:13:24.749247Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:24.749310Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:24.749346Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:24.750249Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-17T18:13:24.750289Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-17T18:13:24.750391Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-17T18:13:24.750412Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-04-17T18:13:24.750702Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-17T18:13:24.750726Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-04-17T18:13:24.751501Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: UNKNOWN 2025-04-17T18:13:24.751640Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-17T18:13:24.751734Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-17T18:13:24.751773Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2025-04-17T18:13:24.751863Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-17T18:13:24.751945Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-17T18:13:24.751973Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-17T18:13:24.751993Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-17T18:13:24.752181Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-17T18:13:24.752208Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2025-04-17T18:13:24.941716Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:24.942299Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-04-17T18:13:24.942732Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-17T18:13:24.942773Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-17T18:13:24.995603Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:24.996123Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-04-17T18:13:24.996557Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-17T18:13:24.996589Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 5 (idx 3) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2025-04-17T18:13:25.754954Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2025-04-17T18:13:25.755022Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-04-17T18:13:25.755347Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-17T18:13:25.755385Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:25.755470Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-17T18:13:25.755486Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:25.756780Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-04-17T18:13:25.756931Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-04-17T18:13:25.757735Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-17T18:13:25.757779Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-17T18:13:25.757833Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-17T18:13:25.757853Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-04-17T18:13:26.456620Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2025-04-17T18:13:26.456680Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-04-17T18:13:26.456828Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-17T18:13:26.456867Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:26.457027Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-17T18:13:26.457046Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:26.458304Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-04-17T18:13:26.458640Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] ... disconnecting nodes 3 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 3 <-> 1 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-17T18:13:26.459158Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-17T18:13:26.459188Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 2025-04-17T18:13:26.459751Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-17T18:13:26.459777Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 2025-04-17T18:13:26.460045Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: UNKNOWN 2025-04-17T18:13:26.460218Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-17T18:13:26.460410Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: OWNER 2025-04-17T18:13:26.460430Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 5 (owner) 2025-04-17T18:13:26.460566Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-17T18:13:26.460696Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: WAITFOR 2025-04-17T18:13:26.460719Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-04-17T18:13:27.158285Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335029 2025-04-17T18:13:27.158379Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:27.158547Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335029 2025-04-17T18:13:27.159021Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:27.160277Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-04-17T18:13:27.160738Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] ... disconnecting nodes 3 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-17T18:13:27.161223Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-17T18:13:27.161265Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 13164802727073798053 ... disconnecting nodes 3 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 3 <-> 2 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-04-17T18:13:27.161808Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-17T18:13:27.161836Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 10171326560769670008 2025-04-17T18:13:27.161992Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: UNKNOWN 2025-04-17T18:13:27.162080Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335031 2025-04-17T18:13:27.162119Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: DISCONNECTED 2025-04-17T18:13:27.162151Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot ... disconnecting nodes 3 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-04-17T18:13:27.162696Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335031 2025-04-17T18:13:27.162728Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: DISCONNECTED 2025-04-17T18:13:27.162763Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-04-17T18:13:27.162794Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2025-04-17T18:13:27.165385Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2025-04-17T18:13:27.165451Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:27.168913Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:393:2096] 2025-04-17T18:13:27.185879Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-17T18:13:27.185933Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 3 <-> 0 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-04-17T18:13:28.341861Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.342125Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:207:2136] CurrentLeaderTablet: [1:208:2137] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:13:28.342168Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-17T18:13:28.342241Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-04-17T18:13:28.342377Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.342527Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:213:2140] CurrentLeaderTablet: [1:214:2141] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:13:28.342560Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-17T18:13:28.342594Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:213:2140] 2025-04-17T18:13:28.343362Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.343405Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-04-17T18:13:28.343568Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.343630Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:213:2140] 2025-04-17T18:13:28.343759Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2025-04-17T18:13:28.343792Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:207:2136] by NodeId 2025-04-17T18:13:28.343847Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.343992Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:13:28.344030Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-17T18:13:28.344059Z node 1 
:TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-04-17T18:13:28.344275Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:213:2140] by NodeId 2025-04-17T18:13:28.344315Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.344445Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:229:2096] CurrentLeaderTablet: [2:230:2097] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:13:28.344479Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-17T18:13:28.344513Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-04-17T18:13:28.345820Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2025-04-17T18:13:28.345865Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.345894Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-04-17T18:13:28.346039Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.346067Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-04-17T18:13:28.346219Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2025-04-17T18:13:28.346268Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:223:2094] by NodeId 2025-04-17T18:13:28.346303Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.346477Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:13:28.346507Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-17T18:13:28.346544Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-04-17T18:13:28.346701Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.346731Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 
2025-04-17T18:13:28.346851Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2025-04-17T18:13:28.346882Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.346911Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-04-17T18:13:28.347059Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:229:2096] by NodeId 2025-04-17T18:13:28.347103Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:13:28.347232Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:247:2096] CurrentLeaderTablet: [3:248:2097] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-17T18:13:28.347260Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-17T18:13:28.347293Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:247:2096] >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TResourceBroker::TestExecutionStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2025-04-17T18:13:27.819981Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:27.820062Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-17T18:13:27.820637Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-17T18:13:27.820679Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-04-17T18:13:27.820828Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-17T18:13:27.820851Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-17T18:13:27.821509Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2025-04-17T18:13:27.821578Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-17T18:13:27.821968Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-04-17T18:13:27.822000Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 18624, MsgBus: 62963 2025-04-17T18:12:29.924191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346629843059189:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:29.924258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001402/r3tmp/tmpSWe5cH/pdisk_1.dat 2025-04-17T18:12:30.181259Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18624, node 1 2025-04-17T18:12:30.246940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:30.246975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:30.246984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:30.247227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:30.247598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:30.247694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:30.249248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62963 TClient is connected to server localhost:62963 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:30.663048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.683568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.785817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.918921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:30.969292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:32.159127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346642727962855:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.159228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.405404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.428884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.449393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.472594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.496921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.524741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.545297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:32.615745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346642727963516:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.615793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.615884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346642727963521:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:32.618530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:32.625122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346642727963523:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:32.690691Z node 1 :TX_PROXY ERROR: Actor# [1:7494346642727963578:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:33.633276Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913553670, txId: 281474976710673] shutting down 2025-04-17T18:12:33.866199Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913553901, txId: 281474976710676] shutting down 2025-04-17T18:12:34.040559Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554076, txId: 281474976710679] shutting down 2025-04-17T18:12:34.234375Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554272, txId: 281474976710682] shutting down 2025-04-17T18:12:34.406874Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554447, txId: 281474976710685] shutting down 2025-04-17T18:12:34.634548Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554671, txId: 281474976710688] shutting down 2025-04-17T18:12:34.865502Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913554867, txId: 281474976710691] shutting down 2025-04-17T18:12:34.924185Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346629843059189:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:34.924249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:12:35.046282Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555084, txId: 281474976710694] shutting down 2025-04-17T18:12:35.232051Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555266, txId: 281474976710697] shutting down 2025-04-17T18:12:35.416307Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555455, txId: 281474976710700] shutting down 2025-04-17T18:12:35.602265Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555637, txId: 281474976710703] shutting down 2025-04-17T18:12:35.814906Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913555854, txId: 281474976710706] shutting down 2025-04-17T18:12:36.031062Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556071, txId: 281474976710709] shutting down 2025-04-17T18:12:36.210391Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556246, txId: 281474976710712] shutting down 2025-04-17T18:12:36.407008Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556442, txId: 281474976710715] shutting down 2025-04-17T18:12:36.567800Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556603, txId: 281474976710718] shutting down 2025-04-17T18:12:36.766499Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913556806, txId: 281474976710721] shutting down 2025-04-17T18:12:36.967207Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557002, txId: 281474976710724] shutting down 2025-04-17T18:12:37.150134Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557191, txId: 281474976710727] shutting down 2025-04-17T18:12:37.340793Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557380, txId: 281474976710730] shutting down 2025-04-17T18:12:37.506607Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913557548, txId: 281474976710733] shutting down 2025-04-17T18:12:37.687746Z node 1 :KQP_RESOURCE_MANAGER WARN: ... napshotManager: discarding snapshot; our snapshot: [step: 1744913595194, txId: 281474976711201] shutting down 2025-04-17T18:13:15.379105Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913595411, txId: 281474976711204] shutting down 2025-04-17T18:13:15.603653Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913595635, txId: 281474976711207] shutting down 2025-04-17T18:13:15.857116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913595880, txId: 281474976711210] shutting down 2025-04-17T18:13:16.152495Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913596160, txId: 281474976711213] shutting down 2025-04-17T18:13:16.396235Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913596433, txId: 281474976711216] shutting down 2025-04-17T18:13:16.652999Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913596685, txId: 281474976711219] shutting down 2025-04-17T18:13:16.942652Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913596972, txId: 281474976711222] shutting down 2025-04-17T18:13:17.195388Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913597224, txId: 281474976711225] shutting down 2025-04-17T18:13:17.446003Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913597476, txId: 281474976711228] shutting down 2025-04-17T18:13:17.691177Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913597721, txId: 281474976711231] shutting down 2025-04-17T18:13:18.072048Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913598099, txId: 281474976711234] shutting down 2025-04-17T18:13:18.421679Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913598456, txId: 281474976711237] shutting down 2025-04-17T18:13:18.798036Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913598827, txId: 281474976711240] shutting down 2025-04-17T18:13:19.088360Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913599114, txId: 281474976711243] shutting down 2025-04-17T18:13:19.393372Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913599415, txId: 281474976711246] shutting down 2025-04-17T18:13:19.685998Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913599716, txId: 281474976711249] shutting down 2025-04-17T18:13:20.019368Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913600052, txId: 281474976711252] shutting down 2025-04-17T18:13:20.362123Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913600395, txId: 281474976711255] shutting down 2025-04-17T18:13:20.679987Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913600710, txId: 281474976711258] shutting down 2025-04-17T18:13:21.022895Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913601053, txId: 281474976711261] shutting down 2025-04-17T18:13:21.357692Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913601389, txId: 281474976711264] shutting down 2025-04-17T18:13:21.581475Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913601613, txId: 281474976711267] shutting down 2025-04-17T18:13:21.808232Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913601837, txId: 281474976711270] shutting down Trying to start YDB, gRPC: 17501, MsgBus: 1638 2025-04-17T18:13:22.718513Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346857484432447:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:22.718591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001402/r3tmp/tmpapIz6q/pdisk_1.dat 2025-04-17T18:13:22.809878Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17501, node 2 2025-04-17T18:13:22.847841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:22.847953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:22.849593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:22.876449Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:22.876477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:22.876485Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:22.876629Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1638 TClient is connected to server localhost:1638 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:23.332783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:23.348503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:23.399602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:23.537541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:23.665597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:26.011220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346874664303417:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:26.011320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:26.061934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:26.090850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:26.119862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:26.150706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:26.180953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:26.218515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:26.252276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:26.309158Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346874664304076:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:26.309242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:26.309250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346874664304081:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:26.312602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:26.321841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346874664304083:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:26.388891Z node 2 :TX_PROXY ERROR: Actor# [2:7494346874664304138:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2025-04-17T18:13:28.865531Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TResourceBroker::TestResubmitTask >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> YdbYqlClient::SimpleColumnFamilies [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition >> TTabletPipeTest::TestShutdown >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:101:2135] ... blocking block result NO_GROUP for [1:102:2135] ... blocking block result NO_GROUP for [1:103:2135] ... 
blocking block result NO_GROUP for [1:104:2135] >> BootstrapperTest::RestartUnavailableTablet >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletPipeTest::TestShutdown [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
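The TKeyValueTest trace above is the output of a reboot-matrix harness: for each interesting event type it arms a one-shot trap ("!Reboot 72057594037927937 (actor [...]) on event NKikimr::TEvKeyValue::TEvRequest !"), drives the tablet until that event fires, restarts the tablet at exactly that point, waits for "tablet resolver refreshed!", and checks that a fresh leader actor took over ("new actor is[...]"). The real harness is C++ inside YDB's test framework; the control flow it implies can be sketched language-neutrally as follows (Python; `cluster` and all of its methods are hypothetical stand-ins, only the control flow mirrors the log):

```python
# Sketch of the reboot-on-event pattern visible in the trace above.
# Nothing here is YDB API; the method names are invented for illustration.
REBOOT_POINTS = [
    "NKikimr::TEvTabletPipe::TEvServerConnected",
    "NKikimr::TEvKeyValue::TEvRequest",
    "NKikimr::TEvKeyValue::TEvIntermediate",
    "NKikimr::TEvKeyValue::TEvExecuteTransaction",
    "NKikimr::TEvKeyValue::TEvReadRange",
    "NKikimr::TEvKeyValue::TEvNotify",
]

def run_reboot_matrix(cluster, tablet_id=72057594037927937):
    for event in REBOOT_POINTS:
        old_leader = cluster.leader_of(tablet_id)
        # Arm a one-shot trap: restart the tablet the next time `event`
        # is delivered to it ("!Reboot ... on event <X> !").
        cluster.reboot_on_event(tablet_id, event)
        cluster.run_workload(tablet_id)             # traffic until the trap fires
        cluster.wait_resolver_refreshed(tablet_id)  # "tablet resolver refreshed!"
        new_leader = cluster.leader_of(tablet_id)
        # The "new actor is[...]" assertion: every injected reboot must
        # hand leadership to a fresh actor.
        assert new_leader != old_leader, (event, old_leader, new_leader)
```

The point of the matrix is that each reboot point lands in a different phase of request processing, so the same user-visible operation is re-verified against a crash at every stage.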
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:52:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:86:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:87:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:87:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:52:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:88:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:91:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:92:2057] recipient: [19:90:2119] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:94:2057] recipient: [19:90:2119] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
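Because each reboot point is logged in that fixed shape, the set of events a run actually exercised can be recovered mechanically from the log text. A small helper, grounded only in the "!Reboot ... on event ... !" lines above (Python):

```python
import re
from collections import Counter

REBOOT_RE = re.compile(
    r"!Reboot (?P<tablet>\d+) \(actor \[[^\]]+\]\) on event (?P<event>\S+) !"
)

def reboot_coverage(log_text):
    """Count how many times each event type served as a reboot point."""
    return Counter(m["event"] for m in REBOOT_RE.finditer(log_text))
```

Against the excerpt above the counter should show TEvServerConnected, TEvRequest, TEvIntermediate, TEvExecuteTransaction, TEvReadRange and TEvNotify, each used for at least one reboot cycle; an event type missing from the counter is a crash phase the run never tested.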
new actor is[19:93:2120] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:147:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:88:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:91:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:92:2057] recipient: [20:90:2119] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:94:2057] recipient: [20:90:2119] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:93:2120] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:147:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:89:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:92:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:93:2057] recipient: [21:91:2119] Leader for TabletID 72057594037927937 is [21:94:2120] sender: [21:95:2057] recipient: [21:91:2119] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] |91.2%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> TTabletPipeTest::TestSendAfterOpen ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7633, MsgBus: 10742 2025-04-17T18:13:00.149707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346763157906874:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:00.149885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e56/r3tmp/tmpczrHH3/pdisk_1.dat 2025-04-17T18:13:00.423111Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7633, node 1 2025-04-17T18:13:00.495469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:00.495488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:00.495493Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:00.495600Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:00.510635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:00.510741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:00.512586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10742 TClient is connected to server localhost:10742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:00.925728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:02.489885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346771747842139:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.489905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346771747842131:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.490002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:02.492484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:13:02.502113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346771747842145:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:13:02.558696Z node 1 :TX_PROXY ERROR: Actor# [1:7494346771747842196:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:02.795264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.892289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.918010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.943346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:02.968435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.086941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.111199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.135692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.159976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.183922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.221832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.244084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.268079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.748838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:13:03.774701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.799015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.820077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.843105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.864419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.883907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.908232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.932471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.958811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:13:03.982493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.007828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.035064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.061592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.084723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.109727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.134837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.185076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.209209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-17T18:13:04.231442Z node 1 :FLAT_TX_ ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.460593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.465763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.465764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.471291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.471308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.476708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.476708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.482163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.482163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.487844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.487852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.493404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.493423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
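This run of TX_COLUMNSHARD_TX warnings (continuing below) is one "event=finished_tx" record per column shard participating in the same distributed schema transaction, tx_id 281474976710714: the tables in this test are deliberately spread over many partitions, so a single DDL statement fans out to dozens of shards. To confirm that every participant reported completion, the records can be grouped by tx_id. A sketch (Python; the regex follows the key=value layout of the lines themselves):

```python
import re
from collections import defaultdict

# \S*? keeps the match inside one semicolon-delimited record, since records
# in these lines contain no whitespace until the next timestamp.
FINISHED_RE = re.compile(
    r"tablet_id=(?P<tablet>\d+);tx_state=TTxProgressTx::Execute;"
    r"\S*?event=finished_tx;tx_id=(?P<tx>\d+);"
)

def shards_per_tx(log_text):
    """Map distributed tx_id -> set of column shard tablet_ids that finished it."""
    acc = defaultdict(set)
    for m in FINISHED_RE.finditer(log_text):
        acc[int(m["tx"])].add(int(m["tablet"]))
    return acc
```

For example, `shards_per_tx(text)[281474976710714]` should contain one tablet_id per participating shard; a set shorter than the expected partition count points directly at the shards that never confirmed the transaction.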
2025-04-17T18:13:25.499032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.499111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.504809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.504808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.510418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.510524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.516168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.516262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.521934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.521991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.527574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.527713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.533602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.533604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.539050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.539422Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.544717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.544982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.550270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.550374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.555856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.555873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.561723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.561772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.567419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.568621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.572789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.574732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.578208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.580706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.584159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.592347Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:13:25.648814Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2f8msc9bd5ce737vb1jsa5", SessionId: ydb://session/3?node_id=1&id=ZmE4MWU4YjItM2Y3NTYxYzctN2Q3NTYyN2YtOTk4MTQ5MWQ=, Slow query, duration: 20.707778s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:13:25.853016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494346788927717220:2737];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-17T18:13:25.853110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:13:25.853488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:13:25.853597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> BootstrapperTest::UnavailableStateStorage [GOOD] >> TTabletPipeTest::TestSendAfterOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-04-17T18:13:31.620360Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... 
blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2025-04-17T18:13:31.621031Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-04-17T18:13:31.621081Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.148014s 2025-04-17T18:13:31.737438Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... waiting for multiple state storage lookup attempts (done) |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood |91.2%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> AsyncIndexChangeCollector::DeleteNothing >> AsyncIndexChangeCollector::InsertSingleRow >> CdcStreamChangeCollector::UpsertToSameKey >> CdcStreamChangeCollector::InsertSingleRow >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> AsyncIndexChangeCollector::UpsertToSameKey >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2025-04-17T18:13:16.542448Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346830160461715:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.542572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014c6/r3tmp/tmpM5qGxo/pdisk_1.dat 2025-04-17T18:13:16.848373Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23127, node 1 2025-04-17T18:13:16.946409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.946505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.948149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.985614Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.985641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.985647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.985762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.243982Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.246577Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.246612Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.247620Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2289, port: 2289 2025-04-17T18:13:17.247707Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:17.258449Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:17.302712Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:17.350480Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:17.350970Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:17.351048Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.394510Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.442494Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.444135Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****xpVg (259BA375) () has now valid token of ldapuser@ldap 2025-04-17T18:13:19.141292Z node 
2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346843289537458:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:19.141373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014c6/r3tmp/tmpOq9bel/pdisk_1.dat 2025-04-17T18:13:19.275469Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:19.301126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:19.301216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:19.302649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6777, node 2 2025-04-17T18:13:19.343046Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:19.343066Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:19.343072Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:19.343163Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:19.401819Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:19.405427Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:19.405459Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:19.406322Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23175, port: 23175 2025-04-17T18:13:19.406398Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:19.416606Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:19.458658Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:19.506856Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****f11Q (45B04221) () has now valid token of ldapuser@ldap 2025-04-17T18:13:22.127489Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346859126349395:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:22.127557Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014c6/r3tmp/tmpZYaOib/pdisk_1.dat 2025-04-17T18:13:22.226160Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4321, node 3 2025-04-17T18:13:22.266995Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:22.267080Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:22.268576Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:22.270322Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-17T18:13:22.270342Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:22.270348Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:22.270479Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:22.402677Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:22.404965Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:22.404987Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:22.405486Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:64817 ldap://localhost:64817 ldap://localhost:11111, port: 64817 2025-04-17T18:13:22.405555Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:22.416633Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:22.458683Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:22.506505Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:22.507065Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:22.507109Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:22.554452Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:22.598457Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:22.599380Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****fdag (875F4915) () has now valid token of ldapuser@ldap 2025-04-17T18:13:24.632942Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346863782157391:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:24.633016Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014c6/r3tmp/tmpEfLNm5/pdisk_1.dat 2025-04-17T18:13:24.729356Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15625, node 4 2025-04-17T18:13:24.760083Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:24.760172Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:24.761597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:24.766217Z node 4 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:24.766236Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:24.766243Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:24.766366Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:24.830550Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:24.832994Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:24.833032Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:24.833789Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1746, port: 1746 2025-04-17T18:13:24.833862Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:24.843097Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:24.886790Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-17T18:13:24.934806Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****aj2A (1EA2C3BE) () has now valid token of ldapuser@ldap 2025-04-17T18:13:27.417524Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494346878406582731:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:27.417613Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014c6/r3tmp/tmpHflorb/pdisk_1.dat 2025-04-17T18:13:27.506713Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7917, node 5 2025-04-17T18:13:27.546801Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:27.546884Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:27.548608Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:27.573422Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:27.573443Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:27.573449Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:27.573563Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:27.693292Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:27.696499Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:27.696525Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:27.697099Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1468, port: 1468 2025-04-17T18:13:27.697181Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:27.705694Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:27.750686Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial 
LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:1468. Invalid credentials 2025-04-17T18:13:27.751138Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****KKFA (1B3CCA93) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:1468. Invalid credentials)' 2025-04-17T18:13:30.586323Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346893426341118:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:30.586369Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014c6/r3tmp/tmp55ZgKm/pdisk_1.dat 2025-04-17T18:13:30.706374Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3415, node 6 2025-04-17T18:13:30.727488Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:30.727669Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:30.736312Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:30.761500Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:30.761524Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:30.761531Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:30.761646Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:30.899158Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:30.902059Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:30.902084Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:30.902704Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:25651, port: 25651 2025-04-17T18:13:30.902791Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:30.911696Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:30.958654Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:25651. Invalid credentials 2025-04-17T18:13:30.959005Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****U-fQ (E98428FB) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:25651. 
Invalid credentials)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2025-04-17T18:13:16.547802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346831864690344:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.548150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001499/r3tmp/tmpaRg6mJ/pdisk_1.dat 2025-04-17T18:13:16.892423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10191, node 1 2025-04-17T18:13:16.950296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.950421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.951844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.990771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.990800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.990804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.990928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.313395Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.316561Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.316626Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.317954Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:61469, port: 61469 2025-04-17T18:13:17.318016Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:17.321877Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-04-17T18:13:17.322332Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****zS1g (33C72AAE) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-04-17T18:13:17.322504Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.322522Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.323158Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:61469, port: 61469 2025-04-17T18:13:17.323227Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:17.326044Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-04-17T18:13:17.326199Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****zS1g (33C72AAE) () has now retryable error message 'Could not login via LDAP (Could not start TLS. 
Can't contact LDAP server)' 2025-04-17T18:13:19.194391Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346843014939840:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:19.194452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001499/r3tmp/tmprQwoL5/pdisk_1.dat 2025-04-17T18:13:19.338300Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:19.354998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:19.355063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:19.356344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11901, node 2 2025-04-17T18:13:19.400386Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:19.400404Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:19.400409Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:19.400497Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:19.469567Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:19.473663Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:19.473691Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:19.474562Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****LDpw (C15B731F) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2025-04-17T18:13:21.955252Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346850717812316:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:21.955311Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001499/r3tmp/tmpDSdwv5/pdisk_1.dat 2025-04-17T18:13:22.061820Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16050, node 3 2025-04-17T18:13:22.093538Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:22.093630Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:22.094747Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:22.111712Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:22.111744Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:22.111754Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:22.111880Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:22.194849Z node 3 
:TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:22.197232Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:22.197261Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:22.198110Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****5f1g (8D9FFEBD) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2025-04-17T18:13:24.344078Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346866720798582:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:24.344180Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001499/r3tmp/tmpf9khf3/pdisk_1.dat 2025-04-17T18:13:24.408763Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14274, node 4 2025-04-17T18:13:24.455800Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:24.455820Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:24.455826Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:24.455946Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:24.469803Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:24.469912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:24.471575Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:24.551778Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:24.555214Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:24.555242Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:24.555970Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****IRBg (1CDAED10) () has now permanent error message 'Could not login via LDAP (Parameter BindDn is empty)' 2025-04-17T18:13:27.186064Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494346878959895660:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:27.186122Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001499/r3tmp/tmpy2yUR2/pdisk_1.dat 2025-04-17T18:13:27.281632Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12360, node 5 2025-04-17T18:13:27.323808Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:27.323902Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:27.325461Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:27.335326Z 
node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:27.335347Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:27.335358Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:27.335475Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:27.495699Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:27.498808Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:27.498830Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:27.499596Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****7k-w (0FF29101) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2025-04-17T18:13:30.342597Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346889540841794:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:30.342686Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001499/r3tmp/tmpZuPKex/pdisk_1.dat 2025-04-17T18:13:30.429488Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11254, node 6 2025-04-17T18:13:30.472125Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:30.472316Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:30.473645Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:30.497883Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:30.497905Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:30.497912Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:30.498050Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:30.601318Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:30.603969Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:30.603992Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:30.604579Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:27366, port: 27366 2025-04-17T18:13:30.604688Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:30.665367Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:30.711167Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Dnlg (B2398457) () has now valid token of ldapuser@ldap >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 
2025-04-17T18:13:16.544020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346833028527285:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.544077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014fb/r3tmp/tmpKhyscy/pdisk_1.dat 2025-04-17T18:13:16.873056Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12619, node 1 2025-04-17T18:13:16.922340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.922413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.926468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.986531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.986557Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.986570Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.988481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.239325Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.243267Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.243309Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.244104Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18850, port: 18850 2025-04-17T18:13:17.244812Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:17.249425Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:17.290435Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:17.339075Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****8crA (4C6A6E97) () has now valid token of ldapuser@ldap 2025-04-17T18:13:19.155027Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346844640942010:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:19.155158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014fb/r3tmp/tmpwbZbyE/pdisk_1.dat 2025-04-17T18:13:19.281170Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:19.305655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:19.305762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:19.307440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 24725, node 2 2025-04-17T18:13:19.344308Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:19.344329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:19.344335Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:19.344425Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:19.434841Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:19.438417Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:19.438451Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:19.438994Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26634, port: 26634 2025-04-17T18:13:19.439091Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:19.442329Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:19.490422Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:19.490932Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:19.491017Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:19.534531Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:19.578433Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:19.579246Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Jt8g (5C3E5D53) () has now valid token of ldapuser@ldap 2025-04-17T18:13:21.875234Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346854326073178:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:21.875379Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014fb/r3tmp/tmpNpoK94/pdisk_1.dat 2025-04-17T18:13:21.977257Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4144, node 3 2025-04-17T18:13:22.011515Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:22.011598Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:22.013547Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-17T18:13:22.030940Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:22.030962Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:22.030968Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:22.031092Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:22.139879Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:22.143432Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:22.143458Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:22.144020Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15765, port: 15765 2025-04-17T18:13:22.144093Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:22.148376Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:22.190746Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****-low (D1FF9F0D) () has now valid token of ldapuser@ldap 2025-04-17T18:13:24.397822Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346867429046853:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:24.397897Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014fb/r3tmp/tmpKqRFir/pdisk_1.dat 2025-04-17T18:13:24.477682Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1829, node 4 2025-04-17T18:13:24.522440Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:24.522528Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:24.524141Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:24.531248Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:24.531273Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:24.531283Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:24.531411Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:24.660726Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:24.664350Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:24.664382Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:24.665087Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:8017 ldap://localhost:8017 ldap://localhost:11111, port: 8017 2025-04-17T18:13:24.665176Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:24.671765Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, 
scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:24.714413Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:24.714853Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:24.714886Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:24.762510Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:24.810460Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:24.811173Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****0Tlg (E75356F7) () has now valid token of ldapuser@ldap 2025-04-17T18:13:27.111740Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494346879048427255:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:27.111792Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014fb/r3tmp/tmpadl2My/pdisk_1.dat 2025-04-17T18:13:27.214584Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13810, node 5 2025-04-17T18:13:27.252955Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:27.253043Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:27.255129Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:27.276771Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:27.276794Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:27.276802Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:27.276949Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:27.430614Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:27.433758Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:27.433779Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:27.434364Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1906, port: 1906 2025-04-17T18:13:27.434443Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:27.437382Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, 
attributes: groupDN 2025-04-17T18:13:27.482412Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:27.482835Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:27.482878Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:27.526457Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:27.574441Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:27.575206Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****2A-Q (9E9B644D) () has now valid token of ldapuser@ldap 2025-04-17T18:13:30.471949Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346892840141942:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:30.473156Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014fb/r3tmp/tmpQ2g0bH/pdisk_1.dat 2025-04-17T18:13:30.596277Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:30.620540Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:30.620635Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:30.622102Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28028, node 6 2025-04-17T18:13:30.676162Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:30.676185Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:30.676193Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:30.676322Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:30.740663Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:30.743954Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:30.743982Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:30.744750Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21769, port: 21769 2025-04-17T18:13:30.744830Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:30.747884Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 
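[editor's note] The LDAP_AUTH_PROVIDER traces above show the two strategies the tests exercise for resolving a user's nested group membership: a one-shot server-side query using the Active Directory "matching rule in chain" OID (the filter member:1.2.840.113556.1.4.1941:=<user DN>), followed by the manual "tree traversal" fallback that walks memberOf/entryDn level by level until no new groups appear. The sketch below reproduces both query shapes with the ldap3 Python library; the endpoint and robouser password are hypothetical stand-ins, and this is only an illustration of the queries visible in the log, not YDB's implementation (which is C++).

from ldap3 import Server, Connection, SUBTREE

BASE_DN = 'dc=search,dc=yandex,dc=net'
IN_CHAIN = '1.2.840.113556.1.4.1941'  # AD "matching rule in chain" OID

conn = Connection(Server('ldap://localhost:389'),   # hypothetical endpoint
                  user='cn=robouser,' + BASE_DN,
                  password='robouserPassword',      # hypothetical
                  auto_bind=True)

# One-shot variant: the server expands nested membership itself.
# Requesting attribute '1.1' means "return no attributes", only matching DNs.
conn.search(BASE_DN,
            '(member:%s:=uid=ldapuser,%s)' % (IN_CHAIN, BASE_DN),
            search_scope=SUBTREE, attributes=['1.1'])
nested_groups = {e.entry_dn for e in conn.entries}

# Fallback variant ("tree traversal" in the log): breadth-first walk over
# memberOf, querying each frontier of group DNs via (|(entryDn=...)...).
# Group DNs should be RFC 4515-escaped before interpolation in real code.
conn.search(BASE_DN, '(uid=ldapuser)', search_scope=SUBTREE, attributes=['memberOf'])
frontier = {g for e in conn.entries
            for g in e.entry_attributes_as_dict.get('memberOf', [])}
seen = set(frontier)
while frontier:
    flt = '(|' + ''.join('(entryDn=%s)' % g for g in frontier) + ')'
    conn.search(BASE_DN, flt, search_scope=SUBTREE, attributes=['memberOf'])
    frontier = {g for e in conn.entries
                for g in e.entry_attributes_as_dict.get('memberOf', [])} - seen
    seen |= frontier
# 'seen' now holds the transitive group closure for ldapuser.
[end of note]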
2025-04-17T18:13:30.747962Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:21769. Bad search filter 2025-04-17T18:13:30.748191Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****uelg (4FA44311) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:21769. Bad search filter)' >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 8967, MsgBus: 3012 2025-04-17T18:12:34.658271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346649912866600:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:34.659360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013d0/r3tmp/tmpCX6pYv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8967, node 1 2025-04-17T18:12:34.951046Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:12:34.951448Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:12:34.970468Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:12:34.979988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:34.980018Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:34.980029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:34.980183Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:35.040161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:35.040276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:35.041946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3012 TClient is connected to server localhost:3012 WaitRootIsUp 'Root'... 
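[editor's note] The "Bad search filter" failure reported in the LdapFetchGroupsUseInvalidSearchFilterBad output above is the expected result of the malformed filter string &(uid=ldapuser)(): RFC 4515 requires every filter, including an AND expression, to be wrapped in outer parentheses, and the empty component () is not valid. A minimal contrast using the ldap3 Python library's escaping helper (the variable names are illustrative only):

from ldap3.utils.conv import escape_filter_chars

user = 'ldapuser'
bad  = '&(uid=%s)()' % user   # rejected: no outer parentheses, empty '()' term
good = '(&(uid=%s)(objectClass=*))' % escape_filter_chars(user)  # well-formed RFC 4515 AND
[end of note]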
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:35.378246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.404583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.515139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.618941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:35.675778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:36.859891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346658502802960:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:36.859995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.081024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.104702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.129911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.193058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.215220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.255218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.275468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:37.347124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346662797770918:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.347182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.347274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346662797770923:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:37.350078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:37.357214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346662797770925:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:37.448642Z node 1 :TX_PROXY ERROR: Actor# [1:7494346662797770983:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:38.377161Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913558409, txId: 281474976710673] shutting down 2025-04-17T18:12:38.610424Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913558647, txId: 281474976710676] shutting down 2025-04-17T18:12:38.823157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913558857, txId: 281474976710679] shutting down 2025-04-17T18:12:39.052573Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913559088, txId: 281474976710682] shutting down 2025-04-17T18:12:39.272142Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913559305, txId: 281474976710685] shutting down 2025-04-17T18:12:39.465371Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913559501, txId: 281474976710688] shutting down 2025-04-17T18:12:39.657849Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346649912866600:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:39.657917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:12:39.689230Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913559725, txId: 281474976710691] shutting down 2025-04-17T18:12:39.893915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913559914, txId: 281474976710694] shutting down 2025-04-17T18:12:40.108416Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560124, txId: 281474976710697] shutting down 2025-04-17T18:12:40.308000Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560348, txId: 281474976710700] shutting down 2025-04-17T18:12:40.496227Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560537, txId: 281474976710703] shutting down 2025-04-17T18:12:40.660390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560698, txId: 281474976710706] shutting down 2025-04-17T18:12:40.870370Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913560908, txId: 281474976710709] shutting down 2025-04-17T18:12:41.065222Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913561104, txId: 281474976710712] shutting down 2025-04-17T18:12:41.293711Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913561328, txId: 281474976710715] shutting down 2025-04-17T18:12:41.485421Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913561517, txId: 281474976710718] shutting down 2025-04-17T18:12:41.692729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913561734, txId: 281474976710721] shutting down 2025-04-17T18:12:41.878281Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913561916, txId: 281474976710724] shutting down 2025-04-17T18:12:42.043841Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913562084, txId: 281474976710727] shutting down 2025-04-17T18:12:42.250368Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913562287, txId: 281474976710730] shutting down 2025-04-17T18:12:42.418420Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnap ... apshot: [step: 1744913601683, txId: 281474976711210] shutting down 2025-04-17T18:13:21.866894Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913601900, txId: 281474976711213] shutting down 2025-04-17T18:13:22.147295Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913602180, txId: 281474976711216] shutting down 2025-04-17T18:13:22.376815Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913602411, txId: 281474976711219] shutting down 2025-04-17T18:13:22.594306Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913602628, txId: 281474976711222] shutting down 2025-04-17T18:13:22.811815Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913602845, txId: 281474976711225] shutting down 2025-04-17T18:13:23.037332Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913603069, txId: 281474976711228] shutting down 2025-04-17T18:13:23.318002Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913603349, txId: 281474976711231] shutting down 2025-04-17T18:13:23.599377Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913603594, txId: 281474976711234] shutting down 2025-04-17T18:13:23.845368Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913603874, txId: 281474976711237] shutting down 2025-04-17T18:13:24.170718Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913604203, txId: 281474976711240] shutting down 2025-04-17T18:13:24.525734Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913604560, txId: 281474976711243] shutting down 2025-04-17T18:13:24.842579Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913604875, txId: 281474976711246] shutting down 2025-04-17T18:13:25.158219Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913605141, txId: 281474976711249] shutting down 2025-04-17T18:13:25.427876Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913605456, txId: 281474976711252] shutting down 2025-04-17T18:13:25.755128Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1744913605785, txId: 281474976711255] shutting down 2025-04-17T18:13:26.108836Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913606079, txId: 281474976711258] shutting down 2025-04-17T18:13:26.409468Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913606443, txId: 281474976711261] shutting down 2025-04-17T18:13:26.704929Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913606737, txId: 281474976711264] shutting down 2025-04-17T18:13:26.967360Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913607003, txId: 281474976711267] shutting down 2025-04-17T18:13:27.311063Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913607297, txId: 281474976711270] shutting down Trying to start YDB, gRPC: 7678, MsgBus: 1915 2025-04-17T18:13:28.091923Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346884020244266:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:28.092004Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013d0/r3tmp/tmpkYC70g/pdisk_1.dat 2025-04-17T18:13:28.224109Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:28.259622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:28.259732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:28.261095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7678, node 2 2025-04-17T18:13:28.313416Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:28.313436Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:28.313441Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:28.313562Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1915 TClient is connected to server localhost:1915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
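The NOT_FOUND warnings and the TX_PROXY "path exist, request accepts it" message above are the expected first-run bootstrap race for the default workload-manager resource pool: several actors concurrently try to create /Root/.metadata/workload_manager/pools/default, and a creator that loses the race accepts the already-existing path as success (the "Scheduled retry ... doublechecking" line is the creator re-validating after its transaction completes). The same sequence repeats for node 2 below. A minimal sketch of that create-if-absent pattern follows; the fetch/create functions are hypothetical stand-ins for the pool fetcher/creator requests, illustrative only and not YDB source:

    #include <iostream>
    #include <string>

    enum class EStatus { Success, NotFound, AlreadyExists };

    // Hypothetical stand-in for schemeshard state; real requests are async.
    static bool PoolInSchemeShard = false;

    EStatus FetchPool(const std::string&) {
        // The first fetch on a fresh database yields NOT_FOUND, as in the log.
        return PoolInSchemeShard ? EStatus::Success : EStatus::NotFound;
    }

    EStatus CreatePool(const std::string&) {
        if (PoolInSchemeShard) {
            return EStatus::AlreadyExists;  // "path exist, request accepts it"
        }
        PoolInSchemeShard = true;
        return EStatus::Success;
    }

    bool EnsureDefaultPool(const std::string& path) {
        if (FetchPool(path) == EStatus::Success) {
            return true;
        }
        const EStatus st = CreatePool(path);
        // Losing the creation race is not an error: the pool exists either way.
        return st == EStatus::Success || st == EStatus::AlreadyExists;
    }

    int main() {
        const std::string path = "/Root/.metadata/workload_manager/pools/default";
        std::cout << std::boolalpha << EnsureDefaultPool(path) << '\n';  // prints: true
    }

Once the TX_PROXY line has been logged the pool is in place, so these warnings are benign noise and the subsequent queries proceed normally.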
2025-04-17T18:13:28.790557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:28.809306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:28.917980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:29.106340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:13:29.214871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:13:31.819673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346896905147944:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:31.819772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:31.875126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:13:31.904881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:13:31.934975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:31.966027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:13:31.993550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:13:32.062612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:13:32.095591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:13:32.175744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346901200115910:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:32.175822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:32.175923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346901200115915:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:32.179217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:13:32.188095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346901200115917:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:13:32.275518Z node 2 :TX_PROXY ERROR: Actor# [2:7494346901200115974:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:33.092174Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346884020244266:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:33.092284Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:34.242168Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913614269, txId: 281474976715672] shutting down |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-17T18:13:16.542354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346832332961557:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.542495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ef/r3tmp/tmpL4PhTu/pdisk_1.dat 2025-04-17T18:13:16.868167Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17249, node 1 2025-04-17T18:13:16.921322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.921465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.923269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.986148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.986170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.986195Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.986299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.191803Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.194086Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.194113Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.198896Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:26262, port: 26262 2025-04-17T18:13:17.198993Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:17.258630Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:17.302540Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:17.351422Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****BogQ (3B9FD190) () has now valid token of ldapuser@ldap 2025-04-17T18:13:19.605574Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346845667243312:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:19.605706Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ef/r3tmp/tmppQCTh4/pdisk_1.dat 2025-04-17T18:13:19.735414Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:19.756974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:19.757058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:19.758987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8018, node 2 2025-04-17T18:13:19.836551Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:19.836575Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:19.836584Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:19.836695Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:19.946308Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:19.948630Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:19.948657Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:19.949359Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15854, port: 15854 2025-04-17T18:13:19.949429Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:20.002649Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:20.046511Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:20.046944Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:20.046990Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:20.090591Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:20.138417Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:20.139312Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****hawg (646217B1) () has now valid token of ldapuser@ldap 2025-04-17T18:13:22.649849Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346856548067799:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:22.649948Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ef/r3tmp/tmpwbMAI9/pdisk_1.dat 2025-04-17T18:13:22.767882Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:22.793134Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:22.793208Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:22.794511Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29141, node 3 2025-04-17T18:13:22.824120Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:22.824140Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:22.824145Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:22.824238Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:23.011827Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:23.015387Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:23.015422Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:23.016137Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:21731, port: 21731 2025-04-17T18:13:23.016242Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:23.066626Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:23.110984Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****_tTA (F1DE28A3) () has now valid token of ldapuser@ldap 2025-04-17T18:13:25.113640Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346869442289297:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:25.113686Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ef/r3tmp/tmp0nup7R/pdisk_1.dat 2025-04-17T18:13:25.208594Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20007, node 4 2025-04-17T18:13:25.241307Z node 4 :HIVE 
WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:25.241398Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:25.243027Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:25.251913Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:25.251935Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:25.251942Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:25.252076Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:25.356603Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:25.360090Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:25.360124Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:25.360867Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:5569 ldaps://localhost:5569 ldaps://localhost:11111, port: 5569 2025-04-17T18:13:25.360960Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:25.414801Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:25.466409Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:25.467011Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:25.467085Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:25.510555Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:25.561097Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:25.562052Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****aGsQ (1951FB67) () has now valid token of ldapuser@ldap 2025-04-17T18:13:28.443640Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494346881518532386:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:28.443788Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ef/r3tmp/tmp78TgkB/pdisk_1.dat 2025-04-17T18:13:28.525192Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7884, node 5 2025-04-17T18:13:28.575569Z node 5 :HIVE 
WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:28.575674Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:28.577232Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:28.578160Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:28.578188Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:28.578195Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:28.578322Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:28.662792Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:28.665808Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:28.665842Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:28.666586Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:19507, port: 19507 2025-04-17T18:13:28.666658Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:28.718986Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-17T18:13:28.762441Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:28.762956Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:28.763009Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:28.806521Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:28.850531Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:28.851616Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****hSeQ (97AB8990) () has now valid token of ldapuser@ldap 2025-04-17T18:13:31.908661Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346894650143441:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:31.908740Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ef/r3tmp/tmpSoaq4D/pdisk_1.dat 2025-04-17T18:13:31.989055Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12984, node 6 2025-04-17T18:13:32.039181Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:32.039270Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:32.040895Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:32.045545Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:32.045572Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:32.045580Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:32.045714Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:32.197249Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:32.200083Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:32.200132Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:32.200817Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:12747, port: 12747 2025-04-17T18:13:32.200884Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:32.250739Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-17T18:13:32.250809Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:12747. Bad search filter 2025-04-17T18:13:32.251325Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****K26w (230DC1D0) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:12747. 
Bad search filter)' |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2025-04-17T18:08:18.057892Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494345552490431400:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:18.059053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a29/r3tmp/tmpX9J5ua/pdisk_1.dat 2025-04-17T18:08:18.564761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:18.564850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:18.613935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:08:18.635370Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2001, node 1 2025-04-17T18:08:18.925299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:18.925320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:18.925326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:18.925443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:19.253318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:08:21.501723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565375334329:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.501867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.770344Z node 1 :TX_PROXY ERROR: Actor# [1:7494345565375334350:2626] txid# 281474976710658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-04-17T18:08:21.770481Z node 1 :TX_PROXY ERROR: Actor# [1:7494345565375334350:2626] txid# 281474976710658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-17T18:08:21.889327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494345565375334362:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.889420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:21.906364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:23.600541Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494345573677145486:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:23.600593Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a29/r3tmp/tmpkWCuVv/pdisk_1.dat 2025-04-17T18:08:23.952738Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:23.983843Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:23.983914Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:23.987562Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2818, node 4 2025-04-17T18:08:24.115171Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:24.115197Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:24.115205Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:24.115334Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:08:24.288773Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:08:26.438309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345586562048399:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.438414Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.460747Z node 4 :TX_PROXY ERROR: Actor# [4:7494345586562048420:2624] txid# 281474976710658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-04-17T18:08:26.460864Z node 4 :TX_PROXY ERROR: Actor# [4:7494345586562048420:2624] txid# 281474976710658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-17T18:08:26.570069Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494345586562048432:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.570151Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:08:26.586764Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:08:28.260802Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494345592436650647:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:08:28.260924Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a29/r3tmp/tmpMKnol4/pdisk_1.dat 2025-04-17T18:08:28.604525Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:08:28.649688Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:08:28.649786Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:08:28.655292Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16154, node 7 2025-04-17T18:08:28.863861Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:08:28.863890Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:08:28.863899Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:08:28.864053Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:08:29.295395Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... 76719643. Ctx: { TraceId: 01js2f9a2f41h5v53ztgp37jan, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:26.859274Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719644. Ctx: { TraceId: 01js2f9a4s88kn8qky4465xbzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:26.921488Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719645. 
Ctx: { TraceId: 01js2f9a6pf74fy04eb8ja156t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:26.980512Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719646. Ctx: { TraceId: 01js2f9a8m71ewxm8sy22kt1g1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.061647Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719647. Ctx: { TraceId: 01js2f9aaf72x602qfa2q13wt2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.132767Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719648. Ctx: { TraceId: 01js2f9ad12yy06sjjf6qvjj1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.204414Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719649. Ctx: { TraceId: 01js2f9afc33ne79vp3v0df26q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.279455Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719650. Ctx: { TraceId: 01js2f9ahg1gwma216md4hpfas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.357783Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719651. Ctx: { TraceId: 01js2f9akx0rv6wpf8ch7m0qzv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.432613Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719652. Ctx: { TraceId: 01js2f9ap8frftaxny3vcwhz09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.499090Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719653. Ctx: { TraceId: 01js2f9arm4kd0mbv8af78z5q7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.569675Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719654. Ctx: { TraceId: 01js2f9atp56kfvxad24vs8242, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.641333Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719655. 
Ctx: { TraceId: 01js2f9ax001529bjjz758xn79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.714890Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719656. Ctx: { TraceId: 01js2f9az464jvkrfqhe6zzbmj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.783324Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719657. Ctx: { TraceId: 01js2f9b1d0meq9mdn43wpbk9x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.859808Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719658. Ctx: { TraceId: 01js2f9b3j5w6tywtekaze3dyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:27.937139Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719659. Ctx: { TraceId: 01js2f9b5z0sr5zrbda5rn03yx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.012355Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719660. Ctx: { TraceId: 01js2f9b8cdqrb09ne2061sjwn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.085999Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719661. Ctx: { TraceId: 01js2f9bar8vxjwj4v9jqhbxbb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.157968Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719662. Ctx: { TraceId: 01js2f9bd24k99gjpxdctnnvx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.233110Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719663. Ctx: { TraceId: 01js2f9bf924w7cp36tytpt6v9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.302305Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719664. Ctx: { TraceId: 01js2f9bhs7nkk9fdry19crv6g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.376479Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719665. 
Ctx: { TraceId: 01js2f9bks4rnnxdann63fw48r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.449795Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719666. Ctx: { TraceId: 01js2f9bp357fd3yc22antw08z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.513286Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719667. Ctx: { TraceId: 01js2f9breda76ctjx8efpf8a4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.578633Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719668. Ctx: { TraceId: 01js2f9btd35g9y0pshea6pxr9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMzYTU0YzktM2QyNDRjNTktZTRmZmEwZjYtMmRkZTQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.590242Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-17T18:13:28.591086Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:13:30.597369Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494346889766117408:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:30.597419Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a29/r3tmp/tmpunnmAT/pdisk_1.dat 2025-04-17T18:13:30.770508Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:30.817266Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:30.817386Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:30.820834Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27140, node 10 2025-04-17T18:13:30.878340Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:30.878364Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:30.878380Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:30.878563Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13385 WaitRootIsUp 'Root'... 
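Each KQP_EXECUTER line above carries an empty Database field next to DatabaseId: /Root, which is what the trailing "Database not set, use /Root" notes: when the query context does not name a database, the executor falls back to the session's DatabaseId. The test client relies on that fallback; a regular client would normally pin the database when building the driver. A sketch using the YDB C++ SDK, under stated assumptions: the include path follows the monorepo layout and may differ in standalone SDK releases, and the endpoint reuses GrpcPort 27140 from the node 10 startup above purely as a placeholder:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>

    int main() {
        auto config = NYdb::TDriverConfig()
            .SetEndpoint("localhost:27140")  // placeholder gRPC endpoint
            .SetDatabase("/Root");           // pin the database explicitly
        NYdb::TDriver driver(config);
        // ... build table/query clients from `driver` and run queries ...
        driver.Stop(true);  // wait for in-flight work, then stop the driver
        return 0;
    }

With the database set in the driver config, sessions carry an explicit Database value and the /Root fallback path should not be needed.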
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:31.208626Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:34.285769Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService >> Secret::DeactivatedQueryService >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> 
CdcStreamChangeCollector::UpsertIntoTwoStreams >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2025-04-17T18:13:16.542635Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346832426733842:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.542703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014f6/r3tmp/tmpJvg1oa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17547, node 1 2025-04-17T18:13:16.884941Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:16.884971Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:16.885557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:16.922731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.922800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.925179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.986707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.986725Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.986734Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.986840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.266058Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.268718Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.268755Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.269416Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: 
ldap://localhost:1590, port: 1590 2025-04-17T18:13:17.269980Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:17.275072Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:17.318428Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:17.318737Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:17.318774Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.366384Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.410409Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.411788Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****VK8Q (A00039D0) () has now valid token of ldapuser@ldap 2025-04-17T18:13:21.543019Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346832426733842:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:21.543134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:22.554709Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****VK8Q (A00039D0) 2025-04-17T18:13:22.554783Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1590, port: 1590 2025-04-17T18:13:22.554843Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:22.557033Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:22.557271Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:1590 return no entries 2025-04-17T18:13:22.557445Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****VK8Q (A00039D0) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:1590 return no entries)' 2025-04-17T18:13:26.556482Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****VK8Q (A00039D0) 2025-04-17T18:13:27.985064Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346879370194266:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:27.985219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014f6/r3tmp/tmpBJXS53/pdisk_1.dat 2025-04-17T18:13:28.080336Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28069, node 2 2025-04-17T18:13:28.124829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:28.124908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:28.126434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:28.138794Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:28.138821Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:28.138827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:28.138934Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:28.258300Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:28.261330Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:28.261362Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:28.262043Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21093, port: 21093 2025-04-17T18:13:28.262131Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:28.266938Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:28.267324Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:21093. Server is busy 2025-04-17T18:13:28.267588Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****s1Fw (7397DD46) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:21093. 
Server is busy)' 2025-04-17T18:13:28.267859Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:28.267874Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:28.268692Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21093, port: 21093 2025-04-17T18:13:28.268771Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:28.272534Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:28.272835Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:21093. Server is busy 2025-04-17T18:13:28.273004Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****s1Fw (7397DD46) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:21093. Server is busy)' 2025-04-17T18:13:30.991499Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****s1Fw (7397DD46) 2025-04-17T18:13:30.991843Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:30.991874Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:30.992839Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21093, port: 21093 2025-04-17T18:13:30.992984Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:30.996709Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:30.997194Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:21093. Server is busy 2025-04-17T18:13:30.997460Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****s1Fw (7397DD46) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:21093. 
Server is busy)' 2025-04-17T18:13:32.985206Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346879370194266:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:32.985309Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:35.000737Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****s1Fw (7397DD46) 2025-04-17T18:13:35.000957Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:35.000980Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:35.001617Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21093, port: 21093 2025-04-17T18:13:35.001702Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:35.006920Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:35.054419Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:35.058389Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:35.058454Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:35.106761Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:35.154621Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:35.155385Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****s1Fw (7397DD46) () has now valid token of ldapuser@ldap 2025-04-17T18:13:38.003428Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****s1Fw (7397DD46) 2025-04-17T18:13:38.003514Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21093, port: 21093 2025-04-17T18:13:38.003922Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:38.024864Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:38.066401Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:38.066823Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:38.066856Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:38.111763Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:38.154462Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:38.155251Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****s1Fw (7397DD46) () has now valid token of ldapuser@ldap >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::RejectsCancel |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk >> TGroupMapperTest::NonUniformCluster2 >> TGroupMapperTest::SanitizeGroupTest3dc >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TGroupMapperTest::MonteCarlo |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TGroupMapperTest::NonUniformCluster |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD |91.5%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink >> TGroupMapperTest::NonUniformClusterMirror3dc >> TGroupMapperTest::MakeDisksNonoperational >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> KqpWorkload::STOCK [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> CdcStreamChangeCollector::IndexAndStreamUpsert >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> TGroupMapperTest::ReassignGroupTest3dc >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> BSCRestartPDisk::RestartOneByOne [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> CdcStreamChangeCollector::UpsertModifyDelete >> AsyncIndexChangeCollector::AllColumnsInPk >> KqpPg::V1CreateTable >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> DataShardTxOrder::ZigZag_oo |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 2118119465272202832 >> IndexBuildTest::RejectsCancel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 17826657446018596740 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 26226, MsgBus: 24219 
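[Note on the LDAP_AUTH_PROVIDER traces above: group membership is resolved in two steps — a subtree search for the user entry, then a search with the extensible-match filter (member:1.2.840.113556.1.4.1941:=uid=ldapuser,...), where 1.2.840.113556.1.4.1941 is the Active Directory LDAP_MATCHING_RULE_IN_CHAIN rule that expands nested groups server-side; the "Try to get nested groups - tree traversal" searches are the manual fallback for servers without that rule. A sketch of the same chained-membership query through the OpenLDAP C API; the URI, bind DN, and credentials are placeholders:

    #include <ldap.h>
    #include <cstdio>

    int main() {
        LDAP* ld = nullptr;
        // Placeholder URI; the tests above bind to localhost on a random port.
        if (ldap_initialize(&ld, "ldap://localhost:389") != LDAP_SUCCESS)
            return 1;

        int version = LDAP_VERSION3;
        ldap_set_option(ld, LDAP_OPT_PROTOCOL_VERSION, &version);

        berval cred = {0, const_cast<char*>("")}; // placeholder simple bind
        ldap_sasl_bind_s(ld, "cn=robouser,dc=search,dc=yandex,dc=net",
                         LDAP_SASL_SIMPLE, &cred, nullptr, nullptr, nullptr);

        // LDAP_MATCHING_RULE_IN_CHAIN: the server expands nested groups itself,
        // matching the second search in the trace above.
        const char* filter =
            "(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net)";
        char* attrs[] = {const_cast<char*>("1.1"), nullptr}; // DNs only, no attributes

        LDAPMessage* res = nullptr;
        int rc = ldap_search_ext_s(ld, "dc=search,dc=yandex,dc=net",
                                   LDAP_SCOPE_SUBTREE, filter, attrs, /*attrsonly=*/0,
                                   nullptr, nullptr, /*timeout=*/nullptr,
                                   LDAP_NO_LIMIT, &res);
        if (rc == LDAP_SUCCESS) {
            for (LDAPMessage* e = ldap_first_entry(ld, res); e != nullptr;
                 e = ldap_next_entry(ld, e)) {
                char* dn = ldap_get_dn(ld, e);
                std::printf("group: %s\n", dn);
                ldap_memfree(dn);
            }
        }
        ldap_msgfree(res);
        ldap_unbind_ext_s(ld, nullptr, nullptr);
        return rc == LDAP_SUCCESS ? 0 : 1;
    }
]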
2025-04-17T18:12:53.108640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346733290433667:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:53.108761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00162f/r3tmp/tmpdhBfQd/pdisk_1.dat 2025-04-17T18:12:53.381712Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26226, node 1 2025-04-17T18:12:53.463631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:53.463745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:53.465584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:12:53.466057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:53.466077Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:53.466085Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:53.466224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24219 TClient is connected to server localhost:24219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:53.915515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:55.556798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346741880368921:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.556909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.780864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.887171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.385913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:12:56.761098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346746175340367:2632], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.761188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.761279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346746175340372:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:56.764907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:12:56.774638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346746175340374:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:12:56.845880Z node 1 :TX_PROXY ERROR: Actor# [1:7494346746175340491:5086] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:58.108484Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346733290433667:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:58.108539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:08.381350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:13:08.381396Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.480852s took: 0.499964s took: 0.508717s took: 0.510420s took: 0.511309s took: 0.515267s took: 0.516328s took: 0.517875s took: 0.523638s took: 0.525242s took: 5.834704s took: 5.842073s took: 5.844462s took: 5.849713s 2025-04-17T18:13:34.445878Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711082; 2025-04-17T18:13:34.447244Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346909384111307:4978], Table: `/Root/stock` ([72057594046644480:2:1]), SessionActorId: [1:7494346883614305557:4978]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7494346909384111307:4978].{
: Error: Operation is aborting because locks are not valid, code: 2001 } took: 5.858014s 2025-04-17T18:13:34.447918Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346909384110316:4978], SessionActorId: [1:7494346883614305557:4978], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346883614305557:4978]. isRollback=0 2025-04-17T18:13:34.448269Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWViOWVjOGItMmI4YjQ1My00YjA3NjJlMC02YTNlMDdkNA==, ActorId: [1:7494346883614305557:4978], ActorState: ExecuteState, TraceId: 01js2f9cbtf2t2v8m8nmgeg0kg, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346909384111170:4978] from: [1:7494346909384110316:4978] 2025-04-17T18:13:34.448372Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346909384111170:4978] TxId: 281474976711082. Ctx: { TraceId: 01js2f9cbtf2t2v8m8nmgeg0kg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWViOWVjOGItMmI4YjQ1My00YjA3NjJlMC02YTNlMDdkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:13:34.448709Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWViOWVjOGItMmI4YjQ1My00YjA3NjJlMC02YTNlMDdkNA==, ActorId: [1:7494346883614305557:4978], ActorState: ExecuteState, TraceId: 01js2f9cbtf2t2v8m8nmgeg0kg, Create QueryResponse for error on request, msg: took: 5.863244s took: 5.864413s 2025-04-17T18:13:34.454566Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976711082; 2025-04-17T18:13:34.454742Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744913614494 : 281474976711082] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } took: 5.866572s 2025-04-17T18:13:34.456433Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976711082; 2025-04-17T18:13:34.456531Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744913614494 : 281474976711082] from 72075186224037914 at tablet 72075186224037914, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } took: 5.869110s took: 5.870727s 2025-04-17T18:13:40.158745Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711164; 2025-04-17T18:13:40.162028Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711165; 2025-04-17T18:13:40.163065Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711166; 2025-04-17T18:13:40.163914Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711168; 2025-04-17T18:13:40.165608Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346935153917057:5456], Table: `/Root/stock` ([72057594046644480:2:1]), SessionActorId: [1:7494346909384111401:5456]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7494346935153917057:5456].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:13:40.165705Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346930858948822:5456], SessionActorId: [1:7494346909384111401:5456], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7494346909384111401:5456]. isRollback=0 2025-04-17T18:13:40.165991Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzNhNWIyNWMtZmMzMDJlNzgtZjhlMTEwYjctN2E3N2RlNTY=, ActorId: [1:7494346909384111401:5456], ActorState: ExecuteState, TraceId: 01js2f9j997q2w5p2hb7ejg5mt, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7494346930858949523:5456] from: [1:7494346930858948822:5456] 2025-04-17T18:13:40.166076Z node 1 :KQP_EXE ... 8:13:41.188267Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-04-17T18:13:41.188275Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-17T18:13:41.188282Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:13:41.188289Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-17T18:13:41.188314Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-04-17T18:13:41.188331Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T18:13:41.188341Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-17T18:13:41.188349Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-17T18:13:41.188357Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-17T18:13:41.188365Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-17T18:13:41.188373Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-17T18:13:41.188380Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-04-17T18:13:41.188387Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-17T18:13:41.200072Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-17T18:13:41.200110Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-17T18:13:41.200125Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-17T18:13:41.200140Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-17T18:13:41.200154Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-17T18:13:41.200169Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-17T18:13:41.200183Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-17T18:13:41.200198Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-04-17T18:13:41.200212Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-17T18:13:41.200227Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-04-17T18:13:41.205742Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-04-17T18:13:41.205797Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-17T18:13:41.205815Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-17T18:13:41.205831Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-17T18:13:41.205845Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-17T18:13:41.205859Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-04-17T18:13:41.205874Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-17T18:13:41.205889Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-17T18:13:41.206728Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-04-17T18:13:41.206929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-17T18:13:41.207345Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-17T18:13:41.207537Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-17T18:13:41.207549Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-17T18:13:41.211385Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-17T18:13:41.217839Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-17T18:13:41.318939Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2025-04-17T18:13:41.318982Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2025-04-17T18:13:41.318996Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-04-17T18:13:41.319040Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2025-04-17T18:13:41.319060Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-04-17T18:13:41.319072Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2025-04-17T18:13:41.319087Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2025-04-17T18:13:41.319101Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2025-04-17T18:13:41.319116Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2025-04-17T18:13:41.319133Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2025-04-17T18:13:41.319147Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2025-04-17T18:13:41.319159Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2025-04-17T18:13:41.319184Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2025-04-17T18:13:41.319198Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2025-04-17T18:13:41.319210Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-17T18:13:41.319230Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2025-04-17T18:13:41.319247Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-17T18:13:41.319264Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2025-04-17T18:13:41.319277Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2025-04-17T18:13:41.333412Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-04-17T18:13:41.333460Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2025-04-17T18:13:41.333475Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2025-04-17T18:13:41.333489Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2025-04-17T18:13:41.333502Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2025-04-17T18:13:41.333517Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2025-04-17T18:13:41.333532Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2025-04-17T18:13:41.333546Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2025-04-17T18:13:41.335897Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2025-04-17T18:13:41.335930Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2025-04-17T18:13:41.335943Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2025-04-17T18:13:41.335958Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-17T18:13:41.335975Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2025-04-17T18:13:41.335990Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2025-04-17T18:13:41.336008Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2025-04-17T18:13:41.341232Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2025-04-17T18:13:41.342095Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2025-04-17T18:13:41.342123Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2025-04-17T18:13:41.354236Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-04-17T18:13:41.357445Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2025-04-17T18:13:41.357486Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found |91.6%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMultiversionObjectMap::MonteCarlo |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] |91.6%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.325368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.325419Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.338262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.338499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.338642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.343929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.344074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.344546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.344687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.346770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.347711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.347767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.347878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
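[Note on the KqpWorkload::STOCK output above: the runs end in ABORTED with "Transaction locks invalidated" (issue code 2001). Under serializable isolation YDB takes optimistic locks, so concurrent writers to the same keys are expected to occasionally abort, and the client is expected to retry the whole transaction. A sketch of such a retry with the C++ SDK's built-in helper; the table and query are modeled on the /Root/stock table from that test, not copied from it:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Re-runs the transaction on retryable statuses such as ABORTED
    // ("Transaction locks invalidated"), with the SDK's default backoff.
    TStatus DecrementStock(TTableClient& client) {
        return client.RetryOperationSync([](TSession session) -> TStatus {
            auto result = session.ExecuteDataQuery(
                R"(UPDATE `/Root/stock` SET quantity = quantity - 1
                   WHERE product = "p1";)",
                TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx())
                .GetValueSync();
            return result; // TDataQueryResult derives from TStatus
        });
    }
]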
2025-04-17T18:13:11.347912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.347954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.348043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.352803Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:11.449759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.449960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.450135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.450328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.450374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.452305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.452412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.452549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.452591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.452616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.452646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.453921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.453956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.453984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.455341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.455389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.455428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
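[Note on this IndexBuildTest::RejectsCancel trace: it drives a build of index1 on /MyRoot/Table through the schemeshard and later verifies that cancelling an already-finished build is rejected with PRECONDITION_FAILED ("Index build process with id <102> has been finished already"). From a client, the equivalent background build is started with a YQL ALTER TABLE; a sketch that borrows the table, index, and column names from the trace but assumes the same NYdb session plumbing as in the notes above:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Starts the background secondary-index build that the trace follows;
    // its progress and cancellation are then managed as a long-running operation.
    TStatus AddIndex(TTableClient& client) {
        return client.RetryOperationSync([](TSession session) -> TStatus {
            return session.ExecuteSchemeQuery(
                "ALTER TABLE `/MyRoot/Table` ADD INDEX index1 GLOBAL ON (`index`);")
                .GetValueSync();
        });
    }
]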
2025-04-17T18:13:11.455514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.464356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.466406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.466573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.467491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.467607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.467664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.467986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.468047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.468206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.468304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.470315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.470365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.470515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.470557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.470911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.470959Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.471062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.471122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.471185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.471248Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.471319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.471371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.471406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.471435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.471497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.471533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.471562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.473253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.473366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.473421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... TxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1166:3019], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:44.832390Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:13:44.834053Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:13:44.834123Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1166:3019], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, 
ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:13:44.834170Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-17T18:13:44.834342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:13:44.834381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1260:3102] TestWaitNotification: OK eventTxId 102 2025-04-17T18:13:44.836728Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-17T18:13:44.836864Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2025-04-17T18:13:44.839113Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-17T18:13:44.839329Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } 2025-04-17T18:13:44.841457Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:13:44.841674Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 246us result status StatusSuccess 2025-04-17T18:13:44.842067Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:44.844166Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:13:44.844416Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 264us result status StatusSuccess 2025-04-17T18:13:44.845172Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log}
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest
>> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD]
>> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood
>> TGroupMapperTest::NonUniformCluster2 [GOOD]
>> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc3Nodes [GOOD]
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD]
>> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD]
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD]
>> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD]
>> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
>> TCertificateCheckerTest::CheckSubjectDns
>> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
|91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
>> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD]
>> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow
>> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD]
>> AsyncIndexChangeCollector::AllColumnsInPk [GOOD]
>> AsyncIndexChangeCollector::CoverIndexedColumn
>> CdcStreamChangeCollector::UpsertModifyDelete [GOOD]
>> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD]
>> CdcStreamChangeCollector::NewImage
>> TCertificateCheckerTest::CheckSubjectDns [GOOD]
|91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD]
>> CdcStreamChangeCollector::DeleteNothing [GOOD]
>> CdcStreamChangeCollector::DeleteSingleRow
|91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD]
Test command err:
2025-04-17T18:13:36.874803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:36.875694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:36.875763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019bc/r3tmp/tmpKQka9w/pdisk_1.dat 2025-04-17T18:13:37.276112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.328465Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:37.337163Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-17T18:13:37.372245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:37.373003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:37.385532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:37.474857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.526961Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:13:37.527234Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.575927Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.576031Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.577601Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:13:37.577694Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:13:37.577755Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:13:37.578704Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.578843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.578924Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:13:37.589661Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.613175Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:13:37.614590Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.614766Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:13:37.614809Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.614850Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:13:37.614885Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T18:13:37.616107Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:13:37.616241Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:13:37.616372Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.616430Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.616545Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:13:37.616600Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.616717Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:13:37.617877Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:37.618261Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:13:37.618361Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:13:37.620185Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.630869Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:37.630967Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.780113Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:13:37.784020Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:37.784096Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.784831Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.784884Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.784937Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:37.785134Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:37.785241Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:37.785939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.786009Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:37.788873Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:37.790497Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.791924Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:37.791958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.792345Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:37.792408Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.793515Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.793562Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.793616Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:37.793688Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:37.793749Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:37.793854Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.797148Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.799306Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:37.799918Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:37.799998Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:37.815965Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:37.816100Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:37.816159Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-17T18:13:37.816216Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-17T18:13:37.817656Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.842287Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:38.110669Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:38.110751Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:38.111053Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:38.111108Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-17T18:13:38.111154Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:38.111358Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-17T18:13:38.111501Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:38.111794Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:38.112896Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:38.181193Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T18:13:38.181290Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:38.181332Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:38.181391Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... rTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:47.126292Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:47.126774Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:47.126842Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:47.127516Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:47.127550Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:47.127589Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:47.127635Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:47.127675Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:47.127753Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:47.129011Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:47.131252Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:47.131326Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:47.131979Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:47.140386Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:47.140502Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:47.140541Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 
2025-04-17T18:13:47.140567Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888
2025-04-17T18:13:47.141659Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T18:13:47.165995Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-17T18:13:47.360855Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-17T18:13:47.360926Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T18:13:47.361217Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T18:13:47.361269Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-04-17T18:13:47.361316Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888
2025-04-17T18:13:47.361515Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0
2025-04-17T18:13:47.361643Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-17T18:13:47.361916Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-17T18:13:47.362669Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-17T18:13:47.400634Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500}
2025-04-17T18:13:47.400728Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T18:13:47.400755Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-17T18:13:47.400788Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T18:13:47.400839Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-04-17T18:13:47.400885Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0
2025-04-17T18:13:47.400946Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-17T18:13:47.403229Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready
2025-04-17T18:13:47.403307Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-17T18:13:47.409397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:47.409463Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:896:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:47.409545Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:47.414078Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:13:47.418785Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T18:13:47.573276Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-17T18:13:47.576642Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:13:47.604842Z node 3 :TX_PROXY ERROR: Actor# [3:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:47.654622Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2f9y8f9psq0zercembhegk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTY4Zjg3YjgtM2I0NGQzLTVjY2EzOWJlLWRiYWEyMWVh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:47.657057Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:987:2786], serverId# [3:988:2787], sessionId# [0:0:0] 2025-04-17T18:13:47.657368Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-17T18:13:47.657609Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744913627657517 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:47.657825Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-17T18:13:47.668816Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:47.668894Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:47.714962Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2f9ygr32vq1tp9gkdg1q64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzYyMzg5ZDUtZjUyYmNkYTAtY2IzODBmNGQtYzc4YmU1ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:47.716851Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-17T18:13:47.717083Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744913627717015 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:47.717187Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-17T18:13:47.727883Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:47.727943Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:47.793156Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01js2f9yjj3vvavjjyknfevrwa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTg1YTYwODgtYmNhZjJiMmMtZTVmMmI4YWQtNDFjYWIzOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:47.795091Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-04-17T18:13:47.795357Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744913627795256 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:47.795483Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:5] at 72075186224037888, row count=1 2025-04-17T18:13:47.806334Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:47.806390Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:47.807874Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1034:2816], serverId# [3:1035:2817], sessionId# [0:0:0] 2025-04-17T18:13:47.812952Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1036:2818], serverId# [3:1037:2819], sessionId# [0:0:0] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |91.7%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD]
Test command err:
2025-04-17T18:13:16.542388Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346830211548271:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:13:16.542454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d8/r3tmp/tmpQep2bS/pdisk_1.dat
2025-04-17T18:13:16.855739Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11732, node 1
2025-04-17T18:13:16.923513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:13:16.923825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:13:16.929786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:13:16.988207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:13:16.988231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:13:16.988236Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:13:16.988338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:13:17.210350Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1
2025-04-17T18:13:17.211084Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1
2025-04-17T18:13:17.211115Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-04-17T18:13:17.212386Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2213, port: 2213
2025-04-17T18:13:17.212478Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net
2025-04-17T18:13:17.266577Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN
2025-04-17T18:13:17.312000Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Fbtg (8AB2B6DC) () has now valid token of ldapuser@ldap
2025-04-17T18:13:19.191867Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346846320155879:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:13:19.192012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d8/r3tmp/tmpV2eTN1/pdisk_1.dat
2025-04-17T18:13:19.354501Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:13:19.357744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:13:19.357832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:13:19.359651Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 6033, node 2 2025-04-17T18:13:19.401375Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:19.401396Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:19.401401Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:19.401500Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:19.513167Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:19.515471Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:19.515503Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:19.516127Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:5711, port: 5711 2025-04-17T18:13:19.516190Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:19.570645Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:5711. Invalid credentials 2025-04-17T18:13:19.571188Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****hUxQ (EF6969AE) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:5711. Invalid credentials)' 2025-04-17T18:13:21.817142Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346853203624838:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:21.817225Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d8/r3tmp/tmpnjSIhd/pdisk_1.dat 2025-04-17T18:13:21.913243Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18499, node 3 2025-04-17T18:13:21.946549Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:21.946708Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:21.948225Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:21.961022Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:21.961050Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:21.961058Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:21.961173Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:22.026493Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:22.029664Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:22.029698Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:22.030276Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:20733, port: 20733 
2025-04-17T18:13:22.030346Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:22.078685Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:20733. Invalid credentials 2025-04-17T18:13:22.079053Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****YYAA (362D28FB) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:20733. Invalid credentials)' 2025-04-17T18:13:24.180197Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346867566927216:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:24.180278Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d8/r3tmp/tmpnz5Haa/pdisk_1.dat 2025-04-17T18:13:24.263548Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4842, node 4 2025-04-17T18:13:24.308121Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:24.308177Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:24.309300Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:24.318078Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:24.318100Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:24.318105Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:24.318236Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:24.473296Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:24.475675Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:24.475691Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:24.476151Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:17305, port: 17305 2025-04-17T18:13:24.476215Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:24.530647Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:24.531003Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:17305 return no entries 2025-04-17T18:13:24.531450Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****YSFw (A1F84086) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:17305 return no entries)' 2025-04-17T18:13:27.051827Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494346878251111725:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:27.051918Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d8/r3tmp/tmpgjnLhz/pdisk_1.dat 2025-04-17T18:13:27.133498Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14229, node 5 2025-04-17T18:13:27.181138Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:27.181219Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:27.182891Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:27.184787Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:27.184805Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:27.184813Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:27.184928Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:27.277030Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:27.280583Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:27.280609Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:27.281317Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:17585, port: 17585 2025-04-17T18:13:27.281408Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:27.334669Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:27.378467Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:27.378919Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:27.378971Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:27.426516Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:27.470502Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:27.471472Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****yCYw 
(CB3907D2) () has now valid token of ldapuser@ldap 2025-04-17T18:13:31.059419Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****yCYw (CB3907D2) 2025-04-17T18:13:31.059516Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:17585, port: 17585 2025-04-17T18:13:31.059586Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:31.110683Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:31.154616Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:31.155179Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:31.155225Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:31.198510Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:31.247491Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:31.248561Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****yCYw (CB3907D2) () has now valid token of ldapuser@ldap 2025-04-17T18:13:32.052000Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494346878251111725:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:32.052116Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:34.060492Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****yCYw (CB3907D2) 2025-04-17T18:13:34.060802Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:17585, port: 17585 2025-04-17T18:13:34.060870Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:34.114723Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:34.162496Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:34.163022Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:34.163063Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:34.206434Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:34.250521Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:34.255292Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****yCYw (CB3907D2) () has now valid token of ldapuser@ldap 2025-04-17T18:13:38.182426Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346924732874916:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:38.182643Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d8/r3tmp/tmpXC4CBg/pdisk_1.dat 2025-04-17T18:13:38.307369Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:38.336931Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:38.337017Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:38.338923Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17678, node 6 2025-04-17T18:13:38.387119Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:38.387145Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:38.387152Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:38.387277Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:38.490309Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:38.493059Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:38.493089Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:38.493810Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:22347, port: 22347 2025-04-17T18:13:38.493881Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:38.550745Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:38.595159Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****y0SA (848ED5BE) () has now valid token of ldapuser@ldap 2025-04-17T18:13:43.182860Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494346924732874916:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:43.182982Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:44.188950Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****y0SA (848ED5BE) 2025-04-17T18:13:44.191443Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:22347, port: 22347 2025-04-17T18:13:44.191541Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:44.246660Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:44.291033Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****y0SA 
(848ED5BE) () has now valid token of ldapuser@ldap >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-04-17T18:13:16.543157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346831571877835:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.543250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014da/r3tmp/tmp7NrfFW/pdisk_1.dat 2025-04-17T18:13:16.854807Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30716, node 1 2025-04-17T18:13:16.903766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.904446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.907034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.986806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.986830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.986836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.986940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.245259Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.248932Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.248958Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.249634Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22659, port: 22659 2025-04-17T18:13:17.250173Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:17.254468Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-17T18:13:17.302873Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****3qcA (8ECF65C0) () has now valid token of ldapuser@ldap 2025-04-17T18:13:19.086034Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346844226026447:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:19.086128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014da/r3tmp/tmpL7YBe8/pdisk_1.dat 2025-04-17T18:13:19.165283Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13778, node 2 2025-04-17T18:13:19.216378Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:19.216460Z node 2 :HIVE WARN: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:19.217338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:19.234131Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:19.234150Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:19.234157Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:19.234268Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:19.364928Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:19.366129Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:19.366154Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:19.366777Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10925, port: 10925 2025-04-17T18:13:19.366869Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:19.373330Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:10925. Invalid credentials 2025-04-17T18:13:19.373566Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****EwTQ (059E2A1C) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:10925. Invalid credentials)' 2025-04-17T18:13:21.973111Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346853382899310:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:21.973200Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014da/r3tmp/tmpeixMzg/pdisk_1.dat 2025-04-17T18:13:22.061099Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29482, node 3 2025-04-17T18:13:22.098725Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:22.098743Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:22.098748Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:22.098833Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:22.100002Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:22.100063Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:22.101488Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:22.246504Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:22.249238Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:22.249267Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 
2025-04-17T18:13:22.249817Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15723, port: 15723 2025-04-17T18:13:22.249910Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:22.252800Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15723. Invalid credentials 2025-04-17T18:13:22.253029Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****3nbg (0E580174) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15723. Invalid credentials)' 2025-04-17T18:13:24.520059Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346865435540727:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:24.520126Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014da/r3tmp/tmpMZA91b/pdisk_1.dat 2025-04-17T18:13:24.601363Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4110, node 4 2025-04-17T18:13:24.645010Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:24.645105Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:24.646779Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:24.651119Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:24.651141Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:24.651145Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:24.651233Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:24.747074Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:24.750370Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:24.750402Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:24.751088Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22437, port: 22437 2025-04-17T18:13:24.751187Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:24.753686Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:24.753934Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:22437 return no entries 2025-04-17T18:13:24.754088Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****J7QA (89357683) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:22437 return no entries)' 2025-04-17T18:13:27.544705Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494346876771866902:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:27.544782Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014da/r3tmp/tmpORt2Qt/pdisk_1.dat 2025-04-17T18:13:27.626542Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31283, node 5 2025-04-17T18:13:27.670999Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:27.671121Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:27.672677Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:27.686857Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:27.686885Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:27.686892Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:27.687025Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:27.765483Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:27.769270Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:27.769301Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:27.769960Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29650, port: 29650 2025-04-17T18:13:27.770032Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:27.773178Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:27.814449Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:27.814867Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:27.814920Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:27.858492Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:27.906434Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:27.907093Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Bc8A 
(FB231E3F) () has now valid token of ldapuser@ldap 2025-04-17T18:13:31.552635Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Bc8A (FB231E3F) 2025-04-17T18:13:31.552731Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29650, port: 29650 2025-04-17T18:13:31.552832Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:31.556728Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:31.598477Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:31.598932Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:31.598981Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:31.646456Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:31.690454Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:31.691231Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Bc8A (FB231E3F) () has now valid token of ldapuser@ldap 2025-04-17T18:13:32.544928Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494346876771866902:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:32.545034Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:35.558836Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Bc8A (FB231E3F) 2025-04-17T18:13:35.559041Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29650, port: 29650 2025-04-17T18:13:35.559112Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:35.562162Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:35.610477Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:35.610961Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:35.610996Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:35.658474Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:35.706440Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:35.707167Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Bc8A (FB231E3F) () has now valid token of ldapuser@ldap 2025-04-17T18:13:38.486141Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346926071676861:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:38.486273Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014da/r3tmp/tmpWLHHE2/pdisk_1.dat 2025-04-17T18:13:38.660274Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:38.662688Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:38.662751Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:38.664841Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3684, node 6 2025-04-17T18:13:38.734762Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:38.734782Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:38.734790Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:38.734902Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:38.834403Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:38.838451Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:38.838487Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:38.839301Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10861, port: 10861 2025-04-17T18:13:38.839404Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:38.843514Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:38.886727Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****nsFw (26BAD237) () has now valid token of ldapuser@ldap 2025-04-17T18:13:41.500226Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nsFw (26BAD237) 2025-04-17T18:13:41.500284Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10861, port: 10861 2025-04-17T18:13:41.500381Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:41.510777Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:41.555971Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****nsFw (26BAD237) () has now valid token of ldapuser@ldap 2025-04-17T18:13:43.490305Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494346926071676861:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:43.490408Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:45.504640Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nsFw (26BAD237) 2025-04-17T18:13:45.504715Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10861, port: 10861 2025-04-17T18:13:45.504802Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:45.507912Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:45.550749Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****nsFw (26BAD237) () has now valid token of ldapuser@ldap 2025-04-17T18:13:48.505972Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nsFw (26BAD237) 2025-04-17T18:13:48.506084Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10861, port: 10861 2025-04-17T18:13:48.506204Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:48.518504Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:48.566664Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****nsFw (26BAD237) () has now valid token of ldapuser@ldap >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> TTransferTests::Create_Disabled >> TTransferTests::Create |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-04-17T18:13:36.786336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:36.786882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:36.786933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019a8/r3tmp/tmpssAp48/pdisk_1.dat 2025-04-17T18:13:37.279503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.328409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:37.372269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:37.373030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:37.385584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:37.474838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.527595Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-17T18:13:37.527831Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.575297Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.575419Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.577173Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:13:37.577251Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:13:37.577317Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:13:37.578705Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.578965Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.579034Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2578] in generation 1 2025-04-17T18:13:37.580485Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-04-17T18:13:37.580682Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.589885Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.589996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.591278Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:13:37.591344Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:13:37.591405Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:13:37.591681Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.591769Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.591840Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-17T18:13:37.602609Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.627013Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:13:37.627242Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.627363Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-17T18:13:37.627406Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.627439Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:13:37.627479Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.627751Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.627783Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:13:37.627831Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.627913Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-17T18:13:37.627949Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:13:37.627971Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:13:37.628003Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.628365Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:13:37.628469Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:13:37.628618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.628672Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.628721Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:13:37.628761Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.628821Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:13:37.628897Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:13:37.629311Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:690:2585], sessionId# [0:0:0] 2025-04-17T18:13:37.629365Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.629390Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.629412Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:13:37.629460Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:37.629586Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-17T18:13:37.629797Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:13:37.629901Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:13:37.631594Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.642942Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:37.643049Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.689149Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-17T18:13:37.689313Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:13:37.689571Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:37.689647Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:13:37.690117Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:37.700791Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:13:37.700874Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.819689Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:742:2620], sessionId# [0:0:0] 2025-04-17T18:13:37.819973Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-17T18:13:37.825009Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T18:13:37.825098Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.825541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.825594Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.825650Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-17T18:13:37.825932Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:37.826094Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:37.826599Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:37.826637Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T18:13:37.826729Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.826792Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:37.828467Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:37.828782Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.829894Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.829933Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.829962Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:37.830133Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T ... Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:50.534553Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:50.534583Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:50.534736Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:50.534835Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:50.534943Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:50.535010Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:50.535558Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:50.536006Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:50.539269Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:50.539342Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-17T18:13:50.539700Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:50.540026Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:50.541120Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-17T18:13:50.541204Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:50.542470Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-17T18:13:50.542590Z node 4 :TX_DATASHARD 
DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:50.543232Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:50.543306Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:50.544386Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:50.544427Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:50.544473Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:50.544526Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:13:50.544575Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-17T18:13:50.544651Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:50.544711Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:50.544834Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:50.546545Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:50.546615Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:50.546786Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:50.546818Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:50.546855Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:50.546901Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:50.546949Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:50.547056Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:50.550774Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:50.551092Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:50.551537Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-17T18:13:50.551605Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T18:13:50.552566Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:50.552634Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:50.562243Z node 4 
:KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:50.562364Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:50.562452Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:50.567947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:13:50.574004Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:50.574137Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:50.726285Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:50.726413Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:50.729515Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:13:50.764304Z node 4 :TX_PROXY ERROR: Actor# [4:880:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:50.843421Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2fa1b0aqk6yqhg9c34cgq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDQ2ODhkODYtNjQ3Yzk1MTMtZGFiNDkzZDItYWM0YWEyMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:50.845443Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:949:2744], serverId# [4:950:2745], sessionId# [0:0:0] 2025-04-17T18:13:50.845787Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-17T18:13:50.846026Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744913630845938 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T18:13:50.846169Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-17T18:13:50.857225Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-17T18:13:50.857326Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:50.900391Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2fa1mcftjx5vr6a109gwj7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YzkxMzdkZTYtNjIyOWY2ODgtMjNlYmFkYmItODY4MmI4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:13:50.902318Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037889 2025-04-17T18:13:50.902569Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744913630902484 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T18:13:50.902701Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744913630902484 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T18:13:50.902764Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-04-17T18:13:50.913567Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-17T18:13:50.913625Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:50.917218Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:990:2776], serverId# [4:991:2777], sessionId# [0:0:0] 2025-04-17T18:13:50.922893Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:992:2778], serverId# [4:993:2779], sessionId# [0:0:0] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:11:06.887514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:11:06.887595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:06.887637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:11:06.887674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:11:06.888644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:11:06.888693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-04-17T18:11:06.888766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:11:06.888850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:11:06.889800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:11:06.949166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:11:06.949199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:11:06.962279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:11:06.962611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:11:06.962749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:11:06.970060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:11:06.970263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:11:06.970829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:06.971789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:11:06.977235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:06.985047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:06.985119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:06.985935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:11:06.985992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:06.986036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:11:06.986170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:11:06.991486Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:11:07.079883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:11:07.080795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.081636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:11:07.082697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-04-17T18:11:07.082753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.085405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:07.085512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:11:07.085713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.085762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:11:07.085804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:11:07.085871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:11:07.087609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.087659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:11:07.087688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:11:07.089246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.089284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.089334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.089393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.093744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:11:07.095674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:11:07.095855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:11:07.097807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:11:07.097935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:11:07.097999Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.099220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:11:07.099281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:11:07.100403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:11:07.100502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:11:07.102804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:11:07.102848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:11:07.103013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:11:07.103052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:11:07.103398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:11:07.103500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:11:07.103622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:07.103655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.103692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:11:07.103720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.103751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:11:07.103804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:11:07.103846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:11:07.103884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:11:07.103933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:11:07.103967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:11:07.103995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:11:07.105641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:07.105730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:11:07.105764Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, co ... d::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:13:50.696689Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:13:50.696729Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:13:51.084821Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:312:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:13:51.085059Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-17T18:13:51.085440Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:312:2299], Recipient [3:129:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 279 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-17T18:13:51.085481Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:13:51.085525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0279 2025-04-17T18:13:51.085656Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:13:51.085698Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T18:13:51.096258Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:13:51.096346Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:13:51.096451Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 
2025-04-17T18:13:51.096487Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:13:51.139095Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.139183Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.139230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-17T18:13:51.139347Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T18:13:51.139390Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T18:13:51.139525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-17T18:13:51.139611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-17T18:13:51.139712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-04-17T18:13:51.139798Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:13:51.150375Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.150454Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.150481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:13:51.181842Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:712:2679]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:13:51.182144Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-04-17T18:13:51.182557Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:712:2679], Recipient [3:129:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 59 Memory: 124232 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 212 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-17T18:13:51.182598Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T18:13:51.182655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got 
periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0059 2025-04-17T18:13:51.182792Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:13:51.182850Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T18:13:51.224579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-17T18:13:51.224672Z node 3 :FLAT_TX_SCHEMESHARD INFO: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-17T18:13:51.224716Z node 3 :FLAT_TX_SCHEMESHARD INFO: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-17T18:13:51.224797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 3 seconds 2025-04-17T18:13:51.224827Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-17T18:13:51.224977Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.225035Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.225075Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-17T18:13:51.225164Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T18:13:51.225213Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T18:13:51.225323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-04-17T18:13:51.225394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2025-04-17T18:13:51.225473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Want to split tablet 72075186233409547 by size split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2025-04-17T18:13:51.225521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Postpone split tablet 72075186233409547 because it has borrow parts, enqueue compact them first 2025-04-17T18:13:51.225548Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-17T18:13:51.225613Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:13:51.236113Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.236199Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:13:51.236236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:13:51.497210Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:13:51.497295Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:13:51.497412Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:129:2154], Recipient [3:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:13:51.497442Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-04-17T18:13:36.785728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:36.786210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:36.786253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00199f/r3tmp/tmpwa10EB/pdisk_1.dat 2025-04-17T18:13:37.275796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.329739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:37.372286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:37.373024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:37.385538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:37.474861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.526967Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-17T18:13:37.527235Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.574954Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.575035Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.577211Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:13:37.577284Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:13:37.577360Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:13:37.578705Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.578984Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.579068Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2578] in generation 1 2025-04-17T18:13:37.580485Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-04-17T18:13:37.580726Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.589880Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.589996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.591291Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:13:37.591356Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:13:37.591421Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:13:37.591681Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.591773Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.591845Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-17T18:13:37.602606Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.630784Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:13:37.630972Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.631071Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-17T18:13:37.631109Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.631158Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:13:37.631204Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.631467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.631500Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:13:37.631554Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.631650Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-17T18:13:37.631688Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:13:37.631710Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:13:37.631744Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.632099Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:13:37.632209Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:13:37.632343Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.632377Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.632420Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:13:37.632465Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.632524Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:13:37.632594Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:13:37.632958Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:690:2585], sessionId# [0:0:0] 2025-04-17T18:13:37.633010Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.633068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.633100Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:13:37.633151Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:37.633289Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-17T18:13:37.633489Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:13:37.633569Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:13:37.635398Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.646056Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:37.646172Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.689149Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-17T18:13:37.689312Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:13:37.689560Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:37.689645Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:13:37.690110Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:37.700764Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:13:37.700849Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.825495Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:742:2620], sessionId# [0:0:0] 2025-04-17T18:13:37.825748Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-17T18:13:37.833997Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T18:13:37.834069Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.834541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.834595Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.834651Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-17T18:13:37.834890Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:37.835043Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:37.835370Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:37.835402Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T18:13:37.835496Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.835556Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:37.837441Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:37.837950Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.839353Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.839393Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.839427Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:37.839635Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T ... WaitScheme 2025-04-17T18:13:51.452747Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:740:2618], serverId# [4:743:2621], sessionId# [0:0:0] 2025-04-17T18:13:51.453235Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:741:2619], serverId# [4:744:2622], sessionId# [0:0:0] 2025-04-17T18:13:51.453573Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T18:13:51.453628Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:51.453914Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:51.453943Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:51.454044Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:51.454091Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:51.454161Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-17T18:13:51.454482Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:51.454635Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:51.454805Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:51.454837Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:51.454871Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:51.455006Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx 
from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:51.455115Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:51.455207Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:51.455280Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:51.455783Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:51.456204Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:51.459131Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:51.459196Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-17T18:13:51.459506Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:51.459809Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:51.460871Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-17T18:13:51.460925Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:51.462064Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-17T18:13:51.462147Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:51.462802Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:51.462876Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:51.463878Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:51.463918Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:51.463967Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:51.464015Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:13:51.464063Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-17T18:13:51.464152Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:51.464216Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:51.464314Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:51.465888Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.465954Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:51.466113Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:51.466160Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:51.466233Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:51.466292Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:51.466339Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:51.466423Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:51.469948Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:51.470233Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:51.470665Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-17T18:13:51.470732Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T18:13:51.471686Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:51.471730Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:51.480902Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:51.481022Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:51.481111Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:51.486680Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:13:51.492724Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.492864Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:51.642041Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.642122Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:51.644209Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:13:51.678872Z node 4 :TX_PROXY ERROR: Actor# [4:880:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:51.747384Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2fa27qc1ht6ja0at3st18r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTQxM2EyMmEtYjEyNWU2MmQtNTQyYmQ5M2MtNTA3YWNlZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:51.749875Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:949:2744], serverId# [4:950:2745], sessionId# [0:0:0] 2025-04-17T18:13:51.750285Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-17T18:13:51.750555Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744913631750463 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T18:13:51.750738Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-17T18:13:51.761764Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-17T18:13:51.761855Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:51.766284Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:956:2750], serverId# [4:957:2751], sessionId# [0:0:0] 2025-04-17T18:13:51.772263Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:958:2752], serverId# [4:959:2753], sessionId# [0:0:0] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> CdcStreamChangeCollector::NewImage [GOOD] >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-04-17T18:13:36.971639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:36.972120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:36.972178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019ad/r3tmp/tmpMfRCnr/pdisk_1.dat 2025-04-17T18:13:37.343619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.380819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:37.420389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:37.420523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:37.432538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:37.514736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.566348Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-17T18:13:37.566550Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.606702Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.606826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.608164Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:13:37.608242Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:13:37.608296Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:13:37.608596Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.608792Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.608865Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2578] in generation 1 2025-04-17T18:13:37.610038Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-04-17T18:13:37.610231Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.617774Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.617871Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.619101Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:13:37.619179Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:13:37.619249Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:13:37.619461Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.619549Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.619599Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-17T18:13:37.630440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.663339Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:13:37.663566Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.663686Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-17T18:13:37.663722Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.663764Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:13:37.663802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.664126Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.664178Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:13:37.664243Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.664298Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-17T18:13:37.664343Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:13:37.664369Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:13:37.664390Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.664784Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:13:37.664900Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:13:37.665054Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.665094Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.665145Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:13:37.665201Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.665244Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:13:37.665296Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:13:37.665692Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:690:2585], sessionId# [0:0:0] 2025-04-17T18:13:37.665744Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.665768Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.665793Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:13:37.665821Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:37.665969Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-17T18:13:37.666232Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:13:37.666332Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:13:37.668222Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.678985Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:37.679130Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.722619Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-17T18:13:37.722818Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:13:37.723027Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:37.723112Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:13:37.723623Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:37.734377Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:13:37.734483Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.853837Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:742:2620], sessionId# [0:0:0] 2025-04-17T18:13:37.854051Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-17T18:13:37.857504Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T18:13:37.857561Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.857956Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.858003Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.858083Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-17T18:13:37.858379Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:37.858519Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:37.858836Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:37.858863Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T18:13:37.858928Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.858981Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:37.860880Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:37.861383Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.862760Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.862803Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.862840Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:37.863055Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T ... 3:51.559881Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:51.559903Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037890 2025-04-17T18:13:51.560038Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:51.560133Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:51.560383Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-17T18:13:51.560435Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:51.560473Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:13:51.560503Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2025-04-17T18:13:51.560681Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:51.560863Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:51.563087Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:51.563130Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:13:51.563165Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-17T18:13:51.563215Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:51.563252Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:51.563340Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:51.563790Z node 4 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:51.563821Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:51.564366Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:51.564392Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:51.565166Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:51.565205Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:51.565246Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:51.565287Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:51.565324Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:51.565386Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:51.565442Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-04-17T18:13:51.565460Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:13:51.565870Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-04-17T18:13:51.565918Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:13:51.568026Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:51.568123Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:13:51.568421Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.568508Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:13:51.568539Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T18:13:51.568572Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-17T18:13:51.568612Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:51.568648Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:51.568706Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:13:51.572840Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:51.573096Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-17T18:13:51.573168Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got 
TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T18:13:51.573862Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:51.574033Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:51.574263Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:51.574305Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:51.575416Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-17T18:13:51.575464Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-17T18:13:51.583897Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:837:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:51.583989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:847:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:51.584058Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:51.588511Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:13:51.592863Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.592953Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:51.593005Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:13:51.739324Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.739408Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:51.739445Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-17T18:13:51.741413Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:851:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:13:51.775842Z node 4 :TX_PROXY ERROR: Actor# [4:933:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:51.833843Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2fa2ay61whctjbggnra4wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YzAwN2U4NTEtY2VmZWJmMGYtZGY5YmNjOTQtMjljOTQyNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:51.835866Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1038:2799], serverId# [4:1039:2800], sessionId# [0:0:0] 2025-04-17T18:13:51.836258Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-17T18:13:51.836477Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744913631836403 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T18:13:51.836603Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744913631836403 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-17T18:13:51.836680Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-17T18:13:51.847546Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-17T18:13:51.847627Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:51.851457Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1045:2805], serverId# [4:1046:2806], sessionId# [0:0:0] 2025-04-17T18:13:51.856118Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1047:2807], serverId# [4:1048:2808], sessionId# [0:0:0] |91.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> Secret::Deactivated [GOOD] >> TargetDiscoverer::SystemObjects >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TargetDiscoverer::InvalidCredentials >> TargetDiscoverer::IndexedTable >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-17T18:13:16.565854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346829635152362:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.565922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d6/r3tmp/tmp5QuBhP/pdisk_1.dat 2025-04-17T18:13:16.918566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28653, node 1 2025-04-17T18:13:16.974849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.974971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.976643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.987232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.987249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.987255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.987355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.404751Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.408364Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.408407Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.409740Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:19890, port: 19890 2025-04-17T18:13:17.409837Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:17.466724Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:17.510513Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:17.511057Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:17.511133Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.554553Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: 
memberOf 2025-04-17T18:13:17.598481Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:17.600075Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****0OWg (8272CB97) () has now valid token of ldapuser@ldap 2025-04-17T18:13:21.565951Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346829635152362:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:21.566028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:22.601652Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****0OWg (8272CB97) 2025-04-17T18:13:22.601720Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:19890, port: 19890 2025-04-17T18:13:22.601757Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:22.654581Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:22.654917Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:19890 return no entries 2025-04-17T18:13:22.655232Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****0OWg (8272CB97) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:19890 return no entries)' 2025-04-17T18:13:26.603467Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****0OWg (8272CB97) 2025-04-17T18:13:28.184908Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346883899883181:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:28.185000Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d6/r3tmp/tmpW3CkCf/pdisk_1.dat 2025-04-17T18:13:28.319511Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:28.346819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:28.346918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:28.348608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30602, node 2 2025-04-17T18:13:28.383578Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:28.383607Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:28.383613Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:28.383743Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:28.644496Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:28.647604Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:28.647629Z node 2 
:TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:28.648319Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10671, port: 10671 2025-04-17T18:13:28.648409Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:28.702685Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:28.704182Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:10671. Server is busy 2025-04-17T18:13:28.704600Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****l3Zg (F14CBFCA) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:10671. Server is busy)' 2025-04-17T18:13:28.704866Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:28.704889Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:28.705695Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10671, port: 10671 2025-04-17T18:13:28.705771Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:28.758738Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:28.759243Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:10671. Server is busy 2025-04-17T18:13:28.759735Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****l3Zg (F14CBFCA) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:10671. Server is busy)' 2025-04-17T18:13:31.190373Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****l3Zg (F14CBFCA) 2025-04-17T18:13:31.190746Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:31.190783Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:31.191793Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10671, port: 10671 2025-04-17T18:13:31.191883Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:31.246719Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:31.247228Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:10671. Server is busy 2025-04-17T18:13:31.247680Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****l3Zg (F14CBFCA) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:10671. 
Server is busy)' 2025-04-17T18:13:33.185317Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494346883899883181:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:33.185409Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:34.192829Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****l3Zg (F14CBFCA) 2025-04-17T18:13:34.193125Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:34.193151Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:34.193852Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10671, port: 10671 2025-04-17T18:13:34.194020Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:34.250606Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:34.298501Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:34.299037Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:34.299082Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:34.342501Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:34.390555Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:34.391600Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****l3Zg (F14CBFCA) () has now valid token of ldapuser@ldap 2025-04-17T18:13:39.352550Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346928109332291:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:39.352606Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d6/r3tmp/tmpXtOkFX/pdisk_1.dat 2025-04-17T18:13:39.451946Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7090, node 3 2025-04-17T18:13:39.500546Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:39.500626Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:39.503251Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:39.576844Z node 3 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:39.576874Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:39.576883Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:39.577006Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:39.726323Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:39.729189Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:39.729208Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:39.729942Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:27141, port: 27141 2025-04-17T18:13:39.730017Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:39.744766Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:39.786726Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:39.830542Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:39.879063Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****_-iw (C74814EF) () has now valid token of ldapuser@ldap 2025-04-17T18:13:42.737093Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346944629176167:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:42.737176Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d6/r3tmp/tmpDyDLLJ/pdisk_1.dat 2025-04-17T18:13:42.846005Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:42.873284Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:42.873363Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:42.889002Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13689, node 4 2025-04-17T18:13:42.936048Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:42.936071Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:42.936076Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:42.936192Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:43.076354Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:43.078958Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:43.078987Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:43.079684Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3664, port: 3664 2025-04-17T18:13:43.079988Z node 4 :LDAP_AUTH_PROVIDER 
DEBUG: start TLS 2025-04-17T18:13:43.090549Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:43.134609Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:43.178925Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****Qvpw (0D5FBDB2) () has now valid token of ldapuser@ldap 2025-04-17T18:13:46.028222Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494346960635832372:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:46.028286Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d6/r3tmp/tmpTWFGKr/pdisk_1.dat 2025-04-17T18:13:46.151858Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:46.178821Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:46.178890Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:46.180579Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17908, node 5 2025-04-17T18:13:46.232373Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:46.232399Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:46.232406Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:46.232581Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:46.354305Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:46.356641Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:46.356660Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:46.357219Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:11346, port: 11346 2025-04-17T18:13:46.357268Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:46.366957Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:46.413356Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-17T18:13:46.454598Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:46.455195Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:46.455247Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:46.502587Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:46.546521Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-17T18:13:46.547624Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Gh2Q (2C52138F) () has now valid token of ldapuser@ldap 2025-04-17T18:13:49.463945Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346971364211820:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:49.464069Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014d6/r3tmp/tmporynEK/pdisk_1.dat 2025-04-17T18:13:49.559520Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1902, node 6 2025-04-17T18:13:49.599934Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:49.600043Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:49.601865Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:49.623159Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:49.623189Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:49.623196Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:49.623322Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:49.701140Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:49.704885Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:49.704916Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:49.705621Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:25355, port: 25355 2025-04-17T18:13:49.705706Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:49.713616Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:49.758746Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-17T18:13:49.758818Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:25355. Bad search filter 2025-04-17T18:13:49.759298Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****DvBA (BA90CF13) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:25355. 
Bad search filter)' >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig >> TargetDiscoverer::Basic >> Secret::DeactivatedQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-04-17T18:13:36.884764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:36.885278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:36.885329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00198c/r3tmp/tmpkd6Mdh/pdisk_1.dat 2025-04-17T18:13:37.276992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.328229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:37.372330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:37.373047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:37.385581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:37.474953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.528896Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-17T18:13:37.529161Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.576996Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.577132Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.578849Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:13:37.578929Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:13:37.578999Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:13:37.579352Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.579586Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.579674Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2578] in generation 1 2025-04-17T18:13:37.581052Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-04-17T18:13:37.581262Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.590453Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.590558Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.591887Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:13:37.591950Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:13:37.592013Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:13:37.592287Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.592375Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.592446Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-17T18:13:37.606836Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.645952Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:13:37.646222Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.646369Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-17T18:13:37.646418Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.646451Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:13:37.646492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.646779Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.646822Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:13:37.646883Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.646988Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-17T18:13:37.647027Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:13:37.647052Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:13:37.647089Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.647508Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:13:37.647617Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:13:37.647809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.647867Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.647924Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:13:37.647969Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.648021Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:13:37.648102Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:13:37.648472Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:690:2585], sessionId# [0:0:0] 2025-04-17T18:13:37.648542Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.648569Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.648593Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:13:37.648642Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:13:37.648770Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-17T18:13:37.649012Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:13:37.649138Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:13:37.651029Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.661740Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:37.661882Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.705337Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-17T18:13:37.705507Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:13:37.705762Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:37.705845Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-17T18:13:37.706342Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-17T18:13:37.717124Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:13:37.717230Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.835905Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:742:2620], sessionId# [0:0:0] 2025-04-17T18:13:37.836170Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-17T18:13:37.845799Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-17T18:13:37.845883Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:13:37.846328Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.846371Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.846423Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-17T18:13:37.846643Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:37.846771Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:37.847100Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:37.847132Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T18:13:37.847207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:13:37.847262Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:37.848945Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:37.849432Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.850923Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.850976Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.851014Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:37.851238Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:51.905266Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:51.905342Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:51.905579Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:51.905699Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:51.905844Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:51.905887Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:51.906357Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:51.906682Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:51.908820Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:51.908874Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:51.909180Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:51.909272Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:51.910089Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:51.910133Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:51.910209Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:51.910288Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 
2025-04-17T18:13:51.910331Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:51.910402Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:51.911511Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.912407Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:51.912455Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:51.913066Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:51.921767Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:51.921895Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:51.921933Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-17T18:13:51.921968Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-17T18:13:51.922545Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:51.945844Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:52.122662Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:52.122786Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.123030Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:52.123074Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:52.123114Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:52.123256Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-17T18:13:52.123354Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:52.123559Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:52.124122Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:52.157745Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T18:13:52.157855Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:52.157891Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:52.157928Z node 4 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.158004Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:52.158058Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-17T18:13:52.158133Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.160379Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-17T18:13:52.160468Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:52.165953Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.166025Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.166125Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.170703Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:13:52.175133Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:52.329680Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:52.331811Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:13:52.357459Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:52.414637Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2fa2x4565cdvp7p4jetz8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MTFiNjlmYWItMjJlZWU4YmMtMjNhYzMzZTMtYzcwN2U0MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:52.417141Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:987:2786], serverId# [4:988:2787], sessionId# [0:0:0] 2025-04-17T18:13:52.417535Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-17T18:13:52.417819Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744913632417720 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:52.418025Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-17T18:13:52.429045Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:52.429128Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.474223Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2fa35ga0g3spqdxdrs316k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDBiZDQ2OTAtN2YwZmIwYjAtYjIwMTgyZGYtMWQzMTZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:13:52.475662Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-17T18:13:52.475869Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744913632475794 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:52.475982Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-17T18:13:52.486711Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:52.486769Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.488164Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2805], serverId# [4:1016:2806], sessionId# [0:0:0] 2025-04-17T18:13:52.493321Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1017:2807], serverId# [4:1018:2808], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: 2025-04-17T18:13:39.454649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2364], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001aae/r3tmp/tmp4ltWfR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20441, node 1 TClient is connected to server localhost:27949 2025-04-17T18:13:40.539907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:40.594134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:40.601129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:40.601196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:40.601229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:40.601516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:40.640664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:40.641425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:40.654643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-17T18:13:52.715532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:686:2576], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.715665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing CREATE OBJECT SECRET
<main>: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> YdbTableSplit::SplitByLoadWithReads [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-04-17T18:13:36.952820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:36.953276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:36.953314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0019be/r3tmp/tmpUGYXCp/pdisk_1.dat 2025-04-17T18:13:37.308565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.349448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:37.352591Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-17T18:13:37.388817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:37.388953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:37.400109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:37.482557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.527619Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:13:37.527852Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.574801Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.574929Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.577331Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:13:37.577428Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:13:37.577481Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:13:37.578716Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.578825Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.578898Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:13:37.589639Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.617811Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:13:37.618009Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.618128Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:13:37.618165Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.618220Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:13:37.618257Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T18:13:37.618703Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:13:37.618821Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:13:37.618927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.618980Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.619033Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:13:37.619073Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.619163Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:13:37.619565Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:37.619784Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:13:37.619866Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:13:37.621541Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.632258Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:37.632367Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.781661Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:13:37.791040Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:37.791125Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.791835Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.791888Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.791962Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:37.792228Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:37.792382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:37.793168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.793266Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:37.795274Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:37.795675Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.797152Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:37.797205Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.797745Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:37.797808Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.798998Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.799043Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.799094Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:37.799147Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:37.799209Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:37.799309Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.802679Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.804614Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:37.805211Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:37.805287Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:37.815639Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:37.815767Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:37.815842Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-17T18:13:37.815882Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-17T18:13:37.817321Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.841377Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:38.109857Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:38.109937Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:38.110254Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:38.110306Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-17T18:13:38.110356Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:38.110553Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-17T18:13:38.110686Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:38.110958Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:38.112883Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:38.174124Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T18:13:38.174274Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:38.174330Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:38.174385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:52.058760Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:52.058809Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:52.059049Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:52.059153Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:52.059327Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:52.059380Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:52.059825Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:52.060217Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:52.062347Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:52.062407Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.062728Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:52.062791Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:52.063622Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:52.063655Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:52.063726Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:52.063805Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:52.063862Z node 4 
:TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:52.063935Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.065013Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:52.066040Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:52.066095Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:52.066763Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:52.074350Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:52.074477Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:52.074521Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-17T18:13:52.074555Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-17T18:13:52.075338Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:52.098751Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:52.309563Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:52.309642Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.309891Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:52.309945Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:52.309997Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:52.310249Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-17T18:13:52.310397Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:52.310661Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:52.311386Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:52.347204Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T18:13:52.347308Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:52.347355Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:52.347402Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.347482Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:52.347543Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-17T18:13:52.347649Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.349836Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-17T18:13:52.349898Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:52.356109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.356206Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.356292Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.361171Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:13:52.366584Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:52.521714Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:52.524409Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:13:52.549523Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:52.598965Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2fa332a1md1by765bjrq9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTkzYjJhZjUtZTM2ODdmYjctYmIxNDIzYWItZDVmODhlOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:52.600958Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:987:2786], serverId# [4:988:2787], sessionId# [0:0:0] 2025-04-17T18:13:52.601308Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-17T18:13:52.601589Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744913632601490 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:52.601756Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-17T18:13:52.612698Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:52.612765Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.673850Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2fa3b78v1ptdzem7sbyzaa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZGYzMTRiMGEtNWFiZTc2OGItYzIyODliOC01YjZkMTc0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:13:52.675413Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-17T18:13:52.675637Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744913632675544 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:52.675755Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-17T18:13:52.686701Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:52.686768Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:52.688537Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2805], serverId# [4:1016:2806], sessionId# [0:0:0] 2025-04-17T18:13:52.694588Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1017:2807], serverId# [4:1018:2808], sessionId# [0:0:0] >> TargetDiscoverer::Transfer >> KqpWorkload::KV [GOOD] >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize >> TargetDiscoverer::Dirs ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: 2025-04-17T18:13:40.694090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2364], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a73/r3tmp/tmplTsjl7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17663, node 1 TClient is connected to server localhost:22324 2025-04-17T18:13:41.211493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:41.248778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:41.252778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:41.252843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:41.252873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:41.253191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:41.292222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:41.292388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:41.307474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-17T18:13:52.983348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:683:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.983455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:693:2580], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.983821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.988347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T18:13:53.005263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:697:2583], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T18:13:53.051296Z node 1 :TX_PROXY ERROR: Actor# [1:748:2615] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:53.345647Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:758:2624], status: GENERIC_ERROR, issues:
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing CREATE OBJECT SECRET
<main>: Error: metadata provider service is disabled 2025-04-17T18:13:53.347696Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmExM2ViNjgtOGQ3YTk5YmUtZmUwODMxN2MtMjkxMTFiZA==, ActorId: [1:681:2573], ActorState: ExecuteState, TraceId: 01js2fa3pg84djtxjpmt3ebd98, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2025-04-17T18:13:25.251276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346870664408993:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:25.253760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001439/r3tmp/tmppOrBOn/pdisk_1.dat 2025-04-17T18:13:25.594984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:25.634796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:25.635302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:25.639389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6164, node 1 2025-04-17T18:13:25.766229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:25.766255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:25.766264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:25.766370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:26.135244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:25847 2025-04-17T18:13:27.898273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346879254344634:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:27.898414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.133827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.274037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312109:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.274105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.291334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608256 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608256 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T18:13:28.359206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312211:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312235:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312238:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312240:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312244:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312239:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312241:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.359946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312242:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.361209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312271:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.361253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312278:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.361299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.361781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312296:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.363260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312318:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.363305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.363312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883549312321:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.363653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.363882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710664:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:13:28.363920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710664:1, at schemeshard: 72057594046644480 20 ... 807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.384402Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463221Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730509, task: 1, CA Id [1:7494346969448807720:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463222Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730514, task: 1, CA Id [1:7494346969448807806:2383]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463265Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730511, task: 1, CA Id [1:7494346969448807752:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463277Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463288Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730517, task: 1, CA Id [1:7494346969448807821:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463300Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730515, task: 1, CA Id [1:7494346969448807814:2385]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463307Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730512, task: 1, CA Id [1:7494346969448807753:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.463324Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730508, task: 1, CA Id [1:7494346969448807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.576333Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730515, task: 1, CA Id [1:7494346969448807814:2385]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.576375Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730514, task: 1, CA Id [1:7494346969448807806:2383]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.576382Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730508, task: 1, CA Id [1:7494346969448807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.612688Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730517, task: 1, CA Id [1:7494346969448807821:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.612696Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.612729Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730512, task: 1, CA Id [1:7494346969448807753:2379]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.612742Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730511, task: 1, CA Id [1:7494346969448807752:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.612745Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730509, task: 1, CA Id [1:7494346969448807720:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.801619Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730512, task: 1, CA Id [1:7494346969448807753:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.801631Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730514, task: 1, CA Id [1:7494346969448807806:2383]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.845644Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730517, task: 1, CA Id [1:7494346969448807821:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.845674Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730508, task: 1, CA Id [1:7494346969448807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.845701Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.884561Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730511, task: 1, CA Id [1:7494346969448807752:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.884622Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730515, task: 1, CA Id [1:7494346969448807814:2385]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:48.959332Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730509, task: 1, CA Id [1:7494346969448807720:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.156449Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730512, task: 1, CA Id [1:7494346969448807753:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.302926Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730517, task: 1, CA Id [1:7494346969448807821:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.302951Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730511, task: 1, CA Id [1:7494346969448807752:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.302977Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730514, task: 1, CA Id [1:7494346969448807806:2383]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.302988Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730508, task: 1, CA Id [1:7494346969448807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.380251Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.380251Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730515, task: 1, CA Id [1:7494346969448807814:2385]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.431943Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730509, task: 1, CA Id [1:7494346969448807720:2381]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.845145Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730512, task: 1, CA Id [1:7494346969448807753:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.945964Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730508, task: 1, CA Id [1:7494346969448807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:49.980327Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730511, task: 1, CA Id [1:7494346969448807752:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.020818Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730509, task: 1, CA Id [1:7494346969448807720:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.068826Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.132826Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730514, task: 1, CA Id [1:7494346969448807806:2383]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.211390Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730517, task: 1, CA Id [1:7494346969448807821:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.211424Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730515, task: 1, CA Id [1:7494346969448807814:2385]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.519394Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730512, task: 1, CA Id [1:7494346969448807753:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.736978Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.737001Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730511, task: 1, CA Id [1:7494346969448807752:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.737031Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730514, task: 1, CA Id [1:7494346969448807806:2383]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.856885Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730517, task: 1, CA Id [1:7494346969448807821:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.856912Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730515, task: 1, CA Id [1:7494346969448807814:2385]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.934144Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730509, task: 1, CA Id [1:7494346969448807720:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:50.934158Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730508, task: 1, CA Id [1:7494346969448807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.382613Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730512, task: 1, CA Id [1:7494346969448807753:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.500902Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730514, task: 1, CA Id [1:7494346969448807806:2383]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.534674Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730511, task: 1, CA Id [1:7494346969448807752:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.565962Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730515, task: 1, CA Id [1:7494346969448807814:2385]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.599735Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730508, task: 1, CA Id [1:7494346969448807716:2362]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.748210Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730513, task: 1, CA Id [1:7494346969448807808:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.881892Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730517, task: 1, CA Id [1:7494346969448807821:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-17T18:13:51.913126Z node 1 :KQP_COMPUTE WARN: TxId: 281474976730509, task: 1, CA Id [1:7494346969448807720:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608256 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) Table has 2 shards >> TargetDiscoverer::Negative >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 23088, MsgBus: 16268 2025-04-17T18:12:52.105222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346727022041693:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:52.105508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001645/r3tmp/tmpeah4U9/pdisk_1.dat 2025-04-17T18:12:52.379085Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23088, node 1 2025-04-17T18:12:52.459365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:52.459387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:52.459398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:52.459511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:52.466309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:52.466399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:52.468096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16268 TClient is connected to server localhost:16268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:52.885453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:54.735921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346735611976951:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.736034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:54.943764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:12:55.395488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346739906945881:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.395555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.395710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346739906945886:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:55.398947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:12:55.406614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346739906945888:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:12:55.484102Z node 1 :TX_PROXY ERROR: Actor# [1:7494346739906945975:3407] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:57.106142Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346727022041693:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:57.106229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:07.378661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:13:07.378690Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.105969s took: 0.106202s took: 0.107096s took: 0.107262s took: 0.107253s took: 0.107698s took: 0.108448s took: 0.109116s took: 0.110317s took: 0.114626s took: 0.100307s took: 0.100860s took: 0.102355s took: 0.106021s took: 0.106452s took: 0.104869s took: 0.104700s took: 0.106829s took: 0.108731s took: 0.106879s took: 0.198995s took: 0.199612s took: 0.203225s took: 0.200949s took: 0.203643s took: 0.204874s took: 0.205555s took: 0.206352s took: 0.206980s took: 0.208928s took: 0.025009s took: 0.028085s took: 0.037824s took: 0.042541s took: 0.040473s took: 0.043013s took: 0.040511s took: 0.040622s took: 0.041616s took: 0.121238s took: 0.101688s took: 0.105257s took: 0.105305s took: 0.105508s took: 0.105655s took: 0.107662s took: 0.107862s took: 0.108053s took: 0.133310s took: 0.134980s 2025-04-17T18:13:53.695848Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-17T18:13:53.696001Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-17T18:13:53.696028Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-17T18:13:53.696046Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-17T18:13:53.696064Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-17T18:13:53.696082Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-17T18:13:53.696101Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-04-17T18:13:53.696117Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-17T18:13:53.696136Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-17T18:13:53.696152Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-17T18:13:53.696183Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-17T18:13:53.696208Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-17T18:13:53.696234Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-04-17T18:13:53.696255Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-04-17T18:13:53.696272Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-17T18:13:53.696290Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-17T18:13:53.696305Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-17T18:13:53.696325Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-04-17T18:13:53.696347Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-04-17T18:13:53.696367Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-17T18:13:53.696385Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-04-17T18:13:53.696403Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-04-17T18:13:53.696421Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-17T18:13:53.696438Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-17T18:13:53.696460Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:13:53.711964Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T18:13:53.712006Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-17T18:13:53.712024Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-17T18:13:53.712039Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-17T18:13:53.712053Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-17T18:13:53.712070Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-04-17T18:13:53.712086Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-17T18:13:53.712106Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-17T18:13:53.712123Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-04-17T18:13:53.712139Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-17T18:13:53.712155Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:13:53.712169Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-04-17T18:13:53.712183Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-17T18:13:53.712197Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-17T18:13:53.712215Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> DataCleanup::CleanupDataNoTables [GOOD] >> DataCleanup::CleanupDataNoTablesWithRestart >> TChargeBTreeIndex::NoNodes_Groups >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> Bloom::Conf [GOOD] >> Bloom::Hashes >> DataCleanup::CleanupDataNoTablesWithRestart [GOOD] >> DataCleanup::CleanupDataLog [GOOD] >> DataCleanup::CleanupData [GOOD] >> DataCleanup::CleanupDataMultipleFamilies >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets >> Bloom::Hashes [GOOD] >> Bloom::Rater >> DataCleanup::CleanupDataMultipleFamilies [GOOD] >> DataCleanup::CleanupDataMultipleTables >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> DataCleanup::CleanupDataMultipleTables [GOOD] >> DataCleanup::CleanupDataWithFollowers [GOOD] >> DataCleanup::CleanupDataMultipleTimes >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> DataCleanup::CleanupDataMultipleTimes [GOOD] >> DataCleanup::CleanupDataEmptyTable [GOOD] >> DataCleanup::CleanupDataWithRestarts >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets >> Bloom::Rater [GOOD] >> Bloom::Dipping >> DataCleanup::CleanupDataWithRestarts [GOOD] >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations [GOOD] >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::WideKey >> TScreen::Sequential [GOOD] >> TScreen::Random >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> 
TFlatTableExecutor_RejectProbability::SomeRejectProbability >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> TargetDiscoverer::InvalidCredentials [GOOD] >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> TargetDiscoverer::SystemObjects [GOOD] >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TargetDiscoverer::IndexedTable [GOOD] >> DBase::WideKey [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> TargetDiscoverer::Basic [GOOD] >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:52.545162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:52.545264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:52.545298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:52.545342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:52.546211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:52.546242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:52.546310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:52.546379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:52.547672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:52.635582Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:52.635633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:52.651953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:52.652407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:52.652607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:52.663048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:52.663242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:52.667017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:52.668185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:52.673919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:52.680162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:52.680247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:52.680929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:52.680968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:52.681017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:52.681134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.686884Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:52.811421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:52.812990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.813986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:52.815208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:52.815278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.817855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:52.817955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:52.818091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.818209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:52.818243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:52.818289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:52.819790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.819836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:52.819869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:52.821219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.821256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.821290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:52.821350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:52.825152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:52.826800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:52.827747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:52.828613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:52.828723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:52.828767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:52.829974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:52.830047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:52.830225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:52.830305Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:52.832837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:52.832884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:52.833051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:52.833088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:52.833437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.833489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:52.833577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:52.833628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:52.833662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:52.833703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:52.833734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:52.833766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:52.833797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:52.833825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:52.833882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:52.833916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:52.833944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:52.835646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:52.835754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:52.835795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:56.404245Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:56.404436Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:56.405342Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:56.405469Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 25769805931 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:56.405525Z node 6 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:56.405796Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:56.405857Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:56.406052Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:56.406137Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:56.407971Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:56.408031Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:56.408251Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:56.408307Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:56.408645Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:56.408702Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:56.408830Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:56.408876Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:56.408925Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:56.408968Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:56.409011Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2025-04-17T18:13:56.409060Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:56.409109Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:56.409145Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:56.409234Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:56.409287Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:56.409329Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:56.409885Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:56.410014Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:56.410058Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:13:56.410103Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:13:56.410152Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:56.410279Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:13:56.413052Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:13:56.413588Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:13:56.414517Z node 6 :TX_PROXY DEBUG: actor# [6:271:2262] Bootstrap 2025-04-17T18:13:56.436538Z node 6 :TX_PROXY DEBUG: actor# [6:271:2262] Become StateWork (SchemeCache [6:276:2267]) 2025-04-17T18:13:56.436976Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:275:2266], Recipient [6:125:2151]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-17T18:13:56.437031Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:13:56.439768Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:56.440017Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateReplication Propose: opId# 101:0, path# /MyRoot/Transfer 2025-04-17T18:13:56.440115Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, 
at schemeshard: 72057594046678944 2025-04-17T18:13:56.440340Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:13:56.441072Z node 6 :TX_PROXY DEBUG: actor# [6:271:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:13:56.448915Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:56.449136Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-04-17T18:13:56.449226Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:13:56.449903Z node 6 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:13:56.450165Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:13:56.450243Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:13:56.450637Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:288:2279], Recipient [6:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:13:56.450701Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:13:56.450746Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T18:13:56.450913Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:285:2276], Recipient [6:125:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 101 2025-04-17T18:13:56.450951Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T18:13:56.451031Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:13:56.451142Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:13:56.451187Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:286:2277] 2025-04-17T18:13:56.451389Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:288:2279], Recipient [6:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:13:56.451428Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:13:56.451466Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-17T18:13:56.451820Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:289:2280], Recipient [6:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T18:13:56.451879Z node 6 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T18:13:56.451985Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:13:56.452188Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 208us result status StatusPathDoesNotExist 2025-04-17T18:13:56.452358Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> TargetDiscoverer::Transfer [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-04-17T18:13:53.697941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346990481366922:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:53.698217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001489/r3tmp/tmpfgcpD4/pdisk_1.dat 2025-04-17T18:13:54.053084Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:54.126517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:54.126674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:54.127958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:28024 TServer::EnableGrpc on GrpcPort 23770, node 1 2025-04-17T18:13:54.403936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:54.403965Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:54.403985Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:54.404139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:54.909476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:54.932243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:55.230470Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
<main>: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } } } 2025-04-17T18:13:55.230521Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
<main>: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::CreateTempTable >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> KqpExtractPredicateLookup::SimpleRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-04-17T18:13:53.677283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346992349165339:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:53.677324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001495/r3tmp/tmpgVNrJ6/pdisk_1.dat 2025-04-17T18:13:54.047177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:54.074171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:54.074300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:54.079100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31207 TServer::EnableGrpc on GrpcPort 27751, node 1 2025-04-17T18:13:54.403926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:54.403955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:54.403965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:54.404123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:54.907793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:13:54.931720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:55.285544Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913634968, tx_id: 1 } } } 2025-04-17T18:13:55.285584Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-17T18:13:55.293279Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635241, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-17T18:13:55.293329Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-17T18:13:56.420672Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635241, tx_id: 281474976710658 } } } 2025-04-17T18:13:56.420711Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-17T18:13:56.420728Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-04-17T18:13:56.420815Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-04-17T18:13:53.677315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346991648697987:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:53.677352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00148f/r3tmp/tmpiTkx6d/pdisk_1.dat 2025-04-17T18:13:54.055814Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:54.089545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:54.089648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:54.091702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32551 TServer::EnableGrpc on GrpcPort 2051, node 1 2025-04-17T18:13:54.403954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:54.403978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:54.403990Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:54.404133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32551 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:54.918464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:54.942577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:13:55.070989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:13:55.178871Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913634982, tx_id: 1 } } } 2025-04-17T18:13:55.178946Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-17T18:13:55.195336Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635059, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913635108, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-17T18:13:55.195379Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-17T18:13:56.506141Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635059, tx_id: 281474976710658 } } } 2025-04-17T18:13:56.506196Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-17T18:13:56.506241Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# 
/Root/Replicated/Table, kind# Table >> KqpNewEngine::JoinIdxLookup >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> KqpNotNullColumns::ReplaceNotNull >> KqpAgg::AggWithLookup >> KqpNewEngine::SimpleUpsertSelect >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> KqpNewEngine::PureExpr >> KqpReturning::ReturningWorksIndexedUpsert+QueryService >> IndexBuildTest::CancellationNoTable [GOOD] >> KqpNewEngine::MultiSelect >> KqpNewEngine::PkSelect1 >> KqpNewEngine::PrunePartitionsByLiteral >> TargetDiscoverer::Dirs [GOOD] >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-04-17T18:13:54.112849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346995596620806:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:54.112963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001483/r3tmp/tmpM2wDYH/pdisk_1.dat 2025-04-17T18:13:54.410071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:54.490249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:54.490373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:54.492391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19504 TServer::EnableGrpc on GrpcPort 9328, node 1 2025-04-17T18:13:54.661551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:54.661579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:54.661589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:54.661786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19504 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:55.014479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:55.032185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:55.178863Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913635066, tx_id: 1 } } } 2025-04-17T18:13:55.178898Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-17T18:13:55.188851Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635136, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-17T18:13:55.188979Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-17T18:13:56.833630Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635136, tx_id: 281474976710658 } } } 2025-04-17T18:13:56.833663Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-17T18:13:56.833717Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> 
TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> TTransferTests::Alter [GOOD] >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-04-17T18:13:54.641895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346995363417388:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:54.642233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00147e/r3tmp/tmpujXHrt/pdisk_1.dat 2025-04-17T18:13:54.961715Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:55.015908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:55.016133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:55.017760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1989 TServer::EnableGrpc on GrpcPort 29827, node 1 2025-04-17T18:13:55.156872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:55.156889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:55.156894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:55.157018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:55.457046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:13:55.616103Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1744913635591, tx_id: 281474976710658 } } } 2025-04-17T18:13:55.616129Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-04-17T18:13:55.651061Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-17T18:13:55.651134Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-04-17T18:13:55.651175Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> TargetDiscoverer::Negative [GOOD] >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> CdcStreamChangeCollector::OldImage [GOOD] >> KqpSort::TopSortTableExprOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-04-17T18:13:54.887749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346994288686825:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:54.887949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001476/r3tmp/tmpEz2CKu/pdisk_1.dat 2025-04-17T18:13:55.235081Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:55.274734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:55.274897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:55.276786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4456 TServer::EnableGrpc on GrpcPort 26949, node 1 2025-04-17T18:13:55.471617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:55.471633Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:55.471639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:55.471726Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4456 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:55.814216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:13:55.842041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:55.964021Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913635871, tx_id: 1 } } } 2025-04-17T18:13:55.964072Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-17T18:13:55.970669Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744913635885, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-17T18:13:55.970724Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-17T18:13:55.977056Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635948, tx_id: 281474976710659 } }] } } 2025-04-17T18:13:55.977083Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-04-17T18:13:57.485506Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744913635948, tx_id: 281474976710659 } } } 2025-04-17T18:13:57.485555Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-04-17T18:13:57.485575Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> DBase::VersionPureParts [GOOD] >> 
DBase::VersionCompactedMem >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:52.545133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:52.545297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:52.545333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:52.545382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:52.546215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:52.546265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:52.546352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:52.546416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:52.547712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:52.635591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:52.635642Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:52.651952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:52.652420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:52.652599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:52.663059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:52.663343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:52.667017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:52.668225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:52.674443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:52.680192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:52.680276Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:52.680951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:52.681004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:52.681059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:52.681225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.687746Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:13:52.812286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:52.813478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.814008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:52.815215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:52.815284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.817871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:52.817974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:52.818094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.818133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:52.818162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:52.818226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:52.819662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.819698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:52.819723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:52.820946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.820975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T18:13:52.821001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:52.821055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:52.825008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:52.826921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:52.827748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:52.828487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:52.828603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:52.828640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:52.829910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:52.829963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:52.830117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:52.830216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:52.832843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:52.832901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:52.833058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:52.833090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:52.833458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:52.833499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:52.833584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:52.833632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-04-17T18:13:52.833685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:52.833713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:52.833747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:52.833799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:52.833828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:52.833852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:52.833906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:52.833937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:52.833965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:52.847142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:52.847278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:52.847319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Id, TxId: 103, partId: 0, tablet: 72075186233409546 2025-04-17T18:13:58.324055Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186233409546] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-04-17T18:13:58.324376Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-04-17T18:13:58.324513Z node 6 :REPLICATION_CONTROLLER NOTICE: [controller 72075186233409546][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:13:58.325885Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Complete 2025-04-17T18:13:58.326064Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 276758531, Sender [6:309:2296], Recipient [6:125:2151]: NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T18:13:58.326103Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NReplication::TEvController::TEvAlterReplicationResult 2025-04-17T18:13:58.326219Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvAlterReplicationResult, at 
schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T18:13:58.326358Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T18:13:58.326447Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TConfigureParts opId# 103:0 HandleReply NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-17T18:13:58.326493Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-04-17T18:13:58.326601Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:13:58.326653Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:2 2025-04-17T18:13:58.328273Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:13:58.328322Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:13:58.328360Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-04-17T18:13:58.328479Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:125:2151], Recipient [6:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T18:13:58.328513Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T18:13:58.328559Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:13:58.328601Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 ProgressState 2025-04-17T18:13:58.328649Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:13:58.328696Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-17T18:13:58.328835Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:58.330605Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:13:58.330654Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-17T18:13:58.330748Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-17T18:13:58.330998Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [6:132:2155], Recipient [6:259:2250] 2025-04-17T18:13:58.331038Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:13:58.331109Z node 6 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:58.331231Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 25769805931 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:58.331282Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-17T18:13:58.331410Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-17T18:13:58.331607Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:13:58.331672Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:13:58.331738Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 2025-04-17T18:13:58.333141Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:13:58.333190Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000004 first txId#103 countTxs#1 2025-04-17T18:13:58.333255Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000004 2025-04-17T18:13:58.333293Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-04-17T18:13:58.333464Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:125:2151], Recipient [6:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T18:13:58.333498Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 103 2025-04-17T18:13:58.333585Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:58.333627Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:13:58.333871Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:58.333921Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-17T18:13:58.334383Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:13:58.334438Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T18:13:58.334544Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:13:58.334586Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:13:58.334621Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:13:58.334660Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:13:58.334699Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:13:58.334740Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is 
published: false 2025-04-17T18:13:58.334785Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:13:58.334828Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:13:58.334862Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:13:58.334978Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:13:58.335023Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-17T18:13:58.335057Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-04-17T18:13:58.335667Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:208:2210], Recipient [6:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 4 } 2025-04-17T18:13:58.335746Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T18:13:58.335823Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:13:58.335910Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-04-17T18:13:58.335952Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-17T18:13:58.335994Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T18:13:58.336032Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:13:58.336113Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-17T18:13:58.336151Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:13:58.348176Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:13:58.348448Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-17T18:13:58.348492Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:13:11.246537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:13:11.246627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.246669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:13:11.246724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:13:11.247684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:13:11.247715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:13:11.247778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:13:11.247857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:13:11.248868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:11.330326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:11.330371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:11.343730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:11.344033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:13:11.344192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:13:11.350699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:13:11.350877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:13:11.351461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.351657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:13:11.354021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.355186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.355256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.355412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:13:11.355456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.355511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:11.355641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.361330Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: 
[1:15:2062] 2025-04-17T18:13:11.467247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:11.467411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.467588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:11.467751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:11.467783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.469318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.469423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:11.469522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.469571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:11.469617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:11.469647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:11.471056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.471092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:11.471115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:11.472371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.472402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.472428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.472479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.474837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:11.475962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:11.476085Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:11.476693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:11.476783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:11.476817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.477052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:11.477086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:11.477202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:11.477254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:11.478523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:11.478558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:11.478665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:11.478688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:11.478927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:11.478966Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:11.479034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.479065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.479096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:11.479114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.479146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:11.479175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:11.479212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:11.479233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:11.479270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:11.479292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:11.479310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:11.480345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.480408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:11.480432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... shardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:57.725251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:13:57.725374Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:13:57.736162Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:125:2151] sender: [2:239:2058] recipient: [2:15:2062] 2025-04-17T18:13:57.748371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:13:57.748557Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:57.748719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:13:57.748885Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:13:57.748928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:57.750889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:57.750990Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:13:57.751174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:57.751230Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:13:57.751275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:13:57.751311Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:13:57.753395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T18:13:57.753450Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:13:57.753489Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:13:57.755796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:57.755832Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:57.755871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:57.755919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:13:57.756027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:13:57.757338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:13:57.757531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:13:57.758445Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:57.758538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:13:57.758573Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:57.758772Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:13:57.758810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:13:57.758941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:57.759007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:13:57.760454Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:13:57.760491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:13:57.760619Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:13:57.760646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:13:57.760998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:13:57.761048Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:13:57.761155Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:57.761191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:57.761246Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:13:57.761282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:57.761317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:13:57.761357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:13:57.761393Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:13:57.761426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:13:57.761491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:13:57.761530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:13:57.761568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:13:57.761917Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:57.762006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:13:57.762046Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:13:57.762085Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:13:57.762127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:13:57.762240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:13:57.764779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:13:57.765420Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-17T18:13:57.766042Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } 2025-04-17T18:13:57.766237Z node 2 :BUILD_INDEX NOTICE: 
TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 2025-04-17T18:13:57.766354Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-04-17T18:13:57.784719Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-04-17T18:13:57.785336Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:13:57.787110Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 TestWaitNotification wait txId: 101 2025-04-17T18:13:57.787546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:13:57.787585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:13:57.787897Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:13:57.788000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:13:57.788025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:281:2272] TestWaitNotification: OK eventTxId 101 2025-04-17T18:13:57.788335Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2025-04-17T18:13:57.788405Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::ThreeLeveledLRU >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> KqpNewEngine::InShardsWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-04-17T18:13:55.883296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346999519771239:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:55.883392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001474/r3tmp/tmpqV5KAX/pdisk_1.dat 2025-04-17T18:13:56.166516Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:56.236613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:56.236724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:56.238462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13093 TServer::EnableGrpc on GrpcPort 7007, node 1 2025-04-17T18:13:56.370577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:56.370621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:56.370636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:56.370749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:56.608337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:56.637142Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-04-17T18:13:56.637173Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } |91.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSort::ReverseOptimized >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History |91.8%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> KqpSqlIn::SimpleKey >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2025-04-17T18:13:36.868448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:36.869006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:36.869079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001995/r3tmp/tmpe2yIkU/pdisk_1.dat 2025-04-17T18:13:37.276018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.327963Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:37.337473Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-17T18:13:37.372716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:37.373037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:37.385453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:37.474875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:37.527614Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:13:37.527846Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:37.574536Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:37.574652Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:37.577179Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:13:37.577265Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:13:37.577326Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:13:37.578711Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:37.578851Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:37.578931Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:13:37.589641Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:37.633594Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:13:37.633772Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:37.633905Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:13:37.633948Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.633984Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:13:37.634020Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-17T18:13:37.634509Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:13:37.634629Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:13:37.634761Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.634803Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.634847Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:13:37.634892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.634982Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:13:37.635361Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:37.635594Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:13:37.635688Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:13:37.637414Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.648037Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:37.648138Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:37.796826Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:13:37.801429Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:37.801494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.802239Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.802290Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:37.802343Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:37.802573Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:37.802711Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:37.803554Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:37.803632Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:37.805772Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:37.806424Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:37.807994Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:37.808041Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.808580Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:37.808670Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.809880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:37.809932Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:37.809992Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:37.810053Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:37.810104Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:37.810442Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:37.814061Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.816117Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:37.816679Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:37.816759Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:37.826825Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:37.826977Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:37.827037Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-17T18:13:37.827071Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-17T18:13:37.828532Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:37.852707Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:38.112294Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:38.112365Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:38.112702Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:38.112756Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-17T18:13:38.112803Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:38.112971Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-17T18:13:38.113120Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:38.113387Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:38.114063Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:38.184814Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T18:13:38.184932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:38.184973Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:38.185040Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:57.748627Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:57.748681Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:57.748993Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:13:57.749161Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:57.749410Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:57.749481Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:13:57.749986Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:13:57.750604Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:57.753555Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:13:57.753617Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:57.754137Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:13:57.754256Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:57.755708Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:57.755756Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:13:57.755819Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:13:57.755909Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:57.755978Z node 4 
:TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:13:57.756076Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:57.758009Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:57.759789Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:13:57.759870Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:57.760960Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:13:57.774690Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:13:57.774862Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-17T18:13:57.774922Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-17T18:13:57.774962Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-17T18:13:57.775804Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:57.801103Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:13:58.051251Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:13:58.051320Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:58.051573Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:58.051627Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:58.051672Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-17T18:13:58.051852Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-17T18:13:58.051966Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:13:58.052189Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:13:58.052916Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:58.090698Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T18:13:58.090823Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:58.090868Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:13:58.090970Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:58.091089Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:13:58.091168Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-17T18:13:58.091282Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:58.094670Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-17T18:13:58.094767Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:13:58.103633Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:58.103756Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:58.103854Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:58.110466Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:13:58.117567Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:58.292582Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:13:58.296314Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:13:58.327369Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:58.399242Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2fa8pn9yhbbfv9fynscrny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2YyYWUwOS1mNTdhYzVlMi05NjFlNjQzYi0yZDE0ZTk1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:58.402125Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:987:2786], serverId# [4:988:2787], sessionId# [0:0:0] 2025-04-17T18:13:58.402544Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-17T18:13:58.402823Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744913638402718 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:58.403041Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-17T18:13:58.414302Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:58.414439Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:58.499879Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2fa90k16web6hv8bh8vnvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2VmZDI5ODUtY2JhMThiMTMtOGZmOTc4MjMtYmViY2JlNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:13:58.502151Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-17T18:13:58.502477Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744913638502355 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-17T18:13:58.502646Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-17T18:13:58.514298Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-17T18:13:58.514389Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:13:58.516568Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2805], serverId# [4:1016:2806], sessionId# [0:0:0] 2025-04-17T18:13:58.523212Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1017:2807], serverId# [4:1018:2808], sessionId# [0:0:0] >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> KqpNotNullColumns::UpsertNotNullPk >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> DBase::VersionCompactedParts [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck |91.9%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSharedPageCache::ThreeLeveledLRU [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> TSharedPageCache::S3FIFO >> BuildStatsFlatIndex::Single_Slices [GOOD] >> Memtable::Wreck [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> BuildStatsFlatIndex::Single_History >> Memtable::Erased >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits >> KqpRanges::NullInKey >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Memtable::Erased [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics >> BuildStatsFlatIndex::Single_History [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> KqpNewEngine::OnlineRO_Consistent >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky >> Charge::ByKeysBasics [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> KqpNotNullColumns::UpdateNotNullPk >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> Charge::ByRowsLimits [GOOD] >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> Charge::ByRowsLimitsReverse >> Charge::ByRowsLimitsReverse [GOOD] >> BuildStatsFlatIndex::Single_Groups >> DBase::Basics [GOOD] >> DBase::Defaults [GOOD] >> DBase::Garbage [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::Outer [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNotNullColumns::InsertNotNullPk >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::ClockPro >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> KqpNewEngine::StreamLookupWithView >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> TGroupMapperTest::NonUniformCluster [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:55.980105Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.017 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.019 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.019 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.019 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {146b, 4} 00000.019 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.019 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.019 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.019 II| FAKE_ENV: All BS storage groups are stopped 00000.019 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.019 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.004522Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: 
Starting storage for BS group 3 00000.016 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.017 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.017 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.017 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {292b, 8} 00000.017 II| FAKE_ENV: DS.1 gone, left {210b, 6}, put {210b, 6} 00000.017 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.017 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.017 II| FAKE_ENV: All BS storage groups are stopped 00000.017 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.017 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.026467Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.042 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.043 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 76b} miss {0 0b} 00000.043 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.043 II| FAKE_ENV: DS.1 gone, left {909b, 3}, put {1913b, 12} 00000.043 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {132b, 2} 00000.043 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.043 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1181b, 13} 00000.043 II| FAKE_ENV: All BS storage groups are stopped 00000.043 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.043 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.074977Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.033 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.034 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.034 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.034 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.034 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.034 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.034 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.034 II| FAKE_ENV: All BS storage groups are stopped 00000.034 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.034 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.114410Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.078 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.080 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.081 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs 
hit {18 513007b} miss {0 0b} 00000.081 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.081 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.081 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.081 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.081 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.081 II| FAKE_ENV: All BS storage groups are stopped 00000.081 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.081 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.201205Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.049 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.050 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} 00000.050 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.050 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1828b, 23} 00000.050 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.050 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.050 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.050 II| FAKE_ENV: All BS storage groups are stopped 00000.050 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.050 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.256705Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.034 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.036 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} 00000.036 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.036 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.036 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.036 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.036 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.036 II| FAKE_ENV: All BS storage groups are stopped 00000.036 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.036 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.298475Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.045 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.046 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} 00000.046 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.046 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1567b, 23} 00000.046 II| FAKE_ENV: DS.1 gone, left {529b, 
3}, put {197610b, 21} 00000.046 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: All BS storage groups are stopped 00000.046 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.046 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.350532Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.011 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.011 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.011 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.011 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.011 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.011 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.011 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.011 II| FAKE_ENV: All BS storage groups are stopped 00000.011 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.011 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.366428Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.057 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.057 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.058 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} 00000.058 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.058 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.058 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.058 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1494b, 23} 00000.058 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.058 II| FAKE_ENV: All BS storage groups are stopped 00000.058 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.058 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:56.429925Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.018 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.019 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.019 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.019 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {753b, 11} 00000.019 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 0 ... 
t32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 
ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + 
BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} 
Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> TSharedPageCache::ClockPro [GOOD] >> TSharedPageCache::ReplacementPolicySwitch >> TSharedPageCache::ReplacementPolicySwitch [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single >> KqpNewEngine::PureExpr [GOOD] >> KqpNewEngine::PureTxMixedWithDeferred >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial >> DataShardTxOrder::ZigZag_oo [GOOD] >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> KqpNewEngine::MultiSelect [GOOD] >> KqpNewEngine::MultiOutput >> KqpNewEngine::PrunePartitionsByLiteral [GOOD] >> KqpNewEngine::PrunePartitionsByExpr >> KqpSqlIn::SecondaryIndex_PgKey >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex >> KqpNewEngine::PkSelect1 [GOOD] >> KqpNewEngine::PkSelect2 >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate >> KqpNotNullColumns::UpsertNotNullPk [GOOD] >> KqpNotNullColumns::UpsertNotNullPkPg >> KqpRanges::UpdateMulti >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex >> KqpPg::TableSelect+useSink [GOOD] >> KqpPg::TableSelect-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2025-04-17T18:13:44.705319Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:13:44.719010Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, 
Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:13:44.719457Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:13:44.719682Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:13:44.766027Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:13:44.771593Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:13:44.850974Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:13:44.852668Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:13:44.852754Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:13:44.852884Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:13:44.853259Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:13:44.853752Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:13:44.853832Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:13:44.854161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:44.854262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:44.930516Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:13:44.960000Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:13:44.960247Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:13:44.960378Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:13:44.960416Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:13:44.960449Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:13:44.960484Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:13:44.960643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:13:44.960690Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:13:44.960927Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:13:44.961010Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:13:44.961109Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:13:44.961137Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:13:44.961175Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:13:44.961223Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:13:44.961267Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:13:44.961288Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:13:44.961317Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:13:44.961388Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:13:44.961418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:13:44.961464Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:13:44.967165Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:13:44.967248Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:13:44.967341Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:13:44.967468Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:13:44.967516Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:13:44.967599Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:13:44.967650Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:13:44.967706Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:13:44.967748Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:13:44.967774Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:13:44.968043Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:13:44.968079Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:13:44.968127Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:13:44.968164Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:13:44.968217Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:13:44.968254Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:13:44.968291Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:13:44.968320Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:13:44.968342Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:13:44.981603Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:13:44.981694Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:13:44.981736Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:13:44.981773Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: 
PREPARED 2025-04-17T18:13:44.981828Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:13:44.982402Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:13:44.982462Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:13:44.982504Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:13:44.982572Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:13:44.982597Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:13:44.982741Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:13:44.982779Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:13:44.982828Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:13:44.982869Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:13:44.986582Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:13:44.986646Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:13:44.986891Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:13:44.986928Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:13:44.986990Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:13:44.987032Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:13:44.987062Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:13:44.987103Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:13:44.987141Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:13:44.987188Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:13:44.987224Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:13:44.987282Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:13:44.987318Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:13:44.987492Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:13:44.987523Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:13:44.987546Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:13:44.987567Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 
9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:13:44.987588Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:13:44.987710Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:13:44.987734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:13:44.987772Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:13:44.987807Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:13:44.987853Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:13:44.987916Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:13:44.987950Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:13:44.987988Z node 1 :TX_DATA ... aitInRS 2025-04-17T18:14:03.607029Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:14:03.607053Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-17T18:14:03.607074Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:14:03.607096Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-17T18:14:03.607580Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-17T18:14:03.607659Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:14:03.607715Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:14:03.607742Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:14:03.607767Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-17T18:14:03.607795Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T18:14:03.608017Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-17T18:14:03.608056Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-17T18:14:03.608103Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-17T18:14:03.608146Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-17T18:14:03.608187Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-17T18:14:03.608211Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-17T18:14:03.608244Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-17T18:14:03.608290Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 
0 immediate 0 planned 0 2025-04-17T18:14:03.608330Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:14:03.608369Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:14:03.608409Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:14:03.608655Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:344:2311], Recipient [6:344:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:14:03.608695Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:14:03.608747Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-17T18:14:03.608780Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:14:03.608807Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:14:03.608851Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2025-04-17T18:14:03.608880Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-04-17T18:14:03.608911Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.608936Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-04-17T18:14:03.608961Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-04-17T18:14:03.609009Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-04-17T18:14:03.609728Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-04-17T18:14:03.609781Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.609805Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-04-17T18:14:03.609831Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-17T18:14:03.609858Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-04-17T18:14:03.609892Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.609913Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-17T18:14:03.609935Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-17T18:14:03.609959Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-04-17T18:14:03.610006Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-04-17T18:14:03.610038Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-04-17T18:14:03.610067Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-04-17T18:14:03.610106Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.610128Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-17T18:14:03.610151Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to 
execution unit BuildDataTxOutRS 2025-04-17T18:14:03.610263Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-04-17T18:14:03.610318Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.610341Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-04-17T18:14:03.610385Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-04-17T18:14:03.610412Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-04-17T18:14:03.610439Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.610460Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-04-17T18:14:03.610482Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-04-17T18:14:03.610503Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-04-17T18:14:03.610529Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.610549Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-04-17T18:14:03.610570Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-04-17T18:14:03.610591Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-04-17T18:14:03.610616Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.610636Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-17T18:14:03.610657Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-17T18:14:03.610683Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-17T18:14:03.610999Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-17T18:14:03.611034Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:14:03.611068Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.611084Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-17T18:14:03.611099Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-17T18:14:03.611117Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T18:14:03.611316Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-17T18:14:03.611348Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-17T18:14:03.611378Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-17T18:14:03.611407Z node 6 :TX_DATASHARD TRACE: Trying 
to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-17T18:14:03.611442Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-17T18:14:03.611466Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-17T18:14:03.611492Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-17T18:14:03.611522Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:14:03.611548Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:14:03.611575Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T18:14:03.611604Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T18:14:03.626398Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-17T18:14:03.626493Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-17T18:14:03.626578Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:14:03.626636Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-17T18:14:03.626718Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:14:03.626777Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T18:14:03.627235Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-17T18:14:03.627273Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-17T18:14:03.627310Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:14:03.627335Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-17T18:14:03.627378Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:14:03.627410Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex >> KqpNewEngine::InShardsWrite [GOOD] >> KqpNewEngine::Join >> KqpSort::ReverseOptimized [GOOD] >> KqpSort::ReverseOptimizedWithPredicate >> TMultiversionObjectMap::MonteCarlo [GOOD] >> KqpNotNullColumns::InsertNotNullPk [GOOD] >> KqpNotNullColumns::InsertNotNull >> KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] >> TSwitchableCache::Touch >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] >> TSwitchableCache::Touch [GOOD] >> 
TSwitchableCache::Erase [GOOD] >> TSwitchableCache::EvictNext [GOOD] >> TSwitchableCache::UpdateLimit [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD] >> TSwitchableCache::Switch_RotatePages_Force [GOOD] >> TSwitchableCache::Switch_RotatePages_Evicts [GOOD] >> TSwitchableCache::Switch_Touch [GOOD] >> TSwitchableCache::Switch_Erase [GOOD] >> TSwitchableCache::Switch_EvictNext [GOOD] >> TSwitchableCache::Switch_UpdateLimit [GOOD] >> TVersions::WreckHead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBase::KIKIMR_15598_Many_MemTables [GOOD] Test command err: 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 2% (actual 0%) key = (10, 11) value = 12 (actual 1 - 2% error) 2% (actual 0%) key = (16, 13) value = 24 (actual 2 - 4% error) 5% (actual 11%) key = (199, 74) value = 49 (actual 61 - -2% error) 4% (actual 2%) key = (253, 92) value = 73 (actual 74 - 0% error) 4% (actual 2%) key = (286, 103) value = 97 (actual 84 - 2% error) 4% (actual 8%) key = (418, 147) value = 120 (actual 125 - -1% error) 4% (actual 5%) key = (514, 179) value = 144 (actual 154 - -2% error) 5% (actual 4%) key = (577, 200) value = 169 (actual 174 - -1% error) 4% (actual 1%) key = (607, 210) value = 192 (actual 183 - 1% error) 4% (actual 8%) key = (742, 255) value = 214 (actual 226 - -2% error) 5% (actual 1%) key = (769, 264) value = 239 (actual 235 - 0% error) 4% (actual 2%) key = (811, 278) value = 262 (actual 248 - 2% error) 4% (actual 9%) key = (958, 327) value = 286 (actual 293 - -1% error) 5% (actual 5%) key = (1054, 359) value = 311 (actual 322 - -2% error) 4% (actual 2%) key = (1087, 370) value = 334 (actual 332 - 0% error) 4% (actual 4%) key = (1156, 393) value = 358 (actual 354 - 0% error) 4% (actual 8%) key = (1291, 438) value = 381 (actual 394 - -2% error) 4% (actual 1%) key = (1315, 446) value = 404 (actual 401 - 0% error) 4% (actual 3%) key = (1375, 466) value = 426 (actual 419 - 1% error) 4% (actual 10%) key = (1540, 521) value = 449 (actual 469 - -4% error) 3% (actual 1%) key = (1555, 526) value = 465 (actual 474 - -1% error) 3% (actual 2%) key = (1594, 539) value = 482 (actual 484 - 0% error) 1% (actual 2%) key = (1636, 553) value = 491 (actual 497 - -1% error) 1% (actual 0%) key = (1639, 554) value = 496 (actual 498 - 0% error) 0% (actual 0%) DataSizeHistogram: 2% (actual 13%) key = (10, 11) value = 950 (actual 5800 - -11% error) 2% (actual 0%) key = (16, 13) value = 1933 (actual 5800 - -9% error) 4% (actual 0%) key = (199, 74) value = 3866 (actual 5800 - -4% error) 4% (actual 9%) key = (253, 92) value = 5849 (actual 9821 - -9% error) 4% (actual 4%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 4% (actual 4%) key = (418, 147) value = 9825 (actual 13848 - -9% error) 4% (actual 4%) key = (514, 179) value = 11834 (actual 15888 - -9% error) 4% (actual 4%) key = (577, 200) value = 13865 (actual 17883 - -9% error) 4% (actual 4%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 4% (actual 4%) key = (742, 255) value = 17859 (actual 21881 - -9% error) 4% (actual 4%) key = (769, 264) value = 19882 (actual 23918 - -9% error) 4% (actual 0%) key = (811, 278) value = 
21897 (actual 23918 - -4% error) 4% (actual 9%) key = (958, 327) value = 23894 (actual 27913 - -9% error) 4% (actual 4%) key = (1054, 359) value = 25915 (actual 29895 - -9% error) 4% (actual 0%) key = (1087, 370) value = 27901 (actual 29895 - -4% error) 4% (actual 4%) key = (1156, 393) value = 29881 (actual 31850 - -4% error) 4% (actual 4%) key = (1291, 438) value = 31821 (actual 33747 - -4% error) 4% (actual 4%) key = (1315, 446) value = 33794 (actual 35739 - -4% error) 4% (actual 9%) key = (1375, 466) value = 35737 (actual 39763 - -9% error) 4% (actual 4%) key = (1540, 521) value = 37749 (actual 41447 - -8% error) 3% (actual 0%) key = (1555, 526) value = 39198 (actual 41447 - -5% error) 3% (actual 1%) key = (1594, 539) value = 40605 (actual 42020 - -3% error) 1% (actual 0%) key = (1636, 553) value = 41344 (actual 42020 - -1% error) 0% (actual 0%) key = (1639, 554) value = 41733 (actual 42020 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 4% (actual 0%) key = (10, 11) value = 24 (actual 1 - 4% error) 5% (actual 0%) key = (16, 13) value = 49 (actual 2 - 9% error) 5% (actual 11%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 4% (actual 13%) key = (10, 11) value = 1900 (actual 5800 - -9% error) 4% (actual 0%) key = (16, 13) value = 3867 (actual 5800 - -4% error) 4% (actual 0%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key = (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 
370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) Checking Mixed: Touched 100% bytes, 5 pages RowCountHistogram: 14% (actual 12%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 13% (actual 13%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key = (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 
1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 19% (actual 16%) key = (286, 103) value = 97 (actual 84 - 2% error) 19% (actual 19%) key = (607, 210) value = 192 (actual 183 - 1% error) 18% (actual 22%) key = (958, 327) value = 286 (actual 293 - -1% error) 19% (actual 20%) key = (1291, 438) value = 381 (actual 394 - -2% error) 23% (actual 21%) DataSizeHistogram: 18% (actual 28%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 19% (actual 19%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 19% (actual 19%) key = (958, 327) value ... 85 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% 
(actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% 
(actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () [0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () () [0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 
24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent >> KqpReturning::ReturningWorksIndexedUpsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedUpsert-QueryService |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TClockProCache::Touch [GOOD] >> TClockProCache::Lifecycle [GOOD] >> TClockProCache::EvictNext [GOOD] >> TClockProCache::UpdateLimit [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random >> TClockProCache::Random [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2025-04-17T18:13:16.545640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346831649303731:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.546006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014aa/r3tmp/tmpMdB2kW/pdisk_1.dat 2025-04-17T18:13:16.845423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18097, node 1 2025-04-17T18:13:16.915164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.915817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.918401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.985615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.985639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.985654Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-17T18:13:16.985768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:17.192337Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:17.195761Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:17.195819Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:17.196457Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1929, port: 1929 2025-04-17T18:13:17.198943Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:17.205314Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:17.251806Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****5GqQ (D49EEFA9) () has now valid token of ldapuser@ldap 2025-04-17T18:13:19.188772Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346844953557198:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:19.188856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014aa/r3tmp/tmppc4WQU/pdisk_1.dat 2025-04-17T18:13:19.306288Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3339, node 2 2025-04-17T18:13:19.339012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:19.339107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:19.342988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:19.375061Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:19.375081Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:19.375087Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:19.375188Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:19.670113Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:19.672948Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:19.672967Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:19.673586Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7461, port: 7461 2025-04-17T18:13:19.673661Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:19.684011Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:19.726685Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:19.727130Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:7461 return no entries 2025-04-17T18:13:19.727618Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****KfKw (2E26BB9C) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:7461 return no entries)' 2025-04-17T18:13:22.256446Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346858420859702:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:22.256509Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014aa/r3tmp/tmp7MqkdT/pdisk_1.dat 2025-04-17T18:13:22.336410Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15700, node 3 2025-04-17T18:13:22.380018Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:22.380113Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:22.381705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:22.387463Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:22.387482Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:22.387493Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:22.387609Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:22.482226Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:22.485164Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:22.485196Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:22.485695Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:6435, port: 6435 2025-04-17T18:13:22.485785Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:22.493882Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:22.538620Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:22.586427Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:22.586859Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:22.586904Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:22.630551Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:22.678433Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:22.679439Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****PLNw (3DC03427) () has now valid token of ldapuser@ldap 2025-04-17T18:13:27.256843Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494346858420859702:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:27.256932Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:13:28.265747Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****PLNw (3DC03427) 2025-04-17T18:13:28.265821Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:6435, port: 6435 2025-04-17T18:13:28.265881Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:28.283027Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:28.326636Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:28.370464Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:28.370970Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:28.371008Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:28.418542Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:28.466496Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:28.467537Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****PLNw (3DC03427) () has now valid token of ldapuser@ldap 2025-04-17T18:13:33.339553Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494346906375601323:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:33.339618Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014aa/r3tmp/tmpVfXSTp/pdisk_1.dat 2025-04-17T18:13:33.426123Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31095, node 4 2025-04-17T18:13:33.474796Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:33.474890Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:33.476256Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:33.486847Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:33.486872Z node 4 :NET ... 594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:44.469867Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:44.474806Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:44.502871Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:44.502899Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:44.502906Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:44.503039Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:44.624965Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:44.628063Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:44.628104Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:44.628930Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:64665, port: 64665 2025-04-17T18:13:44.629008Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:44.639755Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:44.686704Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:44.734501Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:13:44.735076Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:13:44.735136Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:44.778585Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:44.822552Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:13:44.823671Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****y3Vg (E3818AE5) () has now valid token of ldapuser@ldap 2025-04-17T18:13:49.291777Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494346949989590349:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:49.291934Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
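[editor's note] The node 3 and node 5 scenarios above show how the LDAP auth provider resolves nested groups: a memberOf lookup for the user, then a single chain query using OID 1.2.840.113556.1.4.1941 (Active Directory's LDAP_MATCHING_RULE_IN_CHAIN), and then the "Try to get nested groups - tree traversal" rounds, which repeatedly search the parents of the groups found so far via `(|(entryDn=...)(entryDn=...))` filters until a round discovers nothing new. Below is a minimal sketch of that fixed-point loop with the directory stubbed as an in-memory map; the data mirrors the group DNs visible in the log, and none of this is YDB's actual implementation.

```cpp
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

// Directory stub: entryDn -> values of its memberOf attribute.
// In the real provider each round is an LDAP search with filter
// (|(entryDn=<dn1>)(entryDn=<dn2>)...) requesting the memberOf attribute.
using TDirectory = std::map<std::string, std::vector<std::string>>;

// Fixed-point traversal: start from the user's direct groups and keep
// asking for the parents of newly discovered groups until a round adds
// nothing (the log above shows three such rounds for ldapuser).
std::set<std::string> ResolveNestedGroups(const TDirectory& dir,
                                          const std::vector<std::string>& directGroups) {
    std::set<std::string> known(directGroups.begin(), directGroups.end());
    std::vector<std::string> frontier(directGroups);
    while (!frontier.empty()) {
        std::vector<std::string> next;
        for (const auto& dn : frontier) {
            auto it = dir.find(dn);
            if (it == dir.end()) continue;
            for (const auto& parent : it->second) {
                if (known.insert(parent).second) {
                    next.push_back(parent); // unseen parent: query it next round
                }
            }
        }
        frontier = std::move(next);
    }
    return known;
}

int main() {
    // Illustrative data shaped like the DNs in the log.
    TDirectory dir = {
        {"cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
        {"cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
        {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
        {"cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
    };
    for (const auto& g : ResolveNestedGroups(dir,
            {"cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
             "cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}))
        std::cout << g << "\n";
}
```

Note also the two failure modes in this log: "LDAP user ldapuser does not exist" is recorded as a permanent error on the ticket, while "Server is busy" is recorded as retryable, and the ticket parser keeps refreshing until the search succeeds.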
2025-04-17T18:13:50.305622Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****y3Vg (E3818AE5) 2025-04-17T18:13:50.305793Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:64665, port: 64665 2025-04-17T18:13:50.305871Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:50.317739Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:50.362724Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:50.363229Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:64665 return no entries 2025-04-17T18:13:50.363740Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****y3Vg (E3818AE5) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:64665 return no entries)' 2025-04-17T18:13:54.307874Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****y3Vg (E3818AE5) 2025-04-17T18:13:55.429232Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494346997761432405:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:55.429370Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014aa/r3tmp/tmpaz6to0/pdisk_1.dat 2025-04-17T18:13:55.536646Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62389, node 6 2025-04-17T18:13:55.562614Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:55.562714Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:55.567803Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:55.614868Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:55.614890Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:55.614898Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:55.615042Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:55.713444Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-17T18:13:55.716696Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:55.716735Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:55.717534Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24707, port: 24707 2025-04-17T18:13:55.717610Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:55.727856Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:55.770674Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:55.771136Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server 
ldap://localhost:24707. Server is busy 2025-04-17T18:13:55.771638Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****wS9w (52774FAB) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24707. Server is busy)' 2025-04-17T18:13:55.772014Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:55.772047Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:55.773442Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24707, port: 24707 2025-04-17T18:13:55.773563Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:55.783538Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:55.826679Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:55.827127Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:24707. Server is busy 2025-04-17T18:13:55.827541Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****wS9w (52774FAB) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24707. Server is busy)' 2025-04-17T18:13:58.439660Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wS9w (52774FAB) 2025-04-17T18:13:58.439989Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:13:58.440020Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:13:58.441332Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24707, port: 24707 2025-04-17T18:13:58.441448Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:13:58.454818Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:13:58.505567Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:13:58.506047Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:24707. Server is busy 2025-04-17T18:13:58.506486Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****wS9w (52774FAB) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24707. 
Server is busy)' 2025-04-17T18:14:00.429815Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494346997761432405:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:00.429887Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:02.443800Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wS9w (52774FAB) 2025-04-17T18:14:02.444042Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-17T18:14:02.444058Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-17T18:14:02.444696Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24707, port: 24707 2025-04-17T18:14:02.444765Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-17T18:14:02.459000Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-17T18:14:02.502875Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-17T18:14:02.546810Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-17T18:14:02.549908Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-17T18:14:02.549982Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:14:02.594763Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:14:02.638592Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-17T18:14:02.639914Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****wS9w (52774FAB) () has now valid token of ldapuser@ldap >> KqpRanges::NullInKey [GOOD] >> KqpRanges::NullInKeySuffix >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::UnionAllSortLimit >> KqpSysColV1::InnerJoinSelectAsterisk >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort |91.9%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNotNullColumns::InsertNotNullPkPg+useSink >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns >> KqpSqlIn::SimpleKey [GOOD] >> KqpSqlIn::SelectNotAllElements >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::StaleRO >> KqpNewEngine::PureTxMixedWithDeferred [GOOD] >> KqpNewEngine::ReadAfterWrite >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> KqpKv::ReadRows_UnknownTable >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] >> KqpNewEngine::MultiOutput [GOOD] >> KqpNewEngine::MultiStatement >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions+UseSink >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence >> KqpNewEngine::StreamLookupWithView [GOOD] >> KqpNewEngine::Truncated >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertFromSelect >> KqpNotNullColumns::UpdateNotNullPkPg [GOOD] >> KqpNotNullColumns::UpdateNotNull >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect1 >> KqpRanges::UpdateMulti [GOOD] >> KqpRanges::UpdateWhereInBigLiteralList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2025-04-17T18:10:39.040076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:39.040213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:39.040353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b25/r3tmp/tmpRGyyNJ/pdisk_1.dat 2025-04-17T18:10:39.399293Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26224, node 1 2025-04-17T18:10:39.610788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:39.610853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:39.610883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:39.611528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:39.613188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:39.696074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:39.696192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:39.709967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61874 2025-04-17T18:10:40.224762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:43.096211Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:43.131601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:43.131690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:43.168934Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:43.171145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:43.385963Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.386481Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.387113Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.387468Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.387565Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.387653Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.387728Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.387799Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.387890Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:43.550642Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:43.550725Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:43.563785Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:43.683887Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:43.729945Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:43.730061Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:43.759481Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:43.761055Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:43.761238Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:43.761305Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:43.761358Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:43.761402Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:43.761455Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:43.761498Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:43.762631Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:43.797372Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:43.797450Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:43.804146Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:43.807696Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:43.807815Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:43.817269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:43.833801Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:43.833854Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:43.833950Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:43.842824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:43.854085Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:43.854235Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:44.007323Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:44.180149Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:44.237635Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:45.006105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.006268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:45.021070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:45.111738Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:45.111988Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:45.112377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:45.112568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:45.112789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:45.112935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:45.113053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:45.113232Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:45.113392Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:45.113540Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:45.113687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:45.113835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:45.137727Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:45.137827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... , $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:13:57.710459Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGUyYWFlYWQtY2RlYTYxNjItNTY4NDQ2OGItYjg5ZmIwOTM=, TxId: 2025-04-17T18:13:57.710528Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGUyYWFlYWQtY2RlYTYxNjItNTY4NDQ2OGItYjg5ZmIwOTM=, TxId: 2025-04-17T18:13:57.711004Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:13:57.736179Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:13:57.736241Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId7, ActorId=[1:2807:3225] 2025-04-17T18:13:58.350852Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-17T18:13:58.350920Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:13:59.064057Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:13:59.064256Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:13:59.090993Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:13:59.091060Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-17T18:13:59.091088Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-17T18:13:59.091108Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:13:59.091289Z node 2 :STATISTICS DEBUG: Event round 9 is different from the current 0 2025-04-17T18:13:59.091325Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-17T18:14:00.545202Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:14:01.889273Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:14:01.889343Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-17T18:14:01.889373Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-17T18:14:01.889393Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:14:03.190992Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:14:03.203061Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:14:03.203202Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
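[editor's note] Each traversal round below ends with the statistics aggregator persisting per-column statistics through a parameterized YQL UPSERT into `.metadata/_statistics`, one VALUES tuple per column tag ($column_tags[0]/$data[0], $column_tags[1]/$data[1]). A sketch of how such a statement could be generated for an arbitrary column count follows; the generator and the list element types are assumptions for illustration, not YDB's code (the log truncates the DECLARE types to bare `List`).

```cpp
#include <iostream>
#include <sstream>
#include <string>

// Builds query text of the shape seen in the log: a DECLARE section plus
// one VALUES tuple per column, each indexing into $column_tags/$data.
std::string BuildStatisticsUpsert(size_t columnCount) {
    std::ostringstream q;
    q << "DECLARE $owner_id AS Uint64;\n"
         "DECLARE $local_path_id AS Uint64;\n"
         "DECLARE $stat_type AS Uint32;\n"
         "DECLARE $column_tags AS List<Uint32>;\n"  // element type assumed
         "DECLARE $data AS List<String>;\n"         // element type assumed
         "UPSERT INTO `.metadata/_statistics`\n"
         "  (owner_id, local_path_id, stat_type, column_tag, data)\n"
         "VALUES\n";
    for (size_t i = 0; i < columnCount; ++i) {
        q << "  ($owner_id, $local_path_id, $stat_type, "
          << "$column_tags[" << i << "], $data[" << i << "])"
          << (i + 1 < columnCount ? ",\n" : ";\n");
    }
    return q.str();
}

int main() {
    std::cout << BuildStatisticsUpsert(2); // two columns, as in the rounds below
}
```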
2025-04-17T18:14:03.203240Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:14:03.203837Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:14:03.230847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:14:03.231238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:14:03.231307Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:14:03.231680Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:14:03.245758Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:14:03.245952Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2025-04-17T18:14:03.246507Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9364:6416], server id = [2:9365:6417], tablet id = 72075186224037899, status = OK 2025-04-17T18:14:03.246606Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9364:6416], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:14:03.247887Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:14:03.247966Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:14:03.248125Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:14:03.248324Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:14:03.248663Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:14:03.251343Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9364:6416], server id = [2:9365:6417], tablet id = 72075186224037899 2025-04-17T18:14:03.251380Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:14:03.252172Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:14:03.275400Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTA5YjRlNGUtYjdiMDIwOC0xYTllYTdjOS1jYzY3YzQ0Mg==, TxId: 2025-04-17T18:14:03.275452Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTA5YjRlNGUtYjdiMDIwOC0xYTllYTdjOS1jYzY3YzQ0Mg==, TxId: 2025-04-17T18:14:03.275847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:14:03.303536Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:14:03.303610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:2807:3225] 2025-04-17T18:14:03.891685Z node 2 :STATISTICS DEBUG: Event round 11 is different from the current 0 2025-04-17T18:14:03.891760Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:14:04.577150Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-17T18:14:04.577208Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-17T18:14:04.577294Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:14:04.577320Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-17T18:14:04.577345Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:14:05.912037Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:14:05.912274Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:14:05.923376Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:14:07.254235Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:14:07.254314Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-17T18:14:07.254347Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:14:08.600692Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:14:08.600820Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:14:08.600854Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:14:08.601360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:14:08.616294Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:14:08.616687Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:14:08.616750Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:14:08.617064Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-17T18:14:08.630865Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:14:08.631035Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2025-04-17T18:14:08.631540Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9533:6505], server id = [2:9534:6506], tablet id = 72075186224037899, status = OK 2025-04-17T18:14:08.631637Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9533:6505], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:14:08.632707Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:14:08.632769Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:14:08.632970Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9533:6505], server id = [2:9534:6506], tablet id = 72075186224037899 2025-04-17T18:14:08.633001Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:14:08.633076Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:14:08.633234Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:14:08.633589Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:14:08.636060Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:14:08.674159Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGI5OGQ3YjctYjZiNDkzMWQtMzZmMDZkNzUtNDFmMzk4ZjM=, TxId: 2025-04-17T18:14:08.674249Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGI5OGQ3YjctYjZiNDkzMWQtMzZmMDZkNzUtNDFmMzk4ZjM=, TxId: 2025-04-17T18:14:08.674503Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:58.000000Z, event interval end# 2025-04-17T18:14:06.000000Z 2025-04-17T18:14:08.675114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:14:08.716893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:14:08.716959Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:2807:3225] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD] >> KqpNewEngine::JoinPure |91.9%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-04-17T18:13:25.261940Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346870405161288:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:25.262006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001406/r3tmp/tmpj1aWDS/pdisk_1.dat 2025-04-17T18:13:25.621319Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16567, node 1 2025-04-17T18:13:25.650166Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:25.650208Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:25.685757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:25.685847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:25.696230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:25.766270Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:25.766297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:25.766311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:25.766425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:26.136373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24068 2025-04-17T18:13:27.602564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346878995096907:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:27.602679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.000443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.167868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883290064392:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.168006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.168082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883290064397:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.173641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:13:28.193032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346883290064399:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:13:28.263379Z node 1 :TX_PROXY ERROR: Actor# [1:7494346883290064478:2813] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:28.533949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2f9bf6endzhm9venjyz4te, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.598318Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2f9bwm9ytcgnefggmmd9t6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.609739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2f9bx07fj1h43mdc3gy8s1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.620117Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2f9bxacbzec7jzq6k4enhc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.630126Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2f9bxmdb051199kr6x3w7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.642458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2f9by16gzb97mfv0dkj78g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.652665Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2f9byb0ctcgqfbb1a0pxfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.663483Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01js2f9byp4gbx84ew6nmzj0k1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.674706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2f9bz1cz4vb6bt1h3zc6k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.684808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01js2f9bzb99fz7vfgxgf50wj5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.697112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2f9bzq3nb112bevme1a2ns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.708737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01js2f9c03asmvn5c9k922fnkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.724350Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2f9c0jdsfjvphvptbve3h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.741779Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01js2f9c1469v78dmd0kx3fag1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.757024Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2f9c1k12h94x1wzmc0kngr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.769457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2f9c206apxnt7wyyaw94hb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.779590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01js2f9c2a05egc5hhzf9enzhz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.792176Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01js2f9c2pd22389q8yjmqv2wn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:13:28.804440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01js2f9c33fwbayvssdm3vbnxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIyNTkxOWItNDVmODg4ZDQtMmQ0OTU1Y2EtN2QyMjY5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T ... n/3?node_id=1&id=ZTBlMjEyMjMtOGI3MWJmYzgtNjI1NmE5MWEtZjk3M2FmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.226800Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723695. 
Ctx: { TraceId: 01js2fahkj8q7eabn9473f6mfg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGEyZWRkNjktZTIxNDQ4NTEtNzMwOWRiZjItOGJiNDE1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.228864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723696. Ctx: { TraceId: 01js2fahkjazb5031cwtanwk4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE0YWIxMzUtYjRmYTU1ZjMtOGQ3YmUwY2YtZmZjNGQ0ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.232375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723697. Ctx: { TraceId: 01js2fahkm21j6qex7apc0awg0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZkOTRhMTAtMWRkZGZjZGMtMmM2MzI2NTAtYmIwNDA4MTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.232770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723698. Ctx: { TraceId: 01js2fahkjbbrmzf6ne8qex8v9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUyNDdmOWUtZjU0MzBjZDktNWM3Yjk2MmMtYzVlYzQ5MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.238377Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723699. Ctx: { TraceId: 01js2fahkt1aysqaq9q5sm5crt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg3NDBlNTctYjNmM2RmZWMtNWI5NzBiYy1kZGVhZGFjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.238570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723700. Ctx: { TraceId: 01js2fahkt1cz8hdnrvn51sv3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMwNGQzMjQtZjVlNjllMzYtMmYyYzI3MTAtNjhkZTQzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.244810Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723701. Ctx: { TraceId: 01js2fahky72xe7djmstqtc4ph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZGEzM2ItNmFmZWY0MWYtYzI3M2VjYzktODYxZjI1MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.245552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723702. Ctx: { TraceId: 01js2fahm2czstefna0b3mq0gz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY5OWVhYjgtOTdjMDJlZTctNjUwN2M5MTAtM2U5NjY2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.251378Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723703. Ctx: { TraceId: 01js2fahmd0tpdryy8em8vvz7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBlMjEyMjMtOGI3MWJmYzgtNjI1NmE5MWEtZjk3M2FmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.251841Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723704. Ctx: { TraceId: 01js2fahme7hs6w83aspd88vwn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE5Y2U3MGQtYmZhMjg5ZjAtY2ZkMjQyNi01MDUxMzBhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.263608Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723705. 
Ctx: { TraceId: 01js2fahmpb1j7486tcmrwsnh2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUyNDdmOWUtZjU0MzBjZDktNWM3Yjk2MmMtYzVlYzQ5MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.264957Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723706. Ctx: { TraceId: 01js2fahmrcgqg227egsjsmmm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE0YWIxMzUtYjRmYTU1ZjMtOGQ3YmUwY2YtZmZjNGQ0ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.267844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723707. Ctx: { TraceId: 01js2fahmtbh0vmq4xjwhyhka4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZkOTRhMTAtMWRkZGZjZGMtMmM2MzI2NTAtYmIwNDA4MTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.268329Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723708. Ctx: { TraceId: 01js2fahmvdqmns7n3444d1b7w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGEyZWRkNjktZTIxNDQ4NTEtNzMwOWRiZjItOGJiNDE1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.298608Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723709. Ctx: { TraceId: 01js2fahn27zk5mv2v6x77x6wj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMwNGQzMjQtZjVlNjllMzYtMmYyYzI3MTAtNjhkZTQzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.300220Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723710. Ctx: { TraceId: 01js2fahn2fwhbsb73nqkekqj2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZGEzM2ItNmFmZWY0MWYtYzI3M2VjYzktODYxZjI1MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.303170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723712. Ctx: { TraceId: 01js2fahnj2rpfjy2z4k4rtw8w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY5OWVhYjgtOTdjMDJlZTctNjUwN2M5MTAtM2U5NjY2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.306437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723713. Ctx: { TraceId: 01js2fahn23t93zkpzfjay57dd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg3NDBlNTctYjNmM2RmZWMtNWI5NzBiYy1kZGVhZGFjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.306856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723711. Ctx: { TraceId: 01js2fahnjcx6vkhc6mgrjrqwy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBlMjEyMjMtOGI3MWJmYzgtNjI1NmE5MWEtZjk3M2FmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-17T18:14:07.310830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723714. Ctx: { TraceId: 01js2fahntef28ry6zmam7abar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE5Y2U3MGQtYmZhMjg5ZjAtY2ZkMjQyNi01MDUxMzBhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.314142Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723715. 
Ctx: { TraceId: 01js2fahpa54tvxn6mckyz51cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUyNDdmOWUtZjU0MzBjZDktNWM3Yjk2MmMtYzVlYzQ5MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.318019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723716. Ctx: { TraceId: 01js2fahpef4xbgy42zc0r1h4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZkOTRhMTAtMWRkZGZjZGMtMmM2MzI2NTAtYmIwNDA4MTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-04-17T18:14:07.321906Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723717. Ctx: { TraceId: 01js2fahppdacx0e88stm6kdxc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMwNGQzMjQtZjVlNjllMzYtMmYyYzI3MTAtNjhkZTQzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.321930Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723718. Ctx: { TraceId: 01js2fahpp9p8w7c1japgdha46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGEyZWRkNjktZTIxNDQ4NTEtNzMwOWRiZjItOGJiNDE1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.323312Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723719. Ctx: { TraceId: 01js2fahpp2bj7n0rw38ygxvhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZGEzM2ItNmFmZWY0MWYtYzI3M2VjYzktODYxZjI1MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T18:14:07.329324Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723720. Ctx: { TraceId: 01js2fahpt7wxsctqj341p7rm4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE0YWIxMzUtYjRmYTU1ZjMtOGQ3YmUwY2YtZmZjNGQ0ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.330347Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723721. Ctx: { TraceId: 01js2fahpt0gr211jy83qnxwnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg3NDBlNTctYjNmM2RmZWMtNWI5NzBiYy1kZGVhZGFjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.333698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723722. Ctx: { TraceId: 01js2fahq29e8mkkcdcvkcyyxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY5OWVhYjgtOTdjMDJlZTctNjUwN2M5MTAtM2U5NjY2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:07.343842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723723. 
Ctx: { TraceId: 01js2fahqea3t4kytygah0f0zx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE5Y2U3MGQtYmZhMjg5ZjAtY2ZkMjQyNi01MDUxMzBhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> KqpNewEngine::Join [GOOD] >> KqpSort::ReverseOptimizedWithPredicate [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized >> KqpNewEngine::ItemsLimit >> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD] >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> KqpMergeCn::TopSortBy_Float_Limit4 >> BuildStatsHistogram::Single_Slices [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> KqpSort::ReverseFirstKeyOptimized >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpNewEngine::Select1 >> KqpNotNullColumns::InsertNotNullPkPg+useSink [GOOD] >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup >> BuildStatsHistogram::Single_History >> KqpRanges::DateKeyPredicate >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> KqpNotNullColumns::InsertNotNullPkPg-useSink >> KqpNewEngine::Nondeterministic >> KqpNotNullColumns::AlterAddNotNullColumnPg >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> KqpSqlIn::SecondaryIndex_PgKey [GOOD] >> KqpKv::ReadRows_UnknownTable [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> KqpMergeCn::TopSortByDesc_Double_Limit3 >> TExecutorDb::RandomOps >> KqpSqlIn::SecondaryIndex_SimpleKey >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [FAIL] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService [GOOD] >> KqpRanges::NullInKeySuffix [GOOD] >> KqpReturning::ReturningWorksIndexedInsert-QueryService >> THiveTest::TestCreateAndReassignTabletWhileStarting >> KqpRanges::NullInPredicate >> KqpNotNullColumns::UpdateNotNull [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::InsertNotNullPg+useSink >> 
TGroupMapperTest::ReassignGroupTest3dc [GOOD] |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 5153, MsgBus: 6366 2025-04-17T18:14:07.939876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347051698952860:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:07.941585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c12/r3tmp/tmpgZ9VVw/pdisk_1.dat 2025-04-17T18:14:08.356237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5153, node 1 2025-04-17T18:14:08.413196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:08.413324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:08.414958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:08.429197Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:08.429219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:08.429227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:08.429372Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6366 TClient is connected to server localhost:6366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:08.938556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:08.975508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:09.122711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:09.284612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:09.355839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:10.855855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347064583856531:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:10.855949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.184555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.224015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.263974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.303452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.344500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.386901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.460257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.545228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347068878824490:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.545334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.545582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347068878824496:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.549018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:11.560633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347068878824498:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:11.644749Z node 1 :TX_PROXY ERROR: Actor# [1:7494347068878824555:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:12.940554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347051698952860:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:12.940626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-04-17T18:13:25.271739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346869391662967:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:25.272992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001448/r3tmp/tmp8Waik7/pdisk_1.dat 2025-04-17T18:13:25.613011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:25.656602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:25.656707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14434, node 1 2025-04-17T18:13:25.697493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:25.766201Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:25.766227Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:25.766241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:25.766369Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:13:26.135329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8537 2025-04-17T18:13:27.756488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346877981598470:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:27.756650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.000414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.171909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276565956:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.172077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.172387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276565961:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.177243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:13:28.177941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276565997:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.177980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566007:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.178065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566009:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.178114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566011:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.178232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.178278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566010:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.178443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566008:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.180114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566038:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.180136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566040:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.180175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.180744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566062:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.180775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566059:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.180817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.181537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566072:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.181581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.181582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566074:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.184634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566092:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.184692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.184728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882276566094:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.190411Z node 1 :TX_PROXY ERROR: Actor# [1:7494346882276566026:2775] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:13:28.191937Z node 1 :TX_PROXY ERROR: Actor# [1:7494346882276566025:2774] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:13:28.193173Z node 1 :TX_PROXY ERROR: Actor# [1:7494346882276566028:2777] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:13:28.193624Z node 1 :TX_PROXY ERROR: Actor# [1:7494346882276566029:2778] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-17T18:13:28.193846Z node 1 :TX_PROXY ERROR: Actor# [1:7494346882276566027:2776] txid# 2814 ... n/3?node_id=1&id=ZjJjOGNmNjEtNmNlMjFjZjktM2VlNDcyYmEtYTgwYWJhM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.740629Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743927. Ctx: { TraceId: 01js2fan1eb3zbafn3ed13dnkk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZlNDRiN2MtYzcxZmJiZGEtZWIyMzQyMDQtN2Y2ZDA4NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.744545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743928. Ctx: { TraceId: 01js2fan1p3rs0qejd6fv8hcw4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYzOTMyNTUtODk0ODdmOTctNmY3NDViZjktMWUxMTQyOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.745521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743929. Ctx: { TraceId: 01js2fan1q0bhp9pzc0wq4b8ts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZkNmFiYjQtODBiMDE0ZGEtZDdhNDg4ZmEtN2VjZDMwYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.746007Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743930. Ctx: { TraceId: 01js2fan1qf3hek208x619c75k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAxZTBlOTktYWUzNGE4ZjAtYTE0NjllMDEtYTA3NDQ2OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.751749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743931. Ctx: { TraceId: 01js2fan1ya3qjt1y7pv2tazgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJmN2RiZjUtZjU3NTRlM2ItMWZhOTNmZTAtODQ5ZjBhMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.751989Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743932. 
Ctx: { TraceId: 01js2fan1zcjyp70scbe6kv8sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQwOTQyNTctNmFmYjJlMTgtYWVjNWEwZjQtYmE4MDg0OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.752365Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743933. Ctx: { TraceId: 01js2fan1z90hp36mt9thpwad4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RlY2U2NjAtYWYyYjQ0YmQtMmI3Mzk1YWYtZTA5YzExNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.762328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743934. Ctx: { TraceId: 01js2fan27fvktpw91mwvhpm1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NhNzgzMi1mZGRkOTJmYS1iYjJhZDgyOS02MjA1MjkzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.762382Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743935. Ctx: { TraceId: 01js2fan274c4b1gb4vzrbdk7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJjOTUzZWItYWQxMmY0YTYtYzQ3ODk1NGMtODkzNWM0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.764449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743937. Ctx: { TraceId: 01js2fan295h710p4akswr69m8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYzOTMyNTUtODk0ODdmOTctNmY3NDViZjktMWUxMTQyOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.765902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743936. Ctx: { TraceId: 01js2fan29c1z3447hfr22tk9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZlNDRiN2MtYzcxZmJiZGEtZWIyMzQyMDQtN2Y2ZDA4NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.766421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743938. Ctx: { TraceId: 01js2fan292e1kvfepdb96txmx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAxZTBlOTktYWUzNGE4ZjAtYTE0NjllMDEtYTA3NDQ2OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.769270Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743939. Ctx: { TraceId: 01js2fan2952zqcf6xcdcfndqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJjOGNmNjEtNmNlMjFjZjktM2VlNDcyYmEtYTgwYWJhM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.769897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743940. Ctx: { TraceId: 01js2fan2acck7nq2cvrapk5ej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJmN2RiZjUtZjU3NTRlM2ItMWZhOTNmZTAtODQ5ZjBhMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.770407Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743941. Ctx: { TraceId: 01js2fan2e5etjxvqssbdx0pp9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQwOTQyNTctNmFmYjJlMTgtYWVjNWEwZjQtYmE4MDg0OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.771464Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743942. 
Ctx: { TraceId: 01js2fan2e7n1d276q47vcsts5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZkNmFiYjQtODBiMDE0ZGEtZDdhNDg4ZmEtN2VjZDMwYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.771968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743943. Ctx: { TraceId: 01js2fan2e9kpaz2wwkck28914, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RlY2U2NjAtYWYyYjQ0YmQtMmI3Mzk1YWYtZTA5YzExNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.776412Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743944. Ctx: { TraceId: 01js2fan2k0rfy4fde1hxvgz43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NhNzgzMi1mZGRkOTJmYS1iYjJhZDgyOS02MjA1MjkzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.777801Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743945. Ctx: { TraceId: 01js2fan2q8rbfwre8vztac9s2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJjOTUzZWItYWQxMmY0YTYtYzQ3ODk1NGMtODkzNWM0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T18:14:10.783725Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743947. Ctx: { TraceId: 01js2fan2wawjr4sx6vj6ppq2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYzOTMyNTUtODk0ODdmOTctNmY3NDViZjktMWUxMTQyOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.784574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743946. Ctx: { TraceId: 01js2fan2w85rbvmanqts1mz1y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAxZTBlOTktYWUzNGE4ZjAtYTE0NjllMDEtYTA3NDQ2OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.785041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743948. Ctx: { TraceId: 01js2fan2wbe3e8s1qmbyzs486, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZlNDRiN2MtYzcxZmJiZGEtZWIyMzQyMDQtN2Y2ZDA4NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.787477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743949. Ctx: { TraceId: 01js2fan2y07ersxq0af9cnxn7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJjOGNmNjEtNmNlMjFjZjktM2VlNDcyYmEtYTgwYWJhM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.791194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743950. 
Ctx: { TraceId: 01js2fan32bfehw3vq0t0rfbvs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZkNmFiYjQtODBiMDE0ZGEtZDdhNDg4ZmEtN2VjZDMwYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.791249Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743953. Ctx: { TraceId: 01js2fan338pyff2t8dy5qh9ya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJmN2RiZjUtZjU3NTRlM2ItMWZhOTNmZTAtODQ5ZjBhMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.791800Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743951. Ctx: { TraceId: 01js2fan33863a06q0y8g5d93e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NhNzgzMi1mZGRkOTJmYS1iYjJhZDgyOS02MjA1MjkzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.791936Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743954. Ctx: { TraceId: 01js2fan32c93rwkmbv9tvt1d1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RlY2U2NjAtYWYyYjQ0YmQtMmI3Mzk1YWYtZTA5YzExNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.792624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743955. Ctx: { TraceId: 01js2fan3388yfjvdptrfkt0q0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJjOTUzZWItYWQxMmY0YTYtYzQ3ODk1NGMtODkzNWM0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:14:10.792660Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976743952. Ctx: { TraceId: 01js2fan33fh99xkfd29cr3efp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQwOTQyNTctNmFmYjJlMTgtYWVjNWEwZjQtYmE4MDg0OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 4 shards >> KqpNewEngine::ReadAfterWrite [GOOD] >> KqpNewEngine::ReadDifferentColumns >> KqpReturning::ReturningWorksIndexedUpsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService >> KqpNewEngine::StaleRO [GOOD] >> KqpNewEngine::SqlInFromCompact >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::MultiStatementMixPure |91.9%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNewEngine::Truncated [GOOD] >> KqpNewEngine::StaleRO_Immediate >> KqpNewEngine::PruneWritePartitions+UseSink [GOOD] >> KqpNewEngine::PruneWritePartitions-UseSink |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> KqpPg::DropSequence [GOOD] >> KqpPg::DeleteWithQueryService+useSink >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> KqpNewEngine::ContainerRegistryCombiner >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpSqlIn::SimpleKey_In_And_In >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> KqpMergeCn::TopSortBy_Utf8_Limit2 >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::Replace >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> KqpSort::TopSortParameter >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseRangeOptimized ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [FAIL] Test command err: Trying to start YDB, gRPC: 15581, MsgBus: 61931 2025-04-17T18:12:40.577527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346675328317253:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:12:40.577617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001399/r3tmp/tmp5ZVgry/pdisk_1.dat 2025-04-17T18:12:40.855175Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15581, node 1 2025-04-17T18:12:40.916184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:12:40.916217Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:12:40.916226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:12:40.916358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:12:40.963966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:12:40.964069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:12:40.965730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61931 TClient is connected to server localhost:61931 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:12:41.320232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.342224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.463132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.594635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:41.649745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:12:42.728818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346683918253636:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:42.728900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.018063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.039597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.061766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.087781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.113535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.154794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.176237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:12:43.251183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346688213221595:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.251250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.251313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346688213221600:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:12:43.254106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:12:43.261225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346688213221602:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:12:43.342078Z node 1 :TX_PROXY ERROR: Actor# [1:7494346688213221658:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:12:44.041686Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189277:2510] 2025-04-17T18:12:44.115535Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189284:2513] 2025-04-17T18:12:44.116237Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189291:2517] 2025-04-17T18:12:44.116586Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189292:2518] 2025-04-17T18:12:44.116941Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189293:2519] 2025-04-17T18:12:44.130255Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189294:2520] 2025-04-17T18:12:44.145153Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189333:2530] 2025-04-17T18:12:44.172462Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189355:2537] 2025-04-17T18:12:44.203415Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189379:2543] 2025-04-17T18:12:44.238290Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189400:2549] 2025-04-17T18:12:44.274080Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189415:2555] 2025-04-17T18:12:44.315082Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189430:2561] 2025-04-17T18:12:44.357661Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189453:2567] 2025-04-17T18:12:44.403780Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189482:2574] 2025-04-17T18:12:44.451313Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189500:2581] 2025-04-17T18:12:44.504911Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189526:2589] 2025-04-17T18:12:44.557636Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189550:2596] 2025-04-17T18:12:44.614986Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189576:2602] 2025-04-17T18:12:44.676150Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189600:2612] 2025-04-17T18:12:44.738125Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189630:2618] 2025-04-17T18:12:44.806452Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189661:2628] 2025-04-17T18:12:44.874316Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189701:2634] 2025-04-17T18:12:44.945525Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189757:2643] 2025-04-17T18:12:45.022635Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346692508189770:2647] 2025-04-17T18:12:45.099458Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7494346696803157113:2658] 2025-04-17T18:12:45.100195Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346696803157152:2663] TxId: 281474976710674. Ctx: { TraceId: 01js2f81b2arskm7zaejp5xpw2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQxYjQ2MzItN2RjMzIwMDktNjhlMjA5MmItMWIyYmU5YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:12:45.100503Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDQxYjQ2MzItN2RjMzIwMDktNjhlMjA5MmItMWIyYmU5YWI=, ActorId: [1:7494346696803157124:2663], ActorState: ExecuteState, TraceId: 01js2f81b2arskm7zaejp5xpw2, Create QueryResponse for error on request, msg: 2025-04-17T18:12:45.107316Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913565136, txId: 281474976710673] shutting down 2025-04-17T18:12:45.107367Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346696803157160:2666], TxId: 281474976710674, task: 3. Ctx: { TraceId : 01js2f81b2arskm7zaejp5xpw2. SessionId : ydb://session/3?node_id=1&id=NDQxYjQ2MzItN2RjMzIwMDktNjhlMjA5MmItMWIyYmU5YWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494346696803157152:2663], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:45.108358Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494346696803157157:2664], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01js2f81b2arskm7zaejp5xpw2. SessionId : ydb://se ... 5-04-17T18:12:53.510709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:12:53.783929Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzJiNmE1ZGUtNTM1YzNiNTMtM2I2NjBjNzUtN2RhMDE4OTc=, ActorId: [2:7494346731170067313:2687], ActorState: ExecuteState, TraceId: 01js2f89s195s3er0mdaczhck6, Create QueryResponse for error on request, msg: 2025-04-17T18:12:54.209966Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWFiMTEyMTgtMmFiNDUxMzMtZTJhNTkyZjEtNWQ0OWI1NWU=, ActorId: [2:7494346735465034696:2723], ActorState: ExecuteState, TraceId: 01js2f8a6wcv59xsgm97g44r7p, Create QueryResponse for error on request, msg: 2025-04-17T18:12:55.242051Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346739760002252:2812] TxId: 0. Ctx: { TraceId: 01js2f8b6fftxf7ene04xv64rs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTNiZWY4NzAtNGVhYzkyNDUtN2I0MmVlMzUtZjJkYjJlZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:12:56.516709Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGQ2ODkyYzEtODZkYzEzM2QtNDlkMjE5MmQtY2Q2OTQ3NjU=, ActorId: [2:7494346744054969735:2891], ActorState: ExecuteState, TraceId: 01js2f8cde44ked48eyw2c5r58, Create QueryResponse for error on request, msg: 2025-04-17T18:12:57.018932Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346748349937216:2923] TxId: 281474976715678. Ctx: { TraceId: 01js2f8cwt6p6zb131j1fc7dcc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWUwMDJkZGYtMjVmZTA5ZDgtNDIxNGMwOGEtYmFhZTJhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:12:57.019152Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWUwMDJkZGYtMjVmZTA5ZDgtNDIxNGMwOGEtYmFhZTJhNTk=, ActorId: [2:7494346744054969887:2923], ActorState: ExecuteState, TraceId: 01js2f8cwt6p6zb131j1fc7dcc, Create QueryResponse for error on request, msg: 2025-04-17T18:12:57.019622Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913577057, txId: 281474976715677] shutting down 2025-04-17T18:12:57.020043Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937225:2930], TxId: 281474976715678, task: 4. Ctx: { TraceId : 01js2f8cwt6p6zb131j1fc7dcc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MWUwMDJkZGYtMjVmZTA5ZDgtNDIxNGMwOGEtYmFhZTJhNTk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346748349937216:2923], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.020550Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937224:2929], TxId: 281474976715678, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MWUwMDJkZGYtMjVmZTA5ZDgtNDIxNGMwOGEtYmFhZTJhNTk=. TraceId : 01js2f8cwt6p6zb131j1fc7dcc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346748349937216:2923], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.021684Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7494346748349937257:2087], tablet: [2:7494346713990195526:2323], scanId: 32, table: /Root/EightShard 2025-04-17T18:12:57.021707Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937221:2927], TxId: 281474976715678, task: 1. Ctx: { TraceId : 01js2f8cwt6p6zb131j1fc7dcc. SessionId : ydb://session/3?node_id=2&id=MWUwMDJkZGYtMjVmZTA5ZDgtNDIxNGMwOGEtYmFhZTJhNTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346748349937216:2923], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.021990Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937222:2928], TxId: 281474976715678, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWUwMDJkZGYtMjVmZTA5ZDgtNDIxNGMwOGEtYmFhZTJhNTk=. TraceId : 01js2f8cwt6p6zb131j1fc7dcc. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346748349937216:2923], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.022361Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937226:2931], TxId: 281474976715678, task: 5. Ctx: { TraceId : 01js2f8cwt6p6zb131j1fc7dcc. SessionId : ydb://session/3?node_id=2&id=MWUwMDJkZGYtMjVmZTA5ZDgtNDIxNGMwOGEtYmFhZTJhNTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346748349937216:2923], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.029512Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715678. Snapshot is not valid, tabletId: 72075186224037895, step: 1744913577057 2025-04-17T18:12:57.030489Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715678. Snapshot is not valid, tabletId: 72075186224037894, step: 1744913577057 2025-04-17T18:12:57.177141Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913577211, txId: 281474976715680] shutting down 2025-04-17T18:12:57.352943Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7494346748349937424:2960] TxId: 281474976715683. Ctx: { TraceId: 01js2f8d73e15vxb1asxrzjz4v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q0MjgzNDctZDBkMGRmZmYtMTg0NTQyMTYtYTZhZDFiZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T18:12:57.353152Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Q0MjgzNDctZDBkMGRmZmYtMTg0NTQyMTYtYTZhZDFiZmE=, ActorId: [2:7494346748349937390:2960], ActorState: ExecuteState, TraceId: 01js2f8d73e15vxb1asxrzjz4v, Create QueryResponse for error on request, msg: 2025-04-17T18:12:57.353550Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913577386, txId: 281474976715682] shutting down 2025-04-17T18:12:57.354775Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937429:2965], TxId: 281474976715683, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=N2Q0MjgzNDctZDBkMGRmZmYtMTg0NTQyMTYtYTZhZDFiZmE=. TraceId : 01js2f8d73e15vxb1asxrzjz4v. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346748349937424:2960], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.358688Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937430:2966], TxId: 281474976715683, task: 3. Ctx: { TraceId : 01js2f8d73e15vxb1asxrzjz4v. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=N2Q0MjgzNDctZDBkMGRmZmYtMTg0NTQyMTYtYTZhZDFiZmE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346748349937424:2960], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.359461Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937431:2967], TxId: 281474976715683, task: 4. Ctx: { TraceId : 01js2f8d73e15vxb1asxrzjz4v. SessionId : ydb://session/3?node_id=2&id=N2Q0MjgzNDctZDBkMGRmZmYtMTg0NTQyMTYtYTZhZDFiZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346748349937424:2960], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.359675Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937428:2964], TxId: 281474976715683, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=N2Q0MjgzNDctZDBkMGRmZmYtMTg0NTQyMTYtYTZhZDFiZmE=. CustomerSuppliedId : . TraceId : 01js2f8d73e15vxb1asxrzjz4v. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7494346748349937424:2960], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.359876Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7494346748349937432:2968], TxId: 281474976715683, task: 5. Ctx: { SessionId : ydb://session/3?node_id=2&id=N2Q0MjgzNDctZDBkMGRmZmYtMTg0NTQyMTYtYTZhZDFiZmE=. TraceId : 01js2f8d73e15vxb1asxrzjz4v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7494346748349937424:2960], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T18:12:57.360885Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7494346748349937455:2097], tablet: [2:7494346713990195525:2322], scanId: 38, table: /Root/EightShard 2025-04-17T18:12:57.361609Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7494346748349937452:2096], tablet: [2:7494346713990195538:2328], scanId: 37, table: /Root/EightShard 2025-04-17T18:12:57.361892Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7494346748349937458:2098], tablet: [2:7494346713990195536:2326], scanId: 39, table: /Root/EightShard 2025-04-17T18:12:57.362055Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7494346748349937463:2099], tablet: [2:7494346713990195526:2323], scanId: 40, table: /Root/EightShard 2025-04-17T18:12:57.520376Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913577547, txId: 281474976715685] shutting down 2025-04-17T18:12:57.860306Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913577897, txId: 281474976715687] shutting down 2025-04-17T18:13:03.585209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:13:03.585250Z node 2 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:1474, void NKikimr::NKqp::WaitForZeroSessions(const NKqp::TKqpCounters &): (count) Unable to wait for proper active session count, it looks like cancelation doesn`t work 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B8BA93B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BD82A4F 2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:1474: WaitForZeroSessions @ 0x4B4C2153 3. /tmp//-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:734: DoStreamExecuteYqlScriptScanTimeoutBruteForce @ 0x1B43A206 4. /tmp//-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18: operator() @ 0x1B475C27 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18:1) &> @ 0x1B475C27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18:1) &> @ 0x1B475C27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1B475C27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1B475C27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x1BDB9C45 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x1BDB9C45 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x1BDB9C45 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BD895D8 13. /tmp//-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18: Execute @ 0x1B474DF3 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BD8AEA5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1BDB41BC 16. ??:0: ?? @ 0x7F3B196FDD8F 17. ??:0: ?? @ 0x7F3B196FDE3F 18. ??:0: ?? 
@ 0x18B3F028 >> KqpNotNullColumns::InsertNotNullPkPg-useSink [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNewEngine::MultiUsagePrecompute >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> KqpAgg::AggWithSelfLookup2 [GOOD] >> KqpAgg::AggWithHop >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 >> KqpSort::ReverseFirstKeyOptimized [GOOD] >> KqpSort::ReverseLimitOptimized >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> KqpNotNullColumns::InsertNotNullPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPg-useSink >> THiveTest::TestCreateExternalTablet [GOOD] >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 >> KqpNotNullColumns::ReplaceNotNullPk >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::UpdateOnNotNull >> KqpRanges::DateKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan+UseSink >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow >> KqpNewEngine::ReadDifferentColumns [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::UnionAllPure >> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiEffects >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> KqpSqlIn::CantRewrite [GOOD] >> KqpSqlIn::ComplexKey >> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinDictWithPure >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::SelfJoin >> KqpNewEngine::PruneWritePartitions-UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions+UseSink >> TGroupMapperTest::Block42_1disk [GOOD] >> KqpNewEngine::ContainerRegistryCombiner [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate >> KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink >> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 >> KqpReturning::ReturningWorksIndexedInsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace+QueryService |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> KqpSort::TopSortParameter [GOOD] >> KqpSort::TopSortExpr >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinProjectMulti >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized >> 
KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup >> KqpNotNullColumns::InsertNotNullPg-useSink [GOOD] >> KqpNotNullColumns::FailedMultiEffects >> KqpReturning::ReturningWorksIndexedReplace-QueryService [GOOD] >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate >> KqpNewEngine::LocksSingleShard >> KqpNewEngine::MultiUsagePrecompute [GOOD] >> KqpNewEngine::OrderedScalarContext >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2025-04-17T18:09:22.165110Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:09:22.168842Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.169419Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:09:22.171839Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:09:22.174458Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-17T18:09:22.174513Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:09:22.175873Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-17T18:09:22.175915Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:09:22.176950Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:09:22.177281Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:09:22.189584Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-17T18:09:22.189627Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-17T18:09:22.191499Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.191654Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.191791Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.191940Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.192079Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# 
[1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.192197Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.192318Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-17T18:09:22.192340Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-17T18:09:22.192401Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-04-17T18:09:22.192431Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-04-17T18:09:22.192472Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-17T18:09:22.192505Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:09:22.193707Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:09:22.194605Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.224511Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.224583Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.224613Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.233744Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-04-17T18:09:22.233790Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-04-17T18:09:22.233848Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.233908Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:09:22.240981Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:09:22.241217Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.242030Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.242350Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:09:22.242443Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:22.242541Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.243903Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-17T18:09:22.244169Z node 1 
:BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:09:22.244207Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-17T18:09:22.244268Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-17T18:09:22.244291Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-17T18:09:22.244345Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2025-04-17T18:09:22.244369Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:52:2092] 2025-04-17T18:09:22.246440Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-17T18:09:22.246497Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.246646Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:09:22.246700Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.246770Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:52:2092] 2025-04-17T18:09:22.262573Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-17T18:09:22.262718Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:22.262761Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-17T18:09:22.264586Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.264685Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-17T18:09:22.267326Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-17T18:09:22.267384Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-17T18:09:22.267409Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-17T18:09:22.269493Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-04-17T18:09:22.269539Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-04-17T18:09:22.269822Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-17T18:09:22.270741Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.271782Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-17T18:09:22.273730Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 
72057594037936129} 2025-04-17T18:09:22.273817Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:09:22.273848Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-17T18:09:22.273968Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-04-17T18:09:22.273998Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-04-17T18:09:22.274073Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:09:22.274242Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-17T18:09:22.274275Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:09:22.275103Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-17T18:09:22.275957Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-17T18:09:22.276008Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-17T18:09:22.276030Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-17T18:09:22.276071Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-17T18:09:22.276186Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-17T18:09:22.276223Z node 1 :BS_NODE ... 
72057594037927937 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594037927937 Cookie: 0 CurrentLeader: [148:270:2261] CurrentLeaderTablet: [148:277:2265] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-17T18:14:19.079769Z node 148 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594037927937 followers: 0 2025-04-17T18:14:19.079863Z node 148 :TABLET_RESOLVER DEBUG: SelectForward node 148 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [148:270:2261] 2025-04-17T18:14:19.079997Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [148:267:2260] 2025-04-17T18:14:19.080085Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [148:267:2260] 2025-04-17T18:14:19.080237Z node 148 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [148:267:2260] 2025-04-17T18:14:19.080426Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [148:267:2260] 2025-04-17T18:14:19.080512Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [148:267:2260] 2025-04-17T18:14:19.080570Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [148:267:2260] 2025-04-17T18:14:19.080656Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [148:267:2260] 2025-04-17T18:14:19.080710Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [148:267:2260] 2025-04-17T18:14:19.080844Z node 148 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [148:266:2259] EventType# 268697601 2025-04-17T18:14:19.081145Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-04-17T18:14:19.081256Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:14:19.082126Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2025-04-17T18:14:19.086333Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:14:19.086549Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [148:311:2288] 2025-04-17T18:14:19.086590Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [148:311:2288] 2025-04-17T18:14:19.086660Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [148:311:2288] 2025-04-17T18:14:19.086729Z node 148 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:14:19.086788Z node 148 :TABLET_RESOLVER DEBUG: SelectForward node 148 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [148:94:2122] 2025-04-17T18:14:19.086878Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [148:311:2288] 2025-04-17T18:14:19.086924Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [148:311:2288] 
2025-04-17T18:14:19.087014Z node 148 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [148:311:2288] 2025-04-17T18:14:19.087158Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [148:311:2288] 2025-04-17T18:14:19.087197Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [148:311:2288] 2025-04-17T18:14:19.087229Z node 148 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [148:311:2288] 2025-04-17T18:14:19.087283Z node 148 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [148:277:2265] EventType# 268637702 2025-04-17T18:14:19.087470Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-04-17T18:14:19.087576Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:14:19.087803Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:14:19.087904Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:14:19.088229Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-17T18:14:19.088321Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:14:19.088738Z node 148 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005342304}(72075186224037888)::Execute - TryToBoot was not successfull 2025-04-17T18:14:19.088869Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-04-17T18:14:19.088962Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:14:19.100685Z node 148 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] bootstrap ActorId# [148:314:2291] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-17T18:14:19.100826Z node 148 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-17T18:14:19.100874Z node 148 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-17T18:14:19.100932Z node 148 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-04-17T18:14:19.100975Z node 148 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-04-17T18:14:19.101107Z node 148 :BS_PROXY DEBUG: Send to queueActorId# [148:35:2079] 
NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-17T18:14:19.102765Z node 148 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-17T18:14:19.102858Z node 148 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-17T18:14:19.102898Z node 148 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-17T18:14:19.102987Z node 148 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.617 sample PartId# [72057594037927937:2:4:0:0:698:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 148 } TEvVPutResult{ TimestampMs# 2.294 VDiskId# [0:1:0:0:0] NodeId# 148 Status# OK } ] } 2025-04-17T18:14:19.103080Z node 148 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-17T18:14:19.103183Z node 148 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-04-17T18:14:19.103431Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:14:19.103507Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-17T18:14:19.103549Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-17T18:14:19.103575Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-17T18:14:19.103610Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-17T18:14:19.103657Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-17T18:14:19.103690Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-17T18:14:19.103947Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [148:318:2294] 2025-04-17T18:14:19.103994Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [148:318:2294] 2025-04-17T18:14:19.104092Z node 148 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-17T18:14:19.104234Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-17T18:14:19.104368Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-17T18:14:19.104434Z node 148 :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-17T18:14:19.104462Z node 148 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-17T18:14:19.104506Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-17T18:14:19.104555Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-17T18:14:19.104581Z node 148 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-17T18:14:19.104676Z node 148 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-17T18:14:19.104729Z node 148 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-17T18:14:19.104833Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [148:318:2294] 2025-04-17T18:14:19.104879Z node 148 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [148:318:2294] >> KqpAgg::AggWithHop [GOOD] >> KqpAgg::GroupByLimit >> KqpMergeCn::TopSortBy_String_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 >> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD] >> KqpRanges::UpdateWhereInFullScan+UseSink >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> KqpRanges::DeleteNotFullScan+UseSink [GOOD] >> KqpRanges::CastKeyBounds >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::Describe >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink >> KqpNotNullColumns::ReplaceNotNullPkPg [GOOD] >> KqpNotNullColumns::SelectNotNullColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-04-17T18:13:25.253447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346870177377873:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:25.257537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001451/r3tmp/tmpCDFfkt/pdisk_1.dat 2025-04-17T18:13:25.613604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:25.635118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-17T18:13:25.635299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 63758, node 1 2025-04-17T18:13:25.652392Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:25.652418Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:25.666638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:25.766198Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:25.766220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:25.766227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:25.766358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:26.186253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Triggering split by load TClient is connected to server localhost:22135 2025-04-17T18:13:27.708961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346878767313442:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:27.709067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.000445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.183542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062280910:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.183639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.200200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-17T18:13:28.303515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281014:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.303617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.303893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281023:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.303955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281022:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.305188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281036:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.305293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.305745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281050:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.305824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281061:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.305887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281068:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.305905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281063:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.305992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.306306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281074:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.306752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281080:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.306758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281087:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.307829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281103:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.307841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346883062281101:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.307877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.308764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, pat ... MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient is connected to server localhost:22135 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards Fast forward > 10h to trigger the merge TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-04-17T18:14:23.311092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Propose merge request : Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976715658 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037890 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2025-04-17T18:14:23.311282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2025-04-17T18:14:23.311931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\364c\327l\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\364c\327l\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2025-04-17T18:14:23.311981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:14:23.315809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-17T18:14:23.320301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-17T18:14:23.320377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-17T18:14:23.322129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:23.331364Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7494347119285652113:5279] 2025-04-17T18:14:23.358513Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-17T18:14:23.358626Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-17T18:14:23.358845Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-17T18:14:23.381162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2025-04-17T18:14:23.381205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 131 2025-04-17T18:14:23.383217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 ProgressState, at schemeshard: 
72057594046644480 2025-04-17T18:14:23.415368Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-17T18:14:23.415490Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:14:23.415540Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-17T18:14:23.415569Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-17T18:14:23.415793Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-17T18:14:23.423587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037889 2025-04-17T18:14:23.423883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2025-04-17T18:14:23.424156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 131 -> 132 2025-04-17T18:14:23.427109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:14:23.427383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:14:23.427432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:14:23.429199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-17T18:14:23.429251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-17T18:14:23.429269Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-17T18:14:23.443271Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-17T18:14:23.443306Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-17T18:14:23.444494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-04-17T18:14:23.444732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-17T18:14:23.444785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-17T18:14:23.444805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-17T18:14:23.444838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-17T18:14:23.447314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715658:0 2025-04-17T18:14:23.447570Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to 
schemeshard 72057594046644480 2025-04-17T18:14:23.447597Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:14:23.448080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:14:23.448233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:14:23.452961Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-17T18:14:23.453005Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:14:23.453469Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:14:23.453528Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-17T18:14:23.454338Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:14:23.454425Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-17T18:14:23.454439Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:14:23.454465Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913608123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External >> TIterator::External [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin >> KqpNewEngine::PkRangeSelect3 [GOOD] >> KqpNewEngine::PkRangeSelect4 >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> KqpNewEngine::DeferredEffects >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::NoFullScanAtScanQuery >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::ReadRangeWithParams >> KqpNewEngine::MultiEffects [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TIterator::External [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:55.913068Z 00000.020 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.031 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.036 II| FAKE_ENV: Starting storage for BS group 0 00000.037 II| FAKE_ENV: Starting storage for BS group 1 00000.037 II| FAKE_ENV: Starting storage for BS group 2 00000.037 II| FAKE_ENV: Starting storage for BS group 3 00000.051 II| TABLET_FLATBOOT: Leader{1:2:-} booting Deps{0:0 entries 0} {nil} 00000.052 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.052 II| TABLET_FLATBOOT: Leader{1:2:-} loading { Alter 0, Turns 0, Loans 0, GCExt 0 } 00000.052 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 2, has 4 jobs, Boot{ 5 que, 2 refs } 00000.052 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 3, has 1 jobs, Boot{ 2 que, 2 refs } 00000.052 II| TABLET_FLATBOOT: Leader{1:2:-} redo log has 0 records, last before 0:0 00000.052 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 4, has 1 jobs, Boot{ 2 que, 2 refs } 00000.053 II| TABLET_FLATBOOT: Leader{1:2:-} result: db change {1 -> 1} snap on 0 00000.054 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 5, has 0 jobs, Boot{ 1 que, 2 refs } 00000.054 II| TABLET_FLATBOOT: Leader{1:2:-} booting completed, took 0.000s 00000.122 II| TABLET_FLATBOOT: Leader{1:3:-} booting Deps{2:1 entries 252} {nil} 00000.122 DD| TABLET_FLATBOOT: Leader{1:3:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.122 DD| TABLET_FLATBOOT: Leader{1:3:-} snap in deps on 2:1, TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.122 DD| TABLET_FLATBOOT: Leader{1:3:-} process snap gc entry, + [ ], - [ ] 00000.122 DD| TABLET_FLATBOOT: Leader{1:3:-} Loading TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.122 II| TABLET_FLATBOOT: Leader{1:3:-} snap on 2:1 change 1, 25b, ABI 28 of [1, 28], GC{ +0 -0 } 00000.122 DD| TABLET_FLATBOOT: Leader{1:3:-} process gc snapshot, + [ ], - [ ] 00000.122 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:2:1:8192:209:0] ], - [ ] 00000.122 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:3:1:24576:74:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: 
Leader{1:3:-} process log gc entry, + [ [1:2:4:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:5:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:6:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:7:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:8:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:9:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:10:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:11:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:12:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:13:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:14:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:15:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:16:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:17:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:18:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:19:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:20:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:21:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:22:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:23:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:24:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:25:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:26:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:27:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:28:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:29:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:30:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:31:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:32:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:33:1:24576:83:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:34:1:24576:82:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:35:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:36:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:37:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:38:1:24576:81:0] ], - [ ] 00000.123 DD| 
TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:39:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:40:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:41:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:42:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:43:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:44:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:45:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:46:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:47:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:48:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:49:1:24576:79:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:50:1:24576:82:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:51:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:52:1:24576:81:0] ], - [ ] 00000.123 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:53:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:54:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:55:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:56:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:57:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:58:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:59:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:60:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:61:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:62:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:63:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:64:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:65:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:66:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:67:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:68:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:69:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:70:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:71:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:72:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:73:1:24576:81:0] ], - [ 
] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:74:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:75:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:76:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:77:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:78:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:79:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:80:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:81:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:82:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:83:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:84:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:85:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:86:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:87:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:88:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:89:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:90:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:91:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:92:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:93:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:94:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:95:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:96:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:97:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:98:1:24576:81:0] ], - [ ] 00000.124 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:99:1:24576:81:0] ], - ... 
24576:100:0], [1:2:19:1:24576:97:0], [1:2:20:1:24576:96:0], [1:2:21:1:24576:97:0], [1:2:22:1:24576:97:0], [1:2:23:1:24576:97:0], [1:2:24:1:24576:97:0], [1:2:25:1:24576:97:0], [1:2:26:1:24576:97:0], [1:2:27:1:24576:97:0], [1:2:28:1:24576:96:0], [1:2:29:1:24576:100:0], [1:2:30:1:24576:97:0], [1:2:31:1:24576:96:0], [1:2:32:1:24576:96:0], [1:2:33:1:24576:104:0], [1:2:34:1:24576:97:0], [1:2:35:1:24576:99:0], [1:2:36:1:24576:97:0], [1:2:37:1:24576:97:0], [1:2:38:1:24576:97:0], [1:2:39:1:24576:97:0], [1:2:40:1:24576:97:0], [1:2:41:1:24576:97:0], [1:2:42:1:24576:97:0], [1:2:43:1:24576:97:0], [1:2:44:1:24576:97:0], [1:2:45:1:24576:97:0], [1:2:46:1:24576:97:0], [1:2:47:1:24576:97:0], [1:2:48:1:24576:97:0], [1:2:49:1:24576:97:0], [1:2:50:1:24576:97:0], [1:2:51:1:24576:97:0], [1:2:52:1:24576:97:0], [1:2:53:1:24576:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:101:0], [1:2:74:1:24576:102:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:104:0], [1:2:78:1:24576:104:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], [1:2:81:1:24576:103:0], [1:2:82:1:24576:101:0], [1:2:83:1:24576:104:0], [1:2:84:1:24576:104:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:101:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:104:0], [1:2:93:1:24576:98:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:104:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:97:0], [1:2:102:1:24576:100:0], [1:2:103:1:24576:104:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], [1:2:128:1:24576:104:0], [1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], [1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:145:1:24576:60:0], [1:2:146:1:24576:60:0] } 00000.117 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class Online from cache [ ] already requested [ ] to request [ 22 23 24 25 ] 00000.117 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 22 23 24 25 ] 00000.118 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.119 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1880b, wait} done, Waste{2:0, 141856b +(140, 14018b), 146 trc} 00000.120 DD| TABLET_SAUSAGECACHE: Attach page 
collection [1:2:143:1:12288:758:0] owner [35:212:2237] 00000.120 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class AsyncLoad from cache [ ] already requested [ 22 23 24 25 ] to request [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.120 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] async queue pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.120 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.121 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 22 23 24 25 ] 00000.121 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{26 pages [1:2:143:1:12288:758:0] ok OK}, category 2 00000.121 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 ] 00000.122 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.123 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan 00000.123 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.124 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.124 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} 00000.124 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.124 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141856b +(0, 0b), 1 trc, -14018b acc} 00000.125 DD| TABLET_SAUSAGECACHE: Unregister owner [35:212:2237] 00000.125 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {6 1077b} miss {50 281387b} 00000.125 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.125 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14357b, 149} 00000.125 II| FAKE_ENV: DS.1 gone, left {143736b, 8}, put {157893b, 150} 00000.125 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.125 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.125 II| FAKE_ENV: All BS storage groups are stopped 00000.126 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.126 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 782}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:02.202498Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.044 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.045 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 512b} miss {0 0b} 00000.045 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.046 II| FAKE_ENV: DS.0 gone, left {1356b, 12}, put {1376b, 13} 00000.046 II| FAKE_ENV: DS.1 gone, left {6814b, 
23}, put {6814b, 23} 00000.046 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: All BS storage groups are stopped 00000.046 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.046 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:02.254162Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.210 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.210 NN| TABLET_SAUSAGECACHE: Poison cache serviced 10 reqs hit {860 5551893b} miss {0 0b} 00000.210 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.210 II| FAKE_ENV: DS.0 gone, left {1201b, 13}, put {1221b, 14} 00000.211 II| FAKE_ENV: DS.1 gone, left {6751256b, 17}, put {6751256b, 17} 00000.213 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.213 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.213 II| FAKE_ENV: All BS storage groups are stopped 00000.213 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.213 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:02.484476Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00013.055 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00013.055 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4109 reqs hit {2091 2366986b} miss {6144 6340608b} 00013.056 II| FAKE_ENV: Shut order, stopping 4 BS groups 00013.056 II| FAKE_ENV: DS.0 gone, left {1761b, 14}, put {1781b, 15} 00013.056 II| FAKE_ENV: DS.1 gone, left {6927727b, 27}, put {6927727b, 27} 00013.058 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00013.058 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00013.058 II| FAKE_ENV: All BS storage groups are stopped 00013.058 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00013.058 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:15.575992Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00010.851 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00010.851 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4106 reqs hit {43 253450b} miss {4096 4227072b} 00010.852 II| FAKE_ENV: Shut order, stopping 4 BS groups 00010.852 II| FAKE_ENV: DS.0 gone, left {44744b, 2}, put {164747b, 16} 00010.852 II| FAKE_ENV: DS.1 gone, left {2764621b, 2068}, put {2764621b, 2068} 00010.858 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00010.858 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 
00010.858 II| FAKE_ENV: All BS storage groups are stopped 00010.858 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00010.858 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:26.442444Z 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:26.473853Z 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:26.517374Z 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:14:26.560409Z 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 >> KqpSort::TopSortExpr [GOOD] >> KqpSort::TopSortExprPk >> KqpNewEngine::JoinDictWithPure [GOOD] >> KqpNewEngine::IdxLookupExtractMembers >> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD] >> KqpNewEngine::Aggregate >> KqpNewEngine::PruneEffectPartitions+UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions-UseSink >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] >> KqpNewEngine::Delete-UseSink >> KqpNotNullColumns::FailedMultiEffects [GOOD] >> KqpPg::DeleteWithQueryService-useSink [GOOD] >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink >> KqpSort::ReverseEightShardOptimized [GOOD] >> KqpSort::PassLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] Test command err: Trying to start YDB, gRPC: 3096, MsgBus: 16548 2025-04-17T18:14:01.532435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347026165205610:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:01.533266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001595/r3tmp/tmptsQblC/pdisk_1.dat 2025-04-17T18:14:01.923579Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3096, node 1 2025-04-17T18:14:01.966518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:01.966614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:01.979445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:02.030708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:02.030729Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-17T18:14:02.030736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:02.030851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16548 TClient is connected to server localhost:16548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:02.509168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.527376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:04.470050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347039050108028:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.470160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.696970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.846259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347039050108132:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.846339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.846566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347039050108137:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.850894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:04.861566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347039050108139:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:04.939342Z node 1 :TX_PROXY ERROR: Actor# [1:7494347039050108190:2394] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:05.218563Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347043345075552:2361], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2025-04-17T18:14:05.219868Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2NiMzE3Mi01YmE3ZmIzMC1lZWU5NmRiYS1hYjE5MjFmNw==, ActorId: [1:7494347039050108000:2326], ActorState: ExecuteState, TraceId: 01js2fafm0b7bf7cyq7my618bj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:14:05.243800Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347043345075561:2365], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2025-04-17T18:14:05.246433Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2NiMzE3Mi01YmE3ZmIzMC1lZWU5NmRiYS1hYjE5MjFmNw==, ActorId: [1:7494347039050108000:2326], ActorState: ExecuteState, TraceId: 01js2fafnbfp10p6f5y16yq5hy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 13200, MsgBus: 14749 2025-04-17T18:14:06.115910Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347046758535921:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:06.116008Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001595/r3tmp/tmpHJIcGk/pdisk_1.dat 2025-04-17T18:14:06.338304Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:06.351943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:06.352015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:06.353386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13200, node 2 2025-04-17T18:14:06.511158Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:06.511180Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:06.511193Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:06.511334Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14749 TClient is connected to server localhost:14749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:06.979278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:09.322190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347059643438461:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:09.322270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:09.338659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:09.382458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347059643438561:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:09.382551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:09.382770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347059643438566:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:09.386365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:09.399138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347059643438568:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: ... 68897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12036, node 5 2025-04-17T18:14:20.017381Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:20.094714Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:20.094739Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:20.094749Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:20.094857Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21471 TClient is connected to server localhost:21471 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:14:20.524339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:23.043859Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347117279038692:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.043951Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.059591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:23.119824Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347117279038792:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.119921Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.120178Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347117279038797:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.124457Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:23.138217Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347117279038799:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:14:23.217523Z node 5 :TX_PROXY ERROR: Actor# [5:7494347117279038850:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:23.451022Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7494347117279038912:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:14:23.451199Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YTJjMzNiYWUtMmJkYTQyZC04NDQ5YTkyMy0xYmI1N2I4ZA==, ActorId: [5:7494347117279038669:2327], ActorState: ExecuteState, TraceId: 01js2fb1eeaqxd8zr3k8qcbqsm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 19867, MsgBus: 12017 2025-04-17T18:14:24.256513Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347125016928803:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:24.256584Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001595/r3tmp/tmp9HOax0/pdisk_1.dat 2025-04-17T18:14:24.400857Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:24.421497Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:24.421965Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:24.424384Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19867, node 6 2025-04-17T18:14:24.465490Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:24.465516Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:24.465526Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:24.465650Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12017 TClient is connected to server localhost:12017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:24.922244Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:24.927921Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:27.930914Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347137901831352:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:27.931005Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:27.947703Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.029252Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347142196798749:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.029359Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.029702Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347142196798754:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.034807Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:28.045296Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347142196798756:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:14:28.100987Z node 6 :TX_PROXY ERROR: Actor# [6:7494347142196798807:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:28.375842Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7494347142196798882:2329], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01js2fb669bhz0w200xazxwjy4. SessionId : ydb://session/3?node_id=6&id=YjU3NDA5YzQtOTE1OGU5NGEtNTdmYWViZWQtNjEyZDU2M2M=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 }. 2025-04-17T18:14:28.376324Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YjU3NDA5YzQtOTE1OGU5NGEtNTdmYWViZWQtNjEyZDU2M2M=, ActorId: [6:7494347137901831334:2329], ActorState: ExecuteState, TraceId: 01js2fb669bhz0w200xazxwjy4, Create QueryResponse for error on request, msg: >> KqpKv::ReadRows_SpecificKey >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNewEngine::JoinMultiConsumer >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::LocksMultiShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::FailedMultiEffects [GOOD] Test command err: Trying to start YDB, gRPC: 9671, MsgBus: 12956 2025-04-17T18:14:01.696297Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347023838577340:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:01.696355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001593/r3tmp/tmpAC0gWL/pdisk_1.dat 2025-04-17T18:14:02.060599Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9671, node 1 2025-04-17T18:14:02.102407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:02.102519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:02.104174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:02.127570Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:02.127593Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:02.127599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:02.127705Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12956 TClient is connected to server localhost:12956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:02.643150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:14:02.656974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:14:04.530798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347036723479879:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:04.531257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:04.821784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:04.954487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347036723479984:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:04.954579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:04.954871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347036723479989:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:04.958219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-17T18:14:04.971157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347036723479991:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-17T18:14:05.059617Z node 1 :TX_PROXY ERROR: Actor# [1:7494347041018447338:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:14:05.216832Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347041018447381:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2025-04-17T18:14:05.220243Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjJlNmNhNzktNzY3YmM0MzgtZGVmMGMxZDEtNGQzMDc2ZGE=, ActorId: [1:7494347036723479861:2328], ActorState: ExecuteState, TraceId: 01js2fafmbbrjmp3j10xwbs8qk, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-17T18:14:05.244487Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347041018447391:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<\'Key\':Null,\'Value\':String> to Struct<\'Key\':Uint64,\'Value\':String?>
:1:47: Error: Failed to convert \'Key\': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:14:05.245806Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjJlNmNhNzktNzY3YmM0MzgtZGVmMGMxZDEtNGQzMDc2ZGE=, ActorId: [1:7494347036723479861:2328], ActorState: ExecuteState, TraceId: 01js2fafnbb23w7ah0d771g60q, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 25060, MsgBus: 25040 2025-04-17T18:14:06.123078Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347046390468281:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:06.124240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001593/r3tmp/tmpei9fzE/pdisk_1.dat 2025-04-17T18:14:06.438750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:06.439031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:06.440256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25060, node 2 2025-04-17T18:14:06.449005Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:06.530038Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:06.530058Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:06.530065Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:06.530186Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25040 TClient is connected to server localhost:25040 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:07.037512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
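(Sketch of the two node 1 compile-time failures earlier in this block, written against the /Root/TestInsertNotNullPk table named in the log; the string literal is hypothetical.)

    -- code 2029: the primary key column Key is missing from the input row.
    UPSERT INTO TestInsertNotNullPk (Value) VALUES ("foo");
    -- code 2031: Key is present but is a literal NULL, so the input row
    -- types as Struct<'Key':Null,'Value':String> and Null cannot be
    -- converted to the key column type Uint64.
    UPSERT INTO TestInsertNotNullPk (Key, Value) VALUES (NULL, "foo");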
2025-04-17T18:14:07.060214Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:14:09.408225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347059275370764:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:09.408299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:09.425138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:09.471291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347059275370864:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:09.471375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:09.471728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347059275370869:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissio ... 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:20.175585Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:20.181679Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:22.925309Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347113980647988:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:22.925379Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:22.940945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:23.020863Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347118275615388:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:23.020975Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:23.021568Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347118275615393:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:23.026491Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:14:23.041446Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480
2025-04-17T18:14:23.042431Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347118275615395:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-17T18:14:23.125085Z node 5 :TX_PROXY ERROR: Actor# [5:7494347118275615446:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:14:23.463329Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7494347118275615512:2362], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-17T18:14:23.463923Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MzNjZTAwOTItZjRlYjBlYzItMzE1N2JiZmItZGVmZjFh, ActorId: [5:7494347113980647960:2327], ActorState: ExecuteState, TraceId: 01js2fb1es7fddqmzms16c90t5, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:23.683419Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:14:23.708851Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7494347118275615521:2366], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-04-17T18:14:23.709880Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MzNjZTAwOTItZjRlYjBlYzItMzE1N2JiZmItZGVmZjFh, ActorId: [5:7494347113980647960:2327], ActorState: ExecuteState, TraceId: 01js2fb1fjd9t36zy783gd7efk, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 7255, MsgBus: 4553 2025-04-17T18:14:24.592880Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347123741903836:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:24.592932Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001593/r3tmp/tmp9vd8rI/pdisk_1.dat 2025-04-17T18:14:24.713499Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7255, node 6 2025-04-17T18:14:24.747885Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:24.747957Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:24.750345Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:24.786292Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:24.786317Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:24.786326Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:24.786438Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4553 TClient is connected to server localhost:4553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:25.314964Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
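(Sketch of the node 5 code-2032 case above: a NOT NULL column may not be omitted from the input row at all; the table name is hypothetical.)

    -- code 2032: Value is declared NOT NULL but does not appear in the
    -- column list, so compilation fails with "Missing not null column".
    UPSERT INTO TestNotNullColumns (Key) VALUES (1);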
2025-04-17T18:14:25.324052Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:14:28.286388Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347140921773681:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.286483Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.307257Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:28.409334Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347140921773784:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.409431Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.409701Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347140921773789:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.413975Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:14:28.426971Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347140921773791:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-17T18:14:28.504851Z node 6 :TX_PROXY ERROR: Actor# [6:7494347140921773842:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:14:28.645979Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7494347140921773887:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:55: Error: Failed to convert type: Struct<\'Key\':Int32,\'Value\':Null> to Struct<\'Key\':Uint64,\'Value\':String>
:3:55: Error: Failed to convert \'Value\': Null to String
:3:55: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:14:28.646370Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZWJlMDRmZDgtYjNkZjY4MjUtM2JiNGMyNmEtNWI2ZDljMzc=, ActorId: [6:7494347140921773671:2329], ActorState: ExecuteState, TraceId: 01js2fb6g2egxcdqcazb34mepb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:14:28.682101Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpSort::TopParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 29076, MsgBus: 21333 2025-04-17T18:13:16.465473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346830088371901:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.465688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a8/r3tmp/tmpOK1qgQ/pdisk_1.dat 2025-04-17T18:13:16.786246Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29076, node 1 2025-04-17T18:13:16.825683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.825805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.827320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.937580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.937614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.937630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.937748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21333 TClient is connected to server localhost:21333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:17.450439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
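(The node 6 failure above is the literal-NULL variant of the same check: because NULL appears directly in the VALUES list, the input row types as Struct<'Key':Int32,'Value':Null> and the conversion to Struct<'Key':Uint64,'Value':String> is rejected during type annotation, rather than at execution. A sketch, with a hypothetical table name:)

    -- Rejected at compile time: 'Value': Null does not convert to String.
    UPSERT INTO TestNotNull (Key, Value) VALUES (1, NULL);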
2025-04-17T18:13:18.908915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346838678307153:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:18.909034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:19.098753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:13:19.225267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346842973274585:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:19.225408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:19.225535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346842973274590:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:19.229164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-17T18:13:19.237994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346842973274592:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:13:19.290060Z node 1 :TX_PROXY ERROR: Actor# [1:7494346842973274643:2416] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 3325, MsgBus: 30060 2025-04-17T18:13:20.474070Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346850296217723:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:20.474200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a8/r3tmp/tmpTWL8Wi/pdisk_1.dat 2025-04-17T18:13:20.564561Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3325, node 2 2025-04-17T18:13:20.606589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:20.606642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:20.608153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:20.622144Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:20.622191Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:20.622200Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:20.622308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30060 TClient is connected to server localhost:30060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:21.055348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:22.639236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346858886152960:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:22.639310Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:22.647942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:13:22.718759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346858886153094:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:22.718811Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:22.718815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346858886153099:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:13:22.721271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-17T18:13:22.727060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346858886153101:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:13:22.816543Z node 2 :TX_PROXY ERROR: Actor# [2:7494346858886153152:2413] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 14854, MsgBus: 27261 2025-04-17T18:13:23.883480Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346861373439521:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:23.883540Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a8/r3tmp/tmpCyYf37/pdisk_1.dat 2025-04-17T18:13:23.982074Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14854, node 3 2025-04-17T18:13:24.025707Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:24.025793Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connec ... T18:14:16.670365Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494347090255865009:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:16.670465Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a8/r3tmp/tmphOgTFW/pdisk_1.dat 2025-04-17T18:14:16.834410Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:16.902717Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:16.902840Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:16.904584Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8095, node 11 2025-04-17T18:14:16.959887Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:16.959918Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:16.959930Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:16.960096Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10248 TClient is connected to server localhost:10248 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:17.744275Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:17.752079Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:21.362214Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494347111730702155:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:21.362335Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:21.383997Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:21.444256Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494347111730702258:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:21.444381Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:21.444594Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494347111730702263:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:21.449221Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:14:21.460120Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494347111730702265:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:14:21.537358Z node 11 :TX_PROXY ERROR: Actor# [11:7494347111730702316:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:21.670565Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494347090255865009:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:21.670666Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28982, MsgBus: 62749 2025-04-17T18:14:22.991457Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7494347113679473155:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:22.991541Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a8/r3tmp/tmplWEBTP/pdisk_1.dat 2025-04-17T18:14:23.160598Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:23.174274Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:23.174398Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:23.176938Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28982, node 12 2025-04-17T18:14:23.250903Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:23.250934Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:23.250949Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:23.251148Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62749 TClient is connected to server localhost:62749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
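(This block belongs to KqpPg::DeleteWithQueryService-useSink; a minimal sketch of the kind of statement such a test exercises, assuming PostgreSQL-syntax mode and a hypothetical table name:)

    --!syntax_pg
    DELETE FROM pg_table WHERE key = 1;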
2025-04-17T18:14:24.158766Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.939145Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494347135154310289:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:27.939281Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:27.963645Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:27.991602Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7494347113679473155:2062];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:14:27.991704Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:14:28.041181Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494347139449277691:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.041299Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.041636Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494347139449277696:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:28.049074Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-17T18:14:28.062532Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494347139449277698:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:28.162498Z node 12 :TX_PROXY ERROR: Actor# [12:7494347139449277750:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup >> KqpNotNullColumns::Describe [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpNewEngine::PagingNoPredicateExtract >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn >> KqpRanges::IsNullPartial >> KqpNewEngine::Update+UseSink >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 >> KqpNotNullColumns::UpsertNotNull >> KqpReturning::ReturningWorksIndexedReplace+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService >> KqpRanges::CastKeyBounds [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 >> KqpRanges::UpdateWhereInFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: Trying to start YDB, gRPC: 7127, MsgBus: 63023 2025-04-17T18:14:00.705210Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347020946184065:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:00.705307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a5/r3tmp/tmpW1ujbv/pdisk_1.dat 2025-04-17T18:14:01.089545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:01.090920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:01.091014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:01.092672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7127, node 1 2025-04-17T18:14:01.178757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:01.178787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:01.178799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:01.178910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63023 TClient is connected to server 
localhost:63023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:01.741836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:03.508079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347033831086485:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:03.508171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:03.769079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:03.899321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347033831086590:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:03.899388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:03.899554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347033831086595:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:03.903476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-17T18:14:03.912269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347033831086597:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-17T18:14:04.011475Z node 1 :TX_PROXY ERROR: Actor# [1:7494347038126053944:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:14:04.165442Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347038126053987:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029
2025-04-17T18:14:04.166708Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmQwYjU3NjctZDZmMWZkNTMtYWM4MjBiOTItODg2NmQ3NTY=, ActorId: [1:7494347033831086467:2328], ActorState: ExecuteState, TraceId: 01js2faekd6h4k5dyn67sp6d58, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
2025-04-17T18:14:04.193954Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347038126053997:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<\'Key\':Null,\'Value\':String> to Struct<\'Key\':Uint64,\'Value\':String?>
:1:47: Error: Failed to convert \'Key\': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:14:04.195135Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmQwYjU3NjctZDZmMWZkNTMtYWM4MjBiOTItODg2NmQ3NTY=, ActorId: [1:7494347033831086467:2328], ActorState: ExecuteState, TraceId: 01js2faemf800rfch6kzjbzg4r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 62299, MsgBus: 7957 2025-04-17T18:14:04.879427Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347037913464494:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.879477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a5/r3tmp/tmpKDYzc7/pdisk_1.dat 2025-04-17T18:14:04.986979Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62299, node 2 2025-04-17T18:14:05.023384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:05.023893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:05.025359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:05.043926Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:05.043945Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:05.043951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:05.044055Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7957 TClient is connected to server localhost:7957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:05.560132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:05.567011Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:07.893272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347050798367036:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:07.893422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:07.912120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-17T18:14:07.953126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347050798367137:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:07.953199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:14:07.953542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347050798367142:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:07.957305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... _SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.758350Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347120029484377:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.764899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:23.777315Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347120029484379:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:23.849062Z node 5 :TX_PROXY ERROR: Actor# [5:7494347120029484438:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:24.730034Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347102849612764:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:24.730089Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink","Stats":{"ComputeNodes":[{"Tasks":[{"EgressRows":3,"NodeId":5,"FinishTimeMs":1744913665235,"EgressBytes":39,"TaskId":1,"Host":"ghrun-gkdwnlcfnq","ComputeTimeUs":388}],"PeakMemoryUsageBytes":196608,"CpuTimeUs":2744}],"UseLlvm":"undefined","MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"FinishedTasks":1,"Egress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39}},"Name":"KqpTableSink","Egress":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Splits":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39}},"Push":{"Chunks":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39}}}],"PhysicalStageId":0,"StageDurationUs":0,"EgressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"BaseTimeMs":1744913665235,"EgressBytes":{"Count":1,"Sum":39,"Max":39,"Min":39},"CpuTimeUs":{"Count":1,"Sum":2275,"Max":2275,"Min":2275}}}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":271423,"CpuTimeUs":268652},"ProcessCpuTimeUs":388,"TotalDurationUs":286938,"ResourcePoolId":"default","QueuedTimeUs":650},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":2.275,"A-Cpu":2.275,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Delete"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 1484, MsgBus: 63619 2025-04-17T18:14:26.211556Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347131826272671:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:26.211606Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a5/r3tmp/tmpuXB324/pdisk_1.dat 2025-04-17T18:14:26.344080Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:26.376874Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:26.376965Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-17T18:14:26.380050Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1484, node 6 2025-04-17T18:14:26.434753Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:26.434782Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:26.434790Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:26.434917Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63619 TClient is connected to server localhost:63619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:27.064352Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.078746Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:27.095445Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.176231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.386106Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.463528Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:30.065840Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347149006143615:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.065956Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.129142Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.167285Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.207453Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.244268Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.283687Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.340293Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.422862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.528585Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347149006144283:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.528661Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.530419Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347149006144288:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.534624Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:30.548376Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347149006144290:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:30.646611Z node 6 :TX_PROXY ERROR: Actor# [6:7494347149006144346:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:31.216446Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347131826272671:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:31.216578Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::DeferredEffects [GOOD] >> KqpNewEngine::Delete+UseSink >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ScalarFunctions >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_NonExistentKeys >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PrecomputeKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 16588, MsgBus: 21686 2025-04-17T18:13:58.171271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347012522646414:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.171364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015bc/r3tmp/tmp9w7m1A/pdisk_1.dat 2025-04-17T18:13:58.715006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.719275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.719363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.724274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16588, node 1 2025-04-17T18:13:58.846832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.846853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.846873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.846960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21686 TClient is connected to server localhost:21686 WaitRootIsUp 'Root'... 
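For reference, the JSON plan embedded in the unittest block above describes a single-stage DELETE against /Root/Join2 executed through KqpTableSink (one task, 3 egress rows, 39 bytes, 2275 us of compute CPU). A statement of that shape is a plain keyed delete; the actual predicate is not preserved in the log, so the sketch below uses a hypothetical key column and literal:

    -- Minimal YQL sketch. Join2 is the table named in the plan;
    -- the column name and value here are placeholders only.
    DELETE FROM Join2 WHERE Key = 101;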
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.556144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.572986Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:01.298008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347025407548818:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.298122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.784601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.926468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347025407548921:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.926532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.926574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347025407548926:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.936867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:01.948765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347025407548928:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:02.033225Z node 1 :TX_PROXY ERROR: Actor# [1:7494347029702516275:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:02.429486Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347029702516324:2357], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-17T18:14:02.429902Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjhmZDcwMmMtMTM1MmZmNDctMjAyZDZhOGItM2M1NGUxYzA=, ActorId: [1:7494347025407548800:2329], ActorState: ExecuteState, TraceId: 01js2facxde73rhasn61bs1b3w, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:02.454417Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347029702516333:2361], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:47: Error: Failed to convert 'Value': Null to String
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:14:02.455677Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjhmZDcwMmMtMTM1MmZmNDctMjAyZDZhOGItM2M1NGUxYzA=, ActorId: [1:7494347025407548800:2329], ActorState: ExecuteState, TraceId: 01js2facy82854s5x6kqes6hxq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 5874, MsgBus: 2536 2025-04-17T18:14:03.119598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347033911636064:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.119653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015bc/r3tmp/tmp7Fqh34/pdisk_1.dat 2025-04-17T18:14:03.211401Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5874, node 2 2025-04-17T18:14:03.259123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:03.259200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:03.262172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:03.270649Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:03.270670Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:03.270676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:03.270817Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2536 TClient is connected to server localhost:2536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:03.702673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
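Both compile errors in this block are reproducible outside the harness. A minimal YQL sketch, assuming a table shaped like the Struct<'Key':Uint64?,'Value':String> named in the conversion error (the table name is illustrative):

    -- Value is a non-optional (NOT NULL) data column.
    CREATE TABLE TestNotNull (
        Key Uint64,
        Value String NOT NULL,
        PRIMARY KEY (Key)
    );

    -- code 2032 ("Missing not null column in input: Value"):
    -- the NOT NULL column is simply absent from the input row.
    UPSERT INTO TestNotNull (Key) VALUES (1);

    -- code 2031: an explicit NULL cannot be converted to the
    -- non-optional String type ("Failed to convert 'Value': Null to String").
    UPSERT INTO TestNotNull (Key, Value) VALUES (2, NULL);

    -- Accepted: every NOT NULL column is initialized.
    UPSERT INTO TestNotNull (Key, Value) VALUES (3, "ok");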
2025-04-17T18:14:03.707884Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:05.833662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347042501571305:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:05.833790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:05.844679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.897601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347042501571405:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:05.897681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:05.897966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347042501571410:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... de 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:23.577235Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:23.647555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:23.719167Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347117382135250:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.719259Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.719494Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347117382135255:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:23.723484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:23.739083Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347117382135257:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:23.805141Z node 5 :TX_PROXY ERROR: Actor# [5:7494347117382135314:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:24.752983Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:24.778424Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347100202263647:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:24.778827Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24643, MsgBus: 23800 2025-04-17T18:14:26.614569Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347131843334890:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:26.614645Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015bc/r3tmp/tmpbHWWp9/pdisk_1.dat 2025-04-17T18:14:26.756993Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:26.761170Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:26.761273Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:26.762954Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24643, node 6 2025-04-17T18:14:26.814738Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:26.814773Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:26.814781Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:26.814915Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23800 TClient is connected to server localhost:23800 WaitRootIsUp 'Root'... 
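The NOT_FOUND / "Scheduled retry ... doublechecking" / "path exist, request accepts it" triple that recurs throughout this log is the workload service lazily creating the default resource pool at /Root/.metadata/workload_manager/pools/default: the first pool fetch races with the pool's own creation, and the double-checking create then finds the path already present, so everything stays at WARN severity and the tests still pass. Pre-creating a pool would silence the warnings; a sketch assuming the CREATE RESOURCE POOL DDL available in recent YDB releases (the pool name and setting values are illustrative, not taken from this log):

    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting value
        QUEUE_SIZE = 100              -- assumed setting value
    );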
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:27.361566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.376232Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.459221Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.630713Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.706427Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:30.426328Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347149023205854:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.426449Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.494803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.540943Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.580871Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.616542Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.664209Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.706104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.750860Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.838543Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347149023206515:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.838645Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.838879Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347149023206520:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.843842Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:30.860543Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347149023206522:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:30.961041Z node 6 :TX_PROXY ERROR: Actor# [6:7494347149023206580:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:31.614957Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347131843334890:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:31.615032Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:32.022060Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.450054Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913672474, txId: 281474976715674] shutting down 2025-04-17T18:14:32.649169Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913672684, txId: 281474976715676] shutting down 2025-04-17T18:14:32.864584Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913672901, txId: 281474976715678] shutting down >> KqpNewEngine::Delete-UseSink [GOOD] >> KqpNewEngine::DeleteOn+UseSink >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr >> KqpNewEngine::MultiEffectsOnSameTable [GOOD] >> KqpNewEngine::MultiUsageInnerConnection |92.0%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNewEngine::Aggregate [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] >> KqpNewEngine::AggregateTuple >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> KqpNewEngine::SequentialReadsPragma-Enabled >> KqpNewEngine::BlindWrite >> KqpNewEngine::IdxLookupExtractMembers [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD] >> KqpPg::PgAggregate-useSink [GOOD] >> KqpNewEngine::LocksMultiShard [GOOD] >> KqpReturning::ReturningWorks+QueryService >> KqpSort::PassLimit [GOOD] >> KqpNewEngine::PrimaryView >> KqpSort::OffsetPk >> KqpPg::MkqlTerminate >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpSort::TopParameter [GOOD] >> KqpNewEngine::LocksMultiShardOk >> KqpNewEngine::JoinMultiConsumer [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> KqpNewEngine::JoinSameKey >> KqpNewEngine::Update+UseSink [GOOD] >> KqpNewEngine::Update-UseSink >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_NotFullPK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] Test command err: Trying to start YDB, gRPC: 20132, MsgBus: 22732 2025-04-17T18:14:09.082804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347060206886056:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:09.082896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001589/r3tmp/tmpboMkIW/pdisk_1.dat 2025-04-17T18:14:09.467931Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:09.489938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:09.490030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20132, node 1 2025-04-17T18:14:09.522544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:09.557736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:09.557757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:09.557773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:09.557894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22732 TClient is connected to server localhost:22732 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:10.113096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:10.135274Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:12.107240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347073091788594:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:12.107348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:12.389653Z node 1 :TX_PROXY ERROR: Actor# [1:7494347073091788615:2307] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-04-17T18:14:12.409767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347073091788623:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:12.409870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:12.424238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12557, MsgBus: 6647 2025-04-17T18:14:13.198538Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347077031591269:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:13.198795Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001589/r3tmp/tmpIrgQyA/pdisk_1.dat 2025-04-17T18:14:13.342362Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:13.354944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:13.355023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:13.360429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12557, node 2 2025-04-17T18:14:13.428593Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:13.428622Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:13.428630Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:13.428760Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6647 TClient is connected to server localhost:6647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:13.886891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:13.892611Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:13.901720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
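Note the different failure layer in this block: "It is not allowed to create not null data column: Value" is a scheme-level rejection reported by TX_PROXY, not a query compile error. In this build NOT NULL appears to be accepted on key columns but refused on non-key (data) columns, and the [GOOD] verdict shows the test expects exactly that rejection. A minimal sketch of the kind of DDL being refused (table name is illustrative):

    CREATE TABLE TestCreateTable (
        Key Uint64 NOT NULL,    -- NOT NULL on a key column is accepted
        Value String NOT NULL,  -- rejected: NOT NULL on a data column
        PRIMARY KEY (Key)
    );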
2025-04-17T18:14:13.965791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.154679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.232652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:16.231352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347089916494928:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.231435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.311430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.345709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.374799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.399952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.424649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.458430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.532696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.604984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347089916495589:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.605067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.605423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347089916495594:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.609673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, a ... EMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2185, MsgBus: 12399 2025-04-17T18:14:22.436287Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347115544372361:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:22.437079Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001589/r3tmp/tmpyjMknc/pdisk_1.dat 2025-04-17T18:14:22.574387Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:22.584826Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:22.584912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:22.586830Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2185, node 4 2025-04-17T18:14:22.684439Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:22.684461Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:22.684470Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:22.684604Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12399 TClient is connected to server localhost:12399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:23.152407Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:23.158152Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:25.785970Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494347128429274892:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:25.786072Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:25.812449Z node 4 :TX_PROXY ERROR: Actor# [4:7494347128429274913:2305] txid# 281474976715658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-04-17T18:14:25.866856Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494347128429274921:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:25.866961Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:25.886349Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9530, MsgBus: 1952 2025-04-17T18:14:26.879217Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494347130373262856:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:26.881640Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001589/r3tmp/tmpzFQMM1/pdisk_1.dat 2025-04-17T18:14:27.010106Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:27.012961Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:27.013042Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:27.015071Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9530, node 5 2025-04-17T18:14:27.068672Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:27.068692Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:27.068698Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:27.068804Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1952 TClient is connected to server localhost:1952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:27.562084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:30.358471Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347147553132598:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.358659Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.368607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20935, MsgBus: 11503 2025-04-17T18:14:31.337195Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347153534394680:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:31.337269Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001589/r3tmp/tmpNlPjcd/pdisk_1.dat 2025-04-17T18:14:31.506936Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:31.520896Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:31.520994Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:31.523274Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20935, node 6 2025-04-17T18:14:31.581877Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:31.581905Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:31.581913Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:31.582040Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11503 TClient is connected to server localhost:11503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:32.123348Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:32.131045Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:35.025061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> TExecutorDb::FullScan [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup >> TExecutorDb::CoordinatorSimulation >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> KqpRanges::IsNullPartial [GOOD] >> KqpRanges::LiteralOr >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameter [GOOD] >> KqpNewEngine::PagingNoPredicateExtract [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink [GOOD] >> KqpSort::Offset Test command err: Trying to start YDB, gRPC: 26171, MsgBus: 20020 2025-04-17T18:13:59.947908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347015165624667:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:59.948440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ad/r3tmp/tmpZLbp6Q/pdisk_1.dat 2025-04-17T18:14:00.458663Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:00.463959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:00.464060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:00.467003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26171, node 1 2025-04-17T18:14:00.577043Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:00.577070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:00.577077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:00.577200Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20020 TClient is connected to server localhost:20020 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:01.125055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.145600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.164666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.296796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.469783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.564042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:03.112122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347032345495484:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.112258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.406293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.434406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.468008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.496133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.534956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.609779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.646580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.717499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347032345496148:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.717581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.717635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347032345496153:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.721231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:03.730236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347032345496155:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:03.805211Z node 1 :TX_PROXY ERROR: Actor# [1:7494347032345496210:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:04.923570Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347015165624667:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.923618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9376, MsgBus: 24348 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ad/r3tmp/tmpKbKFRh/pdisk_1.dat 2025-04-17T18:14:06.206374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:14:06.217877Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:06.232896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:06.232971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:06.236387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9376, node 2 2025-04-17T18:14:06.311044Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:06.311066Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:06.311072Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:06.311169Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24348 TClient is connected to server localhost:24348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:06.743235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:06.751003Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:06.772275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.856158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:07.029272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057 ... not found or you don't have access permissions } 2025-04-17T18:14:27.963136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:27.992900Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.026052Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.055943Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.082758Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.153371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.197900Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.296375Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347140592750463:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.296473Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.296692Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347140592750468:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.300768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:28.311717Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347140592750470:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:28.416997Z node 5 :TX_PROXY ERROR: Actor# [5:7494347140592750529:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:29.275346Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347123412878827:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:29.275408Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11300, MsgBus: 29366 2025-04-17T18:14:30.898928Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347148083469669:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:30.898981Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ad/r3tmp/tmpXlwl4z/pdisk_1.dat 2025-04-17T18:14:31.017962Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11300, node 6 2025-04-17T18:14:31.050944Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:31.051017Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:31.052441Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:31.130719Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:31.130745Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:31.130753Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:31.130876Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29366 TClient is connected to server localhost:29366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:31.674138Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:31.691901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:31.783162Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:31.973080Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:32.050812Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.685585Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347165263340630:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:34.685692Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:34.737528Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.781625Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.871745Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.919541Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.954786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.034567Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.076247Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.167132Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347169558308598:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.167262Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.167662Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347169558308603:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.173330Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:35.185524Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347169558308605:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:35.257722Z node 6 :TX_PROXY ERROR: Actor# [6:7494347169558308661:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:35.899739Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347148083469669:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.899821Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::MultipleBroadcastJoin >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink >> KqpSqlIn::KeySuffix >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD] >> KqpNewEngine::DeleteOn+UseSink [GOOD] >> KqpSqlIn::Dict [GOOD] >> KqpNewEngine::Delete+UseSink [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Decimal_Limit5 >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc >> KqpNewEngine::DuplicatedResults >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In >> KqpSqlIn::Delete >> KqpNewEngine::DeleteOn-UseSink >> KqpNewEngine::DecimalColumn >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService |92.0%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> KqpNewEngine::BlindWrite [GOOD] >> KqpNewEngine::BlindWriteParameters >> KqpNewEngine::LocksMultiShardOk [GOOD] >> KqpNewEngine::LocksNoMutations >> KqpKv::ReadRows_NotFullPK [GOOD] >> KqpKv::ReadRows_SpecificReturnValue >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortResults >> KqpRanges::UpdateWhereInFullScan-UseSink [GOOD] >> KqpRanges::ScanKeyPrefix >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] >> KqpNewEngine::PrecomputeKey [GOOD] >> KqpAgg::GroupByLimit [GOOD] >> KqpExtractPredicateLookup::OverflowLookup >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] >> KqpNewEngine::Update-UseSink [GOOD] >> KqpNewEngine::UpdateFromParams >> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD] >> KqpNewEngine::FullScanCount >> KqpSort::OffsetPk [GOOD] >> KqpSort::OffsetTopSort >> KqpNewEngine::MultiUsageInnerConnection [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] Test command err: Trying to start YDB, gRPC: 29477, MsgBus: 29607 2025-04-17T18:13:58.197444Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347010239307768:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.198062Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d0/r3tmp/tmpUsiPaY/pdisk_1.dat 2025-04-17T18:13:58.639695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.639784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.644349Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.646018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29477, node 1 2025-04-17T18:13:58.846336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.846359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.846368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.846447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29607 TClient is connected to server localhost:29607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.582489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.593529Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:13:59.599042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.739337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.878532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:13:59.965608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.313804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347023124211267:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.314276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.780970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.817614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.849607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.882506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.911668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.945199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.016162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.079669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347027419179230:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.079759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.079877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347027419179235:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.084244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.098413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347027419179237:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.195055Z node 1 :TX_PROXY ERROR: Actor# [1:7494347027419179294:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.190391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347010239307768:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.190652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17053, MsgBus: 3430 2025-04-17T18:14:03.974969Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347031512176815:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.975069Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d0/r3tmp/tmpIXzxew/pdisk_1.dat 2025-04-17T18:14:04.078465Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17053, node 2 2025-04-17T18:14:04.103705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:04.103774Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:04.104900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:04.153814Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:04.153837Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:04.153844Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:04.153948Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3430 TClient is connected to server localhost:3430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:04.597825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:04.604928Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:04.614750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:04.696618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17 ... ervice] [TPoolFetcherActor] ActorId: [6:7494347154407694773:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:31.966336Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.021445Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.063219Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.134486Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.175307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.216988Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.261405Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.334477Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.399846Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347158702662743:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.399954Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.400197Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347158702662748:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.403902Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:32.415729Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347158702662750:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:32.492150Z node 6 :TX_PROXY ERROR: Actor# [6:7494347158702662804:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:33.173865Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347141522791119:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:33.173940Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10481, MsgBus: 17549 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d0/r3tmp/tmp8xKv3N/pdisk_1.dat 2025-04-17T18:14:35.659620Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:14:35.714221Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:35.745182Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:35.745280Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:35.746927Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10481, node 7 2025-04-17T18:14:35.840424Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:35.840446Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:35.840454Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:35.840604Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17549 TClient is connected to server localhost:17549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:36.519359Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:36.535067Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:36.560608Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.662791Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:36.898596Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.998871Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.432080Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347186155028700:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.432212Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.515799Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.587328Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.629828Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.708270Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.788487Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.824864Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.896038Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.966279Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347186155029371:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.966397Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.966495Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347186155029376:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.969710Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:39.983644Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347186155029378:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:40.082562Z node 7 :TX_PROXY ERROR: Actor# [7:7494347190449996730:3557] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrecomputeKey [GOOD] Test command err: Trying to start YDB, gRPC: 20704, MsgBus: 1804 2025-04-17T18:13:58.174982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347012556695343:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.175028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ea/r3tmp/tmplnN9Hw/pdisk_1.dat 2025-04-17T18:13:58.665066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.669620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.669732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.671859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20704, node 1 2025-04-17T18:13:58.848371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.848403Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.848412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.848514Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1804 TClient is connected to server localhost:1804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.584628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:13:59.605113Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:13:59.612155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.763657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:13:59.932683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:00.021612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.531582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347025441598980:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.531696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.874258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.948551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.016237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.080402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.143759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.190645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.222565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.320381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347029736566953:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.320481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.320744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347029736566958:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.324800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.335076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347029736566960:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.408642Z node 1 :TX_PROXY ERROR: Actor# [1:7494347029736567016:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.175404Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347012556695343:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.186058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62509, MsgBus: 16502 2025-04-17T18:14:04.570349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347037177496206:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.570719Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ea/r3tmp/tmpz6CglO/pdisk_1.dat 2025-04-17T18:14:04.728359Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:04.751765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:04.751851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:04.753865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62509, node 2 2025-04-17T18:14:04.878706Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:04.878729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:04.878736Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:04.878856Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16502 TClient is connected to server localhost:16502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:05.373420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.379279Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:05.395376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.466395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17 ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.229726Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.270228Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.307465Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.384044Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.431946Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.502984Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.561766Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347157157997655:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.561883Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.562191Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347157157997660:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.566500Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:32.582539Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347157157997662:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:32.641751Z node 6 :TX_PROXY ERROR: Actor# [6:7494347157157997717:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:33.233500Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347139978126054:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:33.233559Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13019, MsgBus: 12086 2025-04-17T18:14:35.198420Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347170515620268:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.199919Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015ea/r3tmp/tmp5VGGT2/pdisk_1.dat 2025-04-17T18:14:35.360461Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13019, node 7 2025-04-17T18:14:35.464890Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:35.465008Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:35.470613Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:35.481434Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:35.481460Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:35.481470Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:35.481612Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12086 TClient is connected to server localhost:12086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:36.171081Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.177463Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:36.188326Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.264026Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.466696Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.565759Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.606817Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347187695491197:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.606963Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.672579Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.715133Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.752363Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.825316Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.867928Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.908644Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.980512Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.057631Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347191990459161:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.057761Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.058104Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347191990459166:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.062095Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:40.073138Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347191990459168:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:40.143042Z node 7 :TX_PROXY ERROR: Actor# [7:7494347191990459224:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:40.198660Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347170515620268:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.198766Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 9618, MsgBus: 30188 2025-04-17T18:13:58.171098Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347011538865246:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.173225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c3/r3tmp/tmpBh3Jrf/pdisk_1.dat 2025-04-17T18:13:58.621815Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.639332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.639421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.643347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9618, node 1 2025-04-17T18:13:58.842755Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.842792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.842804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.842903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30188 TClient is connected to server localhost:30188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:13:59.601567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.615580Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:01.199758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347024423767581:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.199856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.781529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.925397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347024423767683:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.925470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.925533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347024423767688:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.936892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:01.952877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347024423767690:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:02.053866Z node 1 :TX_PROXY ERROR: Actor# [1:7494347028718735038:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12477, MsgBus: 12588 2025-04-17T18:14:03.474734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347032122856002:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.481727Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c3/r3tmp/tmpUlaabR/pdisk_1.dat 2025-04-17T18:14:03.585159Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12477, node 2 2025-04-17T18:14:03.603567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:03.603672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:03.610728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:03.670635Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:03.670663Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:03.670669Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:03.670806Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12588 TClient is connected to server localhost:12588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:04.051492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:04.057413Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:04.065841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:04.129953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:04.280808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:04.358065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.709073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347045007759628:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:06.709147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:06.770519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.801208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.832183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.864184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.917713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.953444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.995774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:07.058412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetch ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.696314Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.730505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.763838Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.832547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.914303Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.965399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:27.055053Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347134710402634:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:27.055146Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:27.055574Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347134710402639:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:27.059902Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:27.072648Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347134710402641:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:27.130816Z node 5 :TX_PROXY ERROR: Actor# [5:7494347134710402698:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:27.951753Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347113235563768:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:27.951817Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65522, MsgBus: 12313 2025-04-17T18:14:35.988262Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347172574994565:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.994588Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c3/r3tmp/tmpgNoVs0/pdisk_1.dat 2025-04-17T18:14:36.157689Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:36.172715Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:36.172814Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:36.175423Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65522, node 6 2025-04-17T18:14:36.271788Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:36.271818Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:36.271828Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:36.271970Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12313 TClient is connected to server localhost:12313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:36.879128Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.887527Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:36.897820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.995489Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:37.187624Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.285027Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.882113Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347189754865361:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.882266Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.950910Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.002905Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.075772Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.118398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.154309Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.193334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.266609Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.339509Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347194049833327:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.339689Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.340201Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347194049833332:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.344797Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:40.356960Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347194049833335:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:40.451936Z node 6 :TX_PROXY ERROR: Actor# [6:7494347194049833391:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:40.988306Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347172574994565:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.988386Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 7500, MsgBus: 25896 2025-04-17T18:14:02.267148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347028333393652:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:02.267232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001592/r3tmp/tmpZ5x9T5/pdisk_1.dat 2025-04-17T18:14:02.634346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:02.662067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 7500, node 1 2025-04-17T18:14:02.662294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:02.663869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:02.722871Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:02.722901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:02.722909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:02.723036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25896 TClient is connected to server localhost:25896 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:03.278555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.040866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347041218296194:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:05.041208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:05.366674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.507014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:14:05.564615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.597219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2025-04-17T18:14:05.631749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.654957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.738305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480 2025-04-17T18:14:05.802997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.835033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480 2025-04-17T18:14:05.883731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.911889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480 2025-04-17T18:14:05.950267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.978684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.056123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480 2025-04-17T18:14:06.104071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.137115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480 2025-04-17T18:14:06.181313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.235072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347045513264840:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:06.235195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:06.546552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347045513265151:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:06.546639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:06.546909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347045513265156:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:06.551016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:06.567732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347045513265158:2452], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:06.636764Z node 1 :TX_PROXY ERROR: Actor# [1:7494347045513265212:3526] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 23], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:07.267250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347028333393652:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:07.267330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] Trying to start YDB, gRPC: 25643, MsgBus: 29370 2025-04-17T18:14:10.320537Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347063760885661:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:10.321146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001592/r3tmp/tmpDHGk9v/pdisk_1.dat 2025-04-17T18:14:10.448355Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:10.471051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:10.471145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:10.472854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25643, node 2 2025-04-17T18:14:10.530396Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:10.530423Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:10.530432Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:10.530547Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29370 TClient is connected to server localhost:29370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClien ... 
t not found or you don't have access permissions } 2025-04-17T18:14:32.019204Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.055859Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.127957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.163923Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.202484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.241791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.320067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.410505Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347159606593543:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.410580Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.410737Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347159606593548:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.416536Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:32.428401Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347159606593550:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:32.510545Z node 5 :TX_PROXY ERROR: Actor# [5:7494347159606593605:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:33.143001Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347142426721927:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:33.143072Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8943, MsgBus: 11033 2025-04-17T18:14:35.900655Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347169551686007:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.900756Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001592/r3tmp/tmphnN7ID/pdisk_1.dat 2025-04-17T18:14:36.081044Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:36.098576Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:36.098709Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:36.101059Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8943, node 6 2025-04-17T18:14:36.174724Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:36.174751Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:36.174760Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:36.174896Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11033 TClient is connected to server localhost:11033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:36.771136Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.792423Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.880500Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:37.081242Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:37.169781Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.798247Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347186731556965:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.798326Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.869051Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.905472Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.945886Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.019800Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.067339Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.108149Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.180856Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.275192Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347191026524933:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.275345Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.275462Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347191026524938:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.279412Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:40.290713Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347191026524940:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:40.345154Z node 6 :TX_PROXY ERROR: Actor# [6:7494347191026524993:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:40.901070Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347169551686007:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.901139Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpReturning::ReturningWorks+QueryService [GOOD] >> KqpReturning::ReturningWorks-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultiUsageInnerConnection [GOOD] Test command err: Trying to start YDB, gRPC: 5006, MsgBus: 26986 2025-04-17T18:13:58.257904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347010112990461:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.261505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c2/r3tmp/tmp7lxh4J/pdisk_1.dat 2025-04-17T18:13:58.654069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.694627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.694744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.697255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5006, node 1 2025-04-17T18:13:58.844485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.844507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.844513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.844634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26986 TClient is connected to server localhost:26986 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.625123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.649075Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:13:59.661104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.825970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:00.014036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:00.110085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.408362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347022997894013:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.412390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.780805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.816179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.851791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.915154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.944388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.978976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.014840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.079974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347027292861969:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.080024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.080143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347027292861974:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.083876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.093615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347027292861976:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.164130Z node 1 :TX_PROXY ERROR: Actor# [1:7494347027292862031:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.257026Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347010112990461:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.257130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32637, MsgBus: 10120 2025-04-17T18:14:04.337140Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347035676375400:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.337178Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c2/r3tmp/tmp4oZ9HO/pdisk_1.dat 2025-04-17T18:14:04.477871Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:04.487594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:04.487680Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:04.489853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32637, node 2 2025-04-17T18:14:04.546735Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:04.546761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:04.546767Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:04.546885Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10120 TClient is connected to server localhost:10120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:05.001877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.024729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.085175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.237378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... not found or you don't have access permissions } 2025-04-17T18:14:32.413384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.488363Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.523810Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.557055Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.591377Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.626132Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.697957Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.774998Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347159465425488:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.775094Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.775273Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347159465425493:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.779858Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:32.796542Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347159465425495:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:32.877993Z node 6 :TX_PROXY ERROR: Actor# [6:7494347159465425552:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:33.505480Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347142285553855:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:33.505573Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24537, MsgBus: 62918 2025-04-17T18:14:35.472593Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347172723371459:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.476241Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c2/r3tmp/tmpg2OLDq/pdisk_1.dat 2025-04-17T18:14:35.672640Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:35.701597Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:35.701708Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:35.704385Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24537, node 7 2025-04-17T18:14:35.786513Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:35.786540Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:35.786550Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:35.786703Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62918 TClient is connected to server localhost:62918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:14:36.538339Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.556461Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.645342Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.860604Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.945154Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.574627Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347189903242419:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.574766Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.689186Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.733445Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.774127Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.821809Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.866080Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.939513Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.013853Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.125151Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347194198210387:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.125255Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.125406Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347194198210392:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.129473Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:40.140824Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347194198210394:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:40.198129Z node 7 :TX_PROXY ERROR: Actor# [7:7494347194198210448:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:40.472998Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347172723371459:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.473083Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::LiteralOr [GOOD] >> KqpRanges::LiteralOrCompisite >> KqpRanges::WhereInSubquery >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink >> KqpNewEngine::JoinSameKey [GOOD] >> KqpRanges::UpdateWhereInNoFullScan+UseSink >> KqpRanges::IsNotNullInJsonValue >> KqpNewEngine::DeleteOn-UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin+UseSink >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpKv::ReadRows_PgValue >> KqpRanges::IsNull >> KqpNewEngine::PrimaryView [GOOD] >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNewEngine::EmptyMapWithBroadcast >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> KqpSqlIn::TableSource >> KqpNewEngine::DecimalColumn [GOOD] >> KqpNewEngine::DecimalColumn35 >> KqpExtractPredicateLookup::SimpleRange [GOOD] >> KqpExtractPredicateLookup::PointJoin >> KqpNewEngine::BlindWriteParameters [GOOD] >> KqpNewEngine::BlindWriteListParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: Trying to start YDB, gRPC: 19429, MsgBus: 16230 2025-04-17T18:13:58.177813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347013181591920:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.177884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015bb/r3tmp/tmpTviGe5/pdisk_1.dat 2025-04-17T18:13:58.623673Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.628025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.628143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.639111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19429, node 1 2025-04-17T18:13:58.842906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.842933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.842941Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.843072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:16230 TClient is connected to server localhost:16230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.585904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.620843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.785427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.929131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:00.020443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.318940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347026066495564:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.319035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.783349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.820171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.852250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.879068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.913147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.950744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.990442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.077193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347030361463524:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.077298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.077346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347030361463530:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.081298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.094454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347030361463532:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.194866Z node 1 :TX_PROXY ERROR: Actor# [1:7494347030361463591:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.184310Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347013181591920:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.184379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17896, MsgBus: 3224 2025-04-17T18:14:04.825164Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347035597588930:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.825219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015bb/r3tmp/tmpEytcIM/pdisk_1.dat 2025-04-17T18:14:04.930200Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17896, node 2 2025-04-17T18:14:04.967133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:04.967192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:04.969879Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:04.997859Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:04.997879Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:04.997885Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:04.997984Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3224 TClient is connected to server localhost:3224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:05.487479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.495120Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:05.514111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:05.595601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.768519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.739998Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.796082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.841286Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.884317Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.933747Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:34.978493Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.088168Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347172359655488:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.088286Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.088652Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347172359655493:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.093023Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:35.108056Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347172359655495:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:35.191947Z node 6 :TX_PROXY ERROR: Actor# [6:7494347172359655552:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:35.751589Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347150884816568:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.751667Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32071, MsgBus: 15268 2025-04-17T18:14:38.230309Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347183677404627:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:38.230552Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015bb/r3tmp/tmphYudOd/pdisk_1.dat 2025-04-17T18:14:38.379117Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:38.395446Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:38.395543Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:38.397035Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32071, node 7 2025-04-17T18:14:38.478783Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:38.478808Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:38.478818Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:38.479208Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15268 TClient is connected to server localhost:15268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:39.056517Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.077989Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.166106Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:39.424710Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.515441Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.561741Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347200857275566:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:42.561835Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:42.631661Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.678028Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.719860Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.764632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.809650Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.849115Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.886629Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.984309Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347200857276226:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:42.984404Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:42.984853Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347200857276231:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:42.989852Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:43.006619Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T18:14:43.007267Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347200857276233:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:43.096236Z node 7 :TX_PROXY ERROR: Actor# [7:7494347205152243587:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:43.229444Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347183677404627:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:43.229512Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSort::Offset [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD] >> KqpMergeCn::SortBy_Int32 >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex >> YdbTableSplit::RenameTablesAndSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: Trying to start YDB, gRPC: 29819, MsgBus: 7596 2025-04-17T18:13:58.190319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347010287015888:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.190819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015fd/r3tmp/tmpZiCVBZ/pdisk_1.dat 2025-04-17T18:13:58.612569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.625926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.626048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.640028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29819, node 1 2025-04-17T18:13:58.850756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.850829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.850842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.850945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7596 TClient is connected to server localhost:7596 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.567020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.580863Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:13:59.593885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.768944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.941033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:00.018331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.420787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347023171919390:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.420894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.780951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.815660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.843718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.910314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.943940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.019179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.059779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.149498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347027466887355:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.149596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.149860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347027466887360:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.153376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.164681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347027466887362:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.247904Z node 1 :TX_PROXY ERROR: Actor# [1:7494347027466887419:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.188918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347010287015888:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.189018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5238, MsgBus: 12806 2025-04-17T18:14:04.362725Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347035664090126:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.362765Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015fd/r3tmp/tmpJJipu7/pdisk_1.dat 2025-04-17T18:14:04.491682Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:04.507813Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:04.507896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:04.508758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5238, node 2 2025-04-17T18:14:04.558501Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:04.558530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:04.558539Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:04.558673Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12806 TClient is connected to server localhost:12806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:04.997525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.003004Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:05.011135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.066932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T1 ... 4976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:33.569435Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:33.612394Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:33.687480Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:33.768167Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:33.839021Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347162071911444:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:33.839146Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:33.839591Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347162071911449:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:33.843962Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:33.859872Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347162071911451:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:33.953512Z node 6 :TX_PROXY ERROR: Actor# [6:7494347162071911509:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:34.302359Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347144892039807:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:34.302448Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14633, MsgBus: 19863 2025-04-17T18:14:37.097255Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347179646765517:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:37.097321Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015fd/r3tmp/tmpUUodO6/pdisk_1.dat 2025-04-17T18:14:37.260809Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:37.291510Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:37.291707Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:37.293292Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14633, node 7 2025-04-17T18:14:37.372798Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:37.372819Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:37.372830Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:37.372992Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19863 TClient is connected to server localhost:19863 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:38.016558Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.028398Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.182138Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.435808Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.540291Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.237232Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347196826636460:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.237326Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.293462Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.331687Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.372055Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.414977Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.452441Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.519403Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.601402Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.685937Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347196826637124:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.686050Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.686353Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347196826637129:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.690383Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:41.701948Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347196826637131:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:41.794013Z node 7 :TX_PROXY ERROR: Actor# [7:7494347196826637187:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:42.097521Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347179646765517:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:42.097623Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:42.977262Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.072561Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.183239Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 20261, MsgBus: 1703 2025-04-17T18:14:08.659051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347056345380956:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:08.659166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158c/r3tmp/tmpPmMKZU/pdisk_1.dat 2025-04-17T18:14:09.040385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:09.045830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:09.045927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:09.059067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20261, node 1 2025-04-17T18:14:09.178769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:09.178790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:09.178796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:09.178903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1703 TClient is connected to server localhost:1703 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:09.718849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:11.663127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347069230283292:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.663241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.761096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:11.915802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347069230283397:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.915924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.916255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347069230283402:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:11.920371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:11.933727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347069230283404:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:12.003607Z node 1 :TX_PROXY ERROR: Actor# [1:7494347069230283455:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:12.211988Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494347073525250806:2328], TxId: 281474976710662, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2fapd6d9mvn1hs4wa4gnms. SessionId : ydb://session/3?node_id=1&id=MjJlZTZkOTMtOThmYzg1ZDgtODk3ZTA0NDMtOTg1ODg3M2M=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-04-17T18:14:12.212679Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjJlZTZkOTMtOThmYzg1ZDgtODk3ZTA0NDMtOTg1ODg3M2M=, ActorId: [1:7494347069230283276:2328], ActorState: ExecuteState, TraceId: 01js2fapd6d9mvn1hs4wa4gnms, Create QueryResponse for error on request, msg: 2025-04-17T18:14:12.292006Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494347073525250822:2328], TxId: 281474976710663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjJlZTZkOTMtOThmYzg1ZDgtODk3ZTA0NDMtOTg1ODg3M2M=. TraceId : 01js2fapg01tg8er2dgvts88zd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-04-17T18:14:12.292431Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjJlZTZkOTMtOThmYzg1ZDgtODk3ZTA0NDMtOTg1ODg3M2M=, ActorId: [1:7494347069230283276:2328], ActorState: ExecuteState, TraceId: 01js2fapg01tg8er2dgvts88zd, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 1225, MsgBus: 19647 2025-04-17T18:14:13.164431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347076582124137:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:13.164510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158c/r3tmp/tmpP1dnYu/pdisk_1.dat 2025-04-17T18:14:13.316686Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1225, node 2 2025-04-17T18:14:13.351834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:13.351908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:13.355589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:13.430702Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:13.430727Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:13.430733Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:13.430828Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19647 TClient is connected to server localhost:19647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:13.901171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:13.906478Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:16.237019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347089467026675:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.237115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.252170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.333456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347089467026778:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.333542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.333735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347089467026783:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.337146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part p ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.276553Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.365008Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.456494Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347170160828965:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.456613Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.456619Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347170160828970:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.460245Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:35.477218Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347170160828972:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:35.570536Z node 5 :TX_PROXY ERROR: Actor# [5:7494347170160829027:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:36.178264Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347152980957360:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:36.178332Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:36.747421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.899842Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9935, MsgBus: 22982 2025-04-17T18:14:38.802288Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347184537638426:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:38.802376Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158c/r3tmp/tmpe4uDbk/pdisk_1.dat 2025-04-17T18:14:38.954071Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9935, node 6 2025-04-17T18:14:38.976755Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:38.976852Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:38.978968Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:39.034049Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:39.034077Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:39.034089Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:39.034264Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22982 TClient is connected to server localhost:22982 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:39.588632Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.596820Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:39.611026Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.689041Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.866296Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.975600Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.666935Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347201717509394:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:42.667097Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:42.733494Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.772293Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.811930Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.849927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.893841Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.938555Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.979431Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.078058Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347206012477355:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.078167Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.078473Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347206012477360:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.082377Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:43.092846Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347206012477362:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:43.177987Z node 6 :TX_PROXY ERROR: Actor# [6:7494347206012477419:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:43.803910Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347184537638426:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:43.803988Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:44.358154Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.532156Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksNoMutationsSharded >> KqpNewEngine::AsyncIndexUpdate [GOOD] >> KqpNewEngine::AutoChooseIndex >> KqpNewEngine::MultipleBroadcastJoin [GOOD] >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] >> KqpSqlIn::KeySuffix [GOOD] >> KqpSqlIn::KeySuffix_OnlyTail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::Offset [GOOD] Test command err: Trying to start YDB, gRPC: 16225, MsgBus: 17623 2025-04-17T18:13:58.186977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347009580872948:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.187040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c8/r3tmp/tmpY5syeL/pdisk_1.dat 2025-04-17T18:13:58.601374Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.624520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.624649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.640048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16225, node 1 2025-04-17T18:13:58.843671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.843695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.843702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.843806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17623 TClient is connected to server localhost:17623 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.546334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.573564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:13:59.588780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.737167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.897037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.984358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.262019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347022465776541:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.262134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.782781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.815412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.844240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.874385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.943537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.018221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.054559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.139317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347026760744508:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.139471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.139657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347026760744513:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.143180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.154518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347026760744515:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.245654Z node 1 :TX_PROXY ERROR: Actor# [1:7494347026760744577:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.187765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347009580872948:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.187857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:03.302261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.335620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.373349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23478, MsgBus: 32752 2025-04-17T18:14:07.096208Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347049692023517:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:07.096257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c8/r3tmp/tmpE2En7S/pdisk_1.dat 2025-04-17T18:14:07.203203Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23478, node 2 2025-04-17T18:14:07.270825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:07.270919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:07.272724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:07.297766Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:07.297791Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:07.297800Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:07.297905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32752 TClient is connected to server localhost:32752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:14:07.721790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0 ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.143959Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.197205Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.240776Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.288137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.341916Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.390385Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.481384Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347174067317186:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.481509Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.481831Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347174067317192:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.485677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:36.501705Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347174067317194:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:36.601936Z node 5 :TX_PROXY ERROR: Actor# [5:7494347174067317251:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:37.197223Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347156887445566:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:37.197306Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1213, MsgBus: 29790 2025-04-17T18:14:40.120163Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347192598110130:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.120211Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015c8/r3tmp/tmpgBPONi/pdisk_1.dat 2025-04-17T18:14:40.240188Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:40.269738Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:40.269837Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:40.271863Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1213, node 6 2025-04-17T18:14:40.336772Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:40.336800Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:40.336810Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:40.336958Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29790 TClient is connected to server localhost:29790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
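The block above is the benign first-use bootstrap that repeats for every freshly started node: TPoolFetcherActor reports the default resource pool as NOT_FOUND, TPoolCreatorActor creates it, a concurrent creator loses the race and gets "path exist, request accepts it", and the "doublechecking" retry confirms the pool now exists. A minimal sketch of that create-if-absent pattern, using hypothetical FetchPool/CreatePool stand-ins rather than YDB's actual actors:

```cpp
// Sketch of the create-if-absent bootstrap visible in the log above:
// a fetch that returns NOT_FOUND triggers a create, and a create that
// loses the race ("path exist, request accepts it") is treated as
// success after a re-check. All names here are hypothetical stand-ins,
// not YDB's internal API.
#include <functional>
#include <iostream>
#include <string>

enum class Status { Ok, NotFound, AlreadyExists, Error };

struct PoolClient {
    std::function<Status(const std::string&)> fetch;   // cf. TPoolFetcherActor
    std::function<Status(const std::string&)> create;  // cf. TPoolCreatorActor
};

// Returns Ok once the pool is known to exist, re-checking ("doublechecking")
// a bounded number of times after a lost creation race.
Status EnsurePool(PoolClient& client, const std::string& id, int maxRetries = 3) {
    for (int attempt = 0; attempt <= maxRetries; ++attempt) {
        Status s = client.fetch(id);
        if (s == Status::Ok) return Status::Ok;   // pool already there
        if (s != Status::NotFound) return s;      // real error, propagate
        s = client.create(id);
        if (s == Status::Ok) return Status::Ok;
        if (s == Status::AlreadyExists) continue; // lost the race: re-fetch
        return s;
    }
    return Status::Error;
}

int main() {
    bool exists = false;
    PoolClient c{
        [&](const std::string&) { return exists ? Status::Ok : Status::NotFound; },
        [&](const std::string&) { exists = true; return Status::AlreadyExists; },
    };
    std::cout << (EnsurePool(c, "default") == Status::Ok ? "ok\n" : "fail\n");
}
```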
2025-04-17T18:14:40.904415Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:40.919225Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:40.929030Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.025765Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.274020Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.366286Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:44.323276Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347209777981099:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.323382Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.396118Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.437683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.478002Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.516057Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.553545Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.644263Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.715201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.795119Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347209777981762:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.795221Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.795419Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347209777981767:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.799850Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:44.814555Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347209777981769:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:44.895405Z node 6 :TX_PROXY ERROR: Actor# [6:7494347209777981825:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:45.120923Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347192598110130:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.120988Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::ScanKeyPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-04-17T18:13:25.251271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346869894980962:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:25.255052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001417/r3tmp/tmpjcPFD9/pdisk_1.dat 2025-04-17T18:13:25.632075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:25.635329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:25.635455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13159, node 1 2025-04-17T18:13:25.654982Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:25.655049Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:13:25.667221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:25.768461Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:25.768481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:25.768486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:25.768588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32462 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:26.140996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:27.808884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346878484916603:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:27.809042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.057452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.058687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:13:28.058721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.060431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-17T18:13:28.128082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913608172, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:13:28.180580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-17T18:13:28.195623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346882779884131:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.195710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:28.214665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.215050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:13:28.215068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:13:28.219489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2025-04-17T18:13:28.232691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913608277, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:13:28.240264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2025-04-17T18:13:30.250555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494346869894980962:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:30.250629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-04-17T18:13:38.208573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2025-04-17T18:13:38.211034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2025-04-17T18:13:38.211081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:13:38.306463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-17T18:13:38.313863Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:13:38.315309Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found partitions 1 2025-04-17T18:13:40.336175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 
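The TSplitMerge record above merges two adjacent half-open key ranges: the KeyRangeEnd of tablet 72075186224037888 equals the KeyRangeBegin of 72075186224037889, and the destination range runs from the first range's begin to "" (unbounded), after which "partitions 2" becomes "partitions 1". A simplified model of that adjacency check and merge, not the schemeshard's actual data structures:

```cpp
// Two adjacent half-open ranges [a.begin, a.end) and [b.begin, b.end) over
// serialized keys merge into [a.begin, b.end) only when a.end == b.begin.
// nullopt models the empty KeyRangeEnd: "" (+infinity) seen in the log.
#include <cassert>
#include <iostream>
#include <optional>
#include <string>

struct KeyRange {
    std::string begin;               // serialized key
    std::optional<std::string> end;  // nullopt = +infinity
};

std::optional<KeyRange> MergeAdjacent(const KeyRange& a, const KeyRange& b) {
    if (!a.end || *a.end != b.begin)
        return std::nullopt;         // gap or overlap: refuse the merge
    return KeyRange{a.begin, b.end};
}

int main() {
    // Placeholder keys standing in for the serialized tuples in the log.
    KeyRange shard1{"a", std::string("m")};  // [a, m)
    KeyRange shard2{"m", std::nullopt};      // [m, +inf)
    auto merged = MergeAdjacent(shard1, shard2);
    assert(merged && !merged->end);          // one range covering the table
    std::cout << "merged: [" << merged->begin << ", +inf)\n";
}
```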
2025-04-17T18:13:40.336446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:13:40.339499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2025-04-17T18:13:40.359509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744914100398, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:13:40.366651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710660, done: 0, blocked: 1 2025-04-17T18:13:40.373818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-17T18:13:40.377789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:13:40.607218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:13:40.607237Z node 1 :IMPORT WARN: Table profiles were not loaded Fast forward 1m 2025-04-17T18:13:43.259661Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037890 FollowerId 0, TableInfos size = 1 2025-04-17T18:13:43.260847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.7913 2025-04-17T18:13:43.361301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:13:43.361421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 0 row count 0 2025-04-17T18:13:43.361479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:13:43.361532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037890 2025-04-17T18:13:43.361780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 partitions 1 Fast forward 1m 2025-04-17T18:13:48.259890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.1979 2025-04-17T18:13:48.359939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:13:48.360024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-04-17T18:13:48.360065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 m ... 
4-17T18:14:46.970517Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710664 datashard 72075186224037891 state PreOffline 2025-04-17T18:14:46.970542Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-04-17T18:14:46.970831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-17T18:14:46.970977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-04-17T18:14:46.970991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-17T18:14:46.971008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-04-17T18:14:46.971020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-17T18:14:46.971033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710664, ready parts: 1/1, is published: true 2025-04-17T18:14:46.971074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7494347217787337012:2741] message: TxId: 281474976710664 2025-04-17T18:14:46.971102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-17T18:14:46.971118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2025-04-17T18:14:46.971134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710664:0 2025-04-17T18:14:46.971232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:14:46.971442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-04-17T18:14:46.971484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-04-17T18:14:46.972806Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:14:46.972827Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037890:1:120:1:12288:10944:0] ] return ack processed 2025-04-17T18:14:46.972870Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:14:46.972959Z node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-04-17T18:14:46.973094Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7494347217787337067:2745], serverId# [1:7494347217787337070:4703], sessionId# [0:0:0] 2025-04-17T18:14:46.973223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494346925729557513 RawX2: 4503603922340186 } TabletId: 72075186224037890 State: 4 2025-04-17T18:14:46.973258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:14:46.975433Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 
0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:14:46.975498Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037890:1:120:1:12288:10944:0] ] return ack processed 2025-04-17T18:14:46.975523Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:14:46.975535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:14:46.975579Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-17T18:14:46.976986Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:14:46.977548Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-17T18:14:46.977577Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7494347217787337079:2747], serverId# [1:7494347217787337087:4718], sessionId# [0:0:0] 2025-04-17T18:14:46.978493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:14:46.978681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:14:46.980386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:14:46.980409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:14:46.982087Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:14:46.982121Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:14:46.982254Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:14:46.982287Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7494346934319492475:3256], serverId# [1:7494346934319492476:3257], sessionId# [0:0:0] 2025-04-17T18:14:46.982507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347204902434699 RawX2: 4503603922340500 } TabletId: 72075186224037892 State: 4 2025-04-17T18:14:46.982573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:14:46.982936Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T18:14:46.983948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:14:46.984544Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:14:46.984632Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-17T18:14:46.985604Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 
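The sequence above retires the merge-source datashards: each one drains, reports "in PreOffline state HasSharedBobs: 0 ... OutReadSets count: 0 ... ChangesQueue size: 0", initiates the switch to Offline, and the schemeshard then frees and deletes the shardIdx. A toy sketch of that guard condition, illustrative only and not YDB's implementation:

```cpp
// A datashard in PreOffline may switch to Offline only once nothing is
// left to drain: no shared blobs, no outstanding read sets, an empty
// change queue. Field names mirror the log wording.
#include <iostream>

enum class ShardState { Ready, PreOffline, Offline };

struct DataShard {
    ShardState state = ShardState::Ready;
    int outReadSets = 0;
    int changesQueueSize = 0;
    bool hasSharedBlobs = false;

    // "Initiating switch from PreOffline to Offline state"
    bool TrySwitchToOffline() {
        if (state != ShardState::PreOffline) return false;
        if (hasSharedBlobs || outReadSets > 0 || changesQueueSize > 0)
            return false;                    // still draining
        state = ShardState::Offline;         // next: report to schemeshard
        return true;
    }
};

int main() {
    DataShard shard;
    shard.state = ShardState::PreOffline;
    shard.outReadSets = 1;
    std::cout << shard.TrySwitchToOffline() << "\n";  // 0: read set pending
    shard.outReadSets = 0;
    std::cout << shard.TrySwitchToOffline() << "\n";  // 1: now Offline
}
```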
2025-04-17T18:14:46.987197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347204902434700 RawX2: 4503603922340501 } TabletId: 72075186224037891 State: 4 2025-04-17T18:14:46.987242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:14:46.987413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347204902434700 RawX2: 4503603922340501 } TabletId: 72075186224037891 State: 4 2025-04-17T18:14:46.987434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:14:46.989518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:14:46.989607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:14:46.990008Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:14:46.990047Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:14:46.995362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-17T18:14:46.995587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:14:46.996325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:14:46.996475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:14:46.996606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:14:46.996752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:14:46.996780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:14:46.996826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T18:14:46.998407Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-17T18:14:46.998462Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-17T18:14:46.998501Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [1:7494347217787336989:4635], serverId# [1:7494347217787336990:4636], sessionId# [0:0:0] 2025-04-17T18:14:46.999013Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-17T18:14:46.999084Z node 1 :TX_DATASHARD INFO: Change 
sender killed: at tablet: 72075186224037892 2025-04-17T18:14:46.999336Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-17T18:14:46.999378Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-17T18:14:47.000707Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:14:47.000758Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-17T18:14:47.001445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-17T18:14:47.001464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-17T18:14:47.002760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:14:47.002801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:14:47.002836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:14:47.002861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UpsertEmptyInput >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] |92.1%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSqlIn::Delete [GOOD] |92.1%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD] Test command err: Trying to start YDB, gRPC: 19371, MsgBus: 15094 2025-04-17T18:14:01.358601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347022457344019:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:01.358664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00159b/r3tmp/tmp4aAxgM/pdisk_1.dat 2025-04-17T18:14:01.755223Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:01.775053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:01.775159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:01.777131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19371, node 1 2025-04-17T18:14:01.865047Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:01.865073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:01.865081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:01.865210Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15094 TClient is connected to server localhost:15094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:02.370861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.389942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:02.398435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:02.584900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.738069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.804217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:04.285338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347035342247664:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.285486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.611788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.654577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.691413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.762435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.793514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.827799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.862975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.961160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347035342248329:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.961257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.961571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347035342248334:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.964495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:04.973930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347035342248336:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:05.035762Z node 1 :TX_PROXY ERROR: Actor# [1:7494347039637215689:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:06.280103Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7494347043932183354:2512] TxId: 281474976710672. Ctx: { TraceId: 01js2fagdr6h75dtebw3s40f1m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFhYWRjZGEtZDNhZDc1MGEtMmQ4YjYyYWItMjI5ZDQ3YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 3, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T18:14:06.283812Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01js2fagdr6h75dtebw3s40f1m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFhYWRjZGEtZDNhZDc1MGEtMmQ4YjYyYWItMjI5ZDQ3YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494347043932183362:2522] 2025-04-17T18:14:06.283922Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01js2fagdr6h75dtebw3s40f1m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFhYWRjZGEtZDNhZDc1MGEtMmQ4YjYyYWItMjI5ZDQ3YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494347043932183363:2523] 2025-04-17T18:14:06.284099Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01js2fagdr6h75dtebw3s40f1m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFhYWRjZGEtZDNhZDc1MGEtMmQ4YjYyYWItMjI5ZDQ3YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [1:7494347043932183364:2524] Trying to start YDB, gRPC: 8715, MsgBus: 2411 2025-04-17T18:14:07.005987Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347044160017536:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00159b/r3tmp/tmpPd2FI5/pdisk_1.dat 2025-04-17T18:14:07.072792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:14:07.143112Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:07.166915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:07.166994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:07.168159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8715, node 2 2025-04-17T18:14:07.301027Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:07.301050Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:07.301057Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:07.301152Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2411 TClient is connected to server localhost:2411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72 ... do unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.966008Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.061255Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.115593Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.187935Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347175236052815:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.188026Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347175236052820:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.188032Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.192605Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:36.206357Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347175236052822:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:36.308909Z node 6 :TX_PROXY ERROR: Actor# [6:7494347175236052879:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:36.772582Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347153761213931:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:36.772653Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21730, MsgBus: 13983 2025-04-17T18:14:39.562334Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347189010372096:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:39.562410Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00159b/r3tmp/tmpMiDrCH/pdisk_1.dat 2025-04-17T18:14:39.737038Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:39.752904Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:39.753008Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:39.755424Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21730, node 7 2025-04-17T18:14:39.810764Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:39.810791Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:39.810801Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:39.810958Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13983 TClient is connected to server localhost:13983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
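Every node the tests start replays the same bootstrap noise: NET_CLASSIFIER configuration warnings, METADATA_PROVIDER timeouts, HIVE VolatileState transitions, and the NOT_FOUND resource-pool warnings, which makes genuine failures easy to miss in output this size. A small triage filter that tallies WARN/ERROR records per component, assuming only the line shape visible in this log ("<timestamp> node <N> :<COMPONENT> <LEVEL>: <message>"):

```cpp
// Reads the log from stdin and counts WARN/ERROR records per component,
// e.g. ":FLAT_TX_SCHEMESHARD WARN:" or ":TX_PROXY ERROR:". Scanning each
// line for multiple matches tolerates the hard-wrapped lines above, where
// several records share one physical line.
#include <iostream>
#include <map>
#include <regex>
#include <string>

int main() {
    std::map<std::string, int> warnings, errors;
    std::regex rec(R"(:([A-Z_]+) (WARN|ERROR):)");
    std::string line;
    while (std::getline(std::cin, line)) {
        for (std::sregex_iterator it(line.begin(), line.end(), rec), end;
             it != end; ++it) {
            const std::string comp = (*it)[1], level = (*it)[2];
            (level == "WARN" ? warnings : errors)[comp]++;
        }
    }
    for (const auto& [comp, n] : errors)
        std::cout << "ERROR " << comp << ": " << n << "\n";
    for (const auto& [comp, n] : warnings)
        std::cout << "WARN  " << comp << ": " << n << "\n";
}
```

Run against this section, the tallies would show KQP_WORKLOAD_SERVICE and FLAT_TX_SCHEMESHARD warnings dominating, with the METADATA_PROVIDER and TX_PROXY errors being the expected per-node bootstrap entries rather than test failures.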
2025-04-17T18:14:40.427573Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:40.436413Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:40.441012Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:40.528255Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:40.786766Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:40.886130Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:43.811891Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347206190243042:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.812011Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.883372Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.920478Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.959680Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.002773Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.043834Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.135206Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.174119Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.269061Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347210485211004:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.269178Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.269476Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347210485211009:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.274376Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:44.290059Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347210485211011:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:44.345808Z node 7 :TX_PROXY ERROR: Actor# [7:7494347210485211068:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:44.562698Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347189010372096:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:44.562779Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:45.567419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.611627Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.715857Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 [] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] Test command err: Trying to start YDB, gRPC: 61912, MsgBus: 21065 2025-04-17T18:14:00.015831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347014202091763:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:00.017483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b0/r3tmp/tmp46Dmbr/pdisk_1.dat 2025-04-17T18:14:00.425334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:00.429420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:00.429513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:00.434010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61912, node 1 2025-04-17T18:14:00.509539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:00.509591Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:00.509601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:00.509724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21065 TClient is connected to server localhost:21065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:01.056567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.078863Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:01.100464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.245332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.414737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.482128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:03.183468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347031381962588:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.183585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.512234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.546781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.574705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.605067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.638385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.675743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.716322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.793074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347031381963249:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.793155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.793262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347031381963254:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.796464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:03.806880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347031381963256:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:03.895337Z node 1 :TX_PROXY ERROR: Actor# [1:7494347031381963313:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:04.810489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.000850Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347014202091763:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:05.000900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:05.631600Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913645650, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 29828, MsgBus: 28405 2025-04-17T18:14:06.455919Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347043991149678:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:06.455955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b0/r3tmp/tmpzAJL9N/pdisk_1.dat 2025-04-17T18:14:06.575157Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:06.589309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:06.589386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:06.590969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29828, node 2 2025-04-17T18:14:06.630288Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:06.630317Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:06.630326Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:06.630457Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28405 TClient is connected to server localhost:28405 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:07.098055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:07.107609Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:07.120902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part p ... T18:14:37.454948Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.506231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.550043Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.661203Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347177222456820:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.661308Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.661605Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347177222456825:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.665601Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:37.682684Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347177222456827:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:37.758464Z node 6 :TX_PROXY ERROR: Actor# [6:7494347177222456884:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:38.286419Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347160042585206:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:38.286496Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:39.042389Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.015525Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913680048, txId: 281474976715674] shutting down Trying to start YDB, gRPC: 19151, MsgBus: 9288 2025-04-17T18:14:40.944651Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347191961087320:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.944740Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b0/r3tmp/tmpJl9DNN/pdisk_1.dat 2025-04-17T18:14:41.156215Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:41.194799Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:41.194910Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:41.196433Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19151, node 7 2025-04-17T18:14:41.269001Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:41.269024Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:41.269032Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:41.269156Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9288 TClient is connected to server localhost:9288 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:41.971547Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.000154Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:42.014455Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.088352Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.256589Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.362823Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.199290Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347213435925581:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.199397Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.244163Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.318382Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.353811Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.388782Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.419615Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.490869Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.528417Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.603947Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347213435926246:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.604058Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.607438Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347213435926251:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.611592Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:45.623126Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347213435926253:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:45.713983Z node 7 :TX_PROXY ERROR: Actor# [7:7494347213435926310:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:45.945108Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347191961087320:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.945194Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:46.922161Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.023520Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913688056, txId: 281474976715674] shutting down >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 1541, MsgBus: 18751 2025-04-17T18:14:05.017722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347042930699284:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:05.017817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158e/r3tmp/tmpjmkwTG/pdisk_1.dat 2025-04-17T18:14:05.393173Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:05.429359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:05.429451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1541, node 1 2025-04-17T18:14:05.434738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:05.511510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:05.511532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:05.511537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:05.511633Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18751 TClient is connected to server localhost:18751 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:06.018818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:06.045219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.181082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.360959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.432073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:08.103061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347055815602939:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.103143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.395380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.424618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.452659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.486287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.516975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.549493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.622622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.712875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347055815603606:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.712956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.713344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347055815603611:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.716755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:08.725371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347055815603613:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:08.828482Z node 1 :TX_PROXY ERROR: Actor# [1:7494347055815603671:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:10.023180Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347042930699284:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:10.023239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1156, MsgBus: 26191 2025-04-17T18:14:11.047619Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347068952433527:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:11.047660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158e/r3tmp/tmpue6cFz/pdisk_1.dat 2025-04-17T18:14:11.132104Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1156, node 2 2025-04-17T18:14:11.167200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:11.167296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:11.169086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:11.196369Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:11.196388Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:11.196395Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:11.196487Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26191 TClient is connected to server localhost:26191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:11.566759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:11.585604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:11.636074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:11.796018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:11.868892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propos ... MESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.953818Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347180739003758:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.953944Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.954012Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347180739003763:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.957817Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:37.967772Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347180739003765:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:38.054054Z node 5 :TX_PROXY ERROR: Actor# [5:7494347185033971118:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:38.560571Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347163559132154:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:38.560632Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:39.634558Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.929573Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.126334Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.367698Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.777319Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 21582, MsgBus: 22707 2025-04-17T18:14:42.601101Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347201642886959:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:42.601139Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158e/r3tmp/tmpg81mR5/pdisk_1.dat 2025-04-17T18:14:42.728469Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:42.747750Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:42.747843Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:42.749333Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21582, node 6 2025-04-17T18:14:42.832805Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:42.832830Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:42.832843Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:42.832982Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22707 TClient is connected to server localhost:22707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:43.370169Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:43.387470Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:43.474655Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:43.686135Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:43.763108Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:46.176693Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347218822757915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.176832Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.245034Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.285002Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.324693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.362331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.404313Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.455947Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.507579Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.605958Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347218822758576:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.606088Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.606576Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347218822758581:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.613559Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:46.630991Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347218822758583:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:46.709523Z node 6 :TX_PROXY ERROR: Actor# [6:7494347218822758642:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:47.601244Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347201642886959:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:47.601302Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD] >> KqpSqlIn::PhasesCount >> KqpNewEngine::ScalarFunctions [GOOD] >> KqpNewEngine::ScalarMultiUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16130, MsgBus: 4797 2025-04-17T18:13:58.185852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347011012144164:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.185889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d2/r3tmp/tmpUzB8Hx/pdisk_1.dat 2025-04-17T18:13:58.610340Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.638802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.638938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.647588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16130, node 1 2025-04-17T18:13:58.842754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.842779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.842789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.842910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4797 TClient is connected to server localhost:4797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.547879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.586967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.744991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:13:59.883401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:13:59.967868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.538347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347023897047754:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.538456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.900025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.942437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.972800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.004705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.074202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.143103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.209816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.288500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347028192015726:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.288564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.288628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347028192015731:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.292537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.301513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347028192015733:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.371887Z node 1 :TX_PROXY ERROR: Actor# [1:7494347028192015788:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.186676Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347011012144164:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.186747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:03.290862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.402594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.435719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22871, MsgBus: 7350 2025-04-17T18:14:06.256884Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347044932224616:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:06.259256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d2/r3tmp/tmpuzX1bI/pdisk_1.dat 2025-04-17T18:14:06.379894Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:06.390777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:06.390853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:06.392311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22871, node 2 2025-04-17T18:14:06.435298Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:06.435324Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:06.435330Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:06.435445Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7350 TClient is connected to server localhost:7350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:06.841579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.847301Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at ... 4976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.270448Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.345990Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.392587Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.451387Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.533104Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.625034Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347178243278666:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.625161Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.625679Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347178243278671:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.631233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:37.649154Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347178243278673:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:37.734378Z node 5 :TX_PROXY ERROR: Actor# [5:7494347178243278728:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:38.031407Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347161063407019:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:38.031476Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:38.891617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25496, MsgBus: 28085 2025-04-17T18:14:40.979915Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347193333479165:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.980969Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015d2/r3tmp/tmpsGOiOK/pdisk_1.dat 2025-04-17T18:14:41.117606Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:41.141297Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:41.141389Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:41.143431Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25496, node 6 2025-04-17T18:14:41.242669Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:41.242695Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:41.242705Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:41.242855Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28085 TClient is connected to server localhost:28085 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:14:41.827743Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.844846Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.925170Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.131420Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.221895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.472810Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347214808317417:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.472941Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.554422Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.603002Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.638951Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.674552Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.748263Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.788180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.882800Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.946030Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347214808318079:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.946141Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.946486Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347214808318084:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.951357Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:45.965579Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347214808318086:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:45.980136Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347193333479165:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.980220Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:46.045384Z node 6 :TX_PROXY ERROR: Actor# [6:7494347219103285438:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:47.301989Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName >> KqpSort::OffsetTopSort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: Trying to start YDB, gRPC: 30067, MsgBus: 21636 2025-04-17T18:14:19.717210Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347103193285853:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:19.717301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157a/r3tmp/tmpZNguHP/pdisk_1.dat 2025-04-17T18:14:20.134566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30067, node 1 2025-04-17T18:14:20.141352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:20.141473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:20.146157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:20.186701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:20.186726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:20.186733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:20.186841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21636 TClient is connected to server localhost:21636 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:20.661116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:22.531857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347116078188391:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:22.531951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:22.790641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:22.914915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347116078188495:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:22.915016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:22.915316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347116078188500:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:22.919154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:22.927998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347116078188502:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:22.998717Z node 1 :TX_PROXY ERROR: Actor# [1:7494347116078188553:2394] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:23.179748Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347120373155892:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2025-04-17T18:14:23.180995Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE0ODMxMzctZTYxZTZjMTItNzlmMTk0MWItZjFlOTYyZDA=, ActorId: [1:7494347116078188373:2328], ActorState: ExecuteState, TraceId: 01js2fb15p1rmd991pzxhcra9j, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-17T18:14:23.204751Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347120373155901:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:14:23.205955Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE0ODMxMzctZTYxZTZjMTItNzlmMTk0MWItZjFlOTYyZDA=, ActorId: [1:7494347116078188373:2328], ActorState: ExecuteState, TraceId: 01js2fb16nedbpvebmrn74y8s3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 63439, MsgBus: 7226 2025-04-17T18:14:23.855907Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347116934553599:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:23.856384Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157a/r3tmp/tmpoHo21p/pdisk_1.dat 2025-04-17T18:14:23.952543Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:23.959096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:23.959167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:23.961313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63439, node 2 2025-04-17T18:14:24.002766Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:24.002790Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:24.002798Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:24.002908Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7226 TClient is connected to server localhost:7226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:14:24.446114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:24.452112Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:26.586016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347129819456119:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:26.586099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:26.598483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.646518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347129819456221:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:26.646610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:26.647000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347129819456226:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:26.650719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCr ... e 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21578, node 6 2025-04-17T18:14:43.179138Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:43.244603Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:43.244632Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:43.244642Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:43.244793Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62572 TClient is connected to server localhost:62572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:43.784474Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:46.663286Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347217745115355:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.670039Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.673680Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.757694Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347217745115508:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.757786Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.758003Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347217745115513:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.761098Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:46.771702Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347217745115515:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:14:46.837803Z node 6 :TX_PROXY ERROR: Actor# [6:7494347217745115566:2429] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:47.868339Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7494347222040083023:2381], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=. TraceId : 01js2fbrxjfwg6g57knz64rtz5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2025-04-17T18:14:47.869073Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7494347222040083024:2382], TxId: 281474976715663, task: 2. Ctx: { TraceId : 01js2fbrxjfwg6g57knz64rtz5. SessionId : ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7494347222040083019:2328], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-17T18:14:47.869433Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7494347222040083025:2383], TxId: 281474976715663, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=. TraceId : 01js2fbrxjfwg6g57knz64rtz5. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7494347222040083019:2328], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-17T18:14:47.869560Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7494347222040083026:2384], TxId: 281474976715663, task: 4. Ctx: { SessionId : ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=. CustomerSuppliedId : . TraceId : 01js2fbrxjfwg6g57knz64rtz5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7494347222040083019:2328], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-17T18:14:47.869972Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=, ActorId: [6:7494347217745115328:2328], ActorState: ExecuteState, TraceId: 01js2fbrxjfwg6g57knz64rtz5, Create QueryResponse for error on request, msg: 2025-04-17T18:14:47.929818Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:14:47.968170Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7494347222040083051:2388], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-17T18:14:47.970005Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=, ActorId: [6:7494347217745115328:2328], ActorState: ExecuteState, TraceId: 01js2fbsc26n773ef2d0zeg75p, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:48.002720Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7494347222040083070:2396], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-17T18:14:48.004667Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=, ActorId: [6:7494347217745115328:2328], ActorState: ExecuteState, TraceId: 01js2fbsdketfxqz8tvx5w7xth, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:48.024610Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347204860212812:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:48.024673Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:48.030663Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7494347226335050386:2404], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-17T18:14:48.031207Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=, ActorId: [6:7494347217745115328:2328], ActorState: ExecuteState, TraceId: 01js2fbsef1tmzehsw6mht6c2w, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:48.398008Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:14:48.411679Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7494347226335050407:2413], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-17T18:14:48.414370Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=, ActorId: [6:7494347217745115328:2328], ActorState: ExecuteState, TraceId: 01js2fbsfm898cwyb48hjpa207, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:48.993699Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:14:48.999095Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7494347226335050437:2422], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-17T18:14:48.999350Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=, ActorId: [6:7494347217745115328:2328], ActorState: ExecuteState, TraceId: 01js2fbsve3s80x3vvx1dykfgq, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:49.641066Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:14:49.646409Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7494347230630017763:2432], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-17T18:14:49.650379Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjk0ZDFjOWEtYzU1N2YxNWQtMTRhMDQ3ZDctNDE4MDgyNGU=, ActorId: [6:7494347217745115328:2328], ActorState: ExecuteState, TraceId: 01js2fbtdmb4n95z8k9ga8w1vf, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::Delete [GOOD] Test command err: Trying to start YDB, gRPC: 22180, MsgBus: 27446 2025-04-17T18:13:59.256193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347017527684903:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:59.256239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b5/r3tmp/tmp1JLJzZ/pdisk_1.dat 2025-04-17T18:13:59.654523Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:59.659554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:59.659655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:59.664484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22180, node 1 2025-04-17T18:13:59.810716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:59.810742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:59.810748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:59.810846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27446 TClient is connected to server localhost:27446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:00.460750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:14:00.492571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:00.626137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:00.815547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:00.885156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.497742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347030412588576:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.497875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.863771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.935274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.963780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.989940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.018917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.053145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.083157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.137502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347034707556531:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.137573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.137756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347034707556536:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.140834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:03.150354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347034707556538:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:03.220024Z node 1 :TX_PROXY ERROR: Actor# [1:7494347034707556593:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:04.256835Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347017527684903:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.256898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30472, MsgBus: 13330 2025-04-17T18:14:07.929996Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347049537347879:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:07.930063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b5/r3tmp/tmp7t7bBj/pdisk_1.dat 2025-04-17T18:14:08.086163Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:08.100033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:08.100117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:08.106991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30472, node 2 2025-04-17T18:14:08.187894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:08.187923Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:08.187930Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:08.188045Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13330 TClient is connected to server localhost:13330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:08.619048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:08.633338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:08.711437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:08.857974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:08.948156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... rmissions } 2025-04-17T18:14:35.017925Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:35.029556Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347170920446071:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:35.108548Z node 5 :TX_PROXY ERROR: Actor# [5:7494347170920446127:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:35.812061Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347149445607164:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.812437Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:36.407768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.467632Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.510946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 21718, MsgBus: 29430 2025-04-17T18:14:40.659486Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347194147004164:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.659542Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b5/r3tmp/tmp14Cfg1/pdisk_1.dat 2025-04-17T18:14:40.808652Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:40.840249Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:40.840351Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:40.842568Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21718, node 6 2025-04-17T18:14:40.921143Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:40.921179Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:40.921190Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:40.921334Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29430 TClient is connected to server localhost:29430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:41.514022Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.527461Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:41.541355Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:41.632625Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.814166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.901208Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:44.735211Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347211326875055:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.735319Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.796568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.839978Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.909308Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.952734Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.999494Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.052393Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.129085Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.259375Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347215621843022:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.259471Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.259527Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347215621843027:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.263644Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:45.280791Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347215621843029:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:45.378525Z node 6 :TX_PROXY ERROR: Actor# [6:7494347215621843089:3561] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:45.659589Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347194147004164:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.734476Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:46.746850Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.820456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.941331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> KqpSort::TopSortResults [GOOD] >> KqpSort::TopParameterFilter >> KqpRanges::WhereInSubquery [GOOD] >> KqpReturning::ReturningTwice |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple >> KqpReturning::ReturningWorks-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete+QueryService >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink [GOOD] >> KqpNewEngine::DependentSelect >> KqpNewEngine::DeleteWithBuiltin+UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink >> StatisticsSaveLoad::Delete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 14735, MsgBus: 20844 2025-04-17T18:14:13.361693Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347074511590915:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:13.361740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001583/r3tmp/tmpr3YK6G/pdisk_1.dat 2025-04-17T18:14:13.784669Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:13.803727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:13.803817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:13.806536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14735, node 1 2025-04-17T18:14:13.898253Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:13.898267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:13.898275Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:13.898380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20844 TClient is connected to server localhost:20844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:14.453363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.471370Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:14.484380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.618532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:14.769084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.833888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.224750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347087396494546:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.224874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.633223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.655931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.682131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.712036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.743039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.813188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.843693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.925385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347087396495215:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.925475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.925727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347087396495220:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.929113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:16.944100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347087396495222:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:17.028243Z node 1 :TX_PROXY ERROR: Actor# [1:7494347091691462575:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:18.361828Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347074511590915:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:18.361907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26735, MsgBus: 13340 2025-04-17T18:14:19.135427Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347100303070488:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:19.135515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001583/r3tmp/tmpZX6DEs/pdisk_1.dat 2025-04-17T18:14:19.244150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:19.244227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:19.247073Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:19.260853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26735, node 2 2025-04-17T18:14:19.300064Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:19.300085Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:19.300093Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:19.300174Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13340 TClient is connected to server localhost:13340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:19.744320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:19.750825Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:19.764914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:19.816574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... lt not found or you don't have access permissions } 2025-04-17T18:14:40.847663Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.921373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.972440Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.010333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.046025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.089399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.139242Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.248182Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347197031005583:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.248484Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.249068Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347197031005590:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.252238Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:41.262864Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347197031005593:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:41.363286Z node 5 :TX_PROXY ERROR: Actor# [5:7494347197031005650:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:42.198430Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347179851133947:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:42.198503Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64182, MsgBus: 7434 2025-04-17T18:14:44.059629Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347211004788277:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:44.059674Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001583/r3tmp/tmpFAT9Dp/pdisk_1.dat 2025-04-17T18:14:44.195380Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:44.223012Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:44.223133Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:44.224969Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64182, node 6 2025-04-17T18:14:44.281501Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:44.281529Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:44.281539Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:44.281714Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7434 TClient is connected to server localhost:7434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:44.878240Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:44.901501Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:44.983140Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.166250Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.254096Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.140924Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347228184659217:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.141031Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.202268Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.274710Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.311088Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.347697Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.387208Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.431358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.507099Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.572888Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347228184659880:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.573013Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.573179Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347228184659885:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.577178Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:48.588384Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347228184659887:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:48.668688Z node 6 :TX_PROXY ERROR: Actor# [6:7494347228184659943:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:49.059717Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347211004788277:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:49.059787Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpRanges::DuplicateKeyPredicateLiteral >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNotNullSecondComponent >> KqpNewEngine::FullScanCount [GOOD] >> KqpRanges::UpdateWhereInNoFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInNoFullScan-UseSink >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BatchUpload >> StatisticsSaveLoad::ForbidAccess >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: Trying to start YDB, gRPC: 18654, MsgBus: 12891 2025-04-17T18:13:59.780982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347016811890782:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:59.781381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b3/r3tmp/tmpTvfjSy/pdisk_1.dat 2025-04-17T18:14:00.313166Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:00.360373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:00.360468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18654, node 1 2025-04-17T18:14:00.366937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:00.426670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:00.426709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:00.426733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:00.426860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12891 TClient is connected to server localhost:12891 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:00.936550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:00.959527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:00.964332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.139628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.325393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:01.411078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.034930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347033991761605:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.035070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.367526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.397092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.464253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.540466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.574350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.643967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.677386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.771052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347033991762281:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.771132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.771364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347033991762286:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.774882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:03.784006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347033991762288:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:03.880221Z node 1 :TX_PROXY ERROR: Actor# [1:7494347033991762347:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:04.777610Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347016811890782:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.777683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24060, MsgBus: 9748 2025-04-17T18:14:05.980656Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347040070372653:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:05.980750Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b3/r3tmp/tmpFJDbPN/pdisk_1.dat 2025-04-17T18:14:06.090367Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24060, node 2 2025-04-17T18:14:06.128051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:06.128153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:06.129946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:06.185025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:06.185044Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:06.185052Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:06.185160Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9748 TClient is connected to server localhost:9748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:06.657062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.672775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.754173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.902442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... SchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.624160Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.666368Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.746270Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347193221486042:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.746344Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.746580Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347193221486047:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.751169Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:40.769195Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347193221486049:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:40.852091Z node 6 :TX_PROXY ERROR: Actor# [6:7494347193221486107:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:41.257435Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347176041614412:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:41.257518Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2543, MsgBus: 28306 2025-04-17T18:14:44.008285Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347211036115251:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:44.008356Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b3/r3tmp/tmpdhx8vd/pdisk_1.dat 2025-04-17T18:14:44.156621Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:44.194297Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:44.194406Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:44.195481Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2543, node 7 2025-04-17T18:14:44.266412Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:44.266443Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:44.266453Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:44.266622Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28306 TClient is connected to server localhost:28306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:44.908638Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:44.915575Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:44.928245Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.013713Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.203691Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.313558Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.324248Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347228215986231:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.324353Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.433632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.508179Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.551915Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.592076Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.663601Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.748424Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.788949Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.900733Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347228215986905:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.900858Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.901113Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347228215986910:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.905824Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:48.917405Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347228215986912:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:49.004956Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347211036115251:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:49.005037Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:49.012669Z node 7 :TX_PROXY ERROR: Actor# [7:7494347232510954266:3556] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
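The warning above (and its repetitions below) carries its own remedy: the test queries apply IN to nullable arguments without the pragma the message names, so YQL reports code 1108. A minimal YQL sketch of a query that opts into the recommended behavior follows; the table path and column name are hypothetical illustrations, not taken from this log:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

-- Per the pragma's name, IN over an empty or NULL-containing collection
-- now follows ANSI three-valued logic instead of the legacy YQL behavior,
-- which is what the warning suggests opting into.
SELECT *
FROM `/Root/SampleTable`              -- hypothetical table
WHERE MaybeValue IN (1, 2, NULL);     -- hypothetical nullable column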
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::ComplexLookupLimit >> KqpNewEngine::LocksNoMutationsSharded [GOOD] >> KqpNewEngine::LocksInRoTx >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey >> KqpSqlIn::TableSource [GOOD] >> KqpSqlIn::SimpleKey_Negated |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> KqpNewEngine::UpsertEmptyInput [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn |92.1%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpMergeCn::SortBy_Int32 [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] Test command err: Trying to start YDB, gRPC: 3992, MsgBus: 11410 2025-04-17T18:14:00.321718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347019487249782:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:00.332647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a7/r3tmp/tmp9P0GIz/pdisk_1.dat 2025-04-17T18:14:00.716480Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:00.731085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:00.731206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:00.734727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3992, node 1 2025-04-17T18:14:00.833843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:00.833863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:00.833869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:00.833983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11410 TClient is connected to server localhost:11410 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:01.396934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.412467Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:01.419912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.610195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.778986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.846269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:03.407931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347032372153412:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.408031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.754871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.786840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.819620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.846160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.874104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.944786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.978481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.057914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347036667121375:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.057980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.058038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347036667121380:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.061750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:04.072127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347036667121382:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:04.175131Z node 1 :TX_PROXY ERROR: Actor# [1:7494347036667121439:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:05.159359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.233462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.318934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.319584Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347019487249782:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:05.319658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 4132, MsgBus: 20996 2025-04-17T18:14:09.322463Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347057470052416:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:09.322624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a7/r3tmp/tmps6uKNN/pdisk_1.dat 2025-04-17T18:14:09.429458Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4132, node 2 2025-04-17T18:14:09.483423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:09.483514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:09.484731Z ... 6710669 completed, doublechecking } 2025-04-17T18:14:39.259550Z node 5 :TX_PROXY ERROR: Actor# [5:7494347186361761802:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:40.130332Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347169181890133:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.130408Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:40.318361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.382044Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.429355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 11734, MsgBus: 7731 2025-04-17T18:14:44.807660Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347211378279343:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:44.807722Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a7/r3tmp/tmpw0RZ4u/pdisk_1.dat 2025-04-17T18:14:44.904783Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:44.940771Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 11734, node 6 2025-04-17T18:14:44.941025Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:44.942857Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:44.986156Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:44.986193Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:44.986201Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:44.986372Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7731 TClient is connected to server localhost:7731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:45.539928Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.553119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.627436Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:45.831615Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:45.909524Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.555497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347228558150293:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.555607Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.658373Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.702368Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.744903Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.788874Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.861364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.904358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.946848Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.067491Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347232853118252:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.067624Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.068024Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347232853118259:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.072886Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:49.089472Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347232853118261:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:49.143890Z node 6 :TX_PROXY ERROR: Actor# [6:7494347232853118316:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:49.808090Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347211378279343:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:49.808155Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:50.525490Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.572417Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.614423Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile >> TNetClassifierTest::TestInitFromFile >> TNetClassifierTest::TestInitFromRemoteSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: Trying to start YDB, gRPC: 62987, MsgBus: 16902 2025-04-17T18:14:09.662619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347059146568368:2170];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:09.663431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001588/r3tmp/tmpbKjFRq/pdisk_1.dat 2025-04-17T18:14:10.025152Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62987, node 1 2025-04-17T18:14:10.064181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:10.064305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:10.067628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:10.113019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:10.113050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:10.113056Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:10.113179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16902 TClient is connected to server localhost:16902 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:10.764670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:12.591974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347072031470803:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:12.592124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:12.834172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:12.961072Z node 1 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' Trying to start YDB, gRPC: 10049, MsgBus: 13301 2025-04-17T18:14:13.550338Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347077740709285:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:13.550419Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001588/r3tmp/tmpPkoSr6/pdisk_1.dat 2025-04-17T18:14:13.690436Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10049, node 2 2025-04-17T18:14:13.719704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:13.719788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:13.724502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:13.806604Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:13.806623Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:13.806629Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:13.806714Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13301 TClient is connected to server localhost:13301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:14.260851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:14.270951Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:14.285307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.341726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.496163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:14.596771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:16.543612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347090625612918:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.543702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.632847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.660257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.699064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.734968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.769914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.801814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.837040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:16.919634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347090625613584:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.919714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.919956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347090625613589:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.923465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:16.934020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347090625613591:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:17.005067Z node 2 :TX_PROXY ERROR: Actor# [2:7494347094920580944:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:18.032448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboper ... oposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.566421Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.608527Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.652036Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.745505Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347211066805591:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.745635Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.745991Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347211066805596:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.751721Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:44.764066Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347211066805598:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:44.845128Z node 6 :TX_PROXY ERROR: Actor# [6:7494347211066805655:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:45.520439Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347193886933971:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.520514Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:45.922250Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.707417Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913686740, txId: 281474976715674] shutting down Trying to start YDB, gRPC: 32296, MsgBus: 29766 2025-04-17T18:14:47.862396Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347224071638616:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:47.862501Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001588/r3tmp/tmpRHd9zE/pdisk_1.dat 2025-04-17T18:14:48.048153Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:48.083698Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:48.083819Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:48.087442Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32296, node 7 2025-04-17T18:14:48.173217Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:48.173246Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:48.173257Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:48.173403Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29766 TClient is connected to server localhost:29766 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:48.781733Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.801026Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:48.884058Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:49.181554Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:49.267018Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.115358Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347245546476864:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:52.115455Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:52.162433Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.197458Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.235963Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.267821Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.299734Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.332956Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.367902Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.451636Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347245546477526:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:52.451733Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:52.451814Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347245546477531:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:52.455901Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:52.467095Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347245546477533:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:52.522941Z node 7 :TX_PROXY ERROR: Actor# [7:7494347245546477587:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:52.862850Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347224071638616:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:52.862971Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:53.763524Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:54.570401Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913694608, txId: 281474976715674] shutting down |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> KqpPg::LongDomainName [GOOD] >> KqpReturning::ReturningTwice [GOOD] >> KqpReturning::ReplaceSerial >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate >> KqpNewEngine::ScalarMultiUsage [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda >> TNodeBrokerTest::TestRandomActions [GOOD] >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpRanges::IsNullInValue >> KqpSqlIn::PhasesCount [GOOD] >> KqpSort::TopParameterFilter [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink [GOOD] >> KqpNewEngine::DeleteON >> KqpNewEngine::DependentSelect [GOOD] >> KqpNewEngine::DqSourceCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 22757, MsgBus: 22736 2025-04-17T18:13:16.465493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346831914753113:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.465570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a2/r3tmp/tmpkzUqIa/pdisk_1.dat 2025-04-17T18:13:16.768172Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22757, node 1 2025-04-17T18:13:16.842566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.844350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.848780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.937224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-17T18:13:16.937250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.937261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.937412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22736 TClient is connected to server localhost:22736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:17.450386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:19.016373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346844799655668:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:19.016380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346844799655660:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:19.016517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:19.021797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:13:19.029813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346844799655674:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:13:19.115458Z node 1 :TX_PROXY ERROR: Actor# [1:7494346844799655725:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23613, MsgBus: 8352 2025-04-17T18:13:19.882576Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494346844956642912:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:19.882616Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a2/r3tmp/tmptG0cQi/pdisk_1.dat 2025-04-17T18:13:19.977743Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23613, node 2 2025-04-17T18:13:20.028098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:20.028225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:20.032470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:20.053352Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:20.053378Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:20.053385Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:20.053508Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8352 TClient is connected to server localhost:8352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:20.358110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:22.416326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346857841545460:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:22.416400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494346857841545452:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:22.416639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:22.419214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:13:22.427182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494346857841545466:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:13:22.523589Z node 2 :TX_PROXY ERROR: Actor# [2:7494346857841545517:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22361, MsgBus: 16496 2025-04-17T18:13:23.027836Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494346859900281897:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:23.027922Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a2/r3tmp/tmprRlGtH/pdisk_1.dat 2025-04-17T18:13:23.092106Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22361, node 3 2025-04-17T18:13:23.141327Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:23.141347Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:23.141353Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:23.141426Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:23.149253Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:23.149332Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:23.150917Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16496 TClient is connected to server localhost:16496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } ... CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:14:44.092733Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.103177Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:48.008334Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494347203535102958:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:48.017348Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:48.167952Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494347225009939951:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.168070Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7494347225009939916:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.168584Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.173961Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:14:48.193867Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7494347225009939954:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:14:48.284653Z node 10 :TX_PROXY ERROR: Actor# [10:7494347225009940005:2343] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:48.323609Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 24287, MsgBus: 7571 2025-04-17T18:14:51.135083Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7494347237407267275:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:51.136020Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a2/r3tmp/tmpOx8MlJ/pdisk_1.dat 2025-04-17T18:14:51.295372Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:51.316199Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:51.316381Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:51.318381Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24287, node 11 2025-04-17T18:14:51.372119Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:51.372150Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:51.372160Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:51.372332Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7571 TClient is connected to server localhost:7571 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_D... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-04-17T18:14:52.064258Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:56.123145Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494347258882104406:2334], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:56.123255Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7494347258882104398:2331], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:56.123704Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:56.129721Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:14:56.136450Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494347237407267275:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:56.136812Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:56.149994Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7494347258882104412:2335], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:14:56.224415Z node 11 :TX_PROXY ERROR: Actor# [11:7494347258882104466:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:56.257706Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
>> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD]
>> KqpNewEngine::DqSourceLiteralRange
>> KqpRanges::UpdateWhereInNoFullScan-UseSink [GOOD]
>> KqpRanges::UpdateWhereInWithNull
>> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD]
>> KqpNewEngine::BatchUpload [GOOD]
>> KqpNewEngine::BrokenLocksAtROTx
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD]
Test command err:
2025-04-17T18:13:02.394945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:13:02.395003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:02.482311Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.199753Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.200042Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.224555Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.260588Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.261165Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.261371Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.910134Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.960838Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.961176Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.974552Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:13:03.974973Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.048622Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.075598Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.075867Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.088086Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.127362Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.127671Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.128098Z node 1 :NODE_BROKER ERROR: Cannot register node
host3:2: ERROR_TEMP: No free node IDs 2025-04-17T18:13:04.153464Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.154460Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.868152Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.868501Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:04.974784Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:05.014169Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:05.065879Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-04-17T18:13:05.066399Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:05.081468Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-04-17T18:13:05.082029Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-04-17T18:13:05.082549Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-17T18:13:05.082943Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:05.807890Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-17T18:13:05.820965Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:05.834680Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:06.688767Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:06.689298Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:06.689771Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.038545Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.039034Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.434467Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.779512Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.794306Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.824925Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.825409Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.825884Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.826319Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:07.854161Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:13:08.685183Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:13:08.699183Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 
2025-04-17T18:13:08.699504Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:13:08.699775Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:13:08.700065Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:13:08.700685Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.023121Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.037248Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.056483Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.069376Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.084660Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.504405Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.504758Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.850574Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:09.851336Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.158424Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.159111Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.159659Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.196302Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.196920Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.211405Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.212045Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.213124Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.213606Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.691147Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.691493Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.723222Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.723691Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.724056Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.763458Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.777352Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.816811Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: 
WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.868951Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.869420Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:13:10.889260Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:13:11.572465Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-04-17T18:13:11.573318Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:11.601141Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-17T18:13:11.601793Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:11.636344Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:11.650069Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:11.650573Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:13:12.098693Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:13:12.114331Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:13:12.115007Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:13:12.115541Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:13:12.143438Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-17T18:13:12.536733Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:12.956947Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:12.957392Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:13:13.283407Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:13.283893Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:13:13.298173Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04 ... 
ode 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:14:42.954391Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:14:43.004258Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:14:43.014552Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:14:43.019541Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:14:43.066303Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-04-17T18:14:43.754870Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-17T18:14:43.757790Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-17T18:14:43.785037Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-04-17T18:14:43.805524Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-04-17T18:14:44.260821Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:14:44.263060Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:14:44.267262Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:14:44.913461Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:14:44.935871Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:14:44.937924Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-17T18:14:45.005283Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-17T18:14:45.007595Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-17T18:14:45.053484Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:45.069848Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:45.073887Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:45.888958Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:14:45.893118Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:14:46.049953Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:46.599950Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:46.601688Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:46.618635Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-04-17T18:14:46.622439Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:14:46.623547Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-17T18:14:46.626076Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:14:47.072060Z node 1 
:NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:14:47.411174Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:14:47.452561Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:14:47.506508Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:14:47.508554Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:14:47.980784Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.082751Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.539065Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.898421Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.900434Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.941042Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.942926Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.994161Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-17T18:14:48.999125Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.025588Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.046013Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.163401Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.169238Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.266157Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.268309Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.270416Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:49.330313Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-17T18:14:49.350673Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-17T18:14:49.428145Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-17T18:14:49.432258Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-17T18:14:49.435469Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-17T18:14:49.437535Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-04-17T18:14:49.442221Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-17T18:14:49.458570Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-17T18:14:50.805545Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown 
node 2025-04-17T18:14:50.870030Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:14:50.873159Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-17T18:14:50.899732Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-04-17T18:14:51.458749Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-17T18:14:51.478098Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:14:51.545599Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:14:51.547921Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:14:51.935608Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:14:51.943727Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-17T18:14:52.004412Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:14:52.531493Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:14:52.573790Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:14:53.176117Z node 1 :NODE_BROKER ERROR: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2025-04-17T18:14:53.178122Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-17T18:14:53.201230Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-17T18:14:53.203270Z node 1 :NODE_BROKER ERROR: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2025-04-17T18:14:53.204967Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-17T18:14:53.206990Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-17T18:14:53.212706Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-17T18:14:54.187164Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:14:54.191407Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:14:54.193614Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:14:54.683196Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:14:55.079285Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:14:55.441527Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:14:55.443383Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-17T18:14:55.483931Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-17T18:14:55.543257Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.110373Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.161357Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: 
WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.163809Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.840698Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.843043Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.886896Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.889119Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-17T18:14:56.970584Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2025-04-17T18:14:56.972774Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2025-04-17T18:14:56.977886Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2025-04-17T18:14:57.005792Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-04-17T18:14:57.056486Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2025-04-17T18:14:57.058926Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs
>> TNetClassifierTest::TestInitFromFile [GOOD]
>> KqpKv::ReadRows_PgKey [GOOD]
>> KqpKv::ReadRows_Nulls
>> TVersions::Wreck2Reverse [GOOD]
>> TVersions::Wreck1
>> TNetClassifierTest::TestInitFromRemoteSource [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD]
Test command err:
2025-04-17T18:14:56.656728Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347262729703199:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:56.656873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001446/r3tmp/tmpdVOUtD/pdisk_1.dat 2025-04-17T18:14:57.004109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:57.067882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001446/r3tmp/yandex9IE5rN.tmp 2025-04-17T18:14:57.067907Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001446/r3tmp/yandex9IE5rN.tmp 2025-04-17T18:14:57.068850Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2025-04-17T18:14:57.068918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/6873/001446/r3tmp/yandex9IE5rN.tmp 2025-04-17T18:14:57.069033Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:14:57.080191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:57.080372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:57.082416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD]
Test command err:
Trying to start YDB, gRPC: 16748, MsgBus: 24990 2025-04-17T18:14:17.858780Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347093284812823:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:17.858833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157c/r3tmp/tmpA1z3ll/pdisk_1.dat 2025-04-17T18:14:18.160427Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16748, node 1 2025-04-17T18:14:18.238827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:18.238926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:18.244096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:18.246709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:18.246738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:18.246744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:18.246857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24990 TClient is connected to server localhost:24990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:18.805544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:18.830111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:18.965925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:19.123343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:14:19.203061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.726219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347106169716469:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.726310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:21.044742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:21.071989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:21.098953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:21.131608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:21.161312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:21.191317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:21.223148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:21.286498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347110464684425:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:21.286592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:21.286964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347110464684430:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:21.290516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:21.298831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347110464684432:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:21.393428Z node 1 :TX_PROXY ERROR: Actor# [1:7494347110464684489:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:22.859209Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347093284812823:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:22.859323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23914, MsgBus: 30075 2025-04-17T18:14:23.543026Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347120902610598:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:23.543874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157c/r3tmp/tmplHCajh/pdisk_1.dat 2025-04-17T18:14:23.631073Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23914, node 2 2025-04-17T18:14:23.681795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:23.681957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:23.683472Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:23.689990Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:23.690011Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:23.690019Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:23.690151Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30075 TClient is connected to server localhost:30075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:24.139205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:24.143907Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:24.157511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:24.234900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:24.371759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... Id: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.256304Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.256346Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347218437833984:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:46.260245Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:46.271247Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347218437833986:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:46.364753Z node 5 :TX_PROXY ERROR: Actor# [5:7494347218437834043:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:47.388631Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347201257962368:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:47.388704Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:47.529550Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5746, MsgBus: 12875 2025-04-17T18:14:51.962028Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347239120306887:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:51.962850Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157c/r3tmp/tmpKX3iHU/pdisk_1.dat 2025-04-17T18:14:52.110003Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:52.127889Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:52.127971Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:52.130020Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5746, node 6 2025-04-17T18:14:52.178868Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:52.178899Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:52.178910Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:52.179081Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12875 TClient is connected to server localhost:12875 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:52.754014Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:52.776365Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:52.837856Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:53.014349Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:53.087885Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:55.730626Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347256300177851:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.730756Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.778215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.817055Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.853854Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.885922Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.922472Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.965398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:56.036272Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:56.125348Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347260595145816:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:56.125449Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:56.125613Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347260595145822:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:56.131199Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:56.142867Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347260595145824:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:56.242512Z node 6 :TX_PROXY ERROR: Actor# [6:7494347260595145880:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:57.035456Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347239120306887:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:57.035835Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
(
(declare $limit (DataType 'Uint64))
(declare $value (DataType 'Int32))
(let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:2" '"" '1))
(let $2 '('"Key" '"Value1" '"Value2"))
(let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '()))
(let $4 (DataType 'Int32))
(let $5 (Min (Uint64 '"1001") $limit))
(let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4))))
(let $7 '('('"_logical_id" '497) '('"_id" '"7b92edaf-12c212bf-e11bcba7-f3ef257e") '('"_wide_channels" $6)))
(let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7))
(let $9 (DqCnUnionAll (TDqOutput $8 '"0")))
(let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '510) '('"_id" '"d4b54be2-877bce77-11b3796a-25ccf708"))))
(let $11 (DqCnResult (TDqOutput $10 '"0") '()))
(return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query"))))
)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD]
Test command err:
Trying to start YDB, gRPC: 21486, MsgBus: 4470 2025-04-17T18:14:04.621819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347037549426776:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:04.622777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158f/r3tmp/tmpYHsDlp/pdisk_1.dat 2025-04-17T18:14:05.040451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:05.040541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:05.042069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21486, node 1 2025-04-17T18:14:05.071739Z node 1 :GRPC_SERVER
WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:14:05.071930Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:14:05.077040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:05.197381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:05.197403Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:05.197413Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:05.197533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4470 TClient is connected to server localhost:4470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:05.722564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:05.736007Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:05.754251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:05.952873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.156882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.258759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:08.055008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347054729297732:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.055110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.375442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.442452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.465303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.492388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.526258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.596538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.669316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.735023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347054729298406:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.735114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.735555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347054729298411:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:08.739019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:08.751831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347054729298413:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:08.826132Z node 1 :TX_PROXY ERROR: Actor# [1:7494347054729298468:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:09.622446Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347037549426776:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:09.622512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:09.798977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:09.837631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:09.874347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:09.980305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
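The warning above points at its own remedy. As a rough illustration only (not the literal test query), a YQL statement like the following — reusing the TwoShard table and the $value parameter visible in the physical plan earlier in this output, and assuming Value2 is nullable as that plan's Optional<Int32> type suggests — would draw code 1108 until the suggested pragma opts in to ANSI IN semantics:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

DECLARE $value AS Int32;

-- Sketch under stated assumptions: TwoShard and $value come from the plan
-- above; the exact test query may differ. With a nullable operand, ANSI IN
-- evaluates to NULL (not FALSE) when no list element matches and a NULL is
-- involved in the comparison, matching standard SQL three-valued logic.
SELECT Key, Value1, Value2
FROM TwoShard
WHERE Value2 IN (1, $value)
ORDER BY Key;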
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 2506, MsgBus: 31987 2025-04-17T18:14:13.642651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347075749575443:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:13.643093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158f/r3tmp/tmpWgK5qK/pdisk_1.dat 2025-04-17T18:14:13.772968Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:13.779853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:13.779936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:13.781876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2506, node 2 2025-04-17T18:14:13.877369Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:13.877390Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:13.877398Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:13.877507Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost ... 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.687661Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.723152Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.799047Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.871631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.014579Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347212386901281:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.014684Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.014885Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347212386901286:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:45.018545Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:45.032174Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347212386901288:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:45.131247Z node 5 :TX_PROXY ERROR: Actor# [5:7494347212386901346:3558] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:45.533661Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347190912062327:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.533752Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:46.325194Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.440265Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.484931Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1388, MsgBus: 6202 2025-04-17T18:14:50.997291Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347233732058569:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:50.997402Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158f/r3tmp/tmpMuP3Pi/pdisk_1.dat 2025-04-17T18:14:51.130043Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:51.159677Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:51.159777Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:51.161272Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1388, node 6 2025-04-17T18:14:51.209417Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:51.209441Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:51.209451Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:51.209598Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6202 TClient is connected to server localhost:6202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:14:51.752509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.767119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:51.840786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:52.034111Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:52.129743Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:55.072350Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347255206896826:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.072461Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.163710Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.197396Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.231521Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.264647Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.300408Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.340025Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.379095Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.440513Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347255206897484:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.440622Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.440672Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347255206897489:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.445085Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:55.455542Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347255206897491:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:55.531341Z node 6 :TX_PROXY ERROR: Actor# [6:7494347255206897545:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:55.997416Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347233732058569:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:55.997516Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-04-17T18:14:56.652048Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347259097278389:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:56.653140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001431/r3tmp/tmpEDlKSp/pdisk_1.dat 2025-04-17T18:14:56.999810Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:57.052920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:57.052999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:57.053821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:57.067863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001431/r3tmp/yandexrFkAZt.tmp 2025-04-17T18:14:57.067897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001431/r3tmp/yandexrFkAZt.tmp 2025-04-17T18:14:57.068992Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001431/r3tmp/yandexrFkAZt.tmp 2025-04-17T18:14:57.069111Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> KqpReturning::ReturningWorksIndexedDelete+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-04-17T18:14:56.652740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347258908666834:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:56.652802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00141d/r3tmp/tmpFvBuCV/pdisk_1.dat 2025-04-17T18:14:57.024780Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T18:14:57.042209Z node 1 :HTTP ERROR: (#26,[::1]:10592) connection closed with error: Connection refused 2025-04-17T18:14:57.042490Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-17T18:14:57.044387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:57.044910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:57.047386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:57.067725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:57.067762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:57.067770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:57.067918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::MergeRanges |92.2%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> TObjectStorageListingTest::TestFilter >> TLocksTest::GoodLock >> TLocksFatTest::RangeSetRemove >> KqpNotNullColumns::AlterAddNotNullColumn [GOOD] >> KqpNotNullColumns::AlterAddIndex >> TFlatTest::LargeProxyReply >> TFlatTest::PathSorting >> TFlatTest::AutoSplitBySize >> TLocksTest::Range_BrokenLock0 >> TLocksTest::BrokenLockErase >> TFlatTest::WriteSplitKillRead >> KqpNewEngine::LocksInRoTx [GOOD] >> KqpNewEngine::LookupColumns >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TFlatTest::SplitInvalidPath >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::TupleParameter >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest >> KqpNewEngine::ComplexLookupLimit [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> TFlatTest::Mix_DML_DDL >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> TLocksTest::UpdateLockedKey >> TLocksTest::Range_Pinhole >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> TLocksTest::Range_BrokenLock2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: Trying to start YDB, gRPC: 8204, MsgBus: 19905 2025-04-17T18:14:17.000185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347087863159061:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:17.000285Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001580/r3tmp/tmp8tHqiz/pdisk_1.dat 2025-04-17T18:14:17.336816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:17.375210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:17.375327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:17.376843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8204, node 1 2025-04-17T18:14:17.426732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:17.426758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:17.426769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:17.426891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19905 TClient is connected to server localhost:19905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:17.900811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:19.738571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347100748061613:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:19.738690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.029174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.158644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347105043029014:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.158770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.159059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347105043029019:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.163618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:20.173688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347105043029021:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:20.268230Z node 1 :TX_PROXY ERROR: Actor# [1:7494347105043029073:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:21.436214Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 30015, MsgBus: 14458 2025-04-17T18:14:22.768107Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347114252335514:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:22.768151Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001580/r3tmp/tmppv8Yfd/pdisk_1.dat 2025-04-17T18:14:22.888084Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30015, node 2 2025-04-17T18:14:22.909787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:22.909979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:22.913555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:22.958243Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:22.958270Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:22.958279Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:22.958425Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14458 TClient is connected to server localhost:14458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:23.438701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:23.447695Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:23.455613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:23.535660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:23.693645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:23.788377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:25.687357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347127137239157:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:25.687441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:25.745707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:25.778897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:25.814492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:25.846992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:25.877374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:25.914799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:25.993560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.060157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347131432207119:2469], DatabaseId: /Root, PoolId: default, Failed to ... 474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.365044Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.393617Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.426418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.462153Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.497574Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.595735Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347239896653969:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:51.595844Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:51.595904Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347239896653974:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:51.602589Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:51.615820Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347239896653976:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:51.711004Z node 6 :TX_PROXY ERROR: Actor# [6:7494347239896654033:3553] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:52.304220Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347222716782364:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:52.304291Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:52.941817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7407, MsgBus: 29057 2025-04-17T18:14:54.417943Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347252276363238:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:54.418115Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001580/r3tmp/tmp8fPGui/pdisk_1.dat 2025-04-17T18:14:54.539268Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:54.568452Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:54.568559Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:54.575417Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7407, node 7 2025-04-17T18:14:54.624337Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:54.624365Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:54.624374Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:54.624510Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29057 TClient is connected to server localhost:29057 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:55.178197Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:55.197024Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:55.261667Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:55.446435Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:55.525981Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:58.470962Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347269456234215:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.471062Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.521963Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.600607Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.639468Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.710034Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.792714Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.828291Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.899139Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.974449Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347269456234888:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.974554Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.974719Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347269456234893:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.978802Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:58.996289Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347269456234895:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:59.061808Z node 7 :TX_PROXY ERROR: Actor# [7:7494347273751202246:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:59.418028Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347252276363238:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:59.418102Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:00.292010Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpReturning::ReplaceSerial [GOOD] >> KqpReturning::ReturningSerial >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Int >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> KqpKv::ReadRows_Nulls [GOOD] >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> StatisticsSaveLoad::Simple [GOOD] >> StatisticsSaveLoad::Delete [GOOD] >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceLimit >> KqpRanges::IsNullInValue [GOOD] >> KqpRanges::IsNullInJsonValue >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> TLocksTest::CK_GoodLock >> KqpNewEngine::DqSourceCount [GOOD] >> KqpNewEngine::DqSource >> KqpNewEngine::BrokenLocksAtROTx [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 20511, MsgBus: 24259 2025-04-17T18:14:30.709130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347148891425477:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:30.709215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156e/r3tmp/tmpSYJmhf/pdisk_1.dat 2025-04-17T18:14:31.102658Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:31.122385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:31.122485Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:31.124449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20511, node 1 2025-04-17T18:14:31.261227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:31.261250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:31.261274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:31.261389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24259 TClient is connected to server localhost:24259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:31.741495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:31.755653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:33.728651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347161776328018:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:33.728849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:34.005398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 11638, MsgBus: 30584 2025-04-17T18:14:34.784145Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347166435706657:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:34.784191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156e/r3tmp/tmpgt11cv/pdisk_1.dat 2025-04-17T18:14:34.877295Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11638, node 2 2025-04-17T18:14:34.939131Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:34.939253Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:34.943861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:34.967400Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:34.967424Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:34.967431Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:34.967522Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30584 TClient is connected to server localhost:30584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:35.377629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:35.386796Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:37.743187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347179320609195:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.743273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:37.759040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS [] IsSuccess(): 1 GetStatus(): SUCCESS 2025-04-17T18:14:37.867452Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: no keys are found in request's proto Trying to start YDB, gRPC: 2839, MsgBus: 17572 2025-04-17T18:14:38.511365Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347184572103720:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:38.511421Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156e/r3tmp/tmpDv07hX/pdisk_1.dat 2025-04-17T18:14:38.646886Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:38.659479Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:38.659557Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:38.660656Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2839, node 3 2025-04-17T18:14:38.704161Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:38.704182Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:38.704190Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:38.704284Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17572 TClient is connected to server localhost:17572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
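[editor's note] The repeated `Resource pool default not found or you don't have access permissions` warnings above are the KQP workload service probing for the built-in `default` pool before it exists; the later `ESchemeOpCreateResourcePool` and `Transaction ... completed, doublechecking` records in this log show the pool being created lazily on first query, so on a fresh test database these NOT_FOUND lines are expected noise, not failures. Where a pool should exist up front, it can be declared explicitly. A minimal sketch, assuming YDB's workload-management YQL syntax; the pool name and limits are illustrative and not taken from this run:

    -- Hypothetical explicit pool so first queries do not race the lazy
    -- creation of `default` (name and limits are invented for the example).
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to execute at once
        QUEUE_SIZE = 100              -- queries allowed to wait beyond the limit
    );
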
2025-04-17T18:14:39.123931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.133293Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:41.620674Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494347197457006259:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.620777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:41.638009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 7 ... tself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715740:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.134215Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037929 not found 2025-04-17T18:14:53.138853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715742:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.204260Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037930 not found 2025-04-17T18:14:53.206415Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.280956Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037931 not found 2025-04-17T18:14:53.290647Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715746:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.380013Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037932 not found 2025-04-17T18:14:53.388060Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715748:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.465430Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715750:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.466124Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037933 not found 2025-04-17T18:14:53.538582Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037934 not found 2025-04-17T18:14:53.545956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715752:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.655491Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037935 not found 2025-04-17T18:14:53.660151Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715754:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.726143Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037936 not found 2025-04-17T18:14:53.730014Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.803467Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from 
node 5, TabletId: 72075186224037937 not found 2025-04-17T18:14:53.806781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715758:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.866221Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037938 not found 2025-04-17T18:14:53.871503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.944759Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037939 not found Trying to start YDB, gRPC: 64550, MsgBus: 25234 2025-04-17T18:14:54.675365Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347252519814572:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:54.675429Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156e/r3tmp/tmp5U1oVu/pdisk_1.dat 2025-04-17T18:14:54.797463Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:54.812692Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:54.812790Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:54.814553Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64550, node 6 2025-04-17T18:14:54.862663Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:54.862689Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:54.862699Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:54.862843Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25234 TClient is connected to server localhost:25234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
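[editor's note] The node 5 section above alternates `ESchemeOpCreateTable` proposals with Hive `TEvLocal::TEvTabletStatus ... not found` notifications for consecutive tablet IDs (72075186224037929 through 72075186224037939), a pattern consistent with a test that repeatedly recreates a table: each create allocates a fresh datashard tablet, and each drop leaves Hive reporting the previous tablet as gone. A minimal sketch of one such iteration, with an invented table name and schema since the log never prints them:

    -- Hypothetical single cycle of the create/drop churn implied above.
    -- CREATE allocates a new datashard tablet; the later DROP is what makes
    -- Hive emit a "TabletId: ... not found" status for the old tablet.
    CREATE TABLE `/Root/ChurnTable` (
        Key   Uint64,
        Value Utf8,
        PRIMARY KEY (Key)
    );
    DROP TABLE `/Root/ChurnTable`;
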
2025-04-17T18:14:55.344099Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:58.125244Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.560343Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037888 not found 2025-04-17T18:14:58.563809Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.658719Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037889 not found 2025-04-17T18:14:58.663331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.740668Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037890 not found Trying to start YDB, gRPC: 13110, MsgBus: 19194 2025-04-17T18:14:59.634654Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347274880162832:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:59.634722Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156e/r3tmp/tmpIVMfOA/pdisk_1.dat 2025-04-17T18:14:59.808759Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:59.812815Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:59.812906Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:59.815284Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13110, node 7 2025-04-17T18:14:59.870636Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:59.870672Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:59.870684Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:59.870863Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19194 TClient is connected to server localhost:19194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:00.345655Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.650938Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347292060032680:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.651032Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.671852Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2025-04-17T18:14:55.312595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:14:55.312732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:14:55.312783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e0f/r3tmp/tmpma68CR/pdisk_1.dat 2025-04-17T18:14:55.770912Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20261, node 1 2025-04-17T18:14:56.274970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:56.275029Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:56.275078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:56.275549Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:14:56.284183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:14:56.368063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:56.368215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:56.384442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27693 2025-04-17T18:14:56.948564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:59.990230Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:15:00.029199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:00.029314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:00.067961Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:15:00.070632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:00.307149Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.307586Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.307969Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.308071Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.308170Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.308316Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.308371Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.308441Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.308524Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.464328Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:00.464426Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:00.488926Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:00.628641Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:00.685590Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:15:00.685712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:15:00.719047Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:15:00.719449Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:15:00.719658Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:15:00.719707Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:15:00.719764Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:15:00.719815Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:15:00.719859Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:15:00.719907Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:15:00.720618Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:15:00.762878Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:15:00.762998Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1869:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:15:00.771738Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2607] 2025-04-17T18:15:00.777808Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2617] 2025-04-17T18:15:00.778160Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2617], schemeshard id = 72075186224037897 2025-04-17T18:15:00.791292Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:15:00.810518Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:15:00.810581Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:15:00.810664Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:15:00.823330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:15:00.830579Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:15:00.830711Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:15:01.047939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:15:01.193899Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:15:01.274823Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:15:02.032040Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:15:02.032498Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:15:02.045082Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:15:02.049152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2257:3084], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.049248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2274:3090], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.049310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.056853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-04-17T18:15:02.120567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2278:3093], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:15:02.401180Z node 1 :TX_PROXY ERROR: Actor# [1:2368:3122] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:02.743377Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2390:3134]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:15:02.743617Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:15:02.743737Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2392:3136] 2025-04-17T18:15:02.745742Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2392:3136] 2025-04-17T18:15:02.746460Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2393:2844] 2025-04-17T18:15:02.746690Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2392:3136], server id = [2:2393:2844], tablet id = 72075186224037894, status = OK 2025-04-17T18:15:02.746841Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2393:2844], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:15:02.746893Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:15:02.747116Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:15:02.747183Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2390:3134], StatRequests.size() = 1 2025-04-17T18:15:03.298567Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ODRkYmQ1ZmMtMTU4YmM0ZDItOTY4NTgxZTQtNjliOTQ3MDE=, TxId: 2025-04-17T18:15:03.298663Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ODRkYmQ1ZmMtMTU4YmM0ZDItOTY4NTgxZTQtNjliOTQ3MDE=, TxId: 2025-04-17T18:15:03.299843Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:15:03.302876Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-17T18:15:03.390477Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2426:3157]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:15:03.390673Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:15:03.390721Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2426:3157], StatRequests.size() = 1 2025-04-17T18:15:03.537939Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YWVmZGVmZjEtZDM5ZGQzMWItMzI4ODZhNC00YzIwOTY2Yw==, TxId: 01js2fc8jjfvz96kd700tpgd57 2025-04-17T18:15:03.539594Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YWVmZGVmZjEtZDM5ZGQzMWItMzI4ODZhNC00YzIwOTY2Yw==, TxId: 01js2fc8jjfvz96kd700tpgd57 2025-04-17T18:15:03.542113Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:15:03.545122Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-17T18:15:03.564377Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=M2FlMmE5OGMtYTMyYjJmM2YtZDA4M2ZkNTMtNjlmMDliNWM=, TxId: 01js2fc8ktbbdsjqc6zs7hvsy6 2025-04-17T18:15:03.564493Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=M2FlMmE5OGMtYTMyYjJmM2YtZDA4M2ZkNTMtNjlmMDliNWM=, TxId: 01js2fc8ktbbdsjqc6zs7hvsy6 >> KqpNewEngine::DeleteON [GOOD] >> KqpNewEngine::DeleteByKey >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: 2025-04-17T18:14:55.444946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:14:55.445091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:14:55.445209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000de2/r3tmp/tmp3avm2h/pdisk_1.dat 2025-04-17T18:14:55.773805Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27104, node 1 2025-04-17T18:14:56.274567Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:56.274632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:56.274667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:56.275460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:14:56.283996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:14:56.382662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:56.382782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:56.400931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22887 2025-04-17T18:14:56.959835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:59.960005Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:14:59.998887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:59.998993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:00.040492Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:15:00.043259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:00.295312Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.297556Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.298136Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.298497Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.298567Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.298647Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.298746Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.298812Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.298863Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.464379Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:00.464477Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:00.477496Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:00.610535Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:00.653927Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:15:00.654041Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:15:00.683817Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:15:00.688389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:15:00.688589Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:15:00.688650Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:15:00.688704Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:15:00.688759Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:15:00.688810Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:15:00.688858Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:15:00.690117Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:15:00.725336Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:15:00.725434Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:15:00.732806Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:15:00.741645Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:15:00.741840Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:15:00.755957Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:15:00.777652Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:15:00.777731Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:15:00.777832Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:15:00.792625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:15:00.802095Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:15:00.802282Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:15:00.988343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:15:01.178030Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:15:01.237463Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:15:01.839399Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:15:01.839898Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:15:01.908361Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:15:01.930654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2264:3088], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:01.930792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2279:3093], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:01.930896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:01.938448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-04-17T18:15:01.981343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2284:3096], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:15:02.198344Z node 1 :TX_PROXY ERROR: Actor# [1:2372:3125] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:02.743395Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2394:3137]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:15:02.743619Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:15:02.743764Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2396:3139] 2025-04-17T18:15:02.744637Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2396:3139] 2025-04-17T18:15:02.745181Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2397:2845] 2025-04-17T18:15:02.745509Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2396:3139], server id = [2:2397:2845], tablet id = 72075186224037894, status = OK 2025-04-17T18:15:02.745634Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2397:2845], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:15:02.745723Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:15:02.746383Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:15:02.746443Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2394:3137], StatRequests.size() = 1 2025-04-17T18:15:03.295008Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NGQ4YjUyNjItNDdlMjE0NDEtMjQwNzZjMGMtYjBiMWQwOTU=, TxId: 2025-04-17T18:15:03.295105Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NGQ4YjUyNjItNDdlMjE0NDEtMjQwNzZjMGMtYjBiMWQwOTU=, TxId: 2025-04-17T18:15:03.296107Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:15:03.300675Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:15:03.334925Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2430:3160]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:15:03.335095Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:15:03.335137Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2430:3160], StatRequests.size() = 1 2025-04-17T18:15:03.558683Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NDFjNGMzMDctMzVmNmM3YTgtNGFlZTVjODYtOWNmNWU5Mw==, TxId: 2025-04-17T18:15:03.558769Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NDFjNGMzMDctMzVmNmM3YTgtNGFlZTVjODYtOWNmNWU5Mw==, TxId: 2025-04-17T18:15:03.559983Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:15:03.563101Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-17T18:15:03.614838Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2462:3175]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:15:03.615045Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:15:03.615094Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2462:3175], StatRequests.size() = 1 2025-04-17T18:15:03.733888Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NjNkMTVmMjYtYzQxYmViNzMtYzc1NTcyOTgtYzhiY2YzNDI=, TxId: 01js2fc8s1ey9ms6wha6zt658n 2025-04-17T18:15:03.734033Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=NjNkMTVmMjYtYzQxYmViNzMtYzc1NTcyOTgtYzhiY2YzNDI=, TxId: 01js2fc8s1ey9ms6wha6zt658n >> StatisticsSaveLoad::ForbidAccess [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-04-17T18:15:02.388485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347285164778399:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:02.388603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db7/r3tmp/tmpPgK2p6/pdisk_1.dat 2025-04-17T18:15:02.755054Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:02.799248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:02.799334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:02.800934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5269 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:03.037601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.063257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:03.070025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.244192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 waiting... 2025-04-17T18:15:03.291081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.322950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:03.343913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 28775, MsgBus: 29028 2025-04-17T18:14:13.075831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347075406814775:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:13.075893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001584/r3tmp/tmpXFabGs/pdisk_1.dat 2025-04-17T18:14:13.399996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28775, node 1 2025-04-17T18:14:13.462299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:13.462403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:13.475026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:13.565072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:13.565093Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:13.565099Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:13.565202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29028 TClient is connected to server localhost:29028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:14.102866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
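[editor's note] The `TFlatTest::OutOfDiskSpace` issue stack just above bottoms out in `Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028`: `SelectRow` (the legacy MiniKQL point-read primitive) requires a value for every primary-key column, so probing a two-column key with a single value is rejected before execution. The earlier `column 'value' not exist` resolve error is the same test exercising `ESchemeOpAlterTable` column changes. A hedged YQL illustration of the distinction; the table layout below is invented, since the real schema of `/dc-1/Table` is not shown in the log:

    -- Invented two-column-key table standing in for /dc-1/Table.
    CREATE TABLE `/dc-1/Table` (
        Key1  Uint64,
        Key2  Uint64,
        Value Utf8,
        PRIMARY KEY (Key1, Key2)
    );

    -- Full key: the point read SelectRow expects.
    SELECT Value FROM `/dc-1/Table` WHERE Key1 = 1 AND Key2 = 2;

    -- Only one of the two key columns: legal in YQL as a range read, but the
    -- SelectRow point-read path rejects it with code 2028, as logged above.
    SELECT Value FROM `/dc-1/Table` WHERE Key1 = 1;
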
2025-04-17T18:14:14.130465Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:16.162460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347088291717314:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.162585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347088291717318:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.162664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:16.166527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:14:16.176458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347088291717328:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:14:16.268478Z node 1 :TX_PROXY ERROR: Actor# [1:7494347088291717379:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14636, MsgBus: 8377 2025-04-17T18:14:17.238101Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347094265084908:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:17.238172Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001584/r3tmp/tmpKPzC5t/pdisk_1.dat 2025-04-17T18:14:17.315555Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14636, node 2 2025-04-17T18:14:17.350380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:17.350459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:17.355020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:17.402609Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:17.402632Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:17.402638Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:17.402728Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8377 TClient is connected to server localhost:8377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:17.806606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:17.820258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
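[editor's note] Looking back at the `StatisticsSaveLoad::Simple` and `StatisticsSaveLoad::Delete` sections earlier in this output, the `[TQueryBase] RunDataQuery` records spell out the whole `.metadata/_statistics` round-trip: a parameterized UPSERT to save per-column statistics, a SELECT keyed by `(owner_id, local_path_id, stat_type, column_tag)` to load them, and a DELETE by `(owner_id, local_path_id)`, after which the next load finishes with `BAD_REQUEST: No data`. Note the UPSERT declarations print as bare `AS List;` there — the element types look stripped, likely by markup rendering of `<...>`. The consolidated sketch below restores them as `List<Uint32>` / `List<String>`; that is an assumption inferred from the scalar `Uint32` tag and opaque data blob in the logged SELECT, not something the log states:

    -- Save (first RunDataQuery in the StatisticsSaveLoad logs).
    DECLARE $owner_id      AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type     AS Uint32;
    DECLARE $column_tags   AS List<Uint32>;  -- assumed: printed as bare "List" in the log
    DECLARE $data          AS List<String>;  -- assumed: printed as bare "List" in the log

    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

    -- Load (separate RunDataQuery): point lookup on the same key, as logged.
    DECLARE $column_tag AS Uint32;
    SELECT data FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id
      AND stat_type = $stat_type AND column_tag = $column_tag;
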
2025-04-17T18:14:17.943090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:18.081754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:18.135761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:19.759901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347102855021046:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:19.759973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:19.849344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:19.884656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:19.917776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:19.952914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:19.989467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.026633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.100364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.188323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347107149989013:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.188388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.188599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347107149989018:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.191390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePo ... not found or you don't have access permissions } 2025-04-17T18:14:55.139855Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.177420Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.209854Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.242849Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.276310Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.346061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.415209Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:55.496050Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347254403515694:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.496162Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.496180Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347254403515699:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:55.500048Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:55.509677Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347254403515701:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:55.597751Z node 6 :TX_PROXY ERROR: Actor# [6:7494347254403515759:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:56.084985Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347237223644057:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:56.085072Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25568, MsgBus: 16156 2025-04-17T18:14:58.295963Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347270465692072:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:58.296029Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001584/r3tmp/tmpcjXn2N/pdisk_1.dat 2025-04-17T18:14:58.419888Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:58.450470Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:58.450579Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:58.453144Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25568, node 7 2025-04-17T18:14:58.499643Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:58.499669Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:58.499681Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:58.499878Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16156 TClient is connected to server localhost:16156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:59.118277Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.140779Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.219535Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.434233Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.521547Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.617253Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347287645563053:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.617370Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.687663Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.733129Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.778683Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.824806Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.867312Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.947166Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.001259Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.072957Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347291940531016:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.073128Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.073449Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347291940531021:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.077988Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:03.093284Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347291940531023:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:03.149695Z node 7 :TX_PROXY ERROR: Actor# [7:7494347291940531077:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:03.298288Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347270465692072:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:03.298374Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] >> TFlatTest::ShardFreezeUnfreeze [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: 2025-04-17T18:14:56.304249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:14:56.304398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:14:56.304548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000dd1/r3tmp/tmpkf5v03/pdisk_1.dat 2025-04-17T18:14:56.693388Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22706, node 1 2025-04-17T18:14:56.935742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:56.935806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:56.935841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:56.936499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:14:56.938720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:14:57.030973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:57.031103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:57.047763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14751 2025-04-17T18:14:57.556324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:00.507962Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:15:00.552687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:00.552806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:00.590150Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:15:00.592210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:00.839996Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.840549Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.841267Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.841615Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.841726Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.841830Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.841939Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.842059Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:00.842132Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:15:01.012959Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.013078Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.026878Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:01.148349Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.189703Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:15:01.189814Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:15:01.217266Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:15:01.218999Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:15:01.219154Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:15:01.219194Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:15:01.219239Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:15:01.219301Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:15:01.219351Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:15:01.219416Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:15:01.220547Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:15:01.255733Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:15:01.255822Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:15:01.266406Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:15:01.270822Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:15:01.270959Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:15:01.283227Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:15:01.302765Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:15:01.302819Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:15:01.302885Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:15:01.313580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:15:01.322999Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:15:01.323134Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:15:01.549899Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:15:01.731944Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:15:01.794076Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:15:02.738919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.739104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.860319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:15:03.353248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2545:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.353429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.354836Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2550:3128]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:15:03.355010Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:15:03.355095Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2552:3130] 2025-04-17T18:15:03.355168Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2552:3130] 2025-04-17T18:15:03.355752Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2553:2992] 2025-04-17T18:15:03.356004Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2552:3130], server id = [2:2553:2992], tablet id = 72075186224037894, status = OK 2025-04-17T18:15:03.356212Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2553:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:15:03.356283Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:15:03.356572Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:15:03.356649Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2550:3128], StatRequests.size() = 1 2025-04-17T18:15:03.376699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2557:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.376847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.377237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3139], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.383396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:15:03.560101Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:15:03.560161Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:15:03.646530Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2552:3130], schemeshard count = 1 2025-04-17T18:15:03.975271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2564:3141], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-17T18:15:04.139666Z node 1 :TX_PROXY ERROR: Actor# [1:2690:3216] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:04.155056Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2713:3232]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:15:04.155233Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:15:04.155272Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2713:3232], StatRequests.size() = 1 2025-04-17T18:15:04.206353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2fc8c691jtsdg9nec29w99, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRmMTkxYTEtYmI1YjEwMTQtMzUyYTdmZWItNDEyMTg0MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:15:04.386169Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2792:3262], for# user@builtin, access# DescribeSchema 2025-04-17T18:15:04.386259Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2792:3262], for# user@builtin, access# DescribeSchema 2025-04-17T18:15:04.398944Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2782:3258], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:15:04.400714Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjY4YWQ4ZjMtM2Q1N2FiZDctNmU0NDg5ZTgtNWE4YWFmZjU=, ActorId: [1:2773:3250], ActorState: ExecuteState, TraceId: 01js2fc9cwbnnvj2b72k9fqxg0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TFlatTest::WriteSplitWriteSplit [GOOD] >> TObjectStorageListingTest::TestSkipShards [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 9670, MsgBus: 31089 2025-04-17T18:14:00.984383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347022337019725:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:00.984444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a1/r3tmp/tmpnSLLak/pdisk_1.dat 2025-04-17T18:14:01.408140Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:01.417286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:01.417361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:01.419963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9670, node 1 2025-04-17T18:14:01.518692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:01.518718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:01.518741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:01.518860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31089 TClient is connected to server localhost:31089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:02.125217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:02.150683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.296995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.468963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:02.540152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:04.114754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347039516890689:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.114833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.480153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.510273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.539355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.568864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.604710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.640740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.679154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.763581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347039516891347:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.763684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.763933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347039516891352:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:04.768034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:04.782916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347039516891354:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:04.868340Z node 1 :TX_PROXY ERROR: Actor# [1:7494347039516891411:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:05.885386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.985403Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347022337019725:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:05.985485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:06.141234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.316639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.456173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.729694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15781, MsgBus: 4341 2025-04-17T18:14:07.839638Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347048744741069:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:07.839776Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a1/r3tmp/tmpivSAJc/pdisk_1.dat 2025-04-17T18:14:07.937502Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15781, node 2 2025-04-17T18:14:07.995129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:07.995216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:08.000019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:08.057993Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:08.058015Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:08.058024Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:08.058155Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4341 TClient is connected to 
server localhost:4341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Proce ... not found or you don't have access permissions } 2025-04-17T18:14:54.879270Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:54.879356Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347253705983688:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:54.883333Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:54.893398Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7494347253705983690:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:54.965923Z node 8 :TX_PROXY ERROR: Actor# [8:7494347253705983746:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:55.433509Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7494347236526112061:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:55.433589Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:56.204089Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:57.186020Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913697219, txId: 281474976715674] shutting down ---------QUERY---------- --!syntax_v1 SELECT * FROM `/Root/TableWithIntKey` WHERE Key1 IN (1, 2, 100, 101, 102, 200, 201, 201, 1000, 1001, 1002, 2000, 2001, 2002) AND (Key1 > 2000) ORDER BY Key1; ---------RESULT--------- [[[2001];#];[[2002];[2]]] ------------------------ Trying to start YDB, gRPC: 16983, MsgBus: 20262 2025-04-17T18:14:58.175982Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347268152302004:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:58.176049Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015a1/r3tmp/tmpj7l2GW/pdisk_1.dat 2025-04-17T18:14:58.301037Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:58.338721Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:58.338829Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:58.340259Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16983, node 9 2025-04-17T18:14:58.394787Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:58.394809Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:58.394820Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:58.394967Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20262 TClient is connected to server localhost:20262 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:59.024333Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:59.043028Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:59.195969Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.443512Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.522767Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.550544Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494347285332172977:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.550649Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.620592Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.669549Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.713516Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.802017Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.848163Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.926078Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.972787Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.081830Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494347289627140939:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.081970Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.086624Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7494347289627140944:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.092540Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:03.105952Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7494347289627140948:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:03.174762Z node 9 :TX_PROXY ERROR: Actor# [9:7494347289627141003:3555] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:03.177437Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7494347268152302004:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:03.177633Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:04.587351Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2025-04-17T18:15:05.202927Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913705234, txId: 281474976715674] shutting down 2025-04-17T18:15:05.754464Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913705794, txId: 281474976715676] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ >> KqpRanges::MergeRanges [GOOD] >> KqpRanges::Like >> TFlatTest::SplitThenMerge [GOOD] >> TFlatTest::PartBloomFilter [GOOD] >> KqpNotNullColumns::AlterAddIndex [GOOD] >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> TFlatTest::Init >> TObjectStorageListingTest::Listing >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] Test command err: Trying to start YDB, gRPC: 6732, MsgBus: 17763 2025-04-17T18:14:17.182192Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347091804738615:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:17.182274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157f/r3tmp/tmpitR6NV/pdisk_1.dat 2025-04-17T18:14:17.537398Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6732, node 1 2025-04-17T18:14:17.603684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-17T18:14:17.610436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:17.614582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:17.662812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:17.662835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:17.662845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:17.662941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17763 TClient is connected to server localhost:17763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:18.166579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:18.183289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:18.192382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:18.332297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:18.457636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:18.511935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:20.148298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347104689642272:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.148410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.439499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.467579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.504573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.537008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.574508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.611756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.648540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:20.722814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347104689642934:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.722904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.723013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347104689642939:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:20.726574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:20.736429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347104689642941:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:20.803100Z node 1 :TX_PROXY ERROR: Actor# [1:7494347104689642996:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:21.529033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:22.182276Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347091804738615:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:22.182338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:22.261902Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913662289, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 65399, MsgBus: 29826 2025-04-17T18:14:22.949212Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347116598065045:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:22.949757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157f/r3tmp/tmpG4BRNV/pdisk_1.dat 2025-04-17T18:14:23.116097Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:23.117846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:23.117914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:23.121442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65399, node 2 2025-04-17T18:14:23.266690Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:23.266715Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:23.266721Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:23.266815Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29826 TClient is connected to server localhost:29826 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:23.668525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:23.676180Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:23.685198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part pro ... 1474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.181334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.252952Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.298969Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:53.384370Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347249719724247:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:53.384474Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:53.384552Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347249719724252:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:53.388704Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:53.402801Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347249719724254:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:53.456899Z node 6 :TX_PROXY ERROR: Actor# [6:7494347249719724308:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:54.080543Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347232539852630:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:54.080626Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:54.657238Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:54.693975Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:54.735282Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32519, MsgBus: 1379 2025-04-17T18:14:58.433648Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347270448666663:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:58.433772Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00157f/r3tmp/tmprdPGbg/pdisk_1.dat 2025-04-17T18:14:58.573301Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:58.600985Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:58.601114Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:58.602645Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32519, node 7 2025-04-17T18:14:58.688163Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:58.688188Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:58.688204Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:58.688372Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1379 TClient is connected to server localhost:1379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:59.375885Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.390533Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.490378Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.710090Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:59.799394Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.762686Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347287628537645:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.762918Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:02.830662Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.906530Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.962505Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.012987Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.054970Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.096831Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.147732Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.236018Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347291923505604:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.236143Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.236445Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347291923505609:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.243414Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:03.263954Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347291923505611:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:03.348723Z node 7 :TX_PROXY ERROR: Actor# [7:7494347291923505669:3557] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:03.433846Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347270448666663:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:03.433923Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:04.860742Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpNewEngine::LookupColumns [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-04-17T18:15:01.207432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347281720095465:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.207720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dc7/r3tmp/tmp1ETKgs/pdisk_1.dat 2025-04-17T18:15:01.751756Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.759519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.759609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.762766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26079 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
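The recurring sequence in these dumps — KQP_WORKLOAD_SERVICE reporting `Resource pool default not found` with NOT_FOUND, TPoolCreatorActor scheduling a retry on `Transaction ... completed, doublechecking`, then TX_PROXY logging `path exist, request accepts it` for `/Root/.metadata/workload_manager/pools/default` — is the workload manager lazily bootstrapping the default resource pool on first query; the WARN/ERROR lines are expected races of that bootstrap, not test failures. For reference, a pool can also be declared explicitly; a hedged YQL sketch (the pool name and limit values are illustrative, not what these tests use):

--!syntax_v1
-- Illustrative only: explicit resource pool DDL. The tests rely on the
-- implicitly created 'default' pool; the limits below are made-up examples.
CREATE RESOURCE POOL demo_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);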
2025-04-17T18:15:02.022916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.052561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.247851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.276430Z node 1 :TX_PROXY ERROR: Actor# [1:7494347286015063364:2396] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-04-17T18:15:02.281431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.294851Z node 1 :TX_PROXY ERROR: Actor# [1:7494347286015063406:2432] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-04-17T18:15:04.461286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347295379560869:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:04.461328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dc7/r3tmp/tmpOXSMJS/pdisk_1.dat 2025-04-17T18:15:04.623070Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:04.644612Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:04.644688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:04.646249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10339 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:04.831544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.846760Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:15:04.854257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.926034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.952706Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715660: 2025-04-17T18:15:04.953002Z node 2 :TX_PROXY ERROR: Actor# [2:7494347295379561593:2393] txid# 281474976715660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-17T18:15:04.954158Z node 2 :TX_PROXY ERROR: Actor# [2:7494347295379561593:2393] txid# 281474976715660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-17T18:15:04.954224Z node 2 :TX_PROXY ERROR: Actor# [2:7494347295379561593:2393] txid# 281474976715660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-17T18:15:04.963616Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715661: 2025-04-17T18:15:04.963842Z node 2 :TX_PROXY ERROR: Actor# [2:7494347295379561601:2398] txid# 281474976715661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-17T18:15:04.963900Z node 2 :TX_PROXY ERROR: Actor# [2:7494347295379561601:2398] txid# 281474976715661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-17T18:15:04.963915Z node 2 :TX_PROXY ERROR: Actor# [2:7494347295379561601:2398] txid# 281474976715661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-17T18:15:04.979726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2025-04-17T18:15:01.257624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347282985756849:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.273747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dc4/r3tmp/tmpab0570/pdisk_1.dat 2025-04-17T18:15:01.720196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.720312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.722921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:01.725531Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15270 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.037030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.055828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.077911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:02.345666Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:02.373676Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.033s,wait=0.026s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:02.400425Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:02.411012Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-04-17T18:15:02.448621Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913702210 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-04-17T18:15:02.603922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:15:02.604201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-04-17T18:15:02.610350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:02.610388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:15:02.610400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-17T18:15:02.610664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-17T18:15:02.610969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-04-17T18:15:02.611015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:15:02.611898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:15:02.611986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-04-17T18:15:02.612114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.612142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480
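The TSplitMerge propose above splits TableOld's single shard at Uint32 keys 100 and 200: in the accepted op, the packed bounds `\001\000\004\000\000\000d\000\000\000` and `\001\000\004\000\000\000\310\000\000\000` encode those keys (`d` is 0x64 = 100, `\310` is octal for 200), producing three destination ranges. The test drives this through the internal ESchemeOpSplitMergeTablePartitions scheme op; at the DDL level a comparable pre-split can be requested at table creation. A sketch under that assumption (the table name and payload column are hypothetical):

--!syntax_v1
-- Sketch: a table created pre-split at the same boundaries the test uses.
-- The test itself issues the scheme operation directly instead of DDL.
CREATE TABLE `/dc-1/Dir/TableOldPresplit` (
    Key Uint32,
    Value Utf8,                       -- hypothetical payload column
    PRIMARY KEY (Key)
) WITH (
    PARTITION_AT_KEYS = (100, 200)    -- shards: Key < 100, 100 <= Key < 200, Key >= 200
);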
2025-04-17T18:15:02.612456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 2025-04-17T18:15:02.614383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:02.614499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:02.614900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-17T18:15:02.614963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-17T18:15:02.614997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-04-17T18:15:02.615043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2025-04-17T18:15:02.615060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-04-17T18:15:02.615069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2025-04-17T18:15:02.615076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 2025-04-17T18:15:02.618743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710680, at schemeshard: 72057594046644480 2025-04-17T18:15:02.618771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710680, ready parts: 0/1, is published: true 2025-04-17T18:15:02.618791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710680, at schemeshard: 72057594046644480 2025-04-17T18:15:02.627310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle T ... Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.552641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347301594611845 RawX2: 4503608217307456 } TabletId: 72075186224037892 State: 4 2025-04-17T18:15:05.552649Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.552694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347301594611504 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-04-17T18:15:05.552702Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.552785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.552824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.552845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.552864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.552884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.553351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347301594612047 RawX2: 4503608217307481 } TabletId: 72075186224037893 State: 4 2025-04-17T18:15:05.553383Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.553609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.554136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347301594612048 RawX2: 4503608217307482 } TabletId: 72075186224037894 State: 4 2025-04-17T18:15:05.554168Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.554309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347301594612048 RawX2: 4503608217307482 } TabletId: 72075186224037894 State: 4 2025-04-17T18:15:05.554347Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.554469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347301594611507 RawX2: 4503608217307387 } TabletId: 72075186224037888 State: 4 2025-04-17T18:15:05.554497Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.554601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347301594611844 RawX2: 4503608217307455 } TabletId: 72075186224037891 State: 4 2025-04-17T18:15:05.554619Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:05.554692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.554730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.554991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.555031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:05.555226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:15:05.555457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-17T18:15:05.555639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:15:05.555740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-17T18:15:05.555864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-17T18:15:05.555958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-17T18:15:05.556066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:15:05.556176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-17T18:15:05.556263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-04-17T18:15:05.556365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:15:05.556386Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-04-17T18:15:05.556553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:15:05.556563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:15:05.556589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:15:05.556729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-17T18:15:05.556741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-17T18:15:05.556768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-17T18:15:05.556786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:15:05.556793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:15:05.556810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-17T18:15:05.556818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-17T18:15:05.556861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-17T18:15:05.556983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:05.557089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-17T18:15:05.557192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:05.557300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:15:05.557372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:05.557473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:05.557559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:05.557579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:15:05.557611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:05.559285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-17T18:15:05.559307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-17T18:15:05.559316Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:15:05.559332Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-04-17T18:15:05.559336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-17T18:15:05.559344Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:15:05.559354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:15:05.559357Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-04-17T18:15:05.559362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:15:05.559369Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T18:15:05.559379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:15:05.559383Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-17T18:15:05.559390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:15:05.559415Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TFlatTest::CrossRW ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2025-04-17T18:15:01.211951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347283338675865:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.212124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dbf/r3tmp/tmpMo05GT/pdisk_1.dat 2025-04-17T18:15:01.767332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.767410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.769023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:01.812039Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63717 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success.
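The TFlatTest::PartBloomFilter run (header just above) covers the per-part bloom filter over row keys; the ESchemeOpAlterTable at opId 281474976710719 later in its dump is most likely the filter being toggled on the test table. The public knob for this is the KEY_BLOOM_FILTER table setting; a hedged sketch (the table path is hypothetical, since the dump truncates before showing any DDL):

--!syntax_v1
-- Sketch: enable the per-part key bloom filter on an existing row table.
ALTER TABLE `/dc-1/Dir/SomeTable` SET (KEY_BLOOM_FILTER = ENABLED);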
2025-04-17T18:15:02.044833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.070781Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.080851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:15:02.128392Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.136537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913702105 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1744913702182 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... (TRUNCATED) 2025-04-17T18:15:04.675431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347295479398435:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:04.675847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dbf/r3tmp/tmpuvnKQu/pdisk_1.dat 2025-04-17T18:15:04.824846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:04.824922Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:04.827652Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:04.837891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15909 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:05.012388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.022456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.410293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.420091Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710719, at schemeshard: 72057594046644480 >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: Trying to start YDB, gRPC: 27444, MsgBus: 3669 2025-04-17T18:14:32.571299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347156074704215:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:32.571373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001568/r3tmp/tmp20F2Q2/pdisk_1.dat 2025-04-17T18:14:32.917868Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27444, node 1 2025-04-17T18:14:32.968369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:32.968646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:32.970809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:32.992492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:32.992519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:32.992530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:32.992663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3669 TClient is connected to server localhost:3669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:33.545611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.561137Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:33.577332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.721263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.905333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.963728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:35.676356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347168959607883:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.676458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.987151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.016195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.039964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.066791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.090455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.124571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.160098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.242036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347173254575834:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.242115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.242448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347173254575839:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.246424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:36.256562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347173254575841:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:36.321015Z node 1 :TX_PROXY ERROR: Actor# [1:7494347173254575895:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:37.574479Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347156074704215:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:37.574627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9759, MsgBus: 9760 2025-04-17T18:14:38.395240Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347183782646051:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:38.395300Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001568/r3tmp/tmpL4Sc7Z/pdisk_1.dat 2025-04-17T18:14:38.503273Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9759, node 2 2025-04-17T18:14:38.531881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:38.531955Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:38.534656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:38.565141Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:38.565169Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:38.565175Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:38.565275Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9760 TClient is connected to server localhost:9760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
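The KQP_WORKLOAD_SERVICE warnings above trace a benign first-use race: the pool fetcher reports NOT_FOUND for the default resource pool, a creator actor is scheduled with a retry ("completed, doublechecking"), and the follow-up create may find the path already present ("path exist, request accepts it"), which is accepted as success. A minimal sketch of that ensure-exists-with-retry pattern, using hypothetical names rather than YDB's actual workload-manager types:

// Hypothetical sketch; not YDB's real workload-manager implementation.
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

enum class EStatus { Ok, NotFound, AlreadyExists, Failed };

// EnsureExists treats AlreadyExists as success, because a concurrent
// creator may have won the race (the "path exist, request accepts it"
// outcome seen in the log above).
EStatus EnsureExists(const std::function<EStatus()>& fetch,
                     const std::function<EStatus()>& create,
                     int maxAttempts = 5) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (fetch() == EStatus::Ok) {
            return EStatus::Ok;  // already provisioned
        }
        EStatus st = create();
        if (st == EStatus::Ok || st == EStatus::AlreadyExists) {
            // Double-check that the object is now visible before declaring
            // victory (mirrors the "doublechecking" retry in the log).
            if (fetch() == EStatus::Ok) {
                return EStatus::Ok;
            }
        }
        // Back off before the next attempt.
        std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));
    }
    return EStatus::Failed;
}

int main() {
    bool created = false;
    auto fetch = [&] { return created ? EStatus::Ok : EStatus::NotFound; };
    // Simulate losing the race: our create sees the pool already there.
    auto create = [&] { created = true; return EStatus::AlreadyExists; };
    std::cout << (EnsureExists(fetch, create) == EStatus::Ok ? "ok" : "fail") << "\n";
}

Treating AlreadyExists as success is what keeps the race benign: whichever actor loses still observes the pool afterwards, so the warnings resolve without test failures.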
2025-04-17T18:14:38.961060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.969042Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:38.980479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.051540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:1 ... pe: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.833832Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.901874Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.965383Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347270888939420:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.965489Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.965493Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347270888939425:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.968313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:58.976820Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347270888939427:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:59.052604Z node 5 :TX_PROXY ERROR: Actor# [5:7494347275183906777:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:00.074497Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347258004035082:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:00.074571Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:00.108773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25791, MsgBus: 8278 2025-04-17T18:15:01.219029Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347284013854336:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.219087Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001568/r3tmp/tmpczugLt/pdisk_1.dat 2025-04-17T18:15:01.362310Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.375556Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.375658Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.377104Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25791, node 6 2025-04-17T18:15:01.437037Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:01.437064Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:01.437075Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:01.437217Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8278 TClient is connected to server localhost:8278 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:02.021763Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.029423Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:15:02.041437Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.120884Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:02.302681Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.392571Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.245831Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347301193725278:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:05.245994Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:05.309749Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.379509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.419032Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.455484Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.532244Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.613995Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.654849Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.718947Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347301193725943:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:05.719041Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:05.719123Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347301193725948:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:05.722922Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:05.733755Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347301193725950:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:05.818235Z node 6 :TX_PROXY ERROR: Actor# [6:7494347301193726005:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:06.237613Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347284013854336:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:06.237701Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:07.123606Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:07.207192Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:15:07.251845Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-04-17T18:15:01.524833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347280326865512:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.547055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db9/r3tmp/tmpTvX95H/pdisk_1.dat 2025-04-17T18:15:02.043207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:02.043320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:02.051508Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:02.061610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29262 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.338674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:02.365088Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:02.378268Z node 1 :FLAT_TX_SCHEMESHARD WARN: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), tableStr: /dc-1/Dir1, tableId: , opId: 281474976710659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2025-04-17T18:15:02.384775Z node 1 :TX_PROXY ERROR: Actor# [1:7494347284621833299:2300] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-04-17T18:15:04.781663Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347293740998774:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:04.781734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db9/r3tmp/tmpQqSj6E/pdisk_1.dat 2025-04-17T18:15:04.972675Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:04.975979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:04.976060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:04.977345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8647 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:05.191449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:05.218137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.380958Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:05.391957Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:05.417688Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:05.422371Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913705332 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-04-17T18:15:05.450325Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:05.452069Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.452240Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:05.453375Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.453519Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:05.454067Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:05.455319Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.455452Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:05.455848Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:05.456424Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.456527Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:05.456879Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:05.457480Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.457574Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:05.457976Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:05.461942Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.462072Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:05.462674Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:05.463289Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.463388Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:05.463790Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:05.464365Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.466039Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:05.466506Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:05.467186Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.467291Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:05.467703Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:05.468338Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.468438Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:05.468824Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:05.469434Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.469535Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:05.469945Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:05.470594Z node 2 
:TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:05.470993Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:05.471388Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:05.47206 ... .079847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-17T18:15:06.079963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347298035967335 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-04-17T18:15:06.079978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715693, tablet: 72075186224037894, partId: 0 2025-04-17T18:15:06.080040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347298035967335 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-04-17T18:15:06.080051Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-17T18:15:06.080088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7494347298035967335 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-04-17T18:15:06.080115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:06.080127Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.080144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-17T18:15:06.080158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-04-17T18:15:06.080173Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715693:0 129 -> 240 2025-04-17T18:15:06.080812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715693 2025-04-17T18:15:06.080844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715693 2025-04-17T18:15:06.080864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715693 2025-04-17T18:15:06.080914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.080949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 
2025-04-17T18:15:06.080993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.081023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.081092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.081092Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2025-04-17T18:15:06.081113Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:15:06.081136Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-17T18:15:06.081239Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2025-04-17T18:15:06.081256Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-17T18:15:06.081455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:15:06.081559Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-04-17T18:15:06.081568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-04-17T18:15:06.081586Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-04-17T18:15:06.081595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-04-17T18:15:06.081609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2025-04-17T18:15:06.081644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7494347302330934860:2428] message: TxId: 281474976715693 2025-04-17T18:15:06.081659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-04-17T18:15:06.081674Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715693:0 2025-04-17T18:15:06.081683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715693:0 2025-04-17T18:15:06.081772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:06.083381Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:06.083431Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-17T18:15:06.084922Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-17T18:15:06.085091Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 
2025-04-17T18:15:06.085141Z node 2 :TX_DATASHARD INFO: 72075186224037894 Initiating switch from PreOffline to Offline state 2025-04-17T18:15:06.086655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347298035966707 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-17T18:15:06.086736Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:06.087107Z node 2 :TX_DATASHARD INFO: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:06.087547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347298035967335 RawX2: 4503608217307478 } TabletId: 72075186224037894 State: 4 2025-04-17T18:15:06.087590Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:06.087814Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T18:15:06.087851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:06.088075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:06.088078Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037894 state Offline TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-17T18:15:06.090011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:15:06.090273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:15:06.090431Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-17T18:15:06.090593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-17T18:15:06.090737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:06.090852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:06.090865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:15:06.090905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:06.091602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:15:06.091616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:15:06.091644Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-17T18:15:06.091657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-17T18:15:06.091683Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:15:06.094359Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:15:06.095311Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:15:06.095384Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-17T18:15:06.096259Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-04-17T18:15:06.096292Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:7494347298035967452:2803], serverId# [2:7494347298035967453:2804], sessionId# [0:0:0] 2025-04-17T18:15:06.096514Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-04-17T18:15:06.096554Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 2025-04-17T18:15:06.097774Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2025-04-17T18:15:01.209358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347283914638618:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.210330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dcc/r3tmp/tmp2iqIKZ/pdisk_1.dat 2025-04-17T18:15:01.715119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.755790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.755886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.769429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10325, node 1 2025-04-17T18:15:02.008738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:02.008766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:02.008775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:02.008928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1130 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.590157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:02.620879Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:02.638463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dcc/r3tmp/tmp9glDd6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6258, node 2 TClient is connected to server localhost:20062 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... 
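The TClient::Ls request/response pairs above implement the WaitRootIsUp readiness check: the client polls the scheme root until a SUCCESS listing comes back before the test proceeds. A minimal sketch of such a bounded poll loop, with illustrative names rather than the real TClient API:

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Poll a readiness probe until it succeeds or the deadline passes.
// lsRootSucceeds stands in for "TClient::Ls on the root returned SUCCESS".
bool WaitRootIsUp(const std::function<bool()>& lsRootSucceeds,
                  std::chrono::milliseconds timeout = std::chrono::seconds(10),
                  std::chrono::milliseconds step = std::chrono::milliseconds(100)) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (lsRootSucceeds()) {
            return true;   // the log then prints "WaitRootIsUp '...' success."
        }
        std::this_thread::sleep_for(step);  // root not published yet; retry
    }
    return false;          // caller fails the test on timeout
}

int main() {
    int calls = 0;
    // Simulated probe: the root becomes visible on the third Ls.
    bool ok = WaitRootIsUp([&] { return ++calls >= 3; });
    std::cout << (ok ? "up" : "timeout") << " after " << calls << " probes\n";
}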
>> TLocksFatTest::PointSetNotBreak >> TFlatTest::SelectRangeItemsLimit >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> KqpReturning::ReturningWorksIndexedDelete-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService >> TObjectStorageListingTest::Split >> KqpSqlIn::TupleParameter [GOOD] >> KqpSqlIn::TupleLiteral ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LookupColumns [GOOD] Test command err: Trying to start YDB, gRPC: 22077, MsgBus: 21492 2025-04-17T18:14:25.084623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347129534891764:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:25.086083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001576/r3tmp/tmp6DmQDF/pdisk_1.dat 2025-04-17T18:14:25.393264Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22077, node 1 2025-04-17T18:14:25.466921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:25.467177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:25.478236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:25.502286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:25.502310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:25.502316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:25.502412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21492 TClient is connected to server localhost:21492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:25.957385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:25.975294Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:25.980658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:26.120766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:26.278133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:26.343642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:27.982294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347138124828125:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:27.982429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.360997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.393353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.426328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.491929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.524371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.559883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.624480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:28.711954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347142419796098:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.712046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.712083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347142419796103:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:28.715592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:28.725005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347142419796105:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:28.802573Z node 1 :TX_PROXY ERROR: Actor# [1:7494347142419796162:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:30.078675Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347129534891764:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:30.078752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2900, MsgBus: 26173 2025-04-17T18:14:30.738963Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347149192748000:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:30.739525Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001576/r3tmp/tmpAN5OzB/pdisk_1.dat 2025-04-17T18:14:30.924616Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2900, node 2 2025-04-17T18:14:30.969987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:30.970007Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:30.970013Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:30.970120Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:14:31.075524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:31.075620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:31.076970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26173 TClient is connected to server localhost:26173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:31.405065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:31.412488Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:31.422343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:31.499200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting.. ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.018039Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.051575Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.085011Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.119173Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.155449Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.225488Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:58.293250Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347271471959568:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.293380Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.293417Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347271471959573:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:58.297266Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:58.307440Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347271471959575:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:58.376796Z node 6 :TX_PROXY ERROR: Actor# [6:7494347271471959629:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:59.526370Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347254292087957:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:59.526465Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28444, MsgBus: 7078 2025-04-17T18:15:01.428504Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347281249209134:2088];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001576/r3tmp/tmpiiMRlk/pdisk_1.dat 2025-04-17T18:15:01.485286Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:15:01.660270Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.685127Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.685228Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.688630Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28444, node 7 2025-04-17T18:15:01.763937Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:01.763963Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:01.763973Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:01.764118Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7078 TClient is connected to server localhost:7078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:15:02.490103Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.505848Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:02.519438Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.607131Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:02.819762Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.909499Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.629385Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347298429080045:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:05.629466Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:05.704364Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.740052Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.776492Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.810276Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.891725Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.984472Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.064334Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.164344Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347302724048017:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:06.164452Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:06.164663Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347302724048022:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:06.168763Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:06.183147Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347302724048024:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:06.253148Z node 7 :TX_PROXY ERROR: Actor# [7:7494347302724048078:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:06.441024Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347281249209134:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:06.441202Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-04-17T18:15:02.494884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347284967992271:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:02.498852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db4/r3tmp/tmpegHCDV/pdisk_1.dat 2025-04-17T18:15:02.888297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:02.892477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:02.892574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:02.896472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6073 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:03.192816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:03.214921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:03.226395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.392792Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:03.399968Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:03.424597Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:03.430930Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913703351 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) Copy TableOld to Table 2025-04-17T18:15:03.558157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:15:03.558475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.558940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-17T18:15:03.558972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-17T18:15:03.558991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:03.559031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-17T18:15:03.559045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-17T18:15:03.559180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-17T18:15:03.559268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:15:03.561101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:15:03.561133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-17T18:15:03.561564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:03.561666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-04-17T18:15:03.561819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:15:03.561841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 
2025-04-17T18:15:03.561947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-17T18:15:03.562032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:15:03.562049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347284967992748:2243], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-04-17T18:15:03.562060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347284967992748:2243], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-04-17T18:15:03.562087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.562115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-04-17T18:15:03.562362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:03.562475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:03.564012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:03.564081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:03.564104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-17T18:15:03.564120Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-17T18:15:03.564131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:15:03.564338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:03.564380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 
Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:03.564386Z node 1 :FLAT_ ... 5:07.370015Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-04-17T18:15:07.370137Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2025-04-17T18:15:07.370157Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-17T18:15:07.370981Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return to tablet 72075186224037891 2025-04-17T18:15:07.371015Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:07.371111Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state Check that tablet 72075186224037889 was deleted 2025-04-17T18:15:07.372310Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-17T18:15:07.373704Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:07.373828Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:07.373897Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2025-04-17T18:15:07.373932Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:07.374001Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-17T18:15:07.375418Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:07.375495Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-17T18:15:07.377360Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7494347307958612310:2690], serverId# [2:7494347307958612315:3452], sessionId# [0:0:0] 2025-04-17T18:15:07.377402Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:07.377446Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:07.377498Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:07.377901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347303663643286 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-17T18:15:07.377948Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 
72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:07.378282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:07.378290Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T18:15:07.378389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347303663643576 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-04-17T18:15:07.378426Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:07.378549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347303663643576 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-04-17T18:15:07.378579Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:07.378815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347303663643575 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-04-17T18:15:07.378861Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:07.379018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:07.379048Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:15:07.379075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:07.379098Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:15:07.379341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:07.379349Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-17T18:15:07.379905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:15:07.380125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:07.380259Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-17T18:15:07.380344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:07.380507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-17T18:15:07.380669Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:15:07.380689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet 
reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:07.380755Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-17T18:15:07.380843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:15:07.381006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-17T18:15:07.381153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:07.381178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-17T18:15:07.381217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:15:07.381231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:15:07.381246Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:15:07.381247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:07.381699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:15:07.381723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:15:07.381886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:15:07.381905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:15:07.381948Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:15:07.381980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:15:07.381997Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7494347303663643762:2627], serverId# [2:7494347303663643763:2628], sessionId# [0:0:0] 2025-04-17T18:15:07.382007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:15:07.382016Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7494347303663643667:2561], serverId# [2:7494347303663643668:2562], sessionId# [0:0:0] 2025-04-17T18:15:07.382022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:15:07.382042Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-17T18:15:07.382048Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:15:07.382059Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7494347307958612162:3320], serverId# [2:7494347307958612163:3321], 
sessionId# [0:0:0] 2025-04-17T18:15:07.382075Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7494347303663643651:2549], serverId# [2:7494347303663643653:2551], sessionId# [0:0:0] 2025-04-17T18:15:07.382376Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:15:07.382438Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-17T18:15:07.383162Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:15:07.383222Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-17T18:15:07.384662Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:15:07.384744Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-17T18:15:07.677235Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-04-17T18:15:07.678121Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-04-17T18:15:07.678766Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> TFlatTest::SplitEmptyToMany >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> TCancelTx::CrossShardReadOnly >> TFlatTest::RejectByPerShardReadSize >> TLocksTest::SetLockFail >> KqpNewEngine::DqSourceLimit [GOOD] >> KqpNewEngine::DqSourceSequentialLimit >> TFlatTest::WriteSplitByPartialKeyAndRead >> TLocksTest::GoodSameKeyLock >> TFlatTest::CopyTableAndCompareColumnsSchema >> KqpReturning::ReturningSerial [GOOD] >> KqpReturning::ReturningColumnsOrder >> TFlatTest::LargeProxyReplyRW [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> TLocksTest::BrokenLockUpdate >> KqpRanges::IsNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInValue >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLimit >> TFlatTest::CopyCopiedTableAndRead >> KqpExtractPredicateLookup::OverflowLookup [GOOD] >> KqpExtractPredicateLookup::ComplexRange >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> KqpSqlIn::KeyTypeMissmatch_Int [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Str >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> KqpNewEngine::DqSource [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-04-17T18:15:01.249770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347281345677014:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.251205Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dca/r3tmp/tmpQ5WFdp/pdisk_1.dat 2025-04-17T18:15:01.725771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.752074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.752194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.761316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9495 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.024733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.042629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.058361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.067473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.249771Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347281345677014:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:06.249842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:06.716390Z node 1 :TX_PROXY ERROR: Actor# [1:7494347302820517037:4140] txid# 281474976711010 MergeResult Result too large TDataReq marker# P18 2025-04-17T18:15:06.716461Z node 1 :TX_PROXY ERROR: Actor# [1:7494347302820517037:4140] txid# 281474976711010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-04-17T18:15:07.266763Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347307506778502:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:07.266811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dca/r3tmp/tmpbJQ515/pdisk_1.dat 2025-04-17T18:15:07.375726Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:07.413832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:07.413919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:07.415904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27714 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:07.556204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:07.562473Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.577733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-04-17T18:15:11.914159Z node 2 :TX_PROXY ERROR: Actor# [2:7494347324686651265:4143] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-04-17T18:15:11.914246Z node 2 :TX_PROXY ERROR: Actor# [2:7494347324686651265:4143] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> KqpNewEngine::DeleteByKey [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::ValidatePredicates >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSource [GOOD] Test command err: Trying to start YDB, gRPC: 32101, MsgBus: 8392 2025-04-17T18:14:27.389057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347135791940585:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:27.389140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001574/r3tmp/tmp3IOErp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32101, node 1 2025-04-17T18:14:27.717942Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:14:27.717968Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:14:27.721857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:27.751963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:27.751979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:27.751988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:27.752098Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:14:27.798763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:27.799294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:27.801162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8392 TClient is connected to server localhost:8392 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:28.221583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:28.246509Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:28.256079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:28.381806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:28.517464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:28.577377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.043913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347148676844249:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.044064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.308272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.378540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.415711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.452482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.529226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.562777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.593347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:30.688466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347148676844920:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.688617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.688795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347148676844926:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:30.692403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:30.702573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347148676844928:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:30.765291Z node 1 :TX_PROXY ERROR: Actor# [1:7494347148676844985:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:32.390261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347135791940585:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:32.398991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26813, MsgBus: 17521 2025-04-17T18:14:33.214486Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347163927373035:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:33.214568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001574/r3tmp/tmpgcGveJ/pdisk_1.dat 2025-04-17T18:14:33.372243Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:33.376880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:33.376961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:33.390409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26813, node 2 2025-04-17T18:14:33.461874Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:33.461897Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:33.461903Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:33.462002Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17521 TClient is connected to server localhost:17521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:33.887452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.894607Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:33.901390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.982410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation p ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.759977Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.837216Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.881864Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.959455Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.044661Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.119101Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.194452Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347288828293982:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.194567Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.195011Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347288828293987:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.200614Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:03.218614Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347288828293989:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:03.278905Z node 6 :TX_PROXY ERROR: Actor# [6:7494347288828294044:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:03.947960Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347267353455059:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:03.948033Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26684, MsgBus: 20073 2025-04-17T18:15:05.956281Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347297652379876:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:05.956326Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001574/r3tmp/tmpzxxFwt/pdisk_1.dat 2025-04-17T18:15:06.127911Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:06.140444Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:06.140548Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:06.144108Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26684, node 7 2025-04-17T18:15:06.214801Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:06.214830Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:06.214842Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:06.214991Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20073 TClient is connected to server localhost:20073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:15:06.914438Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.923293Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:15:06.929743Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.009134Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.263065Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.358748Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.183532Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347319127218103:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.183656Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.261928Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.313445Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.362166Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.406136Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.444390Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.486519Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.559622Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.679528Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347319127218771:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.679658Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.679975Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347319127218776:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.684676Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:10.697089Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347319127218778:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:10.781867Z node 7 :TX_PROXY ERROR: Actor# [7:7494347319127218834:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:10.960262Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347297652379876:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:10.960337Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> KqpRanges::Like [GOOD] >> TFlatTest::GetTabletCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteByKey [GOOD] Test command err: Trying to start YDB, gRPC: 5647, MsgBus: 15003 2025-04-17T18:14:29.427135Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347146514276034:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:29.427188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001570/r3tmp/tmp6RED65/pdisk_1.dat 2025-04-17T18:14:29.856981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:29.857054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:29.858818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:29.877246Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5647, node 1 2025-04-17T18:14:29.942136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:29.942153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:29.942192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:29.942312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15003 TClient is connected to server localhost:15003 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:30.424073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:30.453810Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:30.466233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:30.590743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:30.763246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:30.850399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:32.424187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347159399179704:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.424272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:32.735653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.764391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.798291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.822223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.849060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.880255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:32.950989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:33.018475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347163694147664:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:33.018550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:33.018732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347163694147669:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:33.022345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:33.033283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347163694147671:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:33.109880Z node 1 :TX_PROXY ERROR: Actor# [1:7494347163694147725:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:34.427246Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347146514276034:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:34.427313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25778, MsgBus: 18728 2025-04-17T18:14:35.199517Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347168597313172:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:35.199557Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001570/r3tmp/tmpQSiZvh/pdisk_1.dat 2025-04-17T18:14:35.323166Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25778, node 2 2025-04-17T18:14:35.354150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:35.354270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:35.355864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:35.383218Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:35.383239Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:35.383246Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:35.383333Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18728 TClient is connected to server localhost:18728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:14:35.815984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.829292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:35.880947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:36.064381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... lt not found or you don't have access permissions } 2025-04-17T18:15:03.022322Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.066553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.106852Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.147240Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.194751Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.233609Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.318334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:03.409116Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347290935665980:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.409223Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.409467Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347290935665985:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:03.414008Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:03.426287Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347290935665987:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:03.492522Z node 6 :TX_PROXY ERROR: Actor# [6:7494347290935666042:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:03.885124Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347269460827070:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:03.885210Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30160, MsgBus: 3709 2025-04-17T18:15:06.229639Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347304520647593:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:06.229711Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001570/r3tmp/tmpKU1pI3/pdisk_1.dat 2025-04-17T18:15:06.383443Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:06.390130Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:06.390250Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:06.391711Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30160, node 7 2025-04-17T18:15:06.518800Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:06.518829Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:06.518840Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:06.519003Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3709 TClient is connected to server localhost:3709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:15:07.159963Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.176556Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.256627Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.447008Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.533564Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.355841Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347321700518523:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.355971Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.430287Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.474924Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.550314Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.593091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.636460Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.715330Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.758908Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.829370Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347321700519190:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.829478Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.829917Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347321700519195:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.834167Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:10.847979Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347321700519197:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:10.917201Z node 7 :TX_PROXY ERROR: Actor# [7:7494347321700519251:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:11.230381Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347304520647593:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.230464Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTest::LargeDatashardReplyDistributed >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD] Test command err: Trying to start YDB, gRPC: 25991, MsgBus: 16617 2025-04-17T18:14:32.423837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347159817801154:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:32.424207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156a/r3tmp/tmpEFTudT/pdisk_1.dat 2025-04-17T18:14:32.789360Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:32.789755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:32.789850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:32.802008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25991, node 1 2025-04-17T18:14:32.866271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:32.866293Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:32.866306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:32.866443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16617 TClient is connected to server localhost:16617 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:33.373616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.394338Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:33.403596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:33.557057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:33.724051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:33.807288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:35.468108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347172702704812:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.468206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:35.785593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.817685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.848105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.876207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.915810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:35.956066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.029067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.117855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347176997672770:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.117953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.118156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347176997672775:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.121648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:36.133894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347176997672777:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:14:36.191107Z node 1 :TX_PROXY ERROR: Actor# [1:7494347176997672831:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:37.132475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.345759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.426719Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347159817801154:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:37.426777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:37.527599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.669781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:14:37.899975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63256, MsgBus: 5547 2025-04-17T18:14:39.138798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347186950498156:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:39.139349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156a/r3tmp/tmpGTw4SE/pdisk_1.dat 2025-04-17T18:14:39.254120Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:39.275354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:39.275426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:39.276713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63256, node 2 2025-04-17T18:14:39.374784Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:39.374808Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:39.374818Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:39.374937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5547 TClient is connected to 
server localhost:5547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 ... operation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:04.489911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:04.560848Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:04.632310Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:04.703642Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:04.810782Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347296971666323:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:04.810872Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:04.811367Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347296971666328:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:04.817502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:04.827619Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347296971666330:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:04.927154Z node 5 :TX_PROXY ERROR: Actor# [5:7494347296971666386:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:05.663290Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347279791794685:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:05.663365Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:06.082527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.755255Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913706788, txId: 281474976715674] shutting down Trying to start YDB, gRPC: 65201, MsgBus: 19653 2025-04-17T18:15:08.153171Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347314170469864:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:08.153235Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00156a/r3tmp/tmp2WlqxI/pdisk_1.dat 2025-04-17T18:15:08.292033Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:08.327977Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:08.328080Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:08.329890Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65201, node 6 2025-04-17T18:15:08.396521Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:08.396550Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:08.396559Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:08.396718Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19653 TClient is connected to server localhost:19653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:08.978342Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:08.996342Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.078621Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.277385Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.367502Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:11.871659Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347327055373515:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:11.871749Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:11.946074Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.019136Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.058862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.099959Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.140579Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.237390Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.300125Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.392470Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347331350341475:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.392676Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.392935Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347331350341482:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.398789Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:12.412363Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347331350341484:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:12.472268Z node 6 :TX_PROXY ERROR: Actor# [6:7494347331350341538:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:13.153316Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347314170469864:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.153396Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:13.979009Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> TObjectStorageListingTest::SuffixColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-04-17T18:15:09.532183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347315205351923:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:09.532413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d9e/r3tmp/tmp4QMLtz/pdisk_1.dat 2025-04-17T18:15:09.905679Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:09.953234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:09.953345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:09.956291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1385 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:10.174905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
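
[editor's note] The recurring KQP_WORKLOAD_SERVICE warnings above ("Resource pool default not found or you don't have access permissions", then TPoolCreatorActor scheduling a retry, then TX_PROXY reporting "path exist, request accepts it" for /Root/.metadata/workload_manager/pools/default) trace the lazy bootstrap of the default workload-manager pool: the first queries race to create it, one creation wins, and the others observe the already-existing path. A minimal sketch of pre-creating a pool explicitly so early queries do not hit NOT_FOUND -- this assumes the CREATE RESOURCE POOL DDL and the ydb Python SDK; the endpoint, pool name, and limits are illustrative, not taken from this run:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholder endpoint
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def create_pool(session):
    # Hypothetical pool settings; only the statement shape matters here.
    session.execute_scheme(
        "CREATE RESOURCE POOL sample_pool WITH ("
        "  CONCURRENT_QUERY_LIMIT = 10,"
        "  QUEUE_SIZE = 100"
        ")"
    )

pool.retry_operation_sync(create_pool)
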
2025-04-17T18:15:10.190426Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.203063Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:10.208408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d9e/r3tmp/tmpl1ecXu/pdisk_1.dat 2025-04-17T18:15:12.748964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:15:12.838261Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.848795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.848882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.850490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31144 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:13.135630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.141151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.158665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913713270 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) >> TObjectStorageListingTest::MaxKeysAndSharding >> TLocksFatTest::ShardLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] Test command err: Trying to start YDB, gRPC: 2673, MsgBus: 12998 2025-04-17T18:14:33.036159Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347161880029793:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:33.038350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001565/r3tmp/tmpFhVpLy/pdisk_1.dat 2025-04-17T18:14:33.418291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:33.456212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:33.456334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:33.459460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2673, node 1 2025-04-17T18:14:33.533276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:33.533301Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:33.533314Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:33.533428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12998 TClient is connected to server localhost:12998 WaitRootIsUp 'Root'... 
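
[editor's note] Each "TClient::Ls request / TClient::Ls response" pair above is a scheme describe round-trip: the response carries a PathDescription with the Self entry (path type, ACL and schema versions), the Children list (here the implicit .sys directory), and the DomainDescription, which the harness truncates after ProcessingParams. Outside the test harness the same information can be fetched through the scheme client; a rough equivalent with the ydb Python SDK (endpoint and path are placeholders):

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

entry = driver.scheme_client.describe_path("/Root")     # the Self part of PathDescription
listing = driver.scheme_client.list_directory("/Root")  # the Children part
print(entry.name, entry.type, entry.owner)
for child in listing.children:
    print(child.name, child.type)
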
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:34.076624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:34.102885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:36.019004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347174764932194:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.019119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.276565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:36.403312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347174764932298:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.403388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.403724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347174764932303:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:36.408010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:14:36.418015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347174764932305:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:14:36.493631Z node 1 :TX_PROXY ERROR: Actor# [1:7494347174764932356:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:36.633363Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347174764932399:2355], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-17T18:14:36.634787Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjFlOGM4ZmMtNTNmZGMzZTMtNWVmNGUwNjAtYzI0NTlhMWI=, ActorId: [1:7494347170469964894:2328], ActorState: ExecuteState, TraceId: 01js2fbea43t8tnj4dhceke2bp, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:14:36.663709Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347174764932408:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-17T18:14:36.663893Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjFlOGM4ZmMtNTNmZGMzZTMtNWVmNGUwNjAtYzI0NTlhMWI=, ActorId: [1:7494347170469964894:2328], ActorState: ExecuteState, TraceId: 01js2fbeb4cdfkr44x6vwq0m2t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 12501, MsgBus: 9373 2025-04-17T18:14:37.409085Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347180996175686:2161];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:37.414743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001565/r3tmp/tmphB9VeF/pdisk_1.dat 2025-04-17T18:14:37.541433Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:37.572660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:37.572738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:37.578800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12501, node 2 2025-04-17T18:14:37.659833Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:37.659863Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:37.659869Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:37.659970Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9373 TClient is connected to server localhost:9373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:38.045574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:40.469978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347193881078131:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.470075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.491706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.580735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347193881078234:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.580829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.581204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347193881078239:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.585428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:55.940772Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:55.942330Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13608, node 5 2025-04-17T18:14:55.983653Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:55.983685Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:55.983694Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:55.983863Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31216 TClient is connected to server localhost:31216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:56.523075Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:56.529910Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:59.004878Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347272031419221:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:59.005009Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347272031419244:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:59.005093Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:59.008804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:14:59.017903Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347272031419250:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:14:59.076484Z node 5 :TX_PROXY ERROR: Actor# [5:7494347272031419301:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:59.120069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:00.788594Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347254851549406:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:00.797049Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:06.455151Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2fc9vs69ewpzrm115nemtz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=M2YyNTllYmItOTFkNTg0NGEtZTMxMzliNjQtMTQ3OGZlZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-17T18:15:06.455411Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=M2YyNTllYmItOTFkNTg0NGEtZTMxMzliNjQtMTQ3OGZlZjc=, ActorId: [5:7494347293506256674:2521], ActorState: ExecuteState, TraceId: 01js2fc9vs69ewpzrm115nemtz, Create QueryResponse for error on request, msg: 2025-04-17T18:15:07.907852Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01js2fcbfc6qcvx41sw9m464f8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZDZkODUyZS00ZDA4MGY2MC1jNTI4Yjg1ZC03MDQyN2Q4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
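
[editor's note] The KQP_COMPILE_ACTOR errors earlier in this block ("Missing not null column in input: Value", code 2032, and "Failed to convert 'Value': Null to String", code 2031) are the two ways KqpNotNullColumns::UpdateTable_Immediate expects a write to a NOT NULL column to fail at compile time: omitting the column entirely, and binding an explicit NULL. A condensed repro of both, assuming a table shaped like the one the test creates (the YQL, table name, and column types are illustrative):

import ydb

def repro(session):
    # Assumed schema: Key Uint64, Value String NOT NULL.
    session.execute_scheme(
        "CREATE TABLE sample (Key Uint64, Value String NOT NULL, PRIMARY KEY (Key))"
    )
    tx = session.transaction(ydb.SerializableReadWrite())
    try:
        # Expected to fail: the NOT NULL column Value is missing from the input.
        tx.execute("UPSERT INTO sample (Key) VALUES (1)", commit_tx=True)
    except ydb.Error as e:
        print("missing column:", e)
    tx = session.transaction(ydb.SerializableReadWrite())
    try:
        # Expected to fail: Null cannot be converted to String for a NOT NULL column.
        tx.execute("UPSERT INTO sample (Key, Value) VALUES (1, NULL)", commit_tx=True)
    except ydb.Error as e:
        print("null value:", e)
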
2025-04-17T18:15:07.908124Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZDZkODUyZS00ZDA4MGY2MC1jNTI4Yjg1ZC03MDQyN2Q4Mg==, ActorId: [5:7494347302096191365:2545], ActorState: ExecuteState, TraceId: 01js2fcbfc6qcvx41sw9m464f8, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 18940, MsgBus: 62232 2025-04-17T18:15:08.780153Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347313888676946:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:08.780258Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001565/r3tmp/tmpj68AkC/pdisk_1.dat 2025-04-17T18:15:08.925883Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:08.941190Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:08.941299Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:08.943108Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18940, node 6 2025-04-17T18:15:08.999160Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:08.999184Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:08.999195Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:08.999328Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62232 TClient is connected to server localhost:62232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:09.660484Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.827403Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347331068546790:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.827462Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.855131Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.958640Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347331068546944:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.958726Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.958797Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347331068546949:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:12.962585Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:15:12.973040Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347331068546951:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:15:13.068473Z node 6 :TX_PROXY ERROR: Actor# [6:7494347335363514298:2431] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:13.783371Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347313888676946:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.783436Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan-UseSink >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-04-17T18:15:10.058322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347319916390365:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:10.058513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d98/r3tmp/tmpkkwVrT/pdisk_1.dat 2025-04-17T18:15:10.477750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:10.511119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:10.511251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:10.514818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19636 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:10.768179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:10.789997Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.808778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.312719Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347332427433510:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.312828Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d98/r3tmp/tmpUiRMvb/pdisk_1.dat 2025-04-17T18:15:13.532558Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:13.559456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:13.559539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:13.560956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27889 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:13.785606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.794749Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.810626Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:13.819207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TFlatTest::SelectRangeForbidNullArgs2 >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-04-17T18:15:10.433886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347321295150686:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:10.433934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d95/r3tmp/tmpXmkaA1/pdisk_1.dat 2025-04-17T18:15:10.786780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:10.788702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:10.788792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:10.793402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30496, node 1 2025-04-17T18:15:10.878870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:10.878898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:10.878905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:10.879041Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12134 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:11.171261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:11.193680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:11.205851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913711359 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913711359 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) 2025-04-17T18:15:13.818293Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347333208418503:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.855391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d95/r3tmp/tmpVKT5wE/pdisk_1.dat 2025-04-17T18:15:14.045437Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:14.068096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:14.068173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:14.069336Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13503, node 2 2025-04-17T18:15:14.206768Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:14.206791Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:14.206797Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:14.206907Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9257 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:14.437616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.443953Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.462466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.917671Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7494347337503387096:2484], Recipient [2:7494347337503386430:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-04-17T18:15:14.917722Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-04-17T18:15:14.917934Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:14.918130Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-04-17T18:15:14.918201Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-04-17T18:15:14.918238Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-04-17T18:15:14.918271Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-04-17T18:15:14.918298Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: 
"Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-04-17T18:15:14.918897Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-04-17T18:15:14.939278Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7494347337503387100:2485], Recipient [2:7494347337503386430:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-04-17T18:15:14.939307Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-04-17T18:15:14.939438Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:14.939595Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-04-17T18:15:14.939630Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-04-17T18:15:14.939685Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpNewEngine::DqSourceLocksEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-04-17T18:15:01.222526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347280891438532:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.222587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dbc/r3tmp/tmp6vNZMv/pdisk_1.dat 2025-04-17T18:15:01.706472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.706565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.708989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:01.735664Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17668 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.017627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.035025Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.048802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:02.053855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.260208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.334840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.994770Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347297423439382:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:05.994812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dbc/r3tmp/tmpJ8UbUx/pdisk_1.dat 2025-04-17T18:15:06.124386Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:06.143277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:06.143369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:06.145462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21094 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:06.322582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.330652Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.346649Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:06.353182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.421393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.489064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.130806Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347319559404463:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:10.132084Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dbc/r3tmp/tmplzaJgE/pdisk_1.dat 2025-04-17T18:15:10.257594Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:10.292028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:10.292117Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:10.293190Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26226 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:10.444481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.456655Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.465284Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:10.472943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.549702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:10.591924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:13.851931Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347332062383604:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.852140Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dbc/r3tmp/tmpzyUBAy/pdisk_1.dat 2025-04-17T18:15:14.138301Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:14.151587Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:14.151673Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:14.153487Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24013 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:14.435358Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:14.455548Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:14.462579Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.523249Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.596012Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::SelectRangeReverse >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::Random >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> TLocksTest::Range_BrokenLockMax >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService [GOOD] >> KqpReturning::ReturningTypes >> KqpSqlIn::KeyTypeMissmatch_Str [GOOD] >> KqpSqlIn::InWithCast >> TFlatTest::CopyTableAndRead >> KqpSqlIn::TupleLiteral [GOOD] >> KqpSqlIn::TupleSelect >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> TLocksFatTest::PointSetBreak >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 >> TFlatTest::SelectBigRangePerf >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2025-04-17T18:15:12.076089Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347329123876926:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.077325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d82/r3tmp/tmpp7H7Cc/pdisk_1.dat 2025-04-17T18:15:12.462061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.462154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.465690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:12.485556Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10558 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:12.824290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:12.854560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913712983 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... (TRUNCATED) 2025-04-17T18:15:13.074138Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:13.075555Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:15:13.075582Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:15:13.200135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:15:13.200389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-04-17T18:15:13.200623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:15:13.200656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:13.200922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:15:13.201113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-04-17T18:15:13.201149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:15:13.201979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:15:13.202036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-04-17T18:15:13.202153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:13.203227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-17T18:15:13.203686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:13.203816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 
2025-04-17T18:15:13.204188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-04-17T18:15:13.204255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-17T18:15:13.204332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2025-04-17T18:15:13.204351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-04-17T18:15:13.204375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-04-17T18:15:13.207554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-04-17T18:15:13.207584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-04-17T18:15:13.207704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-04-17T18:15:13.207730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-17T18:15:13.207771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-04-17T18:15:13.208008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-17T18:15:13.208019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-04-17T18:15:13.208100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-17T18:15:13.208115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-17T18:15:13.208161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-17T18:15:13.208195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710668:0 2 -> 3 2025-04-17T18:15:13.208988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:13.209053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:13.209119Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:13.209153Z node 1 :FLAT_TX_SCHEMESHARD ... :17.657355Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:17.658149Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-17T18:15:17.658163Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-17T18:15:17.676713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:17.676784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:17.676923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:17.682709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-17T18:15:17.682975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:17.683376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347345162737611 RawX2: 4503612512274679 } TabletId: 72075186224037889 State: 4 2025-04-17T18:15:17.683418Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:17.683948Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-17T18:15:17.685748Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-17T18:15:17.685775Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-17T18:15:17.685802Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-17T18:15:17.687146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347349457705118 RawX2: 4503612512274685 } TabletId: 72075186224037891 State: 4 2025-04-17T18:15:17.687204Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:17.687592Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T18:15:17.688070Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T18:15:17.689580Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-17T18:15:17.689680Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-17T18:15:17.691120Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-17T18:15:17.694108Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-17T18:15:17.695412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:15:17.695442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:15:17.695491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:15:17.707769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:17.707850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:17.707945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-17T18:15:17.708139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:15:17.708346Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-17T18:15:17.708421Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-17T18:15:17.709271Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:15:17.709339Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:15:17.709649Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-04-17T18:15:17.710231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:15:17.710273Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-17T18:15:17.710337Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-17T18:15:17.714495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:17.714895Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-17T18:15:17.714916Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-17T18:15:17.709577Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T18:15:17.709614Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:15:17.709677Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-17T18:15:17.710677Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:15:17.715665Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:15:17.715701Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-04-17T18:15:17.717019Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7494347350991476662:3040], serverId# [2:7494347350991476676:3045], sessionId# [0:0:0] 2025-04-17T18:15:17.717049Z 
node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7494347350991476946:3268], serverId# [2:7494347350991476949:3271], sessionId# [0:0:0] 2025-04-17T18:15:17.717061Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7494347350991477186:3431], serverId# [2:7494347350991477191:3436], sessionId# [0:0:0] 2025-04-17T18:15:17.718470Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:15:17.718509Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:15:17.718571Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-17T18:15:17.719968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:15:17.720143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:15:17.720308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:17.720443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:17.720550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:17.720582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:15:17.720620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:17.721740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-17T18:15:17.721755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-17T18:15:17.721790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:15:17.721792Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:15:17.721801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:15:17.721823Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-17T18:15:17.723148Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:15:17.724024Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:15:17.725120Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-17T18:15:17.725142Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-17T18:15:17.725156Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-17T18:15:17.725181Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-17T18:15:17.723202Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-17T18:15:17.723240Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-17T18:15:17.723298Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:15:17.724150Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:15:17.725185Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:15:17.725249Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-17T18:15:17.726793Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:15:17.726846Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-17T18:15:17.728817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:15:17.728839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:15:17.728885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:15:17.728905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:15:17.728936Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TLocksTest::BrokenSameKeyLock >> TLocksTest::SetEraseSet [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 
Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } 
Part{[1:2:3:0:0:0:0] eph 0, 8474b 40r} data 6832b + FlatIndex{15} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 7 0 1036b {0, 1} | 7 39 1036b {5, 7} + BTreeIndex{Empty, PageId: 7 RowCount: 40 DataSize: 1036 GroupDataSize: 7438 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{7} Label{74 rev 1, 1036b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 
String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_2 ... 
6:30:2062]) to queue queue_background_compaction 00000.495 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.495 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.503 DD| RESOURCE_BROKER: Finish task gen2-table-101-tablet-1 (56 by [16:30:2062]) (release resources {1, 0}) 00000.503 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 600.000000 to 300.000000 (remove task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.503 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987988 by [16:8:2055]) from queue queue_background_compaction 00000.503 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987988 by [16:8:2055]) to queue queue_background_compaction 00000.503 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task bckg-block (987987987988 by [16:8:2055])) 00000.503 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.506 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (63 by [16:30:2062]) priority=200 resources={1, 0} 00000.506 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_background_compaction 00000.506 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.507 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (63 by [16:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.507 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.508 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_compaction_gen0 from 0.000000 to 300.000000 00000.508 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (63 by [16:30:2062]) from queue queue_compaction_gen0 00000.508 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.508 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.514 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (63 by [16:30:2062]) (release resources {1, 0}) 00000.514 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.525 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.537 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.549 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.560 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.560 DD| RESOURCE_BROKER: Update task gen1-table-101-tablet-1 (62 by [16:30:2062]) (priority=166 type=background_compaction_gen1 resources={1, 0} resubmit=0) 00000.560 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [16:30:2062]) to queue queue_background_compaction 00000.560 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (in-fly consumption {1, 0}) 00000.560 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.562 DD| RESOURCE_BROKER: Removing task gen1-table-101-tablet-1 (62 by [16:30:2062]) 00000.562 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.562 NN| TABLET_SAUSAGECACHE: Poison cache serviced 55 reqs hit {55 29100b} miss {0 0b} 00000.562 II| 
FAKE_ENV: Shut order, stopping 4 BS groups 00000.562 II| FAKE_ENV: DS.0 gone, left {9705b, 90}, put {69340b, 689} 00000.563 II| FAKE_ENV: DS.1 gone, left {49683b, 125}, put {120837b, 750} 00000.563 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.563 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.563 II| FAKE_ENV: All BS storage groups are stopped 00000.563 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 2.000m 00000.563 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 659}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:15:09.428985Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00001.559 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.560 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00001.561 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.561 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.561 II| FAKE_ENV: DS.1 gone, left {2023426b, 4}, put {7269810b, 2026} 00001.561 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.561 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.561 II| FAKE_ENV: All BS storage groups are stopped 00001.561 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.561 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:15:11.007725Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00001.535 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.536 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00001.537 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.537 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.537 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269821b, 2026} 00001.537 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.537 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.537 II| FAKE_ENV: All BS storage groups are stopped 00001.538 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.538 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:15:12.584947Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00001.560 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.561 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.562 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.562 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00001.562 II| FAKE_ENV: DS.1 gone, left 
{2007005b, 4}, put {7211300b, 2026} 00001.562 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.562 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.562 II| FAKE_ENV: All BS storage groups are stopped 00001.562 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.563 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:15:14.160540Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00001.389 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.390 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00001.390 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.390 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.390 II| FAKE_ENV: DS.1 gone, left {2013604b, 4}, put {7237874b, 2026} 00001.391 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.391 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.391 II| FAKE_ENV: All BS storage groups are stopped 00001.391 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.391 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:15:15.562944Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00001.371 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.372 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.372 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.372 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.372 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211300b, 2026} 00001.372 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.372 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.372 II| FAKE_ENV: All BS storage groups are stopped 00001.373 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.373 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:15:16.947748Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00001.425 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.426 NN| TABLET_SAUSAGECACHE: Poison cache serviced 309 reqs hit {1118 6955338b} miss {2 9773b} 00001.427 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.427 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.427 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00001.427 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269821b, 2026} 00001.428 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 
0} 00001.428 II| FAKE_ENV: All BS storage groups are stopped 00001.428 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.428 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:15:18.409559Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00002.291 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.292 NN| TABLET_SAUSAGECACHE: Poison cache serviced 651 reqs hit {780 4008302b} miss {1002 6916040b} 00002.293 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.293 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {200304b, 2033} 00002.293 II| FAKE_ENV: DS.1 gone, left {2023570b, 4}, put {9254517b, 2039} 00002.293 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.293 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.293 II| FAKE_ENV: All BS storage groups are stopped 00002.293 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.293 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] Test command err: Trying to start YDB, gRPC: 20423, MsgBus: 4780 2025-04-17T18:14:36.292787Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347174803253668:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:36.292941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001564/r3tmp/tmpOukMZt/pdisk_1.dat 2025-04-17T18:14:36.702109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:36.726895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:36.727009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:36.729788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20423, node 1 2025-04-17T18:14:36.826704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:36.826738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:36.826744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:36.826855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4780 TClient is connected to server localhost:4780 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:37.337119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:37.361627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:37.383376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:37.516727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:37.685975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:37.746627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:39.312077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347187688157337:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.312173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:39.671925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.699314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.767183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.833325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.860095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:39.929912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.004898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.091352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347191983125316:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.091439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.091518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347191983125321:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.095330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:40.106534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347191983125323:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:40.187061Z node 1 :TX_PROXY ERROR: Actor# [1:7494347191983125379:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:41.292812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347174803253668:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:41.292905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8743, MsgBus: 26523 2025-04-17T18:14:42.203757Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347198918055122:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:42.203856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001564/r3tmp/tmprBQs2q/pdisk_1.dat 2025-04-17T18:14:42.296327Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8743, node 2 2025-04-17T18:14:42.347151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:42.347269Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:42.359467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:42.390694Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:42.390718Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:42.390732Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:42.390841Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26523 TClient is connected to server localhost:26523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:42.811268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.817139Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:42.825135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.890536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T1 ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.726227Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.762713Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.801966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.840382Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.879196Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.004580Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347320045712258:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.004681Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.004908Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347320045712263:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:10.008577Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:10.018828Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347320045712265:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:10.089455Z node 6 :TX_PROXY ERROR: Actor# [6:7494347320045712320:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:10.925577Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347298570873330:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:10.925660Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24541, MsgBus: 25595 2025-04-17T18:15:12.924114Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347331364076131:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.924191Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001564/r3tmp/tmp80UDMG/pdisk_1.dat 2025-04-17T18:15:13.111100Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:13.125918Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:13.126045Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:13.127822Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24541, node 7 2025-04-17T18:15:13.250838Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:13.250865Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:13.250875Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:13.251040Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25595 TClient is connected to server localhost:25595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:15:13.971624Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.999942Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:15:14.013805Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.102534Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.332301Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:14.414521Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.389775Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347352838914354:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.389902Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.416176Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.453787Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.494737Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.535090Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.580355Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.624673Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.673343Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.754733Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347352838915012:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.754837Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.755003Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347352838915017:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.759988Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:17.771403Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347352838915019:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:17.863575Z node 7 :TX_PROXY ERROR: Actor# [7:7494347352838915076:3550] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:17.926361Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347331364076131:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:17.926434Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:19.207134Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> TFlatTest::LargeDatashardReply [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-04-17T18:15:11.911588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347325701198934:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.911641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d8f/r3tmp/tmpZYiEsw/pdisk_1.dat 2025-04-17T18:15:12.360737Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.369177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.369288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.371492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1972 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:12.681451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.710287Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.731402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.925215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.983086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.084720Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2025-04-17T18:15:13.085129Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134210:2501] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-17T18:15:13.085230Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134210:2501] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-17T18:15:13.085274Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134210:2501] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-17T18:15:13.088285Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 2025-04-17T18:15:13.088538Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134232:2508] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-17T18:15:13.088610Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134232:2508] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-17T18:15:13.088634Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134232:2508] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-04-17T18:15:13.091547Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2025-04-17T18:15:13.091764Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134239:2512] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-17T18:15:13.091810Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134239:2512] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-17T18:15:13.091827Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134239:2512] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-17T18:15:13.094751Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 2025-04-17T18:15:13.094848Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134245:2515] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-17T18:15:13.094901Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134245:2515] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-17T18:15:13.094935Z node 1 :TX_PROXY ERROR: Actor# [1:7494347334291134245:2515] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-04-17T18:15:15.362431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347340362315822:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:15.365422Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d8f/r3tmp/tmpFwEFoE/pdisk_1.dat 2025-04-17T18:15:15.556570Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:15.565233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:15.565302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:15.567424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23304 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:15.766800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:15.782667Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:15:15.797698Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:15.808488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:15.881017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:15.935833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:18.920826Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347354223819214:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:18.920881Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d8f/r3tmp/tmpRoCm3O/pdisk_1.dat 2025-04-17T18:15:19.110327Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:19.121701Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:19.121753Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:19.124500Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7722 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:19.296063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.301578Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.319255Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:19.395943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.472286Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> TLocksTest::Range_IncorrectNullDot1 >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> TFlatTest::SelectRangeNullArgs3 >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-04-17T18:15:08.610736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347313648387083:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:08.611385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001da9/r3tmp/tmpgFslP3/pdisk_1.dat 2025-04-17T18:15:08.907115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:08.982862Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:7494347313648387303:2098] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-17T18:15:08.982943Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-17T18:15:08.983054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:08.983082Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T18:15:08.983133Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T18:15:08.983159Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T18:15:08.983172Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T18:15:08.983301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:08.983490Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-17T18:15:08.983538Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T18:15:08.983558Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T18:15:08.983571Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T18:15:08.983585Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T18:15:08.984061Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-17T18:15:08.984098Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:7494347313648387303:2098]) 2025-04-17T18:15:08.984168Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-17T18:15:08.984571Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-17T18:15:08.984738Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7494347313648387303:2098])::Execute 2025-04-17T18:15:08.984762Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T18:15:08.984799Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7494347313648387303:2098])::Complete 2025-04-17T18:15:08.984912Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 1744913708616118 ResourceMaximum { Memory: 270443331584 } 2025-04-17T18:15:08.984953Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-17T18:15:08.985007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:08.985099Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-17T18:15:08.985145Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-17T18:15:08.985171Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-17T18:15:08.985277Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-17T18:15:08.985292Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-17T18:15:08.985300Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-17T18:15:08.985308Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-17T18:15:08.985967Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-17T18:15:08.985997Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete TClient is connected to server localhost:8535 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-17T18:15:09.080606Z node 1 :TX_PROXY DEBUG: actor# [1:7494347313648387330:2103] Handle TEvNavigate describe path dc-1 2025-04-17T18:15:09.080696Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354905:2257] HANDLE EvNavigateScheme dc-1 2025-04-17T18:15:09.081918Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354905:2257] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:15:09.128784Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354905:2257] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T18:15:09.136761Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354905:2257] Handle TEvDescribeSchemeResult Forward to# [1:7494347317943354904:2256] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:09.164127Z node 1 :TX_PROXY DEBUG: actor# [1:7494347313648387330:2103] Handle TEvProposeTransaction 2025-04-17T18:15:09.164152Z node 1 :TX_PROXY DEBUG: actor# [1:7494347313648387330:2103] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:15:09.164253Z node 1 :TX_PROXY DEBUG: actor# [1:7494347313648387330:2103] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494347317943354918:2263] 2025-04-17T18:15:09.260839Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354918:2263] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-17T18:15:09.260903Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354918:2263] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:15:09.260974Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354918:2263] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:15:09.261187Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354918:2263] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:15:09.261272Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354918:2263] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:15:09.261309Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354918:2263] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:15:09.261399Z node 1 :TX_PROXY DEBUG: Actor# [1:7494347317943354918:2263] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:15:09.263385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:15:09.263567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.263756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:15:09.264039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:15:09.264080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.264672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:09.264789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 
2025-04-17T18:15:09.264927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, o ... ge state for txid 281474976710674:0 129 -> 240 2025-04-17T18:15:10.004857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.004921Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710674 datashard 72075186224037899 state PreOffline 2025-04-17T18:15:10.004964Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 Got TEvSchemaChangedResult from SS at 72075186224037899 2025-04-17T18:15:10.004975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:10.004994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710674:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:15:10.005255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-04-17T18:15:10.005368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-04-17T18:15:10.005387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-04-17T18:15:10.005406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-04-17T18:15:10.005413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-04-17T18:15:10.005425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710674, ready parts: 1/1, is published: true 2025-04-17T18:15:10.005493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7494347317943355938:2372] message: TxId: 281474976710674 2025-04-17T18:15:10.005528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-04-17T18:15:10.005543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710674:0 2025-04-17T18:15:10.005551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710674:0 2025-04-17T18:15:10.005621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-04-17T18:15:10.006498Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:10.006560Z node 1 :TX_DATASHARD INFO: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-04-17T18:15:10.015801Z node 1 :TX_DATASHARD INFO: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:10.016233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347317943355522 RawX2: 4503603922340127 } TabletId: 72075186224037899 State: 4 2025-04-17T18:15:10.016308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:10.016689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 
2025-04-17T18:15:10.016770Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-04-17T18:15:10.016925Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 12 TabletID: 72075186224037899 2025-04-17T18:15:10.016957Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-04-17T18:15:10.017023Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-04-17T18:15:10.017105Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-04-17T18:15:10.017223Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-04-17T18:15:10.020427Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7494347313648387303:2098] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7494347313648387500:2190] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-04-17T18:15:10.020532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-04-17T18:15:10.020571Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-04-17T18:15:10.020798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-04-17T18:15:10.020998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:10.021007Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-04-17T18:15:10.021025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-04-17T18:15:10.021051Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-04-17T18:15:10.021062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:10.021110Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-04-17T18:15:10.021280Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037899 2025-04-17T18:15:10.021313Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-04-17T18:15:10.021326Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-17T18:15:10.021384Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037899 2025-04-17T18:15:10.021443Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-04-17T18:15:10.021689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:12 2025-04-17T18:15:10.021770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-04-17T18:15:10.021821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:15:10.022585Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-04-17T18:15:11.786225Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347323838556245:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.786556Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001da9/r3tmp/tmpFwrWZR/pdisk_1.dat 2025-04-17T18:15:11.947436Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:11.964978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:11.965039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:11.967221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5283 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:12.157693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.170377Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.180648Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:12.185631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:15:16.778434Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347323838556245:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.778487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:22.983480Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-17T18:15:22.992923Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-17T18:15:22.994503Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-04-17T18:15:23.023876Z node 2 :TX_PROXY ERROR: Actor# [2:7494347371083202717:5919] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TLocksTest::NoLocksSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2025-04-17T18:15:13.246002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347332932706956:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.246045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d78/r3tmp/tmp8Hdnhu/pdisk_1.dat 2025-04-17T18:15:13.823527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:13.823685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:13.826989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:13.829740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6903 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:14.161083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.187398Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.197515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:14.206394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.420234Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:14.431151Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:14.453122Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:14.461395Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-04-17T18:15:14.606998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:15:14.607258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:14.607732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-17T18:15:14.607779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-17T18:15:14.607808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:14.607842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-17T18:15:14.607856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-17T18:15:14.607980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-17T18:15:14.608075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:15:14.608689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:15:14.608718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-17T18:15:14.609203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:14.609310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-04-17T18:15:14.609420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:15:14.609432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:15:14.609545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-17T18:15:14.609593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:15:14.609610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347332932707476:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-04-17T18:15:14.609637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347332932707476:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-04-17T18:15:14.609666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:14.609687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-04-17T18:15:14.609965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } 
BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:14.610041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:14.612250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:14.612326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:14.612338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-17T18:15:14.612360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-17T18:15:14.612374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:15:14.612568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:14.612609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:14.612615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-17T18:15:14.612622Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-04-17T18:15:14.612650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-04-17T18:15:14.612692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-04-17T18:15:14.612796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-17T18:15:14.612849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-17T18:15:14.613283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-04-17T18:15:14.613303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 
72057594046644480:3, partId: 0 2025-04-17T18:15:14.613312Z node 1 :FLAT_TX_SCHE ... shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-17T18:15:21.560725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-17T18:15:21.561050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:15:21.561073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:15:21.561155Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-17T18:15:21.561368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:21.561763Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-17T18:15:21.561767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347366773699678 RawX2: 4503612512274750 } TabletId: 72075186224037892 State: 4 2025-04-17T18:15:21.561794Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:21.561795Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7494347362478732015:2386], serverId# [3:7494347362478732016:2387], sessionId# [0:0:0] 2025-04-17T18:15:21.561813Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2025-04-17T18:15:21.561868Z node 3 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:21.561917Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-17T18:15:21.561984Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347362478731903 RawX2: 4503612512274683 } TabletId: 72075186224037889 State: 4 2025-04-17T18:15:21.562039Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:21.562122Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-17T18:15:21.562172Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-17T18:15:21.562458Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-17T18:15:21.562506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:21.562567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:21.563296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:15:21.563334Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-17T18:15:21.563349Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T18:15:21.563472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-17T18:15:21.563609Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-17T18:15:21.563721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 5] was 1 2025-04-17T18:15:21.563864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:15:21.563971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:21.564080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:21.564095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 5], at schemeshard: 72057594046644480 2025-04-17T18:15:21.564141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:15:21.564156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:15:21.564171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:15:21.565139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:15:21.565152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:15:21.565195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-17T18:15:21.565208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-17T18:15:21.565234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:15:21.565245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:15:21.565273Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:15:21.565542Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:15:21.565570Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [3:7494347366773699586:2571], serverId# [3:7494347366773699587:2572], sessionId# [0:0:0] 2025-04-17T18:15:21.565584Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-17T18:15:21.565599Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:7494347366773699848:2751], serverId# [3:7494347366773699850:2753], sessionId# [0:0:0] 2025-04-17T18:15:21.565611Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-17T18:15:21.565625Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 
72075186224037889, clientId# [3:7494347362478732025:2393], serverId# [3:7494347362478732026:2394], sessionId# [0:0:0] 2025-04-17T18:15:21.567397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347366773699503 RawX2: 4503612512274742 } TabletId: 72075186224037891 State: 4 2025-04-17T18:15:21.567461Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:21.567666Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-17T18:15:21.567690Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-17T18:15:21.567830Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-17T18:15:21.568017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:21.568682Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-17T18:15:21.568754Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-17T18:15:21.570121Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:15:21.570172Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-17T18:15:21.571347Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:15:21.571393Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-17T18:15:21.571732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:21.571941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-17T18:15:21.572079Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:21.572098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-17T18:15:21.572132Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:21.572427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:15:21.572453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:15:21.572485Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:15:21.573698Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:15:21.573733Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-17T18:15:21.573763Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:7494347366773699602:2582], serverId# [3:7494347366773699604:2584], sessionId# [0:0:0] 
2025-04-17T18:15:21.574032Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:15:21.574078Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-17T18:15:21.574215Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-17T18:15:21.854465Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-04-17T18:15:21.858617Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-04-17T18:15:21.863866Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-04-17T18:15:21.868570Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-04-17T18:15:21.874429Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-04-17T18:15:21.875018Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TLocksFatTest::PointSetRemove [GOOD] >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> TFlatTest::SplitEmptyAndWrite >> TCancelTx::ImmediateReadOnly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28040, MsgBus: 12984 2025-04-17T18:14:46.357101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347218365766727:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:46.357394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001551/r3tmp/tmpw9jEx7/pdisk_1.dat 2025-04-17T18:14:46.760776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:46.775185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:46.775277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:46.777007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28040, node 1 2025-04-17T18:14:46.866928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:46.866953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:46.866971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:46.867099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12984 TClient is connected to server localhost:12984 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:47.448116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.464946Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:47.475483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.611226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:47.786269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:47.859667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:49.541583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347231250670402:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.541725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.831240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.860148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.889705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.920916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.960078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.998783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.033180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.112982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347235545638360:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.113043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.113106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347235545638365:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.116245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:50.132656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347235545638367:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:50.222346Z node 1 :TX_PROXY ERROR: Actor# [1:7494347235545638424:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:51.065960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.286092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.355073Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347218365766727:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:51.355136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:51.413244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.511374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.717207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22042, MsgBus: 22014 2025-04-17T18:14:52.696670Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347242664278805:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:52.696737Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001551/r3tmp/tmpAtLgtF/pdisk_1.dat 2025-04-17T18:14:52.790727Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22042, node 2 2025-04-17T18:14:52.837339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:52.837413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:52.840044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:52.869071Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:52.869095Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:52.869105Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:52.869267Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22014 TClient is connected to 
server localhost:22014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 ... fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:13.428146Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:13.442513Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347335205458410:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:13.505130Z node 5 :TX_PROXY ERROR: Actor# [5:7494347335205458465:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:14.097384Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347318025586774:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:14.097462Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:14.703422Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:14.980652Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:15.170011Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:15.341372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:15.704251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10252, MsgBus: 20006 2025-04-17T18:15:18.456097Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347353313733875:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:18.456167Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001551/r3tmp/tmpJ5I7Lb/pdisk_1.dat 2025-04-17T18:15:18.615045Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:18.632610Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:18.632722Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:18.634362Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10252, node 6 2025-04-17T18:15:18.683457Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:18.683487Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:18.683496Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:18.683666Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20006 TClient is connected to 
server localhost:20006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:15:19.299071Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:19.311838Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.419376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.634821Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.720066Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.597523Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347370493604828:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:22.597611Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:22.672934Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:22.746028Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:22.791382Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:22.846324Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:22.889195Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:22.939918Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:23.021347Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:23.104460Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347374788572795:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:23.104571Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:23.104931Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347374788572800:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:23.109113Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:23.121930Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347374788572802:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:23.205143Z node 6 :TX_PROXY ERROR: Actor# [6:7494347374788572857:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:23.457523Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347353313733875:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:23.457597Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":6,"FinishTimeMs":1744913724973,"TaskId":1,"Host":"ghrun-gkdwnlcfnq","ComputeTimeUs":142}],"CpuTimeUs":832}],"UseLlvm":"undefined","Tasks":1,"FinishedTasks":0,"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Join2"}],"BaseTimeMs":1744913724973,"NodesScanShards":[],"CpuTimeUs":{"Count":1,"Sum":832,"Max":832,"Min":832}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":341776,"CpuTimeUs":334884},"ProcessCpuTimeUs":1914,"TotalDurationUs":357424,"ResourcePoolId":"default","QueuedTimeUs":965},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":0.832,"A-Cpu":0.832,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-04-17T18:15:18.731153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347356185597579:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:18.731186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d6d/r3tmp/tmpsrEy0J/pdisk_1.dat 2025-04-17T18:15:19.132745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:19.132814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:19.134591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:19.136625Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13473 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:19.416726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:15:19.450784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:22.051589Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347371986017035:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:22.066231Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d6d/r3tmp/tmpsq8UHA/pdisk_1.dat 2025-04-17T18:15:22.355793Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:22.378975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:22.379052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:22.383384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10436 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:22.595409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.613578Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.623627Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:22.627951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> KqpNewEngine::DqSourceLocksEffects [GOOD] >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> KqpReturning::Random [GOOD] >> TFlatTest::CopyTableAndDropOriginal [GOOD] >> TLocksTest::Range_GoodLock0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2025-04-17T18:15:09.920390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347314749686367:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:09.920463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d9c/r3tmp/tmpWsLEWe/pdisk_1.dat 2025-04-17T18:15:10.255244Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:10.300027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:10.300158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:10.305473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29031 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:10.559498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:10.578606Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.595368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:10.603007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.735300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.784322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:14.920901Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347314749686367:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:14.920960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:17.630395Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347352572094377:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:17.631396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d9c/r3tmp/tmpTdOW9v/pdisk_1.dat 2025-04-17T18:15:17.774722Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:17.802072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:17.802164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:17.804189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3540 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:17.985266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:17.994438Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:18.007236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:18.062215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:18.115979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.792757Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347367611803841:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:21.792810Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d9c/r3tmp/tmpuUZzFN/pdisk_1.dat 2025-04-17T18:15:21.933498Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:21.933584Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:21.935564Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:21.950515Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23641 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:22.144796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.152112Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:22.177101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.257007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.301569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::SelectRangeBytesLimit >> TLocksTest::SetBreakSetEraseBreak [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-04-17T18:15:11.884014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347324868620165:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.884070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d89/r3tmp/tmpU3ceU7/pdisk_1.dat 2025-04-17T18:15:12.320710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.326456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.326563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.329480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1647 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:12.589225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.606924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:12.613630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:1647 2025-04-17T18:15:13.100884Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555496:2387] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-17T18:15:13.100962Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555496:2387] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:13.114147Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555511:2399] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-17T18:15:13.114228Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555511:2399] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:13.139971Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555524:2409] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-17T18:15:13.140038Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555524:2409] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:13.195822Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555550:2429] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-17T18:15:13.195898Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555550:2429] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:13.210901Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555563:2439] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-17T18:15:13.210969Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555563:2439] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:13.230533Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555576:2449] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-17T18:15:13.230600Z node 1 :TX_PROXY ERROR: Actor# [1:7494347333458555576:2449] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:15.434727Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347342974909288:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:15.434777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d89/r3tmp/tmpvzkE5R/pdisk_1.dat 2025-04-17T18:15:15.605420Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:15.628399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:15.628493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:15.631046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27737 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:15.826917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:15.835060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27737 2025-04-17T18:15:19.054372Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347358614849400:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:19.054418Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d89/r3tmp/tmphl3lmn/pdisk_1.dat 2025-04-17T18:15:19.199236Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:19.217200Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:19.217284Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:19.219705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18656 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:19.456195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.466727Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:15:19.469678Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18656 2025-04-17T18:15:19.819207Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850125:2383] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-17T18:15:19.819277Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850125:2383] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:19.833157Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850141:2396] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-17T18:15:19.833229Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850141:2396] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:19.846067Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850156:2408] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-17T18:15:19.846133Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850156:2408] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:19.894714Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850184:2430] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-17T18:15:19.894783Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850184:2430] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:19.920697Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850200:2443] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-17T18:15:19.920759Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850200:2443] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:19.932475Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850214:2454] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-17T18:15:19.932535Z node 3 :TX_PROXY ERROR: Actor# [3:7494347358614850214:2454] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:22.710796Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347370413035974:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:22.710851Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d89/r3tmp/tmpFwLuck/pdisk_1.dat 2025-04-17T18:15:22.933419Z node 4 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T18:15:22.956700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:22.956814Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:22.958681Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14060 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:23.185854Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.191566Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:23.195894Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:14060 2025-04-17T18:15:23.548879Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-04-17T18:15:23.549163Z node 4 :TX_PROXY ERROR: Actor# [4:7494347374708003988:2383] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-17T18:15:23.567205Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-04-17T18:15:23.567429Z node 4 :TX_PROXY ERROR: Actor# [4:7494347374708004002:2391] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2025-04-17T18:15:19.764113Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347360678219420:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:19.764162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d6b/r3tmp/tmprk5QX3/pdisk_1.dat 2025-04-17T18:15:20.202924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:20.220985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:20.221078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:20.223179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16969 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:20.513568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.558843Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:20.567479Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:20.574940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.189182Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347375415189823:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:23.189257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d6b/r3tmp/tmpVdYS2w/pdisk_1.dat 2025-04-17T18:15:23.341182Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:23.350411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:23.350521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:23.352339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13421 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:23.552141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.562830Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.577165Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:23.584175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::SetLockNothing [GOOD] >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TFlatTest::SelectRangeBothLimit [GOOD] >> TFlatTest::CopyTableAndDropCopy [GOOD] >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLocksEffects [GOOD] Test command err: Trying to start YDB, gRPC: 17062, MsgBus: 61712 2025-04-17T18:14:41.051227Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347198501466466:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:41.052000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00155c/r3tmp/tmpiQrmvf/pdisk_1.dat 2025-04-17T18:14:41.431046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:41.435789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:41.435881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:41.438139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17062, node 1 2025-04-17T18:14:41.562806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:41.562838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:41.562854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:41.562998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61712 TClient is connected to server localhost:61712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:42.030694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.053088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:42.179965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.341743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:42.433227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:43.966126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347207091402665:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.966254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.308615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.338476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.364950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.392990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.426027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.464091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.496030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:44.583708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347211386370628:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.583784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.583794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347211386370633:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:44.586663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:44.595107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347211386370635:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:44.659021Z node 1 :TX_PROXY ERROR: Actor# [1:7494347211386370689:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:46.046963Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347198501466466:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:46.049384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1604, MsgBus: 19550 2025-04-17T18:14:46.868980Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347217255436747:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:46.869033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00155c/r3tmp/tmpc7LW6h/pdisk_1.dat 2025-04-17T18:14:46.973282Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1604, node 2 2025-04-17T18:14:47.023652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:47.023732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:47.025564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:47.061149Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:47.061172Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:47.061179Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:47.061278Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19550 TClient is connected to server localhost:19550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:47.471884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.479366Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:14:47.488461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.573776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.737574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... leted, doublechecking } 2025-04-17T18:15:16.579270Z node 6 :TX_PROXY ERROR: Actor# [6:7494347347693065602:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:16.936917Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347326218226637:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.936997Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21404, MsgBus: 4704 2025-04-17T18:15:19.309171Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347361487409185:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:19.309219Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00155c/r3tmp/tmp15D8Vh/pdisk_1.dat 2025-04-17T18:15:19.467528Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:19.481975Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:19.490373Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:19.493985Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21404, node 7 2025-04-17T18:15:19.616126Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:19.616153Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:19.616165Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:19.616320Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4704 TClient is connected to server localhost:4704 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:20.239635Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.248185Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:15:20.259917Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:20.375517Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:20.583524Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.669551Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.592612Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347378667280139:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:23.592722Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:23.668905Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:23.722097Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:23.775618Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:23.821376Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:23.918031Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:23.996873Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.038862Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.134639Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347382962248102:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.134748Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.134808Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347382962248107:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.139056Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:24.154725Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347382962248109:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:24.222047Z node 7 :TX_PROXY ERROR: Actor# [7:7494347382962248166:3551] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:24.309147Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347361487409185:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:24.309249Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:25.918449Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-17T18:15:25.918706Z node 7 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:15:25.918887Z node 7 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-17T18:15:25.919152Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7494347387257215939:2515], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [7:7494347387257215804:2515]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[7:7494347387257215939:2515].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:15:25.919744Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7494347387257215932:2515], SessionActorId: [7:7494347387257215804:2515], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7494347387257215804:2515]. isRollback=0 2025-04-17T18:15:25.920071Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=M2EwNWU0MzgtN2ZhOTA3MTAtOWQxYTRlNTEtMzFkY2NkM2Q=, ActorId: [7:7494347387257215804:2515], ActorState: ExecuteState, TraceId: 01js2fcyc4an2px9vs9y630bw3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7494347387257215933:2515] from: [7:7494347387257215932:2515] 2025-04-17T18:15:25.920207Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7494347387257215933:2515] TxId: 281474976715674. Ctx: { TraceId: 01js2fcyc4an2px9vs9y630bw3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2EwNWU0MzgtN2ZhOTA3MTAtOWQxYTRlNTEtMzFkY2NkM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:15:25.920440Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=M2EwNWU0MzgtN2ZhOTA3MTAtOWQxYTRlNTEtMzFkY2NkM2Q=, ActorId: [7:7494347387257215804:2515], ActorState: ExecuteState, TraceId: 01js2fcyc4an2px9vs9y630bw3, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 >> KqpSqlIn::InWithCast [GOOD] >> KqpReturning::ReturningTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::Random [GOOD] Test command err: Trying to start YDB, gRPC: 21012, MsgBus: 14148 2025-04-17T18:14:45.889535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347211785123070:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.889606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001557/r3tmp/tmpxGx5ZN/pdisk_1.dat 2025-04-17T18:14:46.229428Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21012, node 1 2025-04-17T18:14:46.297276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:46.297370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:46.300305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:46.321978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:46.322004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:46.322011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:46.322149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14148 TClient is connected to server localhost:14148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:46.865042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:46.885141Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:46.897117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:47.043418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.209046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.286218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.955857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347224670026736:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:48.955986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.294515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.330509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.360509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.391066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.460619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.498813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.572113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.656885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347228964994703:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.656958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.657140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347228964994708:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.660433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:49.670623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347228964994710:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:49.736451Z node 1 :TX_PROXY ERROR: Actor# [1:7494347228964994767:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:50.889695Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347211785123070:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:50.889752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Warning: At function: Filter, At function: Coalesce
:4:50: Warning: At function: SqlIn
:4:50: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
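(For reference: a minimal YQL sketch of the remedy this warning suggests. The pragma name is quoted verbatim from warning 1108 above; the table path and column names are hypothetical illustrations, not taken from this run. With the pragma enabled, IN over an empty or nullable item collection follows ANSI SQL semantics instead of yielding surprising NULL-driven results.)

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

SELECT Key, Value
FROM `/Root/ExampleTable`        -- hypothetical table path, for illustration only
WHERE Value IN (1, 2, NULL);     -- a nullable/NULL item in the IN list is what triggers warning 1108 without the pragma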
: Warning: Execution, code: 1060
:4:13: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 6566, MsgBus: 10914 2025-04-17T18:14:51.874863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347238095400949:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:51.874939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001557/r3tmp/tmpOOUXe6/pdisk_1.dat 2025-04-17T18:14:51.970942Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6566, node 2 2025-04-17T18:14:52.008370Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:52.008494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:52.012799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:52.049724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:52.049755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:52.049762Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:52.049874Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10914 TClient is connected to server localhost:10914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:52.418501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 720575940 ... 
72057594046644480 2025-04-17T18:15:16.730495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:16.764311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:16.808122Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:16.886539Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:16.931740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.030392Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347349715301907:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.030507Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.031110Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347349715301912:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.038729Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:17.058140Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347349715301914:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:17.125916Z node 5 :TX_PROXY ERROR: Actor# [5:7494347349715301971:3552] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:17.557233Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347328240462986:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:17.557322Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:18.243934Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 [[[2];["321"]];[["111"];[2]]] Trying to start YDB, gRPC: 22553, MsgBus: 21639 2025-04-17T18:15:20.191444Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347362445158206:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.191492Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001557/r3tmp/tmpfwtH3u/pdisk_1.dat 2025-04-17T18:15:20.388775Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:20.410097Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:20.410240Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:20.412314Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22553, node 6 2025-04-17T18:15:20.490787Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:20.490812Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:20.490822Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:20.490949Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21639 TClient is connected to server localhost:21639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:15:21.059335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:21.081901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.172958Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.388800Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.477943Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.440727Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347379625029159:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.440818Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.508954Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.553987Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.597644Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.637073Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.680330Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.764408Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.843556Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.925485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347379625029828:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.925626Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.925943Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347379625029834:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.931481Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:24.942145Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347379625029836:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:25.023756Z node 6 :TX_PROXY ERROR: Actor# [6:7494347383919997187:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:25.191684Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347362445158206:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.191769Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:26.296581Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-04-17T18:15:20.684582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347364197557419:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.694679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d66/r3tmp/tmpLinZFn/pdisk_1.dat 2025-04-17T18:15:21.185563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:21.222876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:21.222978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:21.224656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28930 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:21.507121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.538864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.562687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.764123Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:21.775136Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:21.808285Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.010s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:21.823490Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.016s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-04-17T18:15:21.950172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:15:21.950439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:21.950859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-17T18:15:21.950898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-17T18:15:21.950908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:21.950949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-17T18:15:21.950963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-17T18:15:21.951056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-17T18:15:21.951142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:15:21.951755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:15:21.951785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-17T18:15:21.952231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:21.952337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-04-17T18:15:21.952477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:15:21.952504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:15:21.952629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-17T18:15:21.952689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:15:21.952703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347368492525122:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-04-17T18:15:21.952714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347368492525122:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-04-17T18:15:21.952766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:21.952806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-04-17T18:15:21.953089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-17T18:15:21.953201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 
2025-04-17T18:15:21.955951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:21.956055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:21.956069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-17T18:15:21.956087Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-17T18:15:21.956112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-17T18:15:21.956367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:21.956427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-17T18:15:21.956435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-17T18:15:21.956444Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-04-17T18:15:21.956453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-04-17T18:15:21.956485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-04-17T18:15:21.956602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-17T18:15:21.956657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-17T18:15:21.956746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-04-17T18:15:21.956770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-04-17T18:15:21.956780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-04-17T18:15:21.958278Z no ... 
-04-17T18:15:25.158434Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-17T18:15:25.159722Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:25.159789Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:25.159834Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2025-04-17T18:15:25.159852Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:25.159888Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state Check that tablet 72075186224037888 was deleted 2025-04-17T18:15:25.161053Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:25.161157Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7494347386015830451:2374], serverId# [2:7494347386015830458:2684], sessionId# [0:0:0] 2025-04-17T18:15:25.161186Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:25.161215Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:25.161247Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7494347386015830453:2376], serverId# [2:7494347386015830460:2686], sessionId# [0:0:0] 2025-04-17T18:15:25.161474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347381720862628 RawX2: 4503608217307387 } TabletId: 72075186224037888 State: 4 2025-04-17T18:15:25.161553Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:25.161878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347381720862621 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-04-17T18:15:25.161906Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:25.162018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347381720862939 RawX2: 4503608217307446 } TabletId: 72075186224037890 State: 4 2025-04-17T18:15:25.162035Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:25.162468Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-17T18:15:25.162533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:25.162635Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:25.162683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:25.162808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347381720862933 RawX2: 4503608217307445 } TabletId: 72075186224037891 State: 4 2025-04-17T18:15:25.162808Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T18:15:25.162823Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-17T18:15:25.162836Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:25.162954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347381720862933 RawX2: 4503608217307445 } TabletId: 72075186224037891 State: 4 2025-04-17T18:15:25.162973Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:25.163349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:25.163353Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:15:25.163392Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:15:25.163397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:15:25.165107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:25.165360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:15:25.165534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:15:25.165683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:25.165807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:15:25.165939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-17T18:15:25.166026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:25.166199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 
2025-04-17T18:15:25.166301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:15:25.166409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:25.166424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-17T18:15:25.166474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:15:25.166493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:15:25.166515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:25.166756Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-17T18:15:25.166782Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-17T18:15:25.166795Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:15:25.166809Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-17T18:15:25.168348Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-17T18:15:25.168365Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T18:15:25.168380Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:15:25.168394Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:15:25.170359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:15:25.170374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:15:25.170412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:15:25.170420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:15:25.170438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:15:25.170446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:15:25.170472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:15:25.170490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:15:25.170513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:15:25.170545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:15:25.170729Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:15:25.170881Z node 2 :TX_DATASHARD 
INFO: Change sender killed: at tablet: 72075186224037891 2025-04-17T18:15:25.172354Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-17T18:15:25.172413Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-17T18:15:25.173703Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:15:25.173753Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-17T18:15:25.176338Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:15:25.176402Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-17T18:15:25.466781Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-04-17T18:15:25.470754Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-04-17T18:15:25.471207Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-04-17T18:15:25.471627Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> TFlatTest::ShardUnfreezeNonFrozen >> TFlatTest::SelectRangeReverseItemsLimit >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-04-17T18:15:02.739657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347287413204833:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:02.739713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db1/r3tmp/tmpWqnVSf/pdisk_1.dat 2025-04-17T18:15:03.209644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:03.217084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:03.217179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:03.220474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18959 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:03.430661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:03.458552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.588956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.633791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.800316Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347299452782825:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:05.800363Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db1/r3tmp/tmpU2Ygfq/pdisk_1.dat 2025-04-17T18:15:05.960533Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:05.964171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:05.964220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:05.965936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28489 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:06.126279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:06.142460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.192280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.260080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.252656Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347315711030801:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:09.252714Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db1/r3tmp/tmpD0tQ4a/pdisk_1.dat 2025-04-17T18:15:09.400765Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:09.424321Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:09.424396Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:09.426371Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6993 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:09.599005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.610655Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.624711Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:09.631875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.692846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.747233Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.606997Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347330336938766:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.607038Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db1/r3tmp/tmpgYwYXP/pdisk_1.dat 2025-04-17T18:15:12.772220Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.793237Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.793336Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.794766Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10014 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:13.011335Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.017378Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.037980Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.113866Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.189997Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.358713Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494347347247234503:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.358809Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db1/r3tmp/tmpRlsbMB/pdisk_1.dat 2025-04-17T18:15:16.539383Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:16.556764Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:16.556854Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:16.558957Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25894 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:16.773075Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.786217Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.800338Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:16.813458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.919355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.994762Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db1/r3tmp/tmpQsfGS8/pdisk_1.dat 2025-04-17T18:15:20.510290Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:15:20.533060Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:20.533145Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:20.536679Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:20.549235Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23279 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:20.763468Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.769773Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.783926Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:20.788225Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.857842Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.933383Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.097801Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347381182737903:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:24.097901Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db1/r3tmp/tmpJK15NB/pdisk_1.dat 2025-04-17T18:15:24.221596Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:24.231254Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:24.231346Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:24.233120Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7431 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:24.503616Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.508692Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.520612Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.590109Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.643632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-04-17T18:15:02.525069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347285092943353:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:02.525986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db2/r3tmp/tmpZp7xOf/pdisk_1.dat 2025-04-17T18:15:02.970548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:02.990607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:02.990702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:02.993481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6252 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:03.295423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.306969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.324828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.446659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.486621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.818768Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347299233084472:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:05.818944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db2/r3tmp/tmp4W01E6/pdisk_1.dat 2025-04-17T18:15:05.986517Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:05.991050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:05.991136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:05.992537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9656 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:06.179585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.194234Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.209366Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:06.213994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.271089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.318328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.083239Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347318057894216:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:09.083311Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db2/r3tmp/tmpkhx9f6/pdisk_1.dat 2025-04-17T18:15:09.245673Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:09.245787Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:09.248512Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:09.263766Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29737 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:09.459163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:09.483216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.541325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:09.590136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:12.670946Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347327396303251:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.671170Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db2/r3tmp/tmpqDeHDz/pdisk_1.dat 2025-04-17T18:15:12.836099Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.848598Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.848685Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.855663Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17062 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:13.093122Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.098971Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.111685Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:13.115695Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.210039Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.312284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.510714Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494347347811732920:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.510755Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db2/r3tmp/tmpSpJqvB/pdisk_1.dat 2025-04-17T18:15:16.632574Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:16.648740Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:16.648824Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:16.652844Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18421 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:16.864415Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.878602Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.894639Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:16.901061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.991421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:17.066403Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:20.347556Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347362533322042:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.347615Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db2/r3tmp/tmpPbR4sP/pdisk_1.dat 2025-04-17T18:15:20.567538Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:20.583890Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:20.583981Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:20.588759Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13123 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:20.811902Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:20.829786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.920456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.002674Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.454502Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347380114316316:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:24.454575Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001db2/r3tmp/tmpEySr9d/pdisk_1.dat 2025-04-17T18:15:24.571419Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:24.604164Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:24.604258Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:24.606566Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23363 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:24.795480Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.802223Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.809706Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.814671Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.892930Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.946084Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::InWithCast [GOOD] Test command err: Trying to start YDB, gRPC: 18422, MsgBus: 3021 2025-04-17T18:14:40.407044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347191959319024:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:40.407109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001560/r3tmp/tmpEEnjX9/pdisk_1.dat 2025-04-17T18:14:40.776816Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18422, node 1 2025-04-17T18:14:40.808416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:40.809615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:40.812136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:40.843222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:40.843247Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:40.843260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:40.843376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3021 TClient is connected to server localhost:3021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:41.441067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.466873Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:41.487495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:14:41.636373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:41.779895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.857915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:43.317719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347204844222672:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.317822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.669599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.702250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.726819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.755290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.795378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.825045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.854808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:43.922848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347204844223332:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.922918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.923097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347204844223337:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:43.927035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:43.937102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347204844223339:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:44.000217Z node 1 :TX_PROXY ERROR: Actor# [1:7494347204844223393:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:45.021755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.065039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.143788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:45.406233Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347191959319024:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:45.406308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
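The code-1108 warning above names its own fix: opting in to ANSI semantics for IN over empty or nullable item collections. A minimal YQL sketch of where that pragma would go — the table path and column names here are hypothetical illustrations, not part of the test; only the pragma itself is taken verbatim from the warning text:

    -- Hypothetical table and columns, for illustration only.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM `/Root/TestTable`
    WHERE Value IN (1, 2, NULL);  -- with the pragma, a NULL item follows ANSI IN semantics

Without the pragma, legacy IN may evaluate nullable arguments in a non-ANSI way, which is exactly the unexpected result the warning flags.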
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 15772, MsgBus: 16887 2025-04-17T18:14:49.074674Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347228913201443:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:49.074746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001560/r3tmp/tmpffqNGE/pdisk_1.dat 2025-04-17T18:14:49.174520Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15772, node 2 2025-04-17T18:14:49.210420Z node 2 :HIVE WARN: HIV ... boperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.469340Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.506116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.579928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.623589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:17.709749Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347352641981105:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.709869Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.710353Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347352641981110:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.715613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:17.730019Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347352641981112:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:17.817684Z node 5 :TX_PROXY ERROR: Actor# [5:7494347352641981170:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:18.441009Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347335462109504:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:18.441086Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 4916, MsgBus: 63677 2025-04-17T18:15:20.649993Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347362594173091:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.651010Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001560/r3tmp/tmpVXxEqw/pdisk_1.dat 2025-04-17T18:15:20.805923Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:20.833919Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:20.833985Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:20.836102Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4916, node 6 2025-04-17T18:15:20.969771Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:20.969794Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:20.969804Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:20.969970Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63677 TClient is connected to server localhost:63677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:21.684900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.694692Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:15:21.708499Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:21.840922Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.068298Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.178927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.088439Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347384069011325:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.088540Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.166255Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.210823Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.290281Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.331746Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.423590Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.501354Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.577302Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.650254Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347362594173091:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.650342Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:25.717031Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347384069012012:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.717144Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.717448Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347384069012017:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.721824Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:25.736510Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347384069012019:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:25.802161Z node 6 :TX_PROXY ERROR: Actor# [6:7494347384069012074:3559] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-04-17T18:15:21.752413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347367314179544:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:21.752783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d60/r3tmp/tmp2MW3Jg/pdisk_1.dat 2025-04-17T18:15:22.164462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:22.164568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:22.167409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:22.196551Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20810 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:22.550321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.572196Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:22.602824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
insert finished 9647 usec 12732 usec 7749 usec 7062 usec 9509 usec 9417 usec 7754 usec 11352 usec 8394 usec 8396 usec 2025-04-17T18:15:25.303901Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347384634613039:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.303971Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d60/r3tmp/tmpNCnuqv/pdisk_1.dat 2025-04-17T18:15:25.393561Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:25.443555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:25.443628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:25.445416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10969 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:25.576158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.583261Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.599467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
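For reference, the ten insert latencies reported above span 7062-12732 usec and sum to 92012 usec, i.e. an average of 9201 usec (~9.2 ms) per insert under this asan release build.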
>> KqpRanges::IsNotNullInJsonValue2 [GOOD] >> TLocksTest::Range_IncorrectDot1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningTypes [GOOD] Test command err: Trying to start YDB, gRPC: 10126, MsgBus: 63489 2025-04-17T18:14:37.183134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347180779581824:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:37.183928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001561/r3tmp/tmpqi3u3G/pdisk_1.dat 2025-04-17T18:14:37.599463Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10126, node 1 2025-04-17T18:14:37.632080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:37.632198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:37.635315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:37.680688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:37.680716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:37.680722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:37.680809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63489 TClient is connected to server localhost:63489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:38.189384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.203788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:38.225028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:38.372212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.534304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:38.615333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:40.189069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347193664485470:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.189206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.491402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.522884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.556033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.597777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.632182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.674806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.720184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:40.809876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347193664486133:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.809949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.810173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347193664486138:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:40.814455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:40.824412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347193664486140:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:40.918024Z node 1 :TX_PROXY ERROR: Actor# [1:7494347193664486197:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:41.882265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.920268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:41.965417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:42.183573Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347180779581824:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:42.183650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26795, MsgBus: 18196 2025-04-17T18:14:44.938428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347209239654025:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:44.938487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001561/r3tmp/tmp4BV7pe/pdisk_1.dat 2025-04-17T18:14:45.061386Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:45.065191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:45.065283Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:45.066690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26795, node 2 2025-04-17T18:14:45.107250Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:45.107274Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:45.107284Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:45.107406Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18196 TClient is connected to server localhost:18196 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:45.499401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at scheme ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:14.576609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:14.622935Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:14.670798Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:14.754584Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347336437269719:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:14.754707Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:14.755016Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347336437269724:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:14.763234Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:14.779342Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347336437269726:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:14.863492Z node 5 :TX_PROXY ERROR: Actor# [5:7494347336437269783:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:15.260048Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347319257398086:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:15.260115Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:16.094288Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:16.147984Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:15:16.230909Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24762, MsgBus: 25054 2025-04-17T18:15:20.515998Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347361833410261:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.516045Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001561/r3tmp/tmpX3ukgo/pdisk_1.dat 2025-04-17T18:15:20.836743Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:20.851662Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:20.851777Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:20.855993Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24762, node 6 2025-04-17T18:15:20.926768Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:20.926794Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:20.926804Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:20.926970Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25054 TClient is connected to server localhost:25054 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:21.563658Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.572512Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:21.588579Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.691657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.919607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.018997Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.903589Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347379013281211:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.903686Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:24.976941Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.024025Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.075704Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.123184Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.213806Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.275038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.317264Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.410843Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347383308249174:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.410968Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.411426Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347383308249179:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.415804Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:25.431393Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347383308249181:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:25.495282Z node 6 :TX_PROXY ERROR: Actor# [6:7494347383308249237:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:25.515083Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347361833410261:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.515162Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-04-17T18:15:12.204150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347331351943307:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.204207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7f/r3tmp/tmpLglCjk/pdisk_1.dat 2025-04-17T18:15:12.633399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.668202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.668297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.673474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10362 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:12.917136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:12.953631Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:12.958664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913713088 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913713186 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) 2025-04-17T18:15:13.167996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744913713256 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1744913713333 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-04-17T18:15:13.308666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1744913713396 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1744913713431 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-04-17T18:15:13.404528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710665 CreateStep: 1744913713480 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710666 CreateStep: 1744913713515 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildIn ... 46644480 2025-04-17T18:15:26.119432Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2025-04-17T18:15:26.119818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.119946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.120017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.120034Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-04-17T18:15:26.120093Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-17T18:15:26.120103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-17T18:15:26.120117Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-17T18:15:26.120125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-17T18:15:26.120137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715686, ready parts: 1/1, is published: true 2025-04-17T18:15:26.120170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7494347390306047609:2403] message: TxId: 281474976715686 2025-04-17T18:15:26.120188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-17T18:15:26.120204Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-04-17T18:15:26.120213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715686:0 2025-04-17T18:15:26.120276Z node 2 :TX_DATASHARD 
DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-04-17T18:15:26.120315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-04-17T18:15:26.120323Z node 2 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-04-17T18:15:26.120431Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-04-17T18:15:26.120463Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-17T18:15:26.124541Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:7494347390306047704:3016], serverId# [2:7494347390306047705:3017], sessionId# [0:0:0] 2025-04-17T18:15:26.124625Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.125769Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.125809Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.130808Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [2:7494347390306047714:3023], serverId# [2:7494347390306047715:3024], sessionId# [0:0:0] 2025-04-17T18:15:26.130943Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.132682Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.132733Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-17T18:15:26.136888Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.138037Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.138089Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.140820Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.143854Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.143912Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-17T18:15:26.146960Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.148609Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.148653Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.157883Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.007s,wait=0.006s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:26.158310Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.159401Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:15:26.159438Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-04-17T18:15:26.159810Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.159854Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-17T18:15:26.161328Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:26.165040Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:15:26.165080Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-04-17T18:15:26.167487Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.168813Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.168863Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.174151Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.175200Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.175246Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-17T18:15:26.180037Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.181200Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.181251Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.184240Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.185309Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.185352Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-17T18:15:26.189662Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.191034Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.191076Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.193946Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.196064Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:26.198312Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:15:26.198334Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-04-17T18:15:26.198693Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.198747Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-17T18:15:26.201338Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, 
ecr=1.000 2025-04-17T18:15:26.202115Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:15:26.202127Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-04-17T18:15:26.202347Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.207655Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.207733Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.210593Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.212638Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.212679Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-17T18:15:26.215241Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-17T18:15:26.216811Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-17T18:15:26.216889Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-17T18:15:26.220029Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-17T18:15:26.222084Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-17T18:15:26.222154Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2025-04-17T18:15:26.224297Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-04-17T18:15:26.224726Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-04-17T18:15:26.225143Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-04-17T18:15:26.225536Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-04-17T18:15:26.225946Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-04-17T18:15:26.226388Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> TObjectStorageListingTest::CornerCases >> TFlatTest::ShardFreezeRejectBadProtobuf >> TLocksTest::GoodDupLock >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> TFlatTest::Ls ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::IsNotNullInJsonValue2 [GOOD] Test command err: Trying to start YDB, gRPC: 12606, MsgBus: 6412 2025-04-17T18:14:46.646744Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347219327880326:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:46.646789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001545/r3tmp/tmpom1UzU/pdisk_1.dat 2025-04-17T18:14:47.003003Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12606, node 1 2025-04-17T18:14:47.058929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:47.059861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:47.062503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:47.122053Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:47.122067Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:47.122071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:47.122139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6412 TClient is connected to server localhost:6412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:47.639904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.658964Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:47.664476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.791575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:47.941435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.008569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:49.458236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347232212783962:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.458325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.806118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.835006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.864518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.892262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.972204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.014230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.045433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.125143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347236507751924:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.125235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.125284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347236507751929:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.128244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:50.135279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347236507751931:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:50.196452Z node 1 :TX_PROXY ERROR: Actor# [1:7494347236507751986:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:51.133714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.348297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.476818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.568218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.647080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347219327880326:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:51.655623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:51.808734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25139, MsgBus: 7886 2025-04-17T18:14:52.835826Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347245340520455:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:52.835865Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001545/r3tmp/tmpXkr1cv/pdisk_1.dat 2025-04-17T18:14:52.933956Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25139, node 2 2025-04-17T18:14:52.965582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:52.965726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:52.967653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:53.005505Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:53.005540Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:53.005549Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:53.005715Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7886 TClient is connected to 
server localhost:7886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 Cr ... : [WorkloadService] [TPoolFetcherActor] ActorId: [5:7494347352578353009:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:17.236674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:17.250283Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7494347352578353011:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:15:17.349183Z node 5 :TX_PROXY ERROR: Actor# [5:7494347352578353069:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:17.879716Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347331103514113:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:17.879842Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:18.569539Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:18.907555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:19.103554Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:19.299978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:19.672921Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16878, MsgBus: 1898 2025-04-17T18:15:21.292381Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347369166135956:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001545/r3tmp/tmpAsMU7Q/pdisk_1.dat 2025-04-17T18:15:21.428308Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:15:21.516921Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:21.550501Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:21.550614Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:21.551786Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16878, node 6 2025-04-17T18:15:21.630906Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:21.630939Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:21.630950Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:21.631101Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1898 TClient is connected to 
server localhost:1898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:22.328659Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.381041Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.487731Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:22.698450Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:22.790694Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.724310Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347386346006738:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.724487Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:25.794722Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.842268Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.879817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:25.957785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.011996Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.065790Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.129310Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.232157Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347390640974704:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:26.232316Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:26.232537Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347390640974709:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:26.242941Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:26.256753Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347390640974711:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:26.270071Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347369166135956:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:26.270158Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:26.355238Z node 6 :TX_PROXY ERROR: Actor# [6:7494347390640974767:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:27.702811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:28.024150Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:28.321370Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:28.485672Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:28.807116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 >> TLocksTest::CK_Range_BrokenLock >> TFlatTest::SelectRangeNullArgs4 [GOOD] >> TFlatTest::MiniKQLRanges >> TLocksTest::Range_CorrectNullDot >> TLocksFatTest::RangeSetBreak >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TFlatTest::ReadOnlyMode >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> TFlatTest::SplitBoundaryRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2025-04-17T18:15:25.359913Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347383703671211:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.360018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d59/r3tmp/tmpBX3Nnj/pdisk_1.dat 2025-04-17T18:15:25.707411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:25.762288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:25.762396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:25.763949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server
localhost:62750 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:26.012422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.038002Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.055454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.564751Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347400230626233:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:28.564800Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d59/r3tmp/tmphjnQqX/pdisk_1.dat 2025-04-17T18:15:28.705967Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:28.733620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:28.733715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:28.736365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6175 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:28.897167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.902597Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.918434Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:28.923103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TObjectStorageListingTest::ManyDeletes [GOOD] >> TFlatTest::RejectByPerRequestSize [GOOD] >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2025-04-17T18:15:26.627663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347390798832626:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:26.627896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d53/r3tmp/tmpIcc3Ng/pdisk_1.dat 2025-04-17T18:15:27.008172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:27.054381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:27.054495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:27.056088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3524 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:27.270136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:27.294401Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:27.316657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:27.461059Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:27.464675Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:27.489599Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:27.493673Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913727424 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-04-17T18:15:27.606385Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:27.606417Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:27.606765Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:27.606796Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:27.608682Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=0, 4 blobs 2r (max 2), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-04-17T18:15:27.625642Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.33, eph 3} end=0, 4 blobs 8r (max 8), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913727424 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-17T18:15:27.759076Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-17T18:15:27.759102Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T18:15:27.764356Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-17T18:15:27.764691Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-17T18:15:27.771783Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T18:15:29.834305Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347403659006087:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:29.834500Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d53/r3tmp/tmpccBunT/pdisk_1.dat 2025-04-17T18:15:29.968168Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:29.999087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:29.999161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:30.000445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21415 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:30.163485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:30.168564Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:30.182625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:30.288834Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:30.296346Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:30.336638Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:30.343720Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-17T18:15:30.369966Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDes ... 4046644480 2025-04-17T18:15:30.496725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037891 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 waiting... 2025-04-17T18:15:30.497419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2025-04-17T18:15:30.497580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2025-04-17T18:15:30.497666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037890 2025-04-17T18:15:30.497685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037891 2025-04-17T18:15:30.518937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715678, at schemeshard: 72057594046644480 2025-04-17T18:15:30.518965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-04-17T18:15:30.518984Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715678, at schemeshard: 72057594046644480 2025-04-17T18:15:30.556122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-04-17T18:15:30.556201Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-04-17T18:15:30.556650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:30.556978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-04-17T18:15:30.557007Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-04-17T18:15:30.557028Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 3 -> 131 2025-04-17T18:15:30.557346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:30.557423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:30.557449Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:15:30.557472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2025-04-17T18:15:30.557702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-04-17T18:15:30.557770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2025-04-17T18:15:30.563902Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:30.564208Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:30.564385Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:30.564547Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:30.564754Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.30, eph -9223372036854775808} end=0, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-17T18:15:30.571823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-04-17T18:15:30.571884Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-04-17T18:15:30.572162Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 131 -> 132 2025-04-17T18:15:30.572257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-17T18:15:30.572710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 
281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:30.572827Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:15:30.572851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-17T18:15:30.573029Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:15:30.573054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7494347403659006582:2240], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 3 2025-04-17T18:15:30.573082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:30.573108Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:15:30.573132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 72057594046644480 2025-04-17T18:15:30.575197Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-04-17T18:15:30.575296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-04-17T18:15:30.575314Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2025-04-17T18:15:30.575336Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-04-17T18:15:30.575353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-17T18:15:30.575408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-04-17T18:15:30.575561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-04-17T18:15:30.575766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-04-17T18:15:30.578120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-04-17T18:15:30.578161Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-17T18:15:30.578232Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715678:0 progress is 1/1 2025-04-17T18:15:30.578245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-04-17T18:15:30.578267Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#281474976715678:0 progress is 1/1 2025-04-17T18:15:30.578276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-04-17T18:15:30.578290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2025-04-17T18:15:30.578328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7494347407953974303:2359] message: TxId: 281474976715678 2025-04-17T18:15:30.578352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-04-17T18:15:30.578374Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715678:0 2025-04-17T18:15:30.578384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715678:0 2025-04-17T18:15:30.578525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-17T18:15:30.578886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:15:30.578898Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913730280 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-04-17T18:15:11.948497Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347323669851519:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.948665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d8c/r3tmp/tmpSNxBQR/pdisk_1.dat 2025-04-17T18:15:12.396461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.396724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.399615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:12.423455Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23899 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:12.672858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:12.713398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.949420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347323669851519:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.949498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:20.299585Z node 1 :TX_DATASHARD ERROR: Transaction read size 51002213 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2025-04-17T18:15:20.299682Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002213 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2025-04-17T18:15:20.299974Z node 1 :TX_PROXY ERROR: Actor# [1:7494347362324558835:2939] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2025-04-17T18:15:20.955793Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347365133374618:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.955832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d8c/r3tmp/tmptsy6Jg/pdisk_1.dat 2025-04-17T18:15:21.104638Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:21.120000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:21.120091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:21.126385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27981 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:21.339287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.348859Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.378879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.956056Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347365133374618:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.956148Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:28.585407Z node 2 :TX_DATASHARD ERROR: Transaction read size 51002453 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-04-17T18:15:28.585505Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002453 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-04-17T18:15:28.585633Z node 2 :TX_PROXY ERROR: Actor# [2:7494347399493114621:2936] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-04-17T18:15:29.431283Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347404547299406:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:29.440070Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d8c/r3tmp/tmpReGzX4/pdisk_1.dat 2025-04-17T18:15:29.539068Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:29.575638Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:29.575740Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:29.578043Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> 
Connected TClient is connected to server localhost:16061 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:29.783616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.795810Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.819639Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:29.829181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.042746Z node 3 :TX_PROXY DEBUG: actor# [3:7494347404547299409:2086] Handle TEvProposeTransaction 2025-04-17T18:15:33.042788Z node 3 :TX_PROXY DEBUG: actor# [3:7494347404547299409:2086] TxId# 281474976715700 ProcessProposeTransaction 2025-04-17T18:15:33.042833Z node 3 :TX_PROXY DEBUG: actor# [3:7494347404547299409:2086] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7494347421727169611:2611] DataReq marker# P0 2025-04-17T18:15:33.042899Z node 3 :TX_PROXY DEBUG: Actor# [3:7494347421727169611:2611] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-04-17T18:15:33.043479Z node 3 :TX_PROXY DEBUG: Actor [3:7494347421727169611:2611] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-04-17T18:15:33.043504Z node 3 :TX_PROXY DEBUG: Actor [3:7494347421727169611:2611] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-04-17T18:15:33.043536Z node 3 :TX_PROXY DEBUG: Actor# [3:7494347421727169611:2611] txid# 281474976715700 SEND to# [3:7494347404547299592:2115] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2025-04-17T18:15:33.043664Z node 3 :TX_PROXY DEBUG: Actor# [3:7494347421727169611:2611] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-17T18:15:33.045061Z node 3 :TX_PROXY DEBUG: Actor# [3:7494347421727169611:2611] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-04-17T18:15:33.045356Z node 3 :TX_PROXY DEBUG: Actor# [3:7494347421727169611:2611] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-04-17T18:15:33.045790Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:33.046899Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-04-17T18:15:33.047169Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:33.047981Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-04-17T18:15:33.048706Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:15:33.048825Z node 3 :TX_PROXY DEBUG: Actor# [3:7494347421727169611:2611] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000823 out readset size 0 marker# P6 2025-04-17T18:15:33.049330Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:15:33.049439Z node 3 :TX_PROXY DEBUG: Actor# [3:7494347421727169611:2611] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000447 out readset size 0 marker# P6 2025-04-17T18:15:33.049488Z node 3 :TX_PROXY ERROR: Actor# [3:7494347421727169611:2611] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001270 exceeded limit 10000 Status# ExecError 2025-04-17T18:15:33.049546Z node 3 :TX_PROXY ERROR: Actor# [3:7494347421727169611:2611] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-04-17T18:15:33.049659Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2025-04-17T18:15:33.049692Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2025-04-17T18:15:33.050078Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2025-04-17T18:15:33.050111Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 >> TLocksFatTest::LocksLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-04-17T18:15:08.822965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347313546813960:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:08.823027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6873/001da2/r3tmp/tmp5whEhk/pdisk_1.dat 2025-04-17T18:15:09.194315Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:09.199136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:09.199251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:09.203428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15872, node 1 2025-04-17T18:15:09.297597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:09.297618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:09.297628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:09.297727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28647 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:09.552565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.572448Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.592384Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:09.605613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:13.831711Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347313546813960:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.843929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:14.799000Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347336303406413:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:14.799057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001da2/r3tmp/tmpvzg6Yk/pdisk_1.dat 2025-04-17T18:15:14.960922Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:14.973468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:14.973540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:14.975009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14898, node 2 2025-04-17T18:15:15.029394Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:15.029423Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:15.029433Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:15.029561Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22953 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:15.262717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:15.287701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
..2025-04-17T18:15:19.799076Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347336303406413:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:19.799135Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:24.723567Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-17T18:15:24.723587Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:24.724298Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037890 2025-04-17T18:15:24.724453Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037889 2025-04-17T18:15:24.724570Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-04-17T18:15:24.724708Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-04-17T18:15:24.725108Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037891 2025-04-17T18:15:24.725309Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037892 2025-04-17T18:15:24.726048Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:15:24.726139Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-17T18:15:24.726500Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-04-17T18:15:24.726598Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-04-17T18:15:24.733873Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716500 at step 1744913724771 at tablet 72075186224037892 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913724771 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-04-17T18:15:24.733914Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:15:24.734051Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-17T18:15:24.734075Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:24.734099Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744913724771:281474976716500] in PlanQueue unit at 72075186224037892 2025-04-17T18:15:24.734132Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1744913724771:281474976716500 2025-04-17T18:15:24.735380Z node 2 :TX_DATASHARD DEBUG: tx 281474976716500 released its data 2025-04-17T18:15:24.735415Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:15:24.736054Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716500 at step 1744913724771 at tablet 72075186224037891 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913724771 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-04-17T18:15:24.736078Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:15:24.736192Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 
72075186224037891 2025-04-17T18:15:24.736206Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:24.736223Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744913724771:281474976716500] in PlanQueue unit at 72075186224037891 2025-04-17T18:15:24.736251Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1744913724771:281474976716500 2025-04-17T18:15:24.736968Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:24.737212Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716500 at step 1744913724771 at tablet 72075186224037890 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913724771 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-04-17T18:15:24.737225Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:15:24.737297Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:15:24.737310Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:24.737323Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744913724771:281474976716500] in PlanQueue unit at 72075186224037890 2025-04-17T18:15:24.737345Z node 2 :TX_DATASHARD ... DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:32.934235Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744913732975:281474976716911] in PlanQueue unit at 72075186224037889 2025-04-17T18:15:32.934255Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1744913732975:281474976716911 2025-04-17T18:15:32.935226Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-04-17T18:15:32.935246Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:15:32.935784Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716911 at step 1744913732975 at tablet 72075186224037891 { Transactions { TxId: 281474976716911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913732975 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-04-17T18:15:32.935799Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:15:32.935879Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-17T18:15:32.935892Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:32.935905Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744913732975:281474976716911] in PlanQueue unit at 72075186224037891 2025-04-17T18:15:32.935927Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1744913732975:281474976716911 2025-04-17T18:15:32.936231Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-17T18:15:32.936671Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:32.936782Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1744913732975} 2025-04-17T18:15:32.936845Z node 2 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:15:32.936847Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037892 restored its data 2025-04-17T18:15:32.937440Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037890 restored its data 2025-04-17T18:15:32.937665Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-04-17T18:15:32.937685Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:15:32.937782Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1744913732975} 2025-04-17T18:15:32.937898Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1744913732975} 2025-04-17T18:15:32.937967Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:15:32.938330Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-04-17T18:15:32.938351Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:15:32.938528Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-17T18:15:32.938538Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037889 restored its data 2025-04-17T18:15:32.939169Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037892 restored its data 2025-04-17T18:15:32.939628Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-04-17T18:15:32.939646Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:15:32.939860Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:32.940022Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:15:32.940265Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1744913732975} 2025-04-17T18:15:32.940295Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-17T18:15:32.940333Z node 2 :TX_DATASHARD DEBUG: Complete [1744913732975 : 281474976716911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7494347413612832164:3673], exec latency: 0 ms, propose latency: 4 ms 2025-04-17T18:15:32.940350Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:15:32.941645Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037890 restored its data 2025-04-17T18:15:32.942356Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:32.942644Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-17T18:15:32.942683Z node 2 :TX_DATASHARD DEBUG: Complete [1744913732975 : 281474976716911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7494347413612832164:3673], exec latency: 6 ms, propose latency: 9 ms 2025-04-17T18:15:32.942708Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:15:32.942800Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:15:32.943376Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037889 
restored its data 2025-04-17T18:15:32.945221Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-04-17T18:15:32.945358Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:15:32.947237Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:15:32.947283Z node 2 :TX_DATASHARD DEBUG: Complete [1744913732975 : 281474976716911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7494347413612832164:3673], exec latency: 10 ms, propose latency: 15 ms 2025-04-17T18:15:32.947305Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:15:32.954236Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:15:32.954923Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037889 restored its data 2025-04-17T18:15:32.960088Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:32.966415Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:15:32.966473Z node 2 :TX_DATASHARD DEBUG: Complete [1744913732975 : 281474976716911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7494347413612832164:3673], exec latency: 25 ms, propose latency: 32 ms 2025-04-17T18:15:32.966503Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:15:32.986435Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.987218Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.987731Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.988234Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.988422Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-04-17T18:15:32.988922Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") 
(type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.989038Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.989401Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.989869Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.990347Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-04-17T18:15:32.990872Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-04-17T18:15:32.991368Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-04-17T18:15:32.991917Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-04-17T18:15:32.992041Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-04-17T18:15:28.016981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347399838520997:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:28.017033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d4e/r3tmp/tmpqxw3K3/pdisk_1.dat 2025-04-17T18:15:28.432397Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:28.472530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:28.472665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:28.474648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3980 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:28.720818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.734624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.751089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
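The TX_DATASHARD "released its data" / "restored its data" pairs in the traces further above show a shard dropping a planned transaction's body and later reloading it when the operation reaches the front of the plan queue ("got data tx from cache"). A hedged in-memory sketch of that release/restore pattern; real datashards keep the body durably, which this plain map does not:

    #include <cstdint>
    #include <string>
    #include <unordered_map>

    class TxBodyCache {
    public:
        void Release(uint64_t txId, std::string body) {
            Cache_[txId] = std::move(body);   // "tx ... released its data"
        }
        std::string Restore(uint64_t txId) {  // "tx ... restored its data"
            auto node = Cache_.extract(txId);
            return node ? std::move(node.mapped()) : std::string{};
        }
    private:
        std::unordered_map<uint64_t, std::string> Cache_;
    };

    int main() {
        TxBodyCache cache;
        cache.Release(281474976716911ULL, "tx body");
        return cache.Restore(281474976716911ULL).empty() ? 1 : 0;
    }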
2025-04-17T18:15:31.410549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347409428761331:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:31.410959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d4e/r3tmp/tmpKR1zcy/pdisk_1.dat 2025-04-17T18:15:31.565517Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.580399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.580469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:31.581831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63813 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:31.761438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:31.767406Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.780685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:31.785012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
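The "72075186224037892 S3 Listing" records earlier in this chunk illustrate delimiter-style listing: keys directly under the "/Videos/" prefix land in contents, deeper paths collapse into common prefixes, and each restart resumes from "last path". A sketch of the contents/common-prefix split (resumption from last path is omitted, and the sample counts below are from three keys, smaller than the shard's real totals):

    #include <iostream>
    #include <set>
    #include <string>
    #include <vector>

    struct Listing {
        std::vector<std::string> Contents;     // objects directly under the prefix
        std::set<std::string> CommonPrefixes;  // collapsed "directories"
    };

    // Classic S3-style split: a key under `prefix` either becomes a content
    // entry or is folded into a common prefix at the next '/' after the prefix.
    Listing ListByDelimiter(const std::vector<std::string>& keys,
                            const std::string& prefix) {
        Listing out;
        for (const auto& key : keys) {
            if (key.compare(0, prefix.size(), prefix) != 0) continue;
            auto slash = key.find('/', prefix.size());
            if (slash == std::string::npos)
                out.Contents.push_back(key);
            else
                out.CommonPrefixes.insert(key.substr(0, slash + 1));
        }
        return out;
    }

    int main() {
        auto l = ListByDelimiter(
            {"/Videos/Godfather.avi",
             "/Videos/House of Cards/Season 1/Chapter 1.avi",
             "/Videos/Terminator 2.avi"},
            "/Videos/");
        std::cout << l.Contents.size() << " contents, "
                  << l.CommonPrefixes.size() << " common prefixes\n";
        // prints "2 contents, 1 common prefixes"
    }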
|92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-04-17T18:15:20.987288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347364925375809:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.994817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d63/r3tmp/tmpPO1PiN/pdisk_1.dat 2025-04-17T18:15:21.445926Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:21.475759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:21.475872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:21.482923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11368 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:21.724873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.754204Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.767580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.905654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.985477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:25.982758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347364925375809:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.982821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:28.448297Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347399725880539:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:28.448438Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d63/r3tmp/tmpfUaB8t/pdisk_1.dat 2025-04-17T18:15:28.551562Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:28.579182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:28.579256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:28.580522Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5567 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:28.749647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.756101Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.764964Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:28.769372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.836084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:28.908388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.826951Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347409444661594:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:31.827073Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d63/r3tmp/tmpayO7cV/pdisk_1.dat 2025-04-17T18:15:31.970197Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.999388Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.999501Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.001058Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3888 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:32.199706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.217915Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.239074Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:32.250087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.333267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.412333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
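The three-node setup above feeds TLocksFatTest::LocksLimit, which exercises the cap on concurrently tracked locks. A sketch assuming the usual policy, that the oldest lock is broken once the cap is hit; the cap value and the FIFO eviction order are assumptions, not the datashard's actual constants:

    #include <cstddef>
    #include <cstdint>
    #include <deque>
    #include <unordered_set>

    class LockRegistry {
    public:
        explicit LockRegistry(size_t limit) : Limit_(limit) {}

        // Track a new lock; returns the id of a lock broken to make room,
        // or 0 if the cap was not yet reached.
        uint64_t Acquire(uint64_t lockId) {
            uint64_t broken = 0;
            if (Order_.size() == Limit_) {
                broken = Order_.front();
                Order_.pop_front();
                Active_.erase(broken);  // the evicted lock is now "broken"
            }
            Order_.push_back(lockId);
            Active_.insert(lockId);
            return broken;
        }

        bool IsValid(uint64_t lockId) const { return Active_.count(lockId) > 0; }

    private:
        size_t Limit_;
        std::deque<uint64_t> Order_;
        std::unordered_set<uint64_t> Active_;
    };

    int main() {
        LockRegistry locks(2);               // illustrative cap
        locks.Acquire(1);
        locks.Acquire(2);
        uint64_t broken = locks.Acquire(3);  // breaks lock 1
        return locks.IsValid(broken) ? 1 : 0;
    }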
|92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart >> TFlatTest::SelectRangeSkipNullKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-04-17T18:15:29.606124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347404425743412:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:29.606271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d4b/r3tmp/tmpOFWS5r/pdisk_1.dat 2025-04-17T18:15:29.959657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:30.010337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:30.010459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:30.014092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27543 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:30.225423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:30.239268Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:30.243285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:30.384304Z node 1 :TX_PROXY ERROR: Actor# [1:7494347408720711403:2361] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-04-17T18:15:32.700187Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347413922377563:2148];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d4b/r3tmp/tmpyNKheD/pdisk_1.dat 2025-04-17T18:15:32.825228Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:15:32.877929Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:32.898911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.898984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.900520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6872 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.111356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.118897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.183114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.199195Z node 2 :TX_PROXY ERROR: Actor# [2:7494347418217345473:2392] txid# 281474976710660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. Only unfreeze alter is allowed 2025-04-17T18:15:33.204030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
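The two TX_PROXY rejections above ("Requested freeze state already set" and "Table is frozen. Only unfreeze alter is allowed") suggest a small validation gate in front of alter operations. A sketch of that gate, with illustrative enum and function names rather than schemeshard's own:

    #include <iostream>
    #include <string>

    enum class EFreezeState { Unfrozen, Frozen };
    enum class EAlterKind { SetFreeze, SetUnfreeze, Other };

    // Returns an error message, or an empty string when the alter is allowed.
    std::string ValidateAlter(EFreezeState state, EAlterKind alter) {
        if (alter == EAlterKind::SetFreeze && state == EFreezeState::Frozen)
            return "Requested freeze state already set";
        if (alter == EAlterKind::SetUnfreeze && state == EFreezeState::Unfrozen)
            return "Requested freeze state already set";
        if (state == EFreezeState::Frozen && alter == EAlterKind::Other)
            return "Table is frozen. Only unfreeze alter is allowed";
        return {};
    }

    int main() {
        std::cout << ValidateAlter(EFreezeState::Frozen, EAlterKind::Other) << "\n";
    }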
2025-04-17T18:15:33.218567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... >> TObjectStorageListingTest::Decimal [GOOD] >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> TFlatTest::LargeDatashardReplyRW [GOOD] >> TFlatTest::LsPathId [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-04-17T18:15:29.703733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347400678348959:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:29.703811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d49/r3tmp/tmpqMfKd0/pdisk_1.dat 2025-04-17T18:15:30.081849Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:30.103200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:30.103286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:30.107090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24478 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:30.411487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:30.424461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:30.446141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
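TBSV::ShouldLimitBlockStoreVolumeDropRate, reported [GOOD] above, checks that block store volume drops are throttled rather than accepted in an unbounded burst. A token-bucket sketch of such a limit; the capacity and refill rate are made-up numbers, not the schemeshard defaults:

    #include <algorithm>
    #include <chrono>

    class DropRateLimiter {
        using Clock = std::chrono::steady_clock;
    public:
        DropRateLimiter(double capacity, double tokensPerSec)
            : Capacity_(capacity), Rate_(tokensPerSec),
              Tokens_(capacity), Last_(Clock::now()) {}

        // Each drop consumes one token; tokens refill continuously over time.
        bool TryDrop() {
            auto now = Clock::now();
            std::chrono::duration<double> dt = now - Last_;
            Last_ = now;
            Tokens_ = std::min(Capacity_, Tokens_ + dt.count() * Rate_);
            if (Tokens_ < 1.0) return false;  // over the drop rate: reject
            Tokens_ -= 1.0;
            return true;
        }
    private:
        double Capacity_, Rate_, Tokens_;
        Clock::time_point Last_;
    };

    int main() {
        DropRateLimiter limiter(1.0, 0.5);  // ~1 drop per 2 seconds
        int accepted = 0;
        for (int i = 0; i < 10; ++i)
            accepted += limiter.TryDrop() ? 1 : 0;
        return accepted == 1 ? 0 : 1;  // a burst of 10: only 1 passes
    }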
2025-04-17T18:15:33.128301Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347421144961108:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:33.128363Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d49/r3tmp/tmpiDTZR5/pdisk_1.dat 2025-04-17T18:15:33.263386Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:33.275764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:33.275834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.279038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64722 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.447141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:33.461014Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 
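The schemeshard traces in the next err block step each operation through numbered states ("Change state for txid 1:0 2 -> 3", then 3 -> 128, then 128 -> 240), alongside the TCreateParts, TConfigureParts, TPropose and TDone handlers named in the same records. A sketch of that progression, under the assumption that the numeric values map one-to-one onto those stages:

    #include <cstdint>
    #include <iostream>

    // State labels follow the handler names in the trace; the mapping of
    // numbers to stages is inferred from the log, not taken from the source.
    enum class EOpState : uint32_t {
        CreateParts = 2,     // allocate shards (may be a no-op)
        ConfigureParts = 3,  // push config to the shards
        Propose = 128,       // coordinate a plan step
        Done = 240,          // publish paths and finish
    };

    EOpState Next(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts;
            case EOpState::ConfigureParts: return EOpState::Propose;
            case EOpState::Propose:        return EOpState::Done;
            case EOpState::Done:           return EOpState::Done;
        }
        return EOpState::Done;
    }

    int main() {
        for (auto s = EOpState::CreateParts; s != EOpState::Done; s = Next(s))
            std::cout << static_cast<uint32_t>(s) << " -> "
                      << static_cast<uint32_t>(Next(s)) << "\n";
        // prints 2 -> 3, 3 -> 128, 128 -> 240, matching the trace below
    }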
2025-04-17T18:15:33.467602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:36.343500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:36.343609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:36.343651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:36.343685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:36.344534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:36.344592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:36.344663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:36.344743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:36.346115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:36.437560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:36.437618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:36.461158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:36.462715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:36.462884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:36.476203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:36.476520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:36.480736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:36.481077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:36.491599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:36.498322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-17T18:15:36.498399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:36.499200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:36.499258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:36.499343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:36.499480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.505390Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:36.632000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:36.633160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.634462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:36.635707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:36.635763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.638385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:36.638526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:36.638709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.638819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:36.638844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:36.638874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:36.641524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.641573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:36.641607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:36.645998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.646047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.646086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:36.646135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:36.656412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:36.658333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:36.659332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:36.660355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:36.660469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:36.660508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:36.661790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:36.661849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:36.662059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:36.662162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:36.664682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:36.664739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:36.664949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:36.664992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:36.665332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:36.665372Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:36.665476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:36.665507Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:36.665597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:36.665651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:36.665694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:36.665747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:36.665780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:36.665807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:36.665855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:36.665885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:36.665914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:36.667609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:36.667729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:36.667767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1/1 2025-04-17T18:15:37.674237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:37.675685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T18:15:37.676002Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 2025-04-17T18:15:37.676562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-17T18:15:37.676833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2025-04-17T18:15:37.677106Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 2025-04-17T18:15:37.677590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-17T18:15:37.677786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 Forgetting tablet 72075186233409569 Forgetting tablet 72075186233409568 2025-04-17T18:15:37.678270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T18:15:37.681060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-17T18:15:37.681187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-04-17T18:15:37.681578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.681702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:37.681759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2025-04-17T18:15:37.681906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-17T18:15:37.682053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-17T18:15:37.682096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T18:15:37.682142Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-17T18:15:37.682197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T18:15:37.682264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:37.682339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-17T18:15:37.682381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-17T18:15:37.682434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-17T18:15:37.682470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-17T18:15:37.682520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-17T18:15:37.682653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-17T18:15:37.682693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-17T18:15:37.682727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-04-17T18:15:37.682758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-04-17T18:15:37.683672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-17T18:15:37.683720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-17T18:15:37.683799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-17T18:15:37.683831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-17T18:15:37.685530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:15:37.685574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:15:37.685696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:37.685735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:37.685875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-04-17T18:15:37.686017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.686049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-17T18:15:37.686096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-04-17T18:15:37.686664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T18:15:37.686743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T18:15:37.686776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-17T18:15:37.686817Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2025-04-17T18:15:37.686871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-17T18:15:37.687201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:15:37.687260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-17T18:15:37.687323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:37.687626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T18:15:37.687704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-17T18:15:37.687789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-17T18:15:37.687827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-04-17T18:15:37.687859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:37.687929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-17T18:15:37.688245Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2025-04-17T18:15:37.688745Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-04-17T18:15:37.689155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-17T18:15:37.689959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-17T18:15:37.691210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 
2025-04-17T18:15:37.692464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:15:37.692692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-17T18:15:37.694206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-17T18:15:37.694297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-04-17T18:15:37.694876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-04-17T18:15:37.694921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-04-17T18:15:37.695686Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-04-17T18:15:37.695801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-04-17T18:15:37.695837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1677:3547] TestWaitNotification: OK eventTxId 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:37.400183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:37.400266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:37.400307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:37.400340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:37.400382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:37.400441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:37.400493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:37.400559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:37.400851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:37.480368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:37.480410Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-17T18:15:37.494561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:37.494945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:37.495106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:37.503092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:37.503253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:37.503872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.504073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:37.507125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.507961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:37.507999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.508141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:37.508171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:37.508195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:37.508262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.512969Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:37.627927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:37.628133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.628340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:37.628612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:37.628672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.630728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.630851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:37.631047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.631110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:37.631143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:37.631173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:37.632982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.633031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:37.633086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:37.635203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.635244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.635278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:37.635332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.638899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:37.640596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:37.640767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:37.641674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.641801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:37.641853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:37.642140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:37.642212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:37.642379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:37.642468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:37.644309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:37.644347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:37.644542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.644582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:37.644913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.644954Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:37.645038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:37.645069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.645108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:37.645149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.645183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:37.645221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.645275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:37.645302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:37.645352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:37.645387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:37.645415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:37.647123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:37.647235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:37.647269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.747839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:37.747979Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-17T18:15:37.748508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:15:37.748631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2025-04-17T18:15:37.748892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:37.750848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T18:15:37.750938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T18:15:37.751664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.751770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:37.751823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:15:37.751914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:15:37.752045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:37.752086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:37.752121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:37.752150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:37.752203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:37.752274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:15:37.752313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T18:15:37.752366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:37.752400Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:15:37.752443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:15:37.752537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:15:37.752561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T18:15:37.752582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:15:37.752601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:15:37.753554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:15:37.753599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-17T18:15:37.754167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:15:37.754225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-17T18:15:37.754845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:15:37.754878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:15:37.754996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:37.755033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:37.755165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:15:37.755290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.755321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T18:15:37.755355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:15:37.755885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:37.755981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:37.756015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:15:37.756054Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:15:37.756127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:15:37.756461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:15:37.756511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:15:37.756564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:37.756797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:37.756861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:37.756883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:15:37.756905Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:15:37.756942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:37.756995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T18:15:37.757575Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-17T18:15:37.757697Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-17T18:15:37.757789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.758254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:15:37.759364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:37.761466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-17T18:15:37.761699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:37.762201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:15:37.762622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:15:37.762911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:15:37.762949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:15:37.763388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:15:37.763477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:15:37.763511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:392:2372] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-17T18:15:37.764653Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-17T18:15:37.766312Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:37.615415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:37.615527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:37.615563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:37.615596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:37.615638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:37.615677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:37.615730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:37.615792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:37.616084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:37.695324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:37.695368Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-17T18:15:37.714940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:37.715318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:37.715482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:37.727675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:37.727865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:37.728519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.728730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:37.731322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.732500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:37.732556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.732731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:37.732780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:37.732817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:37.732932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.739019Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:37.855648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:37.855855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.856040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:37.856245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:37.856291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.862898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.863076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:37.863270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.863325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:37.863356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:37.863387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:37.867249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.867318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:37.867351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:37.869607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.869667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.869715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:37.869784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.873956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:37.876516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:37.876709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:37.877700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:37.877855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:37.877907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:37.878234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:37.878311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:37.878482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:37.878572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:37.881213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:37.881263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:37.881502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:37.881547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:37.881897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:37.881961Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:37.882060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:37.882092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.882138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:37.882209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.882249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:37.882290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:37.882352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:37.882388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:37.882460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:37.882501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:37.882540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:37.892148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:37.892325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:37.892368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
-17T18:15:38.055455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.055504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.055543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.062527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:15:38.063938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:38.065814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:38.065881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:38.066340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:15:38.066391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:15:38.066476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:38.068079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.074513Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.074594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:38.074671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:38.075181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:469:2058] recipient: [1:15:2062] 2025-04-17T18:15:38.131146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:38.131375Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 218us result status StatusPathDoesNotExist 2025-04-17T18:15:38.131520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty 
ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-17T18:15:38.132395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:470:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:473:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:474:2058] recipient: [1:472:2432] Leader for TabletID 72057594046678944 is [1:475:2433] sender: [1:476:2058] recipient: [1:472:2432] 2025-04-17T18:15:38.167916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:38.168013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:38.168069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:38.168112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:38.168159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:38.168188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:38.168240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:38.168299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:38.168567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:38.185481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:38.186752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:38.186909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:38.187041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:38.187071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:38.187361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:38.188036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:38.188148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.188224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.188580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.188654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:15:38.188827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at 
schemeshard: 72057594046678944 2025-04-17T18:15:38.188929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.189037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.189127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.189222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.189395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.189624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.189737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.190966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.191200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.191377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.191431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.191491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:15:38.198462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:38.198539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:38.198754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:38.198805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:38.198841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:38.200113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is 
[1:475:2433] sender: [1:536:2058] recipient: [1:15:2062] 2025-04-17T18:15:38.232712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:38.232937Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 235us result status StatusPathDoesNotExist 2025-04-17T18:15:38.233083Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-04-17T18:15:30.902811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347408474060566:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:30.903725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d41/r3tmp/tmpCTfZZR/pdisk_1.dat 2025-04-17T18:15:31.370628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.392096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.392217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:31.397118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1295 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:31.629052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.648938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... Error 128: Mix freeze cmd with other options is forbidden 2025-04-17T18:15:31.803393Z node 1 :TX_PROXY ERROR: Actor# [1:7494347412769028540:2361] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } 2025-04-17T18:15:31.806830Z node 1 :TX_PROXY ERROR: Actor# [1:7494347412769028553:2367] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-04-17T18:15:31.809109Z node 1 :TX_PROXY ERROR: Actor# [1:7494347412769028559:2372] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-17T18:15:31.816477Z node 1 :TX_PROXY ERROR: Actor# [1:7494347412769028565:2377] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-17T18:15:34.254719Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347425544132096:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.255420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d41/r3tmp/tmpHw3Llz/pdisk_1.dat 2025-04-17T18:15:34.388837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:34.388933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:34.391585Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:34.406306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18796 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-04-17T18:15:34.574482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.591009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::MergeEmptyAndWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2025-04-17T18:15:30.742133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347404997494718:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:30.742233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d45/r3tmp/tmpK4BfZj/pdisk_1.dat 2025-04-17T18:15:31.246625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.246715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:31.246837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.251818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15951, node 1 2025-04-17T18:15:31.402796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:31.402815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:31.402822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:31.402927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12464 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:31.661203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-17T18:15:31.701558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:34.297145Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347422643169436:2102];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.299371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d45/r3tmp/tmpTkAaNH/pdisk_1.dat 2025-04-17T18:15:34.400327Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22068, node 2 2025-04-17T18:15:34.438961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:34.439081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:34.444857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:34.513041Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:34.513062Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:34.513068Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:34.513160Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13249 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:34.703645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.711100Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.721837Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:34.726773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-04-17T18:15:16.779566Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347347803784442:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.779601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d75/r3tmp/tmpNiY31b/pdisk_1.dat 2025-04-17T18:15:17.133817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:17.133944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:17.135631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:17.186285Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31426 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:17.418371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:17.434454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:17.461173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.781129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347347803784442:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:21.781199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:27.010730Z node 1 :MINIKQL_ENGINE ERROR: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) 2025-04-17T18:15:27.021085Z node 1 :TX_DATASHARD ERROR: Datashard execution error for [1744913726493:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-04-17T18:15:27.025893Z node 1 :TX_PROXY ERROR: Actor# [1:7494347390753463623:5941] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-04-17T18:15:27.026059Z node 1 :TX_PROXY ERROR: Actor# [1:7494347390753463623:5941] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-04-17T18:15:27.727863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347395781216032:2155];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:27.729258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d75/r3tmp/tmpjY1Pb4/pdisk_1.dat 2025-04-17T18:15:27.900475Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:27.905895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:27.905977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:27.907561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25105 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:28.083522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.091750Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:28.099485Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:28.102090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.727924Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347395781216032:2155];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.729226Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:37.420186Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-17T18:15:37.430916Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-17T18:15:37.433796Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-04-17T18:15:37.435424Z node 2 :TX_PROXY ERROR: Actor# [2:7494347434435927989:5916] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable |92.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-04-17T18:15:31.710620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347409607913780:2239];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:31.710800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3c/r3tmp/tmpvzWHnb/pdisk_1.dat 2025-04-17T18:15:32.127406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:32.144872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.144950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.146525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14357 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:32.413970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: 2025-04-17T18:15:32.434374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913732471 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... 
(TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913732471 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913732492 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913732492 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... 
(TRUNCATED) 2025-04-17T18:15:32.455869Z node 1 :TX_PROXY ERROR: Actor# [1:7494347413902881502:2324] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913732471 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913732492 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744913732471 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744913732492 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913732506 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... 
(TRUNCATED) 2025-04-17T18:15:34.814652Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347424128187820:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.814714Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3c/r3tmp/tmpiwzPtP/pdisk_1.dat 2025-04-17T18:15:34.932562Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:34.975952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:34.976049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:34.979147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63250 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:35.157124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.162846Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:35.190108Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TLocksTest::BrokenDupLock [GOOD] >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob >> VDiskBalancing::TestRandom_Mirror3dc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-04-17T18:15:32.622586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347415742871227:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.622643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d2d/r3tmp/tmpUI0DFI/pdisk_1.dat 2025-04-17T18:15:33.064929Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:33.100575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:33.100637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.101897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25661 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.332971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:33.360953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:35.851513Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347430078441989:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:35.851548Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d2d/r3tmp/tmphRt0mY/pdisk_1.dat 2025-04-17T18:15:35.974999Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:36.017739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:36.017818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:36.019337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27497 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:36.168381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.173961Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.191385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:36.303393Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:36.304004Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.015s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:36.366568Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.020s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:36.368777Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.018s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913736286 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-04-17T18:15:36.412243Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:36.413983Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.414163Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:36.415432Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.415581Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:36.416602Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:36.417343Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.417470Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:36.417889Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:36.418526Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.418623Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:36.419007Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:36.419595Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.419683Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:36.420070Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:36.420756Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.420851Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-17T18:15:36.421207Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:36.421787Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.421872Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:36.422255Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:36.422797Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.423239Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:36.423615Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:36.424197Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.424277Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:36.424632Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:36.425247Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.425947Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:36.426330Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:36.426934Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.427030Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:36.427384Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:36.427980Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-17T18:15:36.428310Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:15:36.428697Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-17T18:15:36.429263Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:15:36.429288Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:15:36.429455Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:15:36.429800Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-17T18:15:36.430308Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:15:36.430328Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:15:36.523619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG ... 
shard: 72057594046644480, txId: 281474976715687 2025-04-17T18:15:36.674031Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:15:36.674041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:15:36.674060Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:15:36.674071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 0/1, is published: true 2025-04-17T18:15:36.674094Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037890 2025-04-17T18:15:36.674424Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:36.674708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-04-17T18:15:36.674738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-04-17T18:15:36.674757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-04-17T18:15:36.675533Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1744913736713} 2025-04-17T18:15:36.675554Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:15:36.675643Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:15:36.675675Z node 2 :TX_DATASHARD DEBUG: Complete [1744913736713 : 281474976715687] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7494347430078442323:2144], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:15:36.675712Z node 2 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715687 state PreOffline TxInFly 0 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-17T18:15:36.675741Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:15:36.675897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1744913736713 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 693 } } 2025-04-17T18:15:36.675911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-04-17T18:15:36.676002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1744913736713 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 693 } } 2025-04-17T18:15:36.676065Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: 
TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1744913736713 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 693 } } 2025-04-17T18:15:36.676390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347434373410198 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-04-17T18:15:36.676403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-04-17T18:15:36.676517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347434373410198 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-04-17T18:15:36.676538Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-17T18:15:36.676601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7494347434373410198 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-04-17T18:15:36.676630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:36.676645Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-17T18:15:36.676657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-04-17T18:15:36.676678Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715687:0 129 -> 240 2025-04-17T18:15:36.677031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-17T18:15:36.677110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-17T18:15:36.677168Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2025-04-17T18:15:36.677179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-17T18:15:36.677196Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-17T18:15:36.677207Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:15:36.677480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:15:36.677582Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976715687:0 progress is 1/1 2025-04-17T18:15:36.677596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-04-17T18:15:36.677614Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-04-17T18:15:36.677622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-04-17T18:15:36.677634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2025-04-17T18:15:36.677674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7494347434373410423:2393] message: TxId: 281474976715687 2025-04-17T18:15:36.677689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-04-17T18:15:36.677711Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-04-17T18:15:36.677720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715687:0 2025-04-17T18:15:36.677780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:15:36.678276Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-17T18:15:36.678310Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-17T18:15:36.679438Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-17T18:15:36.680057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494347434373410198 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-04-17T18:15:36.680107Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:15:36.680373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-17T18:15:36.680814Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-17T18:15:36.681333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:15:36.681528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:15:36.681717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:15:36.681770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:15:36.681822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId 
[OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:36.682143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:15:36.682163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:15:36.682278Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:15:36.682818Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:15:36.682849Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7494347434373410311:2577], serverId# [2:7494347434373410312:2578], sessionId# [0:0:0] 2025-04-17T18:15:36.683241Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:15:36.683710Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:15:36.683785Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 >> TLocksTest::GoodNullLock [GOOD] >> TLocksTest::Range_BrokenLock1 [GOOD] >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TTxDataShardMiniKQL::WriteEraseRead >> TTxDataShardMiniKQL::ReadSpecialColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2025-04-17T18:15:01.205121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347282994091930:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.205161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd2/r3tmp/tmpaoKKJi/pdisk_1.dat 2025-04-17T18:15:01.664159Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.685358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.685471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.705386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29488 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:02.037374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.055052Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.083082Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:02.094466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.234412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.306168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd2/r3tmp/tmpRVTJfb/pdisk_1.dat 2025-04-17T18:15:04.521343Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:04.523222Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:15:04.571007Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:04.571077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:04.572196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20486 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:04.704952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:04.721687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:04.800925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.854040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:07.566662Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347309432138922:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:07.566725Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd2/r3tmp/tmpiVZR7a/pdisk_1.dat 2025-04-17T18:15:07.683113Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:07.728149Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:07.728221Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:07.730128Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25579 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:07.886908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:15:07.907067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:07.983085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:08.061574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:10.787061Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347322504470721:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:10.792333Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd2/r3tmp/tmpaF7a8V/pdisk_1.dat 2025-04-17T18:15:10.904742Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:10.954168Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:10.954275Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:10.958684Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21843 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:11.159726Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:11.167861Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, ... onnecting -> Connected TClient is connected to server localhost:65090 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:22.703332Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.710365Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.726635Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.792912Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:22.870913Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.274391Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347389259172395:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:26.274428Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd2/r3tmp/tmpETe249/pdisk_1.dat 2025-04-17T18:15:26.425236Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:26.441271Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:26.441363Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:26.442830Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3191 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:26.673887Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.678619Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:26.697370Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.825635Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.905008Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:30.457253Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347408553891206:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:30.457595Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd2/r3tmp/tmpReQYLF/pdisk_1.dat 2025-04-17T18:15:30.676198Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:30.703474Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:30.703572Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:30.707371Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3153 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:30.987663Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:31.005223Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:31.009938Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:31.077096Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.137571Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.937439Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347423927924633:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.937497Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd2/r3tmp/tmpnVCZv0/pdisk_1.dat 2025-04-17T18:15:35.086201Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:35.118251Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:35.118367Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:35.119495Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28867 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:35.383142Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:35.405103Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.472005Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.532837Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TTxDataShardMiniKQL::Write >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> TTxDataShardMiniKQL::ReadConstant >> KqpJoin::JoinMismatchDictKeyTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-04-17T18:15:32.855732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347416756432452:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.857333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d33/r3tmp/tmpk0cy2G/pdisk_1.dat 2025-04-17T18:15:33.269174Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:33.274034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:33.274137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.281348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31521 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.529792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:33.678882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:15:33.679084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:15:33.679198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:15:33.679233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-17T18:15:33.679285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:15:33.679472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:15:33.679513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:33.681407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-17T18:15:33.681509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-04-17T18:15:33.681646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:15:33.681670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:15:33.681790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:15:33.681893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:15:33.681924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347421051400261:2382], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-17T18:15:33.681942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347421051400261:2382], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-17T18:15:33.681975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:15:33.681995Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:15:33.682025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 
2025-04-17T18:15:33.684723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:33.686147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T18:15:33.686473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T18:15:33.686493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-17T18:15:33.686511Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-17T18:15:33.686533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T18:15:33.686774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T18:15:33.686827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-17T18:15:33.686834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-17T18:15:33.686849Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-17T18:15:33.686865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:15:33.686942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-17T18:15:33.687066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:33.687078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-17T18:15:33.687094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:33.688220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-04-17T18:15:33.688328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:15:33.688428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 
281474976710658 2025-04-17T18:15:33.688455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-17T18:15:33.690145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744913733738, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:15:33.690274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744913733738 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:15:33.690314Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744913733738, at schemeshard: 72057594046644480 2025-04-17T18:15:33.690434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-17T18:15:33.690589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:15:33.690636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:15:33.692103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:15:33.692124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:15:33.692258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:15:33.692329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:15:33.692354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347421051400261:2382], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-17T18:15:33.692370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494347421051400261:2382], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-17T18:15:33.692410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:15:33.692438Z node 1 : ... 
hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:15:33.886578Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:15:33.886647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-17T18:15:33.886687Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-04-17T18:15:33.886707Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:15:33.886738Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7494347421051400230:2351] 2025-04-17T18:15:33.886757Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [1:7494347421051400230:2351] 2025-04-17T18:15:33.886820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-17T18:15:33.886836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-17T18:15:33.886848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-04-17T18:15:33.886855Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7494347421051400227:2351] EventType# 269156352 2025-04-17T18:15:33.886858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-04-17T18:15:33.886949Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-04-17T18:15:33.887012Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:15:33.887139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-04-17T18:15:33.887183Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-04-17T18:15:33.887240Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:15:33.887297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 
2025-04-17T18:15:33.887311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2025-04-17T18:15:33.887320Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-17T18:15:33.887329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-17T18:15:33.887376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2025-04-17T18:15:33.887387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7494347421051400628:2303] 2025-04-17T18:15:33.887456Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-04-17T18:15:33.887484Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:15:33.887531Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.887557Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:121:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.887664Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-04-17T18:15:33.887942Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:15:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.887966Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:15:1:24576:109:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.888068Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} commited cookie 1 for step 15 2025-04-17T18:15:33.888755Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.888780Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:118:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.888789Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.888812Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:131:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:15:33.888825Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-04-17T18:15:33.888840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2025-04-17T18:15:33.888879Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-04-17T18:15:33.888917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2025-04-17T18:15:33.889025Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received poison pill [1:7494347421051400629:2303] 2025-04-17T18:15:33.889047Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:7494347421051400629:2303] 2025-04-17T18:15:33.889094Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] Got PeerClosed from# [1:7494347421051400629:2303] 2025-04-17T18:15:33.950336Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-17T18:15:33.950397Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:15:33.950489Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:15:33.950518Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:15:33.950557Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7494347421051400230:2351] 2025-04-17T18:15:33.950571Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [1:7494347421051400230:2351] 2025-04-17T18:15:33.950605Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7494347421051400227:2351] EventType# 269156352 2025-04-17T18:15:36.042495Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347433107239197:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:36.042545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d33/r3tmp/tmpbkGoeX/pdisk_1.dat 2025-04-17T18:15:36.109855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:36.164589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:36.164669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:36.166395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9757 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:36.285721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.290305Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.309939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.377810Z node 2 :TX_PROXY ERROR: Actor# [2:7494347445992142199:2614] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000087 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-04-17T18:15:39.377890Z node 2 :TX_PROXY ERROR: Actor# [2:7494347445992142199:2614] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-04-17T18:15:01.221474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347283800203532:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.222359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd0/r3tmp/tmpUS2CdP/pdisk_1.dat 2025-04-17T18:15:01.689226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.723043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.723125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.728774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17788 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.017466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.033865Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.048172Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:02.052603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.212574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.260319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:04.477761Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347296081811477:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:04.477833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd0/r3tmp/tmppBHzoe/pdisk_1.dat 2025-04-17T18:15:04.588793Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:04.640814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:04.640885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:04.642984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7207 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:04.817770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.823385Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.832725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.907866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:04.948586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:07.803365Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347309295341874:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:07.803419Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd0/r3tmp/tmpnXLtRl/pdisk_1.dat 2025-04-17T18:15:07.917459Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:07.947916Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:07.947993Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:07.949525Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9427 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:08.116966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:08.145526Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:08.216453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:08.263512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:11.350704Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347327013972489:2168];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.417231Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd0/r3tmp/tmpkOSOzz/pdisk_1.dat 2025-04-17T18:15:11.527716Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:11.551023Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:11.551360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:11.555688Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26996 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025 ... nished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:23.925884Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.931020Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.941922Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:23.945986Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:24.014045Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.092699Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:27.615307Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347394635375542:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:27.615937Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd0/r3tmp/tmpIoowDD/pdisk_1.dat 2025-04-17T18:15:27.736702Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:27.762719Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:27.762812Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:27.765626Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23871 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:28.047560Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.051704Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.075050Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.145678Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:28.202415Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:32.028546Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347416120836431:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.028603Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd0/r3tmp/tmpecS7QF/pdisk_1.dat 2025-04-17T18:15:32.208625Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.208736Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.212378Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:32.234787Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9573 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:32.478374Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.486418Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.497959Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:32.504184Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.576271Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.705514Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:36.640515Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347433962188790:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:36.640612Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dd0/r3tmp/tmpagAPot/pdisk_1.dat 2025-04-17T18:15:36.784773Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:36.819131Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:36.819242Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:36.821063Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11241 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:37.132754Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:37.154688Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.238828Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.292138Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-04-17T18:15:01.220190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347281434851800:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.220277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dce/r3tmp/tmpowIimr/pdisk_1.dat 2025-04-17T18:15:01.720740Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.725769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.725854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.728903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11141 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.047275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.066459Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.088661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.240339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.320975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:04.681268Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347295121784125:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:04.681368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dce/r3tmp/tmpggn3LO/pdisk_1.dat 2025-04-17T18:15:04.824714Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:04.836877Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:04.836977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:04.839766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19332 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:05.071555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.082374Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.102373Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:05.106809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:05.180737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:05.229818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:07.983071Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347309526464946:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:07.983134Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dce/r3tmp/tmpouyZqo/pdisk_1.dat 2025-04-17T18:15:08.105817Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:08.151030Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:08.151118Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:08.152649Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6749 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:08.326220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:08.348576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:08.423789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:08.494900Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:11.317988Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347326406655167:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.318080Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dce/r3tmp/tmpJH89JX/pdisk_1.dat 2025-04-17T18:15:11.498921Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:11.499020Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:11.499349Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:11.512554Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14103 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 202 ... 68897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9327 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:23.917252Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.923785Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:23.938626Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.025944Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:24.136787Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:27.517963Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347394091560871:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:27.518118Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dce/r3tmp/tmpjwjJQg/pdisk_1.dat 2025-04-17T18:15:27.666923Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:27.690808Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:27.690895Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:27.693936Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12672 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:27.932231Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:27.956565Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.038068Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:28.162322Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dce/r3tmp/tmpAACXtx/pdisk_1.dat 2025-04-17T18:15:32.256448Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:32.274335Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:15:32.276033Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.276119Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.278886Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18985 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:32.571337Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:32.587818Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:32.596585Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.667855Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.737806Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:36.578827Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347432177118950:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:36.578886Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dce/r3tmp/tmpFC21dn/pdisk_1.dat 2025-04-17T18:15:36.767586Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:36.802837Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:36.802942Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:36.804777Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26166 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:37.065382Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.072626Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.082324Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:37.087007Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.165549Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.276423Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks >> TLocksTest::Range_BrokenLock3 [GOOD] >> TReplicationTests::Create >> TReplicationTests::CreateSequential >> KqpJoin::FullOuterJoin >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] Test command err: Trying to start YDB, gRPC: 19006, MsgBus: 25636 2025-04-17T18:14:47.331160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347220953585843:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:47.331223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001540/r3tmp/tmpyCd5MP/pdisk_1.dat 2025-04-17T18:14:47.742792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:47.772337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:47.772453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:47.774218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19006, node 1 2025-04-17T18:14:47.824976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:47.825008Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:47.825022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:47.825127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25636 TClient is connected to server localhost:25636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:14:48.377318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:14:48.395041Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:14:48.400623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.536873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.691538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:48.773072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:50.340204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347233838489479:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.340339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.641868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.673349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.698077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.722726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.750823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.778532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.807137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:50.879590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347233838490135:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.879677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.879718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347233838490140:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:50.883186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:50.891747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347233838490142:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:50.961673Z node 1 :TX_PROXY ERROR: Actor# [1:7494347233838490197:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:51.808752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.837735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.868835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:52.331166Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347220953585843:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:52.331260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
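The SqlIn warning above (code 1108) names a concrete remedy. As a minimal sketch only — the table path, column names, and values below are hypothetical and do not appear anywhere in this log — the suggested pragma would sit at the top of the offending query:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

-- Hypothetical query shape: IN over a collection containing NULL (or over a
-- nullable column) is what triggers warning code 1108. Judging from the
-- pragma's name, it opts IN into ANSI-style handling for empty or nullable
-- item collections; without it, YQL warns that the result may be unexpected.
SELECT Key, Value
FROM `/Root/ExampleTable`
WHERE Value IN ("a", "b", NULL);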
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 16929, MsgBus: 21827 2025-04-17T18:14:54.714721Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347250616993695:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:54.714841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001540/r3tmp/tmpR808Fx/pdisk_1.dat 2025-04-17T18:14:54.817981Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16929, node 2 2025-04-17T18:14:54.855427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:54.855528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:54.862958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:54.888641Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:54.888661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:54.888666Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:54.888757Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21827 TClient is connected to server localhost:21827 WaitRootIsUp 'Root'... ... e: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:25.711124Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7494347361967250062:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.711198Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:26.872694Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.919161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:15:26.973656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:22: Warning: At function: Filter, At function: Coalesce
:7:31: Warning: At function: SqlIn
:7:31: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 12510, MsgBus: 32328 2025-04-17T18:15:30.965449Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347407519504961:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:30.965488Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001540/r3tmp/tmp2FJ80q/pdisk_1.dat 2025-04-17T18:15:31.263335Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.282033Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.282152Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:31.284030Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12510, node 6 2025-04-17T18:15:31.342945Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:31.342977Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:31.342985Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:31.343164Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32328 TClient is connected to server localhost:32328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:15:31.967346Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:31.978601Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:31.990104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.066619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:32.313234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.410660Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.412314Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347428994343199:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:35.412446Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:35.488698Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.526803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.562392Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.603979Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.645964Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.720207Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.796069Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.898063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347428994343873:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:35.898169Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:35.898480Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347428994343878:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:35.903595Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:35.918147Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347428994343880:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:35.965856Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7494347407519504961:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:35.965994Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:35.976651Z node 6 :TX_PROXY ERROR: Actor# [6:7494347428994343934:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:37.451480Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:37.502116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:15:37.548248Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
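[Editorial note] The recurring "Resource pool default not found or you don't have access permissions" warnings above are transient: the log itself shows TPoolCreatorActor creating the pool (ESchemeOpCreateResourcePool) and a later TX_PROXY check finding the path already exists ("path exist, request accepts it"). A test that wants to avoid this startup race can provision a pool up front; the sketch below is hypothetical — the pool name and limits are not from this run:

    -- Hypothetical sketch: pre-create a resource pool so first queries do not
    -- hit NOT_FOUND while the workload service bootstraps the default pool.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );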
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
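[Editorial note] The SqlIn warning above (the same block repeats just below for a second query) recommends an explicit pragma because IN over a nullable column follows YQL's legacy semantics by default. A minimal sketch of the suggested fix follows; the sample rows are hypothetical and not taken from the test:

    -- With the pragma, IN uses ANSI three-valued logic: the NULL row yields
    -- NULL (and is filtered out by WHERE) rather than matching unexpectedly.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    $data = [<|k: Just(1)|>, <|k: Nothing(Int32?)|>];  -- k is Int32? (nullable)

    SELECT * FROM AS_TABLE($data)
    WHERE k IN (1, 2, 3);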
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 11552161844908061364 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2025-04-17T18:15:41.593052Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-17T18:15:41.593674Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-04-17T18:15:41.677143Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] >> TSchemeShardTTLTestsWithReboots::AlterTable >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyLevel >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2025-04-17T18:15:41.543892Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.564960Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.565394Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.565604Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.608879Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.620117Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.698734Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.703882Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.703951Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.703990Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.705036Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.705385Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.705437Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:15:41.705699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.705736Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.790744Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.843845Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:15:41.844052Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.844169Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:15:41.844217Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.844266Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.844309Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.844459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.844503Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.844764Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:41.844851Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:41.844959Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.844996Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.845062Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:41.845107Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.845155Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.845189Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.845227Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.845324Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.845357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.845408Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:15:41.853175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.853273Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.853384Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.853558Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.853605Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 
2025-04-17T18:15:41.853696Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.853752Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.853793Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:15:41.853847Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.853880Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.854202Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.854243Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:15:41.854280Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.854325Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.854370Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.854427Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:41.854468Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:15:41.854498Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.854522Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:15:41.866568Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.866635Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.866683Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.866731Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.866801Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.867389Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.867446Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.867487Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:15:41.867567Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.867600Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.867710Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.867749Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.867798Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.867835Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 
2025-04-17T18:15:41.876577Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.876652Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.876896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.876940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.876999Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.877039Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.877077Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.877117Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.877156Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.877208Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.877244Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.877294Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.877343Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.877545Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:15:41.877580Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.877603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.877626Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.877647Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.877711Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.877735Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:15:41.877769Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:41.877800Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:41.877843Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:15:41.877891Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:15:41.877925Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:15:41.877998Z node 1 :TX_DATA ... 
tateInit, received event# 268828673, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:43.576802Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:43.580896Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:232:2225] 2025-04-17T18:15:43.581040Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:43.584724Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-04-17T18:15:43.609587Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:43.609677Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:43.611303Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:43.611367Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:43.611420Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:43.611710Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:43.611847Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:43.611883Z node 3 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [3:275:2225] in generation 3 2025-04-17T18:15:43.643670Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:43.643754Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-04-17T18:15:43.643844Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-17T18:15:43.643942Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-04-17T18:15:43.644173Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:280:2264] 2025-04-17T18:15:43.644224Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:43.644274Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-17T18:15:43.644312Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:43.644541Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-17T18:15:43.644642Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-17T18:15:43.644766Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:232:2225], Recipient [3:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:43.644813Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:43.645143Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:43.645237Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:43.645372Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 232 RawX2: 12884904113 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-04-17T18:15:43.645455Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:232:2225]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-17T18:15:43.645488Z node 3 :TX_DATASHARD TRACE: StateWork, processing event 
TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T18:15:43.645527Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-17T18:15:43.645564Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:43.645611Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:43.645648Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:43.645680Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:43.645709Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:43.645741Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:43.645771Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:43.645812Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:43.645904Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:232:2225]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-04-17T18:15:43.645937Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T18:15:43.645985Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-04-17T18:15:43.646090Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:282:2266] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:15:43.646119Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:15:43.646222Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:123:2149], Recipient [3:232:2225]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-17T18:15:43.646255Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T18:15:43.646294Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-17T18:15:43.646342Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-17T18:15:43.658244Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:278:2262] ServerId: [3:282:2266] } 2025-04-17T18:15:43.658298Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:15:43.705284Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T18:15:43.705352Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T18:15:43.705559Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:288:2270], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:43.705592Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:43.705643Z node 3 
:TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:286:2269], serverId# [3:288:2270], sessionId# [0:0:0] 2025-04-17T18:15:43.705864Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-17T18:15:43.705897Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:43.706005Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:43.706728Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-17T18:15:43.706823Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:15:43.706866Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-17T18:15:43.706906Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:43.706944Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:43.706979Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:15:43.707037Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-17T18:15:43.707075Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:15:43.707095Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:15:43.707119Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:15:43.707142Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:43.707526Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-17T18:15:43.707594Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:15:43.707650Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:15:43.707678Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:15:43.707709Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:43.707736Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 
2025-04-17T18:15:43.707780Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T18:15:43.707854Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete 2025-04-17T18:15:43.707883Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:43.707922Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-17T18:15:43.707984Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-17T18:15:43.708024Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:15:43.708047Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-17T18:15:43.708072Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-17T18:15:43.708126Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:43.708164Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-17T18:15:43.708203Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-04-17T18:15:02.955635Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347287166470998:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:02.955693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001daf/r3tmp/tmpgownRl/pdisk_1.dat 2025-04-17T18:15:03.382555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:03.406977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:03.407074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:03.409843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12979 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:03.630048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.650827Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.667246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:03.771595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:03.846294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:06.284281Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347303000676748:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:06.285286Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001daf/r3tmp/tmpgXNXYe/pdisk_1.dat 2025-04-17T18:15:06.528743Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:06.540317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:06.540388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:06.542278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16683 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:06.736721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.745197Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.760553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.809098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.852347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:09.653606Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347315699303225:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:09.653679Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001daf/r3tmp/tmpzm5oOW/pdisk_1.dat 2025-04-17T18:15:09.825009Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:09.858929Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:09.859008Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:09.860582Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4967 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:10.031293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.049154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.117149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:10.166306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.288270Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347333277560501:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:13.288378Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001daf/r3tmp/tmpYmpxaz/pdisk_1.dat 2025-04-17T18:15:13.569884Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:13.572552Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:13.572659Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:13.574931Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27331 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:13.795791Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... Connecting -> Connected TClient is connected to server localhost:22419 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:25.211869Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.222480Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.243477Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.329834Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:25.412108Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:29.104968Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347401059296202:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:29.105022Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001daf/r3tmp/tmp7X42Qr/pdisk_1.dat 2025-04-17T18:15:29.250165Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:29.322385Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:29.322488Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:29.324233Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7532 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:29.583361Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.589423Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.603472Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.681281Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.737353Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:33.793954Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347417688728324:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:33.794015Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001daf/r3tmp/tmp7qlo0h/pdisk_1.dat 2025-04-17T18:15:33.919890Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:33.943719Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:33.943822Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.945814Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3319 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:34.199408Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.205178Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.219983Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.296541Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.357379Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:38.391851Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347443049696247:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:38.391904Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001daf/r3tmp/tmp79X51P/pdisk_1.dat 2025-04-17T18:15:38.556145Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:38.589409Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:38.589489Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:38.591235Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8078 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:38.856211Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:38.876273Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:38.956509Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.031461Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2025-04-17T18:15:41.543883Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.564582Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.565003Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.565245Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.615590Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.619878Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.703327Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.705135Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.705213Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.705263Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.705600Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.705947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.706044Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:15:41.706359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.706402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.790780Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.821460Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:15:41.826355Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.826494Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:15:41.826525Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.826560Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.826595Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.826755Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.826809Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.827021Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:41.827082Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:41.827208Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.827239Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.827306Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 
2025-04-17T18:15:41.827354Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.827383Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.827412Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.827455Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.827535Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.827577Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.827623Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:15:41.835060Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.835118Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.835184Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.835290Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.835330Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:15:41.835380Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.835427Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.835458Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:15:41.835496Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.835521Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.835765Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.835793Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:15:41.835820Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.835846Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.835881Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.835905Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:41.835932Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:15:41.835964Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.835988Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit 
WaitForPlan 2025-04-17T18:15:41.850997Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.851067Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.851103Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.851142Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.851227Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.851842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.851896Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.851961Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:15:41.852042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.852071Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.852203Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.852247Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.852300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.852343Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:15:41.860545Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.860621Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.860894Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.860956Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.861013Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.861052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.861086Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.861129Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.861172Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.861234Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.861273Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.861323Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.861379Z node 1 
:TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.861548Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:15:41.861590Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.861614Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.861634Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.861656Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.861731Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.861763Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:15:41.861797Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:41.861830Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:41.861870Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:15:41.861918Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:15:41.861973Z node 1 :TX_DATASHARD TR ... 931557Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:43.931592Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2025-04-17T18:15:43.931620Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:43.936060Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T18:15:43.936100Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T18:15:43.936339Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:300:2282], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:43.936363Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:43.936405Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:299:2281], serverId# [3:300:2282], sessionId# [0:0:0] 2025-04-17T18:15:43.936561Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* 
h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? \005?\034?r\000\006\000?\036\003?x\005?&\006\0 2025-04-17T18:15:43.941761Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:43.941831Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:43.942447Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2025-04-17T18:15:43.942510Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T18:15:43.942541Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2025-04-17T18:15:43.942574Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:43.942604Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:43.942642Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:15:43.942684Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2025-04-17T18:15:43.942714Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T18:15:43.942754Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:15:43.942778Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:15:43.942798Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:43.943258Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2025-04-17T18:15:43.943316Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:15:43.943362Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T18:15:43.943385Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:15:43.943408Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:43.943431Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2025-04-17T18:15:43.943464Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-17T18:15:43.943519Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2025-04-17T18:15:43.943548Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:43.943599Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2025-04-17T18:15:43.943629Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 
2025-04-17T18:15:43.943670Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-17T18:15:43.943691Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2025-04-17T18:15:43.943715Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2025-04-17T18:15:43.943766Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:43.943799Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-04-17T18:15:43.943831Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
>> TLocksTest::CK_BrokenLock [GOOD]
|92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TObjectStorageListingTest::MaxKeysAndSharding [GOOD]
>> TObjectStorageListingTest::SchemaChecks
>> TLocksFatTest::RangeSetNotBreak [GOOD]
>> TLocksTest::Range_GoodLock0 [GOOD]
>> TLocksTest::Range_GoodLock1
>> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD]
>> TReplicationTests::ConsistencyLevel [GOOD]
>> TReplicationTests::Alter
>> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD]
>> TSchemeShardColumnTableTTL::CreateColumnTable
>> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD]
>> TReplicationTests::CreateInParallel [GOOD]
>> TReplicationTests::CreateDropRecreate
|92.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD]
Test command err:
2025-04-17T18:15:41.544694Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.562141Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.563366Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.564227Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.611816Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.617781Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.700958Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.703479Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.703563Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.703617Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.705100Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.705505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.705580Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:15:41.705888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.705935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.790765Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.824654Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
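The trace above shows a freshly bootstrapped datashard walking its init sequence (TEvBoot, TEvRestored, TxInitSchema, TTxInit) and then parking in WaitScheme until a schema transaction arrives. As a rough sketch of that lifecycle — the enum, struct, and method names below are invented for illustration, not the actual NKikimr::NDataShard types:

```cpp
#include <cstdint>
#include <iostream>

// Illustrative only: a datashard-style tablet moves through init steps and
// refuses most work until a schema has been applied ("WaitScheme" in the log).
enum class EShardState : uint8_t {
    Init,        // TEvBoot / TEvRestored received, executor restoring local db
    WaitScheme,  // no table schema yet: data txs rejected, scheme txs queued
    Ready,       // schema applied; time cast registered, change sender active
};

struct TShardSketch {
    EShardState State = EShardState::Init;

    void OnInitComplete() {
        // Mirrors "Switched to work state WaitScheme": without processing
        // params the shard cannot even register with the time cast service.
        State = EShardState::WaitScheme;
        std::cout << "not sending time cast registration in state WaitScheme\n";
    }

    bool CanActivateChangeSender() const { return State == EShardState::Ready; }
};
```

This matches the subsequent log lines, where the change sender is created but "Cannot activate change sender ... state: WaitScheme" until the table schema lands.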
2025-04-17T18:15:41.824876Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.825005Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:15:41.825056Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.825119Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.825156Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.825325Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.825370Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.825651Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:41.825751Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:41.825873Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.825915Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.826009Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:41.826057Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.826102Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.826154Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.826355Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.826476Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.826513Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.826572Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:15:41.829363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.829426Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.829518Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.829681Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.829727Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:15:41.829824Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.829887Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
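The entries above and below this point ("Trying to execute [0:1] ... on unit CheckSchemeTx", "Advance execution plan ...", "Add [0:1] ... to execution unit StoreSchemeTx") describe a pipeline of execution units that every operation is pushed through. A minimal sketch of that driver loop, using invented unit and status names rather than the real datashard API:

```cpp
#include <deque>
#include <vector>

// Hypothetical statuses; the real datashard has many more units and codes.
enum class EStatus { Executed, NotReady, Restart, DelayComplete };

struct TOperation;  // an operation id such as [0:1] or [1000001:1]

struct IExecutionUnit {
    virtual ~IExecutionUnit() = default;
    virtual EStatus Execute(TOperation& op) = 0;  // "Trying to execute ... on unit X"
    virtual void Complete(TOperation& op) = 0;    // "Complete execution for ... on unit X"
};

// Drives one operation through its unit plan, mirroring the repeated
// "Execute -> Advance execution plan -> Add ... to execution unit" lines.
void RunPlan(TOperation& op, std::deque<IExecutionUnit*>& plan,
             std::vector<IExecutionUnit*>& delayedComplete) {
    while (!plan.empty()) {
        IExecutionUnit* unit = plan.front();
        switch (unit->Execute(op)) {
            case EStatus::NotReady:
                return;                           // parked, e.g. on WaitForPlan
            case EStatus::Restart:
                continue;                         // re-enter this unit later
            case EStatus::DelayComplete:
                delayedComplete.push_back(unit);  // Complete() runs after commit
                [[fallthrough]];
            case EStatus::Executed:
                plan.pop_front();                 // "Advance execution plan ..."
        }
    }
    // "Execution plan for [...] has finished"
}
```

Statuses like ExecutedNoMoreRestarts or DelayCompleteNoMoreRestarts seen in the trace combine these cases: advance to the next unit, but forbid further restarts of the transaction.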
2025-04-17T18:15:41.829924Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:15:41.830011Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.830058Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.830376Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.830427Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:15:41.830468Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.830520Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.830573Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.830630Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:41.830672Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:15:41.830707Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.830733Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:15:41.846691Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.846774Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.846809Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.846854Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.846933Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.847554Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.847614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.847672Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:15:41.847767Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.847803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.847957Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.848013Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.848054Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.848095Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:15:41.851985Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.852062Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.852309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.852357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.852416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.852463Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.852501Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.852558Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.852606Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.852653Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.852705Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.852753Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.852810Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.852956Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2025-04-17T18:15:41.852996Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.853030Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.853066Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.853099Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.853385Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.853435Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.853612Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-17T18:15:41.853664Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.853718Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.853762Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.853810Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.853879Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.853912Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEc ... 
at 9437185 2025-04-17T18:15:44.655405Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 4 -> retry Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:15:44.655441Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} touch new 0b, 0b lo load (0b in total), 86213808b requested for data (96990534b in total) 2025-04-17T18:15:44.655476Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release tx data 2025-04-17T18:15:44.655498Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} released on update Res{3 10776726b}, Memory{0 dyn 0} 2025-04-17T18:15:44.655529Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} update Res{3 96990534b} type transaction 2025-04-17T18:15:44.655591Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-17T18:15:44.655644Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:44.655668Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:44.656306Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2025-04-17T18:15:44.859271Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-04-17T18:15:44.859347Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:15:44.859395Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:44.859421Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:15:44.859442Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-04-17T18:15:44.859466Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-04-17T18:15:44.859638Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2025-04-17T18:15:44.859655Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-04-17T18:15:44.859672Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-04-17T18:15:44.859690Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2025-04-17T18:15:44.859709Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2025-04-17T18:15:44.859723Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-04-17T18:15:44.859738Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2025-04-17T18:15:44.859757Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:44.859781Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:44.859800Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 
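The TABLET_EXECUTOR and RESOURCE_BROKER entries earlier in this block show the memory-escalation pattern this test exercises: a transaction exhausts its current grant ("hope 4 -> retry"), releases its tx data, requests a larger allocation (96990534b total) from the broker, and is re-run once the resources are assigned from queue_transaction. A toy version of that retry loop, with made-up types standing in for the real broker API:

```cpp
#include <cstdint>
#include <functional>
#include <optional>

// Illustrative stand-in for the resource broker; the real one manages named
// queues ("queue_transaction") with priorities and planned-usage accounting.
struct TBrokerSketch {
    void Acquire(uint64_t /*bytes*/) {}  // may park the task: "Assigning waiting task"
    void Release(uint64_t /*bytes*/) {}  // "release Res{...}"
};

// attempt(quota) returns std::nullopt when the tx fits under `quota`, or the
// larger total it now needs -- mirroring "hope 4 -> retry ... 86213808b
// requested for data (96990534b in total)" followed later by "hope 5 -> done".
inline void RunWithEscalation(
        TBrokerSketch& broker, uint64_t quota,
        const std::function<std::optional<uint64_t>(uint64_t)>& attempt) {
    for (;;) {
        broker.Acquire(quota);
        std::optional<uint64_t> more = attempt(quota);  // one "hope"
        broker.Release(quota);
        if (!more)
            return;     // transaction completed under the current grant
        quota = *more;  // tx released its data; retry with the bigger grant
    }
}
```

Releasing the data before resubmitting is what keeps the per-queue planned usage honest — visible in the log as usage moving from 16.936776 to 33.873553 and back as grants are taken and returned.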
2025-04-17T18:15:44.859817Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:44.859883Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-17T18:15:44.859922Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-17T18:15:44.860058Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2025-04-17T18:15:44.860100Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) to queue queue_transaction 2025-04-17T18:15:44.860145Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) from queue queue_transaction 2025-04-17T18:15:44.860174Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) to queue queue_transaction 2025-04-17T18:15:44.860211Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311])) 2025-04-17T18:15:44.860322Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-17T18:15:44.860405Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:254:2225]) (release resources {0, 96990534}) 2025-04-17T18:15:44.860440Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:254:2225])) 2025-04-17T18:15:44.860504Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-17T18:15:44.860531Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-04-17T18:15:44.861317Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2025-04-17T18:15:45.087916Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-04-17T18:15:45.088056Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:15:45.088162Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-04-17T18:15:45.088251Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-04-17T18:15:45.088312Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompleteOperation 2025-04-17T18:15:45.088374Z node 3 :TX_DATASHARD TRACE: Trying to execute 
[6:5] at 9437185 on unit CompleteOperation 2025-04-17T18:15:45.088650Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2025-04-17T18:15:45.088697Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-04-17T18:15:45.088750Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-04-17T18:15:45.088808Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-04-17T18:15:45.088845Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2025-04-17T18:15:45.088882Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-04-17T18:15:45.088925Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2025-04-17T18:15:45.088975Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:45.089021Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-17T18:15:45.089097Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-17T18:15:45.089145Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-17T18:15:45.089316Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-17T18:15:45.089398Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-17T18:15:45.089714Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311]) (release resources {0, 96990534}) 2025-04-17T18:15:45.089797Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:365:2311])) 2025-04-17T18:15:45.103063Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-04-17T18:15:45.103131Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:15:45.103180Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-04-17T18:15:45.103261Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-17T18:15:45.103383Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-17T18:15:45.103460Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T18:15:45.103719Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-04-17T18:15:45.103751Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:45.103779Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-04-17T18:15:45.103819Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:99:2134], exec latency: 1 ms, propose 
latency: 3 ms 2025-04-17T18:15:45.103870Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T18:15:45.104123Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:45.104376Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:343:2311], Recipient [3:454:2396]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-17T18:15:45.104428Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:15:45.104488Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-04-17T18:15:45.104590Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:232:2225], Recipient [3:454:2396]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-17T18:15:45.104615Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-17T18:15:45.104637Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:44.582616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:44.582706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:44.582761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:44.582797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:44.583863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:44.583909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:44.584004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:44.584067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:44.585249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:44.668090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:44.668146Z node 1 :IMPORT WARN: Table profiles were not loaded
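Stepping back to the read-set acknowledgements at the end of the previous test's output: destination shards 9437184 and 9437185 each send a delayed "RS Ack" back to source shard 9437186 (Seqno# 1 and 2), and the source logs "Receive RS Ack ... txId 5". In a distributed transaction, each destination confirms it has consumed the source's read set so the source can stop retransmitting and forget it. A bare-bones sketch of that bookkeeping, with invented struct names:

```cpp
#include <cstdint>
#include <map>
#include <utility>

// Illustrative bookkeeping on the source shard (9437186 above): every read
// set it ships stays "in flight" until the destination sends TEvReadSetAck.
struct TOutgoingReadSetsSketch {
    // key: (destination tablet, seqno) -> txId
    std::map<std::pair<uint64_t, uint64_t>, uint64_t> InFlight;

    void OnSent(uint64_t dest, uint64_t seqno, uint64_t txId) {
        InFlight[{dest, seqno}] = txId;  // delivered, awaiting acknowledgement
    }

    // "Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185
    // txId 5" -- the destination consumed the read set; forget it.
    void OnAck(uint64_t dest, uint64_t seqno) {
        InFlight.erase({dest, seqno});
    }
};
```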
2025-04-17T18:15:44.684226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:44.685305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:44.685490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:44.694238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:44.694497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:44.697516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.698565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:44.703628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.711389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:44.711472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.711653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:44.711687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:44.711752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:44.711871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.722987Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:44.853411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:44.853645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.853833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:44.854084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:44.854144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.858026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.858222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:44.858457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.858536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:44.858577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:44.858617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:44.861063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.861129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:44.861164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:44.863253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.863303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.863401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.863453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.867317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:44.869402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:44.869658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:44.870779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.870939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:44.870993Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.871310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:44.871378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.871542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:44.871656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:44.873924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:44.873992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:44.874211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.874262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:44.874666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.874718Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:44.874835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:44.874872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.874913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:44.874950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.874991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:44.875054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.875092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:44.875123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:44.875196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:44.875247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:44.875295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:44.877119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:44.877243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:44.877306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:15:45.260168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T18:15:45.268089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:45.268709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:15:45.269779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1394 } } 2025-04-17T18:15:45.269822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T18:15:45.269965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1394 } } 2025-04-17T18:15:45.270076Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1394 } } 2025-04-17T18:15:45.270709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 406 RawX2: 4294969671 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:45.270751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T18:15:45.270863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 406 RawX2: 4294969671 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:45.270909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:45.270975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 406 RawX2: 4294969671 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:45.271028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:45.271061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply 
TEvSchemaChanged CollectSchemaChanged: false 2025-04-17T18:15:45.273305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.273643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.286169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:45.286255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:45.286421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:45.286461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:45.286522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:45.286563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:45.286625Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.286662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:15:45.286692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:15:45.286714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T18:15:45.289866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.290242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.290290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-17T18:15:45.290362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:15:45.290419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-17T18:15:45.290487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-17T18:15:45.290518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-17T18:15:45.292456Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.292515Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:15:45.292604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:45.292636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:45.292669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:45.292697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:45.292731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:15:45.292801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T18:15:45.292869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:45.292929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:15:45.292960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:15:45.293087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:15:45.293115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:15:45.294776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:15:45.294820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:436:2397] TestWaitNotification: OK eventTxId 102 2025-04-17T18:15:45.295329Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:45.295584Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 238us result status StatusSuccess 2025-04-17T18:15:45.296061Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:44.654041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:44.654133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:44.654214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:44.654248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:44.654285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:44.654312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:44.654406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:44.654471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:44.654763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:44.737102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:44.737165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:44.755421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:44.755858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:44.756065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:44.764265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:44.764457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:44.765061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.765276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:44.768059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.769299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:44.769356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.769541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:44.769583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:44.769632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:44.769748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.775943Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:44.886936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:44.887170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.887354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:44.887565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:44.887633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.889628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.889746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:44.889920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.890001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:44.890035Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:44.890065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:44.891756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.891804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:44.891836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:44.893547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.893594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.893635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.893675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.897007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:44.898760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:44.898932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:44.899796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.899922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:44.899971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.900248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:44.900302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.900456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:44.900523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:44.902355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:44.902399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:44.902560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.902617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:44.902994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.903049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:44.903136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:44.903165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.903195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:44.903220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.903251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:44.903303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.903335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:44.903361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:44.903414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:44.903450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:44.903489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:44.905023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:44.905125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:44.905166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
18:15:45.191670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T18:15:45.191742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/3, is published: true 2025-04-17T18:15:45.192208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969602 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:45.192248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-17T18:15:45.192350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969602 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:45.192406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:45.192494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969602 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:45.192555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:45.192599Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:15:45.192643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:15:45.192706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-17T18:15:45.193179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:45.193210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-17T18:15:45.193312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:45.193355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:45.193408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:45.193443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:45.193471Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.193495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:15:45.193537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:15:45.196938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:45.197036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:45.199748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:45.199830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:15:45.199904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.199957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:45.200001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:15:45.200229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.200391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:15:45.200424Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-17T18:15:45.200499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:15:45.200524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:15:45.200553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:15:45.200582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:15:45.200618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-17T18:15:45.200787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:45.200809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:15:45.200854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:15:45.200877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:15:45.200903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:15:45.200917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:15:45.200931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-17T18:15:45.200977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] 
message: TxId: 101 2025-04-17T18:15:45.201007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:15:45.201038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:15:45.201060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:15:45.201158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:45.201191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-17T18:15:45.201204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-17T18:15:45.201222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:15:45.201251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-17T18:15:45.201262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-17T18:15:45.201297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:15:45.203053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:15:45.203092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:380:2348] TestWaitNotification: OK eventTxId 101 2025-04-17T18:15:45.203492Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:45.203716Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 213us result status StatusSuccess 2025-04-17T18:15:45.204095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> BuildStatsHistogram::Many_Serial [GOOD] >> KqpJoin::IdxLookupPartialWithTempTable >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-04-17T18:15:05.889746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347300960230490:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:05.889814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dad/r3tmp/tmplGM3gY/pdisk_1.dat 2025-04-17T18:15:06.285024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:06.342002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:06.342128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:06.343848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17689 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:06.615721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.643039Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.654092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:06.661452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:06.808958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:06.856977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.187101Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347317727975991:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:09.204008Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dad/r3tmp/tmpnJTwMt/pdisk_1.dat 2025-04-17T18:15:09.326794Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:09.348378Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:09.348453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:09.349767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4821 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:09.539088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:09.560475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:09.629215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:09.673138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.590808Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347331067994565:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.591465Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dad/r3tmp/tmpBJ2crC/pdisk_1.dat 2025-04-17T18:15:12.808408Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.820777Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.820828Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.822150Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6204 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:13.059475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.078602Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.098928Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:13.109089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.220226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.276499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.298243Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347345077387151:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.298310Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dad/r3tmp/tmp2kyQdB/pdisk_1.dat 2025-04-17T18:15:16.442043Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:16.464609Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:16.464708Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:16.466000Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27104 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Scheme ... 7968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4865 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:28.030409Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.036812Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.054622Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.149610Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:28.241789Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dad/r3tmp/tmpMDm9Lt/pdisk_1.dat 2025-04-17T18:15:32.076082Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:15:32.207622Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.207724Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.226630Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:32.230937Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30062 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:32.491107Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.498967Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.513083Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.593722Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.670156Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:36.235054Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347433150570169:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:36.235118Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dad/r3tmp/tmpWQw5Wu/pdisk_1.dat 2025-04-17T18:15:36.378614Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:36.405659Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:36.405756Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:36.407291Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17386 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:36.700192Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.714580Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.731389Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.808929Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.869328Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:40.643672Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347450645729682:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:40.643775Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dad/r3tmp/tmpFhMhfd/pdisk_1.dat 2025-04-17T18:15:40.769022Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:40.800929Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:40.801033Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:40.802663Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9296 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:41.055111Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:41.060701Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:41.074044Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:41.147624Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:41.210509Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
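A minimal sketch, written for this report rather than taken from any ydb tooling: the "Test command err" blocks above interleave thousands of log records on a handful of wrapped lines, and the helper below splits one such block back into records. The prefix regex and the field names (ts, node, component, level) are inferred from the lines in this report; the severity set is just what this report happens to contain.

    import re
    import sys
    from collections import Counter

    # Record prefix as it appears above, e.g.
    # "2025-04-17T18:15:45.273305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ..."
    RECORD = re.compile(
        r'(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z) '  # timestamp
        r'node (?P<node>\d+) '                                   # node id
        r':(?P<component>[A-Z0-9_]+) '                           # :COMPONENT
        r'(?P<level>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT): '  # severity
    )

    def split_records(blob):
        """Yield (ts, node, component, level, message) from one log blob."""
        starts = list(RECORD.finditer(blob))
        for cur, nxt in zip(starts, starts[1:] + [None]):
            end = nxt.start() if nxt is not None else len(blob)
            yield (cur['ts'], int(cur['node']), cur['component'],
                   cur['level'], blob[cur.end():end].strip())

    if __name__ == '__main__':
        # Read a pasted block on stdin and show where the volume comes from.
        counts = Counter((c, l) for _, _, c, l, _ in split_records(sys.stdin.read()))
        for (component, level), n in counts.most_common(10):
            print(f'{n:6d}  {component}  {level}')

Fed one of these blocks on stdin, it prints the dominant (component, level) pairs — a quick way to confirm that, for example, most of the volume in the schemeshard TTL sections is FLAT_TX_SCHEMESHARD DEBUG chatter.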
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-04-17T18:15:32.615686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347413643547010:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.616163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d36/r3tmp/tmp114UL1/pdisk_1.dat 2025-04-17T18:15:33.053499Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:33.072671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:33.072757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.074901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11349 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.328725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:33.356697Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:33.361729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.471483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.513431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:37.601269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347413643547010:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:37.601338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:39.631463Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347447556229715:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:39.631521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d36/r3tmp/tmpJDXKa5/pdisk_1.dat 2025-04-17T18:15:39.719862Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:39.764268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:39.764342Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:39.766043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62567 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:39.896382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:39.909862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.954108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.994491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:44.632894Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347447556229715:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:44.632959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> KqpJoin::JoinLeftPureFull >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 31172, MsgBus: 11779 2025-04-17T18:15:41.333487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347452543114680:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:41.333554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e54/r3tmp/tmpmDJCer/pdisk_1.dat 2025-04-17T18:15:41.646563Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31172, node 1 2025-04-17T18:15:41.709939Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:41.709973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:41.709981Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:41.710055Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:15:41.716663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:41.716801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:41.718595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11779 TClient is connected to server localhost:11779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:42.170225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:42.203927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:42.329301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:42.466806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:42.528120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:44.149453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347465428018339:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:44.149592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:44.443005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.509591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.537711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.566706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.594621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.625287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.654287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.697603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347465428019004:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:44.697719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:44.697750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347465428019009:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:44.700321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:44.707850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347465428019011:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:44.793558Z node 1 :TX_PROXY ERROR: Actor# [1:7494347465428019067:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:45.655620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-04-17T18:15:41.543887Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.564566Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.564983Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.565266Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.612263Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.618937Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.701994Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.703679Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.703771Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.703816Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.705068Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.705465Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.705538Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:15:41.705880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.705925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.790828Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.829934Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:15:41.830152Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.830277Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:15:41.830315Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.830350Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.830385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-17T18:15:41.830541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.830605Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.830889Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:41.831009Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:41.831136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.831196Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.831279Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:41.831323Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.831360Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.831397Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.831441Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.831539Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.831576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.831627Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:15:41.834313Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.834379Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.834477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.834634Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.834689Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:15:41.834763Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.834836Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.834877Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:15:41.834929Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.834965Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.835277Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.835316Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 
executing on unit StoreSchemeTx 2025-04-17T18:15:41.835353Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.835386Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.835436Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.835489Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:41.835530Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:15:41.835562Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.835605Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:15:41.854908Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.854988Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.855023Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.855061Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.855130Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.855668Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.855718Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.855763Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:15:41.855846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.855879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.856004Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.856054Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.856102Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.856143Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:15:41.860038Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.860107Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.860327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.860373Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.860421Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.860472Z node 1 
:TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.860507Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.860554Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.860591Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.860633Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.860666Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.860713Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.860747Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.860898Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:15:41.860928Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.860951Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.860970Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.861004Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.861063Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.861090Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:15:41.861125Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:41.861154Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:41.861190Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:15:41.861239Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:15:41.861271Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 
v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:15:45.513089Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-17T18:15:45.513118Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:15:45.513138Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:15:45.513161Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:15:45.513180Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:45.513210Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:15:45.513245Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2025-04-17T18:15:45.513270Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T18:15:45.513371Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:45.513401Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:45.513527Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:15:45.541354Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-17T18:15:45.541452Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:15:45.541524Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:45.541570Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:15:45.541609Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:45.541652Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-17T18:15:45.541739Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:45.541767Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:45.541801Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-17T18:15:45.541836Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-17T18:15:45.541880Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:15:45.541906Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-17T18:15:45.541940Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-17T18:15:45.580601Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:45.580680Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-17T18:15:45.580734Z node 3 :TX_DATASHARD TRACE: Propose transaction 
complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-04-17T18:15:45.580830Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:46.495405Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T18:15:46.495510Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T18:15:46.495910Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:297:2278], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:46.495953Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:46.496011Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:296:2277], serverId# [3:297:2278], sessionId# [0:0:0] 2025-04-17T18:15:46.657670Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-04-17T18:15:46.659792Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:46.659927Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:46.700111Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-04-17T18:15:46.700240Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-17T18:15:46.700281Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-04-17T18:15:46.700320Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:46.700356Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:46.700413Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:15:46.700482Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2025-04-17T18:15:46.700527Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-17T18:15:46.700550Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit 
BuildAndWaitDependencies 2025-04-17T18:15:46.700572Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:15:46.700596Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:46.700650Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:15:46.700699Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2025-04-17T18:15:46.700747Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-17T18:15:46.700896Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:46.700949Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:46.701009Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:15:46.724786Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2025-04-17T18:15:46.730618Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2025-04-17T18:15:46.730725Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-17T18:15:46.731104Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:46.731142Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-17T18:15:46.777099Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2025-04-17T18:15:46.777214Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:15:46.868916Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-04-17T18:15:46.869018Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:15:46.869096Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:46.869132Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:15:46.869173Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:46.869212Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-04-17T18:15:46.869256Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2025-04-17T18:15:46.869285Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:46.869322Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2025-04-17T18:15:46.869356Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CompletedOperations 
2025-04-17T18:15:46.869408Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-17T18:15:46.869438Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-04-17T18:15:46.869471Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2025-04-17T18:15:46.946008Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:46.946078Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-04-17T18:15:46.946138Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-17T18:15:46.946243Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:47.007672Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:15:47.007725Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-17T18:15:47.012751Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerGcApplied >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 2106459 49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000 3547100 49916 64 44 Expected: 
24000 3547100 49916 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 Expected: 24000 4054050 29361 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = (400354, 133459) value = 120047 
(actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 (actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 
62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) ... 
140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 
[0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) 
(330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) 
value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:47.054456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:47.054518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:47.054565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:47.054599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:47.054634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:47.054656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:47.054710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:47.054775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:47.055009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:47.127418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:47.127466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:47.140671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:47.140974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:47.141163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:47.148454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:47.148613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2025-04-17T18:15:47.149118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:47.149360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:47.152045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:47.153240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:47.153311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:47.153485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:47.153529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:47.153565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:47.153708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.161710Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:47.279828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:47.280090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.280332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:47.280580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:47.280637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.283238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:47.283388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:47.283587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.283652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:47.283695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:47.283732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:47.285737Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.285799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:47.285839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:47.287789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.287838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.287894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:47.287941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:47.291665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:47.293498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:47.293747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:47.294772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:47.294910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:47.294971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:47.295274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:47.295340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:47.295518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:47.295610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:47.297524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:47.297569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:47.297753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-17T18:15:47.297797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:47.298227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.298275Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:47.298364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:47.298406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:47.298445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:47.298474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:47.298513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:47.298569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:47.298607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:47.298645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:47.298722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:47.298768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:47.298816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:47.300545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:47.300647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:47.300710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
78944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-04-17T18:15:47.506834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:47.506946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:47.507020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-17T18:15:47.507141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-17T18:15:47.507258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:47.507334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-17T18:15:47.519111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:47.519165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:47.519320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:15:47.519475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:47.519507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:15:47.519540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:15:47.520761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.520812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:15:47.521706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:47.521794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:47.521825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:15:47.521866Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:15:47.521898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:47.522532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:47.522589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:47.522610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:15:47.522630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:15:47.522649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:47.522700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T18:15:47.523092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1362 } } 2025-04-17T18:15:47.523125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:47.523210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1362 } } 2025-04-17T18:15:47.523276Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1362 } } 2025-04-17T18:15:47.524068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:47.524101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:47.524200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:47.524241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:47.524319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { 
RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:47.524376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:47.524413Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.524440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:15:47.524468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:15:47.527144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:47.527347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:47.527709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.527967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.528191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.528231Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:15:47.528322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:15:47.528361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:15:47.528393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:15:47.528415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:15:47.528442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T18:15:47.528494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 101 2025-04-17T18:15:47.528527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:15:47.528556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:15:47.528580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:15:47.528679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:15:47.529901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:15:47.529960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2313] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-17T18:15:47.532895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: 
"modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:47.533107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:47.534727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2025-04-17T18:15:47.537286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:47.537409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> TLocksTest::CK_Range_BrokenLock [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> TLocksTest::CK_Range_BrokenLockInf >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore >> TObjectStorageListingTest::SchemaChecks [GOOD] >> TLocksTest::GoodSameShardLock [GOOD] >> KqpJoin::FullOuterJoin [GOOD] >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:47.948057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:47.948144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:47.948221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:47.948258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:47.948333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:47.948361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:47.948432Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:47.948504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:47.948802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:48.031034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:48.031100Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:48.047453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:48.047890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:48.048090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:48.056490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:48.056681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:48.057347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:48.057589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:48.060471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:48.061719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:48.061780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:48.061969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:48.062039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:48.062089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:48.062254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.068997Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:48.208508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:48.208704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.208850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:48.209018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-04-17T18:15:48.209061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.211533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:48.211668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:48.211851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.211939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:48.211977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:48.212012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:48.213846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.213911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:48.213946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:48.215488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.215537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.215588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:48.215637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:48.218748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:48.220195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:48.220332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:48.221001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:48.221112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:48.221150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:48.221349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:48.221389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:48.221508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:48.221576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:48.223038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:48.223070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:48.223214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:48.223257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:48.223559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.223595Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:48.223680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:48.223706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:48.223733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:48.223756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:48.223787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:48.223828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:48.223855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:48.223878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:48.223923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:48.223950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:48.223984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:48.225212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:48.225294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:48.225332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 
1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:15:48.225376Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:15:48.225412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:48.225494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:15:48.227623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:15:48.228001Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744913748.228864 542322 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-04-17T18:15:48.229170Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T18:15:48.243732Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T18:15:48.245621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:48.245891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:48.246026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-17T18:15:48.246450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1744913748 seconds (20195 days, 55 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2025-04-17T18:15:48.247164Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:15:48.249819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1744913748 seconds (20195 days, 55 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:48.249945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1744913748 seconds (20195 days, 55 years). 
The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-17T18:15:48.250425Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> OlapEstimationRowsCorrectness::TPCH10 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> KqpExtractPredicateLookup::PointJoin [GOOD] >> KqpExtractPredicateLookup::SqlInJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 61563, MsgBus: 30716 2025-04-17T18:15:43.087128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347463512193252:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:43.087226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e52/r3tmp/tmpVoD7WW/pdisk_1.dat 2025-04-17T18:15:43.402885Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61563, node 1 2025-04-17T18:15:43.460672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:43.460702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:43.460712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:43.460864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:15:43.469086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:43.469225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:43.470840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30716 TClient is connected to server localhost:30716 WaitRootIsUp 'Root'... 
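The two schemeshard TTL rejections above both happen at propose time, before anything executes: AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL sends a single ESchemeOpAlterTable that drops `modified_at` while also naming it as the TTL column (StatusInvalidParameter, "Cannot enable TTL on dropped column"), and CreateTableShouldFailOnBeforeEpochTTL sets ExpireAfterSeconds to 3153600000 (~100 years), which exceeds the seconds elapsed since 1970, so the expiration cutoff would fall before the epoch (StatusSchemeError). A minimal YQL sketch of the same two shapes follows — the tests drive the schemeshard protobuf API directly, so the YQL TTL syntax and the combined ALTER below are assumptions, not what the tests literally run:

    -- Hypothetical sketch: rejected because the same ALTER drops the
    -- column that the TTL clause refers to.
    ALTER TABLE TTLEnabledTable
        DROP COLUMN modified_at,
        SET (TTL = Interval("PT0S") ON modified_at);

    -- Hypothetical sketch: rejected because a ~100-year expire-after
    -- exceeds time-since-epoch ("The ttl behaviour is undefined
    -- before 1970").
    CREATE TABLE TTLEnabledTable (
        key Uint64,
        modified_at Timestamp,
        PRIMARY KEY (key)
    ) WITH (TTL = Interval("P36500D") ON modified_at);
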
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:43.958858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:43.971929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:43.980866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:44.129723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:44.265240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:44.335809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:46.045306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347476397096919:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:46.045388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:46.347852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:46.373807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:46.400913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:46.427254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:46.454086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:46.527103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:46.557732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:46.616706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347476397097584:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:46.616781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:46.616801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347476397097590:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:46.619607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:46.627055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347476397097592:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:46.681806Z node 1 :TX_PROXY ERROR: Actor# [1:7494347476397097643:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:47.547004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.579191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.629773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:48.118254Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347463512193252:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:48.118529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2025-04-17T18:15:17.921544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347350103561676:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:17.921610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d71/r3tmp/tmpRmBOt9/pdisk_1.dat 2025-04-17T18:15:18.269868Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22840, node 1 2025-04-17T18:15:18.339519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:18.339638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:18.343434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:18.441608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:18.441636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:18.441647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:18.441757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18229 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:18.747508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:18.775417Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:18.790049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.920490Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347350103561676:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:22.923600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:15:33.268189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:15:33.268220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:45.285182Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347469498266513:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:45.285247Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d71/r3tmp/tmpad2W4y/pdisk_1.dat 2025-04-17T18:15:45.411490Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:45.445973Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:45.446104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:45.448031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28066, node 2 2025-04-17T18:15:45.540792Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:45.540814Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:45.540820Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:45.540940Z node 2 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7055 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:45.808164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.818588Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.853541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2025-04-17T18:15:12.060614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347327991911816:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.060661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d86/r3tmp/tmpO9h30z/pdisk_1.dat 2025-04-17T18:15:12.466604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:12.502769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:12.502883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:12.507136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4346 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:12.761136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.794770Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.817257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.968261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.045348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:15.476711Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347341626741400:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:15.510946Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d86/r3tmp/tmpIXDa7r/pdisk_1.dat 2025-04-17T18:15:15.694122Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:15.696338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:15.696396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:15.697762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12626 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:15.907716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:15.918764Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:15.943986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.025353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:16.100882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:18.863540Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347355921255140:2279];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d86/r3tmp/tmpED9ijk/pdisk_1.dat 2025-04-17T18:15:18.949205Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:15:19.009300Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:19.021926Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:19.022000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:19.022839Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2349 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:19.229945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.234498Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.246722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.321832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:19.381805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:22.137168Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347371249220906:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:22.137216Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d86/r3tmp/tmp9urNKS/pdisk_1.dat 2025-04-17T18:15:22.288789Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:22.297115Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:22.297189Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:22.302023Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16916 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025 ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.478204Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5724 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.739262Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:33.756981Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.813261Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:33.858515Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:37.064718Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347438980319011:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:37.064766Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d86/r3tmp/tmpJXqJBl/pdisk_1.dat 2025-04-17T18:15:37.173785Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:37.173871Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:37.175927Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:37.186793Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62057 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:37.415288Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.435907Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.514247Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.587200Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:41.167988Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347452971490819:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:41.168073Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d86/r3tmp/tmpdpuBSJ/pdisk_1.dat 2025-04-17T18:15:41.334850Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.356679Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:41.356773Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:41.358891Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6131 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:41.626277Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:41.647689Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:41.724970Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:41.782124Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:45.007285Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347471117847222:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:45.007472Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d86/r3tmp/tmpZFhuAF/pdisk_1.dat 2025-04-17T18:15:45.162215Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:45.179199Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:45.179284Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:45.180558Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2176 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:45.415546Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.423548Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.437483Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:45.445763Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.533054Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
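Each block above follows the same fixture pattern: a fresh single-node cluster is started, the harness polls WaitRootIsUp 'dc-1', issues TClient::Ls against dc-1, and then applies the schema operations, with the METADATA_PROVIDER and HIVE warnings being ordinary startup noise. For reference, a rough external analogue of that readiness check using the public ydb Python SDK follows; the endpoint and database values are placeholders, not values from this log.

# Rough analogue of the WaitRootIsUp / TClient::Ls pattern above, using the
# public ydb Python SDK. Endpoint and database are placeholder assumptions.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/dc-1")
try:
    # Block until discovery succeeds, i.e. the root is up and serving.
    driver.wait(timeout=15)
    # Describe the root path -- the SDK-level counterpart of TClient::Ls dc-1.
    entry = driver.scheme_client.describe_path("/dc-1")
    print(entry.name, entry.type)
finally:
    driver.stop()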
2025-04-17T18:15:45.615116Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
>> TLocksTest::MultipleLocks [GOOD]
>> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD]
>> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD]
>> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD]
>> TReplicationTests::DropReplicationWithUnknownSecret
>> KqpJoin::JoinLeftPureCross
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:15:49.807989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:49.808070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:49.808106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:49.808135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:49.808172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:49.808195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:49.808255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:49.808319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:49.808593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:49.889504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:49.889560Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:49.903708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:49.903833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:49.904001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:49.909356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:49.909624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:49.910160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:49.910456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944,
LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:49.913993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:49.915190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:49.915250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:49.915378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:49.915423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:49.915462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:49.915638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:49.921373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:15:50.044155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:50.044367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.044539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:50.044698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:50.044736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.046398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.046492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:50.046621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.046663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:50.046686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:50.046708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:50.048202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.048240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:50.048263Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:50.050509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.050556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.050606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.050659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.058636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:50.060342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:50.060467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:50.061243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.061335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:50.061366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.061554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:50.061610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.061767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:50.061882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:50.063519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:50.063560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:50.063692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.063721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:50.063969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.064011Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:50.064107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:50.064144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.064178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:50.064198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.064224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:50.064279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.064310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:50.064335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:50.064385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:50.064441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:50.064479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:50.065878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:50.066026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:50.066067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-17T18:15:50.435738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2025-04-17T18:15:50.436350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.436499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:50.436559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-17T18:15:50.436838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2025-04-17T18:15:50.436987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:15:50.440453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:50.440508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:15:50.440747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.440790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-17T18:15:50.441196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.441251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:15:50.441768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:15:50.441863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:15:50.441897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:15:50.441936Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:15:50.441980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:50.442086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready 
parts: 0/1, is published: true 2025-04-17T18:15:50.444885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:15:50.456950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1107 } } 2025-04-17T18:15:50.457004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:50.457140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1107 } } 2025-04-17T18:15:50.457267Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1107 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-17T18:15:50.458251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:15:50.458315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:50.458466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:15:50.458516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:50.458624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-17T18:15:50.458693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.458732Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.458782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:15:50.458823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-17T18:15:50.461940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.462291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.462584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.462653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T18:15:50.462787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:15:50.462825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:15:50.462866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:15:50.462906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:15:50.462953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-17T18:15:50.463014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2317] message: TxId: 104 2025-04-17T18:15:50.463069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:15:50.463108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:15:50.463139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:15:50.463254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:15:50.465244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:15:50.465312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:446:2418] TestWaitNotification: OK eventTxId 104 2025-04-17T18:15:50.465929Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:50.466220Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 268us result status StatusSuccess 2025-04-17T18:15:50.466641Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|92.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
>> TLocksTest::BrokenNullLock [GOOD]
>> TReplicationTests::CopyReplicatedTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:50.347846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:50.347944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:50.347990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:50.348028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:50.348076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:50.348118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:50.348198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:50.348274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:50.348586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:50.430244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:50.430314Z node 1 :IMPORT WARN: Table profiles were not loaded
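The TSchemeShardTTLTests::AlterTableShouldSuccess trace above ends with a DescribeScheme result showing TableSchemaVersion: 3 and TTLSettings { Disabled { } }, i.e. TTL on modified_at was switched on and later off again through ALTER TABLE. Below is a rough sketch of such an enable/disable sequence as YQL issued through the ydb Python SDK; the connection parameters are placeholders and the exact statements are an assumption based on the documented TTL syntax, not taken from the test source.

# Hedged sketch of a TTL enable/disable sequence like the one implied by the
# AlterTableShouldSuccess trace above. Endpoint/database are placeholders.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/MyRoot")
driver.wait(timeout=15)
pool = ydb.SessionPool(driver)

def alter_ttl(session):
    # Enable row TTL on the Timestamp column (assumed YQL TTL syntax).
    session.execute_scheme(
        'ALTER TABLE TTLEnabledTable SET (TTL = Interval("PT1H") ON modified_at);'
    )
    # Drop the TTL setting again; the describe output above shows the
    # resulting state as TTLSettings { Disabled { } }.
    session.execute_scheme("ALTER TABLE TTLEnabledTable RESET (TTL);")

pool.retry_operation_sync(alter_ttl)
driver.stop()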
2025-04-17T18:15:50.445718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:50.446110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:50.446323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:50.454102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:50.454349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:50.455007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.455261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:50.457762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.458952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:50.459036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.459217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:50.459262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:50.459304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:50.459423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.466082Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:50.589252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:50.589531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.589737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:50.589977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:50.590055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.592688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.592832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:50.593043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.593112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:50.593149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:50.593205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:50.595319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.595379Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:50.595415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:50.597372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.597418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.597472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.597513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.607171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:50.610981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:50.611174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:50.612238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.612395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:50.612447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.612790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:50.612848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.613023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:50.613119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:50.615442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:50.615490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:50.615701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.615744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:50.616135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.616184Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:50.616277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:50.616312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.616348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:50.616382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.616416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:50.616479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.616522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:50.616552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:50.616619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:50.616670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:50.616708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:50.618472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:50.618603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:50.618642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:15:50.618703Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:15:50.618751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:50.618845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:15:50.623146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1 2025-04-17T18:15:50.623678Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744913750.624833 543432 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-04-17T18:15:50.625246Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T18:15:50.642608Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T18:15:50.645290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:50.645701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.646144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-04-17T18:15:50.647013Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:15:50.650136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:50.650327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-17T18:15:50.650890Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1744913750.651423 543432 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-04-17T18:15:50.654028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:50.654350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.654581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, 
reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2025-04-17T18:15:50.658452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:50.658591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:50.329854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:50.329968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:50.330031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:50.330067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:50.330130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:50.330164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:50.330273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:50.330350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:50.330683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:50.411486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:50.411538Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:50.426440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:50.426838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:50.427019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:50.435791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:50.435957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:50.436507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured
yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.436730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:50.439301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.440464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:50.440520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.440675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:50.440715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:50.440749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:50.440857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.447002Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:50.572885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:50.573140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.573316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:50.573521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:50.573579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.579475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.579610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:50.579805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.579861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:50.579903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:50.579943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:50.581823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T18:15:50.581876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:50.581914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:50.583761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.583807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.583852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.583896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.592820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:50.594934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:50.595105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:50.596071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:50.596225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:50.596270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.596578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:50.596629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:50.596795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:50.596872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:50.599117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:50.599171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:50.599366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:50.599424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:50.599779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.599822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:50.599908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:50.599941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.599976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:50.600016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.600056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:50.600109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:50.600141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:50.600167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:50.600233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:50.600282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:50.600313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:50.602060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:50.602260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:50.602317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:15:50.602367Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:15:50.602413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:50.602510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:15:50.606235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:15:50.606724Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:15:50.607998Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T18:15:50.625252Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T18:15:50.627870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:50.628209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:50.630081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2025-04-17T18:15:50.631351Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:15:50.635011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:50.635194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-17T18:15:50.635776Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 4257, MsgBus: 3166 2025-04-17T18:15:44.454494Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347467959719384:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:44.454703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e50/r3tmp/tmpgYCh3W/pdisk_1.dat 2025-04-17T18:15:44.773970Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4257, node 1 2025-04-17T18:15:44.830136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:44.830155Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:44.830170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:44.830391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:15:44.832366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:44.832469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:44.834839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3166 TClient is connected to server localhost:3166 WaitRootIsUp 'Root'... 
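The trace that begins above belongs to KqpJoin::LeftJoinPushdownPredicate_NoPushdown. Judging by the test name alone, it checks a LEFT JOIN whose filter must stay above the join rather than being pushed into the right-hand scan; one query shape with that property, sketched with hypothetical table and column names, is:

    SELECT l.id, r.payload
    FROM left_t AS l
    LEFT JOIN right_t AS r ON l.id = r.id
    WHERE r.payload IS NULL;   -- predicate on the nullable side: pushing it below the join would change the result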
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:45.269817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.299700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.412153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.580662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.661536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.427039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347480844622978:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:47.427122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:47.707527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.745290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.776654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.847743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.881591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.921088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.953860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:48.033297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347485139590938:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:48.033395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:48.033462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347485139590943:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:48.037061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:48.046672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347485139590945:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:48.127403Z node 1 :TX_PROXY ERROR: Actor# [1:7494347485139591003:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:49.115284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.146539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.214735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.448082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347467959719384:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:49.448620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:43.580893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:43.581018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:43.581062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:43.581111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:43.582128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:43.582169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:43.582293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:43.582369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-17T18:15:43.583434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:43.670651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:43.670708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:43.687569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:43.688065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:43.688259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:43.697701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:43.698012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:43.701702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:43.702747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:43.708969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:43.715820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:43.715908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:43.716128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:43.716176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:43.716227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:43.716425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.728100Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:43.832745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:43.832992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.833205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:43.833518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:43.833570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.835826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-17T18:15:43.835978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:43.836150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.836217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:43.836254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:43.836307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:43.838349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.838403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:43.838441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:43.842057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.842101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.842164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:43.842238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.846531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:43.848240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:43.848845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:43.849884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:43.850012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:43.850058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:43.851073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:43.851136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:43.851534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:43.851621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:43.853587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:43.853651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:43.853800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:43.853845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:43.854242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.854367Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:43.854486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:43.854537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.854576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:43.854609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.854644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:43.854685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.854719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:43.854746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:43.854805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:43.854838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:43.854869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:43.856568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:43.856661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:43.856712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
BUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:15:51.102376Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T18:15:51.112031Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:51.112261Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:15:51.114572Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1724 } } 2025-04-17T18:15:51.114623Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T18:15:51.114764Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1724 } } 2025-04-17T18:15:51.114871Z node 8 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1724 } } 2025-04-17T18:15:51.115445Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 410 RawX2: 34359740747 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:51.115501Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T18:15:51.115684Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 410 RawX2: 34359740747 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:51.115771Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:51.115891Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 410 RawX2: 34359740747 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:51.115966Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:51.116051Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-17T18:15:51.118978Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:51.119444Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:51.132046Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 34359740661 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:51.132112Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:51.132227Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 34359740661 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:51.132264Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:51.132327Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 34359740661 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:51.132376Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:51.132425Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:51.132469Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:15:51.132510Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:15:51.132546Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T18:15:51.134392Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:51.134768Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:51.134830Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-17T18:15:51.134892Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:15:51.135059Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-17T18:15:51.135155Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-17T18:15:51.135199Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 102:0 240 -> 240 2025-04-17T18:15:51.136912Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:51.136952Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:15:51.137070Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:51.137117Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:51.137161Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:51.137192Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:51.137228Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:15:51.137296Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:335:2314] message: TxId: 102 2025-04-17T18:15:51.137355Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:51.137402Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:15:51.137434Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:15:51.137566Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:15:51.137606Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:15:51.139034Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:15:51.139098Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:440:2401] TestWaitNotification: OK eventTxId 102 2025-04-17T18:15:51.139588Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:51.139823Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 265us result status StatusSuccess 2025-04-17T18:15:51.140149Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-04-17T18:15:25.781500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347384614211895:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.782479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d56/r3tmp/tmpB0OGOL/pdisk_1.dat 2025-04-17T18:15:26.253432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:26.253579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:26.256290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:26.285736Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63791 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
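The replication traces above (TReplicationTests::CopyReplicatedTable and TReplicationTests::DropReplicationWithUnknownSecret) manage async replication instances through schemeshard operations. At the YQL level such an instance is declared roughly as follows (a sketch assuming current YDB syntax; the endpoint, paths, and secret name are placeholders):

    CREATE ASYNC REPLICATION my_replication
    FOR `/Root/Table` AS `/Root/Replica`
    WITH (
        CONNECTION_STRING = "grpcs://source-host:2135/?database=/Root",
        TOKEN_SECRET_NAME = "my_secret"   -- DropReplicationWithUnknownSecret covers dropping when this secret no longer exists
    );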
2025-04-17T18:15:26.547462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.562952Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.576618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.695858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:26.782644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:29.032636Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347404641703873:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:29.032672Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d56/r3tmp/tmpWlW244/pdisk_1.dat 2025-04-17T18:15:29.179262Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:29.191027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:29.191098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:29.192177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19808 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:29.341868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:29.364232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:29.420261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:29.494941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.207624Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347414921497011:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.207843Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d56/r3tmp/tmpVpf1He/pdisk_1.dat 2025-04-17T18:15:32.427440Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:32.433082Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.433157Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.434559Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6701 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:32.618652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:32.634734Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:32.641979Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:32.713434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.769030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.838167Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347428899559698:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:35.838244Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d56/r3tmp/tmp7layco/pdisk_1.dat 2025-04-17T18:15:35.945384Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:35.988932Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:35.989017Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:35.990390Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14178 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:36.198726Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.204830Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.213829Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:36.218866Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.287567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:36.369061Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.487774Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7494347443504735036:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:39.487840Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d56/r3tmp/tmpO6VLX9/pdisk_1.dat 2025-04-17T18:15:39.578704Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:39.619382Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:39.619479Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:39.620993Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1162 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:39.801610Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:39.821097Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.891162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.935200Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:42.831746Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7494347459999453426:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:42.831818Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d56/r3tmp/tmp47sfOp/pdisk_1.dat 2025-04-17T18:15:42.943875Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:42.968120Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:42.968217Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:42.969906Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23587 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:43.168891Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:43.188483Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:43.249008Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:43.321246Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:46.679867Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347476799908184:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:46.680113Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d56/r3tmp/tmpvcYP9J/pdisk_1.dat 2025-04-17T18:15:46.816228Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:46.830095Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:46.830223Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:46.831750Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27453 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:47.045939Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:47.065005Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:47.119568Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.190163Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpJoin::IdxLookupPartialWithTempTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:43.580990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:43.581097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:43.581138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:43.581187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:43.582155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:43.582219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:43.582292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:43.582361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:43.583430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:43.670640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:43.670698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:43.691998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:43.692422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:43.692584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:43.702125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:43.702354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:43.702939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:43.703112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:43.708757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:43.715820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:43.715903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:43.716116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-17T18:15:43.716161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:43.716248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:43.716355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.723207Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:43.827874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:43.829387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.830343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:43.831632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:43.831710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.834804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:43.834923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:43.835103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.835291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:43.835325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:43.835383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:43.838584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.838632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:43.838662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:43.841760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.841800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.841856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-17T18:15:43.841914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.845885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:43.847893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:43.848836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:43.849792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:43.849913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:43.849957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:43.851078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:43.851135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:43.851296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:43.851389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:43.853441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:43.853482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:43.853632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:43.853670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:43.854151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:43.854221Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:43.854306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:43.854351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.854387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-17T18:15:43.854419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.854449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:43.854483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:43.854511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:43.854537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:43.854587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:43.854620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:43.854651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:43.856513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:43.856615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:43.856648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... E: StateWork, received event# 2146435072, Sender [9:124:2150], Recipient [9:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T18:15:51.968343Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T18:15:51.968404Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:51.968433Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:51.968574Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:15:51.968738Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:51.968769Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-17T18:15:51.968806Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:15:51.969312Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:51.969355Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:15:51.969492Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:15:51.969528Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:51.969575Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:51.969626Z node 
9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:51.969667Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:51.969715Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-17T18:15:51.969773Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:51.969824Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:15:51.969865Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:15:51.970033Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:15:51.970088Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-17T18:15:51.970141Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-17T18:15:51.970205Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-17T18:15:51.971004Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:205:2207], Recipient [9:124:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-04-17T18:15:51.971046Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T18:15:51.971114Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:51.971184Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:51.971226Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:15:51.971283Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-17T18:15:51.971334Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:51.971432Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:15:51.972470Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:205:2207], Recipient [9:124:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-04-17T18:15:51.972507Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-17T18:15:51.972565Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:51.972637Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:51.972663Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:15:51.972692Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-17T18:15:51.972721Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:15:51.972794Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-17T18:15:51.972838Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-17T18:15:51.973704Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435084, Sender [9:124:2150], Recipient [9:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-17T18:15:51.973745Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-17T18:15:51.973810Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:15:51.973865Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:15:51.973942Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:51.975631Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:15:51.976078Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:51.976112Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:15:51.977564Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:51.977600Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-17T18:15:51.977684Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:15:51.977923Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:15:51.977977Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:15:51.978473Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:447:2402], Recipient [9:124:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:51.978535Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:51.978582Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T18:15:51.978733Z node 9 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 271124996, Sender [9:363:2342], Recipient [9:124:2150]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-04-17T18:15:51.978768Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T18:15:51.978848Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:15:51.978952Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:15:51.978996Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:445:2400] 2025-04-17T18:15:51.979184Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:447:2402], Recipient [9:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:15:51.979223Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-17T18:15:51.979267Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-17T18:15:51.979681Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:448:2403], Recipient [9:124:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-17T18:15:51.979741Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-17T18:15:51.979861Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:51.980059Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 211us result status StatusPathDoesNotExist 2025-04-17T18:15:51.980231Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |92.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2025-04-17T18:15:12.861511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347329462698174:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:12.862383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7b/r3tmp/tmpTai7qY/pdisk_1.dat 2025-04-17T18:15:13.302908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:13.308819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:13.308928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:13.310609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20599 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:13.566026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:13.592993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.714297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:13.780907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:16.268564Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347347386813262:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:16.269708Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7b/r3tmp/tmp2hEDxP/pdisk_1.dat 2025-04-17T18:15:16.447824Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:16.458928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:16.458993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:16.463062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2839 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:16.682235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.690367Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.701961Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:16.709298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.779375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:16.822343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:19.663058Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347359190887795:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:19.663091Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7b/r3tmp/tmpK7TX6f/pdisk_1.dat 2025-04-17T18:15:19.883116Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:19.887710Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:19.887782Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:19.890475Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24805 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:20.087541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.102480Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.122808Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:20.128062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.207112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:20.296522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:23.285329Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347375695757382:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:23.285398Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7b/r3tmp/tmpQa47aJ/pdisk_1.dat 2025-04-17T18:15:23.525503Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:23.547331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:23.547387Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:23.549865Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21017 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Schem ... ecting -> Connected TClient is connected to server localhost:24922 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:35.015499Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.020810Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:35.029356Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:35.033681Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.112533Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.186154Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:38.623348Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347441961152311:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:38.623780Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7b/r3tmp/tmpwm637K/pdisk_1.dat 2025-04-17T18:15:38.757655Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:38.790014Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:38.790124Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:38.791971Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28486 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:39.033114Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:39.050043Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:39.111375Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.157857Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:42.573491Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347456568175609:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:42.573566Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7b/r3tmp/tmp2ryL4C/pdisk_1.dat 2025-04-17T18:15:42.695622Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:42.720666Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:42.720770Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:42.722260Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20327 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:42.946335Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:42.963011Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:43.075665Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:43.144960Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:47.023281Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347480586629631:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:47.023373Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d7b/r3tmp/tmpEykOMg/pdisk_1.dat 2025-04-17T18:15:47.156712Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:47.183188Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:47.183299Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:47.184787Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24012 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:47.460987Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.477377Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:47.483458Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.577658Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.637884Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:52.168131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:52.168206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:52.168242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:52.168266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:52.168302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:52.168322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:52.168392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:52.168446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:52.168788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:52.253376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:52.253433Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:52.269533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:52.269953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:52.270159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:52.278580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:52.278777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:52.279408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:52.279671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:52.282383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:52.283637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:52.283708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T18:15:52.283872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:52.283916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:52.283954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:52.284069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.294912Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:52.427888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:52.428132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.428317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:52.428534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:52.428590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.430953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:52.431094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:52.431269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.431325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:52.431363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:52.431395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:52.433223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.433275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:52.433306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:52.435000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.435046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.435111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:52.435159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:52.438784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:52.440502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:52.440672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:52.441603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:52.441736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:52.441781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:52.442102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:52.442153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:52.442318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:52.442411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:52.444310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:52.444352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:52.444536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:52.444579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:52.444927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.444968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:52.445055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:52.445098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:52.445134Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:52.445163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:52.445199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:52.445268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:52.445304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:52.445342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:52.445397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:52.445445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:52.445477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:52.447152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:52.447258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:52.447295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... T18:15:52.696165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:52.696221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:52.696422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:15:52.696626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:52.696668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:15:52.696715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:15:52.697995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.698077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:15:52.699367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:52.699484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:52.699529Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:15:52.699584Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:15:52.699629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:52.700510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:52.700606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:15:52.700637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:15:52.700669Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:15:52.700699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:52.700779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T18:15:52.701359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1727 } } 2025-04-17T18:15:52.701401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:52.701523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1727 } } 2025-04-17T18:15:52.701623Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1727 } } 2025-04-17T18:15:52.702848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:52.702909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:52.703065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 
0 Generation: 2 2025-04-17T18:15:52.703112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:52.703195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:15:52.703279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:52.703320Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.703360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:15:52.703407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:15:52.706879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:52.707156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:15:52.707638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.708042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.708400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:15:52.708463Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:15:52.708595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:15:52.708647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:15:52.708693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:15:52.708726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:15:52.708761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T18:15:52.708829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 101 2025-04-17T18:15:52.708880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:15:52.708920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:15:52.708955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:15:52.709091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:15:52.710836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:15:52.710904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2313] TestWaitNotification: OK eventTxId 101 2025-04-17T18:15:52.711484Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:52.711725Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 254us result status StatusSuccess 2025-04-17T18:15:52.712242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> KqpJoin::JoinLeftPureFull [GOOD] >> KqpIndexLookupJoin::LeftOnly-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] Test command err: Trying to start YDB, gRPC: 27900, MsgBus: 26998 2025-04-17T18:15:46.582137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347474580806655:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:46.582699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e4d/r3tmp/tmpSmH9v7/pdisk_1.dat 2025-04-17T18:15:46.835409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:46.852173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:46.852303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:46.853954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27900, node 1 2025-04-17T18:15:46.932972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:46.933003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:46.933013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:46.933232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26998 TClient is connected to server localhost:26998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:47.363950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.396329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.553085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.714360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:47.785013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:49.568104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347487465710324:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:49.568236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:49.846946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.877601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.905269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.933740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.968227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:50.003992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:50.047342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:50.117088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347491760678280:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:50.117169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:50.117385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347491760678285:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:50.121320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:50.131952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347491760678287:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:50.211694Z node 1 :TX_PROXY ERROR: Actor# [1:7494347491760678343:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:51.174296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.205792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.234453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.584819Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347474580806655:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:51.584913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Execution, code: 1060
:3:44: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureFull [GOOD] Test command err: Trying to start YDB, gRPC: 5327, MsgBus: 29427 2025-04-17T18:15:47.654821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347480706492731:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:47.654970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e4a/r3tmp/tmpGKwtNC/pdisk_1.dat 2025-04-17T18:15:48.021628Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5327, node 1 2025-04-17T18:15:48.078522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:48.079178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:48.098519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:48.122910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:48.122935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:48.122945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:48.123089Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29427 TClient is connected to server localhost:29427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:48.597893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:48.615213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:48.746875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:48.903571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:48.971229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:50.675238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347493591396393:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:50.675514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:50.933495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:50.964980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:50.993455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.022618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.052141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.087505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.119043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.181690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347497886364349:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:51.181786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:51.182131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347497886364354:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:51.185211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:51.194054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347497886364356:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:51.275853Z node 1 :TX_PROXY ERROR: Actor# [1:7494347497886364411:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:52.654261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347480706492731:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:52.663034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:55.711605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:55.711694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:55.711732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:55.711770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:55.711811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:55.711844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:55.711926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:55.712001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:55.712305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:55.792786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2025-04-17T18:15:55.792839Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:55.807538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:55.807970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:55.808151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:55.816014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:55.816201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:55.816843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:55.817062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:55.819652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:55.820868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:55.820926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:55.821086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:55.821131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:55.821168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:55.821296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.829390Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:55.952113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:55.952333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.952512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:55.952736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:55.952798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.959073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:55.959238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-17T18:15:55.959451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.959521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:55.959559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:55.959596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:55.961800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.961888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:55.961934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:55.963991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.964046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.964102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:55.964145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:55.967835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:55.969690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:55.969877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:55.970897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:55.971081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:55.971130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:55.971392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:55.971443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:55.971617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:55.971686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:55.973608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:55.973650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:55.973822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:55.973859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:55.974259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:55.974305Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:55.974385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:55.974418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:55.974450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:55.974477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:55.974508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:55.974572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:55.974605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:55.974634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:55.974701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:55.974735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:55.974761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:55.976438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:55.976554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:55.976594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T18:15:56.259234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:56.259330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:56.259379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:15:56.259629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T18:15:56.259771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T18:15:56.270863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:56.270911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:15:56.271199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:56.271241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T18:15:56.271339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.271385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:15:56.272539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:56.272665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:56.272698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:15:56.272731Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T18:15:56.272765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:56.272847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T18:15:56.273328Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1157 } } 2025-04-17T18:15:56.273363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:56.273496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1157 } } 2025-04-17T18:15:56.273595Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1157 } } 2025-04-17T18:15:56.274113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:56.274156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:15:56.274297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:56.274353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:56.274435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:56.274491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:56.274542Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.274576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:15:56.274609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T18:15:56.277243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:56.278670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.278798Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.279040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.279080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:15:56.279185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:56.279220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:56.279274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:56.279304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:56.279352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:15:56.279412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T18:15:56.279452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:56.279484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:15:56.279526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:15:56.279639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:15:56.281479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:15:56.281521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:393:2365] TestWaitNotification: OK eventTxId 102 2025-04-17T18:15:56.282012Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:56.282293Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 257us result status StatusSuccess 2025-04-17T18:15:56.282730Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 
0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoin::JoinLeftPureCross [GOOD] >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> BasicUsage::GetAllStartPartitionSessions >> TLocksTest::Range_CorrectDot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD] Test command err: Trying to start YDB, gRPC: 62939, MsgBus: 4393 2025-04-17T18:15:51.506440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347495473478501:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:51.506507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e42/r3tmp/tmpbQXQEo/pdisk_1.dat 2025-04-17T18:15:51.907973Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62939, node 1 2025-04-17T18:15:51.956568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:51.956652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:51.990817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:52.050637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:52.050666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:52.050674Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-17T18:15:52.050780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4393 TClient is connected to server localhost:4393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:52.573749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:52.588187Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:52.602209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:52.734493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:52.892779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:52.958961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.466334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347508358382156:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:54.466463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:54.778087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:54.805075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:54.826547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:54.850452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:54.876070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:54.909009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:54.939759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:55.025807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347512653350117:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:55.025888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:55.025953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347512653350122:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:55.029962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:55.040716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347512653350124:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:55.097486Z node 1 :TX_PROXY ERROR: Actor# [1:7494347512653350178:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:56.510487Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347495473478501:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:56.510679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:56.824667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:56.824755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:56.824798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:56.824827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:56.824871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:56.824897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:56.824977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:56.825043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:56.825334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:56.909550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:56.909604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:56.927568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:56.927944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:56.928121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:56.941944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:56.942203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-17T18:15:56.942899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:56.943160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:56.946237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:56.947489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:56.947569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:56.947752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:56.947800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:56.947842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:56.947965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.954404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:57.101654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:57.101901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.102107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:57.102410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:57.102473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.107059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:57.107205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:57.107430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.107501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:57.107536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:57.107571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:57.109742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.109798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:57.109851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:57.111800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.111848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.111902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:57.111976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:57.121779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:57.123939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:57.124176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:57.125136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:57.125274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:57.125323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:57.125597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:57.125651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:57.125801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:57.125917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:57.128146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:57.128206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:57.128395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-17T18:15:57.128437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:57.128813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.128859Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:57.128952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:57.128987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:57.129025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:57.129057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:57.129096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:57.129159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:57.129201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:57.129230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:57.129296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:57.129345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:57.129382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:57.131136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:57.131249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:57.131296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-17T18:15:57.476566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:57.476639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:57.476680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:15:57.476926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T18:15:57.477038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T18:15:57.479846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:57.479892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:15:57.480116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:57.480161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T18:15:57.480612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.480669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:15:57.481182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:57.481252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:15:57.481280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:15:57.481322Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T18:15:57.481374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:15:57.481452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-17T18:15:57.485512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:15:57.499287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: 
TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 936 } } 2025-04-17T18:15:57.499362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T18:15:57.499508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 936 } } 2025-04-17T18:15:57.499618Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 936 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:15:57.500621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:57.500685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-17T18:15:57.500835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:57.500887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:15:57.500969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:15:57.501025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:57.501059Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.501103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:15:57.501145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T18:15:57.503871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.504075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:15:57.504177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 
72057594046678944 2025-04-17T18:15:57.504223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:15:57.504330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:57.504381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:57.504416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:15:57.504460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:57.504499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:15:57.504571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] message: TxId: 102 2025-04-17T18:15:57.504618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:15:57.504664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:15:57.504700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:15:57.504815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:57.506448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:15:57.506546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:509:2434] TestWaitNotification: OK eventTxId 102 2025-04-17T18:15:57.507063Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:15:57.507309Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 272us result status StatusSuccess 2025-04-17T18:15:57.507810Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 
0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Secret::Simple [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9 >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: 2025-04-17T18:13:41.166074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:41.166579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:41.166630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a69/r3tmp/tmpYDvukq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15329, node 1 TClient is connected to server localhost:1359 2025-04-17T18:13:41.755311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:41.799708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:41.803419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:41.803480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:41.803515Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:41.803934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:41.839486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:41.839630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:41.851410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-17T18:13:53.864923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:817:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:53.865078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:53.873201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-17T18:13:54.070301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2761], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:54.070418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:54.070760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:939:2766], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:54.075524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T18:13:54.208947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:941:2768], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:13:54.757743Z node 1 :TX_PROXY ERROR: Actor# [1:1038:2836] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:55.398447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:13:55.795985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-17T18:13:56.371514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T18:13:57.069710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:13:57.484127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:13:58.875823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-17T18:13:59.316144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-17T18:14:15.511114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:14:15.511176Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-17T18:14:38.621204Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715721. 
Ctx: { TraceId: 01js2fbfvk4680ysjzz4crtqwp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2IzZjMyZTktNDc1YWJhMDItNDQwY2IyOTEtM2ZiNmIyNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T18:15:01.535896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715739:0, at schemeshard: 72057594046644480 2025-04-17T18:15:02.758887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715746:0, at schemeshard: 72057594046644480 2025-04-17T18:15:04.707468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480 2025-04-17T18:15:05.273817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715760:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T18:15:18.246443Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715772. Ctx: { TraceId: 01js2fcpkxehvcemgxx4j76jn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU5NjhhMTQtNDk4MmU0NzQtNzg0NWZjYmEtZTYwODllNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-17T18:15:56.623126Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715812. Ctx: { TraceId: 01js2fdw9m9y6yxfgfnp7xy44d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVmMjI4MWYtZTFhMmFhOGUtYmI5NGJmMGMtZjg3ODJjMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-04-17T18:15:20.415372Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347364694392492:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:20.419392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d68/r3tmp/tmpumDyc2/pdisk_1.dat 2025-04-17T18:15:20.874693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:20.880404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:20.880498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:20.884266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31899 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:21.176506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.189560Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.202583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.325087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:21.386975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:23.653340Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347376259050841:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:23.653416Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d68/r3tmp/tmpO76xrX/pdisk_1.dat 2025-04-17T18:15:23.820531Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:23.823126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:23.823189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:23.826096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25716 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:24.014254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.020622Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.032028Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.036531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:24.101093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:24.151763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:27.203101Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347393060455292:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:27.203156Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d68/r3tmp/tmpxRP497/pdisk_1.dat 2025-04-17T18:15:27.328758Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:27.355444Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:27.355549Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:27.357440Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22783 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:27.507178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:27.513970Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:27.528699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:27.602506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:27.651205Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:30.532443Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347404720155065:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:30.532522Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d68/r3tmp/tmpBmVyKP/pdisk_1.dat 2025-04-17T18:15:30.703775Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:30.720985Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:30.721087Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:30.723152Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18981 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Sche ... Connecting -> Connected TClient is connected to server localhost:2467 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:41.984385Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:42.004884Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:42.081243Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:42.137763Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.494286Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347469304277526:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:45.494840Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d68/r3tmp/tmpdVyCit/pdisk_1.dat 2025-04-17T18:15:45.614122Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:45.650569Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:45.650666Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:45.652026Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13211 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:45.912650Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.918554Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.932085Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.999823Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:46.049465Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:49.661379Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347487878417514:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:49.661435Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d68/r3tmp/tmpDzEf6p/pdisk_1.dat 2025-04-17T18:15:49.826921Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:49.848021Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:49.848084Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:49.849357Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9161 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:50.080249Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:50.087023Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:50.101058Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:50.108023Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:50.177644Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:50.255764Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:53.922707Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347505269468116:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:53.922779Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d68/r3tmp/tmpv7zlzh/pdisk_1.dat 2025-04-17T18:15:54.093742Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:54.119659Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:54.119755Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:54.122961Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9293 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:54.402420Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.422935Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.482812Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.549668Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
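Editor's note: every per-node bootstrap in the traces above follows the same sequence — a METADATA_PROVIDER warning (scheme_cache_undelivered_message), an IMPORT warning ("Table profiles were not loaded"), the HIVE VolatileState chain Unknown -> Disconnected -> Connecting -> Connected, and finally "WaitRootIsUp ... success.". The sketch below is a hypothetical triage helper, not part of the YDB test suite: it reads a log like this one on stdin and flags any node whose HIVE actor never logged a transition into Connected. The regular expression mirrors the exact wording of the HIVE lines above; the script and file names are illustrative only.

    # hive_states.py -- illustrative sketch; assumes the log format seen above.
    import re
    import sys

    # Matches e.g. "node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0))
    # VolatileState: Unknown -> Disconnected"; captures node id and both states.
    TRANSITION = re.compile(
        r"node (\d+) :HIVE WARN: HIVE#\d+ Node\(\d+, \([\d,]+\)\) "
        r"VolatileState: (\S+) -> (\S+)"
    )

    last_state = {}  # node id -> most recently logged target state
    for line in sys.stdin:
        for node, _src, dst in TRANSITION.findall(line):
            last_state[node] = dst

    for node, state in sorted(last_state.items(), key=lambda kv: int(kv[0])):
        marker = "ok" if state == "Connected" else "STUCK"
        print(f"node {node}: last VolatileState -> {state} [{marker}]")

Run as, for example, python3 hive_states.py < test_run.log (the log file name is assumed). On the traces above it would report every node shown as Connected.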
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> TLocksTest::BrokenSameShardLock [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:16:00.194391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:00.194477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:00.194536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:00.194577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:00.194623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:00.194667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:00.194747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:00.194817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:00.195143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:00.277437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:00.277500Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:00.297706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:00.298103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:00.298390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:00.307477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:00.307689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:00.308311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.308526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:00.311166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-04-17T18:16:00.312348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:00.312419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.312624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:00.312667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:00.312706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:00.312823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.320197Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:16:00.416177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:00.416381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.416556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:00.416800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:00.416859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.419194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.419311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:00.419456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.419515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:00.419548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:00.419595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:00.421324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.421377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:00.421417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:00.423177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.423225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.423285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.423334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.432903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:00.434775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:00.434946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:00.435860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.436001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:00.436053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.436332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:00.436392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.436570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:00.436669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:00.438486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:00.438533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:00.438699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.438751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:00.439134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.439179Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-17T18:16:00.439265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:00.439311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.439347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:00.439378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.439411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:00.439465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.439513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:00.439544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:00.439598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:00.439639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:00.439683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:00.441286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:00.441384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:00.441450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:16:00.441517Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:16:00.441558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:00.441651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:16:00.444506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:16:00.445062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:16:00.446301Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T18:16:00.463835Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T18:16:00.466424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:00.466769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.466893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2025-04-17T18:16:00.467275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2025-04-17T18:16:00.467970Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:16:00.474621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:00.474800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-17T18:16:00.475397Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 1778, MsgBus: 12468 2025-04-17T18:15:53.811969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347506288883165:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:53.812216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e39/r3tmp/tmpZGh4iK/pdisk_1.dat 2025-04-17T18:15:54.139974Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1778, node 1 2025-04-17T18:15:54.202666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:54.203053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:54.216070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:54.260864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:54.260894Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:54.260904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:54.261050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12468 TClient is connected to server localhost:12468 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:54.754499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.781825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.901132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.040739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:55.099037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:56.826930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347519173786832:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:56.827060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:57.101063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.143196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.172762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.199402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.226728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.269416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.313558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.382462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347523468754793:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:57.382558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:57.386134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347523468754798:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:57.389488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:57.398552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347523468754800:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:57.478199Z node 1 :TX_PROXY ERROR: Actor# [1:7494347523468754857:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:58.316322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.352863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.380479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.444553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.470452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.499070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.812397Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347506288883165:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:58.812470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> KqpRanges::ValidatePredicates [GOOD] >> KqpRanges::ValidatePredicatesDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-04-17T18:15:22.241055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347370375006123:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:22.256737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5d/r3tmp/tmp0jpgkI/pdisk_1.dat 2025-04-17T18:15:22.718755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:22.718894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:22.721616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:22.770026Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31976 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:23.045904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:23.105180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.231559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:23.317362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.737734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347385179172437:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:25.737810Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5d/r3tmp/tmp5Nmezo/pdisk_1.dat 2025-04-17T18:15:25.894329Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:25.904423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:25.904514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:25.906493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24243 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:26.103337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.109494Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.118852Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:26.123952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.187308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:26.273904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.977043Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347399334172317:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:28.978415Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5d/r3tmp/tmpW6iQx8/pdisk_1.dat 2025-04-17T18:15:29.119585Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:29.164926Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:29.165007Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:29.166758Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30995 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:29.311499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.328394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:29.383980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:29.438759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:32.671281Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347415649399728:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.671315Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5d/r3tmp/tmpSjFx6q/pdisk_1.dat 2025-04-17T18:15:32.844404Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:32.860781Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.860874Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.864277Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1877 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.091582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDo ... ished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:44.137806Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:44.146069Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:44.162764Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:44.258252Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:44.308033Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:47.736392Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347479748671900:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:47.736498Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5d/r3tmp/tmpBTFhTb/pdisk_1.dat 2025-04-17T18:15:47.869382Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:47.891863Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:47.891961Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:47.893099Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26592 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:48.111227Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:48.134416Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:48.211873Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:48.291734Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:52.265477Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347503224753564:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:52.265576Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5d/r3tmp/tmp7wfZEI/pdisk_1.dat 2025-04-17T18:15:52.414775Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:52.420706Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:52.420811Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:52.422621Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1077 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:52.690893Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:52.698631Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:52.711937Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:52.718062Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:52.794859Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:52.868335Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:56.570542Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347519346089921:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:56.570609Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5d/r3tmp/tmpiN1pri/pdisk_1.dat 2025-04-17T18:15:56.716555Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:56.748573Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:56.748678Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:56.750312Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9019 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:57.012361Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.025662Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.048682Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:57.057901Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.136621Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.223376Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 62200, MsgBus: 25221 2025-04-17T18:15:55.889813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347514498895483:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:55.889871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e33/r3tmp/tmpl1SQie/pdisk_1.dat 2025-04-17T18:15:56.236092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62200, node 1 2025-04-17T18:15:56.294813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:56.295018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:56.296853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:56.314658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:56.314676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:56.314687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:56.314786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25221 TClient is connected to server localhost:25221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:56.809165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:56.833140Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:56.852766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:57.000844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.167389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.228928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:58.743881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347527383799161:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:58.743989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:59.035986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.067065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.101012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.130748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.156682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.189580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.260599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.336516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347531678767123:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:59.336596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:59.336756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347531678767128:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:59.340814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:59.355619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347531678767130:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:59.413562Z node 1 :TX_PROXY ERROR: Actor# [1:7494347531678767184:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:00.316884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:00.357830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:00.389930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:16:00.440952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:16:00.472979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:16:00.503677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:16:00.889669Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347514498895483:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:00.889752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:15:55.909678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:55.909764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-04-17T18:15:55.909805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:55.909841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:55.909882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:55.909908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:55.909958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:55.910021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:55.910407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:55.989034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:55.989104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:56.006020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:56.006152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:56.006352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:56.012455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:56.012765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:56.013389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:56.013635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:56.017457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:56.018746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:56.018801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:56.018903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:56.018944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:56.018981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:56.019159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.025177Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:15:56.136722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:56.136963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.137148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:56.137368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:56.137420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.139755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:56.139903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:56.140086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.140167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:56.140202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:56.140232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:56.142024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.142087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:56.142139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:56.143820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.143862Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.143929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:56.143980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:56.147382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:56.149024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:56.149187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:56.150480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:56.150599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:56.150640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:56.150882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:56.150942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:56.151100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:56.151181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:56.152986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:56.153032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:56.153191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:56.153228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:56.153573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:56.153619Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:56.153724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:56.153755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:56.153794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:56.153823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:56.153857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:56.153920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:56.153953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:56.153979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:56.154065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:56.154101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:56.154130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:56.155670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:56.155744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:56.155766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d::TEvNotifyTxCompletionResult> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.911366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.911434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.911483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.911601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.911657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.911701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.915746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T18:16:02.915849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:16:02.915883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:16:02.915912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:16:02.915937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:16:02.915967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T18:16:02.916022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult 
to actorId: [1:2810:4075] message: TxId: 103 2025-04-17T18:16:02.916061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:16:02.916124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:16:02.916157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:16:02.917075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-17T18:16:02.919140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:16:02.919192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4014:5206] TestWaitNotification: OK eventTxId 103 2025-04-17T18:16:02.919649Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:02.919921Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 296us result status StatusSuccess 2025-04-17T18:16:02.920399Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" 
DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-04-17T18:16:02.923061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:02.923248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:16:02.927122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2025-04-17T18:16:02.929369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: 
Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:02.929486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T18:16:02.929755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T18:16:02.929801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T18:16:02.930278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-17T18:16:02.930364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:16:02.930398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4352:5543] TestWaitNotification: OK eventTxId 104 >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-04-17T18:15:24.454253Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347379981060932:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:24.455223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5a/r3tmp/tmpgwS10G/pdisk_1.dat 2025-04-17T18:15:24.797112Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:24.865533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:24.865659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:24.866940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9062 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:25.077369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.101782Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.110404Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:25.117630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.246891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:25.324643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:27.623744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347394936709831:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:27.623899Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5a/r3tmp/tmpywLYxi/pdisk_1.dat 2025-04-17T18:15:27.808099Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:27.822461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:27.822555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:27.825484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32034 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:28.006019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:28.023465Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.033194Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:28.037110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.101845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.154951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.023633Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347409871554006:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:31.023674Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5a/r3tmp/tmpmjQ10R/pdisk_1.dat 2025-04-17T18:15:31.172132Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.199026Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.199116Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:31.200516Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65052 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:31.393246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:31.397509Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:31.415792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.490879Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.564751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.454348Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347423656968235:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.462506Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5a/r3tmp/tmpunUUWr/pdisk_1.dat 2025-04-17T18:15:34.590023Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:34.630623Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:34.630699Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:34.632321Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4060 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... /001d5a/r3tmp/tmpvrx5lk/pdisk_1.dat 2025-04-17T18:15:45.426335Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:45.429955Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:45.430053Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:45.431986Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6357 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:45.696100Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:45.713717Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.789347Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:45.849045Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:49.585277Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347488553387504:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:49.585352Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5a/r3tmp/tmpDCCcPm/pdisk_1.dat 2025-04-17T18:15:49.726379Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:49.747720Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:49.747811Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:49.749132Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8515 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:50.011199Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:50.040380Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:50.115368Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:50.190233Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:54.055962Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347509095395291:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:54.056041Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5a/r3tmp/tmpUetZv1/pdisk_1.dat 2025-04-17T18:15:54.272089Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:54.288636Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:54.288727Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:54.290757Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62233 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:54.552282Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:54.569487Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.628044Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:54.682594Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:58.513784Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347526031667756:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:58.514503Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d5a/r3tmp/tmpUJE9Xi/pdisk_1.dat 2025-04-17T18:15:58.661988Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:58.693669Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:58.693773Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:58.694806Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25084 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:58.942920Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:58.969907Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:59.053326Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:59.126259Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpJoin::RightSemiJoin_SecondaryIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:16:03.866651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:03.866738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:03.866795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:03.866829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:03.866869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:03.866897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:03.866952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:03.867027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:03.867324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:03.946558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:03.946623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:03.960998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:03.961108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:03.961334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:03.969528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:03.969817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:03.970463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:03.970722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:03.974454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:03.975758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:03.975818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:03.975922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-17T18:16:03.975965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:03.976003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:03.976139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:03.981759Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:16:04.121371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:04.121615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.121798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:04.122261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:04.122340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.129638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:04.129818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:04.130028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.130114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:04.130154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:04.130212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:04.135187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.135266Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:04.135309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:04.138417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.138474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.138534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-17T18:16:04.138616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:04.142387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:04.146115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:04.146396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:04.147348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:04.147498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:04.147550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:04.147824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:04.147879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:04.148073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:04.148165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:04.150406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:04.150455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:04.150616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:04.150671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:04.150979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.151021Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:04.151141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:04.151176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:04.151220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-17T18:16:04.151256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:04.151290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:04.151346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:04.151384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:04.151421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:04.151497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:04.151539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:04.151580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:04.153425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:04.153547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:04.153586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... nc, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:389:2361], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:04.820951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-17T18:16:04.821068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:16:04.821196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-17T18:16:04.821241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-17T18:16:04.821270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-17T18:16:04.821496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:04.821610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:04.821654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-17T18:16:04.821695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-17T18:16:04.831006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-17T18:16:04.831070Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-17T18:16:04.831156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:16:04.831187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:16:04.831218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:16:04.831275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:16:04.831353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-17T18:16:04.831427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710760 2025-04-17T18:16:04.831479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:16:04.831523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-17T18:16:04.831550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-17T18:16:04.831624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-17T18:16:04.833475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-17T18:16:04.833547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-17T18:16:04.833621Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-17T18:16:04.833697Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:389:2361], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, 
SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:04.835607Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:16:04.835698Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:389:2361], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:04.835739Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:16:04.837195Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:16:04.837270Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:389:2361], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:04.837321Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-17T18:16:04.837440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:16:04.837481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:480:2441] TestWaitNotification: OK eventTxId 102 2025-04-17T18:16:04.837997Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:04.838306Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 287us result status StatusSuccess 2025-04-17T18:16:04.838770Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> TSchemeShardTTLTestsWithReboots::CopyTable >> TLocksTest::Range_GoodLock1 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> OlapEstimationRowsCorrectness::TPCH11 ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:16:05.323958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:05.324057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:05.324094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:05.324120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:05.324154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:05.324177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:05.324232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:05.324278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:05.324545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:05.407756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:05.407816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:05.424115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:05.424515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:05.424692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:05.432607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:05.432795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:05.433447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:05.433688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:05.436558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:05.437775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:05.437854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:05.438044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:05.438107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:05.438153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:05.438305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.444444Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:16:05.591969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:05.592219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.592410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:05.592676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:05.592728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.595105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:05.595243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:05.595397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.595454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:05.595511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:05.595543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:05.598505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.598571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:05.598608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:05.600923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.600978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.601034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:05.601084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:05.604787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:05.607039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:05.607237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:05.608248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:05.608389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:05.608441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:05.608713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:05.608769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:05.608930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:05.609028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:05.611506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:05.611549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:05.611751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:05.611791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:05.612148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:05.612199Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:05.612290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:05.612323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:05.612358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:05.612390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T18:16:05.612423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:05.612482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:05.612526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:05.612553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:05.612614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:05.612664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:05.612699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:05.614528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:05.614640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:05.614677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :16:06.184278Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:16:06.184349Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:06.184498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-17T18:16:06.184546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-17T18:16:06.184581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-17T18:16:06.184834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-04-17T18:16:06.184916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:06.184956Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-17T18:16:06.184993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-17T18:16:06.186605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-17T18:16:06.186645Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-17T18:16:06.186720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:16:06.186765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:16:06.186815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:16:06.186856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:16:06.186893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-17T18:16:06.186949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:124:2150] message: TxId: 281474976710760 2025-04-17T18:16:06.186982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:16:06.187009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-17T18:16:06.187034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-17T18:16:06.187100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:16:06.188493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-17T18:16:06.188548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-17T18:16:06.188611Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-17T18:16:06.188682Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, 
UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:06.190000Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:16:06.190085Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:06.190149Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:16:06.191670Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:16:06.191752Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:16:06.191793Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-17T18:16:06.191897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:16:06.191932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:475:2436] TestWaitNotification: OK eventTxId 102 2025-04-17T18:16:06.192444Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:06.192679Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 247us result status StatusSuccess 2025-04-17T18:16:06.193147Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 8637, MsgBus: 8457 2025-04-17T18:15:55.429761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347512439309957:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:55.429981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e36/r3tmp/tmp4EP9AU/pdisk_1.dat 2025-04-17T18:15:55.768919Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8637, node 1 2025-04-17T18:15:55.815923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:55.816020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:55.824003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:55.850979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:55.851004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:55.851011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:55.851155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8457 TClient is connected to server localhost:8457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:56.362689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:56.378077Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:56.396761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:56.538356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:56.718745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:56.797552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:58.233122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347525324213613:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:58.233222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:58.480284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.513211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.546895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.579599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.608319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.675955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.751575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.824755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347525324214281:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:58.824819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:58.825068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347525324214286:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:58.828176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:15:58.836175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347525324214288:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:15:58.937780Z node 1 :TX_PROXY ERROR: Actor# [1:7494347525324214346:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:59.784947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:15:59.827460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:00.430419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347512439309957:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:00.430509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26301, MsgBus: 9503 2025-04-17T18:16:01.279712Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347538296948820:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:01.279772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e36/r3tmp/tmpefWstE/pdisk_1.dat 2025-04-17T18:16:01.398495Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:01.401417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:01.401484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:01.403989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26301, node 2 2025-04-17T18:16:01.522735Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:01.522755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:01.522763Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:01.522879Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9503 TClient is connected to server localhost:9503 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:01.988245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:01.997828Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:16:02.002963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.117661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.278603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:16:02.350272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.332368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347551181852467:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:04.332535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:04.386629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.437961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.479066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.517112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.589303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.625313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.660673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:04.727247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347551181853128:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:04.727324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:04.727676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347551181853133:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:04.746895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:04.759172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347551181853135:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:16:04.837186Z node 2 :TX_PROXY ERROR: Actor# [2:7494347551181853193:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:05.851152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.922600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:06.282266Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347538296948820:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:06.282328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2025-04-17T18:15:27.923814Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347393299765476:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:27.923897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d51/r3tmp/tmpS3B4vp/pdisk_1.dat 2025-04-17T18:15:28.379537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:28.383151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:28.383256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:28.387280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5109 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:28.667668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.682815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.696789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.826451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:28.904046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.332324Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347411184863417:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:31.334511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d51/r3tmp/tmpirrf1z/pdisk_1.dat 2025-04-17T18:15:31.526029Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.551340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.551428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:31.555311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5722 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:31.721846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:31.741939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.818947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.897510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.696210Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347422602032576:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.697655Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d51/r3tmp/tmppm555e/pdisk_1.dat 2025-04-17T18:15:34.791420Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:34.826059Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:34.826132Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:34.826999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17072 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:35.019393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:35.038276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.110158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:35.159472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:38.111212Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347440401619761:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:38.111286Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d51/r3tmp/tmp0nd0lk/pdisk_1.dat 2025-04-17T18:15:38.294201Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:38.310671Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:38.310755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:38.313146Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22949 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:38.516846Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:38.545858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation p ... shed: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-04-17T18:15:49.415095Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.419348Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:49.437439Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:49.513031Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:49.575288Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.271190Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347505617611587:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:53.271311Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d51/r3tmp/tmpGERzXg/pdisk_1.dat 2025-04-17T18:15:53.428719Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:53.456455Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:53.456551Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:53.460388Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9484 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:53.715763Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:53.740098Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.819049Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:53.873248Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.628650Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347524273142922:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:57.628723Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d51/r3tmp/tmpo5YNUV/pdisk_1.dat 2025-04-17T18:15:57.767049Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:57.783076Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:57.783188Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:57.789364Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11374 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:58.077900Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.092304Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:58.104033Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:58.109436Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:58.171047Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:58.236221Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.351924Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347545292219171:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:02.352474Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d51/r3tmp/tmpRRi71a/pdisk_1.dat 2025-04-17T18:16:02.524667Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:02.556344Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:02.556437Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:02.559530Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11314 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:02.827846Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.842361Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.862853Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:16:02.869579Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:16:02.943788Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:16:03.017988Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:16:07.446919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:07.447023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:07.447081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:07.447116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:07.447161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:07.447184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:07.447255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:07.447318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:07.447647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:07.534929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:07.534989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:07.550052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:07.550492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:07.550694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:07.558202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:07.558376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:07.558985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:07.559208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:07.561686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:07.563038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:07.563111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:07.563291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:07.563336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:07.563377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:07.563492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.569546Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:16:07.698219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:07.698469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.698678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:07.698942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:07.698996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.701303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:07.701437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:07.701618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.701673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:07.701708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:07.701740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:07.703558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.703627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:07.703665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:07.705309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.705352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.705405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:07.705452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:07.714040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:07.718037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:07.718286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:07.719224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:07.719354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:07.719405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:07.719665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:07.719718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:07.719872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:07.719951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:07.721919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:07.721960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:07.722140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:07.722221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:07.722578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.722622Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:07.722708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:07.722741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:07.722774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:07.722805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:07.722846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:07.722900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:07.722938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:07.722965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:07.723020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:07.723067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:07.723118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:07.724758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:07.724856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:07.724914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
type: 269090816 2025-04-17T18:16:07.997180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T18:16:07.997774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:07.997867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:07.997935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:16:07.998194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T18:16:07.998319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T18:16:08.002032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:08.002092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:16:08.002375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:08.002424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T18:16:08.002715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:08.002764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:16:08.003393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:16:08.003514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:16:08.003561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:16:08.003618Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T18:16:08.003656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:16:08.003733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 
2025-04-17T18:16:08.012825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:16:08.037470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1023 } } 2025-04-17T18:16:08.037538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:08.037711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1023 } } 2025-04-17T18:16:08.037894Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1023 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:16:08.038700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:16:08.038754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:08.038912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:16:08.038967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:16:08.039103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:16:08.039168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:08.039204Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:08.039258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:16:08.039306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T18:16:08.041621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:08.042581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:08.042712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:08.042750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:16:08.042871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:16:08.042919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:16:08.042960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:16:08.042994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:16:08.043055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:16:08.043118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-04-17T18:16:08.043177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:16:08.043232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:16:08.043265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:16:08.043428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:16:08.045133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:16:08.045184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:393:2365] TestWaitNotification: OK eventTxId 102 2025-04-17T18:16:08.045739Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:08.045980Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 289us result status StatusSuccess 2025-04-17T18:16:08.046524Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TLocksTest::CK_Range_GoodLock [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> TLocksTest::Range_IncorrectDot2 [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull >> TLocksTest::Range_EmptyKey [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-04-17T18:15:41.546245Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.564070Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.564469Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.564687Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.612512Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.617802Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.700829Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.703072Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.703152Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.703210Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.705053Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.705445Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.705505Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:15:41.705763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.705805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.790968Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.823733Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:15:41.823904Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.823991Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:15:41.824015Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.824038Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.824064Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.824198Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.824241Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.825099Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:41.825200Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:41.825308Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.825352Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.825413Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:41.825460Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.825500Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.825531Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.825568Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.825668Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.825698Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.825743Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:15:41.829237Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.829308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.829412Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.829548Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.829590Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:15:41.829659Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.829707Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.829744Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:15:41.829790Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.829825Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.830104Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.830147Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:15:41.830216Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.830251Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.830288Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.830322Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:41.830357Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:15:41.830388Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.830422Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:15:41.842932Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.843045Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.843084Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.843122Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.843788Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.846350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender 
[1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.846407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.846443Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:15:41.846521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.846547Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.846683Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.846722Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.846801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.846867Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:15:41.850719Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.850802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.851095Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.851136Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.851181Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.851222Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.851256Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.851341Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.851376Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.851531Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.851588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.851639Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.851673Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.851859Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:15:41.851894Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.851917Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.851938Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.851967Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.852055Z node 1 :TX_DATASHARD TRACE: Execution 
status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.852085Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:15:41.852117Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:41.852147Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:41.852183Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:15:41.852233Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:15:41.852265Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:15:41.852316Z node 1 :TX_DATA ... 4MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2025-04-17T18:16:09.047169Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:16:09.047255Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:16:09.047738Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2025-04-17T18:16:09.047797Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-17T18:16:09.047826Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2025-04-17T18:16:09.047854Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:16:09.047882Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:16:09.047935Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:16:09.047994Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2025-04-17T18:16:09.048027Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-17T18:16:09.048050Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:16:09.048073Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:16:09.048096Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:09.048136Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:16:09.048433Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-04-17T18:16:09.048480Z node 3 :TX_DATASHARD TRACE: 
Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:16:09.048526Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:16:09.048551Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:16:09.048578Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-04-17T18:16:09.048605Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-04-17T18:16:09.048635Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is DelayComplete 2025-04-17T18:16:09.048656Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-04-17T18:16:09.048684Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-04-17T18:16:09.048710Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-04-17T18:16:09.048748Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-17T18:16:09.048770Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-04-17T18:16:09.048792Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2025-04-17T18:16:09.067750Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:16:09.067809Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-04-17T18:16:09.067846Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-17T18:16:09.067920Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-04-17T18:16:09.072817Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T18:16:09.072870Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T18:16:09.073863Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4542:6461], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:09.073902Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:09.073937Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4541:6460], serverId# [3:4542:6461], sessionId# [0:0:0] 2025-04-17T18:16:09.074296Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-04-17T18:16:09.074334Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:16:09.074413Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:16:09.074854Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-04-17T18:16:09.074904Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-17T18:16:09.074930Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2025-04-17T18:16:09.074958Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:16:09.074985Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:16:09.075035Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:16:09.075092Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2025-04-17T18:16:09.075123Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-17T18:16:09.075146Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:16:09.075168Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:16:09.075188Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:09.075225Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:16:09.075528Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-04-17T18:16:09.075571Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:16:09.075619Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:16:09.075642Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:16:09.075667Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-04-17T18:16:09.075690Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-04-17T18:16:09.075722Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2025-04-17T18:16:09.075747Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-04-17T18:16:09.075770Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 
to execution unit CompletedOperations 2025-04-17T18:16:09.075792Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-04-17T18:16:09.075827Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-17T18:16:09.075848Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-04-17T18:16:09.075870Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2025-04-17T18:16:09.116869Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:16:09.116923Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-17T18:16:09.118639Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:16:09.118687Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-04-17T18:16:09.118724Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-17T18:16:09.118798Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:16:09.121773Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-04-17T18:16:09.125095Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4556:6474], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:09.125161Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:09.125210Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4555:6473], serverId# [3:4556:6474], sessionId# [0:0:0] 2025-04-17T18:16:09.125809Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4554:6472], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1717 LastUpdateTime: 1717 } >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-04-17T18:15:30.952092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347408192327864:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:30.952161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3f/r3tmp/tmpnCvdnx/pdisk_1.dat 2025-04-17T18:15:31.344767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:31.352387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:31.352487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:31.355176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64323 
WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:31.600920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.617633Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.630913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.770007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.819532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.062291Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347424058094925:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.062377Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3f/r3tmp/tmpQ8iefS/pdisk_1.dat 2025-04-17T18:15:34.168980Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:34.200211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:34.200309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:34.201765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20421 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:34.379433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:34.404460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.488464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.555406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.389816Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347436156850177:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:37.389897Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3f/r3tmp/tmpbSGtwH/pdisk_1.dat 2025-04-17T18:15:37.493323Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:37.532324Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:37.532393Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:37.533923Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16329 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:37.703263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:37.708227Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.719965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.774707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.824609Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:40.409005Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347448453854008:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:40.409069Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3f/r3tmp/tmpkZiUZh/pdisk_1.dat 2025-04-17T18:15:40.494156Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:40.537680Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:40.537748Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:40.539434Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28474 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:40.706630Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubD ... SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:51.573801Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:15:51.596034Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:51.674688Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
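The repeated WaitRootIsUp 'dc-1' / TClient::Ls exchanges above are the C++ test client probing the scheme root before each TLocksTest case starts creating tables. For orientation, a rough public-API equivalent — a minimal sketch only, assuming the `ydb` Python SDK with `SchemeClient.describe_path` behaving as sketched, and a placeholder endpoint/database (the tests above use ephemeral per-run ports such as localhost:28474):

```python
import ydb

# Placeholder endpoint/database; the logs above use ad-hoc per-run ports.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/dc-1")
try:
    # Roughly what WaitRootIsUp does: block until discovery against the
    # cluster succeeds or the timeout expires.
    driver.wait(timeout=5)

    # Roughly what `TClient::Ls request: dc-1` does: describe the root
    # entry and inspect the returned scheme entry.
    entry = ydb.SchemeClient(driver).describe_path("/dc-1")
    print(entry.name, entry.type)
finally:
    driver.stop()
```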
2025-04-17T18:15:51.758252Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:55.366853Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347516240544016:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:55.366919Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3f/r3tmp/tmpwo5xTW/pdisk_1.dat 2025-04-17T18:15:55.486307Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:55.515058Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:55.515131Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:55.519368Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63026 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:55.767474Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.773329Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.790766Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.879412Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.957338Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:59.628835Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347532236811611:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:59.628900Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3f/r3tmp/tmpqxeOGs/pdisk_1.dat 2025-04-17T18:15:59.781653Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:59.815265Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:59.815371Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:59.816492Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64144 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:00.099633Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:00.110749Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:00.121918Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:16:00.126964Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:00.204184Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:00.267317Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:04.382847Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347551929684177:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:04.382928Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d3f/r3tmp/tmpVz3GqQ/pdisk_1.dat 2025-04-17T18:16:04.563058Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:04.581321Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:04.581422Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:04.582960Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1200 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:04.972681Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:04.980005Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:04.998878Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:05.084422Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
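For reference against the TTLEnabledTable describe-result near the top of this excerpt (TTLSettings with ColumnName: "modified_at" and ExpireAfterSeconds: 3600), the schema under test maps onto the public YQL surface as below. A sketch with placeholder endpoint and database; `Interval("PT1H")` is the ISO-8601 spelling of the 3600-second TTL shown in the log.

```python
import ydb

# Mirrors the describe-result above: key Uint64, modified_at Timestamp,
# rows expire one hour (3600 s) after modified_at.
CREATE_TTL_TABLE = """
CREATE TABLE TTLEnabledTable (
    key Uint64,
    modified_at Timestamp,
    PRIMARY KEY (key)
) WITH (
    TTL = Interval("PT1H") ON modified_at
);
"""

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")  # placeholders
try:
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    pool.retry_operation_sync(lambda s: s.execute_scheme(CREATE_TTL_TABLE))
finally:
    driver.stop()
```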
2025-04-17T18:16:05.147867Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 >> TLocksTest::CK_Range_BrokenLockInf [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-04-17T18:15:30.317196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347405719254602:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:30.317317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d46/r3tmp/tmpYieSdS/pdisk_1.dat 2025-04-17T18:15:30.808679Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:30.820575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:30.822257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:30.827787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8037 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:31.129429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:31.167275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.310366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:31.378242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:33.806041Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347420171875326:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:33.806086Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d46/r3tmp/tmpEG2guP/pdisk_1.dat 2025-04-17T18:15:33.903534Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:33.950333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:33.950399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.955498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64452 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:34.087842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.092826Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.107684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.152654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:34.191171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
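The TableStats block and the closing TEvGetTableStats exchange in the TTxDataShardMiniKQL::TableStatsHistograms log above report per-shard counters (RowCount, DataSize, PartCount, ...). The same numbers surface through the partition_stats system view; a sketch with placeholder endpoint/database/path, and with the column names taken as an assumption from YDB's documented `.sys/partition_stats` schema:

```python
import ydb

# One row per shard; RowCount/DataSize correspond to the TableStats
# counters seen in the describe-result above.
QUERY = """
SELECT Path, RowCount, DataSize
FROM `.sys/partition_stats`
WHERE Path = "/MyRoot/TTLEnabledTable";
"""

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")  # placeholders
try:
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    result_sets = pool.retry_operation_sync(
        lambda s: s.transaction(ydb.SerializableReadWrite()).execute(
            QUERY, commit_tx=True
        )
    )
    for row in result_sets[0].rows:
        print(row.Path, row.RowCount, row.DataSize)
finally:
    driver.stop()
```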
2025-04-17T18:15:37.149905Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347437614154564:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:37.149971Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d46/r3tmp/tmpacxPfs/pdisk_1.dat 2025-04-17T18:15:37.257380Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:37.294907Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:37.294989Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:37.296099Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61181 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:37.475410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:37.480579Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:37.498482Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:15:37.576176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:37.616942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:40.563073Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347450191514906:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:40.563142Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d46/r3tmp/tmpAbXREg/pdisk_1.dat 2025-04-17T18:15:40.683826Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:40.701506Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:40.701588Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:40.703864Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2210 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:40.887205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDom ... ished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:51.651766Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:51.661614Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:51.672000Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:51.677361Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:51.758086Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:51.829185Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.303626Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347513157388307:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:55.303766Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d46/r3tmp/tmpgrgjlz/pdisk_1.dat 2025-04-17T18:15:55.435905Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:55.461888Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:55.461986Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:55.463524Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2417 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:55.752761Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.759398Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.779485Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:55.861881Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:55.943307Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:59.748097Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347531876163239:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:59.748187Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d46/r3tmp/tmp46p0Nt/pdisk_1.dat 2025-04-17T18:15:59.897645Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:59.921714Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:59.921814Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:59.927456Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63336 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:00.227670Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:00.233196Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:00.257537Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:00.358221Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:00.419452Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:04.529834Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347554610356979:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:04.529907Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d46/r3tmp/tmpsPZGNX/pdisk_1.dat 2025-04-17T18:16:04.725757Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:04.725863Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:04.733267Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:04.746161Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64359 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:05.148085Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:16:05.170456Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:16:05.186376Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:05.266684Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:05.354480Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-04-17T18:15:32.572981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347416262072781:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:32.573033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d30/r3tmp/tmp2VPpaH/pdisk_1.dat 2025-04-17T18:15:32.992265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.992358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:33.013134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:33.017109Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29738 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:33.294650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:33.325987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:33.436774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:33.513338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.870308Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347429376346704:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:35.870479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d30/r3tmp/tmpXU9QcV/pdisk_1.dat 2025-04-17T18:15:36.013642Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:36.018918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:36.019018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:36.020453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1940 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:36.193069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.199192Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:36.216497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:36.295861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:36.374265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:39.103534Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347447070047735:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:39.103673Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d30/r3tmp/tmpo7D28S/pdisk_1.dat 2025-04-17T18:15:39.215831Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:39.253831Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:39.253909Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:39.255173Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10290 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:39.398465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:39.413822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:39.462278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:39.506833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:42.190796Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347457907263160:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:42.190841Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d30/r3tmp/tmpf40umc/pdisk_1.dat 2025-04-17T18:15:42.324642Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:42.342097Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:42.342378Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:42.344328Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10820 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-17T18:15:42.556352Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:15:42.561550Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, u ... 968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18122 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:53.478343Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.483639Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.503676Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.563275Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.624545Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d30/r3tmp/tmpWEnmc8/pdisk_1.dat 2025-04-17T18:15:57.420613Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:57.421823Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:15:57.452505Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:57.452602Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:57.454536Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14677 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:57.688080Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:57.713965Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:57.801538Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.864268Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:01.607846Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347540521983828:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:01.609033Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d30/r3tmp/tmpDO7yaR/pdisk_1.dat 2025-04-17T18:16:01.763537Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:01.766881Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:01.766974Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:01.771142Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5104 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:02.063307Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.073656Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.086166Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:16:02.091826Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.162056Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:02.241609Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.009099Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347562211319131:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:06.009197Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d30/r3tmp/tmphnZ0zc/pdisk_1.dat 2025-04-17T18:16:06.157908Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:06.179599Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:06.179705Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:06.182864Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22874 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:06.448985Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.455161Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.481611Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.574267Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.677801Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2025-04-17T18:15:31.872434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347412657982535:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:31.875211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d39/r3tmp/tmpCdcerC/pdisk_1.dat 2025-04-17T18:15:32.275146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:32.311970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:32.312087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:32.314022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30513 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:32.546537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.568239Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.577275Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:32.581640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.715170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:32.792441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:34.883730Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347424059893729:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:34.883782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d39/r3tmp/tmpPsnD1H/pdisk_1.dat 2025-04-17T18:15:35.007885Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:35.036172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:35.036242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:35.042899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23273 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:35.201815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:15:35.221702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:35.292527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:35.360010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:15:38.119550Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347439460479825:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:38.119630Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d39/r3tmp/tmpTKgQuU/pdisk_1.dat 2025-04-17T18:15:38.328322Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:38.345972Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:38.346054Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:38.351367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17940 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:38.544411Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:15:38.555903Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:38.563521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:38.647824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:38.695090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:15:41.538522Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494347454708560591:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:41.538639Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d39/r3tmp/tmpz150Vy/pdisk_1.dat 2025-04-17T18:15:41.663847Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.688319Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:41.688392Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:41.689775Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25441 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 20 ... necting -> Connected TClient is connected to server localhost:23488 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:52.932947Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:52.953567Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.030681Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.088277Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:56.883477Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7494347518654777078:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:56.883557Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d39/r3tmp/tmpxNCSuO/pdisk_1.dat 2025-04-17T18:15:57.035010Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:57.060332Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:57.060424Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:57.062417Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14606 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:57.300829Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.310912Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:57.326924Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-17T18:15:57.341203Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-17T18:15:57.404657Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:57.478066Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:01.232848Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7494347539128264185:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:01.233986Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d39/r3tmp/tmpEtFEkv/pdisk_1.dat 2025-04-17T18:16:01.336535Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:01.357025Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:01.357125Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:01.367251Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3043 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:01.580433Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:16:01.599562Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:01.682249Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:01.760562Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:06.363928Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494347560403228454:2156];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:06.434837Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d39/r3tmp/tmpbnOvVT/pdisk_1.dat 2025-04-17T18:16:06.596330Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:06.596448Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:06.599155Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:06.599463Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23307 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:16:06.923796Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.930496Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.947735Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:07.028805Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:07.135714Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:16:13.253605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:13.253706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:13.253750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:13.253781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:13.253822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:13.253851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:13.253909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:13.253979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:13.254378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:13.322786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:13.322843Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:13.336082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:13.336195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-17T18:16:13.336387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:13.341703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:13.341894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:13.342354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:13.342582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:13.346238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:13.347237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:13.347298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:13.347376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:13.347403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:13.347429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:13.347559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.352736Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:16:13.482867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:13.483123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.483280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:13.483470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:13.483517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.485295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:13.485420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:13.485565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.485616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:13.485638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:13.485659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:13.487382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.487417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:13.487444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:13.488659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.488689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.488729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:13.488762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:13.491490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:13.492599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:13.492756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:13.493546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:13.493639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:13.493676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:13.493903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:13.493955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:13.494083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:13.494193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:13.495533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T18:16:13.495571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:13.495703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:13.495735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:13.495954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.495989Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:13.496056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:13.496080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:13.496106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:13.496126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:13.496166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:13.496220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:13.496254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:13.496280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:13.496343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:13.496387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:13.496426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:13.498048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:13.498151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:13.498202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:16:13.498253Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:16:13.498287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:13.498374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:16:13.502836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:16:13.503414Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 
72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:16:13.504732Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Bootstrap 2025-04-17T18:16:13.521262Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Become StateWork (SchemeCache [1:275:2266]) 2025-04-17T18:16:13.523715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:13.524046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.524151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2025-04-17T18:16:13.524541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-04-17T18:16:13.525398Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:16:13.532376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:13.532576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-17T18:16:13.533228Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 29655, MsgBus: 4867 2025-04-17T18:16:02.051577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347545323807895:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:02.052360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e2a/r3tmp/tmptr0MiH/pdisk_1.dat 2025-04-17T18:16:02.501879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:02.504397Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:02.504486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:02.516250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29655, node 1 2025-04-17T18:16:02.665717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:02.665741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:02.665747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:02.665858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4867 TClient is connected to server localhost:4867 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:03.282689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.312432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.454236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.609884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.700610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:05.290936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347558208711423:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:05.291085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:05.640573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.681683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.708894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.743564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.773774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.810669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.842828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:05.923882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347558208712086:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:05.923934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:05.924175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347558208712091:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:05.928074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:16:05.940608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347558208712093:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:16:06.022560Z node 1 :TX_PROXY ERROR: Actor# [1:7494347562503679446:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:06.957093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.025637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.047511Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347545323807895:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:07.049473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4390, MsgBus: 29673 2025-04-17T18:16:08.323239Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347569099582874:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:08.323307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e2a/r3tmp/tmpcDBfBU/pdisk_1.dat 2025-04-17T18:16:08.482601Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:08.493998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:08.494302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:08.495717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4390, node 2 2025-04-17T18:16:08.575918Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:08.575939Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:08.575947Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:08.576034Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29673 TClient is connected to server localhost:29673 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:09.000855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:09.015511Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:16:09.031096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:09.113552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:09.261620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:09.333668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:11.293123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347581984486530:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:11.293218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:11.329157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.358081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.385953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.412189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.438062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.475376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.509292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.586256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347581984487194:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:11.586323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:11.586391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347581984487199:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:11.590062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:16:11.603425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347581984487201:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:16:11.694607Z node 2 :TX_PROXY ERROR: Actor# [2:7494347581984487257:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:12.635667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:12.736635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.326299Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347569099582874:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:13.326376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 13398, MsgBus: 23198 2025-04-17T18:16:05.787557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347557163389787:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:05.815409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e24/r3tmp/tmpCKIeVl/pdisk_1.dat 2025-04-17T18:16:06.240454Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:06.277054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:06.282355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13398, node 1 2025-04-17T18:16:06.286158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:06.342742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:06.342759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:06.342764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:06.342856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23198 TClient is connected to server localhost:23198 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:06.921593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.949101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:06.965250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:07.107031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:07.266998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:07.360751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:08.954134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347570048293406:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:08.954304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:09.274218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:09.305593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:09.337823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:09.369937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:09.410998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:09.478517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:09.549014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:09.636705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347574343261372:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:09.636798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:09.637161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347574343261377:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:09.640764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:16:09.651925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347574343261379:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:16:09.733090Z node 1 :TX_PROXY ERROR: Actor# [1:7494347574343261437:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:10.743605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:10.781544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:10.788725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347557163389787:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:10.788799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:16:10.855559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:16:10.889716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:16:10.920869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:16:11.788643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
<main>: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
<main>: Warning: Execution, code: 1060
:3:53: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] Leader for TabletID 9437184 is [1:126:2151] sender: [1:129:2057] recipient: [1:107:2139] 2025-04-17T18:15:41.545739Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.562306Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.563379Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.564256Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.612627Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.697399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.697468Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.702456Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.703386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.704783Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.704845Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.704883Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.705167Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.705245Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.705299Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:195:2151] in generation 2 Leader for TabletID 9437184 is [1:126:2151] sender: [1:211:2057] recipient: [1:14:2061] 2025-04-17T18:15:41.789291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.829325Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:15:41.829544Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.829658Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:216:2214] 2025-04-17T18:15:41.829699Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.829737Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.829777Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.829989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.830036Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.830351Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 
9437184 2025-04-17T18:15:41.830467Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:41.830539Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.830592Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.830658Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:41.830701Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.830734Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.830770Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.830808Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.830934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:212:2211], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.830987Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.831043Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:210:2210], serverId# [1:212:2211], sessionId# [0:0:0] 2025-04-17T18:15:41.833666Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.833734Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.833827Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.834014Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.834062Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:15:41.834122Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.834213Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.834262Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:15:41.834314Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.834349Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.834650Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.834684Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:15:41.834719Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.834753Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.834797Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.834822Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 
executing on unit FinishPropose 2025-04-17T18:15:41.834858Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:15:41.834905Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.834934Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:15:41.847674Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.847739Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.847775Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.847820Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.847889Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.848389Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:222:2220], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.848442Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.848487Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:221:2219], serverId# [1:222:2220], sessionId# [0:0:0] 2025-04-17T18:15:41.848631Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.848668Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.848787Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.848840Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.848887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.848922Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:15:41.857846Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.857923Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.858243Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.858293Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.858364Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.858408Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.858447Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.858491Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.858528Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.858570Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 
is Executed 2025-04-17T18:15:41.858624Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.858665Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.858699Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.858871Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-17T18:15:41.858903Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.858940Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.858964Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.858988Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.859046Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.859085Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:15:41.859128Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:41.859166Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:41.859206Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-17T18:15:41.859261Z node 1 :TX_DATASHARD TRAC ... :14.118559Z node 21 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:16:14.118691Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [21:281:2265], Recipient [21:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [21:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:16:14.118740Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:16:14.118847Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [21:124:2150], Recipient [21:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-17T18:16:14.118885Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T18:16:14.118937Z node 21 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-17T18:16:14.119011Z node 21 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-17T18:16:14.131411Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [21:281:2265], Recipient [21:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [21:281:2265] ServerId: [21:285:2269] } 2025-04-17T18:16:14.131481Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:16:14.164502Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [21:292:2274], Recipient [21:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:14.164581Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:14.164627Z node 21 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [21:290:2273], serverId# [21:292:2274], sessionId# [0:0:0] 2025-04-17T18:16:14.164785Z node 21 
:TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [21:289:2272], Recipient [21:235:2228]: NKikimrTabletBase.TEvGetCounters 2025-04-17T18:16:14.177507Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [21:99:2134], Recipient [21:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 90194315350 } 2025-04-17T18:16:14.177581Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T18:16:14.177973Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [21:294:2276], Recipient [21:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:14.178024Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:14.178080Z node 21 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [21:293:2275], serverId# [21:294:2276], sessionId# [0:0:0] 2025-04-17T18:16:14.178377Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [21:99:2134], Recipient [21:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 90194315350 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? 
?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-17T18:16:14.178417Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:16:14.178505Z node 21 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:16:14.179303Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-17T18:16:14.179387Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:16:14.179439Z node 21 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-17T18:16:14.179484Z node 21 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:16:14.179520Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:16:14.179554Z node 21 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:16:14.179614Z node 21 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-17T18:16:14.179648Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:16:14.179670Z node 21 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:16:14.179689Z node 21 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:16:14.179713Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:14.179754Z node 21 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:16:14.179808Z node 21 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2025-04-17T18:16:14.179855Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T18:16:14.180140Z node 21 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:16:14.180188Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:14.180245Z node 21 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:16:14.181182Z node 21 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2025-04-17T18:16:14.181312Z node 21 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-17T18:16:14.181356Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T18:16:14.181562Z node 21 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:16:14.181594Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:14.182248Z node 21 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-17T18:16:14.182293Z node 21 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:16:14.182684Z node 21 :TX_DATASHARD 
TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2025-04-17T18:16:14.182755Z node 21 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-17T18:16:14.182780Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T18:16:14.182900Z node 21 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:16:14.182925Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:14.183322Z node 21 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-17T18:16:14.183350Z node 21 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:16:14.183740Z node 21 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2025-04-17T18:16:14.183828Z node 21 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-17T18:16:14.183857Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-17T18:16:14.183963Z node 21 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:16:14.183987Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:14.184364Z node 21 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-17T18:16:14.184393Z node 21 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:16:14.496979Z node 21 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-17T18:16:14.497102Z node 21 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:16:14.497189Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:16:14.497232Z node 21 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:16:14.497286Z node 21 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-17T18:16:14.497333Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-17T18:16:14.497440Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:16:14.497478Z node 21 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-17T18:16:14.497521Z node 21 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-17T18:16:14.497565Z node 21 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-17T18:16:14.497623Z node 21 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-17T18:16:14.497656Z node 21 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-17T18:16:14.497706Z node 21 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-17T18:16:14.510568Z node 21 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:16:14.510655Z node 21 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-17T18:16:14.510723Z node 21 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-17T18:16:14.510847Z node 21 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:16:14.513833Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [21:299:2281], Recipient [21:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:14.513916Z node 21 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:14.513976Z node 21 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [21:298:2280], serverId# [21:299:2281], sessionId# [0:0:0] 2025-04-17T18:16:14.514215Z node 21 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [21:297:2279], Recipient [21:235:2228]: NKikimrTabletBase.TEvGetCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:46.007972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:46.008067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:46.008113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:46.008150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:46.008189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:46.008209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:46.008270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:46.008323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:46.008571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:46.079137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:46.079186Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:46.095326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:46.095783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:46.095988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:46.104662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:46.104850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:46.105483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:46.105710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:46.108407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:46.109655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:46.109729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:46.109899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:46.109940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:46.109978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:46.110115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.116303Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:46.252207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:46.252456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.252670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:46.252896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:46.252953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.255565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:46.255704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:46.255905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.255971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:46.256003Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:46.256036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:46.258115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.258168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:46.258223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:46.260191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.260239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.260288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:46.260331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:46.263973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:46.266020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:46.266227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:46.267226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:46.267389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:46.267440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:46.267728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:46.267781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:46.267960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:46.268061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:46.270276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:46.270320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:46.270541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:46.270588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:46.270968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:46.271010Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:46.271103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:46.271133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:46.271165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:46.271234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:46.271278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:46.271333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:46.271376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:15:46.271409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:46.271472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:46.271511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:46.271553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:46.273270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:46.273382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:46.273432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
44 2025-04-17T18:16:13.941149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.941225Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.941307Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.941372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.941668Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.942347Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.942826Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.950969Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.952178Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.952380Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.952493Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.952583Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.952799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.952912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.953190Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954352Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954467Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954535Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954692Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954757Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954817Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.954970Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.955022Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.955127Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:13.955175Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:16:13.955301Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:13.955343Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:13.955393Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:13.955437Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:13.955490Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T18:16:13.955570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2776:4041] message: TxId: 101 2025-04-17T18:16:13.955620Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:13.955703Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:16:13.955744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:16:13.956923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-17T18:16:13.961414Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:16:13.961483Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2777:4042] TestWaitNotification: OK eventTxId 101 2025-04-17T18:16:13.962206Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:13.962514Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 362us result status StatusSuccess 2025-04-17T18:16:13.963152Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> 
TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:16:15.226441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:15.226527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:15.226570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:15.226601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:15.226639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:15.226666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:15.226721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:15.226783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:15.227120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:15.311641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:15.311705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:15.326592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:15.326729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:15.326902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:15.332817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:15.333129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:15.333772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:15.334036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:15.337922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:15.339225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:15.339285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:15.339388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:15.339431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:15.339484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:15.339657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.345933Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:16:15.491827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:15.492059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.492219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:15.492420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:15.492474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.494806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:15.494933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:15.495076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.495129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:15.495180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:15.495214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:15.499192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.499244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:15.499278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:15.501725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.501771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.501821Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:15.501873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:15.505485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:15.507018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:15.507187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:15.508087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:15.508210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:15.508270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:15.508513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:15.508564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:15.508721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:15.508795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:15.510585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:15.510630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:15.510785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:15.510837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:15.511145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.511188Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:15.511316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:15.511351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-17T18:16:15.511388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:15.511418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:15.511455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:15.511505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:15.511538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:15.511566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:15.511640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:15.511681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:15.511711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:15.513461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:15.513560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:15.513599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 46678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 795 } } FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:16:15.808829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969605 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:15.808905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-17T18:16:15.809075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969605 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:15.809137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:16:15.809220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 4294969605 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:15.809283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:15.809320Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been 
received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:16:15.809361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:16:15.809404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-17T18:16:15.810535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 4294969608 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:15.810581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-17T18:16:15.810709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 4294969608 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:15.810749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:16:15.810820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 4294969608 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:15.810863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:15.810888Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.810913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:16:15.810943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:16:15.815686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:16:15.815949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:16:15.821574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:16:15.821678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:16:15.821865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:16:15.822082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.825043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:16:15.825422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:16:15.825482Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-17T18:16:15.825581Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:16:15.825614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:16:15.825660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:16:15.825692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:16:15.825729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-17T18:16:15.825999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.826294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:15.826339Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:16:15.826400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:16:15.826430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:16:15.826466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:16:15.826487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:16:15.826529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-17T18:16:15.826593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:380:2348] message: TxId: 101 2025-04-17T18:16:15.826636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:16:15.826681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:16:15.826716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:16:15.826846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:16:15.826902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-17T18:16:15.826922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-17T18:16:15.826949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:16:15.826967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-17T18:16:15.826982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-17T18:16:15.827020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:16:15.829290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:16:15.829332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:381:2349] TestWaitNotification: OK eventTxId 101 2025-04-17T18:16:15.830347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:15.830664Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 329us result status StatusSuccess 2025-04-17T18:16:15.831173Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] 
sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:15:53.904341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:53.904438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:53.904485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:53.904516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:53.904558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:53.904586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:53.904666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:53.904741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:53.905027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:53.964281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:53.964334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:53.980711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:53.981099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:53.981283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:53.989571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:53.989730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:53.990134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:53.990341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:53.992326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:53.993371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:53.993446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:53.993618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:53.993658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:53.993689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:53.993825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2025-04-17T18:15:54.002566Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:15:54.134346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:54.134603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.134827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:54.135078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:54.135138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.139304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:54.139463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:54.139696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.139763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:54.139808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:54.139849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:54.147006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.147069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:54.147105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:54.150950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.150999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.151046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:54.151084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:54.154423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-04-17T18:15:54.156793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:54.156979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:54.158000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:54.158249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:54.158311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:54.158642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:54.158708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:54.158905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:54.159009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:54.160899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:54.160938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:54.161096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:54.161128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:54.161408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.161443Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:54.161519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:54.161543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:54.161574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:54.161600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:54.161628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:15:54.161671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:54.161713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation 
id: 1:0 2025-04-17T18:15:54.161741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:15:54.161803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:15:54.161855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:15:54.161891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:15:54.163311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:54.163425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:15:54.163470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... -04-17T18:15:54.535996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.536202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.536249Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-17T18:15:54.536340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:15:54.536381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:15:54.536411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-17T18:15:54.536437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:15:54.536478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-17T18:15:54.536529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] message: TxId: 103 2025-04-17T18:15:54.536571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-17T18:15:54.536607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-17T18:15:54.536637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-17T18:15:54.536719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:15:54.537912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-17T18:15:54.537949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:507:2468] TestWaitNotification: OK eventTxId 103 2025-04-17T18:16:00.193653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:16:00.193750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:02.079086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0659 2025-04-17T18:16:02.079234Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0377 2025-04-17T18:16:02.121565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-17T18:16:02.121721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-17T18:16:02.121809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:02.121969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-04-17T18:16:02.122023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-17T18:16:02.122115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:02.122213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-04-17T18:16:02.132572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:16:05.832005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0171 2025-04-17T18:16:05.832114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0101 2025-04-17T18:16:05.878377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-17T18:16:05.878519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-17T18:16:05.878603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:05.878686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-04-17T18:16:05.878728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-17T18:16:05.878775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:05.878815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 
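
The repeating "Got periodic table stats ... / Do not want to split tablet ..." pairs are the schemeshard reporting zero-row shards and declining to split them. A minimal sketch that condenses those records from a log like this one; the regexes mirror only the wording visible above and are not any official YDB tooling:

import re

STATS_RE = re.compile(
    r"Got periodic table stats at tablet (?P<tablet>\d+) from shard (?P<shard>\d+)"
    r".*?dataSize (?P<data>\d+) rowCount (?P<rows>\d+) cpuUsage (?P<cpu>[\d.]+)"
)
SPLIT_RE = re.compile(r"Do not want to split tablet (?P<shard>\d+)")

def summarize_split_decisions(log_text: str) -> None:
    """Print the last reported stats per shard plus the split decision, if logged."""
    stats = {}
    no_split = set()
    for m in STATS_RE.finditer(log_text):
        stats[m["shard"]] = (int(m["data"]), int(m["rows"]), float(m["cpu"]))
    for m in SPLIT_RE.finditer(log_text):
        no_split.add(m["shard"])
    for shard, (data, rows, cpu) in sorted(stats.items()):
        decision = "no split" if shard in no_split else "unknown"
        print(f"shard {shard}: dataSize={data} rowCount={rows} cpuUsage={cpu} -> {decision}")
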
2025-04-17T18:16:05.890389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:16:09.532202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0171 2025-04-17T18:16:09.532293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0101 2025-04-17T18:16:09.573373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-17T18:16:09.573547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-17T18:16:09.573623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:09.573711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-04-17T18:16:09.573764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-17T18:16:09.573822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:09.573860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-04-17T18:16:09.585796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:16:12.951357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0054 2025-04-17T18:16:12.951484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.003 2025-04-17T18:16:12.993806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-17T18:16:12.993959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-17T18:16:12.994039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:12.994115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-04-17T18:16:12.994196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-17T18:16:12.994255Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T18:16:12.994295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-04-17T18:16:13.005085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-17T18:16:16.558460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-17T18:16:16.558601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T18:16:16.558832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-17T18:16:16.559032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 60024000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-17T18:16:16.566342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:16:16.567314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T18:16:16.567377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:16:16.583093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T18:16:16.583305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T18:16:16.583354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.024000Z, at schemeshard: 72057594046678944 2025-04-17T18:16:16.583412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 24540, MsgBus: 14525 2025-04-17T18:16:03.502491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347549287102696:2250];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:03.502676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e27/r3tmp/tmpQEugst/pdisk_1.dat 2025-04-17T18:16:03.967525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:03.967666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:03.969257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-17T18:16:04.001103Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24540, node 1 2025-04-17T18:16:04.124884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:04.124911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:04.124938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:04.125075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14525 TClient is connected to server localhost:14525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:04.683248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:04.697158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:04.707926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:04.852608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:05.017852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:05.109617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:06.787154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347562172006161:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:06.787260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:07.136531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.171665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.202331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.233975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.264123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.297361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.348677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.410480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347566466974114:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:07.410591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:07.410632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347566466974119:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:07.414213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:07.429313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347566466974121:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:16:07.507170Z node 1 :TX_PROXY ERROR: Actor# [1:7494347566466974176:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:08.449093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:08.503676Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347549287102696:2250];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:08.503735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:16:08.539218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17855, MsgBus: 3532 2025-04-17T18:16:10.144062Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347580625917209:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:10.158960Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e27/r3tmp/tmpmE3aco/pdisk_1.dat 2025-04-17T18:16:10.270807Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:10.288250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:10.288328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:10.290071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17855, node 2 2025-04-17T18:16:10.361992Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:10.362017Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:10.362024Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:10.362141Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3532 TClient is connected to server localhost:3532 WaitRootIsUp 'Root'... 
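
Both node runs show the same startup sequence: the workload service fails to fetch the "default" resource pool (NOT_FOUND), creates it, retries with "Transaction ... completed, doublechecking", and TX_PROXY then reports "path exist, request accepts it" — a benign race between concurrent creators. The shape of that idempotent create-then-verify loop, sketched in Python (the real service is C++ inside YDB; the function names here are hypothetical):

import time

def create_idempotent(create_fn, exists_fn, attempts: int = 5, delay_s: float = 0.5) -> bool:
    """Issue the create, then poll until the object is visible.

    Mirrors the 'completed, doublechecking' pattern above: a concurrent
    creator may win the race, and 'path exist' counts as success.
    """
    create_fn()
    for _ in range(attempts):
        if exists_fn():
            return True
        time.sleep(delay_s)
    return False
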
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:16:10.774251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:10.800593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:10.858585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:11.028371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:11.089775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:13.211924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347593510820834:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:13.212079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:13.262715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.334617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.364265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.394646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.431865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.502503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.576849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.659811Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347593510821513:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:13.659914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:13.660049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347593510821518:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:13.662695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:13.670802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347593510821520:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:16:13.762981Z node 2 :TX_PROXY ERROR: Actor# [2:7494347593510821578:3548] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:14.765975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:14.878694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:15.144605Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347580625917209:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:15.144668Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TestDataErasure::DataErasureManualLaunch >> TestDataErasure::DataErasureWithCopyTable >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] >> TConsoleConfigTests::TestModifyConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:16:00.637819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:00.637919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:00.637973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:00.638007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:00.638048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:00.638096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:00.638197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:00.638264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:00.638537Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:00.714808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:00.714864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:00.730745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:00.731103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:00.731280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:00.743898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:00.744093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:00.744679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.744900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:00.747621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.748825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:00.748904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.749082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:00.749130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:00.749166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:00.749283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.755844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:16:00.878402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:00.878630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.878813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:00.879031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:00.879080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.881616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.881748Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:00.881911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.881966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:00.882000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:00.882036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:00.883821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.883872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:00.883909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:00.885469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.885508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.885559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.885600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.889159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:00.890853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:00.891020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:00.891927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.892060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:00.892104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.892342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:00.892388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.892544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:00.892629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:00.894416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:00.894460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:00.894609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.894649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:00.895008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.895048Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:00.895131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:00.895163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.895201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:00.895226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.895258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:00.895312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.895342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:00.895370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:00.895436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:00.895495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:00.895537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:00.897305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:00.897407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:00.897456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:17.862969Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:16:17.863179Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:17.863219Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:16:17.863265Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:16:17.863627Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.863682Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:16:17.865348Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:17.865449Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:17.865479Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:16:17.865507Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-17T18:16:17.865538Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:17.866251Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:17.866338Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:17.866368Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:16:17.866402Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-17T18:16:17.866429Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:16:17.866491Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-17T18:16:17.867731Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 
1175 } } 2025-04-17T18:16:17.867779Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:17.867899Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1175 } } 2025-04-17T18:16:17.868010Z node 18 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1175 } } 2025-04-17T18:16:17.869264Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 77309413623 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:17.869300Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:17.869390Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 77309413623 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:17.869426Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:16:17.869519Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 77309413623 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:16:17.869578Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:17.869625Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.869665Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:16:17.869704Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:16:17.875181Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:16:17.875396Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:16:17.875656Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.876808Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.876946Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.876980Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:16:17.877067Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:17.877104Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:17.877149Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:17.877184Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:17.877227Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T18:16:17.877302Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:336:2315] message: TxId: 101 2025-04-17T18:16:17.877343Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:17.877382Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:16:17.877410Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:16:17.877539Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:16:17.879301Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:16:17.879350Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:337:2316] TestWaitNotification: OK eventTxId 101 2025-04-17T18:16:17.879806Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:17.879994Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 234us result status StatusSuccess 2025-04-17T18:16:17.880440Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleTests::TestCreateTenant >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:16:11.142905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:11.143019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:11.143084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:11.143126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:11.143175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:11.143208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:11.143300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:11.143400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:11.143732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:11.231145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:11.231209Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:11.246755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-17T18:16:11.247195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:11.247398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:11.255690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:11.255880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:11.256532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:11.256803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:11.259694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:11.261043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:11.261109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:11.261286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:11.261337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:11.261378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:11.261513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.268132Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:16:11.399117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:11.399370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.399572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:11.399831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:11.399904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.402394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:11.402540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:11.402744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T18:16:11.402835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:11.402881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:11.402917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:11.404796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.404852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:11.404887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:11.406802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.406853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.406909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:11.406956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.410664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:11.412537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:11.412723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:11.413753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:11.413903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:11.413956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:11.414282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:11.414346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:11.414524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:11.414653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
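The "Change state for txid 1:0" records above walk a single schemeshard sub-operation through its lifecycle: 2 -> 3 -> 128 -> 240, i.e. TCreateParts -> NSubDomainState::TConfigureParts -> NSubDomainState::TPropose -> TDone. As a reading aid, here is a minimal sketch of that progression modeled as a state machine; the numeric codes and stage names are taken from the log itself, while the enum, function, and program around them are hypothetical illustration, not YDB's actual schemeshard implementation.

#include <cstdint>
#include <iostream>

// Hypothetical model of the sub-operation states observed in the log.
// The numeric codes (2, 3, 128, 240) mirror the
// "Change state for txid N:0 A -> B" records; the rest is illustrative.
enum class ETxState : uint8_t {
    CreateParts    = 2,   // "TCreateParts opId# ... ProgressState"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128, // "NSubDomainState::TPropose HandleReply TEvOperationPlan"
    Done           = 240, // "TDone opId# ... ProgressState"
};

ETxState NextState(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done;
        case ETxState::Done:           return ETxState::Done;
    }
    return ETxState::Done;
}

int main() {
    // Replays the transitions seen in the log: 2 -> 3, 3 -> 128, 128 -> 240.
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done; s = NextState(s)) {
        std::cout << "Change state: " << static_cast<int>(s)
                  << " -> " << static_cast<int>(NextState(s)) << "\n";
    }
}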
2025-04-17T18:16:11.416907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:11.416971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:11.417177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:11.417220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:11.417623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.417673Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:11.417808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:11.417845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.417888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:11.417917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.417953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:11.418012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.418048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:11.418078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:11.418160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:11.418235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:11.418269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:11.419969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:11.420101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:11.420158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
lete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.655060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.655276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.655366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.655440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.655558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.655656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.655723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.659953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.662861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.662996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.663052Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:16:17.663151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:17.663184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:17.663222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:17.663251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:17.663284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-17T18:16:17.663380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2804:4069] message: TxId: 101 2025-04-17T18:16:17.663450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:17.663508Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:16:17.663539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:16:17.664679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-17T18:16:17.667593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:16:17.667643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2805:4070] TestWaitNotification: OK eventTxId 101 2025-04-17T18:16:17.668289Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:17.668567Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 312us result status StatusSuccess 2025-04-17T18:16:17.669230Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 
72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744913777.669886 551135 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-04-17T18:16:17.672805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:17.672994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:17.673611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-04-17T18:16:17.676631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:17.676808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TableCreator::CreateTables >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:16:20.483980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:20.484074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:20.484118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:20.484153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:20.484192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:20.484216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:20.484275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:20.484338Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:20.484674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:20.565211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:20.565269Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:20.584933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:20.585390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:20.585591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:20.594116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:20.594327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:20.594978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:20.595178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:20.597733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:20.598995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:20.599058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:20.599250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:20.599290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:20.599338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:20.599495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.607833Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:16:20.763696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:20.763936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.764144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:20.764383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:20.764437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-17T18:16:20.767286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:20.767430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:20.767669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.767733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:20.767766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:20.767813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:20.769875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.769949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:20.769987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:20.771909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.771960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.772007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:20.772081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:20.775630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:20.777631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:20.777807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:20.778764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:20.778883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:20.778936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:20.779261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
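The records immediately above show the propose/plan handshake the tests exercise: TOperation DoPropose sends the transaction to coordinator 72057594046316545, FAKE_COORDINATOR adds it at step 5000001, and the plan step is sent back to the schemeshard tablet before the operation advances past state 128. Below is a self-contained sketch of that two-phase pattern under stated assumptions: the FakeCoordinator type and its interface are invented for illustration; only the first step value 5000001 and the PlanResolution of 50 are taken from the log.

#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

// Illustrative coordinator: assigns monotonically increasing plan steps
// to proposed transactions, mimicking "Add transaction: 1 at step: 5000001".
struct FakeCoordinator {
    uint64_t NextStep = 5000001;                         // first step seen in the log
    std::vector<std::pair<uint64_t, uint64_t>> Planned;  // (txId, step)

    uint64_t Propose(uint64_t txId) {
        uint64_t step = NextStep;
        NextStep += 50;  // "PlanResolution: 50" from the describe output
        Planned.emplace_back(txId, step);
        std::cout << "Add transaction: " << txId << " at step: " << step << "\n";
        return step;
    }
};

int main() {
    FakeCoordinator coordinator;
    // The schemeshard side: propose, receive the assigned plan step, then execute.
    uint64_t step = coordinator.Propose(/*txId=*/1);
    std::cout << "Send Plan to tablet for txId: 1 at step: " << step << "\n";
}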
2025-04-17T18:16:20.779338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:20.779509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:20.779585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:20.781726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:20.781768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:20.781966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:20.782005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:20.782405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.782457Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:20.782583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:20.782620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:20.782655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:20.782686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:20.782717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:20.782753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:20.782784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:20.782812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:20.782876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:20.782907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:20.782934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:20.784629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:20.784722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:20.784757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
20.827599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:20.827661Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-17T18:16:20.827804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-17T18:16:20.828024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:20.828103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-17T18:16:20.830829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:20.830902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:20.831085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:16:20.831204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:20.831249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-17T18:16:20.831311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-17T18:16:20.831647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:20.831704Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:16:20.831814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:20.831850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:20.831888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-17T18:16:20.831924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:20.831964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-17T18:16:20.832007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-17T18:16:20.832049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:16:20.832087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:16:20.832165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:16:20.832228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-17T18:16:20.832272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 4 2025-04-17T18:16:20.832321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-17T18:16:20.833088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:20.833190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:20.833226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:16:20.833271Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-17T18:16:20.833312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:20.834127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:20.834254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-17T18:16:20.834309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-17T18:16:20.834338Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-17T18:16:20.834366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:16:20.834432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-17T18:16:20.838031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:16:20.838323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:16:20.838548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:16:20.838591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:16:20.838986Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:16:20.839071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:16:20.839119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:299:2290] TestWaitNotification: OK eventTxId 101 2025-04-17T18:16:20.839563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:20.839746Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 211us result status StatusSuccess 2025-04-17T18:16:20.840095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-17T18:16:20.843826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:20.844088Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-17T18:16:20.844188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-04-17T18:16:20.844329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2025-04-17T18:16:20.847286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-17T18:16:20.847470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:16:20.847743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:16:20.847783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:16:20.848245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:16:20.848323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:16:20.848353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 102 >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TestDataErasure::DataErasureManualLaunch [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] |93.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:16:18.447456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:18.447549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:18.447599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:18.447633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:18.447675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:18.447704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:18.447760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:18.447866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:18.448199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:18.538991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:18.539075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:18.553958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:18.554088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:18.554280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:18.560861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:18.561156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:18.561819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:18.562152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:18.569911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:18.571399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-17T18:16:18.571467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:18.571600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:18.571649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:18.571701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:18.571888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.578127Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:16:18.705776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:18.705990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.706262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:18.706467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:18.706508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.711000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:18.711145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:18.711355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.711423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:18.711474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:18.711515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:18.715078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.715145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:18.715183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:18.717806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.717883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.717923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:18.717983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.721825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:18.723810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:18.724001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:18.724923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:18.725049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:18.725108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:18.725355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:18.725403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:18.725564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:18.725656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:18.727596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:18.727645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:18.727808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:18.727846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:18.728171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.728215Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:18.728304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:18.728337Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.728388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:18.728437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.728476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:18.728511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.728547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 1:0 2025-04-17T18:16:18.728572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:18.728625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:18.728660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:18.728691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:18.730480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:18.730586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:18.730621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
LAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1896:3565], Recipient [1:834:2718]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1897:3566] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-17T18:16:20.026877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:16:20.026913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409551 2025-04-17T18:16:20.028561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-04-17T18:16:20.055110Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:634:2552], Recipient [1:458:2410]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409549 Status: OK 2025-04-17T18:16:20.055170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-17T18:16:20.055241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-04-17T18:16:20.055307Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409549, shardIdx# 72075186233409546:4 in# 60 ms, next wakeup in# 14.940000s, rate# 1, in queue# 0 shards, running# 1 shards at schemeshard 72075186233409546 2025-04-17T18:16:20.057159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# false 2025-04-17T18:16:20.083331Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:642:2558], Recipient [1:458:2410]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409550 Status: OK 2025-04-17T18:16:20.083446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-17T18:16:20.083545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-04-17T18:16:20.083635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 62 ms, next wakeup in# 14.938000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-04-17T18:16:20.083732Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. 
Send response to root schemeshard 2025-04-17T18:16:20.083793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 1 2025-04-17T18:16:20.086213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# true 2025-04-17T18:16:20.086675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1912:3581], Recipient [1:293:2277]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:20.086714Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:20.086751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T18:16:20.086926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:458:2410], Recipient [1:293:2277]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 1 Status: COMPLETED 2025-04-17T18:16:20.086956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-17T18:16:20.087029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-17T18:16:20.087092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 63 ms, next wakeup# 599.937000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-17T18:16:20.087177Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. Send request to BS controller 2025-04-17T18:16:20.087462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1911:3580], Recipient [1:458:2410]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1912:3581] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-17T18:16:20.087501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:16:20.087548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-04-17T18:16:20.089307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-17T18:16:20.089359Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T18:16:20.090904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1916:3585], Recipient [1:293:2277]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1917:3586] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:16:20.090955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:16:20.090982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-04-17T18:16:20.091257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-04-17T18:16:20.091311Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvBlobStorage::TEvControllerShredResponse 2025-04-17T18:16:20.091357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T18:16:20.091445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T18:16:20.091502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-17T18:16:20.091581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T18:16:20.091637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T18:16:21.062508Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T18:16:21.062578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T18:16:21.062626Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T18:16:21.062726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:293:2277], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:21.062755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:21.062972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-04-17T18:16:21.063007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T18:16:21.063052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T18:16:21.063132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T18:16:21.063163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-17T18:16:21.063225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-17T18:16:21.063266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-17T18:16:21.480517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:21.480605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:21.480722Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:293:2277]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T18:16:21.480752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T18:16:21.480779Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T18:16:21.480906Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:293:2277], Recipient [1:293:2277]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:21.480932Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:21.481062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:293:2277]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-17T18:16:21.481087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T18:16:21.481113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T18:16:21.481176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T18:16:21.481201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-17T18:16:21.481260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-04-17T18:16:21.487172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-17T18:16:21.487935Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1996:3665], Recipient [1:293:2277]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:21.488001Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:21.488037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T18:16:21.488197Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:275:2266], Recipient [1:293:2277]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-17T18:16:21.488232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-17T18:16:21.488277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:16:00.044452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:00.044529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:00.044563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:00.044593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:00.044630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:00.044670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:00.044726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:00.044780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:00.045076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:00.126623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:00.126696Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:00.141154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:00.141278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:00.141482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:00.148756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:00.149031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:00.149657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.149931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:00.154209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.155413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:00.155471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.155569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:00.155608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:00.155644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:00.155811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.171490Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:16:00.316133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-04-17T18:16:00.316376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.316556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:00.316795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:00.316851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.319088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.319211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:00.319374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.319432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:00.319465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:00.319497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:00.322490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.322541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:00.322580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:00.328324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.328384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.328431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.328483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.332312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:00.335095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:00.335321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:00.336292Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:00.336430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:00.336494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.336743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:00.336792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:00.336982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:00.337060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:00.339076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:00.339118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:00.339273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:00.339314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:00.339618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:00.339660Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:00.339759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:00.339790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.339826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:00.339852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.339888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:00.339950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:00.339984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 1:0 2025-04-17T18:16:00.340009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:00.340095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:00.340134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:00.340163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:00.341912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:00.342027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:00.342080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 215 } } 2025-04-17T18:16:22.487428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-17T18:16:22.487545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.487585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-17T18:16:22.489923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.490257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.490326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:22.490437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-17T18:16:22.490608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:22.496056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T18:16:22.496237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-17T18:16:22.496704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:22.496850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:22.496919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-17T18:16:22.497268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-17T18:16:22.497463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-17T18:16:22.504360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:22.504421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:16:22.504704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:22.504754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T18:16:22.505258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.505341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:16:22.506095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:16:22.506274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:16:22.506317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:16:22.506358Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-17T18:16:22.506401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:16:22.506497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:16:22.507859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1384 } } 2025-04-17T18:16:22.507914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:22.508052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1384 } } 2025-04-17T18:16:22.508145Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 
1384 } } 2025-04-17T18:16:22.510771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:16:22.510822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:22.510966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:16:22.511035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:16:22.511125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 310 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-17T18:16:22.511195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:22.511239Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.511304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:16:22.511350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-17T18:16:22.516409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:16:22.516654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.517054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.517352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:16:22.517397Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-17T18:16:22.517543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:16:22.517629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:16:22.517681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-17T18:16:22.517714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:16:22.517753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-17T18:16:22.517824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2317] message: TxId: 102 2025-04-17T18:16:22.517884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-17T18:16:22.517945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts are done, operation id: 102:0 2025-04-17T18:16:22.517987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:16:22.518120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:16:22.524328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:16:22.524416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:620:2575] TestWaitNotification: OK eventTxId 102 2025-04-17T18:16:22.524918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-17T18:16:22.524997Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2025-04-17T18:16:22.537378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-17T18:16:22.537544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-17T18:16:22.537611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.039500Z, at schemeshard: 72057594046678944 2025-04-17T18:16:22.537669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> TFlatTest::AutoSplitMergeQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 12450, MsgBus: 18003 2025-04-17T18:16:10.593305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347577722524250:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:10.593458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e1e/r3tmp/tmp84uumi/pdisk_1.dat 2025-04-17T18:16:10.993922Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12450, node 1 2025-04-17T18:16:11.029326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:11.030214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:11.054219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:11.086615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:11.086640Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:11.086650Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T18:16:11.086738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18003 TClient is connected to server localhost:18003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:11.569655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:11.589227Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:11.593511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:11.711137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:11.856367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:11.927710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:13.467050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347590607427927:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:13.467156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:13.791920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.825490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.852958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.886858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.922514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:13.960310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:14.024743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:14.092750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347594902395883:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:14.092830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:14.093136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347594902395888:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:14.096810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:14.107923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347594902395890:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:16:14.203334Z node 1 :TX_PROXY ERROR: Actor# [1:7494347594902395947:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:15.277985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:15.343923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:15.597965Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347577722524250:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:15.598268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28601, MsgBus: 28551 2025-04-17T18:16:16.863200Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347606530100110:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:16.866288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e1e/r3tmp/tmppcije6/pdisk_1.dat 2025-04-17T18:16:17.049467Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:17.078571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:17.078654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:17.081664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28601, node 2 2025-04-17T18:16:17.174674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:17.174692Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:17.174697Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:17.174775Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28551 TClient is connected to server localhost:28551 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:17.595492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:17.603824Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:16:17.623207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:16:17.716141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.903862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:17.976900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:20.050705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347623709971059:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:20.050774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:20.098243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:20.130435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:20.169083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:20.203127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:20.236783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:20.300272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:20.368850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:20.472198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347623709971734:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:20.472300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:20.472458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347623709971739:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:20.475704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:20.485486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347623709971741:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:16:20.572340Z node 2 :TX_PROXY ERROR: Actor# [2:7494347623709971797:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:21.585519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.651834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.866338Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347606530100110:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:21.866399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> TableCreator::CreateTables [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-04-17T18:15:01.213377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347283919674493:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:01.213452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001dc2/r3tmp/tmp0ykYao/pdisk_1.dat 2025-04-17T18:15:01.697326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:01.707259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:01.707361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:01.713892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11634 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:15:02.020741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.034348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:02.048317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:15:02.061315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913702189 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) A-0 2025-04-17T18:15:02.586084Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.10, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.039s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-17T18:15:02.594383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 B-0 2025-04-17T18:15:02.621191Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.12, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.028s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-17T18:15:02.624656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-04-17T18:15:02.696156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:15:02.696297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-04-17T18:15:02.699445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2025-04-17T18:15:02.699584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-04-17T18:15:02.699739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-04-17T18:15:02.968244Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 2 blobs 1r (max 1), put Spent{time=0.049s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-17T18:15:02.981037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-04-17T18:15:03.020010Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.18, eph 2} end=0, 3 blobs 2r (max 2), put Spent{time=0.044s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583126 0 0)b }, ecr=1.000 2025-04-17T18:15:03.080787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:15:03.080884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-04-17T18:15:03.080933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583004 2025-04-17T18:15:03.081084Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 1, Rows# 2, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after 
compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-17T18:15:03.082288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 599 seconds 2025-04-17T18:15:03.082345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-04-17T18:15:03.082460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T18:15:03.106797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 25 ms, with status# 1, next wakeup in# 599.974261s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-17T18:15:03.149953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-04-17T18:15:03.248173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:15:03.248265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-04-17T18:15:03.248308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583004 2025-04-17T18:15:03.248366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-04-17T18:15:03.251033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-04-17T18:15:03.295971Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 2 blobs 1r (max 1), put Spent{time=0.030s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-17T18:15:03.315037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-04-17T18:15:03.345776Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 3} end=0, 4 blobs 3r (max 3), put Spent{time=0.035s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874672 0 0)b }, ecr=1.000 2025-04-17T18:15:03.415759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:15:03.415907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874506 row count 3 2025-04-17T18:15:03.415961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874506 2025-04-17T18:15:03.416044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-04-17T18:15:03.416509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 
2025-04-17T18:15:03.419246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 2025-04-17T18:15:03.518383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T18:15:03.518481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874522 row count 3 2025-04-17T18:15:03.518532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874522 2025-0 ... d: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-04-17T18:16:21.735202Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037916 not found 2025-04-17T18:16:21.736300Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2025-04-17T18:16:21.736370Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found 2025-04-17T18:16:21.986540Z node 3 :FLAT_TX_SCHEMESHARD WARN: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976710688:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037924 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\023\001\000\000" } SchemeshardId: 72057594046644480 2025-04-17T18:16:22.015735Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found 2025-04-17T18:16:22.106612Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037923 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 26 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-04-17T18:16:23.038316Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-04-17T18:16:23.038356Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-04-17T18:16:23.042046Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037925 not found 2025-04-17T18:16:23.110376Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037930 not found 2025-04-17T18:16:23.112583Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-04-17T18:16:23.195162Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-04-17T18:16:23.197413Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037924 not found 2025-04-17T18:16:23.265986Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037932 not found 2025-04-17T18:16:23.266021Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037931 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744913747150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag >> ResourcePoolsDdl::TestCreateResourcePool >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState >> KqpWorkloadServiceDistributed::TestDistributedQueue >> KqpWorkloadService::TestQueueSizeSimple >> KqpWorkloadServiceActors::TestPoolFetcher >> KqpExtractPredicateLookup::ComplexRange [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-04-17T18:16:21.523944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347624755635502:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:21.524466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ce9/r3tmp/tmpALQnHl/pdisk_1.dat 2025-04-17T18:16:22.083949Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:22.120564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:22.120660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:22.123193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20963 TServer::EnableGrpc on GrpcPort 10136, node 1 2025-04-17T18:16:22.482788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:22.482814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:22.482823Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:22.482947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:16:22.826440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:22.871166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:16:22.873051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TestDataErasure::DataErasureWithCopyTable [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::ComplexRange [GOOD] Test command err: Trying to start YDB, gRPC: 25699, MsgBus: 5944 2025-04-17T18:13:58.170825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347012194336025:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.170901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e2/r3tmp/tmpqR9X7C/pdisk_1.dat 2025-04-17T18:13:58.657868Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.673125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.673259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.676140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25699, node 1 2025-04-17T18:13:58.842492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.842520Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.842532Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.842648Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5944 TClient is connected to server localhost:5944 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.552723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.588720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.808798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.972771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:00.051519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.432914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347025079239681:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.433039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.787244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.828000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.862399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.891577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.965430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.035703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.108604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.166046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347029374207648:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.166122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.166172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347029374207653:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.169400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.178384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347029374207655:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.254217Z node 1 :TX_PROXY ERROR: Actor# [1:7494347029374207712:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.166956Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347012194336025:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.167029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5543, MsgBus: 31462 2025-04-17T18:14:05.146074Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347042290911885:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:05.146165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e2/r3tmp/tmp2Tq7eo/pdisk_1.dat 2025-04-17T18:14:05.294771Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:05.311674Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:05.311743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:05.313369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5543, node 2 2025-04-17T18:14:05.376303Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:05.376321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:05.376329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:05.376436Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31462 TClient is connected to server localhost:31462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:05.795254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:14:05.810410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.866904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.014850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:06.104593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... HARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:16:08.633163Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:16:08.707019Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:16:08.768920Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:16:08.822823Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:16:08.881453Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5400, MsgBus: 2494 2025-04-17T18:16:12.911640Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7494347585287738392:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:12.911721Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015e2/r3tmp/tmptoUD4n/pdisk_1.dat 2025-04-17T18:16:13.142615Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:13.148661Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:13.148767Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:13.152321Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5400, node 14 2025-04-17T18:16:13.267079Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-17T18:16:13.267122Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:13.267135Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:13.267334Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2494 TClient is connected to server localhost:2494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:16:14.400024Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:14.414930Z node 14 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:14.432499Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:14.550580Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:14.855711Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:15.000153Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:17.916231Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7494347585287738392:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:17.916342Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:16:18.734346Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7494347611057543972:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:18.734495Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:18.839316Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:18.895738Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:18.968806Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:19.032038Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:19.114896Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:19.176220Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:19.243920Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:19.347859Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7494347615352511941:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:19.348015Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:19.348336Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7494347615352511946:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:19.353881Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:19.372866Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7494347615352511948:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:16:19.457979Z node 14 :TX_PROXY ERROR: Actor# [14:7494347615352512004:3563] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:21.519614Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.603523Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.681386Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.744378Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.806820Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.882579Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:16:21.972589Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:16:22.041013Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:16:22.104234Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:16:22.255736Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithCopyTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:73:2058] recipient: [1:59:2100] 2025-04-17T18:16:18.328293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:18.328478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:18.328520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:18.328555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:18.329362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:18.329424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:18.329527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:18.329593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:18.331305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:18.418793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:18.418857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:18.427090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:18.427209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:18.428328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:18.435079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:18.435349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:18.438485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:18.439415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:18.443046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:18.456383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:18.456458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:18.456552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:18.456593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:18.456674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:18.458387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.461090Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:148:2058] recipient: [1:16:2063] 2025-04-17T18:16:18.577421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:18.579049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.579953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:18.581261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:18.581328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.582492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:18.582620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:18.582836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.582945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:18.582997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:18.583042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:18.583628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.583678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:18.583709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:18.584082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.584119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.584157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:18.584210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.588514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:18.588938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:18.589751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:18.590712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:18.590832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 74 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:18.590889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:18.592172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:18.592234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:18.592440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:18.592511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:18.593601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:18.593666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:18.593820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:18.593865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:124:2135], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:18.594219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:18.594260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:18.594346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:18.594382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.594424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:18.594454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.594496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:18.594534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:18.594565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:18.594604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:18.594671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:18.594706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:18.594737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:18.596416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:18.596498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:18.596534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... 121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T18:16:26.526063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:16:26.526141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:16:26.526192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 2 2025-04-17T18:16:26.526282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-17T18:16:26.526316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-17T18:16:26.526460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72075186233409546:6 data size 5019511 row count 49 2025-04-17T18:16:26.526537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 49, DataSize 5019511 2025-04-17T18:16:26.526604Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 49, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-04-17T18:16:26.526664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409551 2025-04-17T18:16:26.526709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72075186233409546:7 data size 5121950 row count 50 2025-04-17T18:16:26.526744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-04-17T18:16:26.526781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-04-17T18:16:26.526808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409552 2025-04-17T18:16:26.526866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-17T18:16:26.538873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T18:16:26.538950Z node 1 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T18:16:26.538991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2025-04-17T18:16:26.560950Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.561017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.561116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.561147Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.574606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.574684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.574774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.574801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.609090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.609160Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.609262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.609291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.619748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.619823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.619939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.619967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.654101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.654171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.654297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.654328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.666730Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.666803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.666889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.666917Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.706660Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.706735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.706838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.706869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.717325Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.717401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.717492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:275:2238], Recipient [1:275:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.717520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.754638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.754710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T18:16:26.754806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:183:2176], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.754834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T18:16:26.765291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:183:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T18:16:26.765363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-17T18:16:26.765391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-17T18:16:26.765586Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 268637738, Sender [1:186:2178], Recipient [1:183:2176]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-17T18:16:26.765614Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-17T18:16:26.765642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-17T18:16:26.765703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-17T18:16:26.765733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-17T18:16:26.765795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2025-04-17T18:16:26.765829Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 35 s 2025-04-17T18:16:26.766365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-17T18:16:26.769242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1760:3464], Recipient [1:183:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:26.769301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:16:26.769337Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-17T18:16:26.769437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:169:2169], Recipient [1:183:2176]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-17T18:16:26.769477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-17T18:16:26.769511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> KqpRanges::ValidatePredicatesDataQuery [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess |93.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateSharedTenant >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ValidatePredicatesDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 26179, MsgBus: 23721 2025-04-17T18:14:46.280367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347217135717908:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:46.280694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00154b/r3tmp/tmpwsVzqF/pdisk_1.dat 2025-04-17T18:14:46.609013Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26179, node 1 2025-04-17T18:14:46.678295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:46.678404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:46.680020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:46.687563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:46.687580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:46.687589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:46.687703Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23721 TClient is connected to server localhost:23721 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:14:47.151554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.169922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.299519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.453650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:47.520576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:49.147103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347230020621402:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.147218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.487411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.517429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.548345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.617287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.651681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.685404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.757983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.839689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347230020622075:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.839769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.839976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347230020622080:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:49.844287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:49.855646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347230020622082:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:49.955217Z node 1 :TX_PROXY ERROR: Actor# [1:7494347230020622139:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:50.912451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.121610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.247538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.280305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347217135717908:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:51.280370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:51.368962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:14:51.558276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
<main>: Warning: Type annotation, code: 1030
<main>:1:44: Warning: At function: Coalesce
<main>:1:58: Warning: At function: SqlIn
<main>:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 19748, MsgBus: 11577 2025-04-17T18:14:52.888957Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347243179848432:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:52.889002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00154b/r3tmp/tmpHXbA3N/pdisk_1.dat 2025-04-17T18:14:52.990718Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19748, node 2 2025-04-17T18:14:53.038331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:53.038438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:53.043504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:14:53.074767Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:53.074791Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:53.074799Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:14:53.074924Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11577 TClient is connected to server localhost:11577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Child ...
];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] 2025-04-17T18:15:58.807633Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913758840, txId: 281474976710788] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NULL ORDER BY `Value`; 2025-04-17T18:15:59.076190Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913759106, txId: 281474976710790] shutting down 2025-04-17T18:15:59.346957Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913759379, txId: 281474976710792] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Value = 20 ORDER BY `Value`; 2025-04-17T18:15:59.603735Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913759638, txId: 281474976710794] shutting down 2025-04-17T18:15:59.834829Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913759869, txId: 281474976710796] shutting down EXPECTED: [[[20u]]] RECEIVED: [[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE (Key1 <= 1000) OR (Key1 > 2000 AND Key1 < 5000) OR (Key1 >= 8000) ORDER BY `Value`; 2025-04-17T18:16:00.363235Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913760387, txId: 281474976710798] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < NULL ORDER BY `Value`; 2025-04-17T18:16:00.890527Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913760926, txId: 281474976710800] shutting down 2025-04-17T18:16:00.994878Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913760990, txId: 281474976710802] shutting down 2025-04-17T18:16:01.111941Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744913761108, txId: 281474976710804] shutting down EXPECTED: [] RECEIVED: [] 2025-04-17T18:16:01.175755Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037924 not found 2025-04-17T18:16:01.175798Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037922 not found 2025-04-17T18:16:01.180715Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037923 not found Trying to start YDB, gRPC: 27104, MsgBus: 22376 2025-04-17T18:16:02.432874Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494347544213725538:2059];send_to=[0:7307199536658146131:7762515]; 
2025-04-17T18:16:02.432933Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00154b/r3tmp/tmpVKgpSA/pdisk_1.dat 2025-04-17T18:16:02.652999Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:02.689885Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:02.690006Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:02.695289Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27104, node 7 2025-04-17T18:16:02.826442Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:02.826473Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:02.826484Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:02.826640Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22376 TClient is connected to server localhost:22376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:03.482585Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.502058Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.638441Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.869894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.990330Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:06.863497Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347561393596488:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:06.863608Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:06.939917Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:06.990357Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.063784Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.145147Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.184745Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.267695Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.315461Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:07.418596Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347565688564456:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:07.418726Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:07.418985Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494347565688564461:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:07.424242Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:07.433731Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494347544213725538:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:07.433819Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:16:07.438958Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494347565688564463:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:16:07.505879Z node 7 :TX_PROXY ERROR: Actor# [7:7494347565688564522:3554] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:09.433552Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.621176Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:16:17.621202Z node 7 :IMPORT WARN: Table profiles were not loaded >> KqpPg::TempTablesWithCache [GOOD] >> KqpPg::TableDeleteWhere+useSink >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestValidation >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >>
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> Secret::ValidationQueryService [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> Secret::Validation [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: 2025-04-17T18:13:40.506356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:40.506856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:40.506907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a80/r3tmp/tmpAbtXd5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23993, node 1 TClient is connected to server localhost:27812 2025-04-17T18:13:41.075840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:41.113946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:41.117560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:41.117624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:41.117662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:41.118082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:41.153912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:41.154081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:41.165718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-17T18:13:52.834701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.834808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.834884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.843995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T18:13:52.863392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T18:13:52.906816Z node 1 :TX_PROXY ERROR: Actor# [1:813:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:52.951009Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:822:2676], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2025-04-17T18:13:52.952621Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGIwZmIzYmYtZTM0NjNmOWItYjVhNWMwZjItMTJkMWM1ZGU=, ActorId: [1:746:2626], ActorState: ExecuteState, TraceId: 01js2fa3hv9jnhhs36acnse0vm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-17T18:14:03.323919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-17T18:14:04.050955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.485326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-17T18:14:05.152297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:05.981383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.460154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:07.048574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:14:07.812790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T18:14:09.932549Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjA2OGQzZDgtZmNkZDM4ZWItZTEwMzY4OWQtMTZjNGUxOQ==, ActorId: [1:842:2688], ActorState: ExecuteState, TraceId: 01js2fadkz1nt2edyg47vctsz5, Create QueryResponse for error on request, msg: 2025-04-17T18:14:09.934261Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01js2fadkz1nt2edyg47vctsz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA2OGQzZDgtZmNkZDM4ZWItZTEwMzY4OWQtMTZjNGUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-17T18:14:11.371697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:14:11.371767Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T18:14:45.732771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.869963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.509138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 2025-04-17T18:14:49.183549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715734:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2025-04-17T18:15:02.008510Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmRjZDFhMWMtMTVjMTFiNDAtMTJjY2YyMzMtYWE4ZGFlZjA=, ActorId: [1:3357:4585], ActorState: ExecuteState, TraceId: 01js2fc6d40yfxspasewdw5cf8, Create QueryResponse for error on request, msg: 2025-04-17T18:15:02.011709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01js2fc6d40yfxspasewdw5cf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRjZDFhMWMtMTVjMTFiNDAtMTJjY2YyMzMtYWE4ZGFlZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T18:15:14.441732Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=15; 2025-04-17T18:15:14.441978Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:15:14.442413Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:15:14.442763Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3756:4813], Table: `//Root/.metadata/secrets/access` ([72057594046644480:13:1]), SessionActorId: [1:3663:4813]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3756:4813].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:15:14.443275Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3749:4813], SessionActorId: [1:3663:4813], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:3663:4813]. isRollback=0 2025-04-17T18:15:14.443673Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTI1YTZmNjQtZWI2YTc4YWItMjhkYmZhYi0xMTQzM2M0NQ==, ActorId: [1:3663:4813], ActorState: ExecuteState, TraceId: 01js2fck5qayyhs549j62xpzv2, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3750:4813] from: [1:3749:4813] 2025-04-17T18:15:14.443839Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:3750:4813] TxId: 281474976715760. Ctx: { TraceId: 01js2fck5qayyhs549j62xpzv2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI1YTZmNjQtZWI2YTc4YWItMjhkYmZhYi0xMTQzM2M0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:15:14.444156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTI1YTZmNjQtZWI2YTc4YWItMjhkYmZhYi0xMTQzM2M0NQ==, ActorId: [1:3663:4813], ActorState: ExecuteState, TraceId: 01js2fck5qayyhs549j62xpzv2, Create QueryResponse for error on request, msg: 2025-04-17T18:15:14.449523Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01js2fcjyyb82gs74mywvaatvn" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZTI1YTZmNjQtZWI2YTc4YWItMjhkYmZhYi0xMTQzM2M0NQ==" tx_control { tx_id: "01js2fcjyyb82gs74mywvaatvn" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-17T18:15:26.767518Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWYyZTYyMDMtMzEzMzI4NS1jOGYyMTE1My00ZGJlOTQxNA==, ActorId: [1:3965:5036], ActorState: ExecuteState, TraceId: 01js2fcyn3938grt6sq2yfnmf9, Create QueryResponse for error on request, msg: 2025-04-17T18:15:26.769027Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715772. Ctx: { TraceId: 01js2fcyn3938grt6sq2yfnmf9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWYyZTYyMDMtMzEzMzI4NS1jOGYyMTE1My00ZGJlOTQxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-17T18:15:38.113414Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4290:5283], for# root@builtin, access# DescribeSchema 2025-04-17T18:15:38.113523Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4290:5283], for# root@builtin, access# DescribeSchema 2025-04-17T18:15:38.115434Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4287:5280], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:15:38.117282Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTc5MDE1ZTgtNDFjMjM2ODgtYzQwN2E2MmUtZDk3ZTZhNDg=, ActorId: [1:4282:5276], ActorState: ExecuteState, TraceId: 01js2fdabm0n9k84mjff108559, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-17T18:15:49.646494Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator 2025-04-17T18:15:50.516890Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjMxY2Y2MTQtNWFiZWE0NjQtNGQ4YWFhZWYtN2JmNDI0ZDg=, ActorId: [1:4547:5475], ActorState: ExecuteState, TraceId: 01js2fdnkn15yw2aprzdcrpy4w, Create QueryResponse for error on request, msg: 2025-04-17T18:15:50.518126Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715796. Ctx: { TraceId: 01js2fdnkn15yw2aprzdcrpy4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjMxY2Y2MTQtNWFiZWE0NjQtNGQ4YWFhZWYtN2JmNDI0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-17T18:16:03.094753Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmEwNzA2MWMtOGFkYzBjYzItMTU2YWI5ZWYtNzg5YTdmNmY=, ActorId: [1:4942:5771], ActorState: ExecuteState, TraceId: 01js2fe1wt79y8w59729n04zqx, Create QueryResponse for error on request, msg: 2025-04-17T18:16:03.095660Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715812. Ctx: { TraceId: 01js2fe1wt79y8w59729n04zqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmEwNzA2MWMtOGFkYzBjYzItMTU2YWI5ZWYtNzg5YTdmNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-17T18:16:29.914740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715843. Ctx: { TraceId: 01js2fewhqaweyzgx64150tnhw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwYjNmMTMtNzAwZTNhZTktNmUwOTNmODItZmZiNjM2OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: 2025-04-17T18:13:40.583729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:40.584182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:40.584230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001a84/r3tmp/tmpqBR9ta/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15350, node 1 TClient is connected to server localhost:11205 2025-04-17T18:13:41.102717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:41.134501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:41.137889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:41.137936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:41.137966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:41.138351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:41.172821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:41.172924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:41.184343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-17T18:13:52.927822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:52.928010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-17T18:14:03.294990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.295119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.301047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-17T18:14:03.474760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:891:2725], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.474884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.475163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:896:2730], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:03.479878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T18:14:03.624718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:14:03.933547Z node 1 :TX_PROXY ERROR: Actor# [1:994:2799] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:04.476088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:04.939698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-17T18:14:05.696626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.470393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:06.918817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:08.211790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:14:08.547709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-17T18:14:11.546166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:14:11.546262Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T18:14:45.738090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-17T18:14:46.650152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.355297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-04-17T18:14:48.816085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
<main>: Error: Execution, code: 1060
<main>:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-17T18:15:14.300635Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=16; 2025-04-17T18:15:14.301044Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 16 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:15:14.301297Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 16 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-17T18:15:14.301892Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3727:4779], Table: `//Root/.metadata/secrets/access` ([72057594046644480:13:1]), SessionActorId: [1:3635:4779]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3727:4779].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-04-17T18:15:14.302541Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3720:4779], SessionActorId: [1:3635:4779], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:3635:4779]. isRollback=0 2025-04-17T18:15:14.303098Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTMxZjY1YmEtYjUxOGRjYzEtMWMxOTI4OTgtZDNhNzRkN2E=, ActorId: [1:3635:4779], ActorState: ExecuteState, TraceId: 01js2fck0t4j0v6dh7s49akgdv, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3721:4779] from: [1:3720:4779] 2025-04-17T18:15:14.303348Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:3721:4779] TxId: 281474976715756. Ctx: { TraceId: 01js2fck0t4j0v6dh7s49akgdv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMxZjY1YmEtYjUxOGRjYzEtMWMxOTI4OTgtZDNhNzRkN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-17T18:15:14.304152Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTMxZjY1YmEtYjUxOGRjYzEtMWMxOTI4OTgtZDNhNzRkN2E=, ActorId: [1:3635:4779], ActorState: ExecuteState, TraceId: 01js2fck0t4j0v6dh7s49akgdv, Create QueryResponse for error on request, msg: 2025-04-17T18:15:14.317926Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01js2fcjsx4z4zt4h45n98r304" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YTMxZjY1YmEtYjUxOGRjYzEtMWMxOTI4OTgtZDNhNzRkN2E=" tx_control { tx_id: "01js2fcjsx4z4zt4h45n98r304" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
<main>: Error: Execution, code: 1060
<main>:1:29: Error: Executing DROP OBJECT SECRET
<main>: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-17T18:15:38.059097Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4316:5289], for# root@builtin, access# DescribeSchema 2025-04-17T18:15:38.059215Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4316:5289], for# root@builtin, access# DescribeSchema 2025-04-17T18:15:38.061340Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4305:5278], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:15:38.063429Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWVmMWViM2ItMzgwZGEwMzctZjkwMmJkYjItNmNkODZmYTA=, ActorId: [1:4293:5267], ActorState: ExecuteState, TraceId: 01js2fda9vagbajf646fjtn08y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-17T18:15:49.799940Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing CREATE OBJECT SECRET
<main>: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-17T18:16:30.128486Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715844. Ctx: { TraceId: 01js2fewsr605ckkh61nr95jc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODRhZDY3ZjktYmZlOGQ2NTUtNzIyZGRmOTctYjAwZThhODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:15:44.582620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:44.582712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:44.582772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:44.582826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:44.583834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:44.583884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:44.583953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:44.584020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
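Note on the Secret::Validation output above: the test replays a fixed sequence of metadata DDL through KQP. A minimal YQL sketch of that sequence, reconstructed from the REQUEST= lines in the log (identifiers and backtick literals as logged), is:

CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;
ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;
CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);
CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);
DROP OBJECT `secret1` (TYPE SECRET); -- expected to fail: secret1 is still referenced by the access object
UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`; -- expected to fail: secret1 already exists

The yql_text dumped by the metadata provider above lost its generic type arguments to tag stripping ("List>" in the log); judging from the struct members listed in the adjacent parameters block (ownerUserId, secretId, accessSID, all Utf8), the declaration was presumably:

DECLARE $objects AS List<Struct<ownerUserId:Utf8,secretId:Utf8,accessSID:Utf8>>;
INSERT INTO `//Root/.metadata/secrets/access`
SELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects);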
2025-04-17T18:15:44.585056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:44.671227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:15:44.671286Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:15:44.687038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:44.687768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:15:44.687918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:44.697342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:44.697916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:44.698418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.698600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:44.703673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.711386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:44.711451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.711675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:44.711722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:44.711770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:44.711921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:15:44.718343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:15:44.809268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:44.810926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.812454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:44.813457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:44.813525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.817760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.817886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:44.818088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.818194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:44.818227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:44.818250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:44.819878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.819913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:44.819933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:44.821267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.821307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.821361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.821400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.824574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:44.825894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:44.826669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:44.827448Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:44.827553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:44.827589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.828658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:44.828701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:44.828828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:44.828892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:44.830852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:44.830915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:44.831053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:44.831083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:44.831431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:44.831464Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:44.831530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:44.831551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:44.831586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:33.464339Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 219043334251 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:33.464380Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-17T18:16:33.464626Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2025-04-17T18:16:33.464730Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-17T18:16:33.469971Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:33.470017Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:16:33.470305Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:33.470346Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:205:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-04-17T18:16:33.470765Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:33.470815Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2025-04-17T18:16:33.471454Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:16:33.471542Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:16:33.471573Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-17T18:16:33.471606Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-17T18:16:33.471639Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:16:33.471716Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-04-17T18:16:33.472739Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1162 } } 2025-04-17T18:16:33.472782Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:33.472915Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1162 } } 2025-04-17T18:16:33.473002Z node 51 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1162 } } 2025-04-17T18:16:33.473493Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 219043334412 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:16:33.473533Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-17T18:16:33.473637Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 219043334412 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:16:33.473681Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:16:33.473773Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 219043334412 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:16:33.473820Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:33.473850Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:33.473884Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:16:33.473921Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-17T18:16:33.476853Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:16:33.478037Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:33.478164Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:33.478303Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:33.478340Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 
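Note on the AlterTable test in this block: it drives the schemeshard directly, and the describe result further down reports TTLSettings with ColumnName "modified_at" and ExpireAfterSeconds 3600 on table /MyRoot/TTLEnabledTable. A YQL sketch of an equivalent user-level statement, assuming the standard YQL TTL clause (3600 s = PT1H), would be:

ALTER TABLE TTLEnabledTable SET (TTL = Interval("PT1H") ON modified_at);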
2025-04-17T18:16:33.478443Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-17T18:16:33.478478Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-17T18:16:33.478516Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-17T18:16:33.478543Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-17T18:16:33.478577Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-17T18:16:33.478612Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-17T18:16:33.478648Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-17T18:16:33.478676Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-17T18:16:33.478786Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-17T18:16:33.481381Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-17T18:16:33.481444Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-17T18:16:33.481774Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-17T18:16:33.481854Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-17T18:16:33.481891Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:450:2423] TestWaitNotification: OK eventTxId 1003 2025-04-17T18:16:33.482329Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:33.482529Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 239us result status StatusSuccess 2025-04-17T18:16:33.483024Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants |93.2%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2025-04-17T18:16:35.428140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:35.428723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:35.428778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e25/r3tmp/tmpsmfJ3q/pdisk_1.dat 2025-04-17T18:16:35.991503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:36.046045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:36.090327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:36.090474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:36.103363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:36.214557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:36.736926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:36.737094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:874:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:36.737193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:36.746548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:36.899378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:16:36.990645Z node 1 :TX_PROXY ERROR: Actor# [1:961:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:37.945722Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2ff3kxb4gf4809pw4tcmtq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYyZmIwNzYtN2EyNzMxN2YtZGIyNDQ1YjAtNjFjYjg1MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:16:37.995992Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2793], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2ff3kxb4gf4809pw4tcmtq. SessionId : ydb://session/3?node_id=1&id=ZDYyZmIwNzYtN2EyNzMxN2YtZGIyNDQ1YjAtNjFjYjg1MDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-04-17T18:16:37.999081Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2793], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01js2ff3kxb4gf4809pw4tcmtq. SessionId : ydb://session/3?node_id=1&id=ZDYyZmIwNzYtN2EyNzMxN2YtZGIyNDQ1YjAtNjFjYjg1MDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-04-17T18:16:38.010820Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:993:2794], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01js2ff3kxb4gf4809pw4tcmtq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDYyZmIwNzYtN2EyNzMxN2YtZGIyNDQ1YjAtNjFjYjg1MDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-17T18:16:38.019985Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDYyZmIwNzYtN2EyNzMxN2YtZGIyNDQ1YjAtNjFjYjg1MDk=, ActorId: [1:863:2711], ActorState: ExecuteState, TraceId: 01js2ff3kxb4gf4809pw4tcmtq, Create QueryResponse for error on request, msg: 2025-04-17T18:16:38.024395Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2ff3kxb4gf4809pw4tcmtq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYyZmIwNzYtN2EyNzMxN2YtZGIyNDQ1YjAtNjFjYjg1MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.2%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicStatistics::NotFullStatisticsColumnshard >> BasicStatistics::TwoTables |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> ResourcePoolsDdl::TestWorkloadConfigOnServerless >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> KqpIndexes::MultipleSecondaryIndex+UseSink |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> HttpRequest::Analyze >> BasicStatistics::TwoServerlessDbs >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> BasicStatistics::TwoNodes >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> 
TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-04-17T18:16:19.707863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:19.707935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:19.775990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:20.963719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:20.963781Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:21.007386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:22.048861Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:22.048924Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:22.121004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:23.113419Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:23.113495Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:23.181942Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:24.629356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:24.629428Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:24.671358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:33.463018Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:33.463096Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:33.507479Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:35.041567Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:35.041654Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:35.099825Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:40.102420Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:40.102513Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:40.152149Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:41.519759Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:41.519839Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:41.566538Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:42.715088Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:42.715191Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:42.765388Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:44.104795Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:44.104889Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:44.151140Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> PgCatalog::CheckSetConfig [GOOD] >> PgCatalog::PgDatabase+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:16:11.101291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:11.101363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:11.101402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:11.101430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:11.101473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:11.101501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:11.101572Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:11.101637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:11.101904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:11.184288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:11.184356Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:11.197337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:11.197695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:11.197859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:11.206333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:11.206490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:11.207043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:11.207253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:11.210703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:11.211831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:11.211886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:11.212056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:11.212100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:11.212136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:11.212245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.221485Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:16:11.356218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:11.356433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.356618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:11.356830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-04-17T18:16:11.356887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.358993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:11.359123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:11.359269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.359319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:11.359363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:11.359407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:11.361166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.361216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:11.361245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:11.362920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.362963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.363010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:11.363046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.371283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:11.372985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:11.373142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:11.374031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:11.374197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:11.374247Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:11.374531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:11.374600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:11.374734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:11.374804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:11.376473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:11.376508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:11.376631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:11.376690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:11.376985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:11.377023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:11.377099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:11.377125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.377200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:11.377230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.377258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:11.377309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:11.377340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:11.377365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:11.377414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:11.377449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:11.377487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:11.378953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:11.379060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:11.379117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:45.239393Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:45.239733Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.254500Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:121:2147] sender: [37:238:2058] recipient: [37:15:2062] 2025-04-17T18:16:45.271057Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:45.271446Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.271792Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:45.272151Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:45.272265Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.276579Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:45.276753Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:45.277041Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.277152Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:45.277242Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:45.277319Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:45.279812Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.279882Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:45.279943Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:45.281455Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.281504Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.281583Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:45.281681Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-04-17T18:16:45.281908Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:45.283495Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:45.283838Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:45.284970Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:45.285199Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 158913792108 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:45.285295Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:45.285711Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:45.285825Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:45.286335Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:45.286489Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:45.292517Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:45.292637Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:45.292959Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:45.293058Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:45.293586Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.293681Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:45.293937Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:45.294005Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:45.294074Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:45.294144Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2025-04-17T18:16:45.294250Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:16:45.294354Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:45.294419Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:16:45.294484Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:16:45.294586Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:16:45.294669Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:16:45.294727Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:16:45.295437Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:45.295651Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:16:45.295734Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:16:45.295820Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:16:45.295909Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:45.296085Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:16:45.303402Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:16:45.304207Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:16:45.305866Z node 37 :TX_PROXY DEBUG: actor# [37:268:2259] Bootstrap 2025-04-17T18:16:45.357616Z node 37 :TX_PROXY DEBUG: actor# [37:268:2259] Become StateWork (SchemeCache [37:273:2264]) 2025-04-17T18:16:45.361260Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:45.361964Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:16:45.362342Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" 
ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-17T18:16:45.363288Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-17T18:16:45.364780Z node 37 :TX_PROXY DEBUG: actor# [37:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:16:45.370698Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:45.371051Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-17T18:16:45.371711Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] Test command err: 2025-04-17T18:16:19.708414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:19.708482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:19.774114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:21.217896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:21.217951Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:21.264047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:22.316991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:22.317063Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:22.371282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:23.416075Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:23.416143Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:23.463267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:24.723674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:24.723739Z node 5 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T18:16:24.781938Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:26.015180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:26.015254Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:26.058055Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:27.319367Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:27.319447Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:27.372431Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:28.643846Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:28.643926Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:28.690612Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:29.887436Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:29.887515Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:29.946134Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:30.997283Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:30.997374Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:31.042079Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:32.354927Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:32.355011Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:32.424485Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:33.593644Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:33.593732Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:33.650559Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:35.225638Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:35.225726Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:35.270328Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:36.485442Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:36.485527Z node 14 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T18:16:36.551562Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:38.140203Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:38.140395Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:38.184756Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:39.987569Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:39.987650Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:40.041010Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:41.804367Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:41.804446Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:41.855626Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:43.650155Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:43.650340Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:43.700926Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:45.034113Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:16:45.034225Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:45.085987Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:16:45.680575Z node 23 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:114} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? 
true selector_config: [] >> BasicStatistics::Serverless |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> ColumnStatistics::CountMinSketchServerlessStatistics >> KqpIndexes::MultipleSecondaryIndex+UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndex-UseSink >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> HttpRequest::ProbeServerless |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:107:2139] Leader for TabletID 9437184 is [1:126:2151] sender: [1:129:2057] recipient: [1:107:2139] 2025-04-17T18:15:41.543873Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.562301Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.563346Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.564240Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.617488Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.700221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.700282Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.704728Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.705484Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.707035Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.707099Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.707167Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.707532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.707622Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.707682Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:195:2151] in generation 2 Leader for TabletID 9437184 is [1:126:2151] sender: [1:211:2057] recipient: [1:14:2061] 2025-04-17T18:15:41.789502Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.832730Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:15:41.832920Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.833025Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:216:2214] 2025-04-17T18:15:41.833060Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.833095Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.833134Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.833313Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.833357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.833641Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:41.833750Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:15:41.833829Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.833864Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.833929Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:41.833975Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.834010Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.834044Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.834082Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.834209Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:212:2211], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.834252Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.834305Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:210:2210], serverId# [1:212:2211], sessionId# [0:0:0] 2025-04-17T18:15:41.836771Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.836831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.836899Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.837023Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.837066Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:15:41.837132Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.837178Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.837221Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 
executing on unit CheckSchemeTx 2025-04-17T18:15:41.837275Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.837307Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.837533Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.837567Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:15:41.837598Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.837628Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.837685Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.837719Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:41.837749Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:15:41.837799Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.837821Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:15:41.853759Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.853813Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.853842Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.853885Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.853938Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.854454Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:222:2220], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.854508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.854547Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:221:2219], serverId# [1:222:2220], sessionId# [0:0:0] 2025-04-17T18:15:41.854651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.854676Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.854790Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.854852Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.854890Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.854927Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:15:41.858285Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.858371Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.858600Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.858634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.858673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.858708Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.858733Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.858787Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.858816Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.858852Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.858899Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.858935Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.858978Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.859098Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-17T18:15:41.859126Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-17T18:15:41.859148Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.859168Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.859190Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.859268Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.859294Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:15:41.859329Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:41.859393Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:41.859437Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-17T18:15:41.859475Z node 1 :TX_DATASHARD TRAC ... 
9437184 to execution unit ExecuteDataTx 2025-04-17T18:16:49.060026Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit ExecuteDataTx 2025-04-17T18:16:49.060316Z node 40 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437184 with status COMPLETE 2025-04-17T18:16:49.060376Z node 40 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:16:49.060433Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-04-17T18:16:49.060464Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:16:49.060492Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompleteOperation 2025-04-17T18:16:49.060521Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompleteOperation 2025-04-17T18:16:49.060710Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is DelayComplete 2025-04-17T18:16:49.060743Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompleteOperation 2025-04-17T18:16:49.060775Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompletedOperations 2025-04-17T18:16:49.060805Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompletedOperations 2025-04-17T18:16:49.060847Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-04-17T18:16:49.060873Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompletedOperations 2025-04-17T18:16:49.060904Z node 40 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437184 has finished 2025-04-17T18:16:49.060939Z node 40 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:16:49.060971Z node 40 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:16:49.061004Z node 40 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:16:49.061037Z node 40 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:16:49.061217Z node 40 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [40:453:2395], Recipient [40:453:2395]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:16:49.061255Z node 40 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:16:49.061306Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-17T18:16:49.061342Z node 40 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:16:49.061370Z node 40 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-17T18:16:49.061425Z node 40 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437186 2025-04-17T18:16:49.061468Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PlanQueue 2025-04-17T18:16:49.061501Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.061530Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2025-04-17T18:16:49.061564Z node 40 :TX_DATASHARD 
TRACE: Add [7:6] at 9437186 to execution unit LoadTxDetails 2025-04-17T18:16:49.061596Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2025-04-17T18:16:49.062486Z node 40 :TX_DATASHARD DEBUG: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2025-04-17T18:16:49.062540Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.062573Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2025-04-17T18:16:49.062604Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2025-04-17T18:16:49.062637Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2025-04-17T18:16:49.062681Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.062709Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2025-04-17T18:16:49.062739Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2025-04-17T18:16:49.062770Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2025-04-17T18:16:49.062817Z node 40 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437186 2025-04-17T18:16:49.062853Z node 40 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437186 2025-04-17T18:16:49.062886Z node 40 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437186 2025-04-17T18:16:49.062930Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.062959Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2025-04-17T18:16:49.062988Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2025-04-17T18:16:49.063017Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildDataTxOutRS 2025-04-17T18:16:49.063070Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.063100Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2025-04-17T18:16:49.063130Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 2025-04-17T18:16:49.063162Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2025-04-17T18:16:49.063194Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.063221Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2025-04-17T18:16:49.063247Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2025-04-17T18:16:49.063276Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2025-04-17T18:16:49.063310Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.063339Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2025-04-17T18:16:49.063366Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2025-04-17T18:16:49.063394Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2025-04-17T18:16:49.063423Z node 40 :TX_DATASHARD TRACE: 
Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.063452Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2025-04-17T18:16:49.063477Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2025-04-17T18:16:49.063505Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2025-04-17T18:16:49.063827Z node 40 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2025-04-17T18:16:49.063883Z node 40 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:16:49.063932Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.063962Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2025-04-17T18:16:49.063990Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompleteOperation 2025-04-17T18:16:49.064019Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2025-04-17T18:16:49.064202Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is DelayComplete 2025-04-17T18:16:49.064236Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2025-04-17T18:16:49.064267Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompletedOperations 2025-04-17T18:16:49.064299Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2025-04-17T18:16:49.064335Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-17T18:16:49.064363Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2025-04-17T18:16:49.064393Z node 40 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437186 has finished 2025-04-17T18:16:49.064425Z node 40 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:16:49.064454Z node 40 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-17T18:16:49.064485Z node 40 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-17T18:16:49.064517Z node 40 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-17T18:16:49.087119Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2025-04-17T18:16:49.087189Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2025-04-17T18:16:49.087256Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-17T18:16:49.087303Z node 40 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2025-04-17T18:16:49.087369Z node 40 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [40:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:16:49.087418Z node 40 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-17T18:16:49.087683Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2025-04-17T18:16:49.087724Z node 40 :TX_DATASHARD 
DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2025-04-17T18:16:49.087767Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-17T18:16:49.087801Z node 40 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2025-04-17T18:16:49.087853Z node 40 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [40:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:16:49.087889Z node 40 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-17T18:16:49.088286Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2025-04-17T18:16:49.088342Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2025-04-17T18:16:49.088388Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:16:49.088429Z node 40 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2025-04-17T18:16:49.088478Z node 40 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [40:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:16:49.088514Z node 40 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> AnalyzeColumnshard::Analyze ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10855, MsgBus: 25187 2025-04-17T18:16:12.627941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347589025457182:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:12.633952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e1b/r3tmp/tmp7uDYdC/pdisk_1.dat 2025-04-17T18:16:12.951695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:12.951897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:12.969076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:12.971709Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10855, node 1 2025-04-17T18:16:13.114341Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:13.114365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:13.114375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:13.114516Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25187 TClient is connected to server localhost:25187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:13.618629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:13.633928Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:15.433466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347601910359649:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:15.434247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347601910359642:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:15.434376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:15.441723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:15.459068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347601910359656:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:15.526591Z node 1 :TX_PROXY ERROR: Actor# [1:7494347601910359707:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:15.793308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:16:15.928593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:16:15.964728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:15.993505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.028925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.223256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.257151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.288814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.321787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.392108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.426763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.461907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:16:16.496708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.066005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:16:17.091143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.123498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.158377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.186524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.210198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.242890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.272855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.345408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.387268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.474666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.512441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.547298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.580913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.616721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.626410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347589025457182:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:17.626480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:16:17.646987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:16:17.686629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.178122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.181563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.184707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.187565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.190363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.192781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.196681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.197607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.202043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.202652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.207385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.207847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.211624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.213415Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.215842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.218933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.219943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.224401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.224573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.228660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.230088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.233015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.236129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.237757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.241966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.242982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.247109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.247567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.251644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.252629Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.257019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.258553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.262290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.263857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.267421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.270587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.271639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.276193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.276668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.281225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.281988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.287569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.291542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.293627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.301742Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:43.377361Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fej4600kxjs2k9er4m97b", SessionId: ydb://session/3?node_id=1&id=YTQxYTA1ZWYtMTIzMGI1ZWYtNGMxN2I2YS05ZTQ1ODZkYQ==, Slow query, duration: 24.554053s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:16:43.641807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:16:43.642429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494347623385202992:2955];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-17T18:16:43.642763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:16:43.644063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink >> ResourcePoolsDdl::TestWorkloadConfigOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> KqpIndexes::MultipleSecondaryIndex-UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] 
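For readability, this is the DDL carried inside the KQP_SLOW_LOG entry above (its \n escapes expanded), the three column-store tables whose creation took 24.554053s. Each table requests AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240, which is consistent with the long run of per-tablet finished_tx records before it:

    CREATE TABLE t1 (
      id1 Int32 NOT NULL,
      PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
      id2 Int64 NOT NULL,
      t1_id1 Int64 NOT NULL,
      -- random_field2 Int32
      PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
      id3 Int16 NOT NULL,
      -- random_field3 Int32
      PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);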
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:15:54.765706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:15:54.765801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:54.765850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:15:54.765887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:15:54.765939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:15:54.765972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:15:54.766020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:15:54.766090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:15:54.766395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:54.835102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:15:54.835166Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:15:54.847243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:54.847911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-17T18:15:54.848078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:15:54.866339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:15:54.866965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:15:54.867498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:54.867693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:15:54.870570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:54.871978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:54.872045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:54.872286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:15:54.872337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:54.872380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:15:54.872539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:15:54.878604Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:15:54.977987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:15:54.978311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.978536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:15:54.978785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:15:54.978873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.982710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:54.982860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:15:54.983103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.983178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:15:54.983217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:15:54.983252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:15:54.985662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.985725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:15:54.985758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:15:54.988456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.988510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.988569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:54.988616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:15:54.992166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:15:54.994112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:15:54.994313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:15:54.995308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:15:54.995443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:15:54.995502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:54.995808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:15:54.995859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:15:54.996023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:15:54.996152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:15:54.998841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:15:54.998903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:15:54.999091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:15:54.999140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:15:54.999544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:15:54.999591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:15:54.999692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:15:54.999729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:15:54.999787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... :56.448770Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-04-17T18:16:56.448838Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-17T18:16:56.449135Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:16:56.449302Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2025-04-17T18:16:56.449424Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:16:56.449486Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:16:56.451471Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:16:56.453697Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:56.453841Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:56.454086Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:56.454125Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:56.454314Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:16:56.454476Z node 62 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:56.454518Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:207:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-04-17T18:16:56.454563Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:207:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-04-17T18:16:56.454770Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:56.454823Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-04-17T18:16:56.454927Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:56.454963Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:16:56.455000Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-17T18:16:56.456178Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:16:56.456277Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:16:56.456310Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-17T18:16:56.456348Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T18:16:56.456383Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-17T18:16:56.457230Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:16:56.457322Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:16:56.457352Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-17T18:16:56.457387Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:16:56.457419Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:16:56.457506Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-04-17T18:16:56.460972Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:16:56.461026Z node 62 
:FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:56.461070Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T18:16:56.461276Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-17T18:16:56.461310Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-17T18:16:56.461352Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-17T18:16:56.461383Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-17T18:16:56.461419Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-17T18:16:56.461461Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-17T18:16:56.461501Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-17T18:16:56.461531Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-17T18:16:56.461634Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:16:56.461669Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:16:56.462687Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:16:56.462736Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:16:56.462802Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:16:56.463329Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:16:56.463715Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:16:56.466086Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-17T18:16:56.466336Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-17T18:16:56.466392Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-17T18:16:56.466752Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-17T18:16:56.466853Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-17T18:16:56.466885Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:473:2446] TestWaitNotification: OK eventTxId 1003 
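The describe result just below reports TTLSettings with ColumnName "modified_at" and ExpireAfterSeconds 3600 surviving the table move. As a rough illustration of what that setting means (a hypothetical helper, not YDB's TTL code), a row becomes eligible for erasure once its timestamp column is at least that many seconds in the past:

    #include <chrono>
    #include <cstdio>

    // Hypothetical sketch of the TTL rule in the describe output below:
    // a row may be erased once modified_at + ExpireAfterSeconds has passed.
    bool IsExpired(std::chrono::system_clock::time_point modifiedAt,
                   std::chrono::seconds expireAfter) {
        return modifiedAt + expireAfter <= std::chrono::system_clock::now();
    }

    int main() {
        using namespace std::chrono;
        auto overAnHourAgo = system_clock::now() - seconds(3700);
        // ExpireAfterSeconds: 3600, as in the test table's TTL settings
        std::printf("expired: %d\n", IsExpired(overAnHourAgo, seconds(3600)));  // prints 1
        return 0;
    }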
2025-04-17T18:16:56.467324Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:16:56.467527Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 244us result status StatusSuccess 2025-04-17T18:16:56.467986Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-04-17T18:15:58.253683Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1744913758253629 2025-04-17T18:15:58.702882Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347529013213616:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:58.702949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:15:58.782506Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347526754340505:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:58.782566Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001863/r3tmp/tmpWnv3Ys/pdisk_1.dat 2025-04-17T18:15:58.976676Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:15:59.000350Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:15:59.350958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:59.362328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:59.362899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:59.363787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:59.363832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:59.367977Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:15:59.368088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:59.369627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30315, node 1 2025-04-17T18:15:59.641988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001863/r3tmp/yandex8sq43n.tmp 2025-04-17T18:15:59.642015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001863/r3tmp/yandex8sq43n.tmp 2025-04-17T18:15:59.646339Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001863/r3tmp/yandex8sq43n.tmp 2025-04-17T18:15:59.646519Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:00.121024Z INFO: TTestServer started on Port 8181 GrpcPort 30315 TClient is connected to server localhost:8181 PQClient connected to localhost:30315 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:00.479027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:16:02.131042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347543934210016:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.133026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347543934210005:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.133218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.163814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-17T18:16:02.187464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347546193083649:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.187998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.191143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347543934210019:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-17T18:16:02.287296Z node 2 :TX_PROXY ERROR: Actor# [2:7494347543934210047:2130] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:02.704557Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494347546193083708:2343], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:16:02.703304Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494347543934210054:2317], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:16:02.703670Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWVlYjdmZTEtNGQyYWE5OGUtYTUzMzUyZTctNGVlMjA0MDI=, ActorId: [2:7494347543934210003:2308], ActorState: ExecuteState, TraceId: 01js2fe1td5cn4b48berwmat2k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:16:02.707685Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:16:02.711481Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODA0YjgzNDgtOTlmYmRkMDMtZWRmMGVhNTYtYjdlNDM0ZTk=, ActorId: [1:7494347546193083646:2336], ActorState: ExecuteState, TraceId: 01js2fe1wbad25q6pjbr4nqpe4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:16:02.711874Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:16:02.885432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:16:03.032043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:03.224806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:30315", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-17T18:16:03.700009Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347529013213616:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:03.700097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:16:03.737602Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. 
Ctx: { TraceId: 01js2fe31437xzpyf0vvm4h93s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGU5OTFlNGQtMWNkMTk0YWMtZmY1ODEyNmItYTNkNjhmZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:16:03.782824Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347526754340505:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:03.782864Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7494347550488051453:2990] === CheckClustersList. Ok 2025-04-17T18:16:09.420984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-top ... me = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T18:16:25.470052Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T18:16:25.473051Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-17T18:16:25.620373Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-17T18:16:25.620622Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494347642086300130:2516] connected; active server actors: 1 2025-04-17T18:16:25.620678Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-17T18:16:25.620694Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-17T18:16:25.620876Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494347642086300130:2516] disconnected; active server actors: 1 2025-04-17T18:16:25.620896Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494347642086300130:2516] disconnected no session 2025-04-17T18:16:25.729243Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T18:16:25.729287Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T18:16:25.729305Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494347642086300091:2516] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T18:16:25.729331Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:16:25.730742Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-17T18:16:25.734321Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7494347642086300152:2516], now have 1 active actors on pipe 2025-04-17T18:16:25.734406Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:16:25.734434Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:16:25.734558Z node 4 :PERSQUEUE INFO: new Cookie src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T18:16:25.734662Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T18:16:25.734737Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:16:25.735529Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:16:25.735563Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:16:25.735638Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:16:25.736009Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0 2025-04-17T18:16:25.742377Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744913785742 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:16:25.742508Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T18:16:25.744976Z :INFO: [] MessageGroupId [src] SessionId [src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0] Write session: close. 
Timeout = 0 ms 2025-04-17T18:16:25.745035Z :INFO: [] MessageGroupId [src] SessionId [src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0] Write session will now close 2025-04-17T18:16:25.745079Z :DEBUG: [] MessageGroupId [src] SessionId [src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0] Write session: aborting 2025-04-17T18:16:25.745556Z :INFO: [] MessageGroupId [src] SessionId [src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:16:25.745595Z :DEBUG: [] MessageGroupId [src] SessionId [src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0] Write session: destroy 2025-04-17T18:16:25.746648Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0 grpc read done: success: 0 data: 2025-04-17T18:16:25.746666Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0 grpc read failed 2025-04-17T18:16:25.746687Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0 grpc closed 2025-04-17T18:16:25.746698Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|176a5330-38bc40b9-bdcc2fa9-ae9acb8b_0 is DEAD 2025-04-17T18:16:25.747262Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:16:25.751704Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494347642086300152:2516] destroyed 2025-04-17T18:16:25.751775Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2025-04-17T18:16:25.831442Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:27.832429Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-17T18:16:28.954353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:16:28.954385Z node 3 :IMPORT WARN: Table profiles were not loaded Test retry state: get retry delay 2025-04-17T18:16:29.833447Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === In the next federation discovery response dc2 will be available Test retry state: get retry delay 2025-04-17T18:16:31.839243Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:33.846316Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:35.849454Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:37.850547Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:39.854566Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:41.862346Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:43.864697Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:45.865422Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:47.866441Z :NOTICE: [/Root] [] 
[] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:49.874131Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:51.875595Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-17T18:16:53.876610Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-17T18:16:54.288659Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-17T18:16:54.289476Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-17T18:16:54.288775Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-04-17T18:16:54.292840Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-04-17T18:16:54.293421Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 === Waiting for repair >>> Ready to answer: ok 2025-04-17T18:16:55.877665Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:2892" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:2892" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:2892" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } === Closing the session 2025-04-17T18:16:55.892864Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-04-17T18:16:55.893714Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-04-17T18:16:55.904197Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-04-17T18:16:55.904247Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-04-17T18:16:55.904315Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-04-17T18:16:55.904397Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-04-17T18:16:55.904455Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2025-04-17T18:16:56.352363Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710740, task: 1, CA Id [3:7494347775230288014:2820]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T18:16:56.385354Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710740, task: 1, CA Id [3:7494347775230288014:2820]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:16:56.436077Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710740, task: 1, CA Id [3:7494347775230288014:2820]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:16:56.501569Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710740, task: 1, CA Id [3:7494347775230288014:2820]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState >> BasicStatistics::ServerlessGlobalIndex >> HttpRequest::Status [GOOD] >> HttpRequest::Analyze [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2025-04-17T18:16:44.334835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:44.335151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:44.335277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00151e/r3tmp/tmpQi7iuM/pdisk_1.dat 2025-04-17T18:16:44.711160Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23417, node 1 2025-04-17T18:16:45.178503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:45.178599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:45.178630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:45.179078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:45.181232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:45.316616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:45.316753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:45.347592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14204 2025-04-17T18:16:45.931012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:49.284362Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:49.321138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:49.321276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:49.355231Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:49.357272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:49.618073Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.618781Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.619301Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.619467Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.619718Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.619800Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.619873Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.619951Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.620017Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.811676Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:49.811798Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:49.844020Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:50.030403Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:50.110957Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:50.111040Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:50.162978Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:50.163866Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:50.164127Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:50.164205Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:50.164275Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:50.164325Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:50.164401Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:50.164464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:50.165113Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:50.191950Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:50.192097Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1869:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:50.198988Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-17T18:16:50.207969Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1906:2617] 2025-04-17T18:16:50.208599Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1906:2617], schemeshard id = 72075186224037897 2025-04-17T18:16:50.211160Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:50.243557Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:50.243624Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:50.243737Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:50.267594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:50.282654Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:50.282865Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:50.528967Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:50.695112Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:50.799659Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:51.756017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2233:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:51.756183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:51.780326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:52.273820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:52.274044Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:52.274377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:52.274548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:52.274696Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:52.274828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:52.274942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:52.275062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:52.275179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:52.275320Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:52.275451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:52.275562Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2379:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:52.329965Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2398:2892];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:52.330063Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2398:2892];tablet_id=72075186224037900;process= ... storeV1Chunks_V2;id=15; 2025-04-17T18:16:52.722673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:16:52.722750Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:16:52.722773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:16:53.170641Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.176011Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.181143Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.189198Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.195059Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.200711Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.206642Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.212618Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.217310Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.223790Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:54.763626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3369:3169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:54.763838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:54.767277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-17T18:16:55.213000Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.213547Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.215090Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.215571Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.215948Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.217099Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.217555Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.218023Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.218494Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.219783Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:56.077247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3796:3224], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.077489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.081307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-17T18:16:56.181852Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.182575Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.183991Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.184557Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.185094Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.185625Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.186332Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.187979Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.188501Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.189005Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000012s 2025-04-17T18:16:58.294899Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4064:4227] 2025-04-17T18:16:58.318281Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2025-04-17T18:16:45.233863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:45.234018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:45.234172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00151a/r3tmp/tmpJpw6XS/pdisk_1.dat 2025-04-17T18:16:45.636328Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65435, node 1 2025-04-17T18:16:45.958417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:45.958473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:45.958502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:45.959195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:45.961515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:46.058161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:46.058351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:46.075238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28962 2025-04-17T18:16:46.718488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:50.208900Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:50.284874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:50.284988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:50.329092Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:50.331645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:50.598221Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.598818Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.599353Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.599507Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.599596Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.599836Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.599922Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.600016Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.600122Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.827591Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:50.827702Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:50.847647Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:51.005500Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:51.047316Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:51.047419Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:51.090718Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:51.092020Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:51.092251Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:51.092334Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:51.092385Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:51.092448Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:51.092512Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:51.092562Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:51.093024Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:51.122717Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:51.122837Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1867:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:51.131742Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-04-17T18:16:51.137651Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2616] 2025-04-17T18:16:51.137923Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1903:2616], schemeshard id = 72075186224037897 2025-04-17T18:16:51.154949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:51.176582Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:51.176654Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:51.176727Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:51.187629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:51.195340Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:51.195481Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:51.443082Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:51.643881Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:51.693292Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:52.599577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:52.599713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:52.621614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:52.938831Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:52.939078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:52.939388Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:52.939565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:52.939731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:52.939886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:52.939996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:52.940131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:52.940257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:52.940404Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:52.940543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:52.940678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:52.991101Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2396:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:52.991194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2396:2889];tablet_id=72075186224037900;process= ... D WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:16:53.849124Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.854123Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.860186Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.863187Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.866965Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.870717Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.874196Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.878747Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.884115Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:53.889890Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-04-17T18:16:54.880091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3116:3174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:54.880365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:54.908460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-17T18:16:55.524287Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.524923Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.525691Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.526160Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.526691Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.527717Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.528372Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.529243Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.529592Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:55.530000Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-04-17T18:16:56.407119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3808:3232], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.423754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.429559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-17T18:16:56.496536Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.497317Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.499431Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.500066Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.500647Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.502300Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.502900Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.503458Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.504127Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-04-17T18:16:56.505998Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000015s 2025-04-17T18:16:58.512081Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4076:4229] 2025-04-17T18:16:58.514818Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4073:3317] , Record { OperationId: "\000\000\000\000\026^yy\221_\240z\016*\277." Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } } 2025-04-17T18:16:58.514882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=^yy_z*. 2025-04-17T18:16:58.514922Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=^yy_z*. , PathId [OwnerId: 72075186224037897, LocalPathId: 4] Answer: 'Analyze sent. 
OperationId: 00000005jyf5ws2qx0f872nfse' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ... |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >>
KqpExtractPredicateLookup::SqlInJoin [GOOD] >> KqpKv::BulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11146, MsgBus: 21617 2025-04-17T18:15:48.729200Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347482570149081:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:48.730727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e47/r3tmp/tmpDBxIv8/pdisk_1.dat 2025-04-17T18:15:49.069235Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11146, node 1 2025-04-17T18:15:49.109503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:49.109643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:49.111041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:49.123513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:49.123539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:49.123546Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:49.123719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21617 TClient is connected to server localhost:21617 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:49.630615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:49.647063Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:51.512282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347495455051625:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:51.512282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347495455051619:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:51.512376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:51.514718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:15:51.524865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347495455051633:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:15:51.630756Z node 1 :TX_PROXY ERROR: Actor# [1:7494347495455051684:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:51.903587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:52.131198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:15:52.131426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:15:52.131669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:15:52.131787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:15:52.131898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:15:52.132004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:15:52.132094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:15:52.132198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:15:52.132302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:15:52.132397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:15:52.132494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:15:52.132585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347499750019246:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:15:52.162052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:15:52.162100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:15:52.162389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:15:52.162475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:15:52.162540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:15:52.162594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:15:52.162664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:15:52.162737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:15:52.162793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:15:52.162848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:15:52.162910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:15:52.162979Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494347499750019322:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:15:52.165392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494347499750019229:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:15:52.165439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494347499750019229:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abs ... tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.727671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.731938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.733972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.737930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.739907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.744097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.745674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.749881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.751202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.755730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.756777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.762057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.762800Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.767288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.768789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.771932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.774686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.777497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.780732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.782352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.788505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.788533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.793966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.794653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.800088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.800662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.805895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.806821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:16:52.811745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.812973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.817420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.819162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.823502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.825348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.831195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.837020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.842871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.848948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.854869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.859130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.860927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.864738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.867522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.870760Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.877265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:52.881316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:53.022771Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2feqnrbc5amptrrm4kad6p", SessionId: ydb://session/3?node_id=1&id=NGI5YjM5YWItN2YzZTQ4ZDEtYWIyNDdmMTEtNGUzMGQ2ZDU=, Slow query, duration: 28.517928s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:16:53.322272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:16:53.322292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:16:53.323130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TColumnShardTestReadWrite::CompactionGC >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert >> TColumnShardTestReadWrite::WriteReadExoticTypes >> TColumnShardTestReadWrite::WriteReadModifications >> TColumnShardTestReadWrite::ReadWithProgram >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> TVersions::Wreck0Reverse [GOOD] >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink >> TColumnShardTestReadWrite::WriteRead >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.29048 00000.000 II| FAKE_ENV: Born at 2025-04-17T18:13:57.873748Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS 
group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.015 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.015 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.020 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.020 NN| TABLET_SAUSAGECACHE: Switch replacement policy from S3FIFO to ThreeLeveledLRU 00000.020 NN| TABLET_SAUSAGECACHE: Switch replacement policy done from S3FIFO to ThreeLeveledLRU 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{4, 
redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b 
alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxW ... CHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 5 ] owner [12:659:2684] 00000.430 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.430 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.430 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.430 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.430 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.430 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [4 4] 00000.430 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.430 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 4 ] 00000.431 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 4 ] 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.431 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 4 ] 00000.431 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 4 ] owner [12:659:2684] 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static 
mem, Memory{4194304 dyn 0} 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.431 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.432 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [3 4] 00000.432 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.432 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 3 ] 00000.432 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 3 ] 00000.432 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.432 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.432 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.432 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 3 ] 00000.432 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 3 ] owner [12:659:2684] 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.433 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [2 4] 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.433 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 2 ] 00000.433 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 2 ] 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.433 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.433 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 2 ] 00000.433 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 2 ] owner [12:659:2684] 00000.434 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.434 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.434 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.434 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.434 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.434 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [1 4] 00000.434 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.434 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 1 ] 00000.434 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 1 ] 00000.434 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.435 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 1 100 ] 00000.435 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 1 ] owner [12:659:2684] 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.435 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [0 4] 00000.435 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.435 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 0 ] 00000.436 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 0 ] 00000.436 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.436 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.436 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.436 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 0 100 ] 00000.436 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 0 ] owner [12:659:2684] Counters: Active:0/0, Passive:2772, MemLimit:-1 00000.436 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.437 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 10249619b +(0, 0b), 1 trc, -48685b acc} 00000.438 DD| TABLET_SAUSAGECACHE: Unregister owner [12:659:2684] 00000.439 NN| TABLET_SAUSAGECACHE: Poison cache serviced 201 reqs hit {0 0b} miss {202 20491164b} 00000.440 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.440 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.440 II| FAKE_ENV: DS.1 gone, left {10250914b, 5}, put {10299737b, 107} 00000.445 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.445 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.445 II| FAKE_ENV: All BS storage groups are stopped 00000.445 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.446 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 3357}, stopped >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2025-04-17T18:17:05.786285Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:05.986436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:06.006327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:06.008094Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:06.023872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:06.024070Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:06.024295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:06.024416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:06.024547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:06.024648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:06.024726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:06.024843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:06.024950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:06.025069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.025213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:06.025326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:06.058100Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:06.058321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:06.058399Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:06.058615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.058776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:06.058851Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:06.058895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:06.058977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:06.059032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:06.059075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:06.059112Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:06.059290Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.059350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:06.059390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:06.059422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:06.059497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:06.059544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:06.059596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:06.059643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:06.059732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:06.059768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:06.059798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:06.059841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-17T18:17:06.059880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:06.059906Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:06.060307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2025-04-17T18:17:06.060381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-17T18:17:06.060449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T18:17:06.060523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-17T18:17:06.060708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:06.060768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:06.060803Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:06.060968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:06.061028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.061058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.061206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:06.061252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:06.061298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:06.061486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:06.061530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:06.061563Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:06.061671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:06.061706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:06.061756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... en=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:06.974638Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.974685Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T18:17:06.974733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:06.974813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:06.974840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-04-17T18:17:06.974875Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-04-17T18:17:06.974913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=2;memory=8401426;count=1; 2025-04-17T18:17:06.975282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-04-17T18:17:06.979927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-04-17T18:17:06.980079Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.980113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T18:17:06.980139Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:06.980332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:06.980365Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:17:06.980401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-17T18:17:06.980448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2025-04-17T18:17:06.980486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:17:06.980560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.980682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.980826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:06.980914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.980999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.981043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:282:2300] finished for tablet 9437184 2025-04-17T18:17:06.981413Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:281:2299];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.048}],"full":{"a":1744913826932188,"name":"_full_task","f":1744913826932188,"d_finished":0,"c":0,"l":1744913826981102,"d":48914},"events":[{"name":"bootstrap","f":1744913826932380,"d_finished":3044,"c":1,"l":1744913826935424,"d":3044},{"a":1744913826980807,"name":"ack","f":1744913826980807,"d_finished":0,"c":0,"l":1744913826981102,"d":295},{"a":1744913826980799,"name":"processing","f":1744913826935473,"d_finished":7235,"c":9,"l":1744913826980733,"d":7538},{"name":"ProduceResults","f":1744913826934360,"d_finished":1694,"c":11,"l":1744913826981017,"d":1694},{"a":1744913826981025,"name":"Finish","f":1744913826981025,"d_finished":0,"c":0,"l":1744913826981102,"d":77},{"name":"task_result","f":1744913826935483,"d_finished":7130,"c":9,"l":1744913826980731,"d":7130}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.981478Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:281:2299];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:06.981823Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:281:2299];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.048},{"events":["l_ack","l_processing","l_Finish"],"t":0.049}],"full":{"a":1744913826932188,"name":"_full_task","f":1744913826932188,"d_finished":0,"c":0,"l":1744913826981520,"d":49332},"events":[{"name":"bootstrap","f":1744913826932380,"d_finished":3044,"c":1,"l":1744913826935424,"d":3044},{"a":1744913826980807,"name":"ack","f":1744913826980807,"d_finished":0,"c":0,"l":1744913826981520,"d":713},{"a":1744913826980799,"name":"processing","f":1744913826935473,"d_finished":7235,"c":9,"l":1744913826980733,"d":7956},{"name":"ProduceResults","f":1744913826934360,"d_finished":1694,"c":11,"l":1744913826981017,"d":1694},{"a":1744913826981025,"name":"Finish","f":1744913826981025,"d_finished":0,"c":0,"l":1744913826981520,"d":495},{"name":"task_result","f":1744913826935483,"d_finished":7130,"c":9,"l":1744913826980731,"d":7130}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:06.981913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:06.931610Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:06.981958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:06.982257Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:282:2300];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-04-17T18:17:05.786200Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:05.989594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:06.012559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 
2025-04-17T18:17:06.012866Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:06.021022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:06.021214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:06.021405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:06.021534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:06.021649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:06.021740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:06.021841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:06.021963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:06.022076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:06.022168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.022292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:06.022397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:06.052016Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:06.052147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:06.052198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:06.052358Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.052500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:06.052573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:06.052609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:06.052675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:06.052723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:06.052796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:06.052852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:06.053018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.053075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:06.053106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:06.053132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:06.053191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:06.053249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:06.053289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:06.053311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:06.053386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:06.053416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:06.053435Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:06.053481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:06.053511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:06.053530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:06.053904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-17T18:17:06.053982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-04-17T18:17:06.054054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-17T18:17:06.054130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-17T18:17:06.054284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:06.054341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:06.054369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:06.054585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:06.054634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.054666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.054814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:06.054859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:06.054891Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:06.055073Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:06.055109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:06.055153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:06.055268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:06.055335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:06.055383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... tails={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2;source_id=5; 2025-04-17T18:17:07.506693Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=ASSEMBLER::LAST_PK;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3;source_id=5; 2025-04-17T18:17:07.507080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=SNAPSHOT;details={};;scan_step_idx=4;source_id=5; 2025-04-17T18:17:07.507283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:67;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=5;source_id=5; 2025-04-17T18:17:07.507462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:07.507490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-04-17T18:17:07.507517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-04-17T18:17:07.507561Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=6;memory=8391908;count=2; 2025-04-17T18:17:07.507864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-04-17T18:17:07.508247Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-04-17T18:17:07.508346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:07.508378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T18:17:07.508405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:07.508571Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:07.508666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:17:07.508711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-17T18:17:07.508751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=6; 2025-04-17T18:17:07.508794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:17:07.508855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:07.508951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:07.509146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:07.509292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:07.509404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:07.509438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:417:2435] finished for tablet 9437184 2025-04-17T18:17:07.509828Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:413:2431];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.019}],"full":{"a":1744913827489951,"name":"_full_task","f":1744913827489951,"d_finished":0,"c":0,"l":1744913827509491,"d":19540},"events":[{"name":"bootstrap","f":1744913827490380,"d_finished":5016,"c":1,"l":1744913827495396,"d":5016},{"a":1744913827509122,"name":"ack","f":1744913827509122,"d_finished":0,"c":0,"l":1744913827509491,"d":369},{"a":1744913827509109,"name":"processing","f":1744913827495676,"d_finished":3876,"c":10,"l":1744913827509006,"d":4258},{"name":"ProduceResults","f":1744913827493269,"d_finished":2061,"c":12,"l":1744913827509425,"d":2061},{"a":1744913827509428,"name":"Finish","f":1744913827509428,"d_finished":0,"c":0,"l":1744913827509491,"d":63},{"name":"task_result","f":1744913827495694,"d_finished":3676,"c":10,"l":1744913827509004,"d":3676}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:07.509906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:413:2431];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:07.510358Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:413:2431];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1744913827489951,"name":"_full_task","f":1744913827489951,"d_finished":0,"c":0,"l":1744913827509951,"d":20000},"events":[{"name":"bootstrap","f":1744913827490380,"d_finished":5016,"c":1,"l":1744913827495396,"d":5016},{"a":1744913827509122,"name":"ack","f":1744913827509122,"d_finished":0,"c":0,"l":1744913827509951,"d":829},{"a":1744913827509109,"name":"processing","f":1744913827495676,"d_finished":3876,"c":10,"l":1744913827509006,"d":4718},{"name":"ProduceResults","f":1744913827493269,"d_finished":2061,"c":12,"l":1744913827509425,"d":2061},{"a":1744913827509428,"name":"Finish","f":1744913827509428,"d_finished":0,"c":0,"l":1744913827509951,"d":523},{"name":"task_result","f":1744913827495694,"d_finished":3676,"c":10,"l":1744913827509004,"d":3676}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:07.510494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:07.489170Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=1384;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4196;selected_rows=0; 2025-04-17T18:17:07.510560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:07.510904Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:417:2435];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] |93.4%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::ReadAggregate >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 2972, MsgBus: 6134 2025-04-17T18:15:52.558415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347503121332991:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:52.558768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e3f/r3tmp/tmpc2ee5j/pdisk_1.dat 2025-04-17T18:15:52.950611Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2972, node 1 2025-04-17T18:15:52.995029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:52.995139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:52.998410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:53.050926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:53.050965Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:53.050986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:53.051136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6134 TClient is connected to server localhost:6134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:15:53.586642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:53.602531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:15:55.179645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347516006235542:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:55.179653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347516006235549:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:55.179763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:15:55.183094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:15:55.192440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347516006235556:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:15:55.259643Z node 1 :TX_PROXY ERROR: Actor# [1:7494347516006235607:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:15:55.484785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:15:55.680715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:15:55.680956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:15:55.681192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:15:55.681297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:15:55.681431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:15:55.681554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:15:55.681672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:15:55.681776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:15:55.681905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:15:55.682011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:15:55.682118Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:15:55.682606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347516006235882:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:15:55.689253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:15:55.689357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:15:55.689626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:15:55.689754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:15:55.689864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:15:55.689991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:15:55.690104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:15:55.690459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:15:55.690607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:15:55.690732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:15:55.690867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:15:55.690999Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494347516006235865:2348];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:15:55.718923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347516006236031:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:15:55.718990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347516006236031:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract ... COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.719647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.723758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.723789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.728762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.728762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.733685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.733685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.738750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.738749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.742434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.743957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.745558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:16:56.747335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.748995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.752268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.752512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.756635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.757689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.761302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.762714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.767881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.768037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.775622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.778149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.788338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.788338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.794073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.794073Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.799883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.800488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.805333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.805688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.811874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.820301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.820345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.826649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.828093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.831792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.834720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.836722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.841184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.841190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.846013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.847822Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.851059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:16:56.930241Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fevc36bba5gwm6y88jprj", SessionId: ydb://session/3?node_id=1&id=MzI4Yzk2MWMtZmVjYjk5YTctZjc4NWZkMGUtZTdkNzc3Zjg=, Slow query, duration: 28.638168s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:16:57.149512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:16:57.149512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:16:57.150141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
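For readability, the escaped statement text inside the KQP_SLOW_LOG entry above expands to the following YQL DDL (reproduced verbatim from the log, with the \n escape sequences restored as line breaks; nothing here is new content):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);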
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 |93.4%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2025-04-17T18:16:26.252979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347647350936902:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.253040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013bd/r3tmp/tmpTC1OsS/pdisk_1.dat 2025-04-17T18:16:26.746607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.746733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.749221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:26.787474Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1479, node 1 2025-04-17T18:16:26.974505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:26.974525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.974531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.974624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.498708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:27.521034Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:29.517613Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA== 2025-04-17T18:16:29.520828Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA==, ActorId: [1:7494347660235839428:2327], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.521117Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.521156Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2025-04-17T18:16:29.548405Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc= 2025-04-17T18:16:29.548712Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.548927Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ReadyState, TraceId: 01js2fewkcas91h6cx8hpbbk75, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494347660235839429:2298] database: Root databaseId: /Root pool id: 2025-04-17T18:16:29.549005Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, Sending CompileQuery request 2025-04-17T18:16:30.250293Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, ExecutePhyTx, tx: 0x000050C00023D958 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-17T18:16:30.250399Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, Sending to Executer TraceId: 0 8 2025-04-17T18:16:30.250586Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, Created new KQP executer: [1:7494347664530806731:2328] isRollback: 0 2025-04-17T18:16:30.379597Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, Forwarded TEvStreamData to [1:7494347660235839429:2298] 2025-04-17T18:16:30.399040Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: 
[1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-17T18:16:30.400471Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, txInfo Status: Committed Kind: Pure TotalDuration: 154.529 ServerDuration: 154.429 QueriesCount: 2 2025-04-17T18:16:30.400544Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:16:30.400742Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:16:30.400767Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, EndCleanup, isFinal: 1 2025-04-17T18:16:30.400853Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: ExecuteState, TraceId: 01js2fewkcas91h6cx8hpbbk75, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7494347647350937150:2277] 2025-04-17T18:16:30.400901Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: unknown state, TraceId: 01js2fewkcas91h6cx8hpbbk75, Cleanup temp tables: 0 2025-04-17T18:16:30.401920Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmRhMmZlNWQtZTc5OGZjYTctN2I4MDgyY2QtMWYxNDZiZTc=, ActorId: [1:7494347660235839430:2328], ActorState: unknown state, TraceId: 01js2fewkcas91h6cx8hpbbk75, Session actor destroyed 2025-04-17T18:16:30.419274Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA==, ActorId: [1:7494347660235839428:2327], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:16:30.419325Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA==, ActorId: [1:7494347660235839428:2327], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:16:30.419365Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA==, ActorId: [1:7494347660235839428:2327], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:16:30.419389Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA==, ActorId: [1:7494347660235839428:2327], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:16:30.419462Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTAyYzlmYS04ZGIzNWVlMi04Y2NhNzE0ZC1mMGY2ZjVjZA==, ActorId: [1:7494347660235839428:2327], 
ActorState: unknown state, Session actor destroyed 2025-04-17T18:16:31.534241Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347668458563571:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:31.534290Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013bd/r3tmp/tmpzLiTAT/pdisk_1.dat 2025-04-17T18:16:31.678100Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:31.712000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:31.712077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:31.727324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15662, node 2 2025-04-17T18:16:31.841550Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:31.841573Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:31.841582Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:31.841689Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30874 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 ... 
8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] RunDataQuery: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2025-04-17T18:17:05.677243Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-17T18:17:05.677292Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ReadyState, TraceId: 01js2ffzwd662d9rmtad8czjdz, received request, proxyRequestId: 6 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpcActor: [8:7494347813476058598:2358] database: /Root databaseId: /Root pool id: default 2025-04-17T18:17:05.677328Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347813476058600:2359], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-17T18:17:05.677336Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [8:7494347813476058597:2357], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE= 2025-04-17T18:17:05.677380Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494347813476058601:2360], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, Start pool fetching 2025-04-17T18:17:05.677402Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347813476058602:2361], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-17T18:17:05.677628Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347813476058600:2359], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-17T18:17:05.677669Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-04-17T18:17:05.677683Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347813476058602:2361], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-17T18:17:05.677734Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494347813476058601:2360], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, Pool info successfully resolved 2025-04-17T18:17:05.677760Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE= 2025-04-17T18:17:05.677798Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE= 2025-04-17T18:17:05.677801Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494347813476058557:2347], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7494347813476058597:2357], session id: 
ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE= 2025-04-17T18:17:05.677837Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, continue request, pool id: default 2025-04-17T18:17:05.677840Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494347813476058557:2347], DatabaseId: /Root, PoolId: default, Reply continue success to [8:7494347813476058597:2357], session id: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, local in flight: 1 2025-04-17T18:17:05.678042Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2025-04-17T18:17:06.133453Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, ExecutePhyTx, tx: 0x000050C0004D0A58 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-17T18:17:06.133539Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, Sending to Executer TraceId: 0 8 2025-04-17T18:17:06.133630Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, Created new KQP executer: [8:7494347817771025927:2357] isRollback: 0 2025-04-17T18:17:06.139416Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-17T18:17:06.139503Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, ExecutePhyTx, tx: 0x000050C0004D0B18 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-17T18:17:06.140235Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-17T18:17:06.140372Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, txInfo Status: Committed Kind: ReadOnly TotalDuration: 7.055 ServerDuration: 6.978 QueriesCount: 2 2025-04-17T18:17:06.140451Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:17:06.140538Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: 
[8:7494347813476058597:2357], ActorState: ExecuteState, TraceId: 01js2ffzwd662d9rmtad8czjdz, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-17T18:17:06.140561Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494347813476058557:2347], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7494347813476058597:2357], session id: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, duration: 0.463037s, cpu consumed: 0.001934s 2025-04-17T18:17:06.140619Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494347813476058557:2347], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7494347813476058597:2357], session id: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, local in flight: 0 2025-04-17T18:17:06.140703Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 0.463037s, CpuConsumed: 0.001934s, AdjustCpuQuota: 0 2025-04-17T18:17:06.140860Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: CleanupState, TraceId: 01js2ffzwd662d9rmtad8czjdz, EndCleanup, isFinal: 0 2025-04-17T18:17:06.140909Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: CleanupState, TraceId: 01js2ffzwd662d9rmtad8czjdz, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7494347792001221156:2182] 2025-04-17T18:17:06.141170Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, TxId: 2025-04-17T18:17:06.141255Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, TxId: 2025-04-17T18:17:06.141722Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:06.141764Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:06.141792Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:06.141819Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:06.141894Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTIzOTE2MzItZmI2YWM5NDItNWM5ZDI4MzctMTcxM2U0ODE=, ActorId: [8:7494347813476058597:2357], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:06.155292Z node 8 :KQP_SESSION INFO: 
SessionId: ydb://session/3?node_id=8&id=MTc0NzVhYjctMjU1ZGUyZS03MjkzODBiMC0xYjc0MmM4MQ==, ActorId: [8:7494347809181091110:2335], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:06.155340Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTc0NzVhYjctMjU1ZGUyZS03MjkzODBiMC0xYjc0MmM4MQ==, ActorId: [8:7494347809181091110:2335], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:06.155369Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTc0NzVhYjctMjU1ZGUyZS03MjkzODBiMC0xYjc0MmM4MQ==, ActorId: [8:7494347809181091110:2335], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:06.155411Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTc0NzVhYjctMjU1ZGUyZS03MjkzODBiMC0xYjc0MmM4MQ==, ActorId: [8:7494347809181091110:2335], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:06.155490Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTc0NzVhYjctMjU1ZGUyZS03MjkzODBiMC0xYjc0MmM4MQ==, ActorId: [8:7494347809181091110:2335], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28348, MsgBus: 17420 2025-04-17T18:16:41.194454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347710473672678:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:41.194634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c65/r3tmp/tmpZ3M0vt/pdisk_1.dat 2025-04-17T18:16:41.696030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:41.696126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:41.698386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:41.700887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28348, node 1 2025-04-17T18:16:41.813675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:41.813691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:41.813709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:41.813823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17420 TClient is connected to server localhost:17420 WaitRootIsUp 'Root'... 
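The TCpuLoadFetcherActor entries in the TestCpuLoadActor output above show how the workload service samples cluster CPU load: it runs a single read-only query against the .sys/nodes system view and derives the total thread count and aggregate load from it. The query, exactly as quoted in the log (only the line breaks are restored for readability):

    -- TCpuLoadFetcherActor::OnRunQuery
    SELECT
        SUM(CpuThreads) AS ThreadsCount,
        SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad
    FROM `.sys/nodes`;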
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:42.497010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:42.535069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:42.550002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:42.709557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:42.861489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:42.933927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:44.484172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347723358576328:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:44.484277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:44.845215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:16:44.926368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:16:44.968138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:16:45.007972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:16:45.086549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:16:45.147865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:16:45.202575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:16:45.278672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347727653544292:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:45.278756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:45.278976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347727653544297:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:45.282989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:16:45.297258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347727653544299:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:16:45.381307Z node 1 :TX_PROXY ERROR: Actor# [1:7494347727653544356:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:46.186611Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347710473672678:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:46.186691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:16:46.409633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:16:48.016853Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:16:48.036343Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:16:48.082739Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 17143, MsgBus: 3791 2025-04-17T18:16:48.977213Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347740069986221:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:48.978172Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c65/r3tmp/tmpGnCpx4/pdisk_1.dat 2025-04-17T18:16:49.144700Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:49.184823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:49.184905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:49.191249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17143, node 2 2025-04-17T18:16:49.278692Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:49.278721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:49.278730Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:49.278839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3791 TClient is connected to server localhost:3791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:16:49.699482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:16:49.707044Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715 ... ompleted, doublechecking }
2025-04-17T18:16:52.685748Z node 2 :TX_PROXY ERROR: Actor# [2:7494347757249857849:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:16:53.699263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-17T18:16:53.977355Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347740069986221:2084];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:16:53.977479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:16:55.278440Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
Trying to start YDB, gRPC: 28187, MsgBus: 17194
2025-04-17T18:16:55.983481Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494347772871995886:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:16:55.983616Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001c65/r3tmp/tmpO1UO1m/pdisk_1.dat
2025-04-17T18:16:56.113492Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:16:56.147487Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:16:56.147584Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:16:56.149282Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28187, node 3
2025-04-17T18:16:56.230694Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:16:56.230722Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:16:56.230732Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:16:56.230857Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17194
TClient is connected to server localhost:17194
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:16:56.823469Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:16:56.830615Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-17T18:16:56.841877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:16:56.923004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:16:57.080794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:16:57.161470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:16:59.514737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494347790051866843:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:59.514844Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:59.576390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:16:59.617354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:16:59.656537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:16:59.704007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:16:59.734843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:16:59.805505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:16:59.842057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:16:59.933759Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494347790051867508:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:59.933866Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:59.934119Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494347790051867513:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:16:59.937977Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:16:59.947194Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494347790051867515:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:17:00.032996Z node 3 :TX_PROXY ERROR: Actor# [3:7494347794346834868:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:17:00.984799Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494347772871995886:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:17:00.984893Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:17:01.253699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-17T18:17:02.285523Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:02.367059Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:03.566441Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:03.587154Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:03.620250Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:04.513017Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:04.551580Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:04.596473Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:05.053341Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:05.076156Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:05.090271Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:05.842904Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:05.866345Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:06.498917Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:06.523659Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:07.506549Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:07.521450Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:07.533167Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:07.811422Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:07.824164Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:08.191440Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:08.215304Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-17T18:17:08.228565Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH10 [GOOD]
Test command err:
Trying to start YDB, gRPC: 63604, MsgBus: 4560
2025-04-17T18:15:49.863877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347488121751638:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:15:49.864098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e45/r3tmp/tmpw2jupg/pdisk_1.dat
2025-04-17T18:15:50.195078Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 63604, node 1
2025-04-17T18:15:50.258772Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:15:50.258797Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:15:50.258805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:15:50.258959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:15:50.269086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:15:50.269245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:15:50.270648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:4560
TClient is connected to server localhost:4560
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:15:50.777522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:15:50.792540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:15:52.796019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347501006654193:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:15:52.796108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:15:52.796255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347501006654205:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:15:52.799528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:15:52.809740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347501006654207:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-17T18:15:52.887889Z node 1 :TX_PROXY ERROR: Actor# [1:7494347501006654260:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:15:53.195221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T18:15:53.411496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:15:53.411673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:15:53.411919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:15:53.412013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:15:53.412102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:15:53.412197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:15:53.412281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:15:53.412372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:15:53.412458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:15:53.412544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:15:53.412637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:15:53.412722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494347505301621785:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-17T18:15:53.439229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:15:53.439289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:15:53.439462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:15:53.439572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:15:53.439665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:15:53.439758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:15:53.439855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:15:53.439951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:15:53.440049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:15:53.440151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:15:53.440247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:15:53.440336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347505301621768:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-17T18:15:53.451798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494347505301621773:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:15:53.451859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494347505301621773:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstra ... oller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.206354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.209437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.212146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.215989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.217656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.222356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.222700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.227914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.232972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.238319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.243601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.248813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.254356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.259216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.268700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.274511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.280276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.285936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.291998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.292166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.298003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.298126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.304027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.304834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.309891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.311064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.316310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.316981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.322597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.322887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.328278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.328627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.334363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.334504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.341450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.343929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.346751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.349222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.351786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.354358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.356716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.359350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.361757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.364419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.366783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:53.484205Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ferkb12p8z2srjsg43jtm", SessionId: ydb://session/3?node_id=1&id=MTkxYzBiNzktMWM3Njc3ZjktNmIwYzYyMzMtODVlZjBmODA=, Slow query, duration: 28.032494s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:16:53.711914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:16:53.712059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:16:53.712304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494347689985261835:9197];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392;
2025-04-17T18:16:53.712711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> TColumnShardTestReadWrite::WriteOverload+InStore-WithWritePortionsOnInsert
|93.5%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log}
|93.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 18627, MsgBus: 9179
2025-04-17T18:15:53.487826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347506947123184:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:15:53.487888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e3c/r3tmp/tmpOZrphO/pdisk_1.dat
2025-04-17T18:15:53.851741Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18627, node 1
2025-04-17T18:15:53.922817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:15:53.922970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:15:53.926725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:15:53.935349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:15:53.935380Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:15:53.935391Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:15:53.935660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9179
TClient is connected to server localhost:9179
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:15:54.417006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:15:56.367871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347519832025736:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:15:56.367874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347519832025744:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:15:56.367968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:15:56.372072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:15:56.380980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347519832025750:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-17T18:15:56.490493Z node 1 :TX_PROXY ERROR: Actor# [1:7494347519832025801:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:15:56.781885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T18:15:56.979948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:15:56.980181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:15:56.980454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:15:56.980607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:15:56.980720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:15:56.981018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:15:56.981139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:15:56.981255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:15:56.981363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:15:56.981484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:15:56.981604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:15:56.981709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494347519832026074:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-17T18:15:56.987954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:15:56.988083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:15:56.988303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:15:56.988399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:15:56.988511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:15:56.988669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:15:56.988789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:15:56.988867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:15:56.988938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:15:56.989028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:15:56.989114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:15:56.989178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494347519832026058:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-17T18:15:57.015725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494347519832026092:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:15:57.015804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494347519832026092:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:15:57.016008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_i ... 10714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.261595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.265798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.268080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.271574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.274451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.278029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.280880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.283207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.287049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.288675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.293277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.294159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.298253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.299421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.304316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.304527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.309616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.311302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.314377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.317894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.319178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.324447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.324517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.328807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.330522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.336855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.343148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.351536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.358231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.364641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.372058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.378752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.380318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.385507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.386665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.392453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.393583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.397642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.399042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.402883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.404589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.408300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.410039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.415666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:16:56.498720Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fewe3frzvr6nzajgdswkb", SessionId: ydb://session/3?node_id=1&id=NTU0YWYyOGYtYTkyZWM4ZDctZjM2ZGVlNzgtYWEyZjc1MjU=, Slow query, duration: 27.118948s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:16:56.698106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:16:56.698118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:16:56.698823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
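The warnings above (code 4534) mean the optimizer received JoinOrder/Rows hints whose aliases (Unused1, Unused2, R, T, ...) do not name tables actually joined in the executed query, so the hints were ignored. A minimal sketch of how such hints are typically supplied in YQL follows; the PRAGMA ydb.OptimizerHints spelling is an assumption not confirmed by this log, and the aliases t1/t2/t3 are hypothetical (taken from the DDL above). When an alias does not match a joined table, exactly this "Unapplied hint" warning is emitted.

-- Assumed pragma spelling; hint grammar mirrors the logged text:
-- JoinOrder fixes the join tree shape, Rows overrides a cardinality
-- estimate ("# 10e8" = 1e9 rows for the named table or join pair).
PRAGMA ydb.OptimizerHints = 'JoinOrder( (t1 t2) t3 ) Rows(t1 # 10e8)';

SELECT *
FROM t1
JOIN t2 ON t1.id1 = t2.t1_id1
JOIN t3 ON t2.id2 = CAST(t3.id3 AS Int64);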
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] >> EvWrite::WriteWithSplit >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> TColumnShardTestReadWrite::WriteRead [GOOD] >> Normalizers::EmptyTablesNormalizer >> KqpKv::BulkUpsert [GOOD] >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-04-17T18:17:05.786136Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:05.994447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:06.020157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:06.020536Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:06.028967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:06.029194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:06.029429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:06.029549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:06.029687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:06.029825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:06.029933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:06.030072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:06.030206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:06.030332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.030452Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:06.030558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:06.059109Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:06.059281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:06.059339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:06.059528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.059684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:06.059753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:06.059794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:06.059898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:06.059966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:06.060007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:06.060035Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:06.060222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.060292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:06.060333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:06.060368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:06.060445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:06.060498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:06.060540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:06.060577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:06.060684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:06.060725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:06.060758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:06.060808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:06.060848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:06.060894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:06.061290Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-17T18:17:06.061383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-17T18:17:06.061469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-17T18:17:06.061545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T18:17:06.061744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:06.061794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:06.061827Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:06.061993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:06.062029Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.062058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.062247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:06.062296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:06.062325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:06.062526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:06.062569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:06.062596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:06.062703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:06.062736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:06.062779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.098871Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:11.098996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T18:17:11.099068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T18:17:11.099234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-04-17T18:17:11.099382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.099523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.099663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.099882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:11.100044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.100171Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.100208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:423:2438] finished for tablet 9437184 2025-04-17T18:17:11.100591Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:422:2437];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":1744913831085645,"name":"_full_task","f":1744913831085645,"d_finished":0,"c":0,"l":1744913831100258,"d":14613},"events":[{"name":"bootstrap","f":1744913831085836,"d_finished":3098,"c":1,"l":1744913831088934,"d":3098},{"a":1744913831099846,"name":"ack","f":1744913831098542,"d_finished":1150,"c":1,"l":1744913831099692,"d":1562},{"a":1744913831099834,"name":"processing","f":1744913831090142,"d_finished":5843,"c":10,"l":1744913831099696,"d":6267},{"name":"ProduceResults","f":1744913831087607,"d_finished":3223,"c":13,"l":1744913831100195,"d":3223},{"a":1744913831100198,"name":"Finish","f":1744913831100198,"d_finished":0,"c":0,"l":1744913831100258,"d":60},{"name":"task_result","f":1744913831090169,"d_finished":4560,"c":9,"l":1744913831098411,"d":4560}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.100667Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:11.101045Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:422:2437];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":1744913831085645,"name":"_full_task","f":1744913831085645,"d_finished":0,"c":0,"l":1744913831100707,"d":15062},"events":[{"name":"bootstrap","f":1744913831085836,"d_finished":3098,"c":1,"l":1744913831088934,"d":3098},{"a":1744913831099846,"name":"ack","f":1744913831098542,"d_finished":1150,"c":1,"l":1744913831099692,"d":2011},{"a":1744913831099834,"name":"processing","f":1744913831090142,"d_finished":5843,"c":10,"l":1744913831099696,"d":6716},{"name":"ProduceResults","f":1744913831087607,"d_finished":3223,"c":13,"l":1744913831100195,"d":3223},{"a":1744913831100198,"name":"Finish","f":1744913831100198,"d_finished":0,"c":0,"l":1744913831100707,"d":509},{"name":"task_result","f":1744913831090169,"d_finished":4560,"c":9,"l":1744913831098411,"d":4560}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.101106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:11.085056Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-17T18:17:11.101167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:11.101466Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> Normalizers::PortionsNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-04-17T18:17:06.587990Z node 1 :BLOB_CACHE NOTICE: 
MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:06.681612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:06.704569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:06.704877Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:06.712382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:06.712585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:06.712788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:06.712879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:06.713040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:06.713148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:06.713247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:06.713383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:06.713490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:06.713589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.713683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:06.713767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:06.739278Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:06.739471Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:06.739533Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:06.739706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.739884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:06.739969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:06.740010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:06.740120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:06.740189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:06.740229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:06.740257Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:06.740433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.740497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:06.740536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:06.740566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:06.740654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:06.740714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:06.740756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:06.740786Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:06.740879Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:06.740926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:06.740953Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:06.740998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:06.741053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:06.741086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:06.741541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=70; 2025-04-17T18:17:06.741628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-17T18:17:06.741700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-17T18:17:06.741798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T18:17:06.741971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:06.742026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:06.742062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:06.742277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:06.742322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.742352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.742503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:06.742566Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:06.742600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:06.742798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:06.742839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:06.742867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:06.743004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:06.743045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:06.743090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... : string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.262304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:11.262457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T18:17:11.262557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T18:17:11.262748Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-17T18:17:11.262905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.263003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.263113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.263285Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:11.263472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.263606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.263644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:423:2438] finished for tablet 9437184 2025-04-17T18:17:11.264114Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:422:2437];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.06},{"events":["f_processing","f_task_result"],"t":0.061},{"events":["f_ack","l_task_result"],"t":0.069},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.071}],"full":{"a":1744913831192022,"name":"_full_task","f":1744913831192022,"d_finished":0,"c":0,"l":1744913831263705,"d":71683},"events":[{"name":"bootstrap","f":1744913831192231,"d_finished":60553,"c":1,"l":1744913831252784,"d":60553},{"a":1744913831263261,"name":"ack","f":1744913831261962,"d_finished":1173,"c":1,"l":1744913831263135,"d":1617},{"a":1744913831263251,"name":"processing","f":1744913831253955,"d_finished":5440,"c":10,"l":1744913831263136,"d":5894},{"name":"ProduceResults","f":1744913831193954,"d_finished":3220,"c":13,"l":1744913831263631,"d":3220},{"a":1744913831263634,"name":"Finish","f":1744913831263634,"d_finished":0,"c":0,"l":1744913831263705,"d":71},{"name":"task_result","f":1744913831253977,"d_finished":4145,"c":9,"l":1744913831261774,"d":4145}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.264209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:422:2437];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:11.264591Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:422:2437];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.06},{"events":["f_processing","f_task_result"],"t":0.061},{"events":["f_ack","l_task_result"],"t":0.069},{"events":["l_ProduceResults","f_Finish"],"t":0.071},{"events":["l_ack","l_processing","l_Finish"],"t":0.072}],"full":{"a":1744913831192022,"name":"_full_task","f":1744913831192022,"d_finished":0,"c":0,"l":1744913831264250,"d":72228},"events":[{"name":"bootstrap","f":1744913831192231,"d_finished":60553,"c":1,"l":1744913831252784,"d":60553},{"a":1744913831263261,"name":"ack","f":1744913831261962,"d_finished":1173,"c":1,"l":1744913831263135,"d":2162},{"a":1744913831263251,"name":"processing","f":1744913831253955,"d_finished":5440,"c":10,"l":1744913831263136,"d":6439},{"name":"ProduceResults","f":1744913831193954,"d_finished":3220,"c":13,"l":1744913831263631,"d":3220},{"a":1744913831263634,"name":"Finish","f":1744913831263634,"d_finished":0,"c":0,"l":1744913831264250,"d":616},{"name":"task_result","f":1744913831253977,"d_finished":4145,"c":9,"l":1744913831261774,"d":4145}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:11.264676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:11.191521Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:11.264724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:11.265125Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:423:2438];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::ReadStale >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> TColumnShardTestReadWrite::ReadAggregate [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless >> TColumnShardTestReadWrite::CompactionGCFailingBs >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-04-17T18:17:09.114859Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:09.200099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:09.225923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:09.226225Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:09.234004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:09.234238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:09.234482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:09.234607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:09.234725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:09.234829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:09.234939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:09.235046Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:09.235154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:09.235257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:09.235351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:09.235455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:09.261887Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:09.262025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:09.262095Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:09.262298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:09.262460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:09.262539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:09.262583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:09.262663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:09.262731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:09.262774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:09.262802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:09.263034Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:09.263114Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:09.263154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:09.263204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:09.263294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:09.263347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:09.263393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:09.263420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:09.263490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:09.263525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:09.263552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:09.263628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:09.263668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:09.263701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:09.264070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-04-17T18:17:09.264157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-04-17T18:17:09.264243Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-17T18:17:09.264311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-17T18:17:09.264481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:09.264535Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:09.264574Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:09.264777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:09.264823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:09.264872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:09.265040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:09.265094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:09.265125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:09.265304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:09.265345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:09.265391Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:09.265528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:09.265563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:09.265609Z node 1 :TX_COLUMNSHARD INFO: tablet_i ... 
.676761Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:12.676911Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:12.676939Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:17:12.676970Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-04-17T18:17:12.677007Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-04-17T18:17:12.677036Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:17:12.677100Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.677128Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-17T18:17:12.677154Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:12.677524Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:12.677629Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.677657Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:12.677748Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-04-17T18:17:12.677800Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-04-17T18:17:12.677892Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:434:2452];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-04-17T18:17:12.677980Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.678057Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.678133Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.678354Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:12.678447Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.678522Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.678550Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:435:2453] finished for tablet 9437184 2025-04-17T18:17:12.678893Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[2:434:2452];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744913832667943,"name":"_full_task","f":1744913832667943,"d_finished":0,"c":0,"l":1744913832678592,"d":10649},"events":[{"name":"bootstrap","f":1744913832668090,"d_finished":1775,"c":1,"l":1744913832669865,"d":1775},{"a":1744913832678339,"name":"ack","f":1744913832677503,"d_finished":654,"c":1,"l":1744913832678157,"d":907},{"a":1744913832678330,"name":"processing","f":1744913832670786,"d_finished":5031,"c":10,"l":1744913832678159,"d":5293},{"name":"ProduceResults","f":1744913832669045,"d_finished":1941,"c":13,"l":1744913832678539,"d":1941},{"a":1744913832678541,"name":"Finish","f":1744913832678541,"d_finished":0,"c":0,"l":1744913832678592,"d":51},{"name":"task_result","f":1744913832670800,"d_finished":4266,"c":9,"l":1744913832677191,"d":4266}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.678956Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:434:2452];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:12.679252Z node 2 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[2:434:2452];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":1744913832667943,"name":"_full_task","f":1744913832667943,"d_finished":0,"c":0,"l":1744913832678988,"d":11045},"events":[{"name":"bootstrap","f":1744913832668090,"d_finished":1775,"c":1,"l":1744913832669865,"d":1775},{"a":1744913832678339,"name":"ack","f":1744913832677503,"d_finished":654,"c":1,"l":1744913832678157,"d":1303},{"a":1744913832678330,"name":"processing","f":1744913832670786,"d_finished":5031,"c":10,"l":1744913832678159,"d":5689},{"name":"ProduceResults","f":1744913832669045,"d_finished":1941,"c":13,"l":1744913832678539,"d":1941},{"a":1744913832678541,"name":"Finish","f":1744913832678541,"d_finished":0,"c":0,"l":1744913832678988,"d":447},{"name":"task_result","f":1744913832670800,"d_finished":4266,"c":9,"l":1744913832677191,"d":4266}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-17T18:17:12.679307Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:12.667545Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-17T18:17:12.679334Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:12.679523Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;
>> TColumnShardTestReadWrite::ReadStale [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD]
Test command err:
2025-04-17T18:17:12.875466Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:12.965831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:12.993208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:12.993547Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:13.001883Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:13.002115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:13.002365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:13.002497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:13.002636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:13.002757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:13.002877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:13.003003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:13.003183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:13.003319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.003423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:13.003525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:13.033069Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:13.033231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:13.033305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:13.033502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:13.033668Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:13.033760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:13.033806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:13.033916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:13.034009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:13.034064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:13.034098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:13.034304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:13.034380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:13.034439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:13.034477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:13.034587Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:13.034653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:13.034706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:13.034738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:13.034866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:13.034913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:13.034949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-17T18:17:13.034999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:13.035042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:13.035073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:13.035501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=83; 2025-04-17T18:17:13.035593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-17T18:17:13.035688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:17:13.035783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T18:17:13.035965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:13.036036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:13.036077Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:13.036293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:13.036339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.036379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.036528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:13.036572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:13.036604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:13.036815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:13.036865Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:13.036900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:13.037032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:13.037077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:13.037125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... mplete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=77304; 2025-04-17T18:17:13.705197Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::2fb827c6-1bb811f0-b1563fd3-a39ff96e; 2025-04-17T18:17:13.705260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-17T18:17:13.705370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:753;event=indexation;bytes=77304;blobs_count=9;max_limit=251658240;has_more=0;external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e; 2025-04-17T18:17:13.705573Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:619;event=start_changes;type=CS::INDEXATION;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e; 2025-04-17T18:17:13.705988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=69691;external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;type=CS::INDEXATION;priority=0;; 2025-04-17T18:17:13.706269Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=69691;external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;type=CS::INDEXATION;priority=0;; 2025-04-17T18:17:13.706325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;mem=69691;cpu=0; 2025-04-17T18:17:13.706503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;task_id=1;mem=69691;cpu=0; 2025-04-17T18:17:13.706633Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e; 2025-04-17T18:17:13.725888Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-17T18:17:13.726072Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-17T18:17:13.726710Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:17:13.726774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:17:13.726865Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:768;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=77304;indexing_debug={task_ids=2fb827c6-1bb811f0-b1563fd3-a39ff96e,;}; 2025-04-17T18:17:13.726967Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T18:17:13.727212Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:17:13.727265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:13.727305Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:13.727390Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:17:13.727739Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 1 version: {1744913473934:max} readable: {1744913833934:max} at tablet 9437184 2025-04-17T18:17:13.740460Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-04-17T18:17:13.740599Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1744913473934:max};tablet=9437184;timeout=0.000000s;fline=constructor.cpp:18;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-04-17T18:17:13.740702Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1744913473934:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {1744913473934:max}. CS min read snapshot: {1744913533934:max}. 
now: 2025-04-17T18:17:13.740665Z; 2025-04-17T18:17:13.754266Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:1:6824:0]; 2025-04-17T18:17:13.754469Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:2:6824:0]; 2025-04-17T18:17:13.760008Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:17:13.760256Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-04-17T18:17:13.761276Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-04-17T18:17:13.761415Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:17:13.761895Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1744913473934:max} readable: {1744913833934:max} at tablet 9437184 2025-04-17T18:17:13.774125Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-17T18:17:13.774208Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;fline=with_appended.cpp:65;portions=1,;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e; 2025-04-17T18:17:13.774441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::2fb827c6-1bb811f0-b1563fd3-a39ff96e; 2025-04-17T18:17:13.774513Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:17:13.774574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:13.774650Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T18:17:13.774714Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:17:13.774768Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:13.774806Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:13.774905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.993000s; 2025-04-17T18:17:13.774967Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:17:13.775087Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-04-17T18:17:13.775255Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=2fb827c6-1bb811f0-b1563fd3-a39ff96e;mem=69691;cpu=0; 2025-04-17T18:17:13.775375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:13.775476Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-17T18:17:13.777800Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913473934:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-04-17T18:17:13.777891Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913473934:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-04-17T18:17:13.778716Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913473934:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-04-17T18:17:13.778817Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1744913473934:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {1744913473934:max}. CS min read snapshot: {1744913533934:max}. 
now: 2025-04-17T18:17:13.778788Z;
>> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32
>> Normalizers::PortionsNormalizer [GOOD]
>> Normalizers::EmptyTablesNormalizer [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1]
>> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD]
Test command err:
2025-04-17T18:17:12.324350Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:12.402664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:12.426361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:12.426650Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:12.434234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-17T18:17:12.434472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-04-17T18:17:12.434573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.434735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:12.434917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:12.435046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:12.435149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:12.435248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:12.435357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:12.435464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:12.435565Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:12.435690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:12.435793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:12.435882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:12.464372Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:12.464578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-17T18:17:12.464624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.464891Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-04-17T18:17:12.465006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.465062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.465093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.465404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-17T18:17:12.465468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-04-17T18:17:12.465540Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:17:12.465592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=21; 2025-04-17T18:17:12.465676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.465723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-17T18:17:12.465772Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-17T18:17:12.465917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:12.465975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:12.466012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:12.466032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-17T18:17:12.466086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:12.466125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:12.466168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:12.466208Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:12.466325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:12.466386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:12.466432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:12.466459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:12.466547Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:12.466582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:12.466606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:12.466621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:12.466662Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:12.466683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:12.466698Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:12.466731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:12.466753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:12.466769Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:12.467003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=28; 2025-04-17T18:17:12.467052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=19; 2025-04-17T18:17:12.467094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=15; 2025-04-17T18:17:12.467153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=18; 2025-04-17T18:17:12.467250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:12.467290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=a ... 96:2410];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:17:14.972652Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:17:14.972675Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:17:14.972692Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:17:14.972724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:17:14.972770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:14.972821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T18:17:14.972869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:17:14.972907Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:14.972943Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:14.973010Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-17T18:17:14.973059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:17:15.081077Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {1744913833443:111} readable: {1744913833443:max} at tablet 9437184 2025-04-17T18:17:15.081298Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-17T18:17:15.081518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833443:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-17T18:17:15.081630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833443:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-17T18:17:15.082302Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833443:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-04-17T18:17:15.082402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833443:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:17:15.082922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833443:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:469:2475];trace_detailed=; 2025-04-17T18:17:15.083560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-17T18:17:15.083808Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-17T18:17:15.084152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:15.084286Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.084419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.084464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:469:2475] finished for tablet 9437184 2025-04-17T18:17:15.084884Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:467:2474];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913835082839,"name":"_full_task","f":1744913835082839,"d_finished":0,"c":0,"l":1744913835084536,"d":1697},"events":[{"name":"bootstrap","f":1744913835083035,"d_finished":927,"c":1,"l":1744913835083962,"d":927},{"a":1744913835084124,"name":"ack","f":1744913835084124,"d_finished":0,"c":0,"l":1744913835084536,"d":412},{"a":1744913835084109,"name":"processing","f":1744913835084109,"d_finished":0,"c":0,"l":1744913835084536,"d":427},{"name":"ProduceResults","f":1744913835083947,"d_finished":272,"c":2,"l":1744913835084446,"d":272},{"a":1744913835084453,"name":"Finish","f":1744913835084453,"d_finished":0,"c":0,"l":1744913835084536,"d":83}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.084957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:467:2474];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:15.085293Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:467:2474];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913835082839,"name":"_full_task","f":1744913835082839,"d_finished":0,"c":0,"l":1744913835085000,"d":2161},"events":[{"name":"bootstrap","f":1744913835083035,"d_finished":927,"c":1,"l":1744913835083962,"d":927},{"a":1744913835084124,"name":"ack","f":1744913835084124,"d_finished":0,"c":0,"l":1744913835085000,"d":876},{"a":1744913835084109,"name":"processing","f":1744913835084109,"d_finished":0,"c":0,"l":1744913835085000,"d":891},{"name":"ProduceResults","f":1744913835083947,"d_finished":272,"c":2,"l":1744913835084446,"d":272},{"a":1744913835084453,"name":"Finish","f":1744913835084453,"d_finished":0,"c":0,"l":1744913835085000,"d":547}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.085371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:15.082375Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 
2025-04-17T18:17:15.085410Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:15.085506Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> ResourcePoolsDdl::TestDropResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-04-17T18:17:12.146438Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:12.238513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:12.265437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:12.265736Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:12.273535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-04-17T18:17:12.273753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.273918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:12.274078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:12.274234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:12.274354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:12.274471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-17T18:17:12.274567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:12.274679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:12.274793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:12.274928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:12.275038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:12.275137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:12.306744Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:12.306994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-04-17T18:17:12.307059Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.307485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-04-17T18:17:12.308868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=1305; 2025-04-17T18:17:12.308971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-04-17T18:17:12.309048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T18:17:12.309218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:12.309306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-17T18:17:12.309360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-17T18:17:12.309501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:12.309579Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:12.309633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:12.309666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-17T18:17:12.309739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:12.309792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:12.309855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:12.309903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:12.310077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:12.310142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:12.310209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:12.310237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:12.310333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:12.310429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:12.310484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:12.310510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:12.310620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:12.310658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:12.310682Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:12.310727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:12.310798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:12.310830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:12.311186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-04-17T18:17:12.311297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-17T18:17:12.311356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=20; 2025-04-17T18:17:12.311437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-17T18:17:12.311578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:12.311625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:12.311655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:12.311861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:12.311941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:12.311986Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:12.312163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUp ... 
3Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:14.888712Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:17:14.888776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:17:14.888799Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:17:14.888821Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:17:14.888858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:17:14.888923Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:14.888984Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T18:17:14.889040Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:17:14.889082Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:14.889113Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:14.889184Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:17:15.023660Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {1744913833214:111} readable: {1744913833214:max} at tablet 9437184 2025-04-17T18:17:15.023813Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-17T18:17:15.024007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833214:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-17T18:17:15.024125Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833214:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { 
Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-17T18:17:15.024993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833214:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-04-17T18:17:15.025130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833214:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-17T18:17:15.025686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1744913833214:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:463:2468];trace_detailed=; 2025-04-17T18:17:15.026334Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-17T18:17:15.026598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-17T18:17:15.026935Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:15.027087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.027214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.027259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:463:2468] finished for tablet 
9437184 2025-04-17T18:17:15.027681Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:461:2467];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744913835025614,"name":"_full_task","f":1744913835025614,"d_finished":0,"c":0,"l":1744913835027321,"d":1707},"events":[{"name":"bootstrap","f":1744913835025787,"d_finished":980,"c":1,"l":1744913835026767,"d":980},{"a":1744913835026907,"name":"ack","f":1744913835026907,"d_finished":0,"c":0,"l":1744913835027321,"d":414},{"a":1744913835026892,"name":"processing","f":1744913835026892,"d_finished":0,"c":0,"l":1744913835027321,"d":429},{"name":"ProduceResults","f":1744913835026752,"d_finished":289,"c":2,"l":1744913835027243,"d":289},{"a":1744913835027246,"name":"Finish","f":1744913835027246,"d_finished":0,"c":0,"l":1744913835027321,"d":75}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.027776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:461:2467];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:15.028181Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:461:2467];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744913835025614,"name":"_full_task","f":1744913835025614,"d_finished":0,"c":0,"l":1744913835027821,"d":2207},"events":[{"name":"bootstrap","f":1744913835025787,"d_finished":980,"c":1,"l":1744913835026767,"d":980},{"a":1744913835026907,"name":"ack","f":1744913835026907,"d_finished":0,"c":0,"l":1744913835027821,"d":914},{"a":1744913835026892,"name":"processing","f":1744913835026892,"d_finished":0,"c":0,"l":1744913835027821,"d":929},{"name":"ProduceResults","f":1744913835026752,"d_finished":289,"c":2,"l":1744913835027243,"d":289},{"a":1744913835027246,"name":"Finish","f":1744913835027246,"d_finished":0,"c":0,"l":1744913835027821,"d":575}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:15.028260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:15.025089Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-17T18:17:15.028303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:15.028407Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-17T18:17:05.786656Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:05.986362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:06.006951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:06.008229Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:06.020852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:06.021070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:06.021267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:06.021388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:06.021473Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:06.021566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:06.021663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:06.021740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:06.021827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:06.021910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.021985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:06.022063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:06.048558Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:06.048735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:06.048793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:06.048981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.051110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:06.051213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:06.051263Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:06.051375Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:06.051478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:06.051523Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:06.051553Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:06.051734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.051801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:06.051839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:06.051871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:06.051972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:06.052025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:06.052068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:06.052096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:06.052183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:06.052224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:06.052251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:06.052315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:06.052365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:06.052406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:06.052820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2025-04-17T18:17:06.052905Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T18:17:06.053643Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=681; 2025-04-17T18:17:06.053810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=82; 2025-04-17T18:17:06.054028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:06.054099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:06.054150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:06.054429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:06.054500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.054543Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.054702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:06.054751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:06.054784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:06.055037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:06.055085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:06.055116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:06.055247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:06.055299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:06.055352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
tablet_id=9437184; 2025-04-17T18:17:06.333042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:17:06.333093Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:17:06.333127Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:17:06.333179Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:17:06.334217Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-17T18:17:06.625258Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=88923003853600;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-04-17T18:17:06.637320Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024576;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-04-17T18:17:06.637402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024576;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-17T18:17:06.637498Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744913826913;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024576;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-04-17T18:17:06.637826Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-17T18:17:06.637968Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1744913826913 at tablet 9437184, mediator 0 2025-04-17T18:17:06.638007Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-17T18:17:06.638381Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-17T18:17:06.644355Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T18:17:06.644494Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-17T18:17:06.644558Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-17T18:17:06.653662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-17T18:17:06.653795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:214;event=finished_tx;tx_id=10; 2025-04-17T18:17:06.678873Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-17T18:17:06.679009Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6330728;columns=10; CATCH TEvWritePortionResult, status OK 2025-04-17T18:17:14.136124Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=columnshard__write.cpp:96;event=shard_overload;reason=writes_size_in_fly;sum=132945288;limit=128000000; 2025-04-17T18:17:14.137516Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=overload data error;tx_id=0; CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK 2025-04-17T18:17:14.878844Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T18:17:14.901366Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T18:17:14.911915Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T18:17:14.923950Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T18:17:14.926290Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-17T18:17:14.938564Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-17T18:17:14.941053Z node 1 :TX_COLUMNSHARD 
DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:14.953073Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:14.962113Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:14.974296Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:14.976831Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:14.988577Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:14.991041Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:15.002936Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:15.013307Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:15.025163Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:15.027075Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:15.039285Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:15.041919Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:15.053745Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:15.060736Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:15.072817Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:15.075483Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:15.087629Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:15.090133Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:15.102315Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:15.112507Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:15.124755Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:15.126975Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T18:17:15.138984Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T18:17:15.141086Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:15.153351Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:15.168430Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:15.180393Z node 1 
:TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:15.182929Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T18:17:15.195107Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T18:17:15.197656Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:15.209751Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:15.219201Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:15.231296Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:15.233699Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:15.245834Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:15.638053Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-17T18:17:15.650266Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> TColumnShardTestReadWrite::ReadGroupBy Test command err: 2025-04-17T18:16:26.259324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347647837236883:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.259481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c7/r3tmp/tmp0cMUxu/pdisk_1.dat 2025-04-17T18:16:26.718530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.718615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.724075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:26.770673Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21123, node 1 2025-04-17T18:16:26.974369Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:26.974395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.974421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.974542Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15983 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.499997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:27.522648Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:29.518643Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:29.528904Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODkzYTkzYWMtNjgwMGU3NTMtYjlhNThiOTItMTU1MmVlNjk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODkzYTkzYWMtNjgwMGU3NTMtYjlhNThiOTItMTU1MmVlNjk= 2025-04-17T18:16:29.531904Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-17T18:16:29.531952Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.531970Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:29.532038Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660722139256:2329], Start check tables existence, number paths: 2 2025-04-17T18:16:29.533937Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODkzYTkzYWMtNjgwMGU3NTMtYjlhNThiOTItMTU1MmVlNjk=, ActorId: [1:7494347660722139276:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.534278Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660722139256:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:29.534330Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660722139256:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:29.534354Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660722139256:2329], Successfully finished 2025-04-17T18:16:29.538776Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:29.550363Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660722139278:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.553914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:29.560789Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660722139278:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:29.562782Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660722139278:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:29.570257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660722139278:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:29.643825Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660722139278:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.648559Z node 1 :TX_PROXY ERROR: Actor# [1:7494347660722139329:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:29.648732Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660722139278:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:29.656050Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2025-04-17T18:16:29.656069Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:29.656137Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660722139338:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-17T18:16:29.656171Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODkzYTkzYWMtNjgwMGU3NTMtYjlhNThiOTItMTU1MmVlNjk=, ActorId: [1:7494347660722139276:2330], ActorState: ReadyState, TraceId: 01js2fewpq6p660y9nw20xsh6w, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL my_pool WITH ( CONCURRENT_QUERY_LIMIT=1, QUEUE_SIZE=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-17T18:16:29.657650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660722139338:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:29.657742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:29.978878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:16:29.985955Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ODkzYTkzYWMtNjgwMGU3NTMtYjlhNThiOTItMTU1MmVlNjk=, ActorId: [1:7494347660722139276:2330], ActorState: ExecuteState, TraceId: 01js2fewpq6p660y9nw20xsh6w, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7494347660722139347:2330] WorkloadServiceCleanup: 0 2025-04-17T18:16:29.994615Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODkzYTkzYWMtNjgwMGU3NTMtYjlhNThiOTItMTU1MmVlNjk=, ActorId: [1:7494347660722139276:2330], ActorState: CleanupState, TraceId: 01js2fewpq6p660y9nw20xsh6w, EndCleanup, isFinal: 0 2025-04-17T18:16:29.994676Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODkzYTkzYWMtNjgwMGU3NTMtYjlhNThiOTItMTU1MmVlNjk=, ActorId: [1:7494347660722139276:2330], ActorState: CleanupState, TraceId: 01js2fewpq6p660y9nw20xsh6w, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7494347647837236975:2277] 2025-04-17T18:16:30.002146Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FjNTEyZmYtYTMxYjMxNTItODNiM2FiMTctYjgwODllMTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2FjNTEyZmYtYTMxYjMxNTItODNiM2FiMTctYjgwODllMTQ= 2025-04-17T18:16:30.002278Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FjNTEyZmYtYTMxYjMxNTItODNiM2FiMTctYjgwODllMTQ=, ActorId: [1:7494347665017106666:2333], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:30.002441Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-17T18:16:30.002485Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665017106668:2334], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-17T18:16:30.002515Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FjNTEyZmYtYTMxYjMxNTItODNiM2FiMTctYjgwODllMTQ=, ActorId: [1:7494347665017106666:2333], ActorState: ReadyState, TraceId: 01js2fex1j3y7czmpr698t03ss, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494347660722139369:2364] database: Root databaseId: /Root pool id: my_pool 2025-04-17T18:16:30.002598Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7494347665017106666:2333], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=1&id=N2FjNTEyZmYtYTMxYjMxNTItODNiM2FiMTctYjgwODllMTQ= 2025-04-17T18:16:30.002690Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347665017106669:2335], Database: /Root, Start database fetching 2025-04-17T18:16:30.003583Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347665017106669:2335], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-17T18:16:30.003699Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665017106668:2334], DatabaseId: /Root, PoolId: my_pool, Pool info su ... 
4347858675229803:2442], ActorState: ExecuteState, TraceId: 01js2fg937fbtj87vwdc0xgqje, Sent query response back to proxy, proxyRequestId: 18, proxyId: [8:7494347824315490422:2270] 2025-04-17T18:17:15.119566Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MmY0Y2E0ZDMtNzJmYzE2ODktZmY1OWZiNDYtNzAyNDJhMGQ=, TxId: 2025-04-17T18:17:15.119652Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=MmY0Y2E0ZDMtNzJmYzE2ODktZmY1OWZiNDYtNzAyNDJhMGQ=, TxId: 2025-04-17T18:17:15.119731Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494347841495360147:2338], DatabaseId: /Root, PoolId: my_pool, successfully refreshed pool state, in flight: 0, delayed: 0 2025-04-17T18:17:15.119774Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY0Y2E0ZDMtNzJmYzE2ODktZmY1OWZiNDYtNzAyNDJhMGQ=, ActorId: [8:7494347858675229803:2442], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:15.119800Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY0Y2E0ZDMtNzJmYzE2ODktZmY1OWZiNDYtNzAyNDJhMGQ=, ActorId: [8:7494347858675229803:2442], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:15.119832Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY0Y2E0ZDMtNzJmYzE2ODktZmY1OWZiNDYtNzAyNDJhMGQ=, ActorId: [8:7494347858675229803:2442], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:15.119871Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY0Y2E0ZDMtNzJmYzE2ODktZmY1OWZiNDYtNzAyNDJhMGQ=, ActorId: [8:7494347858675229803:2442], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:15.119944Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY0Y2E0ZDMtNzJmYzE2ODktZmY1OWZiNDYtNzAyNDJhMGQ=, ActorId: [8:7494347858675229803:2442], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:15.120373Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494347841495360147:2338], DatabaseId: /Root, PoolId: my_pool, Got delete notification 2025-04-17T18:17:15.120436Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-17T18:17:15.120499Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347858675229863:2456], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-17T18:17:15.120708Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347858675229863:2456], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:15.120769Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:15.129531Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: ExecuteState, TraceId: 01js2fg92x0kdy71ymgh3y86rh, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [8:7494347858675229829:2330] WorkloadServiceCleanup: 0 2025-04-17T18:17:15.131178Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: CleanupState, TraceId: 01js2fg92x0kdy71ymgh3y86rh, EndCleanup, isFinal: 0 2025-04-17T18:17:15.131231Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: CleanupState, TraceId: 01js2fg92x0kdy71ymgh3y86rh, Sent query response back to proxy, proxyRequestId: 17, proxyId: [8:7494347824315490422:2270] 2025-04-17T18:17:15.135834Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI= 2025-04-17T18:17:15.135920Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:15.136157Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-17T18:17:15.136203Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347858675229879:2458], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-17T18:17:15.136238Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: ReadyState, TraceId: 01js2fg940es1z850hhrtkka5d, received request, proxyRequestId: 19 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [8:7494347858675229876:2649] database: Root databaseId: /Root pool id: my_pool 2025-04-17T18:17:15.136279Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [8:7494347858675229877:2457], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI= 2025-04-17T18:17:15.136339Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494347858675229880:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, Start pool fetching 2025-04-17T18:17:15.136379Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347858675229882:2460], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-17T18:17:15.136440Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347858675229879:2458], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:15.136531Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:15.136579Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347858675229882:2460], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:15.136659Z node 8 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494347858675229880:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:15.136759Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [8:7494347858675229880:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-04-17T18:17:15.136874Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [8:7494347858675229877:2457]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-04-17T18:17:15.136977Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: ExecuteState, TraceId: 01js2fg940es1z850hhrtkka5d, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-17T18:17:15.137093Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: ExecuteState, TraceId: 01js2fg940es1z850hhrtkka5d, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-17T18:17:15.137167Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [8:7494347858675229877:2457], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI= 2025-04-17T18:17:15.137230Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: CleanupState, TraceId: 01js2fg940es1z850hhrtkka5d, EndCleanup, isFinal: 1 2025-04-17T18:17:15.137325Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: CleanupState, TraceId: 01js2fg940es1z850hhrtkka5d, Sent query response back to proxy, proxyRequestId: 19, proxyId: [8:7494347824315490422:2270] 2025-04-17T18:17:15.137359Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: unknown state, TraceId: 01js2fg940es1z850hhrtkka5d, Cleanup temp tables: 0 2025-04-17T18:17:15.137471Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NGRkMmJmOTEtMjUwMzFiNDktOGJjMWJjZTktMTE4YTc5NzI=, ActorId: [8:7494347858675229877:2457], ActorState: unknown state, TraceId: 01js2fg940es1z850hhrtkka5d, Session actor destroyed 2025-04-17T18:17:15.144222Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:15.144268Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:15.144295Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:15.144324Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:15.144401Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NDY5NDU1OTctZDgzYmNlNmItY2M2MWJjNzAtODBjZDllMTM=, ActorId: [8:7494347841495360039:2330], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 304 176 28 48 32 24 16 24 56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2025-04-17T18:16:26.249766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347645497270558:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.249887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013b0/r3tmp/tmp5EwAKw/pdisk_1.dat 2025-04-17T18:16:26.777639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.777746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.777972Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:26.791222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3590, node 1 2025-04-17T18:16:26.971137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:26.971165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.971176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.971293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.555866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:27.573408Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:29.623095Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:29.623193Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658382173021:2329], Start check tables existence, number paths: 2 2025-04-17T18:16:29.628964Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODEwYzAxNGMtYTU4NmVjOC1kZWM4MDQ1OS1iZDhlMTM0NQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODEwYzAxNGMtYTU4NmVjOC1kZWM4MDQ1OS1iZDhlMTM0NQ== 2025-04-17T18:16:29.629461Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODEwYzAxNGMtYTU4NmVjOC1kZWM4MDQ1OS1iZDhlMTM0NQ==, ActorId: [1:7494347658382173038:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.629988Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-17T18:16:29.630042Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.630073Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-04-17T18:16:29.630274Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658382173021:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:29.630368Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658382173021:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:29.630468Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658382173021:2329], Successfully finished 2025-04-17T18:16:29.642739Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:29.650538Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658382173040:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.660830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:29.662442Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658382173040:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:29.662654Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658382173040:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:29.676798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658382173040:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:29.758304Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658382173040:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.762764Z node 1 :TX_PROXY ERROR: Actor# [1:7494347658382173092:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:29.762899Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658382173040:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:29.765719Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY= 2025-04-17T18:16:29.765827Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY=, ActorId: [1:7494347658382173099:2331], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.766026Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:16:29.766052Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:29.766127Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347658382173101:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.766254Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY=, ActorId: [1:7494347658382173099:2331], ActorState: ReadyState, TraceId: 01js2fewt6daentza00ne8wzad, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494347658382173098:2342] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-17T18:16:29.766311Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7494347658382173099:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY= 2025-04-17T18:16:29.766363Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347658382173102:2333], Database: /Root, Start database fetching 2025-04-17T18:16:29.766736Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347658382173102:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-17T18:16:29.766795Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-17T18:16:29.766858Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347658382173108:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY=, Start pool fetching 2025-04-17T18:16:29.766878Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7494347658382173110:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.767699Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347658382173101:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.767707Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347658382173110:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.767749Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-17T18:16:29.767765Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347658382173108:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY=, Pool info successfully resolved 2025-04-17T18:16:29.767830Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-17T18:16:29.768774Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY= 2025-04-17T18:16:29.768873Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347658382173115:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-17T18:16:29.768935Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347658382173115:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7494347658382173099:2331], session id: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY= 2025-04-17T18:16:29.768994Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTlkYzhjNWMtOTk5YTFkYmMtM2M0OTNkNzYtNzdkNDkzMmY= 2025-04-17T18:16:29.769032Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] ... 
=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Sending CompileQuery request 2025-04-17T18:17:14.514265Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7494347833092609218:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:14.514356Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:17:14.567148Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, ExecutePhyTx, tx: 0x000050C00019CFD8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-17T18:17:14.567204Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Sending to Executer TraceId: 0 8 2025-04-17T18:17:14.567264Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Created new KQP executer: [10:7494347854274827814:2404] isRollback: 0 2025-04-17T18:17:14.572987Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Forwarded TEvStreamData to [9:7494347855257872790:3385] 2025-04-17T18:17:14.573875Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-17T18:17:14.574055Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, txInfo Status: Committed Kind: ReadOnly TotalDuration: 6.981 ServerDuration: 6.917 QueriesCount: 2 2025-04-17T18:17:14.574136Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:17:14.574559Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:14.574604Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, EndCleanup, isFinal: 1 2025-04-17T18:17:14.574652Z node 10 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: ExecuteState, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Sent query response back to proxy, proxyRequestId: 5, proxyId: [10:7494347832799990260:2277] 2025-04-17T18:17:14.574678Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: unknown state, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Cleanup temp tables: 0 2025-04-17T18:17:14.575084Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZWIzOTUwMGQtYjY3N2UzOTUtNGE4NGUxYzEtZDZjNTc0Y2U=, ActorId: [10:7494347854274827804:2404], ActorState: unknown state, TraceId: 01js2fg8dr7nywtpgzc3wcc1w1, Session actor destroyed 2025-04-17T18:17:14.578204Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU= 2025-04-17T18:17:14.578312Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:14.578641Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ReadyState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, received request, proxyRequestId: 6 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpcActor: [9:7494347855257872800:3391] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-04-17T18:17:14.578677Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ReadyState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, request placed into pool from cache: default 2025-04-17T18:17:14.578760Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Sending CompileQuery request 2025-04-17T18:17:14.706546Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, ExecutePhyTx, tx: 0x000050C00023ECD8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-17T18:17:14.706634Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Sending to Executer TraceId: 0 8 2025-04-17T18:17:14.706725Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Created new KQP executer: [10:7494347854274827834:2412] isRollback: 0 2025-04-17T18:17:14.712038Z node 10 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Forwarded TEvStreamData to [9:7494347855257872800:3391] 2025-04-17T18:17:14.712815Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-17T18:17:14.712973Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, txInfo Status: Committed Kind: ReadOnly TotalDuration: 6.53 ServerDuration: 6.44 QueriesCount: 2 2025-04-17T18:17:14.713036Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:17:14.713370Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:14.713415Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, EndCleanup, isFinal: 1 2025-04-17T18:17:14.713458Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: ExecuteState, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Sent query response back to proxy, proxyRequestId: 6, proxyId: [10:7494347832799990260:2277] 2025-04-17T18:17:14.713476Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: unknown state, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Cleanup temp tables: 0 2025-04-17T18:17:14.713848Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTQ5Y2U0MTQtNmE4ZjE5NDktMTI5YjEyZjEtZjRkMjYzZDU=, ActorId: [10:7494347854274827828:2412], ActorState: unknown state, TraceId: 01js2fg8jjcfwjpnxg9k8x2q30, Session actor destroyed 2025-04-17T18:17:14.716658Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-17T18:17:14.717050Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:17:14.718698Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-17T18:17:14.719023Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:17:14.723630Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MThhOTYxMGEtYjU3YzRjNTQtNjk5MWI2NDYtYzUwZTI2Mzk=, ActorId: [9:7494347833783035179:2334], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:14.723673Z node 9 
:KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MThhOTYxMGEtYjU3YzRjNTQtNjk5MWI2NDYtYzUwZTI2Mzk=, ActorId: [9:7494347833783035179:2334], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:14.723705Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MThhOTYxMGEtYjU3YzRjNTQtNjk5MWI2NDYtYzUwZTI2Mzk=, ActorId: [9:7494347833783035179:2334], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:14.723731Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MThhOTYxMGEtYjU3YzRjNTQtNjk5MWI2NDYtYzUwZTI2Mzk=, ActorId: [9:7494347833783035179:2334], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:14.723804Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MThhOTYxMGEtYjU3YzRjNTQtNjk5MWI2NDYtYzUwZTI2Mzk=, ActorId: [9:7494347833783035179:2334], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:14.854262Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7494347832799990052:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:14.854411Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> TColumnShardTestReadWrite::WriteStandalone >> PgCatalog::PgTables [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 23345, MsgBus: 15328 2025-04-17T18:13:16.465463Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346833130758704:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.465508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a0/r3tmp/tmptGsbNH/pdisk_1.dat 2025-04-17T18:13:16.768950Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23345, node 1 2025-04-17T18:13:16.834739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.834860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.836331Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.940171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.940194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.940200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.940281Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15328 TClient is connected to server localhost:15328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:17.471046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 1042 2025-04-17T18:13:18.939856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-04-17T18:13:19.086240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346846015661353:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:19.086255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494346846015661345:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:19.086338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:13:19.089704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:13:19.098376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494346846015661359:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:13:19.166081Z node 1 :TX_PROXY ERROR: Actor# [1:7494346846015661410:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:13:19.546002Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-17T18:13:19.547640Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-04-17T18:13:19.548813Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346846015661467:2338] TxId: 281474976710663. Ctx: { TraceId: 01js2f92kce4r5y2c8kr4s6g6z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE5MWVkYTctNDc1MWFlYTgtM2M0MDMwNzctNjEwMzFlOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-04-17T18:13:19.567780Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGE5MWVkYTctNDc1MWFlYTgtM2M0MDMwNzctNjEwMzFlOTI=, ActorId: [1:7494346846015661342:2338], ActorState: ExecuteState, TraceId: 01js2f92kce4r5y2c8kr4s6g6z, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-17T18:13:19.597862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-04-17T18:13:19.863610Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-17T18:13:19.864962Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-04-17T18:13:19.865140Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494346846015661601:2372] TxId: 281474976710668. Ctx: { TraceId: 01js2f935adskjcy9bjmsaz500, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGU4ZjFjMzctODRjYTQ4YWQtNWU2YmVlZmQtNTJiNDVkNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-04-17T18:13:19.865339Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGU4ZjFjMzctODRjYTQ4YWQtNWU2YmVlZmQtNTJiNDVkNDc=, ActorId: [1:7494346846015661557:2372], ActorState: ExecuteState, TraceId: 01js2f935adskjcy9bjmsaz500, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 1042 2025-04-17T18:13:19.888409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_2169371982377735806_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce_pgbpchar_2169371982377735806_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) abcd 2025-04-17T18:13:20.166672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_2169371982377735806_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce__pgbpchar_2169371982377735806_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) {abcd,abcd} 1042 2025-04-17T18:13:20.434965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_10103374131519304989_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 6 --!syntax_pg INSERT INTO Coerce_pgbpchar_10103374131519304989_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) abcd 2025-04-17T18:13:20.709803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 ... SetPath # /home/runner/.ya/build/build_root/6873/0014a0/r3tmp/tmpYVNIFu/pdisk_1.dat 2025-04-17T18:17:00.695699Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:00.739753Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:00.739898Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:00.741785Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17418, node 13 2025-04-17T18:17:00.834770Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:00.834800Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:00.834816Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:00.835000Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2648 TClient is connected to server localhost:2648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:17:01.769812Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:05.454928Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7494347793977882948:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:05.455028Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:17:06.479240Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494347819747687402:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:06.479341Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7494347819747687394:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:06.479864Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:06.485865Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:17:06.499863Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7494347819747687408:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:17:06.586919Z node 13 :TX_PROXY ERROR: Actor# [13:7494347819747687461:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21038, MsgBus: 65046 2025-04-17T18:17:07.848491Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7494347821514073291:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:07.848589Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014a0/r3tmp/tmpYBjP7K/pdisk_1.dat 2025-04-17T18:17:08.028191Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:08.056829Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:08.056971Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:08.059634Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21038, node 14 2025-04-17T18:17:08.135802Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:08.135834Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:08.135852Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:08.136044Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65046 TClient is connected to server localhost:65046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:17:08.986931Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
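Editor's note: the NOT_FOUND warnings, the "Transaction ... completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" message above are one benign sequence — the workload service lazily provisions the built-in pool on first use: the fetcher misses, a creator actor submits ESchemeOpCreateResourcePool, and the concurrent retry finds the path already created, which the request accepts as success. A user-visible pool is declared through the same mechanism; an illustrative YQL sketch, with the pool name and limit values chosen as examples and assuming YDB's documented CREATE RESOURCE POOL settings:

-- Illustrative only: a user-defined pool analogous to the auto-created
-- /Root/.metadata/workload_manager/pools/default seen in this log.
CREATE RESOURCE POOL demo_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- queries admitted concurrently
    QUEUE_SIZE = 100              -- requests allowed to wait for admission
);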
2025-04-17T18:17:12.848735Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7494347821514073291:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:12.848832Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:17:13.166542Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7494347847283877733:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:13.166609Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7494347847283877719:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:13.166677Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:13.172336Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:17:13.184454Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7494347847283877746:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:17:13.239421Z node 14 :TX_PROXY ERROR: Actor# [14:7494347847283877797:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:17:13.332116Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:17:13.406504Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:17:17.361446Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037888 not found 2025-04-17T18:17:17.450860Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:17:17.760828Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:7494347864463747546:2447], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=14&id=ODRkZGQyYzUtOTdkNmFjNTctNDY5YTU2ZDQtOTk2NDJlZGY=. CustomerSuppliedId : . TraceId : 01js2fgbewe12mqccmh8s01m79. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-04-17T18:17:17.761361Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:7494347864463747547:2448], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01js2fgbewe12mqccmh8s01m79. SessionId : ydb://session/3?node_id=14&id=ODRkZGQyYzUtOTdkNmFjNTctNDY5YTU2ZDQtOTk2NDJlZGY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [14:7494347864463747543:2444], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-17T18:17:17.762020Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ODRkZGQyYzUtOTdkNmFjNTctNDY5YTU2ZDQtOTk2NDJlZGY=, ActorId: [14:7494347864463747536:2444], ActorState: ExecuteState, TraceId: 01js2fgbewe12mqccmh8s01m79, Create QueryResponse for error on request, msg: >> TColumnShardTestReadWrite::WriteOverload+InStore-WithWritePortionsOnInsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore-WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-17T18:17:10.957652Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:11.037472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:11.058866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:11.059152Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:11.066953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:11.067139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:11.067338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:11.067490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:11.067622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:11.067716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:11.067809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:11.067942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:11.068061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:11.068171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:11.068278Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:11.068375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:11.091088Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:11.091231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:11.091279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:11.091418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:11.091550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:11.091610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:11.091638Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:11.091689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:11.091725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:11.091749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:11.091766Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:11.091881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:11.091918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:11.091939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:11.091956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:11.092007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:11.092037Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:11.092062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:11.092078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:11.092135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:11.092169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:11.092186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:11.092212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:11.092236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:11.092262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:11.092563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-17T18:17:11.092642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-17T18:17:11.092694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-04-17T18:17:11.092749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-17T18:17:11.092862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:11.092907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:11.092933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:11.093060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:11.093096Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:11.093117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:11.093219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:11.093257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:11.093284Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:11.093471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:11.093519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:11.093564Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:11.093704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:11.093737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:11.093766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
18:17:19.082482Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 4 at tablet 9437184 2025-04-17T18:17:19.082709Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:19.094620Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:19.094747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.102419Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-04-17T18:17:19.102661Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:19.114464Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:19.114584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.115936Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-17T18:17:19.116151Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:19.127923Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:19.128051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.129460Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-17T18:17:19.129679Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:19.141489Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:19.141615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.150371Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-17T18:17:19.150632Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:19.162570Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:19.162698Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.164203Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-17T18:17:19.164421Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:19.176131Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:19.176256Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.177803Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-17T18:17:19.178018Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:19.189828Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:19.189957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.198248Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-17T18:17:19.198504Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:19.210278Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:19.210410Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.212017Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-17T18:17:19.212272Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:19.224106Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:19.224225Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.225803Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-17T18:17:19.226025Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:19.237785Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:19.237919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.246617Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-17T18:17:19.246864Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:19.258736Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:19.258862Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.260720Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-17T18:17:19.260972Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 
Blob count: 1 2025-04-17T18:17:19.272920Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T18:17:19.273046Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.274727Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-17T18:17:19.274962Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:19.286966Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:19.287091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.296282Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-17T18:17:19.296537Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:19.308254Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:19.308349Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.309672Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-17T18:17:19.309841Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T18:17:19.321333Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T18:17:19.321449Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.322689Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-17T18:17:19.322860Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:19.334430Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:19.334528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.344517Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-17T18:17:19.344708Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:19.356512Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:19.356645Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.358197Z node 1 
:TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-17T18:17:19.358428Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:19.370299Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:19.370423Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:19.734908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; 2025-04-17T18:17:19.787877Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-17T18:17:19.788185Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-17T18:17:19.800342Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-17T18:17:19.800500Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 23267, MsgBus: 9810 2025-04-17T18:15:59.084254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347529859757854:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:59.084329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e30/r3tmp/tmpJxhFB9/pdisk_1.dat 2025-04-17T18:15:59.477799Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:59.496767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:59.496888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:59.499242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23267, node 1 2025-04-17T18:15:59.577705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:15:59.577733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:15:59.577741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:15:59.577857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9810 TClient is connected to server localhost:9810 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:00.140933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:02.167338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347542744660406:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.167368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347542744660418:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.167449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:02.171076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:02.184299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347542744660420:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:02.272152Z node 1 :TX_PROXY ERROR: Actor# [1:7494347542744660471:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:02.562671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:16:02.810342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:02.810551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:02.810800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:02.810931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:02.811032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:02.811152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:02.811251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:02.811358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:02.811463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:02.811568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:02.811697Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:02.811854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494347542744660712:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:02.826069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:02.826870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:02.827051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:02.827159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:02.827282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:02.827374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:02.827470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:02.827558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:02.827647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:02.827734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:02.827823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:02.827922Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494347542744660718:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:02.847433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347542744660738:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:02.847506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347542744660738:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:02.847663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_i ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.664872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.668531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.670499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.674637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.675487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.679705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.680517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.684204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.686261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.690243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.692501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.696071Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.698694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.701342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.704794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.707108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.712237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.717474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.722855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.727881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.732771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.737416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.743250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.748234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.753928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.755182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.759927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.760653Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.765026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.765357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.770265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.770331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.775252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.775263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.780321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.780627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.784751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.785052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.790118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.794197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.795931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.798108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.801435Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.801589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.805927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.929944Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ff2tp01ffh5y8kdq1hmw5", SessionId: ydb://session/3?node_id=1&id=NGEzYTcyYmYtYzU5MjliMjYtNjViOTk3NmEtODM5NTRkOTk=, Slow query, duration: 28.002983s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:17:04.219452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:17:04.219553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:17:04.219879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494347774672950203:10561];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:17:04.220300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2025-04-17T18:16:26.263926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347647132976905:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.264541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013d7/r3tmp/tmpRFNIrI/pdisk_1.dat 2025-04-17T18:16:26.793688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:26.825799Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.825956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.828155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26026, node 1 2025-04-17T18:16:26.986736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:26.986757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.986763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.986863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.522525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
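For reference, the query text captured in the KQP_SLOW_LOG entry near the start of this excerpt (duration 28.002983s, status STATUS_CODE_UNSPECIFIED) unescapes to the following YQL DDL; the statements are verbatim from the log, only the \n escapes are expanded and the indentation reflowed:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);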
2025-04-17T18:16:27.535677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:29.676885Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:29.680206Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmM4YWE0ODktODBiNDJlMi1lYzFjYWM2OC04ZmFjYzJmMw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YmM4YWE0ODktODBiNDJlMi1lYzFjYWM2OC04ZmFjYzJmMw== 2025-04-17T18:16:29.680667Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660017879433:2329], Start check tables existence, number paths: 2 2025-04-17T18:16:29.680760Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.680785Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-04-17T18:16:29.680823Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-17T18:16:29.680871Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmM4YWE0ODktODBiNDJlMi1lYzFjYWM2OC04ZmFjYzJmMw==, ActorId: [1:7494347660017879435:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.687483Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660017879433:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:29.687556Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660017879433:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:29.687600Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660017879433:2329], Successfully finished 2025-04-17T18:16:29.687690Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:29.696201Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660017879452:2305], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.700057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:29.700947Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660017879452:2305], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:29.701117Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660017879452:2305], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:29.709377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660017879452:2305], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:29.789049Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660017879452:2305], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.793066Z node 1 :TX_PROXY ERROR: Actor# [1:7494347660017879503:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:29.793243Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660017879452:2305], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:29.796378Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ= 2025-04-17T18:16:29.796646Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ=, ActorId: [1:7494347660017879511:2331], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.796782Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ=, ActorId: [1:7494347660017879511:2331], ActorState: ReadyState, TraceId: 01js2fewv4cg0h9y6a12gvmecq, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494347660017879510:2343] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-17T18:16:29.796841Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:16:29.796854Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:29.796927Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7494347660017879511:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ= 2025-04-17T18:16:29.796990Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660017879513:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.797061Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347660017879514:2333], Database: /Root, Start database fetching 2025-04-17T18:16:29.802283Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347660017879514:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-17T18:16:29.802440Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660017879513:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.802479Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-17T18:16:29.802526Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root
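The TPoolCreatorActor lines above show sample_pool_id being created through an ESchemeOpCreateResourcePool suboperation (with one retry after the "path exist" race) before the first query is admitted. The log never prints the pool's settings; a minimal YQL sketch of a pool definition that would reproduce the zero-concurrency behaviour this test exercises could look like the following, where the setting values are assumptions and only the pool name comes from the log:

    -- Hypothetical pool definition for the zero-limit scenario; the name
    -- sample_pool_id appears in the log above, the limits do not.
    CREATE RESOURCE POOL sample_pool_id WITH (
        CONCURRENT_QUERY_LIMIT = 0,  -- no query may run concurrently
        QUEUE_SIZE = 0               -- and nothing is allowed to wait
    );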
2025-04-17T18:16:29.802540Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-17T18:16:29.802858Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347660017879525:2335], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-17T18:16:29.802936Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347660017879524:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ=, Start pool fetching 2025-04-17T18:16:29.802968Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660017879527:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.803328Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660017879527:2336], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.803385Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347660017879524:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ=, Pool info successfully resolved 2025-04-17T18:16:29.803416Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ= 2025-04-17T18:16:29.803475Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347660017879525:2335], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7494347660017879511:2331], session id: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ= 2025-04-17T18:16:29.803549Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTNjOWE1MjEtYWEyOTZmNGMtYjI0ZGRmY2UtM2I2MGVkMTQ= 2025-04-17T18:16:29.803580Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] ... 
DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7494347884448609547:2329], Start check tables existence, number paths: 2 2025-04-17T18:17:21.408905Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI= 2025-04-17T18:17:21.409453Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:17:21.409489Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-17T18:17:21.409510Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-04-17T18:17:21.409593Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI=, ActorId: [6:7494347884448609563:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:21.409788Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7494347884448609547:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:17:21.409856Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7494347884448609547:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:17:21.409894Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7494347884448609547:2329], Successfully finished 2025-04-17T18:17:21.410078Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:17:21.412218Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347884448609573:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:17:21.415340Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:17:21.416330Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347884448609573:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-17T18:17:21.416471Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347884448609573:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:17:21.424245Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347884448609573:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:17:21.505147Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347884448609573:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:17:21.508085Z node 6 :TX_PROXY ERROR: Actor# [6:7494347884448609624:2332] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:17:21.508157Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494347884448609573:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:17:21.511293Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE= 2025-04-17T18:17:21.511414Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:21.511622Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:17:21.511649Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:17:21.511687Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: ReadyState, TraceId: 01js2fgfb7bakpq5kecj96rh3b, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7494347884448609630:2337] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-17T18:17:21.511718Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347884448609633:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:17:21.511724Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [6:7494347884448609631:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE= 2025-04-17T18:17:21.511766Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7494347884448609634:2333], Database: /Root, Start database fetching 2025-04-17T18:17:21.511990Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7494347884448609634:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-17T18:17:21.512047Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-17T18:17:21.512107Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7494347884448609637:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, Start pool fetching 2025-04-17T18:17:21.512148Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService]
[TPoolFetcherActor] ActorId: [6:7494347884448609641:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:17:21.513055Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347884448609641:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:17:21.513058Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347884448609633:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:17:21.513097Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-17T18:17:21.513109Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7494347884448609637:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, Pool info successfully resolved 2025-04-17T18:17:21.513116Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-17T18:17:21.513324Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE= 2025-04-17T18:17:21.513330Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7494347884448609647:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-17T18:17:21.513375Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE= 2025-04-17T18:17:21.513447Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: ExecuteState, TraceId: 01js2fgfb7bakpq5kecj96rh3b, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2025-04-17T18:17:21.513564Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: ExecuteState, TraceId: 01js2fgfb7bakpq5kecj96rh3b, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-17T18:17:21.513626Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7494347884448609631:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE= 2025-04-17T18:17:21.513680Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: CleanupState, TraceId: 01js2fgfb7bakpq5kecj96rh3b, EndCleanup, isFinal: 1 2025-04-17T18:17:21.513760Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: CleanupState, TraceId: 01js2fgfb7bakpq5kecj96rh3b, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7494347871563707242:2266] 
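The ExecuteState lines above show the zero limit taking effect: the SELECT 42; request is placed into sample_pool_id and immediately answered with "Query failed during adding/waiting in workload pool sample_pool_id", after which the session cleans up. Assuming the hypothetical pool definition sketched earlier, lifting the limit would be a plain ALTER; this is a hedged sketch of that statement, not something the test itself runs, and the value 5 is arbitrary:

    -- Hedged sketch: raise the admission limit so requests like SELECT 42;
    -- are placed and executed instead of being rejected at admission.
    ALTER RESOURCE POOL sample_pool_id SET (
        CONCURRENT_QUERY_LIMIT = 5
    );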
2025-04-17T18:17:21.513790Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: unknown state, TraceId: 01js2fgfb7bakpq5kecj96rh3b, Cleanup temp tables: 0 2025-04-17T18:17:21.513883Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDgyOWJjNzUtNTc0NGEzNWItMWM1ODg1Y2MtZGE0YzZiYzE=, ActorId: [6:7494347884448609631:2331], ActorState: unknown state, TraceId: 01js2fgfb7bakpq5kecj96rh3b, Session actor destroyed 2025-04-17T18:17:21.514570Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7494347884448609647:2336], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-04-17T18:17:21.520838Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI=, ActorId: [6:7494347884448609563:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:21.520896Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI=, ActorId: [6:7494347884448609563:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:21.520924Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI=, ActorId: [6:7494347884448609563:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:21.520964Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI=, ActorId: [6:7494347884448609563:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:21.521038Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjkxYWMzZjEtMTQ5MDQ4Y2EtYTRhNjdmYjUtMWEyMjE2NDI=, ActorId: [6:7494347884448609563:2330], ActorState: unknown state, Session actor destroyed >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-04-17T18:17:18.060070Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:18.145432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:18.168003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:18.168274Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:18.175617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:18.175810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:18.176023Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:18.176144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:18.176277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:18.176385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:18.176479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:18.176586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:18.176719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:18.176837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.176962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:18.177069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:18.203800Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:18.203955Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:18.204026Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:18.204153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:18.204298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:18.204360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:18.204395Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:18.204453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:18.204493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:18.204517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:18.204541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:18.204666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:18.204724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:18.204758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:18.204786Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:18.204866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:18.204914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:18.204950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:18.204973Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:18.205037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:18.205075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:18.205104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:18.205142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:18.205174Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:18.205196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:18.205572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=68; 2025-04-17T18:17:18.205662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-17T18:17:18.205727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:17:18.205822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-17T18:17:18.205975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:18.206022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:18.206056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:18.206226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:18.206255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.206279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.206408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:18.206451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:18.206471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:18.206624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:18.206651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:18.206671Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:18.206783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:18.206823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:18.206865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-17T18:17:22.738171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::WriteStandalone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2025-04-17T18:17:18.820908Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:18.907795Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:18.931390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:18.931712Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:18.939945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:18.940146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
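The WriteStandaloneExoticTypes log above and the WriteStandalone log that continues below print the same TTxUpdateSchema normalizer chain during tablet init. Collected from the normalizer_finished events, the execution order and sequence ids are:

    Granules(1) -> Chunks(2) -> TablesCleaner(4) -> CleanGranuleId(6)
      -> CleanInsertionDedup(8) -> GCCountersNormalizer(9)
      -> RestorePortionFromChunks(10) -> SyncPortionFromChunks(11)
      -> SyncMinSnapshotFromChunks(13) -> RestoreV1Chunks_V2(15)
      -> RestoreV2Chunks(16)

Every normalizer that emits a message reports "0 chunks found", so each stage is a no-op on these freshly created tablets.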
2025-04-17T18:17:18.940361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:18.940481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:18.940604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:18.940718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:18.940816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:18.940926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:18.941054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:18.941178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.941296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:18.941393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:18.969159Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:18.969324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:18.969386Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:18.969548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:18.969698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:18.969796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:18.969845Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:18.969933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:18.969991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:18.970030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:18.970058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:18.970260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:18.970326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:18.970365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:18.970397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:18.970517Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:18.970569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:18.970612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:18.970640Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:18.970739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:18.970786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:18.970814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:18.970861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:18.970904Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:18.970930Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:18.971320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-17T18:17:18.971429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:17:18.971521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-17T18:17:18.971602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T18:17:18.971777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:18.971837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:18.971872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:18.972061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:18.972106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.972159Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.972308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:18.972370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:18.972401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:18.972600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:18.972643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:18.972670Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:18.972788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:18.972829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:18.972875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-17T18:17:23.423445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 22285, MsgBus: 20182 2025-04-17T18:16:07.308087Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347566065981287:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:07.308134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e21/r3tmp/tmpY6BOUK/pdisk_1.dat 2025-04-17T18:16:07.725627Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:07.741763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:07.741870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:07.744899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22285, node 1 2025-04-17T18:16:07.890429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:07.890452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:07.890463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:07.890583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20182 TClient is connected to server localhost:20182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:08.482855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:08.501222Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:10.311864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347578950883845:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:10.312018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:10.318365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347578950883857:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:10.322739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:10.340662Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:16:10.340833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347578950883859:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:10.408790Z node 1 :TX_PROXY ERROR: Actor# [1:7494347578950883912:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:10.649635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:16:10.855541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:10.855668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:10.855723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:10.855747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:10.855987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:10.856093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:10.856211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:10.856226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:10.856380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:10.856402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:10.856494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-17T18:16:10.856507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:10.856585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:10.856662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:10.856691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:10.856786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:10.856800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:10.856874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:10.856921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:10.856977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:10.857055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:10.857102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:10.857163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347578950884175:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:10.857182Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7494347578950884189:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:10.892378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494347578950884193:2359];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:10.892378Z no ... tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.267258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.269607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.273408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.275377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.279291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.280997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.285024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.286432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.290638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.291643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.296034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.296653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.301905Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.305617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.307739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.311824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.313318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.317912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.318906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.324072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.324761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.330352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.330727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.336612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.336882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.341309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.342712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.345488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.348915Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.350170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.354137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.355066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.358168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.361120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.362504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.366914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.367228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.370870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.373551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.375125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.379736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.380412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.384257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.385932Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.389375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.392235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:09.497309Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ffa5e33a2s9mr579dx4mt", SessionId: ydb://session/3?node_id=1&id=OWM0NTQ3ZmEtZGQ2OGZkODYtN2M0NDk0M2EtYTBlNDMxNTU=, Slow query, duration: 26.058087s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:17:09.711285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:17:09.711298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:17:09.711866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> EvWrite::WriteInTransaction >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> EvWrite::WriteInTransaction [GOOD] >> EvWrite::WriteWithLock >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_Decimal >> EvWrite::WriteWithLock [GOOD] >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-04-17T18:17:25.328797Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:25.419289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:25.446659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:25.447021Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:25.455826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2025-04-17T18:17:25.456089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:25.456361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:25.456531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:25.456671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:25.456820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:25.456935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:25.457079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:25.457234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:25.457371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:25.457509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:25.457630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:25.492247Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:25.492526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:25.492590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:25.492787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:25.492963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:25.493041Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:25.493090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:25.493215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:25.493303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:25.493351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:25.493386Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:25.493594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:25.493677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:25.493726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:25.493770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:25.493865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:25.493928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:25.493977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:25.494024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:25.494144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:25.494211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:25.494246Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:25.494300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-17T18:17:25.494350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:25.494380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:25.494851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=66; 2025-04-17T18:17:25.494952Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-17T18:17:25.495065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-04-17T18:17:25.495175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-04-17T18:17:25.495352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:25.495440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:25.495477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:25.495703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:25.495755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:25.495788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:25.495968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:25.496020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:25.496053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:25.496262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:25.496318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:25.496362Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:25.496512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:25.496567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:25.496638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... );;;); 2025-04-17T18:17:27.540046Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-17T18:17:27.540064Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:27.540362Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:27.540386Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:17:27.540420Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-17T18:17:27.540457Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=4096;merger=0;interval_id=2; 2025-04-17T18:17:27.540489Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:17:27.540541Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.540559Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=4096;finished=1; 2025-04-17T18:17:27.540580Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:27.540699Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:27.540804Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:4096;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.540833Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:27.540927Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=4096; 2025-04-17T18:17:27.540968Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=458752;num_rows=4096;batch_columns=key,field; 2025-04-17T18:17:27.541035Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:210:2228];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-17T18:17:27.541120Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.541184Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.541252Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.541494Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:27.541597Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.541691Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.541724Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:214:2232] finished for tablet 9437184 2025-04-17T18:17:27.542048Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[2:210:2228];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.041},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.042}],"full":{"a":1744913847499539,"name":"_full_task","f":1744913847499539,"d_finished":0,"c":0,"l":1744913847541761,"d":42222},"events":[{"name":"bootstrap","f":1744913847499917,"d_finished":1918,"c":1,"l":1744913847501835,"d":1918},{"a":1744913847541468,"name":"ack","f":1744913847540678,"d_finished":591,"c":1,"l":1744913847541269,"d":884},{"a":1744913847541459,"name":"processing","f":1744913847503370,"d_finished":18507,"c":9,"l":1744913847541271,"d":18809},{"name":"ProduceResults","f":1744913847500997,"d_finished":1658,"c":12,"l":1744913847541713,"d":1658},{"a":1744913847541716,"name":"Finish","f":1744913847541716,"d_finished":0,"c":0,"l":1744913847541761,"d":45},{"name":"task_result","f":1744913847503384,"d_finished":17807,"c":8,"l":1744913847540604,"d":17807}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.542104Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:210:2228];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:27.542412Z node 2 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[2:210:2228];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.041},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.042}],"full":{"a":1744913847499539,"name":"_full_task","f":1744913847499539,"d_finished":0,"c":0,"l":1744913847542133,"d":42594},"events":[{"name":"bootstrap","f":1744913847499917,"d_finished":1918,"c":1,"l":1744913847501835,"d":1918},{"a":1744913847541468,"name":"ack","f":1744913847540678,"d_finished":591,"c":1,"l":1744913847541269,"d":1256},{"a":1744913847541459,"name":"processing","f":1744913847503370,"d_finished":18507,"c":9,"l":1744913847541271,"d":19181},{"name":"ProduceResults","f":1744913847500997,"d_finished":1658,"c":12,"l":1744913847541713,"d":1658},{"a":1744913847541716,"name":"Finish","f":1744913847541716,"d_finished":0,"c":0,"l":1744913847542133,"d":417},{"name":"task_result","f":1744913847503384,"d_finished":17807,"c":8,"l":1744913847540604,"d":17807}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:27.542520Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:27.499067Z;index_granules=0;index_portions=1;index_batches=176;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=494024;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=494024;selected_rows=0; 2025-04-17T18:17:27.542553Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:27.542737Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:214:2232];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributesExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2025-04-17T18:16:26.270567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347646853138326:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.270751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013e2/r3tmp/tmpRqxUu4/pdisk_1.dat 
2025-04-17T18:16:26.744828Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:26.781250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.781338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.788340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29252, node 1 2025-04-17T18:16:26.986732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:26.986752Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.986758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.986870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.498907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:16:29.464035Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:29.464146Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347659738040697:2328], Start check tables existence, number paths: 2 2025-04-17T18:16:29.479614Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg= 2025-04-17T18:16:29.479926Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347659738040697:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:29.480541Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347659738040697:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:29.480598Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347659738040697:2328], Successfully finished 2025-04-17T18:16:29.480775Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg=, ActorId: [1:7494347659738040713:2329], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.481577Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.481596Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:29.481630Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-17T18:16:29.481652Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:29.527879Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347659738040724:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.531851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:29.535141Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347659738040724:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:29.536980Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347659738040724:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:29.546433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347659738040724:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:29.643973Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347659738040724:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.650049Z node 1 :TX_PROXY ERROR: Actor# [1:7494347659738040775:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:29.650299Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347659738040724:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:29.650622Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347659738040782:2342], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.651659Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347659738040782:2342], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.671829Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg=, ActorId: [1:7494347659738040713:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:16:29.671972Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg=, ActorId: [1:7494347659738040713:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:16:29.671994Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg=, ActorId: [1:7494347659738040713:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:16:29.672025Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg=, ActorId: [1:7494347659738040713:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:16:29.675960Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjNjBmMzktZTkxMTMzMTktZjllM2M0NjItZWNmOWI0YTg=, ActorId: [1:7494347659738040713:2329], ActorState: unknown state, Session actor destroyed 2025-04-17T18:16:30.312753Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347663521477189:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:30.312878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013e2/r3tmp/tmpPGQgAw/pdisk_1.dat 2025-04-17T18:16:30.471922Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64801, node 2 2025-04-17T18:16:30.515092Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:30.515188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:30.517655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-17T18:16:30.586194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:30.586214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:30.586235Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:30.586366Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:30.855282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:33.392157Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:33.394282Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=MTJkOTFjNDMtYjA2NTBiY2YtMmEyOTlkOGUtMjI1M2ZhYzE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTJkOTFjNDMtYjA2NTBiY2YtMmEyOTlkOGUtMjI1M2ZhYzE= 2025-04-17T18:16:33.394668Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:33.394688Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 202 ... 
4807596717:2333], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:25.519239Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MmFiODc2MWMtMzFlNjQwZDctYWVkNjBmMjQtNTdmNWE4NDg=, ActorId: [6:7494347764807596717:2333], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:25.519337Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MmFiODc2MWMtMzFlNjQwZDctYWVkNjBmMjQtNTdmNWE4NDg=, ActorId: [6:7494347764807596717:2333], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:25.522938Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8425ymevr79bvabn53, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-17T18:17:25.523092Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8425ymevr79bvabn53, txInfo Status: Committed Kind: ReadWrite TotalDuration: 13.72 ServerDuration: 13.333 QueriesCount: 2 2025-04-17T18:17:25.523197Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8425ymevr79bvabn53, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:17:25.523254Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8425ymevr79bvabn53, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:25.523284Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8425ymevr79bvabn53, EndCleanup, isFinal: 0 2025-04-17T18:17:25.523317Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8425ymevr79bvabn53, Sent query response back to proxy, proxyRequestId: 521, proxyId: [7:7494347747380458421:2080] 2025-04-17T18:17:25.523677Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, TxId: 2025-04-17T18:17:25.523761Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 
2025-04-17T18:17:25.524238Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ReadyState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, received request, proxyRequestId: 522 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [7:7494347901999291293:5194] database: /Root databaseId: /Root pool id: default 2025-04-17T18:17:25.524268Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ReadyState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, request placed into pool from cache: default 2025-04-17T18:17:25.524757Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, ExecutePhyTx, tx: 0x000050C00038EFD8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-17T18:17:25.524809Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, Sending to Executer TraceId: 0 8 2025-04-17T18:17:25.524875Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, Created new KQP executer: [7:7494347901999291298:5189] isRollback: 0 2025-04-17T18:17:25.533212Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-17T18:17:25.533350Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, ExecutePhyTx, tx: 0x000050C00038F158 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-17T18:17:25.534837Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-17T18:17:25.535119Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, txInfo Status: Committed Kind: ReadOnly TotalDuration: 10.418 ServerDuration: 10.332 QueriesCount: 2 2025-04-17T18:17:25.535281Z node 7 
:KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:17:25.535370Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:25.535407Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, EndCleanup, isFinal: 0 2025-04-17T18:17:25.535466Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ExecuteState, TraceId: 01js2fgk8m94e3j3vj8hq9bv6h, Sent query response back to proxy, proxyRequestId: 522, proxyId: [7:7494347747380458421:2080] 2025-04-17T18:17:25.536143Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, TxId: 2025-04-17T18:17:25.536264Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, TxId: 2025-04-17T18:17:25.536387Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7494347764560327928:2313], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-17T18:17:25.536514Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:25.536585Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:25.536649Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:25.536757Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:25.536925Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NGQ2Yjc0NDgtMTVjOTI5MWYtZDdiNGE3NDktNmZhZjE4NTY=, ActorId: [7:7494347901999291271:5189], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:25.858979Z node 7 :BS_PROXY_PUT ERROR: [d36fe3d8c94ffc73] Result# TEvPutResult {Id# [72075186224037888:1:869:0:0:42:0] 
Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:869:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-17T18:17:25.861839Z node 8 :BS_PROXY_PUT ERROR: [b2a1f3023d782e8c] Result# TEvPutResult {Id# [72075186224037889:1:747:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:747:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> TColumnShardTestReadWrite::WriteOverload-InStore-WithWritePortionsOnInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 14853367247987271928 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2025-04-17T18:15:41.824280Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-04-17T18:15:41.984884Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key 
[1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2025-04-17T18:15:42.647378Z 1 00h01m30.100512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND 
TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { 
} ApproximateFreeSpaceShare# 0.999927} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999878} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999878} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Stop node 3 2025-04-17T18:15:44.002082Z 1 00h02m00.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2025-04-17T18:15:44.139371Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-17T18:15:44.141212Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6924538315584261568] Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] 2025-04-17T18:15:45.178839Z 1 00h03m50.161024s :BS_PROXY ERROR: Group# 2181038080 StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult: TEvPutResult {Id# [1:1:62:0:0:354994:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpac ... 
d# [1:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Stop node 3 2025-04-17T18:17:04.746600Z 1 00h28m01.000000s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 1 2025-04-17T18:17:05.012897Z 1 00h28m11.000512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 964 SEND TEvPut 
with key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 0 2025-04-17T18:17:06.220339Z 9 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:127502:347] ServerId# [1:128538:172] TabletId# 72057594037932033 PipeClientId# [9:127502:347] 2025-04-17T18:17:06.220604Z 8 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:158166:17] ServerId# [1:158176:4105] TabletId# 72057594037932033 PipeClientId# [8:158166:17] 2025-04-17T18:17:06.220716Z 7 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:157115:17] ServerId# [1:157122:3978] TabletId# 72057594037932033 PipeClientId# [7:157115:17] 2025-04-17T18:17:06.220844Z 6 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:134160:17] ServerId# [1:134167:1014] TabletId# 72057594037932033 PipeClientId# [6:134160:17] 2025-04-17T18:17:06.220950Z 5 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:154207:17] ServerId# [1:154215:3598] TabletId# 72057594037932033 PipeClientId# [5:154207:17] 2025-04-17T18:17:06.221073Z 4 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:163136:17] ServerId# [1:163146:4705] TabletId# 72057594037932033 PipeClientId# [4:163136:17] 2025-04-17T18:17:06.221181Z 3 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:153119:17] ServerId# [1:153129:3475] TabletId# 72057594037932033 PipeClientId# [3:153119:17] 2025-04-17T18:17:06.221299Z 2 00h28m41.003072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:162165:17] ServerId# [1:162172:4595] TabletId# 72057594037932033 PipeClientId# [2:162165:17] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Stop node 7 2025-04-17T18:17:08.029766Z 1 00h29m21.014096s :PIPE_SERVER ERROR: 
[72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Starting nodes Start compaction 1 Start checking >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 |93.6%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramLike >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-04-17T18:17:31.258215Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:31.338986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:31.356332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:31.356608Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:31.363683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:31.363865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:31.364077Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:31.364195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:31.364309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:31.364411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:31.364497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:31.364591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:31.364696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:31.364773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:31.364890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:31.364958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:31.388218Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:31.388374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:31.388430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:31.388556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:31.388669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:31.388731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:31.388780Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:31.388850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:31.388908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:31.388937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:31.388972Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:31.389096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:31.389139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:31.389168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:31.389186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:31.389251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:31.389296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:31.389327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:31.389343Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:31.389418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:31.389444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:31.389464Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:31.389489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:31.389516Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:31.389552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:31.389838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-17T18:17:31.389897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=24; 2025-04-17T18:17:31.389957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-17T18:17:31.390014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-17T18:17:31.390161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:31.390225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:31.390253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:31.390442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:31.390473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:31.390503Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:31.390609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:31.390641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:31.390663Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:31.390784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:31.390825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:31.390842Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:31.390930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:31.390960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:31.390995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... thod=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:32.172176Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:32.172217Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:17:32.172261Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-17T18:17:32.172300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-04-17T18:17:32.172333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:17:32.172482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.172521Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-17T18:17:32.172551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:32.172803Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:32.172947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: 
string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.172994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:32.173098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-04-17T18:17:32.173146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=61;num_rows=10;batch_columns=message; 2025-04-17T18:17:32.173269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:299:2317];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-04-17T18:17:32.173434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.173561Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.173669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.173809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:32.173905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.173995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.174026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:300:2318] finished for tablet 9437184 2025-04-17T18:17:32.174457Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:299:2317];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.009},{"events":["f_ack"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1744913852162653,"name":"_full_task","f":1744913852162653,"d_finished":0,"c":0,"l":1744913852174070,"d":11417},"events":[{"name":"bootstrap","f":1744913852162835,"d_finished":2103,"c":1,"l":1744913852164938,"d":2103},{"a":1744913852173791,"name":"ack","f":1744913852172764,"d_finished":938,"c":1,"l":1744913852173702,"d":1217},{"a":1744913852173780,"name":"processing","f":1744913852164996,"d_finished":5441,"c":9,"l":1744913852173704,"d":5731},{"name":"ProduceResults","f":1744913852163934,"d_finished":2495,"c":12,"l":1744913852174013,"d":2495},{"a":1744913852174016,"name":"Finish","f":1744913852174016,"d_finished":0,"c":0,"l":1744913852174070,"d":54},{"name":"task_result","f":1744913852165010,"d_finished":4386,"c":8,"l":1744913852172610,"d":4386}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.174550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:299:2317];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:32.174945Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:299:2317];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.009},{"events":["f_ack"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1744913852162653,"name":"_full_task","f":1744913852162653,"d_finished":0,"c":0,"l":1744913852174608,"d":11955},"events":[{"name":"bootstrap","f":1744913852162835,"d_finished":2103,"c":1,"l":1744913852164938,"d":2103},{"a":1744913852173791,"name":"ack","f":1744913852172764,"d_finished":938,"c":1,"l":1744913852173702,"d":1755},{"a":1744913852173780,"name":"processing","f":1744913852164996,"d_finished":5441,"c":9,"l":1744913852173704,"d":6269},{"name":"ProduceResults","f":1744913852163934,"d_finished":2495,"c":12,"l":1744913852174013,"d":2495},{"a":1744913852174016,"name":"Finish","f":1744913852174016,"d_finished":0,"c":0,"l":1744913852174608,"d":592},{"name":"task_result","f":1744913852165010,"d_finished":4386,"c":8,"l":1744913852172610,"d":4386}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-17T18:17:32.175026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:32.162220Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:32.175066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:32.175264Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:300:2318];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> KqpKv::ReadRows_Decimal [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> EvWrite::WriteWithSplit [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: Trying to start YDB, gRPC: 25777, MsgBus: 27711 2025-04-17T18:13:58.163802Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347009771930035:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:58.163886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b6/r3tmp/tmp1atagQ/pdisk_1.dat 2025-04-17T18:13:58.619745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:58.623494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:58.626459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:58.636114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25777, node 1 2025-04-17T18:13:58.843213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:58.843237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:58.843249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:58.843362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27711 TClient is connected to server localhost:27711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:59.547901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.570782Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:13:59.588306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.737863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:13:59.899196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:59.981773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:14:01.447138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347022656833694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.447261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:01.781337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.821850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.852012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.878638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.915530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.953141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:14:01.990172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:14:02.072686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347026951801648:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.072793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.072994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347026951801654:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:14:02.076327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:14:02.085760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347026951801656:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:14:02.145864Z node 1 :TX_PROXY ERROR: Actor# [1:7494347026951801709:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:14:03.124089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.164093Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347009771930035:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:03.164173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:14:03.210582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.239169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.310632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.383747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.415175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.444670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.482292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.522783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:14:03.554686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13695, MsgBus: 64403 2025-04-17T18:14:05.915680Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347040968526837:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:14:05.916381Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b6/r3tmp/tmpLikjA6/pdisk_1.dat 2025-04-17T18:14:06.061286Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:14:06.071165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:14:06.071241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:14:06.074541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13695, node 2 2025-04-17T18:14:06.211038Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:14:06.211058Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:14:06.211067Z ... "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32443, MsgBus: 28002 2025-04-17T18:17:12.180486Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7494347845742044064:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:12.180641Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b6/r3tmp/tmpt7CF1t/pdisk_1.dat 2025-04-17T18:17:12.395837Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:12.417209Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:12.417355Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:12.419670Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32443, node 18 2025-04-17T18:17:12.477798Z node 18 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:12.477836Z node 18 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:12.477853Z node 18 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:12.478081Z node 18 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28002 TClient is connected to server localhost:28002 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:17:13.381376Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:17.180741Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7494347845742044064:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:17.180863Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:17:18.369875Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5668, MsgBus: 11252 2025-04-17T18:17:19.861118Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7494347877020113856:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:19.861239Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b6/r3tmp/tmpjGduAv/pdisk_1.dat 2025-04-17T18:17:20.015102Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:20.057015Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:20.057161Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:20.058940Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5668, node 19 2025-04-17T18:17:20.118740Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:20.118770Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:20.118786Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:20.118998Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11252 TClient is connected to server localhost:11252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:17:21.078902Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:24.861219Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7494347877020113856:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:24.861352Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:17:25.915723Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18070, MsgBus: 14276 2025-04-17T18:17:27.768924Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7494347909386609905:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:27.769055Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0015b6/r3tmp/tmplyl0Oh/pdisk_1.dat 2025-04-17T18:17:27.941864Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:27.990598Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:27.990752Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:27.993906Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18070, node 20 2025-04-17T18:17:28.067187Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:28.067223Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:28.067241Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:28.067462Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14276 TClient is connected to server localhost:14276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:17:29.090347Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:32.769095Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7494347909386609905:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:32.769241Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:17:33.918479Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7494347935156414357:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:33.918702Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:33.943462Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.082270Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2025-04-17T18:17:34.089857Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) >> KqpPg::TableDeleteWhere-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-04-17T18:17:11.708896Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:11.789647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:11.810752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:11.811000Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:11.817922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:11.818072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:11.818266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:11.818337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:11.818462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:11.818584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:11.818695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:11.818798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:11.818914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:11.819009Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:11.819129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:11.819239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:11.841453Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:11.841700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:11.841760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:11.841946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:11.842112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:11.842215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:11.842258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:11.842367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:11.842470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:11.842525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:11.842557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:11.842725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:11.842778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:11.842818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:11.842846Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:11.842930Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:11.842970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:11.843005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:11.843039Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:11.843092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:11.843134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:11.843155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:11.843186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:11.843213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:11.843239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:11.843555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-17T18:17:11.843620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=25; 2025-04-17T18:17:11.843688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-17T18:17:11.843759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T18:17:11.843888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:11.843955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:11.843980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:11.844107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T18:17:11.844147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:11.844189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:11.844303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:11.844332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:11.844354Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:11.844503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:11.844549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:11.844571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:11.844648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:11.844675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:11.844708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
fId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=3691800;rows=450;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-17T18:17:35.979778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.979908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.979947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:35.979982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:35.981341Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:35.981452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.981487Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:35.981569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=149; 2025-04-17T18:17:35.981618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1222396;num_rows=149;batch_columns=key,field; 2025-04-17T18:17:35.981748Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=1222396;rows=149;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-17T18:17:35.981846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.981939Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.982085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.982662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:35.982762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.982860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.982904Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:865:2882] finished for tablet 9437184 2025-04-17T18:17:35.983521Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:856:2873];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.012},{"events":["f_processing","f_task_result"],"t":0.013},{"events":["l_task_result"],"t":0.654},{"events":["f_ack"],"t":16.608},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":16.624}],"full":{"a":1744913839358843,"name":"_full_task","f":1744913839358843,"d_finished":0,"c":0,"l":1744913855982969,"d":16624126},"events":[{"name":"bootstrap","f":1744913839359495,"d_finished":11918,"c":1,"l":1744913839371413,"d":11918},{"a":1744913855982641,"name":"ack","f":1744913855967607,"d_finished":8279,"c":9,"l":1744913855982126,"d":8607},{"a":1744913855982624,"name":"processing","f":1744913839372059,"d_finished":239963,"c":53,"l":1744913855982129,"d":240308},{"name":"ProduceResults","f":1744913839363336,"d_finished":16704,"c":64,"l":1744913855982885,"d":16704},{"a":1744913855982888,"name":"Finish","f":1744913855982888,"d_finished":0,"c":0,"l":1744913855982969,"d":81},{"name":"task_result","f":1744913839372092,"d_finished":230446,"c":44,"l":1744913840013003,"d":230446}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.983645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:35.984169Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:856:2873];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.012},{"events":["f_processing","f_task_result"],"t":0.013},{"events":["l_task_result"],"t":0.654},{"events":["f_ack"],"t":16.608},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":16.624}],"full":{"a":1744913839358843,"name":"_full_task","f":1744913839358843,"d_finished":0,"c":0,"l":1744913855983700,"d":16624857},"events":[{"name":"bootstrap","f":1744913839359495,"d_finished":11918,"c":1,"l":1744913839371413,"d":11918},{"a":1744913855982641,"name":"ack","f":1744913855967607,"d_finished":8279,"c":9,"l":1744913855982126,"d":9338},{"a":1744913855982624,"name":"processing","f":1744913839372059,"d_finished":239963,"c":53,"l":1744913855982129,"d":241039},{"name":"ProduceResults","f":1744913839363336,"d_finished":16704,"c":64,"l":1744913855982885,"d":16704},{"a":1744913855982888,"name":"Finish","f":1744913855982888,"d_finished":0,"c":0,"l":1744913855983700,"d":812},{"name":"task_result","f":1744913839372092,"d_finished":230446,"c":44,"l":1744913840013003,"d":230446}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-17T18:17:35.984265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:19.357483Z;index_granules=0;index_portions=5;index_batches=2052;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=17133336;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=17133336;selected_rows=0; 2025-04-17T18:17:35.984319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:35.984603Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] Test command err: 2025-04-17T18:16:26.248415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347648025517266:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.248537Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013f4/r3tmp/tmpodTDAa/pdisk_1.dat 2025-04-17T18:16:26.844676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:26.852285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.852380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.854822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12913, node 1 2025-04-17T18:16:26.978814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:26.978838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.978844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.978968Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.509227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
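One note on the column-shard trace above before the workload-service output continues: every scan_finish/scan_finished entry carries a JSON profile after the `stats=` prefix ("p" is an event timeline in seconds, "full" is the whole task span, "events" holds per-stage aggregates with durations in microseconds). A minimal stdlib sketch for pulling the per-stage numbers out of such a line; the regex assumes the payload always ends in `};iterator=`, which holds throughout this log:

```python
import json
import re

def scan_stats(line: str) -> dict:
    """Extract the stats={...} JSON profile from a TX_COLUMNSHARD_SCAN log line."""
    m = re.search(r"stats=(\{.*?\});iterator=", line)
    if not m:
        raise ValueError("no stats payload in line")
    return json.loads(m.group(1))

def stage_durations_us(stats: dict) -> dict:
    """Map each stage name to its accumulated duration in microseconds (d_finished)."""
    return {event["name"]: event["d_finished"] for event in stats["events"]}
```

Applied to the profile above, this shows processing at 239963 us across 53 calls against a 16.6 s wall clock, so the scan spent most of its lifetime waiting rather than working.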
2025-04-17T18:16:29.690154Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:29.690249Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660910419804:2329], Start check tables existence, number paths: 2 2025-04-17T18:16:29.697594Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjIyZWEzYzMtOWM5NGYzMTMtNDk3ZGEwMjMtNWRiY2M5ZTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjIyZWEzYzMtOWM5NGYzMTMtNDk3ZGEwMjMtNWRiY2M5ZTU= 2025-04-17T18:16:29.700731Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-17T18:16:29.700768Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.700808Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:29.714592Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjIyZWEzYzMtOWM5NGYzMTMtNDk3ZGEwMjMtNWRiY2M5ZTU=, ActorId: [1:7494347660910419816:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.714909Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660910419804:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:29.714975Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660910419804:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:29.715008Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347660910419804:2329], Successfully finished 2025-04-17T18:16:29.715083Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:29.723024Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660910419829:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.727657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:29.730948Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660910419829:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:29.732939Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660910419829:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:29.743856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660910419829:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:29.828246Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660910419829:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.833691Z node 1 :TX_PROXY ERROR: Actor# [1:7494347660910419880:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:29.833844Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347660910419829:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:29.840308Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM= 2025-04-17T18:16:29.840417Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM=, ActorId: [1:7494347660910419887:2331], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.840613Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:16:29.840625Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:29.840691Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660910419889:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.840731Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM=, ActorId: [1:7494347660910419887:2331], ActorState: ReadyState, TraceId: 01js2fewwg4xt499avj2mrzpa2, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494347660910419886:2341] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-17T18:16:29.840791Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7494347660910419887:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM= 2025-04-17T18:16:29.840854Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347660910419890:2333], Database: /Root, Start database fetching 2025-04-17T18:16:29.841955Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660910419889:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.841999Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-17T18:16:29.842025Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-17T18:16:29.842281Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347660910419900:2334], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: 
[OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-17T18:16:29.842334Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347660910419890:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-17T18:16:29.842365Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-17T18:16:29.842426Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347660910419902:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM=, Start pool fetching 2025-04-17T18:16:29.842463Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660910419904:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.842876Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347660910419904:2336], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.842930Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347660910419902:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM=, Pool info successfully resolved 2025-04-17T18:16:29.842960Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM= 2025-04-17T18:16:29.843040Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347660910419900:2334], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7494347660910419887:2331], session id: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM= 2025-04-17T18:16:29.843092Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347660910419900:2334], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7494347660910419887:2331], session id: ydb://session/3?node_id=1&id=YzkyNTEyNTMtZTFlYzBjMDAtYmNlMjk3MjctOGZlMDg0ZjM=, local in flight: 1 2025-04-17T18:16:29.843151Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, ... 
state, TraceId: 01js2fgwjm6f3tf4ftqtyr9zhd, Cleanup temp tables: 0 2025-04-17T18:17:35.062013Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg4ZDMyYzAtZGZmM2NiZmEtZTUyMTU1YzItNTUzNzdhOTM=, ActorId: [8:7494347945587590560:2661], ActorState: unknown state, TraceId: 01js2fgwjm6f3tf4ftqtyr9zhd, Session actor destroyed 2025-04-17T18:17:35.068271Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: ReadyState, TraceId: 01js2fgwjv6jk26tv3p01sadtb, received request, proxyRequestId: 55 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL my_pool; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-17T18:17:35.081947Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7494347941292623246:2660], DatabaseId: /Root, PoolId: my_pool, Got delete notification 2025-04-17T18:17:35.082092Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-17T18:17:35.082191Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347945587590579:2665], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-17T18:17:35.082571Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347945587590579:2665], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:35.082704Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:35.086572Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: ExecuteState, TraceId: 01js2fgwjv6jk26tv3p01sadtb, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [8:7494347945587590569:2331] WorkloadServiceCleanup: 0 2025-04-17T18:17:35.088611Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: CleanupState, TraceId: 01js2fgwjv6jk26tv3p01sadtb, EndCleanup, isFinal: 0 2025-04-17T18:17:35.088684Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: CleanupState, TraceId: 01js2fgwjv6jk26tv3p01sadtb, Sent query response back to proxy, proxyRequestId: 55, proxyId: [8:7494347898342948501:2266] 2025-04-17T18:17:35.092993Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ== 2025-04-17T18:17:35.093147Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:35.093404Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-17T18:17:35.093470Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347945587590593:2667], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-17T18:17:35.093485Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ReadyState, TraceId: 01js2fgwknew9bfss43ajj4ktf, received request, proxyRequestId: 56 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [8:7494347945587590590:2978] database: Root databaseId: /Root pool id: default 2025-04-17T18:17:35.093511Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ReadyState, TraceId: 01js2fgwknew9bfss43ajj4ktf, request placed into pool from cache: default 2025-04-17T18:17:35.093603Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, Sending CompileQuery request 2025-04-17T18:17:35.093801Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7494347945587590593:2667], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:35.093919Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-17T18:17:35.166379Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, ExecutePhyTx, tx: 0x000050C000107BD8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-17T18:17:35.166443Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, Sending to Executer TraceId: 0 8 2025-04-17T18:17:35.166530Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, Created new KQP executer: [8:7494347945587590599:2666] isRollback: 0 2025-04-17T18:17:35.169304Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, Forwarded TEvStreamData to [8:7494347945587590590:2978] 2025-04-17T18:17:35.169910Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-17T18:17:35.170054Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, txInfo Status: Committed Kind: Pure TotalDuration: 3.746 ServerDuration: 3.682 QueriesCount: 2 2025-04-17T18:17:35.170116Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:17:35.170293Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:35.170325Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, EndCleanup, isFinal: 1 2025-04-17T18:17:35.170371Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: ExecuteState, TraceId: 01js2fgwknew9bfss43ajj4ktf, Sent query response back to proxy, proxyRequestId: 56, proxyId: [8:7494347898342948501:2266] 2025-04-17T18:17:35.170396Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: unknown state, TraceId: 01js2fgwknew9bfss43ajj4ktf, Cleanup temp tables: 0 2025-04-17T18:17:35.170690Z 
node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWU1NDFjMGEtZWY1NjYyNzgtYjc2M2Y4ZS1hMTYxNjQ3ZQ==, ActorId: [8:7494347945587590591:2666], ActorState: unknown state, TraceId: 01js2fgwknew9bfss43ajj4ktf, Session actor destroyed 2025-04-17T18:17:35.176679Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:35.176743Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:35.176777Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:35.176811Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:35.176892Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGVkODYwMmEtZjJjMmU2NzctOTA0ODlmYWMtMmM2ZTQ5MmE=, ActorId: [8:7494347915522818112:2331], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:35.325698Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmZiZTZiMzUtMWM2MmJlMTQtYmIyZTc3OWYtYmFhNzdjZjU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YmZiZTZiMzUtMWM2MmJlMTQtYmIyZTc3OWYtYmFhNzdjZjU= 2025-04-17T18:17:35.325852Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmZiZTZiMzUtMWM2MmJlMTQtYmIyZTc3OWYtYmFhNzdjZjU=, ActorId: [8:7494347945587590613:2671], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:35.326604Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmZiZTZiMzUtMWM2MmJlMTQtYmIyZTc3OWYtYmFhNzdjZjU=, ActorId: [8:7494347945587590613:2671], ActorState: ReadyState, TraceId: 01js2fgwtyaxz3sm78dqqrdmbv, received request, proxyRequestId: 58 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/.metadata/initialization/migrations`; rpcActor: [8:7494347945587590614:2672] database: /Root databaseId: /Root pool id: default 2025-04-17T18:17:35.326637Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmZiZTZiMzUtMWM2MmJlMTQtYmIyZTc3OWYtYmFhNzdjZjU=, ActorId: [8:7494347945587590613:2671], ActorState: ReadyState, TraceId: 01js2fgwtyaxz3sm78dqqrdmbv, request placed into pool from cache: default 2025-04-17T18:17:35.326737Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmZiZTZiMzUtMWM2MmJlMTQtYmIyZTc3OWYtYmFhNzdjZjU=, ActorId: [8:7494347945587590613:2671], ActorState: ExecuteState, TraceId: 01js2fgwtyaxz3sm78dqqrdmbv, Sending CompileQuery request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 4740, MsgBus: 17185 2025-04-17T18:17:23.411719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347892822319321:2059];send_to=[0:7307199536658146131:7762515]; 
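The KQP_SESSION entries that dominate the output above log every ActorState transition with the session id inline, which makes per-session traces easy to reconstruct. A stdlib sketch, with the field layout copied from those lines (collapsing repeated states is an illustrative choice, not anything the log format mandates):

```python
import re
from collections import defaultdict

STATE_RE = re.compile(
    r"SessionId: (?P<session>ydb://session/\S+), ActorId: \[[^\]]*\], "
    r"ActorState: (?P<state>[^,]+)"
)

def session_state_traces(lines):
    """Group ActorState transitions per KQP session id, collapsing repeats."""
    traces = defaultdict(list)
    for line in lines:
        for m in STATE_RE.finditer(line):
            trace = traces[m.group("session")]
            if not trace or trace[-1] != m.group("state"):
                trace.append(m.group("state"))
    return traces
```

A healthy session reads like the ones above: unknown state, ReadyState, ExecuteState, then CleanupState before the actor is destroyed.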
2025-04-17T18:17:23.411854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ca/r3tmp/tmpxpS6Pk/pdisk_1.dat 2025-04-17T18:17:23.660322Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4740, node 1 2025-04-17T18:17:23.736027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:23.736054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:23.736060Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:23.736177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:17:23.774192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:23.774307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:23.775912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17185 TClient is connected to server localhost:17185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:17:24.131806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:24.158553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:24.263634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:24.393966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:24.468491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
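The NOT_FOUND warnings that follow are expected on a fresh database: as the surrounding entries show, the default resource pool is created lazily on first use, the creator retries on "completed, doublechecking", and the retry then finds the path already there ("path exist, request accepts it"). A small sketch for counting these known-benign bootstrap messages when triaging a log; the marker strings are copied from this output:

```python
from collections import Counter

# Messages that recur during normal resource-pool bootstrap in this log.
BENIGN_MARKERS = (
    "Resource pool default not found or you don't have access permissions",
    "completed, doublechecking",
    "error: path exist, request accepts it",
)

def benign_warning_counts(lines) -> Counter:
    """Count known-benign pool-bootstrap messages so real failures stand out."""
    counts = Counter()
    for line in lines:
        for marker in BENIGN_MARKERS:
            if marker in line:
                counts[marker] += 1
    return counts
```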
2025-04-17T18:17:26.374777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347905707222983:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:26.374889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:26.620640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:17:26.645806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:17:26.670842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:17:26.697558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:17:26.722514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:17:26.751673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:17:26.778911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:17:26.824801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347905707223637:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:26.824861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347905707223642:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:26.824904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:26.827797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:17:26.836250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347905707223644:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:17:26.923058Z node 1 :TX_PROXY ERROR: Actor# [1:7494347905707223700:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:17:28.441174Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494347892822319321:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:28.441665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25414, MsgBus: 28469 2025-04-17T18:17:29.280707Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347918349829310:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:29.280796Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0017ca/r3tmp/tmpbmFiKx/pdisk_1.dat 2025-04-17T18:17:29.383890Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25414, node 2 2025-04-17T18:17:29.410265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:29.410348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:29.412039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:29.423848Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:29.423872Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:29.423880Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:29.423981Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28469 TClient is connected to server localhost:28469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
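Test verdicts are interleaved through this output as `>> Suite::Test [GOOD]` markers, both standalone and inside the `------- [TM]` section headers. A stdlib sketch for collecting them; only [GOOD] appears in this excerpt, so any other verdict names the pattern might capture are assumptions:

```python
import re

# Matches ">> KqpPg::TableDeleteWhere-useSink [GOOD]" and similar markers.
VERDICT_RE = re.compile(r">> (?P<test>\S+) \[(?P<verdict>[A-Z]+)\]")

def verdicts(text: str):
    """Yield (test_name, verdict) pairs from ya make test output."""
    for m in VERDICT_RE.finditer(text):
        yield m.group("test"), m.group("verdict")
```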
2025-04-17T18:17:29.772475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:29.790349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:29.840744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:17:29.959523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:17:30.042908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:17:31.826337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347926939765678:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:31.826439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:31.878692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:17:31.905264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:17:31.933841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:17:31.963092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:17:31.990806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.057463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.097797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.151325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347931234733634:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:32.151410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494347931234733639:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:32.151461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:32.154885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:17:32.164187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494347931234733641:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:17:32.237477Z node 2 :TX_PROXY ERROR: Actor# [2:7494347931234733695:3538] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:17:32.955076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.988597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.023348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.280921Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494347918349829310:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:34.281022Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> TColumnShardTestReadWrite::RebootWriteReadStandalone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28568, MsgBus: 27797 2025-04-17T18:13:16.465467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494346829396470698:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:13:16.465528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014ae/r3tmp/tmpsEoXuZ/pdisk_1.dat 2025-04-17T18:13:16.753771Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28568, node 1 2025-04-17T18:13:16.821239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:16.821806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:16.823434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:16.939317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:16.939347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:16.939353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:16.939437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27797 TClient is connected to server localhost:27797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:13:17.483549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:13:18.802880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
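What follows is a long run of intentional coercion failures: the test creates a table per pg type, then bulk-upserts a value that violates its typmod (character(2), bit(2), bit varying(2), numeric with precision 2, and so on), and the error text mirrors stock PostgreSQL. A sketch for tabulating these failures; the lookahead assumes each following entry starts with an ISO-8601 timestamp, as everywhere in this log:

```python
import re

COERCE_RE = re.compile(
    r"Bulk upsert to table '(?P<table>[^']+)' "
    r"Unable to coerce value for (?P<pg_type>\w+): "
    r".*?ERROR:\s+(?P<reason>.+?)(?=\s+\d{4}-\d{2}-\d{2}T|\s*$)"
)

def coercion_failures(text: str):
    """Yield (table, pg type, reason) for each failed bulk-upsert coercion."""
    for m in COERCE_RE.finditer(text):
        yield m.group("table"), m.group("pg_type"), m.group("reason")
```

Run over this section, it pairs each Coerce_* table with its reason, e.g. pgbpchar with "value too long for type character(2)".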
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-17T18:13:19.002398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-17T18:13:19.050562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 abcd 2025-04-17T18:13:19.141844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-17T18:13:19.223976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 abcd 2025-04-17T18:13:19.361281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 {"abcd ","abcd "} 2025-04-17T18:13:19.492225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-04-17T18:13:19.540143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-04-17T18:13:19.588983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 abcd 2025-04-17T18:13:19.679715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-17T18:13:19.779566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 abcd 2025-04-17T18:13:19.852808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-17T18:13:19.932047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-04-17T18:13:19.996380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-04-17T18:13:20.044045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 1111 2025-04-17T18:13:20.106766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-17T18:13:20.172627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-04-17T18:13:20.214100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-04-17T18:13:20.260184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-04-17T18:13:20.306430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-04-17T18:13:20.355400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 1111 2025-04-17T18:13:20.415874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-17T18:13:20.488110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 1111 2025-04-17T18:13:20.555162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-17T18:13:20.621666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgnumeric_17472595041006102391_7644398022171395976' Unable to coerce value for pgnumeric: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: numeric field overflow DETAIL: A field with precision 2, scale 0 must round to an absolute value less than 10^2. 2025-04-17T18:13:20.667929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2 ... 1474976710829:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.629670Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 628 2025-04-17T18:17:32.650524Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710831:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.707235Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:32.726025Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710833:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.785110Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 601 2025-04-17T18:17:32.806016Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710835:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.868639Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:32.888813Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710837:0, at schemeshard: 72057594046644480 2025-04-17T18:17:32.948096Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 603 2025-04-17T18:17:32.970384Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710839:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.074674Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:33.097507Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710841:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.159215Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 602 2025-04-17T18:17:33.177106Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710843:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.239111Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:33.259158Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710845:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.321047Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 604 2025-04-17T18:17:33.340911Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.399755Z node 
11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:33.422045Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710849:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.489570Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 718 2025-04-17T18:17:33.510427Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710851:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.572706Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:33.593519Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710853:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.666130Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 869 2025-04-17T18:17:33.686660Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710855:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.783618Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:33.802341Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710857:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.861500Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 650 2025-04-17T18:17:33.881771Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710859:0, at schemeshard: 72057594046644480 2025-04-17T18:17:33.943154Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:33.963165Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710861:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.024867Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 829 2025-04-17T18:17:34.044263Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710863:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.105829Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.124827Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710865:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.216194Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 774 2025-04-17T18:17:34.234913Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710867:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.298635Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.318877Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710869:0, at schemeshard: 
72057594046644480 2950 2025-04-17T18:17:34.380225Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.399262Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710871:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.461160Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.481440Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710873:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.542443Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 114 2025-04-17T18:17:34.560885Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710875:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.654171Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.673473Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710877:0, at schemeshard: 72057594046644480 3802 2025-04-17T18:17:34.733675Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.752096Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710879:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.811883Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.830849Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710881:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.893360Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 4072 2025-04-17T18:17:34.912171Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710883:0, at schemeshard: 72057594046644480 2025-04-17T18:17:34.971066Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:34.991548Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710885:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.056732Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 142 2025-04-17T18:17:35.080443Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710887:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.192832Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:35.214615Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710889:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.283696Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 3615 2025-04-17T18:17:35.305858Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710891:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.373485Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:35.395249Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710893:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.465274Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 3614 2025-04-17T18:17:35.488641Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710895:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.550878Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:35.570316Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710897:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.635933Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 22 2025-04-17T18:17:35.654587Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710899:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.719443Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-17T18:17:35.739295Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710901:0, at schemeshard: 72057594046644480 2025-04-17T18:17:35.830124Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TColumnShardTestReadWrite::WriteReadZSTD >> TColumnShardTestReadWrite::WriteOverload-InStore+WithWritePortionsOnInsert |93.6%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 2900, MsgBus: 1238 2025-04-17T18:16:00.014742Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347530502416301:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:00.038815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e2d/r3tmp/tmpzuEgVL/pdisk_1.dat 2025-04-17T18:16:00.449261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:00.465834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:00.465917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:00.468502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2900, node 1 2025-04-17T18:16:00.582685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:00.582709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:00.582715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:00.582830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1238 TClient is connected to server localhost:1238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:01.091702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:03.032148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347547682286010:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:03.032267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:03.032528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347547682286022:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:03.036212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:03.049742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347547682286024:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:03.109892Z node 1 :TX_PROXY ERROR: Actor# [1:7494347547682286077:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:03.413214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:16:03.662056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:03.662403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:03.662728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:03.662843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:03.662946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:03.663069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:03.663195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:03.663325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:03.663425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:03.663567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:03.663674Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:03.663781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494347547682286333:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:03.664586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:03.664678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:03.664822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:03.664935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:03.665032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:03.665127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:03.665218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:03.665322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:03.665430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:03.665519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:03.665635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:03.665725Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494347547682286327:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:03.699331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494347547682286331:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:03.699390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494347547682286331:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:03.699604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id= ... WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.951965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.956731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.957423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.961945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.962605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.967343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.968132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.972679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.973369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.978034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.978836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.983327Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.984097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.988830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.989386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.994478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:02.995244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.000352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.006748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.012814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.019023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.022650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.025371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.028857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.031578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.035418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.037826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.041503Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.099175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:17:03.154407Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ff376dwh40f2yv1qa336w", SessionId: ydb://session/3?node_id=1&id=NTg0ZWRiY2ItYWRjYWY5YmUtY2ZkYzg5NDEtOGY0OTRhYTM=, Slow query, duration: 26.827543s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:17:03.557285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:17:03.557620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:17:03.557646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494347783905546108:11130];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:17:03.557946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:17:29.605754Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fg9tt7nb7ddfm9q79srqz", SessionId: ydb://session/3?node_id=1&id=NTg0ZWRiY2ItYWRjYWY5YmUtY2ZkYzg5NDEtOGY0OTRhYTM=, Slow query, duration: 13.738333s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$ws =\n\n (select date_dim.d_year AS ws_sold_year, web_sales.ws_item_sk ws_item_sk,\n\n web_sales.ws_bill_customer_sk ws_customer_sk,\n\n sum(ws_quantity) ws_qty,\n\n sum(ws_wholesale_cost) ws_wc,\n\n sum(ws_sales_price) ws_sp\n\n from web_sales as web_sales\n\n left join web_returns as web_returns on web_returns.wr_order_number=web_sales.ws_order_number and web_sales.ws_item_sk=web_returns.wr_item_sk\n\n join date_dim as date_dim on web_sales.ws_sold_date_sk = date_dim.d_date_sk\n\n where wr_order_number is null\n\n group by date_dim.d_year, web_sales.ws_item_sk, web_sales.ws_bill_customer_sk\n\n );\n\n$cs =\n\n (select date_dim.d_year AS cs_sold_year, catalog_sales.cs_item_sk cs_item_sk,\n\n catalog_sales.cs_bill_customer_sk cs_customer_sk,\n\n sum(cs_quantity) cs_qty,\n\n sum(cs_wholesale_cost) cs_wc,\n\n sum(cs_sales_price) cs_sp\n\n from catalog_sales as catalog_sales\n\n left join catalog_returns as catalog_returns on 
catalog_returns.cr_order_number=catalog_sales.cs_order_number and catalog_sales.cs_item_sk=catalog_returns.cr_item_sk\n\n join date_dim as date_dim on catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n\n where cr_order_number is null\n\n group by date_dim.d_year, catalog_sales.cs_item_sk, catalog_sales.cs_bill_customer_sk\n\n );\n\n$ss=\n\n (select date_dim.d_year AS ss_sold_year, store_sales.ss_item_sk ss_item_sk,\n\n store_sales.ss_customer_sk ss_customer_sk,\n\n sum(ss_quantity) ss_qty,\n\n sum(ss_wholesale_cost) ss_wc,\n\n sum(ss_sales_price) ss_sp\n\n from store_sales as store_sales\n\n left join store_returns as store_returns on store_returns.sr_ticket_number=store_sales.ss_ticket_number and store_sales.ss_item_sk=store_returns.sr_item_sk\n\n join date_dim as date_dim on store_sales.ss_sold_date_sk = date_dim.d_date_sk\n\n where sr_ticket_number is null\n\n group by date_dim.d_year, store_sales.ss_item_sk, store_sales.ss_customer_sk\n\n );\n\n-- start query 1 in stream 0 using template query78.tpl and seed 1819994127\n\n select\n\nss_sold_year, ss_item_sk, ss_customer_sk,\n\ncast(ss_qty as double)/(coalesce(ws_qty,0)+coalesce(cs_qty,0)) ratio,\n\nss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price,\n\ncoalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty,\n\ncoalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost,\n\ncoalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price\n\nfrom $ss ss\n\nleft join $ws ws on (ws.ws_sold_year=ss.ss_sold_year and ws.ws_item_sk=ss.ss_item_sk and ws.ws_customer_sk=ss.ss_customer_sk)\n\nleft join $cs cs on (cs.cs_sold_year=ss.ss_sold_year and cs.cs_item_sk=ss.ss_item_sk and cs.cs_customer_sk=ss.ss_customer_sk)\n\nwhere (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2001\n\norder by\n\n ss_sold_year, ss_item_sk, ss_customer_sk,\n\n store_qty desc, store_wholesale_cost desc, store_sales_price desc,\n\n other_chan_qty,\n\n other_chan_wholesale_cost,\n\n other_chan_sales_price,\n\n ratio\n\nlimit 100;\n\n\n\n-- end query 1 in stream 0 using template query78.tpl", parameters: 0b >> TColumnShardTestReadWrite::WriteOverload-InStore-WithWritePortionsOnInsert [GOOD] |93.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Normalizers::SchemaVersionsNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore-WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-17T18:17:29.804468Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:29.883583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:29.899841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:29.900049Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:29.906942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:29.907077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:29.907242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:29.907322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:29.907406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:29.907470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:29.907529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:29.907596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:29.907671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:29.907768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:29.907888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:29.907995Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:29.928016Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:29.928138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:29.928199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:29.928337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:29.928465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:29.928620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:29.928651Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:29.928714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:29.928754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:29.928779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:29.928796Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:29.928910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:29.928957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:29.928979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:29.928999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:29.929052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:29.929086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:29.929110Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:29.929125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:29.929183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:29.929205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:29.929235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:29.929260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:29.929282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:29.929296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:29.929568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-17T18:17:29.929651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-17T18:17:29.929702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-04-17T18:17:29.929767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-17T18:17:29.929877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:29.929910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:29.929933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:29.930054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:29.930080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:29.930096Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:29.930200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:29.930236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:29.930255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:29.930379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:29.930405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:29.930429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:29.930580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:29.930623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:29.930660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
18:17:37.932186Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 4 at tablet 9437184 2025-04-17T18:17:37.932412Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:37.943897Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:37.944011Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:37.945238Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-04-17T18:17:37.945418Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:37.957248Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:37.957382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:37.959257Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-17T18:17:37.959527Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:37.971086Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:37.971191Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:37.976813Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-17T18:17:37.977072Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:37.988554Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:37.988679Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:37.990087Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-17T18:17:37.990303Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:38.001718Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:38.001825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.003048Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-17T18:17:38.003264Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:38.014668Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:38.014763Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.022371Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-17T18:17:38.022607Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:38.034061Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:38.034166Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.035438Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-17T18:17:38.035629Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:38.047598Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:38.047757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.049924Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-17T18:17:38.050135Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:38.061852Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:38.061985Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.072173Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-17T18:17:38.072442Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:38.084512Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:38.084674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.086377Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-17T18:17:38.086637Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:38.098493Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:38.098637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.100208Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-17T18:17:38.100437Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 
Blob count: 1 2025-04-17T18:17:38.112373Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T18:17:38.112515Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.120354Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-17T18:17:38.120646Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:38.132181Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:38.132282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.133424Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-17T18:17:38.133599Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:38.145349Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:38.145492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.147095Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-17T18:17:38.147354Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T18:17:38.159637Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T18:17:38.159824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.178939Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-17T18:17:38.179276Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:38.191607Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:38.191787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.193497Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-17T18:17:38.193800Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:38.206083Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:38.206278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.208033Z node 1 
:TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-17T18:17:38.208330Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:38.220520Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:38.220688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:38.556642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; 2025-04-17T18:17:38.611691Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-17T18:17:38.612076Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-17T18:17:38.624777Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-17T18:17:38.624990Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0;
>> TConsoleTests::TestDatabaseQuotas [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadOverallQuota
>> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp
>> TColumnShardTestReadWrite::WriteReadNoCompression
>> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD]
>> KqpWorkloadService::TestHandlerActorCleanup
>> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD]
>> Normalizers::SchemaVersionsNormalizer [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadStorageQuota
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD]
Test command err:
2025-04-17T18:17:39.700498Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:39.781991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:39.806900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:39.807169Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:39.815075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-04-17T18:17:39.815301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:39.815481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:39.815646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:17:39.815766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:39.815900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:39.816010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:39.816112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:39.816232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:39.816353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:39.816505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:39.816622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:39.816731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:39.845705Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:39.845911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-04-17T18:17:39.845974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-17T18:17:39.846297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-04-17T18:17:39.846379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-17T18:17:39.846416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-17T18:17:39.846601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:39.846695Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:39.846743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:39.846778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-17T18:17:39.846860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:39.846907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:39.846942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:39.846969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:39.847122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:39.847182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:39.847226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:39.847270Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:39.847370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:39.847426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:39.847476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:39.847527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:39.847602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:39.847641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:39.847674Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:39.847717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:39.847758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:39.847785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:39.848166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-17T18:17:39.848237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-17T18:17:39.848303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T18:17:39.848396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-17T18:17:39.848535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:39.848580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:39.848610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:39.848819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:39.848862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:39.848893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:39.849035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:39.849077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:39.849110Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:39.849271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;pr ... 
8:17:42.585482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:42.586809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:17:42.586842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:17:42.586873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-17T18:17:42.586925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-17T18:17:42.586961Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:17:42.587035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.587061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-17T18:17:42.587081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:42.587249Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:42.587373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.587403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:42.587488Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-17T18:17:42.587538Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-17T18:17:42.587612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:455:2462];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-17T18:17:42.587705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.587777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.587848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.588471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:42.588572Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.588673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.588720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:457:2463] finished for tablet 9437184 2025-04-17T18:17:42.589146Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:455:2462];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.206},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.208}],"full":{"a":1744913862380277,"name":"_full_task","f":1744913862380277,"d_finished":0,"c":0,"l":1744913862588786,"d":208509},"events":[{"name":"bootstrap","f":1744913862380440,"d_finished":2329,"c":1,"l":1744913862382769,"d":2329},{"a":1744913862588437,"name":"ack","f":1744913862587231,"d_finished":634,"c":1,"l":1744913862587865,"d":983},{"a":1744913862588425,"name":"processing","f":1744913862382858,"d_finished":105489,"c":9,"l":1744913862587866,"d":105850},{"name":"ProduceResults","f":1744913862381797,"d_finished":1958,"c":12,"l":1744913862588688,"d":1958},{"a":1744913862588693,"name":"Finish","f":1744913862588693,"d_finished":0,"c":0,"l":1744913862588786,"d":93},{"name":"task_result","f":1744913862382873,"d_finished":104717,"c":8,"l":1744913862587110,"d":104717}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.589222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:455:2462];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:42.589532Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:455:2462];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.206},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.208}],"full":{"a":1744913862380277,"name":"_full_task","f":1744913862380277,"d_finished":0,"c":0,"l":1744913862589265,"d":208988},"events":[{"name":"bootstrap","f":1744913862380440,"d_finished":2329,"c":1,"l":1744913862382769,"d":2329},{"a":1744913862588437,"name":"ack","f":1744913862587231,"d_finished":634,"c":1,"l":1744913862587865,"d":1462},{"a":1744913862588425,"name":"processing","f":1744913862382858,"d_finished":105489,"c":9,"l":1744913862587866,"d":106329},{"name":"ProduceResults","f":1744913862381797,"d_finished":1958,"c":12,"l":1744913862588688,"d":1958},{"a":1744913862588693,"name":"Finish","f":1744913862588693,"d_finished":0,"c":0,"l":1744913862589265,"d":572},{"name":"task_result","f":1744913862382873,"d_finished":104717,"c":8,"l":1744913862587110,"d":104717}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:17:42.589598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:42.379767Z;index_granules=0;index_portions=1;index_batches=953;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589608;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589608;selected_rows=0; 2025-04-17T18:17:42.589635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:42.589825Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD]
Test command err:
2025-04-17T18:15:09.036887Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:15:09.140295Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:09.152812Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:09.153309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:15:09.177967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:15:09.178380Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:15:09.186999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:15:09.187248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:15:09.187546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:15:09.187714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:15:09.187866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:15:09.187982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:15:09.188108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:15:09.188248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:15:09.188376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:15:09.188487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:15:09.188595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:15:09.188693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:15:09.216058Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:09.219624Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:15:09.219780Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:15:09.219852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:15:09.220051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:15:09.220219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:15:09.220308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:15:09.220351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:15:09.220474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:15:09.220549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:15:09.220600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:15:09.220634Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:15:09.220813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:15:09.220883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:15:09.220924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:15:09.220958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:15:09.221043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:15:09.221124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:15:09.221167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:15:09.221196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:15:09.221464Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:15:09.221515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:15:09.221547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:15:09.221607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:15:09.221647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:15:09.221677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:15:09.222125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-17T18:15:09.222361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=178; 2025-04-17T18:15:09.222455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-17T18:15:09.222568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-17T18:15:09.222750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:15:09.222834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:15:09.222873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:15:09.223084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:15:09.223128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:15:09.223160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:15:09.223530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:15:09.223662Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:15:09.223725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:15:09.223927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:15:09.223969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:15:09.223999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T ... f=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.525541Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.525560Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:41.525585Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:17:41.525650Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:41.525704Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.525724Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:41.525774Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-17T18:17:41.525798Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-17T18:17:41.525892Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-04-17T18:17:41.525951Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.526012Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.526140Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.526226Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:41.526276Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.526319Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator 
is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.526353Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:858:2850] finished for tablet 9437184 2025-04-17T18:17:41.526802Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:851:2843];stats={"p":[{"events":["f_bootstrap"],"t":0.048},{"events":["f_ProduceResults"],"t":0.435},{"events":["l_bootstrap"],"t":0.687},{"events":["f_processing","f_task_result"],"t":0.705},{"events":["l_task_result"],"t":6.925},{"events":["f_ack"],"t":6.961},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.669}],"full":{"a":1744913853857211,"name":"_full_task","f":1744913853857211,"d_finished":0,"c":0,"l":1744913861526408,"d":7669197},"events":[{"name":"bootstrap","f":1744913853905812,"d_finished":639287,"c":1,"l":1744913854545099,"d":639287},{"a":1744913861526218,"name":"ack","f":1744913860818963,"d_finished":656916,"c":904,"l":1744913861526163,"d":657106},{"a":1744913861526211,"name":"processing","f":1744913854563116,"d_finished":3303369,"c":4520,"l":1744913861526164,"d":3303566},{"name":"ProduceResults","f":1744913854292486,"d_finished":1290430,"c":5426,"l":1744913861526330,"d":1290430},{"a":1744913861526337,"name":"Finish","f":1744913861526337,"d_finished":0,"c":0,"l":1744913861526408,"d":71},{"name":"task_result","f":1744913854563146,"d_finished":2572365,"c":3616,"l":1744913860783021,"d":2572365}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.526872Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:41.527265Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:851:2843];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.048},{"events":["f_ProduceResults"],"t":0.435},{"events":["l_bootstrap"],"t":0.687},{"events":["f_processing","f_task_result"],"t":0.705},{"events":["l_task_result"],"t":6.925},{"events":["f_ack"],"t":6.961},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.669}],"full":{"a":1744913853857211,"name":"_full_task","f":1744913853857211,"d_finished":0,"c":0,"l":1744913861526920,"d":7669709},"events":[{"name":"bootstrap","f":1744913853905812,"d_finished":639287,"c":1,"l":1744913854545099,"d":639287},{"a":1744913861526218,"name":"ack","f":1744913860818963,"d_finished":656916,"c":904,"l":1744913861526163,"d":657618},{"a":1744913861526211,"name":"processing","f":1744913854563116,"d_finished":3303369,"c":4520,"l":1744913861526164,"d":3304078},{"name":"ProduceResults","f":1744913854292486,"d_finished":1290430,"c":5426,"l":1744913861526330,"d":1290430},{"a":1744913861526337,"name":"Finish","f":1744913861526337,"d_finished":0,"c":0,"l":1744913861526920,"d":583},{"name":"task_result","f":1744913854563146,"d_finished":2572365,"c":3616,"l":1744913860783021,"d":2572365}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:17:41.527332Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:33.798359Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-17T18:17:41.527376Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:41.527595Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> TColumnShardTestReadWrite::Write
>> TColumnShardTestReadWrite::WriteReadZSTD [GOOD]
>> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD]
>> TColumnShardTestReadWrite::RebootWriteRead
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD]
Test command err:
2025-04-17T18:17:38.581368Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:38.666323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:38.693267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:38.693625Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:38.702426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:38.702674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:38.702912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:38.703081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:38.703219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:38.703360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:38.703466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:38.703617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:38.703749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:38.703883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.704019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:38.704130Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:38.730893Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:38.731065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:38.731126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:38.731329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:38.731508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:38.731596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:38.731646Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:38.731741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:38.731813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:38.731859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:38.731895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:38.732088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:38.732173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:38.732216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:38.732258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:38.732354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:38.732411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:38.732460Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:38.732511Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:38.732615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:38.732658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:38.732692Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:38.732749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:38.732809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:38.732848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:38.733253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-17T18:17:38.733355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-17T18:17:38.733455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-17T18:17:38.733546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-17T18:17:38.733725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:38.733785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:38.733829Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:38.734017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:38.734071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.734104Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.734315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:38.734367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:38.734397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:38.734601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:38.734647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:38.734687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:38.734818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:38.734861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:38.734926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
s=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.344558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:45.344659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T18:17:45.344737Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T18:17:45.344842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-17T18:17:45.344997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.345097Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.345211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.345375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:45.345484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.345609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.345660Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1055:2926] finished for tablet 9437184 2025-04-17T18:17:45.346025Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1054:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.008},{"events":["f_ack"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744913865335285,"name":"_full_task","f":1744913865335285,"d_finished":0,"c":0,"l":1744913865345711,"d":10426},"events":[{"name":"bootstrap","f":1744913865335426,"d_finished":2190,"c":1,"l":1744913865337616,"d":2190},{"a":1744913865345356,"name":"ack","f":1744913865344335,"d_finished":898,"c":1,"l":1744913865345233,"d":1253},{"a":1744913865345346,"name":"processing","f":1744913865338603,"d_finished":3921,"c":10,"l":1744913865345236,"d":4286},{"name":"ProduceResults","f":1744913865336635,"d_finished":2227,"c":13,"l":1744913865345641,"d":2227},{"a":1744913865345645,"name":"Finish","f":1744913865345645,"d_finished":0,"c":0,"l":1744913865345711,"d":66},{"name":"task_result","f":1744913865338617,"d_finished":2935,"c":9,"l":1744913865344216,"d":2935}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.346088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:45.346420Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1054:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.008},{"events":["f_ack"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744913865335285,"name":"_full_task","f":1744913865335285,"d_finished":0,"c":0,"l":1744913865346126,"d":10841},"events":[{"name":"bootstrap","f":1744913865335426,"d_finished":2190,"c":1,"l":1744913865337616,"d":2190},{"a":1744913865345356,"name":"ack","f":1744913865344335,"d_finished":898,"c":1,"l":1744913865345233,"d":1668},{"a":1744913865345346,"name":"processing","f":1744913865338603,"d_finished":3921,"c":10,"l":1744913865345236,"d":4701},{"name":"ProduceResults","f":1744913865336635,"d_finished":2227,"c":13,"l":1744913865345641,"d":2227},{"a":1744913865345645,"name":"Finish","f":1744913865345645,"d_finished":0,"c":0,"l":1744913865346126,"d":481},{"name":"task_result","f":1744913865338617,"d_finished":2935,"c":9,"l":1744913865344216,"d":2935}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.346483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:45.334801Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:45.346522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:45.346794Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-04-17T18:17:38.854944Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 
InFlightDataSize: 0 2025-04-17T18:17:38.926797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:38.943001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:38.943219Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:38.948763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:38.948960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:38.949193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:38.949330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:38.949469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:38.949583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:38.949729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:38.949861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:38.949937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:38.950011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.950073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:38.950133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:38.968440Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:38.968607Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:38.968659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:38.968828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:38.968997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:38.969066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:38.969109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:38.969199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:38.969251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:38.969288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:38.969313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:38.969485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:38.969547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:38.969582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:38.969611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:38.969684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:38.969727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:38.969792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:38.969822Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:38.969904Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:38.969944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:38.969973Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:38.970023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:38.970077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:38.970105Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:38.970453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-17T18:17:38.970564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=65; 2025-04-17T18:17:38.970658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-04-17T18:17:38.970735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-17T18:17:38.970910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:38.970983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:38.971023Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:38.971193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:38.971231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.971256Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.971402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:38.971445Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:38.971474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:38.971644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:38.971682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:38.971706Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:38.971828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:38.971863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:38.971901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... s=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.098093Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:45.098215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T18:17:45.098295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T18:17:45.098404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-17T18:17:45.098516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.098629Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.098737Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.098907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:45.099038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.099161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.099194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1055:2926] finished for tablet 9437184 2025-04-17T18:17:45.099546Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1054:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":1744913865087186,"name":"_full_task","f":1744913865087186,"d_finished":0,"c":0,"l":1744913865099240,"d":12054},"events":[{"name":"bootstrap","f":1744913865087351,"d_finished":2454,"c":1,"l":1744913865089805,"d":2454},{"a":1744913865098882,"name":"ack","f":1744913865097859,"d_finished":900,"c":1,"l":1744913865098759,"d":1258},{"a":1744913865098871,"name":"processing","f":1744913865091076,"d_finished":4377,"c":10,"l":1744913865098761,"d":4746},{"name":"ProduceResults","f":1744913865088718,"d_finished":2462,"c":13,"l":1744913865099183,"d":2462},{"a":1744913865099184,"name":"Finish","f":1744913865099184,"d_finished":0,"c":0,"l":1744913865099240,"d":56},{"name":"task_result","f":1744913865091090,"d_finished":3379,"c":9,"l":1744913865097730,"d":3379}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.099597Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:45.099921Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1054:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":1744913865087186,"name":"_full_task","f":1744913865087186,"d_finished":0,"c":0,"l":1744913865099633,"d":12447},"events":[{"name":"bootstrap","f":1744913865087351,"d_finished":2454,"c":1,"l":1744913865089805,"d":2454},{"a":1744913865098882,"name":"ack","f":1744913865097859,"d_finished":900,"c":1,"l":1744913865098759,"d":1651},{"a":1744913865098871,"name":"processing","f":1744913865091076,"d_finished":4377,"c":10,"l":1744913865098761,"d":5139},{"name":"ProduceResults","f":1744913865088718,"d_finished":2462,"c":13,"l":1744913865099183,"d":2462},{"a":1744913865099184,"name":"Finish","f":1744913865099184,"d_finished":0,"c":0,"l":1744913865099633,"d":449},{"name":"task_result","f":1744913865091090,"d_finished":3379,"c":9,"l":1744913865097730,"d":3379}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:45.099988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:45.086646Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:45.100027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:45.100331Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-17T18:16:07.396232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:16:07.396316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:07.396350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:16:07.396381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:16:07.396421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:16:07.396465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:16:07.396522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:16:07.396608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:16:07.396940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:16:07.476240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" 
AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:16:07.476330Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:181:2058] recipient: [1:15:2062] 2025-04-17T18:16:07.492068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:16:07.492431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:16:07.492558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:16:07.497715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:16:07.497932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:16:07.498671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:07.498902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:16:07.500840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:07.502315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:07.502394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:07.502449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:16:07.502499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:07.502537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:16:07.502726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:212:2058] recipient: [1:210:2211] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:212:2058] recipient: [1:210:2211] Leader for TabletID 72057594037968897 is [1:216:2215] sender: [1:217:2058] recipient: [1:210:2211] 2025-04-17T18:16:07.509770Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-17T18:16:07.640539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:16:07.640769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.641006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:16:07.641232Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:16:07.641315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.643684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:07.643814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:16:07.644034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.644096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:16:07.644138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:16:07.644171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:16:07.646009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.646089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:16:07.646141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:16:07.647919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.647965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.648009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:07.648047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:16:07.657726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:16:07.659923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:16:07.660099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:252:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:16:07.661104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:16:07.661239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:16:07.661288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:07.661559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:16:07.661629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:16:07.661809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:16:07.661893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:16:07.663963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:16:07.663996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:16:07.664131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:16:07.664161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:16:07.664433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:16:07.664466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:16:07.664551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:16:07.664574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:16:07.664603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 
5813Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-17T18:17:45.755934Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 416611830023 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:17:45.755978Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:17:45.756051Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 324 RawX2: 416611830023 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:17:45.756107Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:17:45.756142Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-17T18:17:45.758449Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:17:45.758592Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:17:45.760843Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1262 } } 2025-04-17T18:17:45.760897Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-04-17T18:17:45.761032Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1262 } } 2025-04-17T18:17:45.761123Z node 97 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1262 } } 2025-04-17T18:17:45.761502Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:17:45.761959Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 431 RawX2: 416611830113 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:17:45.761997Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 
1003, tablet: 72075186233409547, partId: 0 2025-04-17T18:17:45.762099Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 431 RawX2: 416611830113 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:17:45.762142Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:17:45.762243Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 431 RawX2: 416611830113 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-17T18:17:45.762293Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:17:45.762325Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:17:45.762361Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:17:45.762400Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:17:45.762428Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-17T18:17:45.765289Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:17:45.765672Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:17:45.766157Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:17:45.766227Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-04-17T18:17:45.766271Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-17T18:17:45.766304Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-04-17T18:17:45.766365Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-17T18:17:45.766398Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2025-04-17T18:17:45.770094Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:17:45.770143Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-04-17T18:17:45.770255Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-17T18:17:45.770290Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-17T18:17:45.770328Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#1003:0 progress is 1/1
2025-04-17T18:17:45.770356Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1
2025-04-17T18:17:45.770392Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true
2025-04-17T18:17:45.770433Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1
2025-04-17T18:17:45.770475Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0
2025-04-17T18:17:45.770514Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0
2025-04-17T18:17:45.770667Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-04-17T18:17:45.770706Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
TestModificationResult got TxId: 1003, wait until txId: 1003
TestWaitNotification wait txId: 1003
2025-04-17T18:17:45.772530Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion
2025-04-17T18:17:45.772573Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003
2025-04-17T18:17:45.772918Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944
2025-04-17T18:17:45.772997Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult
2025-04-17T18:17:45.773031Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:528:2489]
TestWaitNotification: OK eventTxId 1003
2025-04-17T18:17:45.773457Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-17T18:17:45.773656Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 239us result status StatusSuccess
2025-04-17T18:17:45.774105Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
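The describe result above confirms the copied table kept its TTL configuration (column "modified_at", ExpireAfterSeconds: 3600 with a delete tier). For reference, a table with exactly this TTL can be declared from a client; the following is a minimal sketch assuming the ydb Python SDK, with placeholder endpoint and database values, and is not part of the test run itself:

```python
# Illustrative sketch only: declare a table whose schemeshard description would
# carry TTLSettings { ColumnName: "modified_at", ExpireAfterSeconds: 3600 }.
# Endpoint/database are placeholders; assumes the ydb Python SDK.
import ydb

def create_ttl_table(endpoint: str, database: str) -> None:
    driver = ydb.Driver(endpoint=endpoint, database=database)
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def callee(session):
        # Timestamp column plus a one-hour TTL, matching ExpireAfterSeconds: 3600.
        session.execute_scheme("""
            CREATE TABLE TTLEnabledTableCopy (
                key Uint64,
                modified_at Timestamp,
                PRIMARY KEY (key)
            );
        """)
        session.execute_scheme("""
            ALTER TABLE TTLEnabledTableCopy
            SET (TTL = Interval("PT1H") ON modified_at);
        """)

    try:
        pool.retry_operation_sync(callee)
    finally:
        pool.stop()
        driver.stop()
```

Interval("PT1H") corresponds to the ExpireAfterSeconds: 3600 seen in the TTLSettings block of the describe output.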
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32
>> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD]
>> KqpWorkloadServiceTables::TestLeaseUpdates
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD]
Test command err:
2025-04-17T18:16:19.708448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:16:19.708509Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:16:19.793605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-17T18:16:21.051010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944
2025-04-17T18:16:21.209109Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-04-17T18:16:21.209696Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-04-17T18:16:21.210408Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0.
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17991849228938253871 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:16:21.253970Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:16:21.254501Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:16:21.255052Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1222820985532528541 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:16:21.292053Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:16:21.292604Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:16:21.292825Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9015788631605311762 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:16:21.296418Z node 2 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T18:16:21.383202Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:16:21.383641Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:16:21.383852Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpKm9szT/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1032569585846785546 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:16:21.386496Z node 4 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration ... ld/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:17:37.007267Z node 146 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12987156711807828535 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:17:37.009972Z node 146 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-17T18:17:37.075568Z node 151 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:17:37.075955Z node 151 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:17:37.076121Z node 151 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13418376210814037964 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:17:37.116498Z node 153 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:17:37.116832Z node 153 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:17:37.116962Z node 153 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6272613325287048983 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:17:37.391338Z node 145 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:17:37.391435Z node 145 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:37.493775Z node 145 :STATISTICS WARN: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2025-04-17T18:17:37.646262Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:17:37.646874Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:17:37.647185Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/0016e9/r3tmp/tmpljt7o3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12785991850035596217 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-04-17T18:17:40.437499Z node 154 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:17:40.437594Z node 154 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:17:40.503116Z node 154 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-17T18:17:43.800009Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-17T18:17:43.800093Z node 163 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:17:43.856954Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
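Every failing node in this run reports the same pair of BS_PDISK CRIT records: a BlockDevice initialization error on pdisk_1.dat followed by the PDisk bootstrapping to StateError. A quick way to tally these per node when triaging such a log is sketched below; the script and its input path are illustrative, not part of the test suite:

```python
# Illustrative triage sketch: count PDisk bootstrap failures per node in a
# flattened run log. The log path comes from argv; the regex targets the
# ":BS_PDISK CRIT: ... bootstrapped to the StateError" records shown above.
import re
import sys
from collections import Counter

PATTERN = re.compile(r"node (\d+) :BS_PDISK CRIT: .*?bootstrapped to the StateError")

def count_pdisk_failures(path: str) -> Counter:
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            # findall returns the captured node numbers, one per CRIT record
            for node in PATTERN.findall(line):
                counts[int(node)] += 1
    return counts

if __name__ == "__main__":
    for node, n in sorted(count_pdisk_failures(sys.argv[1]).items()):
        print(f"node {node}: {n} PDisk bootstrap failure(s)")
```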
>> TColumnShardTestReadWrite::ReadWithProgramNoProjection
>> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD]
|93.7%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|93.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log}
>> TColumnShardTestReadWrite::WriteReadDuplicate
>> TColumnShardTestReadWrite::WriteOverload-InStore+WithWritePortionsOnInsert [GOOD]
|93.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD]
|93.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log}
|93.7%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD]
Test command err:
2025-04-17T18:17:41.068721Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-17T18:17:41.155476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:17:41.178955Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-17T18:17:41.179247Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-17T18:17:41.186640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:17:41.186840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:17:41.187036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:17:41.187175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:17:41.187305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:17:41.187408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:17:41.187499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-17T18:17:41.187625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:17:41.187739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-17T18:17:41.187851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-17T18:17:41.187953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-17T18:17:41.188054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-17T18:17:41.214103Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:41.214278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:41.214334Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:41.214481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:41.214650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:41.214729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:41.214768Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:41.214868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:41.214962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:41.215007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:41.215038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:41.215197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:41.215263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:41.215301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:41.215332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:41.215424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:41.215483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:41.215546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:41.215578Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:41.215671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:41.215707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:41.215741Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:41.215795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:41.215856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:41.215887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:41.216239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-17T18:17:41.216327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-17T18:17:41.216398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:17:41.216468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-17T18:17:41.216626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:41.216682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:41.216716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:41.216906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:41.216947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:41.216973Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:41.217126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-17T18:17:41.217173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:41.217203Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:41.217386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:41.217429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:41.217455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:41.217572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:41.217612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:41.217649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... quest_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.620152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:47.620255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T18:17:47.620321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T18:17:47.620421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-17T18:17:47.620531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.620608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.620699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.620838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:47.620932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.621040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.621070Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1055:2926] finished for tablet 9437184 2025-04-17T18:17:47.621380Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1054:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744913867610634,"name":"_full_task","f":1744913867610634,"d_finished":0,"c":0,"l":1744913867621109,"d":10475},"events":[{"name":"bootstrap","f":1744913867610784,"d_finished":2134,"c":1,"l":1744913867612918,"d":2134},{"a":1744913867620820,"name":"ack","f":1744913867619927,"d_finished":791,"c":1,"l":1744913867620718,"d":1080},{"a":1744913867620814,"name":"processing","f":1744913867613804,"d_finished":3973,"c":10,"l":1744913867620720,"d":4268},{"name":"ProduceResults","f":1744913867611907,"d_finished":2246,"c":13,"l":1744913867621060,"d":2246},{"a":1744913867621062,"name":"Finish","f":1744913867621062,"d_finished":0,"c":0,"l":1744913867621109,"d":47},{"name":"task_result","f":1744913867613815,"d_finished":3086,"c":9,"l":1744913867619793,"d":3086}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.621446Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:47.621760Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1054:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744913867610634,"name":"_full_task","f":1744913867610634,"d_finished":0,"c":0,"l":1744913867621477,"d":10843},"events":[{"name":"bootstrap","f":1744913867610784,"d_finished":2134,"c":1,"l":1744913867612918,"d":2134},{"a":1744913867620820,"name":"ack","f":1744913867619927,"d_finished":791,"c":1,"l":1744913867620718,"d":1448},{"a":1744913867620814,"name":"processing","f":1744913867613804,"d_finished":3973,"c":10,"l":1744913867620720,"d":4636},{"name":"ProduceResults","f":1744913867611907,"d_finished":2246,"c":13,"l":1744913867621060,"d":2246},{"a":1744913867621062,"name":"Finish","f":1744913867621062,"d_finished":0,"c":0,"l":1744913867621477,"d":415},{"name":"task_result","f":1744913867613815,"d_finished":3086,"c":9,"l":1744913867619793,"d":3086}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:47.621817Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:47.610100Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:47.621851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:47.622105Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore+WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-17T18:17:39.083718Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:39.169845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:39.190593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:39.190798Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:39.196256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:39.196390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:39.196525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:39.196599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:39.196684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:39.196742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:39.196806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:39.196877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:39.196945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:39.197047Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:39.197118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:39.197176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:39.213447Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:39.213577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:39.213626Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:39.213759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:39.213864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:39.213916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:39.213943Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:39.214002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:39.214044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:39.214070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:39.214090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:39.214228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:39.214269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:39.214292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:39.214315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:39.214375Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:39.214411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:39.214438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:39.214456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:39.214524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:39.214554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:39.214571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:39.214596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:39.214636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:39.214660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:39.214921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-17T18:17:39.214974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=22; 2025-04-17T18:17:39.215019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=19; 2025-04-17T18:17:39.215076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-04-17T18:17:39.215186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:39.215222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:39.215251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:39.215384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
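Note: the records above trace the shard's schema-normalization chain at bootstrap. Each normalizer is registered during TTxInitSchema (normalizer_register), then executed strictly in registration order, each completion (normalizer_finished) followed by a hand-off to the next CLASS_NAME (normalizer_switched) until normalization_finished. The following is a minimal sketch of that register/run/switch pattern in plain standard C++; all class and function names here are hypothetical illustrations, not YDB's actual normalizer interface.

#include <cstdio>
#include <initializer_list>
#include <memory>
#include <string>
#include <vector>

// Hypothetical stand-in for one normalization pass; YDB's real interface differs.
struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string Name() const = 0;
    // Returns the number of chunks repaired; 0 corresponds to "0 chunks found" above.
    virtual int Run() = 0;
};

struct TNoopNormalizer : INormalizer {
    std::string ClassName;
    explicit TNoopNormalizer(std::string name) : ClassName(std::move(name)) {}
    std::string Name() const override { return ClassName; }
    int Run() override { return 0; }
};

int main() {
    // Registration order fixes execution order, as in the normalizer_register records.
    std::vector<std::unique_ptr<INormalizer>> chain;
    for (const char* name : {"Granules", "Chunks", "TablesCleaner",
                             "CleanGranuleId", "CleanInsertionDedup"}) {
        chain.push_back(std::make_unique<TNoopNormalizer>(name));
        std::printf("event=normalizer_register;description=CLASS_NAME=%s\n", name);
    }
    // Sequential execution: finish one normalizer, then switch to the next.
    for (size_t i = 0; i < chain.size(); ++i) {
        int chunks = chain[i]->Run();
        std::printf("event=normalizer_finished;description=CLASS_NAME=%s;chunks=%d\n",
                    chain[i]->Name().c_str(), chunks);
        if (i + 1 < chain.size()) {
            std::printf("event=normalizer_switched;description=CLASS_NAME=%s\n",
                        chain[i + 1]->Name().c_str());
        }
    }
    std::printf("event=normalization_finished\n");
}

Running this prints a register/finished/switched sequence shaped like the records above.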
2025-04-17T18:17:39.215410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:39.215426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:39.215502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:39.215530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:39.215546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:39.215665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:39.215690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:39.215705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:39.215781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:39.215808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:39.215834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... tablet_id=9437184; 2025-04-17T18:17:39.464715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:17:39.464738Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
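Note: the TEvPeriodicWakeup records here and immediately below show the wakeup handler sending (or skipping) periodic stats and then declining to enqueue background activities because the freshly bootstrapped tablet has no index yet ("Background activities cannot be started: no index at tablet"). A minimal model of that gating, with hypothetical state fields that are not YDB's actual members:

#include <cstdio>

// Illustrative model of the periodic-wakeup gating visible around this point.
struct TTabletState {
    bool HasIndex = false;       // "no index at tablet" until a table is registered
    bool PeriodicStats = false;  // "Disabled periodic stats"
};

void OnPeriodicWakeup(TTabletState& state) {
    if (state.PeriodicStats) {
        std::printf("Send periodic stats.\n");
    } else {
        std::printf("Disabled periodic stats\n");
    }
    // Background work (indexation, compaction, ...) needs an index to operate on.
    if (!state.HasIndex) {
        std::printf("problem=Background activities cannot be started: no index at tablet\n");
        return;
    }
    std::printf("event=EnqueueBackgroundActivities\n");
}

int main() {
    TTabletState fresh;        // freshly bootstrapped shard, as in this test
    OnPeriodicWakeup(fresh);   // rejected: no index yet
    fresh.HasIndex = true;     // after EnsureTable/RegisterTable
    OnPeriodicWakeup(fresh);   // background activities enqueued
}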
2025-04-17T18:17:39.464762Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:17:39.464792Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:17:39.464836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-17T18:17:39.733826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=88923003853600;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-04-17T18:17:39.745629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881026880;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-04-17T18:17:39.745747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881026880;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-17T18:17:39.745781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1744913860134;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881026880;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-04-17T18:17:39.746043Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-17T18:17:39.746234Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1744913860134 at tablet 9437184, mediator 0 2025-04-17T18:17:39.746292Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-17T18:17:39.746624Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-17T18:17:39.746737Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-17T18:17:39.750971Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-17T18:17:39.754813Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T18:17:39.755641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-17T18:17:39.755819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:214;event=finished_tx;tx_id=10; 2025-04-17T18:17:39.778726Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-17T18:17:39.778849Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6330728;columns=10; CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK 2025-04-17T18:17:46.360316Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=columnshard__write.cpp:96;event=shard_overload;reason=writes_size_in_fly;sum=132945288;limit=128000000; 2025-04-17T18:17:46.362329Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=overload data error;tx_id=0; CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK 2025-04-17T18:17:47.266767Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T18:17:47.289639Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-17T18:17:47.302670Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T18:17:47.314753Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-17T18:17:47.317347Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-17T18:17:47.329418Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-17T18:17:47.332176Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:47.344342Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-17T18:17:47.356110Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:47.368190Z node 1 
:TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-17T18:17:47.370869Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:47.383053Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-17T18:17:47.385618Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:47.397235Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-17T18:17:47.404530Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:47.415968Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-17T18:17:47.417832Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:47.429355Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-17T18:17:47.431217Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:47.442988Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-17T18:17:47.452838Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:47.464572Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-17T18:17:47.467235Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:47.479030Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-17T18:17:47.481362Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:47.493300Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-17T18:17:47.505707Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:47.518527Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-17T18:17:47.521385Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T18:17:47.534205Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-17T18:17:47.537134Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:47.549312Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-17T18:17:47.566077Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:47.578444Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-17T18:17:47.581249Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-17T18:17:47.593660Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 
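Note: the shard_overload record above (sum=132945288;limit=128000000) shows the write-backpressure behavior this overload test exercises: once the bytes of writes still in fly would exceed the limit, the shard answers STATUS_OVERLOADED instead of accepting more data, and the in-fly counter drains as the BlobManager Save Batch records persist each blob. A minimal sketch of that check follows; the names are hypothetical, while the 128000000-byte limit and the 6330728-byte serialized batch size are taken from this log (21 x 6330728 = 132945288, matching the reported sum):

#include <cstdint>
#include <cstdio>

// Illustrative in-fly write backpressure check, mirroring the
// writes_size_in_fly records above. Not YDB's actual implementation.
constexpr uint64_t kWritesInFlyLimitBytes = 128000000;

enum class EWriteStatus { Accepted, Overloaded };

struct TShard {
    uint64_t WritesInFlyBytes = 0;

    EWriteStatus OnWrite(uint64_t blobBytes) {
        if (WritesInFlyBytes + blobBytes > kWritesInFlyLimitBytes) {
            // Reply STATUS_OVERLOADED instead of queueing more data.
            std::printf("shard_overload;reason=writes_size_in_fly;sum=%llu;limit=%llu\n",
                        (unsigned long long)(WritesInFlyBytes + blobBytes),
                        (unsigned long long)kWritesInFlyLimitBytes);
            return EWriteStatus::Overloaded;
        }
        WritesInFlyBytes += blobBytes;  // released again once the blob is persisted
        return EWriteStatus::Accepted;
    }
};

int main() {
    TShard shard;
    // Each write serializes ~6.3 MB ("size=6330728" above); twenty fit under
    // the limit, and the twenty-first trips the overload path.
    for (int i = 0; i < 22; ++i) {
        if (shard.OnWrite(6330728) == EWriteStatus::Overloaded) {
            std::printf("write %d rejected with STATUS_OVERLOADED\n", i);
            break;
        }
    }
}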
2025-04-17T18:17:47.596398Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:47.608385Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-17T18:17:47.618209Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:47.630653Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-17T18:17:47.632830Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:47.644704Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-17T18:17:47.977443Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-17T18:17:47.988991Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test command err: 2025-04-17T18:17:47.674205Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:47.772075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:47.797351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:47.797655Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:47.805923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:47.806121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:47.806360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:47.806484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:47.806636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:47.806741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:47.806832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:47.806990Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:47.807112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:47.807241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:47.807409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:47.807537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:47.837103Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:47.837272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:47.837346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:47.837509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:47.837690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:47.837784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:47.837840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:47.837942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:47.838024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:47.838072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:47.838101Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:47.838298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:47.838372Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:47.838407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:47.838443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:47.838557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:47.838616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:47.838668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:47.838697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:47.838792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:47.838832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:47.838860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:47.838919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:47.838970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:47.839001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:47.839365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-04-17T18:17:47.839467Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-04-17T18:17:47.839545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-17T18:17:47.839632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-17T18:17:47.839836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:47.839893Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:47.839931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:47.840149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:47.840202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:47.840233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:47.840375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:47.840416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:47.840443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:47.840649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:47.840697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:47.840727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:47.840866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:47.840907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:47.840950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
; 2025-04-17T18:17:48.552636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:100;schema=level: int32 timestamp: timestamp[us];);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.552669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:48.552765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;);columns=2;rows=100; 2025-04-17T18:17:48.552817Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1200;num_rows=100;batch_columns=level,timestamp; 2025-04-17T18:17:48.552894Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:260:2278];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2025-04-17T18:17:48.553007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.553093Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.553195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.553311Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:48.553417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.553525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.553559Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:268:2286] finished for tablet 9437184 2025-04-17T18:17:48.553865Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:260:2278];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1744913868538162,"name":"_full_task","f":1744913868538162,"d_finished":0,"c":0,"l":1744913868553593,"d":15431},"events":[{"name":"bootstrap","f":1744913868538449,"d_finished":4218,"c":1,"l":1744913868542667,"d":4218},{"a":1744913868553296,"name":"ack","f":1744913868552509,"d_finished":714,"c":1,"l":1744913868553223,"d":1011},{"a":1744913868553287,"name":"processing","f":1744913868544934,"d_finished":4674,"c":10,"l":1744913868553225,"d":4980},{"name":"ProduceResults","f":1744913868541150,"d_finished":2039,"c":13,"l":1744913868553547,"d":2039},{"a":1744913868553550,"name":"Finish","f":1744913868553550,"d_finished":0,"c":0,"l":1744913868553593,"d":43},{"name":"task_result","f":1744913868544955,"d_finished":3855,"c":9,"l":1744913868552433,"d":3855}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.553925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:260:2278];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:48.554236Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:260:2278];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1744913868538162,"name":"_full_task","f":1744913868538162,"d_finished":0,"c":0,"l":1744913868553957,"d":15795},"events":[{"name":"bootstrap","f":1744913868538449,"d_finished":4218,"c":1,"l":1744913868542667,"d":4218},{"a":1744913868553296,"name":"ack","f":1744913868552509,"d_finished":714,"c":1,"l":1744913868553223,"d":1375},{"a":1744913868553287,"name":"processing","f":1744913868544934,"d_finished":4674,"c":10,"l":1744913868553225,"d":5344},{"name":"ProduceResults","f":1744913868541150,"d_finished":2039,"c":13,"l":1744913868553547,"d":2039},{"a":1744913868553550,"name":"Finish","f":1744913868553550,"d_finished":0,"c":0,"l":1744913868553957,"d":407},{"name":"task_result","f":1744913868544955,"d_finished":3855,"c":9,"l":1744913868552433,"d":3855}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-17T18:17:48.554304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:48.536996Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:48.554347Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:48.554571Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:268:2286];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2025-04-17T18:17:48.555054Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2025-04-17T18:17:48.555225Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {1744913868739:100} readable: {1744913868739:max} at tablet 9437184 2025-04-17T18:17:48.555322Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-04-17T18:17:48.555514Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1744913868739:100};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-04-17T18:17:48.555591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1744913868739:100};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-04-17T18:17:48.555686Z node 1 :TX_COLUMNSHARD_SCAN WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1744913868739:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; >> TColumnShardTestReadWrite::Write [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-04-17T18:17:44.815355Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:44.901846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:44.927090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:44.927418Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:44.935665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:44.935864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:44.936093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:44.936223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:44.936365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:44.936489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:44.936602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:44.936722Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:44.936844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:44.936976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:44.937102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:44.937203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:44.965595Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:44.965811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:44.965875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:44.966071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:44.966282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:44.966384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:44.966442Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:44.966553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:44.966626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:44.966674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:44.966706Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:44.966937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:44.967022Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:44.967068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:44.967102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:44.967206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:44.967273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:44.967323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:44.967355Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:44.967460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:44.967510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:44.967545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:44.967600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:44.967646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:44.967674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:44.968069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2025-04-17T18:17:44.968181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=57; 2025-04-17T18:17:44.968260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-17T18:17:44.968354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-04-17T18:17:44.968550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:44.968617Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:44.968656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:44.968862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:44.968918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:44.968975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:44.969154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:44.969215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:44.969249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:44.969467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:44.969516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:44.969549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:44.969681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:44.969724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:44.969773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
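
The trace above shows the ColumnShard bootstrap pattern: TTxInitSchema registers the normalizers, and TTxUpdateSchema then drains all eleven of them (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks) strictly one at a time until normalization_finished. A minimal sketch of that control flow follows, with hypothetical names rather than YDB's actual TX_COLUMNSHARD interfaces (any C++14 compiler):

    // Hypothetical sketch of the sequential normalizer chain traced above.
    // INormalizer and the concrete class names are illustrative only; they
    // are not YDB's actual types.
    #include <cstddef>
    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // One normalizer repairs one aspect of on-disk state and reports how
    // many chunks it touched, matching the "N chunks found" log messages.
    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual std::string Name() const = 0;
        virtual std::size_t Run() = 0; // number of repaired chunks
    };

    struct TGranulesNormalizer : INormalizer {
        std::string Name() const override { return "Granules"; }
        std::size_t Run() override { return 0; } // fresh tablet: nothing to fix
    };

    struct TChunksNormalizer : INormalizer {
        std::string Name() const override { return "Chunks"; }
        std::size_t Run() override { return 0; }
    };

    int main() {
        std::vector<std::unique_ptr<INormalizer>> chain;
        chain.push_back(std::make_unique<TGranulesNormalizer>());
        chain.push_back(std::make_unique<TChunksNormalizer>());

        // Strictly sequential drain, mirroring the normalizer_init ->
        // "chunks found" -> normalizer_finished -> normalizer_switched cadence.
        for (std::size_t i = 0; i < chain.size(); ++i) {
            std::cout << "event=normalizer_init;type=" << chain[i]->Name() << "\n";
            std::cout << chain[i]->Run() << " chunks found\n";
            std::cout << "event=normalizer_finished;description=CLASS_NAME="
                      << chain[i]->Name() << ";\n";
            if (i + 1 < chain.size()) {
                std::cout << "event=normalizer_switched;description=CLASS_NAME="
                          << chain[i + 1]->Name() << ";\n";
            }
        }
        std::cout << "event=normalization_finished\n";
    }

The seq_id and last=0 fields in the log suggest the real controller also persists which normalizer finished last; the sketch leaves that bookkeeping out.
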
:[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-17T18:17:48.899356Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore >> TStorageTenantTest::CreateSolomonInsideSubDomain >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain >> TStorageTenantTest::DeclareAndDefine >> TStorageTenantTest::GenericCases >> TStorageTenantTest::Boot >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2025-04-17T18:16:26.244440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347648747005583:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.244498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013d6/r3tmp/tmpDxK3rz/pdisk_1.dat 2025-04-17T18:16:26.715089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.718354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.727801Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:26.751025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13156, node 1 2025-04-17T18:16:26.973606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2025-04-17T18:16:26.973631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.973641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.973752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.519024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:29.318502Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:29.319191Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-17T18:16:29.324955Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347661631908103:2328], Start check tables existence, number paths: 2 2025-04-17T18:16:29.325168Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.325192Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:29.327593Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347661631908103:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:29.327659Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347661631908103:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:29.327690Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347661631908103:2328], Successfully finished 2025-04-17T18:16:29.332549Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:29.369435Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmNjOWIxNjQtNjdiZDI3NDMtZmE3MWUwMzEtZjI3OTk4MTE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MmNjOWIxNjQtNjdiZDI3NDMtZmE3MWUwMzEtZjI3OTk4MTE= 2025-04-17T18:16:29.369741Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmNjOWIxNjQtNjdiZDI3NDMtZmE3MWUwMzEtZjI3OTk4MTE=, ActorId: [1:7494347661631908128:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.395505Z node 1 
:KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347661631908130:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.401264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:29.404416Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347661631908130:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:29.407088Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347661631908130:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:29.419731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347661631908130:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:29.494299Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347661631908130:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.501006Z node 1 :TX_PROXY ERROR: Actor# [1:7494347661631908181:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:29.501145Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347661631908130:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:29.525351Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-17T18:16:29.525386Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:29.525441Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347661631908190:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-17T18:16:29.526506Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmNjOWIxNjQtNjdiZDI3NDMtZmE3MWUwMzEtZjI3OTk4MTE=, ActorId: [1:7494347661631908128:2330], ActorState: ReadyState, TraceId: 01js2fewjd0x6bfkstg1q0bag9, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-17T18:16:29.527288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347661631908190:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:29.527404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:29.974583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:16:29.979966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:16:29.983241Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MmNjOWIxNjQtNjdiZDI3NDMtZmE3MWUwMzEtZjI3OTk4MTE=, ActorId: [1:7494347661631908128:2330], ActorState: ExecuteState, TraceId: 01js2fewjd0x6bfkstg1q0bag9, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7494347661631908199:2330] WorkloadServiceCleanup: 0 2025-04-17T18:16:29.994622Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmNjOWIxNjQtNjdiZDI3NDMtZmE3MWUwMzEtZjI3OTk4MTE=, ActorId: [1:7494347661631908128:2330], ActorState: CleanupState, TraceId: 01js2fewjd0x6bfkstg1q0bag9, EndCleanup, isFinal: 0 2025-04-17T18:16:29.994701Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmNjOWIxNjQtNjdiZDI3NDMtZmE3MWUwMzEtZjI3OTk4MTE=, ActorId: [1:7494347661631908128:2330], ActorState: CleanupState, TraceId: 01js2fewjd0x6bfkstg1q0bag9, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7494347648747005825:2277] 2025-04-17T18:16:30.008990Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzI1ODk0ZjctNzNmZDI0YTYtNDk4MDhkNjMtNDJhYTZiMg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzI1ODk0ZjctNzNmZDI0YTYtNDk4MDhkNjMtNDJhYTZiMg== 2025-04-17T18:16:30.009303Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzI1ODk0ZjctNzNmZDI0YTYtNDk4MDhkNjMtNDJhYTZiMg==, ActorId: [1:7494347665926875526:2335], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:30.009419Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzI1ODk0ZjctNzNmZDI0YTYtNDk4MDhkNjMtNDJhYTZiMg==, ActorId: [1:7494347665926875526:2335], ActorState: ReadyState, TraceId: 01js2fex1sb0k0y6aadbrghw1y, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7494347665926875525:2363] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-17T18:16:30.009454Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:16:30.009503Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7494347665926875526:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzI1ODk0ZjctNzNmZDI0YTYtNDk4MDhkNjMtNDJhYTZiMg== 2025-04-17T18:16:30.009549Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665926875528:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:30.009613Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347665926875529:2337], Database: /Root, Start database fetching 2025-04-17T18:16:30.010550Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665926875528:2336], DatabaseId: /Root, 
PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:30. ... WYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:49.995435Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ReadyState, TraceId: 01js2fhb5b2kd32cajw7snaxgs, received request, proxyRequestId: 96 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [9:7494348005464364725:2968] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-04-17T18:17:49.995466Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ReadyState, TraceId: 01js2fhb5b2kd32cajw7snaxgs, request placed into pool from cache: default 2025-04-17T18:17:49.995542Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ExecuteState, TraceId: 01js2fhb5b2kd32cajw7snaxgs, Sending CompileQuery request 2025-04-17T18:17:50.005067Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7494347962514688749:2631][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 46, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T18:17:50.005153Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7494347962514688749:2631][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 47, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T18:17:50.005830Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7494348005464364727:2969], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-17T18:17:50.006052Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ExecuteState, TraceId: 01js2fhb5b2kd32cajw7snaxgs, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-17T18:17:50.006106Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ExecuteState, TraceId: 01js2fhb5b2kd32cajw7snaxgs, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:50.006131Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ExecuteState, TraceId: 01js2fhb5b2kd32cajw7snaxgs, EndCleanup, isFinal: 0 2025-04-17T18:17:50.006289Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ExecuteState, TraceId: 01js2fhb5b2kd32cajw7snaxgs, Sent query response back to proxy, proxyRequestId: 96, proxyId: [9:7494347945334818720:2277] 2025-04-17T18:17:50.006988Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-17T18:17:50.007311Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-17T18:17:50.007364Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:50.007392Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:50.007416Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:50.007434Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:50.007491Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NzBlYmI2MjItNjliMmQ0NDItNGExMzA3OWYtN2Y5OWQ4OWY=, ActorId: [9:7494348005464364724:2967], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:50.098391Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA= 2025-04-17T18:17:50.098519Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:17:50.098959Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ReadyState, TraceId: 01js2fhb8j5z38q69s9dqjpgc2, received request, proxyRequestId: 98 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/initialization/migrations`; rpcActor: [9:7494348009759332031:2974] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-04-17T18:17:50.098994Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ReadyState, TraceId: 01js2fhb8j5z38q69s9dqjpgc2, request placed into pool from cache: default 2025-04-17T18:17:50.099098Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ExecuteState, TraceId: 01js2fhb8j5z38q69s9dqjpgc2, Sending CompileQuery request 2025-04-17T18:17:50.108326Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7494347949629786781:2564][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 50, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T18:17:50.108443Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7494347949629786781:2564][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 51, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-17T18:17:50.109119Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7494348009759332033:2975], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]
: Error: LookupError, code: 2005 2025-04-17T18:17:50.109335Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ExecuteState, TraceId: 01js2fhb8j5z38q69s9dqjpgc2, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-17T18:17:50.109378Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ExecuteState, TraceId: 01js2fhb8j5z38q69s9dqjpgc2, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:50.109401Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ExecuteState, TraceId: 01js2fhb8j5z38q69s9dqjpgc2, EndCleanup, isFinal: 0 2025-04-17T18:17:50.109496Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ExecuteState, TraceId: 01js2fhb8j5z38q69s9dqjpgc2, Sent query response back to proxy, proxyRequestId: 98, proxyId: [9:7494347945334818720:2277] 2025-04-17T18:17:50.109957Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-17T18:17:50.110168Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]
: Error: LookupError, code: 2005 2025-04-17T18:17:50.110242Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:50.110276Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:50.110306Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:50.110333Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:50.110409Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZWQ5NmU1OTItZjFhZjdmYmUtOGYyNTgwZDktMzc5ZmVhNzA=, ActorId: [9:7494348009759332030:2973], ActorState: unknown state, Session actor destroyed >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-04-17T18:17:45.797956Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:45.885523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:45.908811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:45.909114Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:45.916703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:45.916922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:45.917125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:45.917244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:45.917365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:45.917482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:45.917582Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:45.917709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:45.917825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:45.917932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:45.918032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:45.918122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:45.944618Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:45.944832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:45.944892Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:45.945067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:45.945213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:45.945280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:45.945321Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:45.945405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:45.945460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:45.945504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:45.945531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:45.945695Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:45.945766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:45.945807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:45.945837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:45.945912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:45.945958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:45.945997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:45.946022Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:45.946130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:45.946204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:45.946240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:45.946288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:45.946327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:45.946352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:45.946728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-17T18:17:45.946821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-17T18:17:45.946903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-17T18:17:45.946987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-17T18:17:45.947149Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:45.947215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:45.947254Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:45.947436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:45.947493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:45.947521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:45.947669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:45.947715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:45.947746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:45.947928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:45.947967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:45.947994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:45.948118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:45.948159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:45.948197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
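
Every TxInitSchema and TTxUpdateSchema entry in this log follows the tablet's two-phase local-transaction shape: an Execute step that stages changes ("TxInitSchema.Execute at tablet 9437184") and a Complete step that runs once they are committed ("TxInitSchema.Complete"). A skeleton of that contract, again with illustrative names only, not the real tablet executor API:

    // Hypothetical two-phase local transaction, after the TxInitSchema.Execute /
    // TxInitSchema.Complete pairs in this log. Illustrative names only.
    #include <iostream>
    #include <memory>

    struct ITransaction {
        virtual ~ITransaction() = default;
        virtual bool Execute() = 0;  // stage changes; false => re-run later
        virtual void Complete() = 0; // runs only after the commit is durable
    };

    struct TTxInitSchema : ITransaction {
        bool Execute() override {
            std::cout << "TxInitSchema.Execute\n";
            return true; // schema rows staged in one pass
        }
        void Complete() override {
            std::cout << "TxInitSchema.Complete\n";
        }
    };

    // Toy executor: run Execute until it succeeds, "commit", then Complete.
    void RunLocalTx(std::unique_ptr<ITransaction> tx) {
        while (!tx->Execute()) {
            // a real executor would re-enter here, e.g. after loading pages
        }
        // ... durable commit of the staged changes would happen here ...
        tx->Complete();
    }

    int main() {
        RunLocalTx(std::make_unique<TTxInitSchema>());
    }
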
data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.389963Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:17:52.390144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-17T18:17:52.390300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-17T18:17:52.390472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-17T18:17:52.390660Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.390810Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.390973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.391226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:17:52.391404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.391601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.391656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1055:2926] finished for tablet 9437184 2025-04-17T18:17:52.392247Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1054:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1744913872374066,"name":"_full_task","f":1744913872374066,"d_finished":0,"c":0,"l":1744913872391720,"d":17654},"events":[{"name":"bootstrap","f":1744913872374301,"d_finished":3651,"c":1,"l":1744913872377952,"d":3651},{"a":1744913872391197,"name":"ack","f":1744913872389609,"d_finished":1398,"c":1,"l":1744913872391007,"d":1921},{"a":1744913872391181,"name":"processing","f":1744913872379545,"d_finished":6758,"c":10,"l":1744913872391010,"d":7297},{"name":"ProduceResults","f":1744913872376387,"d_finished":3794,"c":13,"l":1744913872391636,"d":3794},{"a":1744913872391641,"name":"Finish","f":1744913872391641,"d_finished":0,"c":0,"l":1744913872391720,"d":79},{"name":"task_result","f":1744913872379564,"d_finished":5205,"c":9,"l":1744913872389386,"d":5205}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.392362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1054:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:17:52.392938Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1054:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1744913872374066,"name":"_full_task","f":1744913872374066,"d_finished":0,"c":0,"l":1744913872392421,"d":18355},"events":[{"name":"bootstrap","f":1744913872374301,"d_finished":3651,"c":1,"l":1744913872377952,"d":3651},{"a":1744913872391197,"name":"ack","f":1744913872389609,"d_finished":1398,"c":1,"l":1744913872391007,"d":2622},{"a":1744913872391181,"name":"processing","f":1744913872379545,"d_finished":6758,"c":10,"l":1744913872391010,"d":7998},{"name":"ProduceResults","f":1744913872376387,"d_finished":3794,"c":13,"l":1744913872391636,"d":3794},{"a":1744913872391641,"name":"Finish","f":1744913872391641,"d_finished":0,"c":0,"l":1744913872392421,"d":780},{"name":"task_result","f":1744913872379564,"d_finished":5205,"c":9,"l":1744913872389386,"d":5205}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-17T18:17:52.393046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:17:52.373318Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-17T18:17:52.393109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:17:52.393527Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1055:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TStorageTenantTest::LsLs >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> TStorageTenantTest::Boot [GOOD] >> 
TStorageTenantTest::CopyTableAndConcurrentSplit >> ExternalIndex::Simple [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TStorageTenantTest::DeclareAndDefine [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> TStorageTenantTest::GenericCases [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2025-04-17T18:17:51.556408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348012614163250:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:51.556505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ce1/r3tmp/tmplneivU/pdisk_1.dat 2025-04-17T18:17:51.925898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:51.952026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:51.952141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:51.965958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10008 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:17:52.086082Z node 1 :TX_PROXY DEBUG: actor# [1:7494348012614163483:2116] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:52.086225Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348016909131252:2440] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:52.087044Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348012614163507:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:52.087096Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348012614163507:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:52.087362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:52.089486Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163161:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016909131257:2441] 2025-04-17T18:17:52.089512Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163164:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016909131258:2441] 2025-04-17T18:17:52.089555Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163161:2050] Subscribe: subscriber# [1:7494348016909131257:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.089568Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163164:2053] Subscribe: subscriber# [1:7494348016909131258:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.089628Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163167:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016909131259:2441] 2025-04-17T18:17:52.089651Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163167:2056] Subscribe: subscriber# [1:7494348016909131259:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.089696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131257:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348012614163161:2050] 2025-04-17T18:17:52.089730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131258:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348012614163164:2053] 2025-04-17T18:17:52.089747Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163161:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016909131257:2441] 2025-04-17T18:17:52.089749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131259:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348012614163167:2056] 2025-04-17T18:17:52.089763Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163164:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016909131258:2441] 2025-04-17T18:17:52.089777Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163167:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016909131259:2441] 2025-04-17T18:17:52.089796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016909131254:2441] 2025-04-17T18:17:52.089839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016909131255:2441] 2025-04-17T18:17:52.089922Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348016909131253:2441][/dc-1] Set up state: owner# [1:7494348012614163507:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.091129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016909131256:2441] 2025-04-17T18:17:52.091230Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348016909131253:2441][/dc-1] Path was already updated: owner# [1:7494348012614163507:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.091324Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131257:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016909131254:2441], cookie# 1 2025-04-17T18:17:52.091339Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131258:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016909131255:2441], cookie# 1 2025-04-17T18:17:52.091348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131259:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016909131256:2441], cookie# 1 2025-04-17T18:17:52.091394Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163161:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016909131257:2441], cookie# 1 2025-04-17T18:17:52.091415Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163164:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016909131258:2441], cookie# 1 2025-04-17T18:17:52.091434Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163167:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016909131259:2441], cookie# 1 2025-04-17T18:17:52.091482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131257:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348012614163161:2050], cookie# 1 2025-04-17T18:17:52.091504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131258:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348012614163164:2053], cookie# 1 2025-04-17T18:17:52.091571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016909131259:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348012614163167:2056], cookie# 1 2025-04-17T18:17:52.091607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016909131254:2441], cookie# 1 2025-04-17T18:17:52.091658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:17:52.091687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016909131255:2441], cookie# 1 2025-04-17T18:17:52.091713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:17:52.091733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016909131256:2441], cookie# 1 2025-04-17T18:17:52.091754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016909131253:2441][/dc-1] Unexpected sync response: sender# [1:7494348016909131256:2441], cookie# 1 2025-04-17T18:17:52.185166Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348012614163507:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" 
PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:52.185563Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348012614163507:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 
kload_manager/delayed_requests Version: 0 }: sender# [1:7494348025499066632:3030] 2025-04-17T18:17:54.485052Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348025499066628:3030][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [1:7494348012614163507:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:54.485071Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163161:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348025499066633:3030] 2025-04-17T18:17:54.485086Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163161:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7494348025499066640:3031] 2025-04-17T18:17:54.485094Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163161:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-17T18:17:54.485167Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163161:2050] Subscribe: subscriber# [1:7494348025499066640:3031], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:54.485190Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163164:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348025499066634:3030] 2025-04-17T18:17:54.485196Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163167:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348025499066635:3030] 2025-04-17T18:17:54.485211Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163164:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7494348025499066641:3031] 2025-04-17T18:17:54.485220Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163164:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-17T18:17:54.485230Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163167:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7494348025499066642:3031] 2025-04-17T18:17:54.485236Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163164:2053] Subscribe: subscriber# [1:7494348025499066641:3031], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:54.485238Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163167:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-17T18:17:54.485256Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012614163167:2056] Subscribe: subscriber# [1:7494348025499066642:3031], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:54.485271Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348012614163507:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-17T18:17:54.485279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7494348025499066640:3031][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348012614163161:2050] 2025-04-17T18:17:54.485295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348025499066641:3031][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348012614163164:2053] 2025-04-17T18:17:54.485342Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348012614163507:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7494348025499066628:3030] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:17:54.485345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348025499066642:3031][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348012614163167:2056] 2025-04-17T18:17:54.485398Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348025499066629:3031][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348025499066637:3031] 2025-04-17T18:17:54.485420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348025499066629:3031][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348025499066638:3031] 2025-04-17T18:17:54.485436Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348025499066629:3031][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7494348012614163507:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:54.485450Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348012614163507:2129], cacheItem# { Subscriber: { Subscriber: [1:7494348025499066628:3030] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:54.485451Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348025499066629:3031][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348025499066639:3031] 2025-04-17T18:17:54.485471Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348025499066629:3031][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7494348012614163507:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:54.485490Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163161:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348025499066640:3031] 2025-04-17T18:17:54.485493Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348012614163507:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-17T18:17:54.485502Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163164:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348025499066641:3031] 2025-04-17T18:17:54.485515Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012614163167:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348025499066642:3031] 2025-04-17T18:17:54.485532Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348012614163507:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7494348025499066629:3031] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:17:54.485604Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348012614163507:2129], cacheItem# { Subscriber: { Subscriber: [1:7494348025499066629:3031] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:54.485687Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348025499066643:3033], recipient# [1:7494348025499066627:2334], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:54.563042Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348012614163507:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:54.563163Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry 
for TNavigate: self# [1:7494348012614163507:2129], cacheItem# { Subscriber: { Subscriber: [1:7494348016909131683:2780] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:54.563255Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348025499066645:3034], recipient# [1:7494348025499066644:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: 2025-04-17T18:13:22.217451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:103:2149], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:13:22.217642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:13:22.217911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:13:22.217944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00185a/r3tmp/tmpx3Jwvo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7904, node 1 TClient is connected to server localhost:25229 2025-04-17T18:13:23.111406Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-04-17T18:13:23.111500Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-04-17T18:13:23.120395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:13:23.169735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:13:23.179836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:13:23.179908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:13:23.179951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:13:23.180227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:13:23.215843Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:13:23.216437Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:13:23.217660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:13:23.218659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:13:23.230311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:13:23.347583Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-04-17T18:13:23.347649Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:13:23.348725Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:679:2572] 2025-04-17T18:13:23.411263Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" 
} Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:13:23.411391Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:13:23.411911Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:13:23.412061Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:13:23.412462Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:13:23.412736Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:13:23.412815Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:13:23.419787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:13:23.420932Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:13:23.422413Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:13:23.422487Z node 1 :TX_PROXY DEBUG: Actor# [1:679:2572] txid# 281474976715657 SEND to# [1:678:2571] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-17T18:13:23.505078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:753:2633];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:13:23.522694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:753:2633];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:13:23.522976Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-17T18:13:23.529919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:13:23.530096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:13:23.530348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:13:23.530476Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:13:23.530566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:13:23.530668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:13:23.530753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:13:23.530850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:13:23.530925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:13:23.531017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:13:23.531096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:13:23.531159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:753:2633];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:13:23.546126Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-17T18:13:23.546316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:13:23.546373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:13:23.546533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:13:23.546668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:13:23.546741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:13:23.546774Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:13:23.546841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:13:23.546900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:13:23.546936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:13:23.546963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:13:23.547109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:13:23.547179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute; ... oFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"9936175e-4ee3321b-55783d24-419c5917")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) ) 2025-04-17T18:17:54.008108Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.007 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '338) '('"_id" '"eb1484f9-38e4f7d7-ee2058bc-3a3c0237") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"9936175e-4ee3321b-55783d24-419c5917")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) 2025-04-17T18:17:54.020100Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.020 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:466: Register async execution for node #268 2025-04-17T18:17:54.020225Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, 
SessionId: CompileActor 2025-04-17 18:17:54.020 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {3}, callable #277 2025-04-17T18:17:54.020317Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.020 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:577: Node #277 finished execution 2025-04-17T18:17:54.020376Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.020 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:594: Node #277 created 0 trackable nodes: 2025-04-17T18:17:54.020442Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.020 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:87: Finish, output #280, status: Async 2025-04-17T18:17:54.020987Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.020 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:133: Completed async execution for node #268 2025-04-17T18:17:54.021057Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #268 2025-04-17T18:17:54.021116Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:59: Begin, root #280 2025-04-17T18:17:54.021170Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #280, status: Ok 2025-04-17T18:17:54.021224Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-04-17T18:17:54.021282Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-04-17T18:17:54.021334Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-04-17T18:17:54.021416Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-04-17T18:17:54.021473Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {4}, callable #268 2025-04-17T18:17:54.021621Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 INFO 
ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:577: Node #268 finished execution 2025-04-17T18:17:54.021688Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:594: Node #268 created 0 trackable nodes: 2025-04-17T18:17:54.021743Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-04-17T18:17:54.021795Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:577: Node #275 finished execution 2025-04-17T18:17:54.021857Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.021 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-04-17T18:17:54.022063Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:577: Node #278 finished execution 2025-04-17T18:17:54.022126Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:594: Node #278 created 0 trackable nodes: 2025-04-17T18:17:54.022204Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-04-17T18:17:54.022277Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:577: Node #279 finished execution 2025-04-17T18:17:54.022331Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:594: Node #279 created 0 trackable nodes: 2025-04-17T18:17:54.022390Z node 1 :KQP_YQL TRACE: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 TRACE ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-04-17T18:17:54.022449Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:577: Node #280 finished execution 2025-04-17T18:17:54.022500Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:594: Node #280 created 0 trackable nodes: 2025-04-17T18:17:54.022556Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:87: 
Finish, output #280, status: Ok 2025-04-17T18:17:54.022621Z node 1 :KQP_YQL INFO: TraceId: 01js2fhew3cxhnrsvkwkxx6rft, SessionId: CompileActor 2025-04-17 18:17:54.022 INFO ydb-services-ext_index-ut(pid=490602, tid=0x00007F107089BD00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #280 2025-04-17T18:17:54.041368Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-04-17T18:17:54.041430Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716232 ProcessProposeKqpTransaction 2025-04-17T18:17:54.051094Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-04-17T18:17:54.051153Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716233 ProcessProposeKqpTransaction 2025-04-17T18:17:54.220121Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:753:2633];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T18:17:54.220385Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:756:2636];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T18:17:54.220444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:761:2639];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T18:17:54.220493Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:766:2642];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-17T18:17:54.232757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:753:2633];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-17T18:17:54.232896Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:756:2636];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-04-17T18:17:54.232961Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:761:2639];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-17T18:17:54.233024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:766:2642];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2025-04-17T18:17:51.585935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348013589996853:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:51.586004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ccf/r3tmp/tmpnEqpxl/pdisk_1.dat 2025-04-17T18:17:51.988985Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:52.007282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:52.007407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:52.013329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TClient is connected to server localhost:25153 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:17:52.148425Z node 1 :TX_PROXY DEBUG: actor# [1:7494348013589997114:2140] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:52.148576Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348017884964842:2434] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:52.148736Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348013589997139:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:52.148790Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348013589997139:2153], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:52.149096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:52.151066Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996757:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348017884964847:2435] 2025-04-17T18:17:52.151069Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996760:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348017884964848:2435] 2025-04-17T18:17:52.151124Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348013589996757:2051] Subscribe: subscriber# [1:7494348017884964847:2435], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.151124Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348013589996760:2054] Subscribe: subscriber# [1:7494348017884964848:2435], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.151172Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996763:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348017884964849:2435] 2025-04-17T18:17:52.151184Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348013589996763:2057] Subscribe: subscriber# [1:7494348017884964849:2435], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.151278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964848:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348013589996760:2054] 2025-04-17T18:17:52.151324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964847:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348013589996757:2051] 2025-04-17T18:17:52.151347Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996760:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348017884964848:2435] 2025-04-17T18:17:52.151371Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996757:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348017884964847:2435] 2025-04-17T18:17:52.151379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964849:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348013589996763:2057] 2025-04-17T18:17:52.151392Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996763:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348017884964849:2435] 2025-04-17T18:17:52.151434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348017884964845:2435] 2025-04-17T18:17:52.151465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348017884964844:2435] 2025-04-17T18:17:52.151535Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348017884964843:2435][/dc-1] Set up state: owner# [1:7494348013589997139:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.151676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348017884964846:2435] 2025-04-17T18:17:52.151752Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348017884964843:2435][/dc-1] Path was already updated: owner# [1:7494348013589997139:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.151841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964847:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348017884964844:2435], cookie# 1 2025-04-17T18:17:52.151879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964848:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348017884964845:2435], cookie# 1 2025-04-17T18:17:52.151897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964849:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348017884964846:2435], cookie# 1 2025-04-17T18:17:52.151950Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996757:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348017884964847:2435], cookie# 1 2025-04-17T18:17:52.151978Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996760:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348017884964848:2435], cookie# 1 2025-04-17T18:17:52.151996Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348013589996763:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348017884964849:2435], cookie# 1 2025-04-17T18:17:52.152042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964847:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7494348013589996757:2051], cookie# 1 2025-04-17T18:17:52.152078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964848:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348013589996760:2054], cookie# 1 2025-04-17T18:17:52.152120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348017884964849:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348013589996763:2057], cookie# 1 2025-04-17T18:17:52.152174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348017884964844:2435], cookie# 1 2025-04-17T18:17:52.152205Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:17:52.152221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348017884964845:2435], cookie# 1 2025-04-17T18:17:52.152239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:17:52.152262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348017884964846:2435], cookie# 1 2025-04-17T18:17:52.152275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348017884964843:2435][/dc-1] Unexpected sync response: sender# [1:7494348017884964846:2435], cookie# 1 2025-04-17T18:17:52.206232Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348013589997139:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:52.206668Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348013589997139:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" 
PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-17T18:17:53.519396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:17:53.519501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:17:53.519523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:17:53.519561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:17:53.519676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:17:53.519697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:17:53.519801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:17:53.546594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:17:53.546643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:17:53.546701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-17T18:17:53.546740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-17T18:17:53.546771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:17:53.546778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:17:53.546795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:17:53.546806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:17:53.546832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-17T18:17:53.546840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-17T18:17:53.546870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:17:53.546878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:17:53.546896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-04-17T18:17:53.546902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-04-17T18:17:53.546926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-17T18:17:53.546945Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-17T18:17:53.546988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2025-04-17T18:17:53.547066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:17:53.547117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:17:53.547138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-17T18:17:53.547214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:17:53.551414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:17:53.594497Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348013589997139:2153], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.594656Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348013589997139:2153], cacheItem# { Subscriber: { Subscriber: [1:7494348017884965148:2661] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:53.594733Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348022179932784:2945], recipient# [1:7494348022179932783:2312], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.925156Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348018278716338:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.925444Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# 
[3:7494348018278716338:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.925505Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7494348018278716338:2107], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-04-17T18:17:53.925633Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022573683954:2307][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:53.926068Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022573683954:2307][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7494348022573683955:2307] 2025-04-17T18:17:53.926111Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022573683954:2307][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7494348022573683956:2307] 2025-04-17T18:17:53.926131Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7494348022573683954:2307][/dc-1/USER_0] Set up state: owner# [3:7494348018278716338:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:53.926220Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022573683954:2307][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7494348022573683957:2307] 2025-04-17T18:17:53.926227Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494348018278716338:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-17T18:17:53.926248Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7494348022573683954:2307][/dc-1/USER_0] Ignore empty state: owner# [3:7494348018278716338:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:53.926314Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494348018278716338:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7494348022573683954:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:17:53.926399Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348018278716338:2107], cacheItem# { Subscriber: { Subscriber: [3:7494348022573683954:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:53.926469Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348022573683961:2308], recipient# [3:7494348022573683953:2306], result# { 
ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.926527Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348018278716338:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.926629Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348022573683962:2309], recipient# [3:7494348022573683952:2318], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.926770Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-17T18:17:51.583825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348012505196253:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:51.583897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cc3/r3tmp/tmpCEgmhQ/pdisk_1.dat 2025-04-17T18:17:51.950232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:51.973840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:51.973952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:51.978089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17768 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:17:52.088010Z node 1 :TX_PROXY DEBUG: actor# [1:7494348012505196512:2134] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:52.088077Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348016800164251:2437] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:52.088215Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348012505196535:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:52.088264Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348012505196535:2147], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:52.088432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:52.090144Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196161:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016800164256:2438] 2025-04-17T18:17:52.090156Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196164:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016800164257:2438] 2025-04-17T18:17:52.090210Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012505196164:2054] Subscribe: subscriber# [1:7494348016800164257:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.090211Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012505196161:2051] Subscribe: subscriber# [1:7494348016800164256:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.090269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196167:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016800164258:2438] 2025-04-17T18:17:52.090284Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012505196167:2057] Subscribe: subscriber# [1:7494348016800164258:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.090301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164256:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348012505196161:2051] 2025-04-17T18:17:52.090333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164257:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348012505196164:2054] 2025-04-17T18:17:52.090333Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196161:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016800164256:2438] 2025-04-17T18:17:52.090347Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196164:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016800164257:2438] 2025-04-17T18:17:52.090372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164258:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7494348012505196167:2057] 2025-04-17T18:17:52.090398Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196167:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016800164258:2438] 2025-04-17T18:17:52.090412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016800164253:2438] 2025-04-17T18:17:52.090449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016800164254:2438] 2025-04-17T18:17:52.090495Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348016800164252:2438][/dc-1] Set up state: owner# [1:7494348012505196535:2147], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.091123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016800164255:2438] 2025-04-17T18:17:52.091182Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348016800164252:2438][/dc-1] Path was already updated: owner# [1:7494348012505196535:2147], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.091246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164256:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016800164253:2438], cookie# 1 2025-04-17T18:17:52.091267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164257:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016800164254:2438], cookie# 1 2025-04-17T18:17:52.091281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164258:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016800164255:2438], cookie# 1 2025-04-17T18:17:52.091311Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196161:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016800164256:2438], cookie# 1 2025-04-17T18:17:52.091332Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196164:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016800164257:2438], cookie# 1 2025-04-17T18:17:52.091347Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196167:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016800164258:2438], cookie# 1 2025-04-17T18:17:52.091380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164256:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348012505196161:2051], cookie# 1 2025-04-17T18:17:52.091399Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494348016800164257:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348012505196164:2054], cookie# 1 2025-04-17T18:17:52.091428Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016800164258:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348012505196167:2057], cookie# 1 2025-04-17T18:17:52.091458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016800164253:2438], cookie# 1 2025-04-17T18:17:52.091508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:17:52.091522Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016800164254:2438], cookie# 1 2025-04-17T18:17:52.091539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:17:52.091555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016800164255:2438], cookie# 1 2025-04-17T18:17:52.091567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016800164252:2438][/dc-1] Unexpected sync response: sender# [1:7494348016800164255:2438], cookie# 1 2025-04-17T18:17:52.125687Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348012505196535:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:52.125949Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348012505196535:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... paction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913873500 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... 
(TRUNCATED) 2025-04-17T18:17:53.566247Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T18:17:53.567086Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196161:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7494348018471935397:2098] 2025-04-17T18:17:53.567137Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012505196161:2051] Unsubscribe: subscriber# [3:7494348018471935397:2098], path# /dc-1/USER_0 2025-04-17T18:17:53.567188Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196164:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7494348018471935409:2098] 2025-04-17T18:17:53.567220Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012505196164:2054] Unsubscribe: subscriber# [3:7494348018471935409:2098], path# /dc-1/USER_0 2025-04-17T18:17:53.567273Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348012505196167:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7494348018471935416:2098] 2025-04-17T18:17:53.567291Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348012505196167:2057] Unsubscribe: subscriber# [3:7494348018471935416:2098], path# /dc-1/USER_0 2025-04-17T18:17:53.567741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:17:53.587625Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348012505196535:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.587804Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348012505196535:2147], cacheItem# { Subscriber: { Subscriber: [1:7494348016800164561:2665] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:53.587945Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348021095132093:2855], recipient# [1:7494348021095132092:2310], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.838990Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348018471935461:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown 
DomainInfo }] } 2025-04-17T18:17:53.839204Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348018471935461:2111], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.839268Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7494348018471935461:2111], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-04-17T18:17:53.839443Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022766903163:2356][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:53.839892Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022766903163:2356][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7494348022766903164:2356] 2025-04-17T18:17:53.839936Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022766903163:2356][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7494348022766903165:2356] 2025-04-17T18:17:53.839973Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7494348022766903163:2356][/dc-1/USER_0] Set up state: owner# [3:7494348018471935461:2111], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:53.840007Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7494348022766903163:2356][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7494348022766903166:2356] 2025-04-17T18:17:53.840031Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7494348022766903163:2356][/dc-1/USER_0] Ignore empty state: owner# [3:7494348018471935461:2111], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:53.840055Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7494348018471935461:2111], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-17T18:17:53.840125Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7494348018471935461:2111], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7494348022766903163:2356] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:17:53.840259Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348018471935461:2111], cacheItem# { Subscriber: { Subscriber: [3:7494348022766903163:2356] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:53.840345Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348022766903170:2357], recipient# [3:7494348022766903162:2355], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.840439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348018471935461:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.840546Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348022766903171:2358], recipient# [3:7494348022766903161:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:53.840634Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2025-04-17T18:17:51.569021Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348010792171277:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:51.571749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cd0/r3tmp/tmpTXHu2c/pdisk_1.dat 2025-04-17T18:17:51.945172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:51.951225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:51.951327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:51.962240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30553 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:17:52.115180Z node 1 :TX_PROXY DEBUG: actor# [1:7494348010792171508:2116] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:52.115267Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348015087139270:2436] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:52.115403Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348010792171547:2136], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:52.115446Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348010792171547:2136], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:52.115709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:52.117698Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171186:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348015087139275:2437] 2025-04-17T18:17:52.117711Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171189:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348015087139276:2437] 2025-04-17T18:17:52.117777Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348010792171189:2053] Subscribe: subscriber# [1:7494348015087139276:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.117777Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348010792171186:2050] Subscribe: subscriber# [1:7494348015087139275:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.117843Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171192:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348015087139277:2437] 2025-04-17T18:17:52.117866Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348010792171192:2056] Subscribe: subscriber# [1:7494348015087139277:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.117866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139275:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348010792171186:2050] 2025-04-17T18:17:52.117898Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139276:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348010792171189:2053] 2025-04-17T18:17:52.117910Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171186:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348015087139275:2437] 2025-04-17T18:17:52.117924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139277:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348010792171192:2056] 2025-04-17T18:17:52.117929Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171189:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7494348015087139276:2437] 2025-04-17T18:17:52.117942Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171192:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348015087139277:2437] 2025-04-17T18:17:52.117985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348015087139272:2437] 2025-04-17T18:17:52.118026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348015087139273:2437] 2025-04-17T18:17:52.118110Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348015087139271:2437][/dc-1] Set up state: owner# [1:7494348010792171547:2136], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.118245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348015087139274:2437] 2025-04-17T18:17:52.118314Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348015087139271:2437][/dc-1] Path was already updated: owner# [1:7494348010792171547:2136], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.118382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139275:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015087139272:2437], cookie# 1 2025-04-17T18:17:52.118405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139276:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015087139273:2437], cookie# 1 2025-04-17T18:17:52.118420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139277:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015087139274:2437], cookie# 1 2025-04-17T18:17:52.118457Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171186:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015087139275:2437], cookie# 1 2025-04-17T18:17:52.118483Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171189:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015087139276:2437], cookie# 1 2025-04-17T18:17:52.118509Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171192:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015087139277:2437], cookie# 1 2025-04-17T18:17:52.118540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139275:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348010792171186:2050], cookie# 1 2025-04-17T18:17:52.118568Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494348015087139276:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348010792171189:2053], cookie# 1 2025-04-17T18:17:52.118579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015087139277:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348010792171192:2056], cookie# 1 2025-04-17T18:17:52.118645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348015087139272:2437], cookie# 1 2025-04-17T18:17:52.118681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:17:52.118698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348015087139273:2437], cookie# 1 2025-04-17T18:17:52.118724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:17:52.118747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348015087139274:2437], cookie# 1 2025-04-17T18:17:52.118762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015087139271:2437][/dc-1] Unexpected sync response: sender# [1:7494348015087139274:2437], cookie# 1 2025-04-17T18:17:52.165657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348010792171547:2136], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:52.166052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348010792171547:2136], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... le NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348010792171192:2056] 2025-04-17T18:17:54.065991Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171192:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348023677074800:3139] 2025-04-17T18:17:54.066014Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171186:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348023677074795:3138] 2025-04-17T18:17:54.066031Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171186:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348023677074797:3139] 2025-04-17T18:17:54.066053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348023677074788:3139][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348023677074792:3139] 2025-04-17T18:17:54.066055Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171189:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348023677074796:3138] 2025-04-17T18:17:54.066068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010792171189:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7494348023677074799:3139] 2025-04-17T18:17:54.066084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348023677074788:3139][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348023677074793:3139] 2025-04-17T18:17:54.066111Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348010792171547:2136], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-17T18:17:54.066113Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348023677074788:3139][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7494348010792171547:2136], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:54.066141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348023677074788:3139][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7494348023677074794:3139] 2025-04-17T18:17:54.066166Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348023677074788:3139][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7494348010792171547:2136], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:54.066229Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348010792171547:2136], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7494348023677074787:3138] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:17:54.066312Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348010792171547:2136], cacheItem# { Subscriber: { Subscriber: [1:7494348023677074787:3138] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:54.066373Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348010792171547:2136], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-17T18:17:54.066449Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348010792171547:2136], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7494348023677074788:3139] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:17:54.066549Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348010792171547:2136], cacheItem# { Subscriber: { Subscriber: [1:7494348023677074788:3139] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:54.066733Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348023677074801:3140], recipient# [1:7494348023677074786:2337], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:54.575469Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348010792171547:2136], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:54.575607Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348010792171547:2136], cacheItem# { Subscriber: { Subscriber: [1:7494348015087139577:2661] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:54.575703Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348023677074812:3141], recipient# [1:7494348023677074811:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.064985Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348010792171547:2136], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.065085Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348010792171547:2136], cacheItem# { Subscriber: { Subscriber: [1:7494348023677074771:3136] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:55.065146Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348027972042119:3145], recipient# [1:7494348027972042118:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.576405Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348010792171547:2136], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.576539Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348010792171547:2136], cacheItem# { Subscriber: { Subscriber: [1:7494348015087139577:2661] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:55.576626Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348027972042130:3146], recipient# [1:7494348027972042129:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.7%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet >> TFlatTest::SplitEmptyTwice [GOOD] |93.8%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> TStorageTenantTest::LsLs [GOOD] >> KqpYql::NonStrictDml ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2025-04-17T18:17:51.629089Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348010924678642:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:51.629155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cf4/r3tmp/tmp49ztx4/pdisk_1.dat 2025-04-17T18:17:51.924667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:51.926832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:51.933610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:51.962415Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22551 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:17:52.125575Z node 1 :TX_PROXY DEBUG: actor# [1:7494348010924678865:2093] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:52.125664Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348015219646679:2440] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:52.125792Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348010924678954:2138], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:52.125916Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348010924678954:2138], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:52.126111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:52.128242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678592:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348015219646684:2441] 2025-04-17T18:17:52.128247Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678595:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348015219646685:2441] 2025-04-17T18:17:52.128285Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348010924678595:2052] Subscribe: subscriber# [1:7494348015219646685:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.128290Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348010924678592:2049] Subscribe: subscriber# [1:7494348015219646684:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.128323Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678598:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348015219646686:2441] 2025-04-17T18:17:52.128364Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348010924678598:2055] Subscribe: subscriber# [1:7494348015219646686:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.128381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646685:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348010924678595:2052] 2025-04-17T18:17:52.128405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646684:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348010924678592:2049] 2025-04-17T18:17:52.128421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646686:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348010924678598:2055] 2025-04-17T18:17:52.128423Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678595:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348015219646685:2441] 2025-04-17T18:17:52.128437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678592:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7494348015219646684:2441] 2025-04-17T18:17:52.128559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348015219646682:2441] 2025-04-17T18:17:52.128604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348015219646681:2441] 2025-04-17T18:17:52.128621Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678598:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348015219646686:2441] 2025-04-17T18:17:52.128652Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348015219646680:2441][/dc-1] Set up state: owner# [1:7494348010924678954:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.128775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348015219646683:2441] 2025-04-17T18:17:52.128820Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348015219646680:2441][/dc-1] Path was already updated: owner# [1:7494348010924678954:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.128860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646684:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015219646681:2441], cookie# 1 2025-04-17T18:17:52.128875Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646685:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015219646682:2441], cookie# 1 2025-04-17T18:17:52.128891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646686:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015219646683:2441], cookie# 1 2025-04-17T18:17:52.128934Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678592:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015219646684:2441], cookie# 1 2025-04-17T18:17:52.128982Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678595:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015219646685:2441], cookie# 1 2025-04-17T18:17:52.129001Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348010924678598:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348015219646686:2441], cookie# 1 2025-04-17T18:17:52.129026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646684:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348010924678592:2049], cookie# 1 2025-04-17T18:17:52.129035Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494348015219646685:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348010924678595:2052], cookie# 1 2025-04-17T18:17:52.129078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348015219646686:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348010924678598:2055], cookie# 1 2025-04-17T18:17:52.129101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348015219646681:2441], cookie# 1 2025-04-17T18:17:52.129129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-17T18:17:52.129149Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348015219646682:2441], cookie# 1 2025-04-17T18:17:52.129167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-17T18:17:52.129219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348015219646683:2441], cookie# 1 2025-04-17T18:17:52.129231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348015219646680:2441][/dc-1] Unexpected sync response: sender# [1:7494348015219646683:2441], cookie# 1 2025-04-17T18:17:52.172677Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348010924678954:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:52.173063Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348010924678954:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... elf# [4:7494348031924911084:2755], recipient# [4:7494348031924911074:2360], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:56.273549Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:17:57.274051Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494348027629942734:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.274228Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494348036219878382:2756], recipient# [4:7494348036219878381:2361], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.274363Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:17:57.636654Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494348027629942734:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.636767Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494348027629942734:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.636866Z 
node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494348036219878396:2757], recipient# [4:7494348036219878394:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.636889Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494348036219878397:2758], recipient# [4:7494348036219878395:2372], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.637497Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7494348036219878394:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:57.637647Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:17:57.727200Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494348027629942734:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.727342Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494348036219878398:2759], recipient# [4:7494348036219878394:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.727487Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7494348036219878394:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:57.865474Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494348027629942734:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.865715Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494348036219878399:2760], recipient# [4:7494348036219878394:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.865942Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7494348036219878394:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:58.129116Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494348027629942734:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:58.129303Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494348040514845696:2761], recipient# [4:7494348036219878394:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:58.129476Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7494348036219878394:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:58.275069Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7494348027629942734:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:58.275263Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7494348040514845698:2762], recipient# [4:7494348040514845697:2373], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:58.275411Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2025-04-17T18:17:54.775205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348025486631972:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:54.775304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:17:54.799536Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348026379670613:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:54.800997Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cbe/r3tmp/tmpNDE5dO/pdisk_1.dat 2025-04-17T18:17:55.089094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:55.089226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:55.091351Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:17:55.092331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:55.120036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:55.138127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:55.138279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:55.140664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11940 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:17:55.285324Z node 1 :TX_PROXY DEBUG: actor# [1:7494348025486632194:2141] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:55.285399Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348029781599950:2460] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:55.285543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348025486632246:2165], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.285617Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348025486632246:2165], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:55.285807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:55.287770Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631834:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348029781599955:2461] 2025-04-17T18:17:55.287810Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631837:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348029781599956:2461] 2025-04-17T18:17:55.287831Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348025486631834:2052] Subscribe: subscriber# [1:7494348029781599955:2461], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:55.287845Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348025486631837:2055] Subscribe: subscriber# [1:7494348029781599956:2461], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:55.287878Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631840:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348029781599957:2461] 2025-04-17T18:17:55.287894Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348025486631840:2058] Subscribe: subscriber# [1:7494348029781599957:2461], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:55.287904Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599955:2461][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348025486631834:2052] 2025-04-17T18:17:55.287919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599956:2461][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348025486631837:2055] 2025-04-17T18:17:55.287946Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631834:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348029781599955:2461] 2025-04-17T18:17:55.287953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599957:2461][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348025486631840:2058] 2025-04-17T18:17:55.287967Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631837:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7494348029781599956:2461] 2025-04-17T18:17:55.287979Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631840:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348029781599957:2461] 2025-04-17T18:17:55.287987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348029781599952:2461] 2025-04-17T18:17:55.288015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348029781599953:2461] 2025-04-17T18:17:55.288083Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348029781599951:2461][/dc-1] Set up state: owner# [1:7494348025486632246:2165], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:55.288223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348029781599954:2461] 2025-04-17T18:17:55.288269Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348029781599951:2461][/dc-1] Path was already updated: owner# [1:7494348025486632246:2165], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:55.288347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599955:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348029781599952:2461], cookie# 1 2025-04-17T18:17:55.288372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599956:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348029781599953:2461], cookie# 1 2025-04-17T18:17:55.288388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599957:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348029781599954:2461], cookie# 1 2025-04-17T18:17:55.288431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631834:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348029781599955:2461], cookie# 1 2025-04-17T18:17:55.288455Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631837:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348029781599956:2461], cookie# 1 2025-04-17T18:17:55.288501Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348025486631840:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348029781599957:2461], cookie# 1 2025-04-17T18:17:55.288524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599955:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348025486631834:2052], cookie# 1 2025-04-17T18:17:55.288532Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494348029781599956:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348025486631837:2055], cookie# 1 2025-04-17T18:17:55.288541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348029781599957:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348025486631840:2058], cookie# 1 2025-04-17T18:17:55.288567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348029781599952:2461], cookie# 1 2025-04-17T18:17:55.288593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-17T18:17:55.288604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348029781599953:2461], cookie# 1 2025-04-17T18:17:55.288615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-17T18:17:55.288653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348029781599954:2461], cookie# 1 2025-04-17T18:17:55.288671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348029781599951:2461][/dc-1] Unexpected sync response: sender# [1:7494348029781599954:2461], cookie# 1 2025-04-17T18:17:55.343013Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348025486632246:2165], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSi ...
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.803293Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348030674638171:2111], recipient# [2:7494348030674638161:2294], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.803397Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:17:56.803976Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348026379670850:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:56.804108Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348034969605472:2113], recipient# [2:7494348034969605471:2295], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:56.804244Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:17:57.597256Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348026379670850:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.597439Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348039264572773:2114], recipient# [2:7494348039264572770:2297], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.597878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:17:57.598901Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348026379670850:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.599087Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348039264572785:2115], recipient# [2:7494348039264572782:2306], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.600520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494348039264572782:2306], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:57.654907Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348026379670850:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.655088Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348039264572786:2116], recipient# [2:7494348039264572782:2306], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.655265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494348039264572782:2306], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:57.804899Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348026379670850:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.805019Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348039264572790:2117], recipient# [2:7494348039264572789:2307], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.805154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:17:57.849428Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348026379670850:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.849622Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348039264572791:2118], recipient# [2:7494348039264572782:2306], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:57.849794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494348039264572782:2306], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:58.076292Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348026379670850:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:58.076419Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348043559540088:2119], recipient# [2:7494348039264572782:2306], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:58.076561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494348039264572782:2306], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |93.8%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> KqpYql::NonStrictDml [GOOD] >> KqpYql::JsonNumberPrecision ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2025-04-17T18:17:51.588237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348011779396717:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:51.588413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cdb/r3tmp/tmpA0wP95/pdisk_1.dat 2025-04-17T18:17:51.937440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:51.954002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:51.954124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:51.972542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64445 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:17:52.124371Z node 1 :TX_PROXY DEBUG: actor# [1:7494348011779396945:2135] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:52.124476Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348016074364684:2437] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:52.124602Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348011779397031:2173], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:52.124657Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348011779397031:2173], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:52.124864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:52.126941Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396593:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016074364689:2438] 2025-04-17T18:17:52.126950Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396596:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016074364690:2438] 2025-04-17T18:17:52.127010Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348011779396596:2054] Subscribe: subscriber# [1:7494348016074364690:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.127012Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348011779396593:2051] Subscribe: subscriber# [1:7494348016074364689:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.127067Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396599:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348016074364691:2438] 2025-04-17T18:17:52.127087Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348011779396599:2057] Subscribe: subscriber# [1:7494348016074364691:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:52.127100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364690:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348011779396596:2054] 2025-04-17T18:17:52.127129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364689:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348011779396593:2051] 2025-04-17T18:17:52.127133Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396596:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016074364690:2438] 2025-04-17T18:17:52.127148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364691:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348011779396599:2057] 2025-04-17T18:17:52.127157Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396593:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7494348016074364689:2438] 2025-04-17T18:17:52.127176Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396599:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348016074364691:2438] 2025-04-17T18:17:52.127229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016074364687:2438] 2025-04-17T18:17:52.127281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016074364686:2438] 2025-04-17T18:17:52.127347Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348016074364685:2438][/dc-1] Set up state: owner# [1:7494348011779397031:2173], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.127494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348016074364688:2438] 2025-04-17T18:17:52.127559Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348016074364685:2438][/dc-1] Path was already updated: owner# [1:7494348011779397031:2173], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:52.127610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364689:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016074364686:2438], cookie# 1 2025-04-17T18:17:52.127631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364690:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016074364687:2438], cookie# 1 2025-04-17T18:17:52.127655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364691:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016074364688:2438], cookie# 1 2025-04-17T18:17:52.127696Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396593:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016074364689:2438], cookie# 1 2025-04-17T18:17:52.127719Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396596:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016074364690:2438], cookie# 1 2025-04-17T18:17:52.127745Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348011779396599:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348016074364691:2438], cookie# 1 2025-04-17T18:17:52.127779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364689:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348011779396593:2051], cookie# 1 2025-04-17T18:17:52.127795Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494348016074364690:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348011779396596:2054], cookie# 1 2025-04-17T18:17:52.127809Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348016074364691:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348011779396599:2057], cookie# 1 2025-04-17T18:17:52.127857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016074364686:2438], cookie# 1 2025-04-17T18:17:52.127924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:17:52.127944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016074364687:2438], cookie# 1 2025-04-17T18:17:52.127965Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:17:52.128011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348016074364688:2438], cookie# 1 2025-04-17T18:17:52.128033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348016074364685:2438][/dc-1] Unexpected sync response: sender# [1:7494348016074364688:2438], cookie# 1 2025-04-17T18:17:52.173644Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348011779397031:2173], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:52.174042Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348011779397031:2173], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... criber: [3:7494348028558447793:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:59.331747Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348045738330459:6812], recipient# [3:7494348045738330458:5552], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.060330Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348015673545802:2244], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.060460Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348015673545802:2244], cacheItem# { Subscriber: { Subscriber: [3:7494348028558447793:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:00.060567Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348050033297757:6813], recipient# [3:7494348050033297756:5553], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.078287Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348015673545802:2244], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: 
false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.078439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348015673545802:2244], cacheItem# { Subscriber: { Subscriber: [3:7494348028558447793:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:00.078511Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348015673545802:2244], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.078592Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348015673545802:2244], cacheItem# { Subscriber: { Subscriber: [3:7494348028558447790:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:00.078603Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348050033297760:6814], recipient# [3:7494348050033297759:5555], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.078655Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348050033297761:6815], recipient# [3:7494348050033297758:5554], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.083991Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348033367882452:2238], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.084089Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494348033367882452:2238], cacheItem# { Subscriber: { Subscriber: [2:7494348046252784454:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:00.084186Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348050547751870:2346], recipient# [2:7494348050547751869:2572], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.092020Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348033367882452:2238], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.092112Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494348033367882452:2238], cacheItem# { Subscriber: { Subscriber: [2:7494348046252784454:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:00.092202Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348050547751872:2347], recipient# [2:7494348050547751871:2573], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.103765Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494348033367882452:2238], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:00.103879Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494348033367882452:2238], cacheItem# { Subscriber: { Subscriber: [2:7494348046252784483:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:00.103964Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494348050547751874:2348], recipient# [2:7494348050547751873:2574], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2025-04-17T18:17:55.329836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348027541870332:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:55.330013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001cb3/r3tmp/tmpygLiSI/pdisk_1.dat 2025-04-17T18:17:55.677155Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:55.712723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:55.712832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:55.715541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7357 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:17:55.798478Z node 1 :TX_PROXY DEBUG: actor# [1:7494348027541870562:2140] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:55.798537Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348027541870999:2441] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:55.798729Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348027541870587:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:55.798779Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348027541870587:2154], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:55.799032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:55.800676Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870207:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348027541871005:2442] 2025-04-17T18:17:55.800679Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870204:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348027541871004:2442] 2025-04-17T18:17:55.800724Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348027541870204:2051] Subscribe: subscriber# [1:7494348027541871004:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:55.800729Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348027541870207:2054] Subscribe: subscriber# [1:7494348027541871005:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:55.800786Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870210:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348027541871006:2442] 2025-04-17T18:17:55.800804Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348027541870210:2057] Subscribe: subscriber# [1:7494348027541871006:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:55.800812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871005:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348027541870207:2054] 2025-04-17T18:17:55.800836Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871004:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348027541870204:2051] 2025-04-17T18:17:55.800859Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870207:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348027541871005:2442] 2025-04-17T18:17:55.800862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871006:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348027541870210:2057] 2025-04-17T18:17:55.800877Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870210:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7494348027541871006:2442] 2025-04-17T18:17:55.800902Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870204:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348027541871004:2442] 2025-04-17T18:17:55.800904Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348027541871002:2442] 2025-04-17T18:17:55.800932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348027541871001:2442] 2025-04-17T18:17:55.800990Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348027541871000:2442][/dc-1] Set up state: owner# [1:7494348027541870587:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:55.801133Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348027541871003:2442] 2025-04-17T18:17:55.801191Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348027541871000:2442][/dc-1] Path was already updated: owner# [1:7494348027541870587:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:55.801244Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871004:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348027541871001:2442], cookie# 1 2025-04-17T18:17:55.801259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871005:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348027541871002:2442], cookie# 1 2025-04-17T18:17:55.801284Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871006:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348027541871003:2442], cookie# 1 2025-04-17T18:17:55.801339Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870204:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348027541871004:2442], cookie# 1 2025-04-17T18:17:55.801365Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870207:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348027541871005:2442], cookie# 1 2025-04-17T18:17:55.801412Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870210:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348027541871006:2442], cookie# 1 2025-04-17T18:17:55.801452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871004:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348027541870204:2051], cookie# 1 2025-04-17T18:17:55.801484Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494348027541871005:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348027541870207:2054], cookie# 1 2025-04-17T18:17:55.801500Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348027541871006:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348027541870210:2057], cookie# 1 2025-04-17T18:17:55.801544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348027541871001:2442], cookie# 1 2025-04-17T18:17:55.801584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:17:55.801622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348027541871002:2442], cookie# 1 2025-04-17T18:17:55.801644Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:17:55.801670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348027541871003:2442], cookie# 1 2025-04-17T18:17:55.801692Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348027541871000:2442][/dc-1] Unexpected sync response: sender# [1:7494348027541871003:2442], cookie# 1 2025-04-17T18:17:55.839804Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348027541870587:2154], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:55.840137Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348027541870587:2154], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-17T18:17:58.256144Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348040426773502:2876], recipient# [1:7494348040426773501:2875], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:17:58.256183Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348040426773501:2875] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:17:58.256243Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348040426773501:2875] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2025-04-17T18:17:58.257187Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348040426773501:2875] Handle TEvDescribeSchemeResult Forward to# [1:7494348040426773500:2874] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913878250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744913878250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" 
Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... (TRUNCATED) 2025-04-17T18:17:58.289242Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-17T18:17:58.289923Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870210:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7494348037050315031:2098] 2025-04-17T18:17:58.289955Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348027541870210:2057] Unsubscribe: subscriber# [3:7494348037050315031:2098], path# /dc-1/USER_0 2025-04-17T18:17:58.290019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T18:17:58.290272Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870204:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7494348037050315028:2098] 2025-04-17T18:17:58.290292Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348027541870204:2051] Unsubscribe: subscriber# [3:7494348037050315028:2098], path# /dc-1/USER_0 2025-04-17T18:17:58.290321Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348027541870207:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7494348037050315030:2098] 2025-04-17T18:17:58.290342Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348027541870207:2054] Unsubscribe: subscriber# [3:7494348037050315030:2098], path# /dc-1/USER_0 2025-04-17T18:17:58.333409Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348027541870587:2154], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:58.333524Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494348027541870587:2154], cacheItem# { Subscriber: { Subscriber: [1:7494348031836838613:2674] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:58.333605Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494348040426773521:2891], recipient# [1:7494348040426773520:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:59.028633Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348037050315077:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:59.028756Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348037050315077:2116], cacheItem# { Subscriber: { Subscriber: [3:7494348041345282541:2229] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:17:59.028839Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348045640250098:2371], recipient# [3:7494348045640250097:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2025-04-17T18:15:11.394812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347323155437418:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:15:11.395525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001d91/r3tmp/tmpqigQY6/pdisk_1.dat 2025-04-17T18:15:11.725065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:15:11.725182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:15:11.727455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:15:11.769810Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27204 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:15:12.016189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.032712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.058321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:15:12.252905Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:15:12.264569Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:15:12.312371Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-17T18:15:12.323471Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913712171 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744913712171 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-04-17T18:15:14.838515Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.110, eph 1} end=0, 2 blobs 234r (max 234), put Spent{time=0.016s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (67856 0 0)b }, ecr=1.000 2025-04-17T18:15:14.862581Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.118, eph 1} end=0, 2 blobs 759r (max 759), put Spent{time=0.029s,wait=0.009s,interrupts=1} Part{ 1 pk, lobs 0 +0, (49813 0 0)b }, ecr=1.000 2025-04-17T18:15:15.042924Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.195, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.047s,wait=0.043s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-04-17T18:15:15.045719Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.189, eph 2} end=0, 2 blobs 487r (max 488), put Spent{time=0.062s,wait=0.032s,interrupts=1} Part{ 1 pk, lobs 0 +0, (140962 0 0)b }, ecr=1.000 2025-04-17T18:15:15.058351Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.198, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.045s,wait=0.027s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-04-17T18:15:15.086784Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.199, eph 1} end=0, 2 blobs 501r (max 501), put Spent{time=0.062s,wait=0.043s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31966 0 0)b }, ecr=1.000 2025-04-17T18:15:15.111775Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.521, eph 1} end=0, 2 blobs 3r (max 3), put Spent{time=0.067s,wait=0.066s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-04-17T18:15:15.150759Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.200, eph 1} end=0, 2 blobs 1500r (max 1500), put Spent{time=0.151s,wait=0.099s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103070 0 0)b }, ecr=1.000 2025-04-17T18:15:15.164455Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.542, eph 1} end=0, 2 blobs 10001r (max 10001), put Spent{time=0.138s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-04-17T18:15:15.166272Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.204, eph 2} end=0, 2 blobs 1512r (max 1515), put Spent{time=0.141s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (99136 0 0)b }, ecr=1.000 2025-04-17T18:15:15.227086Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.299, eph 3} end=0, 2 blobs 738r (max 739), put Spent{time=0.019s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (213449 0 0)b }, ecr=1.000 2025-04-17T18:15:15.249780Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.311, eph 3} end=0, 2 blobs 2268r (max 2268), put Spent{time=0.025s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (148654 0 0)b }, ecr=1.000 2025-04-17T18:15:15.392273Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1034, eph 2} end=0, 2 blobs 3r (max 5), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-04-17T18:15:15.416733Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.412, eph 4} end=0, 2 blobs 989r (max 990), put Spent{time=0.029s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (285981 0 0)b }, ecr=1.000 2025-04-17T18:15:15.454728Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.418, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.041s,wait=0.031s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-04-17T18:15:15.472116Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.421, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.056s,wait=0.047s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 
2025-04-17T18:15:15.527436Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.419, eph 2} end=0, 2 blobs 3000r (max 3000), put Spent{time=0.113s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (205952 0 0)b }, ecr=1.000 2025-04-17T18:15:15.529290Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.422, eph 2} end=0, 2 blobs 1005r (max 1005), put Spent{time=0.113s,wait=0.012s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63918 0 0)b }, ecr=1.000 2025-04-17T18:15:15.537694Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.424, eph 4} end=0, 2 blobs 3024r (max 3027), put Spent{time=0.121s,wait=0.008s,interrupts=1} Part{ 1 pk, lobs 0 +0, (198172 0 0)b }, ecr=1.000 2025-04-17T18:15:15.598677Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1050, eph 2} end=0, 2 blobs 10001r (max 10501), put Spent{time=0.183s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-04-17T18:15:15.647928Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.512, eph 5} end=0, 2 blobs 1243r (max 1244), put Spent{time=0.041s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (359329 0 0)b }, ecr=1.000 2025-04-17T18:15:15.667201Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.516, eph 5} end=0, 2 blobs 3786r (max 3789), put Spent{time=0.052s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (248080 0 0)b }, ecr=1.000 2025-04-17T18:15:15.741760Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1540, eph 3} end=0, 2 blobs 3r (max 5), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-04-17T18:15:15.770155Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.590, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.012s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-04-17T18:15:15.809560Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.594, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.052s,wait=0.050s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-04-17T18:15:15.810891Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.586, eph 6} end=0, 2 blobs 1494r (max 1495), put Spent{time=0.070s,wait=0.042s,interrupts=1} Part{ 1 pk, lobs 0 +0, (431861 0 0)b }, ecr=1.000 2025-04-17T18:15:15.875865Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.595, eph 3} end=0, 2 blobs 1506r (max 1506), put Spent{time=0.118s,wait=0.094s,interrupts=1} Part{ 1 pk, lobs 0 +0, (95731 0 0)b }, ecr=1.000 2025-04-17T18:15:15.877513Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.588, eph 3} end=0, 2 blobs 4500r (max 4500), put Spent{time=0.120s,wait=0.085 ... 
ARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:17:54.636321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-17T18:17:54.636364Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715690:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:17:54.636891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 9 2025-04-17T18:17:54.637079Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-04-17T18:17:54.637100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-04-17T18:17:54.637129Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-04-17T18:17:54.637141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-04-17T18:17:54.637160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715690, ready parts: 1/1, is published: true 2025-04-17T18:17:54.637211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7494348024091197187:2417] message: TxId: 281474976715690 2025-04-17T18:17:54.637239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-04-17T18:17:54.637268Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715690:0 2025-04-17T18:17:54.637286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715690:0 2025-04-17T18:17:54.637486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 2025-04-17T18:17:54.637887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:17:54.638146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-17T18:17:54.638718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:17:54.638752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:17:54.639753Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-17T18:17:54.655134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494348019796229170 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-04-17T18:17:54.655215Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:17:54.655545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, 
message: Source { RawX1: 7494348024091196971 RawX2: 4503608217307467 } TabletId: 72075186224037895 State: 4 2025-04-17T18:17:54.655581Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:17:54.655762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494348024091196781 RawX2: 4503608217307447 } TabletId: 72075186224037891 State: 4 2025-04-17T18:17:54.655806Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:17:54.656035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494348024091196967 RawX2: 4503608217307466 } TabletId: 72075186224037893 State: 4 2025-04-17T18:17:54.656064Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:17:54.656258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494348024091196780 RawX2: 4503608217307446 } TabletId: 72075186224037890 State: 4 2025-04-17T18:17:54.656295Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:17:54.656512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494348024091196966 RawX2: 4503608217307465 } TabletId: 72075186224037894 State: 4 2025-04-17T18:17:54.656545Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:17:54.657282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:17:54.657427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:17:54.657510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:17:54.657589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:17:54.657659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:17:54.657742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:17:54.659578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:17:54.659899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-17T18:17:54.660152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at 
schemeshard: 72057594046644480 2025-04-17T18:17:54.660336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-17T18:17:54.660563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:17:54.660753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:17:54.660951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-04-17T18:17:54.661887Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:17:54.667800Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-17T18:17:54.667849Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-04-17T18:17:54.667877Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-04-17T18:17:54.672846Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:17:54.672925Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-04-17T18:17:54.704564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:17:54.704994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:17:54.705246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:17:54.705445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-17T18:17:54.705623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:17:54.705853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:17:54.705879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:17:54.705939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-04-17T18:17:54.705969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-04-17T18:17:54.706021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:17:54.706042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:17:54.706224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 
2025-04-17T18:17:54.706243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-17T18:17:54.706313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:17:54.706351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:17:54.706406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:17:54.706532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:17:54.706548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:17:54.706618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-17T18:17:54.706644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-17T18:17:54.706797Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-04-17T18:17:58.579586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348041532198046:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:58.579715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001caf/r3tmp/tmpD9yZ0h/pdisk_1.dat 2025-04-17T18:17:58.863636Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:58.937164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:58.937265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:58.940238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17247 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T18:17:59.074311Z node 1 :TX_PROXY DEBUG: actor# [1:7494348041532198272:2140] Handle TEvNavigate describe path dc-1 2025-04-17T18:17:59.074415Z node 1 :TX_PROXY DEBUG: Actor# [1:7494348045827166009:2444] HANDLE EvNavigateScheme dc-1 2025-04-17T18:17:59.074533Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494348041532198295:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:17:59.074584Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494348041532198295:2153], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T18:17:59.074834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T18:17:59.076648Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197914:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348045827166014:2445] 2025-04-17T18:17:59.076688Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197917:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348045827166015:2445] 2025-04-17T18:17:59.076714Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348041532197914:2051] Subscribe: subscriber# [1:7494348045827166014:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:59.076738Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348041532197917:2054] Subscribe: subscriber# [1:7494348045827166015:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:59.076767Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197920:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494348045827166016:2445] 2025-04-17T18:17:59.076784Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494348041532197920:2057] Subscribe: subscriber# [1:7494348045827166016:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T18:17:59.076873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166014:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348041532197914:2051] 2025-04-17T18:17:59.076917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166015:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348041532197917:2054] 2025-04-17T18:17:59.076931Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197914:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348045827166014:2445] 2025-04-17T18:17:59.076945Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197917:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348045827166015:2445] 2025-04-17T18:17:59.077034Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166016:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7494348041532197920:2057] 2025-04-17T18:17:59.077086Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197920:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494348045827166016:2445] 2025-04-17T18:17:59.077111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348045827166011:2445] 2025-04-17T18:17:59.077140Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348045827166012:2445] 2025-04-17T18:17:59.077209Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494348045827166010:2445][/dc-1] Set up state: owner# [1:7494348041532198295:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:59.077350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494348045827166013:2445] 2025-04-17T18:17:59.077404Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494348045827166010:2445][/dc-1] Path was already updated: owner# [1:7494348041532198295:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T18:17:59.077458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166014:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348045827166011:2445], cookie# 1 2025-04-17T18:17:59.077477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166015:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348045827166012:2445], cookie# 1 2025-04-17T18:17:59.077492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166016:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348045827166013:2445], cookie# 1 2025-04-17T18:17:59.077530Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197914:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348045827166014:2445], cookie# 1 2025-04-17T18:17:59.077556Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197917:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348045827166015:2445], cookie# 1 2025-04-17T18:17:59.077572Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494348041532197920:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494348045827166016:2445], cookie# 1 2025-04-17T18:17:59.077639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166014:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348041532197914:2051], cookie# 1 2025-04-17T18:17:59.077663Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7494348045827166015:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348041532197917:2054], cookie# 1 2025-04-17T18:17:59.077676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494348045827166016:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348041532197920:2057], cookie# 1 2025-04-17T18:17:59.077742Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348045827166011:2445], cookie# 1 2025-04-17T18:17:59.077783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:17:59.077799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348045827166012:2445], cookie# 1 2025-04-17T18:17:59.077818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:17:59.077847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494348045827166013:2445], cookie# 1 2025-04-17T18:17:59.077870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494348045827166010:2445][/dc-1] Unexpected sync response: sender# [1:7494348045827166013:2445], cookie# 1 2025-04-17T18:17:59.138905Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494348041532198295:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T18:17:59.139335Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494348041532198295:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... : KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:02.089667Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085536:2239], recipient# [3:7494348060982085525:2235], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.089696Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085537:2240], recipient# [3:7494348060982085526:2236], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.089773Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348048097183619:2232], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.089845Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348048097183619:2232], cacheItem# { Subscriber: { Subscriber: [3:7494348060982085527:2237] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:02.089886Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348048097183619:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090122Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085540:2242], recipient# [3:7494348060982085522:2546], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090159Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348048097183619:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090157Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085539:2241], recipient# [3:7494348060982085528:2238], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090239Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348048097183619:2232], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090266Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085541:2243], recipient# [3:7494348060982085519:2544], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090397Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494348048097183619:2232], cacheItem# { Subscriber: { Subscriber: [3:7494348060982085527:2237] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:18:02.090439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348048097183619:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090464Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085542:2244], recipient# [3:7494348060982085524:2234], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090497Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085543:2245], recipient# [3:7494348060982085523:2547], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090522Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348048097183619:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.090675Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085544:2246], recipient# [3:7494348060982085517:2542], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.110017Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7494348060982085522:2546], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:18:02.110347Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:18:02.110396Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:18:02.110524Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:18:02.191672Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494348048097183619:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.191880Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494348060982085547:2247], recipient# [3:7494348060982085522:2546], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:18:02.192050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7494348060982085522:2546], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] >> TStorageServiceTest::ShouldRegister >> TCheckpointStorageTest::ShouldCreateCheckpoint >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks >> TStateStorageTest::ShouldSaveGetOldSmallState >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-04-17T18:17:48.257285Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:48.333869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:48.356007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:48.356280Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:48.363943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:48.364143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:48.364366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:48.364481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:48.364611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:48.364718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:48.364823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:48.364946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:48.365078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:48.365195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:48.365310Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:48.365413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:48.388062Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:48.388201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:48.388246Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:48.388420Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:48.388536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:48.388594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:48.388627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:48.388702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:48.388749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:48.388775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:48.388792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:48.388899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:48.388944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:48.388968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:48.388988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:48.389054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:48.389090Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:48.389116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:48.389135Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:48.389208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:48.389232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:48.389249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:48.389289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:48.389317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:48.389333Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:48.389624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-17T18:17:48.389692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-17T18:17:48.389752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-17T18:17:48.389814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-17T18:17:48.389939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:48.389985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:48.390013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:48.390138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:48.390169Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:48.390213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:48.390297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:48.390322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:48.390338Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:48.390465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:48.390490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:48.390507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:48.390597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:48.390622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:48.390665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=49; 2025-04-17T18:18:05.054001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:18:05.054092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.054129Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-17T18:18:05.054197Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:18:05.055727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:18:05.055894Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.055929Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:18:05.056005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=10; 2025-04-17T18:18:05.056047Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=80;num_rows=10;batch_columns=timestamp; 2025-04-17T18:18:05.056169Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:3546:5558];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-17T18:18:05.056296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.056397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.056468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.056869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:18:05.056950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.057023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.057049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:3551:5563] finished for tablet 9437184 2025-04-17T18:18:05.057422Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:3546:5558];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.014},{"events":["f_ack"],"t":0.016},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1744913885039285,"name":"_full_task","f":1744913885039285,"d_finished":0,"c":0,"l":1744913885057092,"d":17807},"events":[{"name":"bootstrap","f":1744913885039589,"d_finished":2033,"c":1,"l":1744913885041622,"d":2033},{"a":1744913885056849,"name":"ack","f":1744913885055698,"d_finished":790,"c":1,"l":1744913885056488,"d":1033},{"a":1744913885056830,"name":"processing","f":1744913885042789,"d_finished":3007,"c":8,"l":1744913885056490,"d":3269},{"name":"ProduceResults","f":1744913885040636,"d_finished":1959,"c":11,"l":1744913885057040,"d":1959},{"a":1744913885057042,"name":"Finish","f":1744913885057042,"d_finished":0,"c":0,"l":1744913885057092,"d":50},{"name":"task_result","f":1744913885042808,"d_finished":2078,"c":7,"l":1744913885054256,"d":2078}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.057480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:3546:5558];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:18:05.057890Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:3546:5558];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.014},{"events":["f_ack"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1744913885039285,"name":"_full_task","f":1744913885039285,"d_finished":0,"c":0,"l":1744913885057507,"d":18222},"events":[{"name":"bootstrap","f":1744913885039589,"d_finished":2033,"c":1,"l":1744913885041622,"d":2033},{"a":1744913885056849,"name":"ack","f":1744913885055698,"d_finished":790,"c":1,"l":1744913885056488,"d":1448},{"a":1744913885056830,"name":"processing","f":1744913885042789,"d_finished":3007,"c":8,"l":1744913885056490,"d":3684},{"name":"ProduceResults","f":1744913885040636,"d_finished":1959,"c":11,"l":1744913885057040,"d":1959},{"a":1744913885057042,"name":"Finish","f":1744913885057042,"d_finished":0,"c":0,"l":1744913885057507,"d":465},{"name":"task_result","f":1744913885042808,"d_finished":2078,"c":7,"l":1744913885054256,"d":2078}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-17T18:18:05.057966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:18:05.038831Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2812;selected_rows=0; 2025-04-17T18:18:05.058001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:18:05.058143Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:3551:5563];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-04-17T18:16:26.508411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347648537734699:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.508557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:16:26.569001Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494347646639192282:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.569047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013af/r3tmp/tmpkXbakm/pdisk_1.dat 2025-04-17T18:16:27.545970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:27.547173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:27.547261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:27.550034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:27.550100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:27.551750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:27.555597Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:27.556345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:27.568045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:27.586319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 20834, node 1 2025-04-17T18:16:27.692080Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:16:27.734881Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:16:27.846301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:27.846329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:27.846334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:27.846453Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12334 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:28.199673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:30.589240Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:30.596667Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2M0MzY2YWUtYzA5YTBkNTktZWM5Zjc1YjItMzU0ZGJiOTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2M0MzY2YWUtYzA5YTBkNTktZWM5Zjc1YjItMzU0ZGJiOTI= 2025-04-17T18:16:30.597220Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665717604593:2333], Start check tables existence, number paths: 2 2025-04-17T18:16:30.600573Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2M0MzY2YWUtYzA5YTBkNTktZWM5Zjc1YjItMzU0ZGJiOTI=, ActorId: [1:7494347665717604594:2334], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:30.601055Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-04-17T18:16:30.601606Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:30.601623Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:30.616211Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665717604593:2333], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:30.616281Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665717604593:2333], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:30.616312Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665717604593:2333], Successfully finished 2025-04-17T18:16:30.616473Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:30.626792Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665717604620:2525], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:30.631094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 
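The records above show the workload service bootstrapping: TCleanupTablesActor probes the .metadata/workload_manager tables, then TPoolCreatorActor proposes ESchemeOpCreateResourcePool for sample_pool_id. A minimal sketch of issuing equivalent DDL from an external client follows, assuming a locally reachable endpoint and the v3 ydb Python SDK; the endpoint, database, and pool option names are illustrative assumptions, not taken from this test run.

    import ydb  # assumes the v3 ydb Python SDK

    # Hypothetical connection parameters; the test above runs an in-process server.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    # Roughly the DDL that TPoolCreatorActor effects for sample_pool_id.
    # Option names follow YDB's workload-manager DDL and may differ by
    # server version; treat them as assumptions.
    pool.execute_with_retries("""
        CREATE RESOURCE POOL sample_pool_id WITH (
            CONCURRENT_QUERY_LIMIT = 10,
            QUEUE_SIZE = 100
        )
    """)

    pool.stop()
    driver.stop()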
2025-04-17T18:16:30.635073Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665717604620:2525], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-17T18:16:30.635252Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665717604620:2525], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:30.657229Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:30.657363Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494347663819061761:2306], Start check tables existence, number paths: 2 2025-04-17T18:16:30.660192Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:30.660217Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:30.664535Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-04-17T18:16:30.668349Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494347663819061761:2306], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:30.668401Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494347663819061761:2306], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:30.668437Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7494347663819061761:2306], Successfully finished 2025-04-17T18:16:30.668492Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:30.655117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665717604620:2525], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:16:30.734963Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665717604620:2525], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:30.740834Z node 1 :TX_PROXY ERROR: Actor# [1:7494347665717604694:2577] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:30.740915Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665717604620:2525], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:30.743461Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzAxNTYzZDMtOWNlMWJkMWUtNzdkYjU3YjItYzRlOGY3NjY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzAxNTYzZDMtOWNlMWJkMWUtNzdkYjU3YjItYzRlOGY3NjY= 2025-04-17T18:16:30.743791Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:16:30.743812Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:30.743856Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzAxNTYzZDMtOWNlMWJkMWUtNzdkYjU3YjItYzRlOGY3NjY=, ActorId: [1:7494347665717604704:2336], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:30.744115Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzAxNTYzZDMtOWNlMWJkMWUtNzdkYjU3YjItYzRlOGY3NjY=, ActorId: [1:7494347665717604704:2336], ActorState: ReadyState, TraceId: 01js2fexrr853a5h1whwb9s0tm, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494347665717604703:2585] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-17T18:16:30.744166Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7494347665717604704:2336], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MzAxNTYzZDMtOWNlMWJkMWUtNzdkYjU3YjItYzRlOGY3NjY= 2025-04-17T18:16:30.744212Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665717604706:2337], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:30.744279Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347665717604707:2338], Database: /Root, Start database fetching 2025-04-17T18:16:30.746484Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347665717604707:2338], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-17T18:16:30.746564Z node 1 :KQP_ ... 
8897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:18:00.383169Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:18:00.384687Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14325, node 12 2025-04-17T18:18:00.436089Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:18:00.436124Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:18:00.436137Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:18:00.436320Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:18:00.816357Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
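The TClient::Ls request/response pairs in these records are the C++ test client dumping the scheme tree as a flattened PathDescription protobuf (note the single .sys child under Root). A rough client-side equivalent, sketched with the Python SDK's scheme client under the same assumed local endpoint as above:

    import ydb  # assumes the v3 ydb Python SDK

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical endpoint
    driver.wait(timeout=5)

    # Counterpart of the Ls "Self" block: one entry with name, type, owner.
    entry = driver.scheme_client.describe_path("/Root")
    print(entry.name, entry.type, entry.owner)

    # Counterpart of the "Children" block; the log shows a single ".sys" child.
    listing = driver.scheme_client.list_directory("/Root")
    for child in listing.children:
        print(child.name, child.type)

    driver.stop()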
2025-04-17T18:18:04.511445Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:18:04.511827Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:18:04.511861Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:18:04.511904Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-17T18:18:04.511984Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7494348069231217681:2330], Start check tables existence, number paths: 2 2025-04-17T18:18:04.515874Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI= 2025-04-17T18:18:04.517492Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7494348069231217681:2330], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:18:04.517610Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7494348069231217681:2330], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:18:04.517694Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7494348069231217681:2330], Successfully finished 2025-04-17T18:18:04.517831Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:18:04.517998Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:18:04.518635Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494348069231217703:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:18:04.523195Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:18:04.524746Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494348069231217703:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-17T18:18:04.524928Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494348069231217703:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:18:04.534940Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494348069231217703:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:18:04.604941Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494348069231217703:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:18:04.608805Z node 12 :TX_PROXY ERROR: Actor# [12:7494348069231217754:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:18:04.608902Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7494348069231217703:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:18:04.609297Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-17T18:18:04.609329Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-17T18:18:04.609423Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494348069231217761:2332], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:18:04.611284Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494348069231217761:2332], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:18:04.611363Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-17T18:18:04.611403Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-17T18:18:04.611719Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7494348069231217770:2333], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-17T18:18:04.613042Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7494348069231217770:2333], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-17T18:18:04.621421Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-17T18:18:04.621466Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:18:04.621495Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: ReadyState, TraceId: 01js2fhsec3kw2w5cw4yfgd071, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-17T18:18:04.621571Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494348069231217782:2335], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-17T18:18:04.623158Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7494348069231217782:2335], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:04.623244Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:04.639593Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:18:04.641985Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: ExecuteState, TraceId: 01js2fhsec3kw2w5cw4yfgd071, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [12:7494348069231217791:2331] WorkloadServiceCleanup: 0 2025-04-17T18:18:04.642529Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7494348069231217770:2333], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-17T18:18:04.644273Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: CleanupState, TraceId: 01js2fhsec3kw2w5cw4yfgd071, EndCleanup, isFinal: 0 2025-04-17T18:18:04.644335Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: CleanupState, TraceId: 01js2fhsec3kw2w5cw4yfgd071, Sent query response back to proxy, proxyRequestId: 3, proxyId: [12:7494348052051348078:2270] 2025-04-17T18:18:04.653388Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:18:04.653455Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:18:04.653486Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:18:04.653518Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:18:04.653606Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MjYzZWUxMDMtOTE3MTZlOGMtMzk4NTgyMTAtMTE5OTI0NDI=, ActorId: [12:7494348069231217701:2331], ActorState: unknown state, Session actor destroyed >> TStateStorageTest::ShouldIssueErrorOnNonExistentState >> KqpYql::JsonNumberPrecision [GOOD] >> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints >> TStorageServiceTest::ShouldRegister [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD] >> TStorageServiceTest::ShouldCreateCheckpoint ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 26741, MsgBus: 10255 2025-04-17T18:17:59.289016Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348047393302124:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:17:59.289156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001411/r3tmp/tmpv9Ptfv/pdisk_1.dat 2025-04-17T18:17:59.544588Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26741, node 1 2025-04-17T18:17:59.619126Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:59.619159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:59.619176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:59.619318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:17:59.660637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:59.660740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:59.662374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10255 TClient is connected to server localhost:10255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:18:00.061768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:00.092071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:00.203126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:00.375914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
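KqpYql::JsonNumberPrecision exercises how numeric values survive a JSON round-trip. The underlying IEEE-754 point can be illustrated in plain Python rather than YQL (this is an editor's illustration of the concept named by the test, not the test body itself):

    import json

    # binary64 doubles round-trip exactly through JSON text...
    x = 0.1 + 0.2
    assert json.loads(json.dumps(x)) == x

    # ...but any reader that maps JSON numbers onto double collapses integers
    # above 2**53; 2**53 + 1 is the first integer that cannot survive coercion.
    big = 2 ** 53 + 1
    assert float(big) == 2 ** 53               # precision lost on coercion to double
    assert json.loads(json.dumps(big)) == big  # Python keeps it as int, so lossless here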
2025-04-17T18:18:00.440925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:01.669596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348055983238496:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:01.669746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:01.957909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:18:01.984356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:18:02.013293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:18:02.040593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:18:02.072751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:18:02.114350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:18:02.180942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:18:02.229708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348060278206455:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:02.229799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:02.229869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348060278206460:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:02.232512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:18:02.240139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348060278206462:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:18:02.295426Z node 1 :TX_PROXY ERROR: Actor# [1:7494348060278206516:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9833, MsgBus: 14712 2025-04-17T18:18:04.329192Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348069812790739:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:18:04.329549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001411/r3tmp/tmp44bKOn/pdisk_1.dat 2025-04-17T18:18:04.440885Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9833, node 2 2025-04-17T18:18:04.476715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:18:04.476825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:18:04.478752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:18:04.503922Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:18:04.503958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:18:04.503965Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:18:04.504084Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14712 TClient is connected to server localhost:14712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:18:04.931330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:04.948405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
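The sequence just above (the create transaction completes, a "doublechecking" retry is scheduled, the retry gets "path exist, request accepts it" from TX_PROXY, and the actor still reports "Pool successfully created") is an idempotent create: an already-existing pool is accepted as success. A generic sketch of that pattern, with stand-in exception types rather than anything from the YDB SDK:

    import time

    class AlreadyExists(Exception):
        """Stand-in for the 'path exist, request accepts it' outcome."""

    class TransientError(Exception):
        """Stand-in for a retryable scheme/tx error."""

    def create_idempotent(create_fn, retries=5, backoff=0.1):
        """Run create_fn until it either creates the object or finds it present."""
        for attempt in range(retries):
            try:
                create_fn()
                return "created"
            except AlreadyExists:
                return "already-present"            # same terminal state, accept as success
            except TransientError:
                time.sleep(backoff * 2 ** attempt)  # doublecheck after a backoff
        raise TimeoutError("create did not converge")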
2025-04-17T18:18:05.001273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:05.206203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:05.281637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:06.884091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348078402727111:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:06.884154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:06.928689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:18:06.956084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:18:06.984096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:18:07.007432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:18:07.032350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:18:07.058748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:18:07.085570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:18:07.158945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348082697695066:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:07.159015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348082697695071:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:07.159045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:07.161295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:18:07.169503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348082697695073:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:18:07.222516Z node 2 :TX_PROXY ERROR: Actor# [2:7494348082697695126:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState |93.8%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint >> TStateStorageTest::ShouldLoadLastSnapshot >> TStorageServiceTest::ShouldCreateCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementBigState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2025-04-17T18:16:26.296662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347645872160064:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.299394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013c9/r3tmp/tmpejAoF3/pdisk_1.dat 2025-04-17T18:16:26.821600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:26.830085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:26.830158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:26.833171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6480, node 1 2025-04-17T18:16:26.971731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:26.971746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:26.971750Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:26.971833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5013 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:27.651758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:27.678653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:29.632096Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:29.632196Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658757062450:2328], Start check tables existence, number paths: 2 2025-04-17T18:16:29.632429Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:29.632460Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:29.637442Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw== 2025-04-17T18:16:29.640313Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:29.652843Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658757062450:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:29.652925Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658757062450:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:29.652955Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347658757062450:2328], Successfully finished 2025-04-17T18:16:29.653142Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-17T18:16:29.653176Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:29.664313Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658757062474:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.668253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:29.669279Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658757062474:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:29.669462Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658757062474:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:29.678650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658757062474:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:29.778580Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658757062474:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:29.783063Z node 1 :TX_PROXY ERROR: Actor# [1:7494347658757062525:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:29.783232Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347658757062474:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:29.783640Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-17T18:16:29.783668Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-17T18:16:29.783741Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347658757062532:2331], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:29.785068Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347658757062532:2331], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:29.785111Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-17T18:16:29.785136Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-17T18:16:29.785365Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347658757062541:2332], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-17T18:16:29.786489Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347658757062541:2332], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-17T18:16:29.795816Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-17T18:16:29.795842Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:29.795988Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: ReadyState, TraceId: 01js2fewv32cnt36g12ag3ht5p, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-17T18:16:29.796106Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347658757062553:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-17T18:16:29.807096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347658757062553:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:29.807262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:30.009918Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347658757062541:2332], DatabaseId: Root, PoolId: sample_pool_id, Got delete notification 2025-04-17T18:16:30.016265Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: ExecuteState, TraceId: 01js2fewv32cnt36g12ag3ht5p, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7494347658757062554:2330] WorkloadServiceCleanup: 0 2025-04-17T18:16:30.017832Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: CleanupState, TraceId: 01js2fewv32cnt36g12ag3ht5p, EndCleanup, isFinal: 0 2025-04-17T18:16:30.017926Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: CleanupState, TraceId: 01js2fewv32cnt36g12ag3ht5p, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7494347645872160169:2277] 2025-04-17T18:16:30.036386Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:16:30.036467Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:16:30.036490Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVlNTcyNDUtMTAzNzI0MDgtOTEwZDY1NDQtODIzOWQzYw==, ActorId: [1:7494347658757062453:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-0 ... 
ode_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, Sending to Executer TraceId: 0 8 2025-04-17T18:18:09.253857Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, Created new KQP executer: [10:7494348087596790162:2537] isRollback: 0 2025-04-17T18:18:09.263488Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-17T18:18:09.263665Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, txInfo Status: Committed Kind: ReadWrite TotalDuration: 10.474 ServerDuration: 10.058 QueriesCount: 2 2025-04-17T18:18:09.263792Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:18:09.263843Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:18:09.263870Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, EndCleanup, isFinal: 0 2025-04-17T18:18:09.263928Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxz40ab19hztnzhz9h7q, Sent query response back to proxy, proxyRequestId: 28, proxyId: [10:7494347993107508273:2268] 2025-04-17T18:18:09.264263Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, TxId: 2025-04-17T18:18:09.264430Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= 
CurrentUtcTimestamp(); 2025-04-17T18:18:09.265087Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ReadyState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, received request, proxyRequestId: 29 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [10:7494348087596790187:2543] database: /Root databaseId: /Root pool id: default 2025-04-17T18:18:09.265139Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ReadyState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, request placed into pool from cache: default 2025-04-17T18:18:09.265857Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, ExecutePhyTx, tx: 0x000050C000213898 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-17T18:18:09.265930Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, Sending to Executer TraceId: 0 8 2025-04-17T18:18:09.266024Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, Created new KQP executer: [10:7494348087596790190:2537] isRollback: 0 2025-04-17T18:18:09.271499Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-17T18:18:09.271606Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, ExecutePhyTx, tx: 0x000050C0001AD298 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-17T18:18:09.272796Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-17T18:18:09.273014Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, txInfo Status: Committed Kind: ReadOnly TotalDuration: 7.305 ServerDuration: 7.174 QueriesCount: 
2 2025-04-17T18:18:09.273153Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-17T18:18:09.273234Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:18:09.273273Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, EndCleanup, isFinal: 0 2025-04-17T18:18:09.273369Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ExecuteState, TraceId: 01js2fhxzhc58n5cbde7tj57xn, Sent query response back to proxy, proxyRequestId: 29, proxyId: [10:7494347993107508273:2268] 2025-04-17T18:18:09.273813Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, TxId: 2025-04-17T18:18:09.273946Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, TxId: 2025-04-17T18:18:09.274134Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:18:09.274163Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:18:09.274235Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:18:09.274262Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:18:09.274325Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZjE5ZWY1YTQtMWM2ZGYzMmUtNzU3ZWNkMmUtNjcwNDMyOWI=, ActorId: [10:7494348087596790157:2537], ActorState: unknown state, Session actor destroyed 2025-04-17T18:18:09.284841Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=YTg2OGExM2ItNTQ1ZGExOTUtZWQ4YzljYTktNjM4NjE5MjM=, ActorId: [10:7494348010287377890:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:18:09.284933Z node 10 :KQP_SESSION INFO: 
SessionId: ydb://session/3?node_id=10&id=YTg2OGExM2ItNTQ1ZGExOTUtZWQ4YzljYTktNjM4NjE5MjM=, ActorId: [10:7494348010287377890:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-04-17T18:18:09.284969Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTg2OGExM2ItNTQ1ZGExOTUtZWQ4YzljYTktNjM4NjE5MjM=, ActorId: [10:7494348010287377890:2330], ActorState: ReadyState, EndCleanup, isFinal: 1
2025-04-17T18:18:09.285013Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTg2OGExM2ItNTQ1ZGExOTUtZWQ4YzljYTktNjM4NjE5MjM=, ActorId: [10:7494348010287377890:2330], ActorState: unknown state, Cleanup temp tables: 0
2025-04-17T18:18:09.285095Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTg2OGExM2ItNTQ1ZGExOTUtZWQ4YzljYTktNjM4NjE5MjM=, ActorId: [10:7494348010287377890:2330], ActorState: unknown state, Session actor destroyed
>> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD]
>> TStateStorageTest::ShouldLoadLastSnapshot [GOOD]
>> TStateStorageTest::ShouldNotGetNonExistendSnaphotState
>> TStateStorageTest::ShouldNotGetNonExistendState
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TStateStorageTest::ShouldNotGetNonExistendState [GOOD]
>> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo
>> TCheckpointStorageTest::ShouldRegisterCoordinator
>> TStorageServiceTest::ShouldNotRegisterPrevGeneration
>> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD]
>> TCheckpointStorageTest::ShouldGetCheckpointsEmpty
>> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD]
>> TStateStorageTest::ShouldLoadIncrementSnapshot
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD]
>> TStorageServiceTest::ShouldSaveState
>> TStorageServiceTest::ShouldGetCheckpoints [GOOD]
>> TStorageServiceTest::ShouldAbortCheckpoint
>> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD]
>> TCheckpointStorageTest::ShouldGetCoordinators
>> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD]
>> TCheckpointStorageTest::ShouldDeleteGraph
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets]
>> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD]
>> TCheckpointStorageTest::ShouldGetCoordinators [GOOD]
>> TCheckpointStorageTest::ShouldMarkCheckpointsGc
>> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TStorageServiceTest::ShouldSaveState [GOOD]
>> TStorageServiceTest::ShouldUseGc
>> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD]
>> TGcTest::ShouldRemovePreviousCheckpoints
>> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointTwice
>> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets]
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TStorageServiceTest::ShouldAbortCheckpoint [GOOD]
>> TStorageServiceTest::ShouldGetState
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest
>> TStateStorageTest::ShouldNotGetNonExistendState [GOOD]
>> TCheckpointStorageTest::ShouldDeleteGraph [GOOD]
>> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints
>> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD]
>> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints
>> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation
>> TAsyncIndexTests::OnlineBuild
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest
>> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TStateStorageTest::ShouldDeleteNoCheckpoints
>> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged
>> TStorageServiceTest::ShouldGetState [GOOD]
>> TAsyncIndexTests::OnlineBuild [GOOD]
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TStorageServiceTest::ShouldUseGc [GOOD]
>> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD]
>> TStateStorageTest::ShouldDeleteNoCheckpoints2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-17T18:18:17.362092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:18:17.362199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:18:17.362249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:18:17.362293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-17T18:18:17.362337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-17T18:18:17.362379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-17T18:18:17.362428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:18:17.362484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-17T18:18:17.362765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-17T18:18:17.426362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console
configs 2025-04-17T18:18:17.426426Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:17.440215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:17.440544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:17.440673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:17.447645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:17.447821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:17.448413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:17.448621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:17.451426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:17.452560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:17.452626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:17.452825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:17.452883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:17.452927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:17.453044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.459344Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:17.565112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:17.565304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.565461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:17.565743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:17.565779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.567606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:17.567726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-17T18:18:17.567881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.567916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:17.567950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:17.567986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:17.569716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.569770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:17.569823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:17.571461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.571502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.571554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:17.571612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:17.575166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:17.576957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:17.577167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:17.578095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:17.578242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:17.578285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:17.578534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:17.578599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:17.578777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:17.578849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:17.580737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:17.580782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:17.580932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:17.580968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:17.581340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:17.581378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:17.581476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:17.581506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:17.581544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:17.581571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:17.581601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:17.581639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:17.581669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:17.581694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:17.581768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:17.581799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:17.581827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:17.583447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:17.583554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:17.583591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2025-04-17T18:18:18.075680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-04-17T18:18:18.075760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-17T18:18:18.075833Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2025-04-17T18:18:18.075925Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-17T18:18:18.076106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-17T18:18:18.076164Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-04-17T18:18:18.076225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:18:18.076328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:18.079071Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:18:18.079161Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 
281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:18:18.079423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-17T18:18:18.079572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:18:18.079719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-17T18:18:18.079748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-17T18:18:18.079779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-17T18:18:18.080045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:18.080144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:18.080196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-17T18:18:18.080233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-17T18:18:18.082013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-17T18:18:18.082053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-17T18:18:18.082116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:18:18.082132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:18:18.082155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-17T18:18:18.082196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:18:18.082221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-17T18:18:18.082260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:124:2150] message: TxId: 281474976710760 2025-04-17T18:18:18.082295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-17T18:18:18.082319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976710760:0 2025-04-17T18:18:18.082344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-17T18:18:18.082400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-17T18:18:18.083941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-17T18:18:18.083991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-17T18:18:18.084052Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-17T18:18:18.084105Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:18:18.085508Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:18:18.085603Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-17T18:18:18.085662Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-17T18:18:18.087165Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-17T18:18:18.087245Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-04-17T18:18:18.087287Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1
2025-04-17T18:18:18.087406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-17T18:18:18.087442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:474:2435]
TestWaitNotification: OK eventTxId 102
>> TColumnShardTestReadWrite::CompactionGC [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation
>> TGcTest::ShouldRemovePreviousCheckpoints [GOOD]
>> TGcTest::ShouldIgnoreIncrementCheckpoint
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD]
>> TStateStorageTest::ShouldDeleteCheckpoints
>> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD]
Test command err:
2025-04-17T18:17:05.787279Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-17T18:17:05.986565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:17:06.012184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-17T18:17:06.012470Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-17T18:17:06.020880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:17:06.021105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:17:06.021322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:17:06.021432Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:06.021559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:06.021712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:06.021840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:06.021943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:06.022058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:06.022168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.022296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:06.022429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:06.050015Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:06.050165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:06.050243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:06.050452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.051143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:06.051241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:06.051292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:06.051396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-17T18:17:06.051490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:06.051539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:06.051571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:06.051770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:06.051853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:06.051895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:06.051927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:06.052009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:06.052073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:06.052123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:06.052175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:06.052283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:06.052328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:06.052355Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:06.052400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:06.052437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:06.052469Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:06.052880Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=73; 2025-04-17T18:17:06.052976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-17T18:17:06.053645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=614; 2025-04-17T18:17:06.053790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=95; 2025-04-17T18:17:06.053989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:06.054045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:06.054097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:06.054304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:06.054343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.054369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:06.054549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:06.054598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:06.054628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:06.054798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:06.054838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:06.054865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:06.054975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:06.055012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:06.055052Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ob DS:0:[9437184:2:18:2:1:1198112:0] 2025-04-17T18:17:58.192650Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:0:1768808:0] 2025-04-17T18:17:58.192673Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:24:2:0:3009176:0] 2025-04-17T18:17:58.192696Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:33:2:0:3042040:0] 2025-04-17T18:17:58.192722Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:42:2:0:3009176:0] 2025-04-17T18:17:58.192752Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:32:4:0:1768768:0] 2025-04-17T18:17:58.192784Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:25:3:0:1768768:0] 2025-04-17T18:17:58.192818Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:29:4:0:1768768:0] 2025-04-17T18:17:58.192866Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:36:2:0:1768768:0] 2025-04-17T18:17:58.192891Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:1:1198112:0] 2025-04-17T18:17:58.192923Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:21:2:0:3042040:0] 2025-04-17T18:17:58.192991Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=46;external_task_id=4a384108-1bb811f0-a86cd883-39a5c238;mem=55728;cpu=0; 2025-04-17T18:17:58.193234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:768;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1,;}; 2025-04-17T18:17:58.193514Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:45:255:1:574112:0]; 2025-04-17T18:17:58.193598Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:45:255:2:592928:0]; GC for channel 4 deletes blobs: GC for channel 3 deletes blobs: GC for channel 2 deletes blobs: [9437184:2:87:2:0:5870200:0] Added portions: 155 156 2025-04-17T18:17:58.217918Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:17:58.218171Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[276] (CS::INDEXATION) apply at tablet 9437184 2025-04-17T18:17:58.221416Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:90 Blob count: 2 2025-04-17T18:17:58.221557Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=97044340;raw_bytes=151102351;count=41;records=1537497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:17:58.246296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-17T18:17:58.246380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;fline=with_appended.cpp:65;portions=77,78,;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1; 2025-04-17T18:17:58.246622Z 
node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::49f73eb0-1bb811f0-b4f1cccd-bbfe27a1; 2025-04-17T18:17:58.246678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:17:58.246741Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:58.246798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-04-17T18:17:58.246842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T18:17:58.246903Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:17:58.246946Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:58.246974Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:17:58.247023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.394000s; 2025-04-17T18:17:58.247058Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:17:58.247486Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:89:4:0:5870200:0] 2025-04-17T18:17:58.247522Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:90 Blob count: 2 2025-04-17T18:17:58.248438Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=49f73eb0-1bb811f0-b4f1cccd-bbfe27a1;mem=5963210;cpu=0; 2025-04-17T18:17:58.249326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:17:58.249881Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:15:255:2:592928:0]; 2025-04-17T18:17:58.250028Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:18:255:1:675968:0]; 2025-04-17T18:17:58.250249Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:12:255:2:592928:0]; 2025-04-17T18:17:58.250403Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:21:255:1:675968:0]; 2025-04-17T18:17:58.250569Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:12:255:1:574112:0]; 
2025-04-17T18:17:58.250735Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:15:255:1:574112:0]; 2025-04-17T18:17:58.250864Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:6:255:1:653840:0]; 2025-04-17T18:17:58.251009Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:9:255:1:653840:0]; 2025-04-17T18:17:58.251139Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:19:255:1:653840:0]; 2025-04-17T18:17:58.251304Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:3:255:1:675968:0]; 2025-04-17T18:17:58.251462Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:10:255:2:592928:0]; 2025-04-17T18:17:58.251601Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:1:255:3:197600:0]; 2025-04-17T18:17:58.251658Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:22:255:1:574112:0]; 2025-04-17T18:17:58.254509Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:13:255:1:675968:0]; 2025-04-17T18:17:58.254639Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:16:255:1:675968:0]; 2025-04-17T18:17:58.254754Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:7:255:1:574112:0]; 2025-04-17T18:17:58.254847Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:7:255:2:592928:0]; 2025-04-17T18:17:58.254963Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:17:255:2:592928:0]; 2025-04-17T18:17:58.255059Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:1:255:2:395328:0]; 2025-04-17T18:17:58.255108Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:10:255:1:574112:0]; 2025-04-17T18:17:58.255209Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:1:255:1:574128:0]; 2025-04-17T18:17:58.255315Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:20:255:2:592928:0]; 2025-04-17T18:17:58.255412Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:4:255:1:675968:0]; 2025-04-17T18:17:58.255542Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:2:255:3:197600:0]; 2025-04-17T18:17:58.255579Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:5:255:2:592928:0]; 2025-04-17T18:17:58.255671Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:17:255:1:574112:0]; 2025-04-17T18:17:58.255772Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:20:255:1:574112:0]; 2025-04-17T18:17:58.255868Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:11:255:1:675968:0]; 2025-04-17T18:17:58.255972Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:2:255:1:574128:0]; 2025-04-17T18:17:58.256067Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:2:255:2:395328:0]; 2025-04-17T18:17:58.256105Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:14:255:1:653840:0]; 2025-04-17T18:17:58.256205Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:5:255:1:574112:0]; 2025-04-17T18:17:58.256329Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:2:8:255:1:675968:0]; WAIT_CLEANING: 1 [identical wait line repeated ~58×; duplicates collapsed] Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] Test command err: 2025-04-17T18:10:27.092411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:10:27.092570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:10:27.092753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bcb/r3tmp/tmpJCWAss/pdisk_1.dat 2025-04-17T18:10:27.457586Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3011, node 1 2025-04-17T18:10:27.676305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:10:27.676379Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:10:27.676410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:10:27.677122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:10:27.679125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:10:27.764462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:27.764577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:27.778493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64267 2025-04-17T18:10:28.306607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:10:31.049768Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:10:31.094764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:31.094884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:31.132522Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:10:31.135097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:31.372300Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.372810Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.373371Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.373681Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.373780Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.373891Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.373965Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.374032Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.374096Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:10:31.535865Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:10:31.535938Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:10:31.548816Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:10:31.666961Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:10:31.706792Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:10:31.706893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:10:31.736148Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:10:31.737585Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:10:31.737763Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:10:31.737817Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:10:31.737882Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:10:31.737941Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:10:31.737995Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:10:31.738036Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:10:31.739197Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:10:31.772790Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:31.772888Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:10:31.778684Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:10:31.782930Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:10:31.783077Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:10:31.793790Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:10:31.810362Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:10:31.810412Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:10:31.810478Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:10:31.819109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:10:31.824754Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:10:31.824874Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:10:31.980270Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:10:32.137211Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:10:32.237125Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:10:32.899064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:32.899175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:10:32.913337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:10:33.012238Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:10:33.012469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:10:33.012767Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:10:33.012899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:10:33.013075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:10:33.013199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:10:33.013316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:10:33.013502Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:10:33.013642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:10:33.013759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:10:33.013868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:10:33.013983Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:10:33.046051Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:10:33.046194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 17T18:17:50.324390Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:17:50.324462Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:50.324497Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:17:51.219573Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:17:51.219650Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:51.219687Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:17:52.040229Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:17:52.040294Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:52.040323Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:17:52.897699Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:17:52.908691Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:17:52.908766Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:52.908803Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:17:53.783032Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:17:53.783093Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:53.783119Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:17:54.596541Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:17:54.596761Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:17:54.607846Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:17:54.607920Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:54.607958Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:17:55.458532Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:17:55.458606Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:55.458629Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2025-04-17T18:17:56.277163Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:17:56.277244Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:56.277280Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:17:57.070861Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:17:57.081869Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:17:57.081947Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:57.081983Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:17:58.045179Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:17:58.045251Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:58.045287Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:17:58.938137Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:17:58.938380Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:17:58.949327Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:17:58.949407Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:58.949443Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:17:59.792615Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:17:59.792705Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:17:59.792740Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:18:00.625881Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:18:00.625962Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:00.625995Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:18:01.539484Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:18:01.550274Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:01.550349Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:01.550404Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:18:02.548807Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:18:02.548894Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:02.548931Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:18:03.475064Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:18:03.475265Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:18:03.486135Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:03.486222Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:03.486253Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:18:04.348946Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:18:04.349021Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:04.349052Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:18:05.168763Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:05.168831Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:05.168859Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:18:05.979511Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:18:05.990667Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:18:05.990759Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:05.990818Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:18:06.960349Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:06.960418Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:06.960467Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:18:07.782456Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:18:07.782587Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:18:07.793205Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:18:07.793261Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:18:07.793289Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T18:18:08.671875Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:08.671952Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-04-17T18:18:08.671986Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. (TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: TBackTrace::Capture()+28 (0x18EA949C) TWithBackTrace::TWithBackTrace<>()+80 (0x18AD13D0) NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x18AA5A55) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x18AC25F7) std::__y1::__function::__func, void ()>::operator()()+280 (0x18ACD488) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19394ED6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1936D929) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x18ACC654) NUnitTest::TTestFactory::Execute()+2438 (0x1936F1F6) NUnitTest::RunMain(int, char**)+5213 (0x1938F44D) ??+0 (0x7F69FE0A9D90) __libc_start_main+128 (0x7F69FE0A9E40) _start+41 (0x163FC029) |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteGraph >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2025-04-17T18:18:10.144052Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7494348090968315011:2048] with connection to localhost:7481:local 2025-04-17T18:18:10.144178Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:10.305718Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:10.305748Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:10.306078Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:10.951148Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:10.951191Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:11.458339Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7494348098344574029:2048] with connection to localhost:7481:local 2025-04-17T18:18:11.458422Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:11.576949Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:11.576980Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:11.577215Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:12.326251Z node 2 :STREAMS_STORAGE_SERVICE INFO: 
[graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:12.326286Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:12.326575Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-17T18:18:12.477970Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-17T18:18:12.478008Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-17T18:18:12.478304Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-04-17T18:18:12.604023Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-04-17T18:18:12.604052Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-04-17T18:18:12.604309Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:12.784883Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:13.459835Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7494348106418117841:2048] with connection to localhost:7481:local 2025-04-17T18:18:13.459946Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:13.613361Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:13.613406Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:13.613610Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:14.234012Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:14.234044Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:14.234325Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:14.515375Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-17T18:18:14.515403Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:14.515670Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-17T18:18:14.649126Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-17T18:18:14.649157Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-17T18:18:14.649446Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:14.785652Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-17T18:18:14.785689Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:14.786026Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:14.893185Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-17T18:18:14.893243Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] 
[17:2] Send TEvCompleteCheckpointResponse 2025-04-17T18:18:14.893564Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-04-17T18:18:14.999888Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2025-04-17T18:18:14.999924Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-04-17T18:18:15.000270Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 2025-04-17T18:18:15.133719Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2025-04-17T18:18:15.133752Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2025-04-17T18:18:15.134015Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:15.309984Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:16.050097Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7494348116379919902:2048] with connection to localhost:7481:local 2025-04-17T18:18:16.050223Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:16.218829Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:16.218867Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:16.219110Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:16.876687Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:16.876726Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:16.876999Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-04-17T18:18:17.010686Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-04-17T18:18:17.010741Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-04-17T18:18:17.011068Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2025-04-17T18:18:17.011115Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2025-04-17T18:18:17.598339Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2025-04-17T18:18:17.598434Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2025-04-17T18:18:17.598482Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2025-04-17T18:18:17.605896Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2025-04-17T18:18:17.915956Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49 2025-04-17T18:18:17.916032Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2025-04-17T18:18:17.927709Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> TStateStorageTest::ShouldDeleteGraph [GOOD] >> 
TStateStorageTest::ShouldGetMultipleStates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD] Test command err: 2025-04-17T18:18:08.659796Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7494348086632572892:2048] with connection to localhost:63056:local 2025-04-17T18:18:08.660563Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:09.303987Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:09.304030Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:10.029347Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7494348090328267576:2048] with connection to localhost:63056:local 2025-04-17T18:18:10.029495Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:10.203038Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:10.203069Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:10.203319Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:10.337947Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-17T18:18:10.337974Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:10.338201Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:10.425439Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2025-04-17T18:18:10.425475Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:10.954491Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7494348092399997537:2048] with connection to localhost:63056:local 2025-04-17T18:18:10.954627Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:11.100068Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:11.100107Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:11.100375Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:11.819906Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:11.819940Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:11.820230Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:12.065848Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-17T18:18:12.065890Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:12.066162Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-17T18:18:12.208876Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-17T18:18:12.208913Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-17T18:18:12.209148Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:12.356256Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-17T18:18:12.356313Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:12.356651Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:12.485385Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-17T18:18:12.485414Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-17T18:18:12.485663Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:12.682140Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:13.381282Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7494348104758580572:2048] with connection to localhost:63056:local 2025-04-17T18:18:13.381379Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:13.526768Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:13.526815Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2025-04-17T18:18:13.527135Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:14.193768Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:14.193807Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:14.194342Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-04-17T18:18:14.280682Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-04-17T18:18:14.282958Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-04-17T18:18:15.010052Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7494348109515795471:2048] with connection to localhost:63056:local 2025-04-17T18:18:15.010061Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7494348113810762869:2130] 2025-04-17T18:18:15.010119Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:15.155139Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:15.155179Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:15.155441Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:15.774585Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:15.774610Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:15.774881Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:16.050077Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-17T18:18:16.050108Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:16.050431Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:16.149470Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2025-04-17T18:18:16.149525Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2025-04-17T18:18:16.149553Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-04-17T18:18:16.149625Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2025-04-17T18:18:16.149778Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-17T18:18:16.302976Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-17T18:18:16.303027Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-17T18:18:16.303270Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:16.392602Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-17T18:18:16.392650Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send 
TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:16.392973Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:16.487673Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-17T18:18:16.487720Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2025-04-17T18:18:16.487745Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-17T18:18:16.487795Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2025-04-17T18:18:16.488039Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-04-17T18:18:16.575068Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2025-04-17T18:18:16.577457Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2025-04-17T18:18:16.616893Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-04-17T18:18:16.616933Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-04-17T18:18:16.617236Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:16.746034Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2025-04-17T18:18:16.746089Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:16.746389Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:16.879198Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2025-04-17T18:18:16.879231Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2025-04-17T18:18:16.879256Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2025-04-17T18:18:16.879366Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2025-04-17T18:18:16.879511Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:16.902055Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2025-04-17T18:18:17.055472Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.158847Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.168591Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.269210Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.279124Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.379753Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.390125Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.490799Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got 
TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.500303Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.600875Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.610501Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.711166Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.720563Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.821201Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.856856Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:17.957489Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:17.965224Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:18.065889Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:18.075292Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:18.175918Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:18.186206Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-17T18:18:18.286832Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-17T18:18:18.302706Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2025-04-17T18:18:13.001467Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7494348101443735197:2048] with connection to localhost:1636:local 2025-04-17T18:18:13.001603Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:13.673680Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:13.673715Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:13.674021Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:13.777643Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130 2025-04-17T18:18:13.777671Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:14.354270Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7494348111398122078:2048] with connection to localhost:1636:local 2025-04-17T18:18:14.354395Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:14.819296Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130 2025-04-17T18:18:14.819326Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:15.365723Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7494348114307623982:2048] with connection to localhost:1636:local 2025-04-17T18:18:15.365851Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:15.494979Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:15.495034Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:15.495532Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:16.173417Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:16.173446Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:16.173683Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:16.309199Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Constraint violated. Table: `local/TStorageServiceTestShouldNotCreateCheckpointTwice/checkpoints_metadata`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-04-17T18:18:16.309234Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:17.059269Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7494348118736297641:2048] with connection to localhost:1636:local 2025-04-17T18:18:17.059381Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:17.218280Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:17.218321Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:17.218615Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:17.448044Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-04-17T18:18:17.448100Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:18.082559Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7494348125682168256:2048] with connection to localhost:1636:local 2025-04-17T18:18:18.082663Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:18.240409Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:18.240455Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:18.240723Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:18.885729Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:18.885766Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:18.886010Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:19.063260Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-17T18:18:19.063287Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:19.063598Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:19.156651Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-04-17T18:18:19.156686Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged >> TAsyncIndexTests::CreateTable |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] >> TAsyncIndexTests::CreateTable [GOOD] >> KqpQueryPerf::MultiRead+QueryService >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:23.870467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:23.870577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:23.870663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:23.870720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:23.870791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:23.870845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:23.870925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:23.871052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:23.871477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:23.983851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:23.983930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:24.003343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:24.003865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-17T18:18:24.004082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:24.014100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:24.014371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:24.015180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.015409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:24.018860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.020378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:24.020468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.020735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:24.020824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:24.020879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:24.021043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.029486Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:24.146713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:24.146947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.147148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:24.147391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:24.147440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.149752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.149879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:24.150061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.150113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:24.150151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:24.150228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:24.152150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.152204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:24.152237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:24.153742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.153783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.153853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:24.153909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:24.157595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:24.159381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:24.159562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:24.160489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.160621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:24.160671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:24.160939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:24.160988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:24.161181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:24.161257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:24.163199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-17T18:18:24.163245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:24.163411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.163448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:24.163801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.163845Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:24.163969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:24.164003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:24.164046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:24.164078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:24.164112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:24.164150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:24.164185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:24.164216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:24.164273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:24.164313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:24.164346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:24.166153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:24.166285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:24.166323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.405448Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:24.405484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:18:24.405520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-17T18:18:24.406046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:24.406071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-17T18:18:24.406146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:24.406173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:18:24.406259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:24.406304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.406329Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.406352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:18:24.406375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:18:24.411686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:24.411741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:24.414995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:24.415188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:24.415365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.415443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:24.415533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:24.415796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:24.415828Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-17T18:18:24.415903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:18:24.415934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:18:24.415974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:18:24.415999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:18:24.416023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-17T18:18:24.416299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.416591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.416614Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:18:24.416649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:18:24.416663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:24.416684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:18:24.416697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:24.416712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-17T18:18:24.416757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] message: TxId: 101 2025-04-17T18:18:24.416788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:24.416818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:18:24.416836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:18:24.416922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:18:24.416958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-17T18:18:24.416982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-17T18:18:24.417002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:18:24.417026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-17T18:18:24.417042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-17T18:18:24.417068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:18:24.418949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:18:24.418985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:380:2348] TestWaitNotification: OK eventTxId 101 2025-04-17T18:18:24.419347Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:24.419523Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 173us result status StatusSuccess 2025-04-17T18:18:24.420090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 
UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TVectorIndexTests::CreateTablePrefix >> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD] >> TStateStorageTest::ShouldCountStates >> TStateStorageTest::ShouldCountStates [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] >> TVectorIndexTests::CreateTablePrefix [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:25.646006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:25.646099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:25.646151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:25.646210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:25.646266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:25.646308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:25.646374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:25.646445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:25.646814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:25.724006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:25.724042Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:25.734752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:25.735039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:25.735176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:25.741129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:25.741250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:25.741728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:25.741863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:25.743922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:25.744915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:25.744994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:25.745205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:25.745269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:25.745315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:25.745424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.750199Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:25.852089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:25.852285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.852448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:25.852686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:25.852737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.854646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:25.854781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:25.854956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.855005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:25.855045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:25.855094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:25.856673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.856720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:25.856752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:25.858131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.858172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.858246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:25.858302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:25.861870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:25.863403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:25.863572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:25.864449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:25.864575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:25.864621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:25.864872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:25.864925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-17T18:18:25.865095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:25.865166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:25.866819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:25.866867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:25.867020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:25.867059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:25.867458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.867509Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:25.867624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:25.867656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:25.867696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:25.867728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:25.867770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:25.867808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:25.867841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:25.867869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:25.867922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:25.867964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:25.867994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:25.869787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:25.869899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:25.869933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ration: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.362633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.362669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:26.362764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.362808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.362849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:26.362873Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T18:18:26.362897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T18:18:26.363733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.363792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.363807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:26.363826Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-17T18:18:26.363847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T18:18:26.364426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.364488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:26.364506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:26.364531Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-04-17T18:18:26.364553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 6] was 4 2025-04-17T18:18:26.364597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-04-17T18:18:26.366205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-04-17T18:18:26.366249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:26.366495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:18:26.366581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-04-17T18:18:26.366607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-04-17T18:18:26.366636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-04-17T18:18:26.366673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-04-17T18:18:26.366701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-04-17T18:18:26.367741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-04-17T18:18:26.367815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:26.368007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-17T18:18:26.368074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-04-17T18:18:26.368093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-04-17T18:18:26.368111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-04-17T18:18:26.368137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-04-17T18:18:26.368164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-04-17T18:18:26.368959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-17T18:18:26.368990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:26.369121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:18:26.369164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-04-17T18:18:26.369180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-04-17T18:18:26.369196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-04-17T18:18:26.369218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-04-17T18:18:26.369251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-04-17T18:18:26.369515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.369655Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.369689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.369725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.369804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:18:26.369825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:26.369976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:18:26.370045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-04-17T18:18:26.370078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-04-17T18:18:26.370104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-04-17T18:18:26.370125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-04-17T18:18:26.370147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-04-17T18:18:26.370236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:452:2398] message: TxId: 102 2025-04-17T18:18:26.370288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-04-17T18:18:26.370331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:18:26.370361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:18:26.370444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:18:26.370481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-17T18:18:26.370500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-17T18:18:26.370522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:18:26.370539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-17T18:18:26.370555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-17T18:18:26.370612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T18:18:26.370636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-17T18:18:26.370668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-17T18:18:26.370704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-17T18:18:26.370723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:4 2025-04-17T18:18:26.370754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:4 2025-04-17T18:18:26.370792Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-17T18:18:26.371538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.371588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.371625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.371697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.371740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.372912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:26.374528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:18:26.374568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:597:2536] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2025-04-17T18:17:13.549775Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:13.622439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:13.639276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:13.639539Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:13.647431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:13.647665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:13.647883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:13.648001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:13.648131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:13.648244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:13.648354Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:13.648532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:13.648655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:13.648762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.648867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:13.648971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:13.674214Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:13.674332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:13.674389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:13.674578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:13.674724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:13.674797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:13.674833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:13.674916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:13.674971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:13.675007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:13.675033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:13.675185Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:13.675252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:13.675287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:13.675330Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:13.675414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:13.675468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:13.675517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:13.675550Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:13.675637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:13.675672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:13.675718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:13.675766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:13.675804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:13.675849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:13.676187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-17T18:17:13.676265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-17T18:17:13.676328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-17T18:17:13.676390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-17T18:17:13.676538Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:13.676585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:13.676617Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:13.676791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:13.676843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.676872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.677029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:13.677068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:13.677093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:13.677264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:13.677300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:13.677325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:13.677422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:13.677459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:13.677489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
HARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:36:2:0:3042040:0] 2025-04-17T18:18:05.457506Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:30:2:0:1768768:0] 2025-04-17T18:18:05.457531Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:27:2:0:1768768:0] 2025-04-17T18:18:05.457567Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:16:3:2:598760:0] 2025-04-17T18:18:05.457594Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:37:3:0:1768768:0] 2025-04-17T18:18:05.457619Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:41:4:1:1796864:0] 2025-04-17T18:18:05.457648Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:20:4:1:1796864:0] 2025-04-17T18:18:05.457674Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:29:4:0:3042040:0] 2025-04-17T18:18:05.457703Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:22:3:0:3009176:0] 2025-04-17T18:18:05.457729Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:24:2:0:3042040:0] 2025-04-17T18:18:05.457754Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:41:4:0:1768768:0] 2025-04-17T18:18:05.457857Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=46;external_task_id=4e8bf196-1bb811f0-87fa4245-6582c4a0;mem=55728;cpu=0; 2025-04-17T18:18:05.458263Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:768;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=4e447cc6-1bb811f0-b42c1188-354a142,;}; 2025-04-17T18:18:05.458671Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:45:255:1:574112:0]; 2025-04-17T18:18:05.458793Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:45:255:2:592928:0]; GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: GC for channel 3 deletes blobs: [9437184:3:85:3:0:5870200:0] Added portions: 155 156 2025-04-17T18:18:05.481635Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:18:05.481920Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[269] (CS::INDEXATION) apply at tablet 9437184 2025-04-17T18:18:05.484989Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 3:88 Blob count: 2 2025-04-17T18:18:05.485160Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=97044340;raw_bytes=151102351;count=41;records=1537497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:18:05.500289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-17T18:18:05.500388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;fline=with_appended.cpp:65;portions=77,78,;task_id=4e447cc6-1bb811f0-b42c1188-354a142; 2025-04-17T18:18:05.500686Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::4e447cc6-1bb811f0-b42c1188-354a142; 2025-04-17T18:18:05.500753Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:18:05.500843Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:18:05.500908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-04-17T18:18:05.500964Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-17T18:18:05.501032Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:18:05.501088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:18:05.501133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:18:05.501226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.386500s; 2025-04-17T18:18:05.501282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=4e447cc6-1bb811f0-b42c1188-354a142;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:18:05.501909Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:87:2:0:5870200:0] 2025-04-17T18:18:05.501976Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:88 Blob count: 2 2025-04-17T18:18:05.503356Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=4e447cc6-1bb811f0-b42c1188-354a142;mem=5963210;cpu=0; 2025-04-17T18:18:05.504238Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:18:05.505168Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:6:255:1:653840:0]; 2025-04-17T18:18:05.505394Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:7:255:2:592928:0]; 2025-04-17T18:18:05.505545Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:19:255:1:653840:0]; 2025-04-17T18:18:05.505700Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:1:255:3:197600:0]; 2025-04-17T18:18:05.505761Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:10:255:2:592928:0]; 2025-04-17T18:18:05.505910Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:1:255:2:395328:0]; 2025-04-17T18:18:05.505974Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:4:255:1:675968:0]; 2025-04-17T18:18:05.506145Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:22:255:1:574112:0]; 2025-04-17T18:18:05.506320Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:13:255:1:675968:0]; 2025-04-17T18:18:05.506486Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:16:255:1:675968:0]; 2025-04-17T18:18:05.506657Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:7:255:1:574112:0]; 2025-04-17T18:18:05.506801Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:10:255:1:574112:0]; 2025-04-17T18:18:05.506966Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:17:255:2:592928:0]; 2025-04-17T18:18:05.510665Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:1:255:1:574128:0]; 2025-04-17T18:18:05.510837Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:20:255:1:574112:0]; 2025-04-17T18:18:05.510976Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:20:255:2:592928:0]; 2025-04-17T18:18:05.511121Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:2:255:2:395328:0]; 2025-04-17T18:18:05.511190Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:2:255:3:197600:0]; 2025-04-17T18:18:05.511250Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:14:255:1:653840:0]; 2025-04-17T18:18:05.511634Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:5:255:2:592928:0]; 2025-04-17T18:18:05.511792Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:17:255:1:574112:0]; 2025-04-17T18:18:05.511937Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:8:255:1:675968:0]; 2025-04-17T18:18:05.512128Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:11:255:1:675968:0]; 2025-04-17T18:18:05.512292Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:2:255:1:574128:0]; 2025-04-17T18:18:05.512466Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:21:255:1:675968:0]; 2025-04-17T18:18:05.512803Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:12:255:2:592928:0]; 2025-04-17T18:18:05.513022Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:5:255:1:574112:0]; 2025-04-17T18:18:05.513172Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:15:255:1:574112:0]; 2025-04-17T18:18:05.513320Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:15:255:2:592928:0]; 2025-04-17T18:18:05.513489Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:18:255:1:675968:0]; 2025-04-17T18:18:05.513669Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:9:255:1:653840:0]; 2025-04-17T18:18:05.513824Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:12:255:1:574112:0]; 2025-04-17T18:18:05.513967Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:120;method=DeleteObject;id=[9437184:3:3:255:1:675968:0]; WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] Test command err: 2025-04-17T18:18:17.714681Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7494348123731756594:2048] with connection to localhost:23954:local 2025-04-17T18:18:17.714764Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:18.259656Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:18.259689Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:18.260039Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:18.972605Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:18.972640Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:18.972890Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:19.157984Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-17T18:18:19.158020Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:19.158299Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-17T18:18:19.271201Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-04-17T18:18:19.271225Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-17T18:18:19.936146Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7494348131361210719:2048] with connection to localhost:23954:local 2025-04-17T18:18:19.936245Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:20.052476Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:20.052507Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:20.052827Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:20.260997Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-04-17T18:18:20.261023Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-04-17T18:18:20.818707Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7494348137702651926:2048] with connection to localhost:23954:local 2025-04-17T18:18:20.818817Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:20.994877Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:20.994911Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:20.995158Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-04-17T18:18:21.223282Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-04-17T18:18:21.223315Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-04-17T18:18:21.863525Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7494348140014218160:2048] with connection to localhost:23954:local 2025-04-17T18:18:21.863664Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:22.029066Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:22.029112Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:22.029415Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:22.717395Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:22.717430Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:22.717804Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:22.902096Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080 2025-04-17T18:18:22.902126Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-04-17T18:18:23.475870Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7494348148036124017:2048] with connection to localhost:23954:local 2025-04-17T18:18:23.475963Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:23.617312Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-17T18:18:23.617356Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:23.617668Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-17T18:18:24.185192Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-17T18:18:24.185226Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-17T18:18:24.185489Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-17T18:18:24.444040Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-17T18:18:24.444078Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:18:24.444397Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-17T18:18:24.595862Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-17T18:18:24.595913Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-17T18:18:24.596166Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-17T18:18:24.701837Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-04-17T18:18:24.701867Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-04-17T18:15:41.545696Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:15:41.563798Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:15:41.564241Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:15:41.564451Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:15:41.615992Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:15:41.623775Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:15:41.713062Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:15:41.714793Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:15:41.714861Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:15:41.714912Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:15:41.715247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:15:41.715597Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:15:41.715654Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:15:41.715989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:15:41.716033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:15:41.789155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:15:41.822580Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:15:41.823703Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:15:41.823827Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:15:41.823859Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:15:41.823905Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:15:41.823937Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.824090Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.824129Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.825004Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:15:41.825108Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 
2025-04-17T18:15:41.825226Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.825259Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:15:41.825374Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:15:41.825412Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:15:41.825450Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:15:41.825483Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:15:41.825516Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:15:41.825613Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.825644Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.825703Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:15:41.829020Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:15:41.829079Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:15:41.829172Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:15:41.829413Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:15:41.829461Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:15:41.829537Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:15:41.829657Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.829696Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:15:41.829744Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:15:41.829775Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.830062Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:15:41.830113Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:15:41.830150Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:15:41.830197Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.830236Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:15:41.830283Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:15:41.830324Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 
2025-04-17T18:15:41.830354Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.830376Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-17T18:15:41.842963Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:15:41.843024Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:15:41.843053Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:15:41.843086Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:15:41.845176Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:15:41.847367Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.847422Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:15:41.847459Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:15:41.847534Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:15:41.847563Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:15:41.847672Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:15:41.847705Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.847761Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:15:41.847796Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:15:41.850989Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:15:41.851042Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:15:41.851249Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.851288Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:15:41.851331Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:15:41.851365Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:15:41.851395Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:15:41.851432Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:15:41.851463Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:15:41.851504Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.851548Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:15:41.851596Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:15:41.851627Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:15:41.851801Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:15:41.851837Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:15:41.851857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:15:41.851879Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:15:41.851905Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:15:41.851956Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:15:41.851979Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:15:41.852011Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:15:41.852038Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:15:41.852075Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:15:41.852124Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:15:41.852156Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-17T18:15:41.852204Z node 1 :TX_DATA ... 
c latency: 58 ms, propose latency: 58 ms, status: COMPLETE 2025-04-17T18:18:18.423667Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2025-04-17T18:18:18.423692Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2025-04-17T18:18:18.423716Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2025-04-17T18:18:18.423739Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2025-04-17T18:18:18.423778Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2025-04-17T18:18:18.423796Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2025-04-17T18:18:18.423814Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2025-04-17T18:18:18.441277Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:18:18.441342Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2025-04-17T18:18:18.441401Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:18:21.467363Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-17T18:18:21.467433Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T18:18:21.467777Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:647:2622], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:18:21.467815Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:18:21.467854Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:646:2621], serverId# [3:647:2622], sessionId# [0:0:0] 2025-04-17T18:18:21.468055Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2025-04-17T18:18:21.468087Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:18:21.468160Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:21.468731Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-04-17T18:18:21.480516Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-17T18:18:21.480589Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-04-17T18:18:21.480619Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:18:21.480649Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:18:21.480706Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-17T18:18:21.480761Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2025-04-17T18:18:21.480789Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-17T18:18:21.480806Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-17T18:18:21.480823Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-04-17T18:18:21.480840Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:21.484776Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-17T18:18:21.484934Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:21.484968Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:21.523362Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:21.523442Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:21.524275Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:21.527516Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-17T18:18:21.527688Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:21.527739Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:21.623727Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:21.623798Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:21.624409Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:21.643809Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-04-17T18:18:21.644048Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:21.644088Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:21.644415Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:21.644452Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:21.645052Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:21.767119Z node 
3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-17T18:18:21.767747Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:21.767795Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:21.938821Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:21.938910Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:21.939702Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:22.185994Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-04-17T18:18:22.186817Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:22.186862Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:22.317287Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:22.317361Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:22.317997Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:22.321800Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-17T18:18:22.321948Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:22.321986Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:22.342509Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:22.342575Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:22.343307Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:22.344971Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-17T18:18:22.345103Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:22.345147Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:22.358983Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:22.359071Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:22.359923Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:22.365686Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-17T18:18:22.365897Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-17T18:18:22.365954Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-17T18:18:22.676687Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:18:22.676764Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-17T18:18:22.677467Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-17T18:18:23.437595Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-04-17T18:18:23.437692Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-17T18:18:23.437761Z node 3 :TX_DATASHARD TRACE: Execution 
status for [0:11] at 9437184 is Executed 2025-04-17T18:18:23.437805Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-04-17T18:18:23.437839Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2025-04-17T18:18:23.437870Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-04-17T18:18:23.437911Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 62 ms, propose latency: 62 ms, status: COMPLETE 2025-04-17T18:18:23.438039Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2025-04-17T18:18:23.438063Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-04-17T18:18:23.438086Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-04-17T18:18:23.438111Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-04-17T18:18:23.438149Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-17T18:18:23.438169Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-04-17T18:18:23.438210Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2025-04-17T18:18:23.459181Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:18:23.459251Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-04-17T18:18:23.459312Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] >> TUniqueIndexTests::CreateTable |94.0%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::MultiRead+QueryService [GOOD] |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2025-04-17T18:18:16.721564Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-04-17T18:18:16.879634Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-04-17T18:18:17.374718Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-04-17T18:18:22.212909Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2025-04-17T18:18:22.398205Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-04-17T18:18:22.398304Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' >> TUniqueIndexTests::CreateTable [GOOD] |94.0%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20002, MsgBus: 24029 2025-04-17T18:18:24.919615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348155332579074:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:18:24.919697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00162a/r3tmp/tmpdpcfvY/pdisk_1.dat 2025-04-17T18:18:25.199507Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20002, node 1 2025-04-17T18:18:25.269094Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:18:25.269131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:18:25.269145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:18:25.269295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:18:25.276295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:18:25.276402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:18:25.278267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24029 TClient is connected to server localhost:24029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:18:25.658662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:25.675044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:25.779165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:18:25.881340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:25.948746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:18:27.424118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348168217482737:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:27.424304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:27.595455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:18:27.621608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:18:27.645388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:18:27.671288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:18:27.694993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:18:27.761090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:18:27.786070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:18:27.829951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348168217483397:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:27.830022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:27.830044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348168217483402:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:27.832920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:18:27.840023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348168217483404:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:18:27.935847Z node 1 :TX_PROXY ERROR: Actor# [1:7494348168217483461:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:29.271489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:29.271553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:29.271584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:29.271611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:29.271659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:29.271691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:29.271731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:29.271778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:29.272023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:29.328268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:29.328317Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:29.342969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:29.343351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:29.343500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:29.351330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:29.351517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:29.352108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.352287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:29.355086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-04-17T18:18:29.356255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:29.356318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:29.356507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:29.356561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:29.356602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:29.356717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.363006Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:29.472364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:29.472591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.472781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:29.473006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:29.473057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.475053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.475169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:29.475348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.475397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:29.475429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:29.475485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:29.476991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.477039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:29.477067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:29.478525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.478573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.478623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:29.478690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:29.482082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:29.483603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:29.483782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:29.484693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.484802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:29.484845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:29.485084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:29.485132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:29.485292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:29.485359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:29.487008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:29.487051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:29.487190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:29.487220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:29.487575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.487620Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-17T18:18:29.487710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:29.487744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:29.487778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:29.487809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:29.487839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:29.487870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:29.487901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:29.487924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:29.487970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:29.488003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:29.488036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:29.489621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:29.489711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:29.489741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.735030Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:29.735053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:18:29.735077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-17T18:18:29.735434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:29.735470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-17T18:18:29.735567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:29.735598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:18:29.735704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:29.735743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.735768Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.735793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:18:29.735812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:18:29.737881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:29.737930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:29.739614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:29.739689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:29.739763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.739797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:29.739842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:29.740016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.740145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:29.740168Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-17T18:18:29.740239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:18:29.740262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:18:29.740283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:18:29.740312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:18:29.740340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-17T18:18:29.740439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.740454Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:18:29.740486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:18:29.740509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:29.740528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:18:29.740538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:29.740558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-17T18:18:29.740598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] message: TxId: 101 2025-04-17T18:18:29.740625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:29.740652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:18:29.740683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:18:29.740761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:18:29.740786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-17T18:18:29.740798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-17T18:18:29.740812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:18:29.740825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-17T18:18:29.740834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-17T18:18:29.740856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:18:29.742619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:18:29.742684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:380:2348] TestWaitNotification: OK eventTxId 101 2025-04-17T18:18:29.743004Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:29.743207Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 213us result status StatusSuccess 2025-04-17T18:18:29.743946Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.0%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-04-17T18:18:29.146624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:29.146699Z node 1 :IMPORT WARN: Table profiles were not loaded |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] >> TAsyncIndexTests::Decimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:34.575969Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:34.576023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:34.576058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:34.576094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:34.576139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:34.576159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:34.576194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:34.576242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:34.576463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:34.655137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:34.655178Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:34.669232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:34.669554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:34.669730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:34.676353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:34.676497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:34.677015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:34.677183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:34.679571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:34.680625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:34.680686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:34.680859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:34.680899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:34.680937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:34.681045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.686808Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:34.778563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:34.778727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.778867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:34.779032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:34.779063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.780725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:34.780823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:34.780964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.780998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:34.781022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:34.781055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:34.782354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.782390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:34.782419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:34.783648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.783685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.783738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:34.783794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:34.790781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:34.792286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:34.792426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:34.793105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:34.793195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:34.793231Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:34.793440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:34.793475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:34.793596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:34.793645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:34.795329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:34.795369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:34.795527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:34.795574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:34.795892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:34.795925Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:34.796000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:34.796024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:34.796052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:34.796071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:34.796099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:34.796123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:34.796145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:34.796165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-04-17T18:18:34.796222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:34.796247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:34.796266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:34.797376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:34.797455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:34.797483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: 
VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:35.042508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:35.042777Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 251us result status StatusSuccess 2025-04-17T18:18:35.043164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:35.043890Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:35.044087Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 206us result status 
StatusSuccess 2025-04-17T18:18:35.044435Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicStatistics::TwoNodes [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError >> TAsyncIndexTests::Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:35.614506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:35.614593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:35.614629Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:35.614659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:35.614708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:35.614741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:35.614790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:35.614853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:35.615097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:35.674406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:35.674474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:35.693891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:35.694256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:35.694397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:35.701844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:35.701994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:35.702520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:35.702704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:35.704864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:35.705965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:35.706024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:35.706210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:35.706256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:35.706291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:35.706375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.712159Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:35.807391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:35.807643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.807919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:35.808105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:35.808144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.809755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:35.809870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:35.810009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.810047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:35.810072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:35.810112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:35.811364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.811403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:35.811431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:35.812662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.812691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.812728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:35.812774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:35.815305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:35.816656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:35.816771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-04-17T18:18:35.817391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:35.817498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:35.817526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:35.817708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:35.817748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:35.817866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:35.817916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:35.819279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:35.819312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:35.819409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:35.819439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:35.819704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:35.819741Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:35.819814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:35.819838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:35.819868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:35.819890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:35.819915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:35.819945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:35.819966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:35.819989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:35.820024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:35.820052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:35.820074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-04-17T18:18:35.821258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:35.821327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:35.821353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:36.063803Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:36.063830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-17T18:18:36.063856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-17T18:18:36.064285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:36.064306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-17T18:18:36.064380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:36.064415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-17T18:18:36.064481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-17T18:18:36.064508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:36.064530Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.064556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-17T18:18:36.064575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-17T18:18:36.069625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:36.069701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:36.073055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:36.073235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at 
schemeshard: 72057594046678944 2025-04-17T18:18:36.073478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.073649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-17T18:18:36.073771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:36.074090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-17T18:18:36.074123Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-17T18:18:36.074217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:18:36.074256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:18:36.074287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-17T18:18:36.074310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-17T18:18:36.074339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-17T18:18:36.074586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.074844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.074874Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-17T18:18:36.074913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:18:36.074947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:36.074978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-17T18:18:36.075004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:36.075025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-17T18:18:36.075080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] message: TxId: 101 2025-04-17T18:18:36.075120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-17T18:18:36.075151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-17T18:18:36.075175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-17T18:18:36.075269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:18:36.075313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-17T18:18:36.075336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-17T18:18:36.075357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:18:36.075370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-17T18:18:36.075382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 101:2 2025-04-17T18:18:36.075409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:18:36.077351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:18:36.077387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:380:2348] TestWaitNotification: OK eventTxId 101 2025-04-17T18:18:36.077716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:36.077928Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 196us result status StatusSuccess 2025-04-17T18:18:36.078512Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD]
Test command err: 2025-04-17T18:16:46.333571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:46.333775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:46.333877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001515/r3tmp/tmpuiz1zj/pdisk_1.dat 2025-04-17T18:16:46.772673Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62526, node 1 2025-04-17T18:16:47.067929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:47.067981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:47.068011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:47.068231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:47.073953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:47.184998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:47.185156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:47.199892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24860 2025-04-17T18:16:47.805598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:53.792026Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:53.792127Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-17T18:16:53.848346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:53.848456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:53.849236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:53.849301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:53.889690Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:53.889864Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:16:53.892666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:53.892982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:54.158846Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.159539Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute 
CreateTablet Postponed 2025-04-17T18:16:54.160163Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.160332Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.160605Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.160692Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.160815Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.160912Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.160992Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:54.337106Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:54.337207Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:54.337994Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:54.338077Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:54.351483Z node 2 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:16:54.352110Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:54.353429Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:54.522743Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:54.585210Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:54.585293Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:54.624612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:54.626684Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:54.626874Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:54.626928Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:54.626979Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:54.627032Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:54.627075Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:54.627124Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:54.627650Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:54.671265Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:54.671374Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2290:2602], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:54.679517Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2298:2608] 2025-04-17T18:16:54.686042Z 
node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2335:2625] 2025-04-17T18:16:54.686749Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2335:2625], schemeshard id = 72075186224037897 2025-04-17T18:16:54.688933Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:54.706362Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:54.706417Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:54.706484Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:54.720743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:54.727918Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976725657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:54.728054Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976725657 2025-04-17T18:16:54.927105Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:55.117157Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976725657. Doublechecking... 2025-04-17T18:16:55.196856Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:56.323044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2681:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.323169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.341649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:56.630582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2833:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.630765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.632342Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2838:3121]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:56.632566Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:16:56.632658Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2840:3123] 2025-04-17T18:16:56.632778Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2840:3123] 2025-04-17T18:16:56.633476Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2841:2814] 2025-04-17T18:16:56.633725Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2840:3123], server id = [2:2841:2814], tablet id = 72075186224037894, status = OK 2025-04-17T18:16:56.633973Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2841:2814], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:16:56.634032Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:16:56.634272Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:16:56.634386Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2838:3121], StatRequests.size() = 1 2025-04-17T18:16:56.657603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2845:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.657702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch poo ... solveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:18:29.705015Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:18:29.705068Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:18:29.705101Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:18:30.168989Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7478:4580]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:30.169257Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:18:30.169294Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7478:4580], StatRequests.size() = 1 2025-04-17T18:18:30.570574Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:18:30.570761Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:18:30.571113Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:18:30.614872Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T18:18:30.614959Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 204.000000s, at schemeshard: 72075186224037897 2025-04-17T18:18:30.615338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-17T18:18:30.630829Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:18:30.945181Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7515:4596]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:30.945456Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:18:30.945488Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7515:4596], StatRequests.size() = 1 2025-04-17T18:18:31.297455Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:31.297549Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:18:31.297622Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:18:31.297673Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:18:31.298237Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:18:31.345202Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:18:31.350298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7542:4615], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:31.350417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7552:4620], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:31.350489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:31.361319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725658:2, at schemeshard: 72075186224037897 2025-04-17T18:18:31.413503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7556:4623], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725658 completed, doublechecking } 2025-04-17T18:18:31.555733Z node 2 :TX_PROXY ERROR: Actor# [2:7652:4669] txid# 281474976725659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:18:31.609805Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7682:4685]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:31.610046Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:18:31.610100Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7682:4685], StatRequests.size() = 1 2025-04-17T18:18:31.785764Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzA3MWIyZDAtNjIwYmZkMjUtZTRlZDkwODUtM2M1MzYx, TxId: 2025-04-17T18:18:31.785877Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzA3MWIyZDAtNjIwYmZkMjUtZTRlZDkwODUtM2M1MzYx, TxId: 2025-04-17T18:18:31.787697Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:18:31.804364Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:18:31.804466Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:18:32.013992Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7718:4695]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:32.014302Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:18:32.014346Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7718:4695], StatRequests.size() = 1 2025-04-17T18:18:32.672569Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7761:4709]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:32.672853Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:18:32.672919Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7761:4709], StatRequests.size() = 1 2025-04-17T18:18:33.024507Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:18:33.035739Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:33.035822Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:18:33.035865Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T18:18:33.035898Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:18:33.036333Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:18:33.039589Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:18:33.057289Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTc4YmE2MTMtOWEwNjMwODktMTU3YWI5ODItNTAzY2M0Nzg=, TxId: 2025-04-17T18:18:33.057359Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTc4YmE2MTMtOWEwNjMwODktMTU3YWI5ODItNTAzY2M0Nzg=, TxId: 2025-04-17T18:18:33.058212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:18:33.072908Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:18:33.072988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:18:33.382360Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7833:4740]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:33.382689Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:18:33.382732Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7833:4740], StatRequests.size() = 1 2025-04-17T18:18:34.122947Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7882:4756]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:34.123258Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-17T18:18:34.123305Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7882:4756], StatRequests.size() = 1 2025-04-17T18:18:34.475933Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:18:34.476094Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:18:34.476508Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:18:34.487803Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:34.487864Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-17T18:18:34.788163Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7919:4772]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:34.788367Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-17T18:18:34.788395Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7919:4772], StatRequests.size() = 1 2025-04-17T18:18:34.788839Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7921:3040]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:34.791476Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:18:34.791562Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7931:3044] 2025-04-17T18:18:34.791613Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7931:3044] 2025-04-17T18:18:34.794080Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7939:4774] 2025-04-17T18:18:34.794800Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7931:3044], server id = [2:7939:4774], tablet id = 72075186224037894, status = OK 2025-04-17T18:18:34.795431Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7939:4774], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:18:34.795507Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-04-17T18:18:34.795779Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-17T18:18:34.795859Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7921:3040], StatRequests.size() = 1
>> TVectorIndexTests::CreateTableWithError [GOOD]
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:36.446675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:36.446747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:36.446789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:36.446818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:36.446859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:36.446880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:36.446918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s,
InflightLimit# 10 2025-04-17T18:18:36.446967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:36.447217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:36.504401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:36.504459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:36.514989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:36.515253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:36.515382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:36.520962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:36.521088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:36.521549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:36.521713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:36.524257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:36.525477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:36.525543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:36.525741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:36.525788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:36.525838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:36.525951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.532397Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:36.634966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:36.635147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.635292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:36.635465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:36.635503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.637045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:36.637151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:36.637281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.637320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:36.637345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:36.637386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:36.638543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.638587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:36.638613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:36.639768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.639800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.639836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:36.639878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:36.642232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:36.643320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:36.643451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:36.644074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:36.644156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:36.644185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:36.644372Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-04-17T18:18:36.644408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:36.644532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:36.644589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:36.645850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:36.645884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:36.645986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:36.646024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:36.646287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:36.646320Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:36.646399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:36.646425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:36.646460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:36.646486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:36.646513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:36.646540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:36.646566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:36.646588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:36.646625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:36.646670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:36.646708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:36.647862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:36.647930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:36.647963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:18:36.648001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:18:36.648034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:36.648106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:18:36.650579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:18:36.650971Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:18:36.652834Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T18:18:36.670408Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T18:18:36.672216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:36.672528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-17T18:18:36.672630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-04-17T18:18:36.672658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-04-17T18:18:36.675693Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:18:36.677670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:36.677799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-04-17T18:18:36.678137Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-17T18:18:36.680381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: 
"embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:36.680643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-17T18:18:36.680736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-17T18:18:36.680769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-17T18:18:36.682279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:36.682377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 >> TNodeBrokerTest::NodeNameExpiration >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:14.265152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:14.265250Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:14.265287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:14.265323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:14.266257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:14.266318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:14.266387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:14.266484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:14.267788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:14.341276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:14.341329Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:14.355933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:14.356698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:14.356931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:14.368960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:14.369711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:14.372701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:14.373091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:14.380403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:14.386756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:14.386835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:14.387507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:14.387585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:14.387671Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:14.387878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:14.395354Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:14.501752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:14.502805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:14.503545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:14.504531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:14.504608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:14.507198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:14.507319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:14.507562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:14.507702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:14.507758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:14.507813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:14.509754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:14.509814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:14.509862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:14.511600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:14.511648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:14.511683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
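[editor's note] The traces above and below repeat schemeshard's standard suboperation lifecycle for every transaction in this log: TCreateParts (state 2 -> 3, here with no shards to create) -> NSubDomainState::TConfigureParts (3 -> 128) -> NSubDomainState::TPropose, which sends the propose to coordinator 72057594046316545; once FAKE_COORDINATOR delivers the plan step, TEvOperationPlan moves the operation 128 -> 240 (TDone) and the result is published via TTxPublishToSchemeBoard. In short: 2 -> 3 -> 128 -> [coordinator plan step] -> 240 -> publish. Keeping this chain in mind makes the interleaved DEBUG/INFO lines below much easier to follow.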
2025-04-17T18:18:14.511722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:14.515645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:14.517512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:14.518322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:14.519478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:14.519623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:14.519685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:14.520832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:14.520921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:14.521160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:14.521262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:14.523851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:14.523901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:14.524055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:14.524092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:14.524435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:14.524508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:14.524616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:14.524653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:14.524697Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:14.524738Z no ... perationId: 1003:2, at schemeshard: 72057594046678944 2025-04-17T18:18:37.466427Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:37.466588Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T18:18:37.466665Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-17T18:18:37.466702Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-17T18:18:37.466726Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-17T18:18:37.466746Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-17T18:18:37.466768Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-17T18:18:37.468138Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.468197Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.468217Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-17T18:18:37.468333Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.468375Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.468393Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-17T18:18:37.468415Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-17T18:18:37.468435Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:18:37.468483Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-17T18:18:37.469278Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:18:37.469316Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:37.469483Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:18:37.469555Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-17T18:18:37.469576Z node 26 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-17T18:18:37.469603Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-17T18:18:37.469626Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-17T18:18:37.469647Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-17T18:18:37.469670Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-17T18:18:37.469697Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-17T18:18:37.469718Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-17T18:18:37.469784Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:18:37.469811Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-17T18:18:37.469828Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-17T18:18:37.469851Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T18:18:37.469869Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-17T18:18:37.469885Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-17T18:18:37.469915Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-17T18:18:37.470219Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.473209Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.473350Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.473377Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.473434Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.474928Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:18:37.476522Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 111669152023 } TabletId: 72075186233409546 State: 4 2025-04-17T18:18:37.476589Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-17T18:18:37.477717Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:18:37.478045Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-04-17T18:18:37.478192Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:18:37.478363Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-04-17T18:18:37.480199Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:18:37.480231Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-17T18:18:37.480273Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T18:18:37.480303Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T18:18:37.480328Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:18:37.482235Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:18:37.482279Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-17T18:18:37.482526Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-17T18:18:37.482677Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-17T18:18:37.482718Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-17T18:18:37.483298Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-17T18:18:37.483352Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-17T18:18:37.483377Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:628:2554] 2025-04-17T18:18:37.486798Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 111669152026 } TabletId: 72075186233409547 State: 4 2025-04-17T18:18:37.486850Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-17T18:18:37.488026Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:18:37.488298Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2025-04-17T18:18:37.488413Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:37.488571Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-04-17T18:18:37.490513Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:18:37.490544Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:18:37.490589Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:37.492497Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:18:37.492551Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-17T18:18:37.492900Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-17T18:18:37.493116Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-17T18:18:37.493164Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] >> TVectorIndexTests::CreateTableMultiColumn >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] >> TNodeBrokerTest::NodeNameExpiration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-04-17T18:18:38.131345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:38.131421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:38.147654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> TVectorIndexTests::CreateTableMultiColumn [GOOD] |94.2%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:41.297138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:41.297199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:41.297232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:41.297286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:41.297327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:41.297347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:41.297384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:41.297430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:41.297675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:41.353484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:41.353521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:41.363913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:41.364203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:41.364324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:41.370083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:41.370240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:41.370634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.370779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:41.372776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.373621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:41.373687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.373818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-04-17T18:18:41.373852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:41.373876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:41.373958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.378323Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:41.461396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:41.461569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.461714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:41.461885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:41.461921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.463467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.463571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:41.463716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.463751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:41.463775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:41.463813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:41.465082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.465120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:41.465142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:41.466222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.466255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.466299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
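[editor's note] TVectorIndexTests::CreateTableMultiColumn creates a table keyed by (id1, id2) with a covering vector index, which schemeshard materializes as the indexImplLevelTable/indexImplPostingTable pair described further down in this output. For contrast with the StatusInvalidParameter rejection at the top of this log stretch (where the table key id was also listed in DataColumnNames), here is a text-proto sketch of the accepted index description; the field shape is copied from the DescribePath output below, with comments added:

IndexDescription {
  Name: "idx_vector"
  KeyColumnNames: "embedding"    # column the k-means tree is built over
  Type: EIndexTypeGlobalVectorKmeansTree
  DataColumnNames: "covered1"    # covered columns must not repeat any key column,
  DataColumnNames: "covered2"    # including the table's primary key (the earlier rejection reused id)
  VectorIndexKmeansTreeDescription {
    Settings {
      settings {
        metric: DISTANCE_COSINE
        vector_type: VECTOR_TYPE_FLOAT
        vector_dimension: 1024
      }
    }
  }
}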
2025-04-17T18:18:41.466342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.468843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:41.470005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:41.470154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:41.470802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.470882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:41.470913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.471079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:41.471112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.471228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:41.471276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:41.472434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:41.472472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:41.472571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.472594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:41.472822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.472847Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:41.472913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:41.472933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.472957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:41.472976Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.473002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:41.473024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.473044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:41.473062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:41.473095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:41.473119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:41.473141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:41.474225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:41.474296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:41.474319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { 
GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" 
LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:41.729835Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:41.729983Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 165us result status StatusSuccess 2025-04-17T18:18:41.730268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:41.730722Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:41.730874Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 153us result status StatusSuccess 2025-04-17T18:18:41.731117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:44.796203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:44.796263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:44.796298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:44.796327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:44.796358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:44.796392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:44.796434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:44.796483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:44.796727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:44.855173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:44.855211Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:44.866655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:44.866946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:44.867101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:44.873941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:44.874113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:44.874733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:44.874904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:44.877323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:44.878336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:44.878404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:44.878602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:44.878663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:44.878742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-17T18:18:44.878858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:44.883852Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:44.989698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:44.989892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:44.990069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:44.990302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:44.990345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:44.992049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:44.992165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:44.992317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:44.992352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:44.992379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:44.992425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:44.993712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:44.993751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:44.993777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:44.994981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:44.995023Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:44.995065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:44.995113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:44.997635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:44.998963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:44.999095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:44.999795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:44.999886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:44.999930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:45.000135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:45.000173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:45.000322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:45.000387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:45.001789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:45.001827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:45.001969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:45.001997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:45.002298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:45.002334Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:45.002419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:45.002445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:45.002477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:45.002499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:45.002527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:45.002559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T18:18:45.002589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:45.002610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:45.002651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:45.002677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:45.002718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:45.003954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:45.004042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:45.004071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 8:18:45.328564Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:18:45.328584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:18:45.329135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.329221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.329240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:45.329435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.329488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.329501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:45.329517Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-17T18:18:45.329534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:45.329765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.329888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.329916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:45.330956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.331052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.331084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:45.331752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.331823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.331842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:45.331860Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-17T18:18:45.331878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-17T18:18:45.332093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.332155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:18:45.332172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:18:45.332188Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-17T18:18:45.332205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T18:18:45.332248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/4, is published: true 2025-04-17T18:18:45.333753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-04-17T18:18:45.333794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:45.334046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:18:45.334139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-04-17T18:18:45.334166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-04-17T18:18:45.334219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-04-17T18:18:45.334246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-04-17T18:18:45.334272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-17T18:18:45.334951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.335076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-17T18:18:45.335100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:45.335237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-17T18:18:45.335285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-17T18:18:45.335303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-17T18:18:45.335337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-17T18:18:45.335361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-17T18:18:45.335387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-17T18:18:45.335602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-17T18:18:45.335625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:45.335744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:18:45.335794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-17T18:18:45.335810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-17T18:18:45.335827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-17T18:18:45.335842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-17T18:18:45.335858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-17T18:18:45.335922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:411:2368] message: TxId: 102 2025-04-17T18:18:45.335956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-17T18:18:45.335992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:18:45.336017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:18:45.336081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:18:45.336106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-17T18:18:45.336120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-17T18:18:45.336141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:18:45.336163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-17T18:18:45.336178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-17T18:18:45.336215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T18:18:45.336234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-17T18:18:45.336245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-17T18:18:45.336280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-17T18:18:45.336477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.336522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.336593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.336612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.336627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.337823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.337874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-17T18:18:45.339249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:18:45.339286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:526:2476] TestWaitNotification: OK eventTxId 102 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-04-17T18:17:09.857940Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:09.953708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:09.977513Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:09.977840Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:09.985581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:09.985782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:09.986018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:09.986172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:09.986328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:09.986443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:09.986558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:09.986669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:09.986776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:09.986872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:09.986962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:09.987061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:10.011942Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:10.012072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:10.012119Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-04-17T18:17:10.012244Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:10.012390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:10.012504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:10.012536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:10.012597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:10.012638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:10.012665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:10.012681Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:10.012814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:10.012857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:10.012882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:10.012915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:10.012978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:10.013012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:10.013045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:10.013073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:10.013140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:10.013168Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:10.013188Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:10.013220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:10.013251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:10.013269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:10.013595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=74; 2025-04-17T18:17:10.013681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:17:10.013766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-17T18:17:10.013844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T18:17:10.014021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:10.014075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:10.014118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:10.014338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:10.014387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:10.014441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:10.014593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:10.014658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:10.014689Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:10.014920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:10.014975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:10.015015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:10.015155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:10.015199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:10.015246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 6;method=PutObject;id=[9437184:4:1:255:117:2768:0]; 2025-04-17T18:18:45.690435Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:118:2768:0]; 2025-04-17T18:18:45.690473Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:119:2768:0]; 2025-04-17T18:18:45.690512Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:120:2768:0]; 2025-04-17T18:18:45.690549Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:121:2768:0]; 2025-04-17T18:18:45.690593Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:122:2768:0]; 2025-04-17T18:18:45.690658Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:123:2768:0]; 2025-04-17T18:18:45.690726Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:124:2768:0]; 2025-04-17T18:18:45.690793Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:125:2768:0]; 2025-04-17T18:18:45.690858Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:126:2768:0]; 2025-04-17T18:18:45.690916Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:127:2768:0]; 2025-04-17T18:18:45.690969Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:128:2696:0]; 2025-04-17T18:18:45.691024Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:129:2696:0]; 2025-04-17T18:18:45.691076Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:130:2696:0]; 2025-04-17T18:18:45.691131Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:131:2696:0]; 2025-04-17T18:18:45.691191Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:132:8920:0]; 2025-04-17T18:18:45.691236Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:133:2776:0]; 2025-04-17T18:18:45.691272Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:134:2776:0]; 2025-04-17T18:18:45.691311Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:135:2776:0]; 2025-04-17T18:18:45.691350Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:136:2776:0]; 2025-04-17T18:18:45.691420Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:137:2776:0]; 2025-04-17T18:18:45.691484Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:138:2768:0]; 2025-04-17T18:18:45.691529Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:139:2768:0]; 2025-04-17T18:18:45.691575Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:140:2768:0]; 2025-04-17T18:18:45.691620Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:141:2768:0]; 2025-04-17T18:18:45.691665Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:142:2768:0]; 2025-04-17T18:18:45.691727Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:143:2768:0]; 2025-04-17T18:18:45.691768Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:144:2768:0]; 2025-04-17T18:18:45.691807Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:145:2768:0]; 2025-04-17T18:18:45.691857Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:146:2768:0]; 2025-04-17T18:18:45.691905Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:147:2768:0]; 2025-04-17T18:18:45.691952Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:148:2768:0]; 2025-04-17T18:18:45.692008Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:149:2768:0]; 2025-04-17T18:18:45.692052Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:150:2768:0]; 2025-04-17T18:18:45.692089Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:151:2768:0]; 2025-04-17T18:18:45.692127Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:152:2768:0]; 2025-04-17T18:18:45.692174Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:153:2768:0]; 2025-04-17T18:18:45.692236Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:154:2768:0]; 2025-04-17T18:18:45.692283Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:155:2768:0]; 2025-04-17T18:18:45.692330Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:156:2768:0]; 2025-04-17T18:18:45.692386Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:157:2768:0]; 2025-04-17T18:18:45.692429Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:158:2768:0]; 2025-04-17T18:18:45.692467Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:159:2768:0]; 2025-04-17T18:18:45.692503Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:160:2768:0]; 2025-04-17T18:18:45.692550Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:161:2768:0]; 2025-04-17T18:18:45.692598Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:162:2768:0]; 2025-04-17T18:18:45.692643Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:163:2768:0]; 2025-04-17T18:18:45.692682Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:164:2768:0]; 2025-04-17T18:18:45.692721Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:165:2768:0]; 2025-04-17T18:18:45.692766Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:166:2768:0]; 2025-04-17T18:18:45.692809Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:167:2768:0]; 2025-04-17T18:18:45.692845Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:168:2768:0]; 2025-04-17T18:18:45.692880Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:169:2768:0]; 2025-04-17T18:18:45.692931Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:170:2768:0]; 2025-04-17T18:18:45.692991Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:171:2768:0]; 2025-04-17T18:18:45.693056Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:172:2696:0]; 2025-04-17T18:18:45.693095Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:173:2696:0]; 2025-04-17T18:18:45.693131Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:174:2696:0]; 2025-04-17T18:18:45.693168Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:175:2696:0]; 2025-04-17T18:18:45.693206Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:176:8904:0]; 2025-04-17T18:18:46.146849Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:18:46.147444Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-04-17T18:18:46.213220Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-04-17T18:18:46.219610Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2213112;raw_bytes=2475629;count=1;records=26059} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100101848;raw_bytes=103700153;count=42;records=1100341} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:18:46.520878Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-17T18:18:46.520954Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;fline=with_appended.cpp:65;portions=47,;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd; 2025-04-17T18:18:46.521620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::65ea5d3c-1bb811f0-a2eff308-a76ec1dd; 2025-04-17T18:18:46.521710Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:1;path_id:1;size:7587944;portions_count:47;); 2025-04-17T18:18:46.521762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:18:46.521827Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:18:46.521890Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=8; 2025-04-17T18:18:46.521964Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744913533199;tx_id=18446744073709551615;;current_snapshot_ts=1744913831544; 2025-04-17T18:18:46.522005Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:18:46.522044Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:18:46.522078Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:18:46.522141Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.945000s; 2025-04-17T18:18:46.522198Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:18:46.522357Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-04-17T18:18:46.523578Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=39;external_task_id=65ea5d3c-1bb811f0-a2eff308-a76ec1dd;mem=11009198;cpu=0; 2025-04-17T18:18:46.524446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> TVectorIndexTests::CreateTablePrefixInvalidKeyType >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:18:50.416104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:50.416168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:50.416205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:50.416234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:50.416280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:50.416306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:50.416347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:50.416398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:50.416656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:50.492796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:18:50.492841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:50.504893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:50.505290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:50.505460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:50.511982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:50.512133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:50.512549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:50.512683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:50.515156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:50.516058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:50.516108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:50.516274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:50.516307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:50.516337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-04-17T18:18:50.516417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.521258Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:18:50.625601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:50.625796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.625945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:50.626157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:50.626225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.628081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:50.628193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:50.628336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.628385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:50.628412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:50.628449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:50.629784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.629825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:50.629850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:50.631034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.631076Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.631121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:50.631170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:50.633630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:50.634939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:50.635081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:50.635739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:50.635830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:50.635864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:50.636055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:50.636093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:50.636231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:50.636284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:50.637605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:50.637639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:50.637761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:50.637788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:50.638047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.638082Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:50.638162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:50.638232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:50.638272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:50.638302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:50.638333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:18:50.638360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-17T18:18:50.638385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:18:50.638408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:18:50.638451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:18:50.638479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:18:50.638502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:18:50.644237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:50.644331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:18:50.644370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-17T18:18:50.644410Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-17T18:18:50.644444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:50.644529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-17T18:18:50.646877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-17T18:18:50.647277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-17T18:18:50.648857Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-17T18:18:50.662226Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-17T18:18:50.664162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "prefix" Type: "Float" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "prefix" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "covered" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:50.664496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-17T18:18:50.664644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Column 'prefix' has wrong key type 
Float for being key, at schemeshard: 72057594046678944 2025-04-17T18:18:50.664688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-04-17T18:18:50.665287Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:18:50.667417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Column \'prefix\' has wrong key type Float for being key" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:50.667559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-04-17T18:18:50.668000Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-17T18:18:50.668149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-17T18:18:50.668175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-17T18:18:50.668524Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-17T18:18:50.668584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-17T18:18:50.668624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:284:2275] TestWaitNotification: OK eventTxId 101 2025-04-17T18:18:50.668990Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:50.669162Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector" took 155us result status StatusPathDoesNotExist 2025-04-17T18:18:50.669310Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/vectors/idx_vector\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/vectors/idx_vector" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage >> Sharding::XXUsage [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 16905571065975338505 14592766786636977978 10597933995135613958 15334158381433026153 2414355303028126620 1604521797658930685 59531885112645707 5181051140278062952 12095257176258437625 14702887649188500610 14754864880953537561 8353028844674523673 12216027386027551636 7615703581287363624 9712085371794135814 13548374632607787743 9420659274725174671 6599182209307316864 2806428968916106402 5117093127057095427 8793464060190139421 8995821020955057646 5393654241957202115 14172377443832141675 5899895533893082457 1371813790080364875 2846041600958268129 2326545666919090659 2670847412502753606 12827181184759618591 2744517546871237796 15649027459853754310 299826908509416628 1554215224901205287 15380358210427549988 863433366801249133 4697811166722603484 13218349753748696744 608694119154652660 10315845180773963379 7451285447589077808 4810921139992638669 1506506436899238935 2363380363734440517 16044529756293720594 11659058412656232995 13835866331986072486 8023843744655093936 11432095472662544114 5421997421353358932 1750302349509622455 7889139431661851263 10707539132018163981 10677123266430587137 17893953646068600948 4451011576873216635 14857910399090981497 169710255518474066 12748643298571138022 17577885941399802511 10061786662359270501 14821543075220545290 17867950673569694841 12690156697891955462 12684202463124341898 2255250109815411240 4289775854269145225 3310016106730090875 3557997367860100306 16636889046522820771 11613487667212403472 12819024432987473480 3890564765178812175 10909993512662838389 16935781879110002295 12028742958380884844 17437931119496078486 18306214238970621706 6510308594122233701 17066712136179028131 2306311918905434464 14327396106667095610 9909847234144134128 8881749777558388287 5675824324692563595 14332767234060644807 170441599579081727 1830564956828156175 8014065826917930302 14099775489606687582 >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-04-17T18:17:14.963583Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:15.039094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:15.059834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:15.060153Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:15.066827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:15.066973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:15.067134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:15.067212Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:15.067313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:15.067377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:15.067449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:15.067513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:15.067581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:15.067637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:15.067699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:15.067757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:15.086395Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:15.086506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:15.086549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:15.086687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:15.086844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:15.086901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:15.086928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:15.087018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-17T18:17:15.087079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:15.087117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:15.087143Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:15.087292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:15.087363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:15.087402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:15.087422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:15.087495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:15.087545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:15.087597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:15.087632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:15.087712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:15.087742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:15.087767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:15.087800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:15.087835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:15.087864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:15.088199Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-17T18:17:15.088268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-04-17T18:17:15.088339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-17T18:17:15.088423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-17T18:17:15.088576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:15.088643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:15.088680Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:15.088849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:15.088888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:15.088911Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:15.089041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:15.089079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:15.089102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:15.089267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:15.089301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:15.089324Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:15.089436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:15.089471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:15.089520Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 2.428280Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-04-17T18:18:52.428346Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-04-17T18:18:52.428401Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-04-17T18:18:52.428455Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-04-17T18:18:52.428509Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-04-17T18:18:52.428565Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-04-17T18:18:52.428624Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-04-17T18:18:52.428685Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-04-17T18:18:52.428758Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-04-17T18:18:52.428828Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-04-17T18:18:52.428889Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-04-17T18:18:52.428961Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-04-17T18:18:52.429027Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-04-17T18:18:52.429088Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-04-17T18:18:52.429145Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-04-17T18:18:52.429203Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-04-17T18:18:52.429308Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-04-17T18:18:52.429395Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-04-17T18:18:52.429457Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-04-17T18:18:52.429521Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-04-17T18:18:52.429587Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-04-17T18:18:52.429647Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-04-17T18:18:52.429719Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-04-17T18:18:52.429785Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-04-17T18:18:52.429846Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-04-17T18:18:52.429904Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-04-17T18:18:52.429962Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-04-17T18:18:52.430026Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-04-17T18:18:52.430084Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-04-17T18:18:52.430144Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-04-17T18:18:52.430236Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-04-17T18:18:52.430309Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-04-17T18:18:52.430373Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-04-17T18:18:52.430430Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-04-17T18:18:52.430489Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-04-17T18:18:52.430558Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-04-17T18:18:52.430622Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-04-17T18:18:52.430687Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-04-17T18:18:52.430778Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-04-17T18:18:52.430852Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-04-17T18:18:52.430921Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-04-17T18:18:52.430986Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-04-17T18:18:52.431051Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-04-17T18:18:52.431116Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-04-17T18:18:52.431183Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-04-17T18:18:52.431245Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-04-17T18:18:52.431319Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-04-17T18:18:52.431393Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-04-17T18:18:52.431463Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-04-17T18:18:52.431530Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-04-17T18:18:52.431592Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-04-17T18:18:52.431660Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-04-17T18:18:52.431722Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-04-17T18:18:52.431784Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-04-17T18:18:52.431866Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-04-17T18:18:52.431941Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-04-17T18:18:52.432010Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-04-17T18:18:52.432072Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-04-17T18:18:52.432133Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-04-17T18:18:52.432200Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-04-17T18:18:52.432262Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-04-17T18:18:52.432326Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-04-17T18:18:52.432401Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-04-17T18:18:52.432474Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-04-17T18:18:52.432545Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-04-17T18:18:52.432611Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-04-17T18:18:52.432674Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-04-17T18:18:52.432736Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-04-17T18:18:52.432799Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-04-17T18:18:52.432859Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-04-17T18:18:52.432933Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-04-17T18:18:52.433005Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-04-17T18:18:52.433072Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-04-17T18:18:52.433134Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-04-17T18:18:52.433196Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-04-17T18:18:52.433256Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-04-17T18:18:52.433315Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-04-17T18:18:52.433374Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-04-17T18:18:52.433445Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-04-17T18:18:52.433513Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-04-17T18:18:52.433584Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-04-17T18:18:52.433650Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-04-17T18:18:52.433710Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-04-17T18:18:52.433775Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-04-17T18:18:52.804646Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:18:52.805346Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-04-17T18:18:52.873974Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-04-17T18:18:52.879806Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest
|94.3%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|94.3%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ...
results_accumulator.log}
>> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TColumnShardTestReadWrite::ReadGroupBy [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
>> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-04-17T18:17:17.420854Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:17.501732Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:17.516322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:17.516544Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:17.523761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:17.523972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:17.524209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:17.524333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:17.524445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:17.524554Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:17.524658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:17.524785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:17.524906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:17.525014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:17.525109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:17.525231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:17.547984Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:17.548120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:17.548168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:17.548359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:17.548519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:17.548606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:17.548646Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:17.548742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:17.548811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:17.548849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:17.548888Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:17.549066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:17.549140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:17.549180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:17.549218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:17.549310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:17.549364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:17.549408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:17.549436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:17.549521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:17.549571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:17.549602Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:17.549648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:17.549686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:17.549721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:17.550089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2025-04-17T18:17:17.550171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:17:17.550260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:17:17.550333Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-17T18:17:17.550519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:17.550586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:17.550630Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:17.550810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:17.550864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:17.550896Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:17.551075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:17.551118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:17.551147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:17.551316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:17.551369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:17.551401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:17.551550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:17.551592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:17.551631Z node 1 :TX_COLUMNS ... 
en=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:18:56.739402Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:18:56.739428Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2052; 2025-04-17T18:18:56.739460Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=2052; 2025-04-17T18:18:56.739487Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:18:56.739555Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.739579Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-17T18:18:56.739604Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:18:56.739932Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:18:56.740033Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.740068Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:18:56.740164Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);columns=4;rows=1; 2025-04-17T18:18:56.740223Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-04-17T18:18:56.740321Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[54:432:2450];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-04-17T18:18:56.740430Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.740521Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.740606Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.740863Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:18:56.740971Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.741053Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.741079Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: Scan [54:433:2451] finished for tablet 9437184 2025-04-17T18:18:56.741458Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[54:432:2450];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744913936730565,"name":"_full_task","f":1744913936730565,"d_finished":0,"c":0,"l":1744913936741112,"d":10547},"events":[{"name":"bootstrap","f":1744913936730701,"d_finished":1676,"c":1,"l":1744913936732377,"d":1676},{"a":1744913936740842,"name":"ack","f":1744913936739917,"d_finished":711,"c":1,"l":1744913936740628,"d":981},{"a":1744913936740828,"name":"processing","f":1744913936732428,"d_finished":5246,"c":10,"l":1744913936740630,"d":5530},{"name":"ProduceResults","f":1744913936731638,"d_finished":2006,"c":13,"l":1744913936741068,"d":2006},{"a":1744913936741070,"name":"Finish","f":1744913936741070,"d_finished":0,"c":0,"l":1744913936741112,"d":42},{"name":"task_result","f":1744913936732437,"d_finished":4444,"c":9,"l":1744913936739639,"d":4444}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.741528Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[54:432:2450];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:18:56.741834Z node 54 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[54:432:2450];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1744913936730565,"name":"_full_task","f":1744913936730565,"d_finished":0,"c":0,"l":1744913936741562,"d":10997},"events":[{"name":"bootstrap","f":1744913936730701,"d_finished":1676,"c":1,"l":1744913936732377,"d":1676},{"a":1744913936740842,"name":"ack","f":1744913936739917,"d_finished":711,"c":1,"l":1744913936740628,"d":1431},{"a":1744913936740828,"name":"processing","f":1744913936732428,"d_finished":5246,"c":10,"l":1744913936740630,"d":5980},{"name":"ProduceResults","f":1744913936731638,"d_finished":2006,"c":13,"l":1744913936741068,"d":2006},{"a":1744913936741070,"name":"Finish","f":1744913936741070,"d_finished":0,"c":0,"l":1744913936741562,"d":492},{"name":"task_result","f":1744913936732437,"d_finished":4444,"c":9,"l":1744913936739639,"d":4444}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-17T18:18:56.741892Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:18:56.730164Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-17T18:18:56.741928Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:18:56.742154Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD]
Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is
[0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:20.893756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:20.893830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:20.893858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:20.893884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:20.893914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:20.893945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:20.893984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:20.894058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:20.894342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:20.971623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:20.971678Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:20.983284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:20.983914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:20.984077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:20.994274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:20.994885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:20.995462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:20.995658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-04-17T18:18:20.998637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:21.000154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:21.000215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:21.000410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:21.000453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:21.000492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:21.000631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:21.007524Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:21.136880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:21.137096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:21.137285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:21.137512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:21.137567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:21.139587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:21.139713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:21.139923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:21.139983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:21.140032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:21.140061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:21.141698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:21.141743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:21.141773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:21.143270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:21.143335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:21.143373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:21.143422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:21.146949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:21.148556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:21.148727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:21.149611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:21.149734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:21.149779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:21.150000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:21.150070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:21.150248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:21.150330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:21.152271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:21.152309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:21.152451Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:21.152510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:21.152862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:21.152918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:21.153002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:21.153036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:21.153066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:21.153111Z no ... 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:57.479190Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:18:57.479431Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 267us result status StatusSuccess 2025-04-17T18:18:57.480241Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: 
false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 
0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:57.491253Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:811:2626] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:18:57.491363Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:731:2626] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:18:57.491529Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:811:2626] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744913937464120 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744913937464120 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744913937464120 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:18:57.493752Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:811:2626] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:18:57.493836Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:731:2626] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-17T18:18:57.795667Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:57.804500Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:57.804908Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T18:18:57.804967Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:57.805023Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T18:18:57.805093Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:57.805147Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.805221Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T18:18:57.806196Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:258:2250], now have 1 active actors on pipe 2025-04-17T18:18:57.806320Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:57.825546Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.827930Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.828047Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.831226Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.831414Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:57.833706Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:57.834874Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:266:2256] 2025-04-17T18:18:57.837306Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-17T18:18:57.837940Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:266:2256] 2025-04-17T18:18:57.838003Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:57.838055Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:57.841845Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:269:2258], now have 1 active actors on pipe 2025-04-17T18:18:57.902702Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:57.907660Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:57.908054Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T18:18:57.908115Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:57.908168Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T18:18:57.908221Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:57.908293Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.908376Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:18:57.909238Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2025-04-17T18:18:57.909390Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:57.909616Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:18:57.912237Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:18:57.912388Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.913328Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:18:57.913476Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:57.913925Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:57.914208Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:409:2362] 2025-04-17T18:18:57.916913Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T18:18:57.916989Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:409:2362] 2025-04-17T18:18:57.917062Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:57.917121Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:57.918040Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:412:2364], now have 1 active actors on pipe 2025-04-17T18:18:57.920287Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:418:2367], now have 1 active actors on pipe 2025-04-17T18:18:57.920803Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:420:2368], now have 1 active actors on pipe 2025-04-17T18:18:57.920985Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:18:57.921328Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:18:57.922684Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:418:2367] destroyed 2025-04-17T18:18:57.924524Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:420:2368] destroyed 2025-04-17T18:18:58.411047Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:58.415303Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:58.415717Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T18:18:58.415788Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:58.415842Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T18:18:58.415895Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:58.415949Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.416028Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T18:18:58.416875Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:260:2252], now have 1 active actors on pipe 2025-04-17T18:18:58.416961Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:58.417198Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:58.419974Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:58.420134Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.420925Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 L ... txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:18:58.565145Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:58.565575Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:58.565839Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:530:2453] 2025-04-17T18:18:58.568473Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-17T18:18:58.568573Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:530:2453] 2025-04-17T18:18:58.568645Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:58.568725Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:58.569698Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:533:2455], now have 1 active actors on pipe 2025-04-17T18:18:58.571095Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:539:2458], now have 1 active actors on pipe 2025-04-17T18:18:58.571490Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:18:58.571651Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:540:2459], now have 1 active actors on pipe 2025-04-17T18:18:58.572046Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:18:58.572139Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:541:2459], now have 1 active actors on pipe 2025-04-17T18:18:58.572390Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:18:58.583636Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:549:2466], now have 1 active actors on pipe 2025-04-17T18:18:58.612664Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:58.615679Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:58.616083Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T18:18:58.616147Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:58.616318Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:58.617241Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.617310Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:18:58.617447Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:58.617878Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:58.618152Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:606:2511] 2025-04-17T18:18:58.620829Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T18:18:58.622533Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-17T18:18:58.622947Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T18:18:58.623426Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T18:18:58.623812Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-17T18:18:58.623868Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T18:18:58.623920Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:18:58.623968Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T18:18:58.624023Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:606:2511] 2025-04-17T18:18:58.624111Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:58.624183Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-17T18:18:58.625299Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:540:2459] destroyed 2025-04-17T18:18:58.625394Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:539:2458] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 77 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 77 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 91 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 91 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 
0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-17T18:18:57.929410Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:57.932562Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:57.932886Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T18:18:57.932943Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:57.932985Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T18:18:57.933051Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:57.933119Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.933196Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T18:18:57.933964Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-04-17T18:18:57.934112Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:57.949760Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.952001Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.952086Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.952686Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.952794Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start 
initializing step TInitConfigStep 2025-04-17T18:18:57.953081Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:57.953332Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:267:2257] 2025-04-17T18:18:57.954843Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-04-17T18:18:57.954886Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:267:2257] 2025-04-17T18:18:57.954921Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:57.954954Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:57.955574Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:270:2259], now have 1 active actors on pipe 2025-04-17T18:18:57.991168Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:57.994014Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:57.994293Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-17T18:18:57.994328Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:57.994353Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-17T18:18:57.994383Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:57.994422Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.994473Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-17T18:18:57.994994Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:405:2360], now have 1 active actors on pipe 2025-04-17T18:18:57.995072Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:57.995219Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.996965Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.997049Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:57.997809Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [3:99:2134] txId 
12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:18:57.997939Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:57.998309Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:57.998540Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:413:2366] 2025-04-17T18:18:58.000662Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-17T18:18:58.000709Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:413:2366] 2025-04-17T18:18:58.000759Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:58.000794Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:58.001304Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:416:2368], now have 1 active actors on pipe 2025-04-17T18:18:58.013375Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:58.016638Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:58.016900Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-17T18:18:58.016932Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:58.016959Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T18:18:58.016986Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:58.017038Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.017092Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T18:18:58.017556Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:465:2405], now have 1 active actors on pipe 2025-04-17T18:18:58.017623Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:58.017762Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:18:58.019492Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:18:58.019587Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.020191Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:18:58.020282Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:58.020566Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:58.020796Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:473:2411] 2025-04-17T18:18:58.021953Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T18:18:58.021999Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:473:2411] 2025-04-17T18:18:58.022035Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 b ... 38] doesn't have tx info 2025-04-17T18:18:58.601424Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:58.601470Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T18:18:58.601517Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:58.601577Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.601655Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T18:18:58.602403Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:461:2401], now have 1 active actors on pipe 2025-04-17T18:18:58.602567Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:58.602778Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 7(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:18:58.605166Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:18:58.605319Z node 4 :PERSQUEUE NOTICE: 
[PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.606029Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 7 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:18:58.606155Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:58.606525Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:58.606759Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:469:2407] 2025-04-17T18:18:58.608933Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T18:18:58.609001Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:469:2407] 2025-04-17T18:18:58.609066Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:58.609118Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:58.609908Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:472:2409], now have 1 active actors on pipe 2025-04-17T18:18:58.625978Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:58.629135Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:58.629367Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T18:18:58.629407Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:58.629447Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T18:18:58.629491Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:58.629541Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.629594Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:18:58.630204Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:521:2446], now have 1 active actors on pipe 2025-04-17T18:18:58.630269Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:18:58.630428Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:18:58.632159Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:18:58.632259Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.632896Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:18:58.632984Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:58.633217Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:58.633396Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:529:2452] 2025-04-17T18:18:58.634892Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-17T18:18:58.634951Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:529:2452] 2025-04-17T18:18:58.634997Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:58.635036Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:58.635609Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:532:2454], now have 1 active actors on pipe 2025-04-17T18:18:58.636787Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:540:2457], now have 1 active actors on pipe 2025-04-17T18:18:58.636858Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:541:2458], now have 1 active actors on pipe 2025-04-17T18:18:58.636931Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:542:2458], now have 1 active actors on pipe 2025-04-17T18:18:58.647769Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:547:2462], now have 1 active actors on pipe 2025-04-17T18:18:58.666704Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:18:58.669491Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:18:58.669817Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T18:18:58.669876Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:18:58.670024Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:18:58.670905Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:18:58.670960Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:18:58.671070Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:18:58.671379Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:18:58.671629Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:604:2507] 2025-04-17T18:18:58.673695Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T18:18:58.674994Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-17T18:18:58.675298Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T18:18:58.675624Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T18:18:58.675878Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-17T18:18:58.675926Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T18:18:58.675970Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-17T18:18:58.676012Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T18:18:58.676067Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:604:2507] 2025-04-17T18:18:58.676129Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:18:58.676184Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T18:18:58.677112Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:541:2458] destroyed 2025-04-17T18:18:58.677200Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:540:2457] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> BasicStatistics::TwoTables [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2025-04-17T18:16:43.247913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:43.248055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:43.248184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001526/r3tmp/tmpsINwOe/pdisk_1.dat 2025-04-17T18:16:43.639241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61367, node 1 2025-04-17T18:16:43.900144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:43.900197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:43.900224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:43.900896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:43.903072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:43.991493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:43.991608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:44.007249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11347 2025-04-17T18:16:44.605811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:48.036829Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:48.080268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:48.080381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:48.118979Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:48.121647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:48.401565Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.402065Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.402749Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.403094Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.403182Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.403264Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.403346Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.403439Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.403537Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.589151Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:48.589288Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:48.605997Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:48.779754Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:48.843979Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:48.844075Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:48.889120Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:48.891177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:48.891426Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:48.891487Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:48.891554Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:48.891620Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:48.891675Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:48.891732Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:48.894961Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:48.953986Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:48.954113Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:48.966515Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:16:48.974931Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:16:48.975085Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:16:48.992756Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:49.017596Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:49.017667Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:49.017759Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:49.030169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:49.044812Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:49.044992Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:49.297267Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:49.495606Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:49.564923Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:50.478275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.478624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.503782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:50.809072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2382:3109], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.809213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.810548Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2387:3113]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:50.810739Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:16:50.810828Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2389:3115] 2025-04-17T18:16:50.810890Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2389:3115] 2025-04-17T18:16:50.811363Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2390:2879] 2025-04-17T18:16:50.811633Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2389:3115], server id = [2:2390:2879], tablet id = 72075186224037894, status = OK 2025-04-17T18:16:50.811842Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2390:2879], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:16:50.811906Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:16:50.812122Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:16:50.812198Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2387:3113], StatRequests.size() = 1 2025-04-17T18:16:50.833593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2394:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.833685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.834090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2399:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.840294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:16:51.028276Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:16:51.028345Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:16:51.129096Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2389:3115], schemeshard count = 1 2025-04-17T18:16:51.418211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... t[ 1 ] 2025-04-17T18:18:50.825848Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:18:50.825885Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6698:4774], StatRequests.size() = 1 2025-04-17T18:18:51.549813Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:18:51.550287Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:18:51.550641Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:18:51.604257Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-04-17T18:18:51.604314Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 223.000000s, at schemeshard: 72075186224037897 2025-04-17T18:18:51.604455Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-04-17T18:18:51.617515Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:18:52.196816Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6731:4790]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:52.197091Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:18:52.197135Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6731:4790], StatRequests.size() = 1 2025-04-17T18:18:52.873219Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:52.873280Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:18:52.873322Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-17T18:18:52.873358Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T18:18:52.873671Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:18:52.885948Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:18:52.889936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6756:4811], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:52.890010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6766:4816], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:52.890146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:18:52.902363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:18:52.952729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6770:4819], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:18:53.142933Z node 2 :TX_PROXY ERROR: Actor# [2:6866:4865] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:18:53.197676Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6895:4880]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:53.198045Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:18:53.198111Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6895:4880], StatRequests.size() = 1 2025-04-17T18:18:53.334515Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjUxZjQyMWUtNjBmYWE0MTctYjk4ZWQxYzYtNjhmNjY2N2U=, TxId: 2025-04-17T18:18:53.334605Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjUxZjQyMWUtNjBmYWE0MTctYjk4ZWQxYzYtNjhmNjY2N2U=, TxId: 2025-04-17T18:18:53.335181Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:18:53.350259Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-17T18:18:53.350352Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:18:53.844758Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6927:4899]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:53.844928Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:18:53.844951Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6927:4899], StatRequests.size() = 1 2025-04-17T18:18:55.121517Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6968:4921]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:55.121732Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:18:55.121767Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6968:4921], StatRequests.size() = 1 2025-04-17T18:18:55.763785Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:18:55.774561Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:55.774618Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:18:55.774652Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:18:55.774694Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:18:55.774966Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:18:55.777492Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:18:55.815891Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjlhMTYwM2YtZTYzNzk5ZGMtYjkzMzhlMzEtMTcyY2U0NjA=, TxId: 2025-04-17T18:18:55.815949Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjlhMTYwM2YtZTYzNzk5ZGMtYjkzMzhlMzEtMTcyY2U0NjA=, TxId: 2025-04-17T18:18:55.816363Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:18:55.830046Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:18:55.830097Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:18:56.393857Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7034:4960]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:56.394020Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:18:56.394044Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7034:4960], StatRequests.size() = 1 2025-04-17T18:18:57.719134Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7077:4984]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:57.719452Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-17T18:18:57.719499Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7077:4984], StatRequests.size() = 1 2025-04-17T18:18:58.379249Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:18:58.379391Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:18:58.379779Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:18:58.390783Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:18:58.390833Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:18:58.390865Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T18:18:58.390892Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:18:58.391161Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:18:58.393737Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:18:58.402490Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yzg4NDc5YjctOTBhMmQwOTctZTM5MWVlZi1mNzRhNmMwYg==, TxId: 2025-04-17T18:18:58.402544Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yzg4NDc5YjctOTBhMmQwOTctZTM5MWVlZi1mNzRhNmMwYg==, TxId: 2025-04-17T18:18:58.402947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:18:58.416756Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:18:58.416805Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:18:58.994522Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7138:5018]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:58.994725Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-17T18:18:58.994768Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7138:5018], StatRequests.size() = 1 2025-04-17T18:18:58.995202Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7140:5020]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:58.997485Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2025-04-17T18:18:58.997543Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7140:5020], StatRequests.size() = 1 >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: 
Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] 
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] 
Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-17T18:19:04.103485Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:04.107128Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:04.107452Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T18:19:04.107510Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:04.107547Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T18:19:04.107597Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:04.107652Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:04.107720Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T18:19:04.108437Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:259:2251], now have 1 active actors on pipe 2025-04-17T18:19:04.108580Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:04.127116Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:04.129998Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:04.130153Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:04.131068Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:04.131256Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:04.131674Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:04.132045Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:267:2257] 2025-04-17T18:19:04.134703Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-17T18:19:04.134786Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2257] 2025-04-17T18:19:04.134853Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:04.134918Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:04.135700Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:270:2259], now have 1 active actors on pipe 2025-04-17T18:19:04.186070Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:04.189862Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:04.190158Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T18:19:04.190239Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:04.190321Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T18:19:04.190376Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:04.190424Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:04.190496Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:19:04.191040Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-04-17T18:19:04.191148Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:04.191294Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:04.193097Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:04.193204Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:04.193810Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:04.193902Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:04.194148Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:04.194338Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:413:2366] 2025-04-17T18:19:04.195701Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T18:19:04.195753Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:413:2366] 2025-04-17T18:19:04.195796Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:04.195832Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:04.196424Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-04-17T18:19:04.197370Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:422:2371], now have 1 active actors on pipe 2025-04-17T18:19:04.197721Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:424:2372], now have 1 active actors on pipe 2025-04-17T18:19:04.197998Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:422:2371] destroyed 2025-04-17T18:19:04.198235Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:424:2372] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: 
[1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:31.466709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:31.466789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:31.466823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:31.466855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:31.466902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:31.466944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:31.466998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:31.467087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:31.467397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:31.525720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:31.525781Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:31.535316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:31.535882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:31.536029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:31.544604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:31.545279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:31.545717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.545883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:31.548146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.549091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:31.549129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-17T18:18:31.549266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:31.549298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:31.549327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:31.549414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:31.554556Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:31.628743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:31.628902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.629051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:31.629196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:31.629237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.630976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.631087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:31.631261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.631313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:31.631347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:31.631377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:31.632806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.632845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:31.632868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:31.634257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.634305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.634340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.634398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:31.636745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:31.637984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:31.638145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:31.638791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.638897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:31.638925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.639086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:31.639126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.639239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:31.639284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:31.640729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:31.640757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:31.640863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.640886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:31.641138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-17T18:18:31.641180Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:31.641243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:31.641262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:31.641294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:31.641324Z no ... CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { 
CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-04-17T18:19:05.300957Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:05.301250Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 319us result status StatusSuccess 2025-04-17T18:19:05.302426Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: 
"scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-17T18:19:05.622064Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:05.624823Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:05.625050Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T18:19:05.625081Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:05.625112Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T18:19:05.625147Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:05.625178Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:05.625223Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T18:19:05.625669Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-17T18:19:05.625750Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:05.640675Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:05.642907Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:05.643018Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:05.643624Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:05.643770Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:05.644040Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:05.644288Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-17T18:19:05.645764Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
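Each PersQueue tablet in this trace accepts a config only when the proposed version is newer than the one it currently holds ("Config update version 2(current 0) received ... Config applied version 2"), then walks its partitions through the initializer steps. A small sketch of that version gate follows; `TPQConfigHolder` and `TryApply` are hypothetical names, not the real PersQueue classes, and the topic/version values are taken from the log.

```cpp
#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical holder; the real tablet keeps far more state
// (partitions, caches, tx info) than this sketch does.
struct TPQConfigHolder {
    uint64_t CurrentVersion = 0;
    std::string TopicName;

    // Mirrors "Config update version N(current M) received ... Config applied".
    bool TryApply(uint64_t newVersion, const std::string& topic) {
        if (newVersion <= CurrentVersion) {
            std::cout << "ignore stale config v" << newVersion
                      << " (current " << CurrentVersion << ")\n";
            return false;
        }
        CurrentVersion = newVersion;
        TopicName = topic;
        std::cout << "Config applied version " << newVersion
                  << " for topic '" << topic << "'\n";
        return true;
    }
};

int main() {
    TPQConfigHolder pq;
    pq.TryApply(2, "rt3.dc1--topic2"); // applied: current 0 -> 2
    pq.TryApply(1, "rt3.dc1--topic2"); // stale, ignored
}
```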
2025-04-17T18:19:05.645809Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-17T18:19:05.645853Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:05.645889Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:05.646528Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-17T18:19:05.683627Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:05.686933Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:05.687215Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-17T18:19:05.687256Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:05.687294Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T18:19:05.687326Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:05.687385Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:05.687455Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T18:19:05.688131Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:404:2359], now have 1 active actors on pipe 2025-04-17T18:19:05.688218Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:05.688383Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:19:05.690607Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:19:05.690717Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:05.691459Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:19:05.691554Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:05.691807Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:05.691948Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:412:2365] 2025-04-17T18:19:05.693162Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T18:19:05.693203Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:412:2365] 2025-04-17T18:19:05.693238Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:05.693266Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:05.693774Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:415:2367], now have 1 active actors on pipe 2025-04-17T18:19:05.708934Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:05.712756Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:05.713162Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T18:19:05.713223Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:05.713275Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-17T18:19:05.713322Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:05.713375Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:05.713450Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:19:05.714285Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:464:2404], now have 1 active actors on pipe 2025-04-17T18:19:05.714423Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:05.714627Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:05.717098Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:05.717264Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:05.718109Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:05.718235Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:05.718646Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:05.718903Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:472:2410] 2025-04-17T18:19:05.720546Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T18:19:05.720607Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:472:2410] 2025-04-17T18:19:05.720669Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:05.720709Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:05.721629Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:475:2412], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-04-17T18:19:05.736317Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:484:2415], now have 1 active actors on pipe 2025-04-17T18:19:05.736677Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:487:2416], now have 1 active actors on pipe 2025-04-17T18:19:05.736871Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:488:2416], now have 1 active actors on pipe 2025-04-17T18:19:05.737682Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:484:2415] destroyed 2025-04-17T18:19:05.738164Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [2:487:2416] destroyed 2025-04-17T18:19:05.738305Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:488:2416] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-04-17T18:18:58.934123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:18:58.934554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:18:58.934597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed5/r3tmp/tmpxSOx4P/pdisk_1.dat 2025-04-17T18:18:59.324820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:18:59.367590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:18:59.410076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:18:59.410697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:18:59.423393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:18:59.515706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:18:59.892205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-17T18:19:00.177832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:817:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:00.177974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:00.178052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:00.185701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:00.331497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:831:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:19:00.382966Z node 1 :TX_PROXY ERROR: Actor# [1:890:2723] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:01.041580Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2fkfpd0cj8yj55fhd3j3tm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjcwZTEyMmMtNjZiNTI3ODAtNzI3ZmE4MzUtNzNkZjNh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.144069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2fkgk526b2h48j7psqrxn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE0MjcwMjAtNGQ3MjcwNzYtNWRkYmYyOWUtZWEwM2RhZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.201583Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2fkgmydxfeyv2t4ndfvn5t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ4MTUyN2UtZGQ4OWNjZi0yOWE1MjA2LTQyOWFiMjgx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.255659Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2fkgpraz3sq134zz8y3bfs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjU4OThjNWItNTk1Y2JkZDItZmE5ZDFkN2EtY2EyYWMyMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.295229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2fkgrc9w4g5ycepm2tbk5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI1OWJiZTEtYzIwYTk4NWItMTJlZjk1YzMtMWU0ODY4N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.334339Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2fkgsk15va4j0y0n00syr6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNlZmU5ZDAtNDVmN2E1YmQtZDI3ODIyNmQtZWQzZTQ4ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.373177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2fkgtt3wgmc2gmh0bmr5se, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM4ZTNhYzItM2ExMjMwODQtZTkyZDk5ZmUtM2UyYjg5ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.411564Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2fkgw1f84knwybn2p3ykcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFjZjgyY2UtZDY4ZTY0OWQtNTI0MzVkZTgtYTU1MGQyZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.450349Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2fkgx87weq01rwgt2h6pqv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTgwNjQ3ZmYtYjcxM2E3NmItNTMxMDRiM2MtYzhmMzhjYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.489089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2fkgye9qkhfg3zyqfakp5x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJmMDQ0NGYtNmM2Mzg2NmItYzg1ZWIyZTAtMWExODcwNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.528519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2fkgzpb6fkw32yg1vxh2td, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FhMzJlZWItYzRiZDFmNDgtYTNlNTMzNDEtZjJjZDA5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.566651Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2fkh0w24hqj7x78hwz9hfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZjYWQyM2UtNTYwNWUzM2YtZTM4NDc4N2EtZTRlOTgyZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.605001Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2fkh23bezpxfd6d6npptt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ3MzVhYjItNDQ0ZjY1MzktMTVjNGM1YjUtZDc5NGFlMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.655492Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2fkh39a4g1qpa70p838dvm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZkMTNiOGQtMTkyNThhN2EtNjUyOWE5YzgtMmEwYjFkMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.693299Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01js2fkh4v891r5zdbyecp5jp8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ2ODAzOGItMTFkNGQ2OTItNTIwYjc1YjktMmE4ZjE4ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.762655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2fkh703154shfrg1vq57a8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYxNzYyODMtMmUyMWE5YTctOTBkN2EzNGEtMTdlNGY0MGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.801572Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2fkh86e0ayyts2jfhvbkrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFmNjI4ZDQtOGVlM2JkYzQtYjU5ZTAzNDEtZDE2Y2M2ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.839453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2fkh9e7cez6k5ns0chbg57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM3YWViNjQtZjMyZjAzODMtOWNlMGNlOTctNjY5YTEzZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.876931Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2fkhak8hvc8yc5ag0ygdra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc3MTE1NGMtZjc2NDgxNjktMmQ1YzFmNC00ZTk5MDUxMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.923246Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01js2fkhbsddby7vqqar1s36mk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhjNWNmZTctNzFkZDgwNDItZjU4YTdlNGEtNWI2ZGExMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.960107Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2fkhd7b39yfantr4ws6g7q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI4N2JkMS02YzI1Y2QwLTU0YTY4NjFmLTcyMTIyYzc1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:01.996031Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01js2fkhecbdpw8m3nyhmdzkfz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTBlYTU3ZTctYzNjMzhjZWItOTgxYTI0M2UtMWQ2YzgwNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:02.032670Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01js2fkhfg9trehv6qn71d7ms4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDcyMjUyMzItMjZlYTg5MjAtZWY5ZjIzMGMtODQyNzdmNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:02.070701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01js2fkhgnbzphj7hykms95zra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JhNmEzNzItMzZmNzg5ZGQtOWY3NzE3ZjctYWFmYmJmMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:02.116134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01js2fkhhwf0dj7zqvpn7b4dkk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTlmYjk2YjktYTg2YWRmZTUtZmRiYTA2Ny1jZTQzNGQ0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:02.152240Z node 1 :K ... 831614Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01js2fkk7q10v5sfmtd1s1qzdp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I0NWJmZTMtMTQ5OTFiYzYtNWQ3ZGRkODUtZDA5MjQ1OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:03.873009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01js2fkk8v3bm2z8vd1ae4r2pe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QwN2JiNTEtZTNkYzU2MTAtMzczOWI5Zi03YTlkMGM4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:03.912043Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01js2fkka5b5v29mntffvj503m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZjN2FkZDctNGE2OGNhMDUtNDMxYWU2YzktNTAxMTA0MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:03.950624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01js2fkkbdaqgqd0txxfhyc6vk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJhZTIyOWQtMTk2NGU0N2YtMmFjZTc4NWMtZjlmODgxYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:03.988013Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01js2fkkckbrrp91c6yv3f8ra2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU3MmJjNDMtZGY0Y2QxYmQtZGI5Y2U1YTgtZTA4MTA5YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.024421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01js2fkkdr2w5593tttkcn72cz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMxMDI1NDQtYjFlZTI0NS0xZGVlNzc5Yi1jMTQ3MWY3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.060868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01js2fkkew9hs77s1jth07nsyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2FmYmQ1YzgtMTFjODBmNzgtNjgxZWJlN2YtN2QwYWQ4YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.100625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01js2fkkg22bnhrrrmtp2p2rxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg5MmI1NzYtN2RlYjI5NC01Y2RiODE4OS04YTBkZDQ1Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.138811Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01js2fkkh9btvjknzredz3vb3d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlmOWMxODAtOTVkYzU2YzMtZGE5MDQ1OGMtYTM4MTkzMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.206840Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01js2fkkjg814h19yyrmmbbf4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM3YzI1YzAtN2Y0ZmIwYjgtYzFiMDNmMDQtNTNkYTZhYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.245057Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01js2fkkmkb6k6jx0jvmz1pq3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgwNDNjNDgtZmVlMGEzZGUtMTJhNzgwYTUtOTFhZTE2ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.283999Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01js2fkkns41zpxd4664f54a0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU4YmZkMjMtOTA3YWJlOWQtN2ZhOTFmNWYtNzUxNzEyOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.324467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01js2fkkq1d2a9x969hrfdw616, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRjMWU0OWYtZDYwNjEwMGItZTE2MThlMzItODNhODgwMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.362401Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01js2fkkr9cbjag07yt0jxmd34, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjllZTY1MmUtZjQ5YzAzOWUtYWI4Yzc5MDQtYzY1ZDAwZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.402988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01js2fkksf5dtwckzg5smevjy0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkzZTJmOGQtZDhjMTI3OTgtY2RiN2Y3NzMtMmUzMjgxMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.440369Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01js2fkktq9d6ch8p1xn7nx3f5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM2NWQ1MjYtNzAwODhkNzAtMjFjYTFiNTUtODI5Zjk0ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.479076Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01js2fkkvx6705098x1x45jh6m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTcyYmNlYjYtZDVjNjQwNzctNzVmNDVkODItMmNkYWU4NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.518302Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01js2fkkx37rj59rgbcaagcpxv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQ0MzgxMGUtZDJkZTNhMWMtNzUxZWVmODgtMWNlOGIxNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.557364Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01js2fkkyb81vyr2agq7qchy66, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBmZDYwNjUtMzkzZmIxNjctMzRlMDc5YjItMTg3NzdlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.604169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01js2fkkzje341e0nze07sffxm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZmZDBiYTctNTFkOWQ5ZGEtOWIwMDllZTYtZDdmNjEzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.642727Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01js2fkm101vc50bbf1ryhj071, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRhNDE4ZTItNDM3OTVjZTEtNmMxYTI3NDktYTA4OThjNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.681654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01js2fkm276g10f51fbdqk099j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUwN2Q0MzUtNDJkYmY5YTctMzIzODkxZTQtYWVmZThhYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.734643Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01js2fkm3w9gd69ccsf3y1fnz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRmNzQwYzctOWNlNzFiYzgtMjYwYmJkYjktOTkwYTBkY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.772754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01js2fkm533ac59dphzrxq47c6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMwOTQ0YzAtNGYxYmE2YjctZmE1ZmNhNDctNTIwMjQ1OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.811477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01js2fkm69eyahks0gs01jnb1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZlNjA3YWYtNDcwZTlkNjItNmE2N2ZhZWUtNGFiNmQ4Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.851219Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01js2fkm7gbw2sr25vsb3k05zs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMxNmQ3ZGYtNGE1YWU0NjktZjcxYjc3YzctMjQ0Y2U0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.890792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01js2fkm8qb381tttd30hef6sx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNhNDEwLWRjNDc0YzQwLTU2MDhiNjEtMzA1OGQ4ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:04.946651Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01js2fkma24hkfrsndnhqhsenb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE3YWFiZjktZmVmNWU3Y2EtMjAwNjVhZTMtZWJmMzY1OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.002044Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01js2fkmbseazm2s7mwwhm3aax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgxOWRlZTUtOWE0MWNmMi01ZTI4NGU1ZS1kMWY4NTkxZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.050275Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01js2fkmdf24fvgaevcgckskbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDlkMmZlODUtMmNlOGM2OTktOGI4OGU3MDgtNjdiNjgxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.087443Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01js2fkmezba4xmatbd76jj42k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzlhNjViMWItNWFmNDM3YTMtYWQ0ZjdkYS03NTU5MGQ2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.125021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01js2fkmg45y8wp6keza091qzk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYwMjQ4ZTItZmU1YWQwZGYtYzhkMDNkNzktNjg5MjNkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.162722Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01js2fkmh97sjycb4w7c0eeh07, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU5NzNjOGUtYjY0Y2U3YWYtMTE3NTMxNDItOGE2OTMyZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.214358Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01js2fkmjfb2yfjzx57w9zvqq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJiYWJlNTAtODc1NzVjMDYtM2UwNjMwYzUtNmJhNTRhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.252486Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01js2fkmm39aw6qdh5brfpbgtr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJmNDczZjAtZWQ3ZWM5YjMtY2E2YWQ5ZGEtNzI1ZTIwOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:19:05.574321Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01js2fkmrvdtmyyexkyqbrpzq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NhMjIxODEtODk2MmIxZmUtYzRmMzUwZTQtZDdmMzc0MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition |94.4%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> BasicStatistics::SimpleGlobalIndex [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-17T18:19:06.605717Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:06.610110Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:06.610528Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-17T18:19:06.610581Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:06.610627Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-17T18:19:06.610678Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:06.610727Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:06.610809Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-17T18:19:06.611552Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-17T18:19:06.611682Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:06.626498Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:06.629612Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:06.629754Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:06.630661Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:06.630834Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:06.631304Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:06.631701Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-17T18:19:06.634349Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
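The partition bootstrap in these traces always runs the same ordered initializer steps (TInitConfigStep, TInitInternalFieldsStep, ...) before the partition reports "init complete" for its generation. A compact sketch of such a step pipeline is below; the `RunInitSteps` driver and callback shape are invented for illustration, since the actual implementation runs these as actor-based steps rather than synchronous callbacks.

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Invented driver: each step either succeeds or aborts initialization.
struct TInitStep {
    std::string Name;
    std::function<bool()> Run; // false aborts the pipeline
};

bool RunInitSteps(const std::vector<TInitStep>& steps, int generation) {
    for (const auto& step : steps) {
        std::cout << "Start initializing step " << step.Name << "\n";
        if (!step.Run())
            return false;
    }
    std::cout << "Initializing completed. init complete, generation "
              << generation << "\n";
    return true;
}

int main() {
    // Step names and generation 2 match the log above.
    RunInitSteps({
        {"TInitConfigStep",         [] { return true; }},
        {"TInitInternalFieldsStep", [] { return true; }},
    }, /*generation=*/2);
}
```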
2025-04-17T18:19:06.634417Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-17T18:19:06.634471Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:06.634517Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:06.635417Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-17T18:19:06.688503Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:06.692489Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:06.692802Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-17T18:19:06.692852Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:06.692890Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-17T18:19:06.692936Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:06.692987Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:06.693072Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-17T18:19:06.693839Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:403:2358], now have 1 active actors on pipe 2025-04-17T18:19:06.693913Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:06.694105Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:06.696442Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:06.696600Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:06.697429Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-17T18:19:06.697557Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:06.697927Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:06.698151Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:411:2364] 2025-04-17T18:19:06.700291Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-17T18:19:06.700359Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:411:2364] 2025-04-17T18:19:06.700419Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:06.700469Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:06.701285Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:414:2366], now have 1 active actors on pipe 2025-04-17T18:19:06.718276Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:06.722554Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:06.722868Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-17T18:19:06.722937Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:06.722978Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-17T18:19:06.723019Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:06.723069Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:06.723126Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-17T18:19:06.723838Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:463:2403], now have 1 active actors on pipe 2025-04-17T18:19:06.723960Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:06.724163Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:19:06.726489Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:19:06.726632Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:06.727397Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-17T18:19:06.727541Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:06.727960Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:06.728201Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:471:2409] 2025-04-17T18:19:06.730160Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-17T18:19:06.730271Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:471:2409] 2025-04-17T18:19:06.730328Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:06.730390Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... edTxs.size=0 2025-04-17T18:19:07.256796Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:07.256844Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:19:07.257330Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:522:2447], now have 1 active actors on pipe 2025-04-17T18:19:07.257419Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-17T18:19:07.257575Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:07.259214Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:07.259303Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:07.259848Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-17T18:19:07.259942Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] 
Start initializing step TInitConfigStep 2025-04-17T18:19:07.260183Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:07.260346Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:530:2453] 2025-04-17T18:19:07.261631Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T18:19:07.261672Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:530:2453] 2025-04-17T18:19:07.261715Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:07.261751Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-17T18:19:07.262319Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:533:2455], now have 1 active actors on pipe 2025-04-17T18:19:07.263044Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:539:2458], now have 1 active actors on pipe 2025-04-17T18:19:07.263294Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:19:07.263385Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:540:2459], now have 1 active actors on pipe 2025-04-17T18:19:07.263563Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:19:07.263602Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:541:2459], now have 1 active actors on pipe 2025-04-17T18:19:07.263699Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-17T18:19:07.274795Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:549:2466], now have 1 active actors on pipe 2025-04-17T18:19:07.298978Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-17T18:19:07.301531Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-17T18:19:07.301808Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-17T18:19:07.301856Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-17T18:19:07.302015Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-17T18:19:07.302831Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-17T18:19:07.302884Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-17T18:19:07.302999Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-17T18:19:07.303334Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-17T18:19:07.303554Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:606:2511] 2025-04-17T18:19:07.305591Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-17T18:19:07.306870Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-17T18:19:07.307223Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-17T18:19:07.307536Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-17T18:19:07.307702Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-17T18:19:07.307732Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-17T18:19:07.307769Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-17T18:19:07.307838Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-17T18:19:07.307892Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:606:2511] 2025-04-17T18:19:07.307950Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-17T18:19:07.307996Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-17T18:19:07.308873Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:540:2459] destroyed 2025-04-17T18:19:07.308953Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:539:2458] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 77 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 77 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> 
TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: 2025-04-17T18:16:41.764457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:41.764616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:41.764806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00153b/r3tmp/tmp29yAgT/pdisk_1.dat 2025-04-17T18:16:42.389534Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16547, node 1 2025-04-17T18:16:43.099870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:43.099936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:43.099969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:43.100735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:43.108169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:43.208434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:43.208514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:43.223436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23162 2025-04-17T18:16:43.832866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:47.242170Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:47.312800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:47.312920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:47.364071Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:47.367034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:47.688048Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.688692Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.689311Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.689439Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.689513Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.689729Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.689820Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.689921Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.689984Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.887822Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:47.887945Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:47.907780Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:48.127613Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:48.189554Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:48.189695Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:48.233049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:48.234560Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:48.234817Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:48.234887Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:48.234948Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:48.235036Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:48.235109Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:48.235177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:48.235702Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:48.264790Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:48.264916Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1867:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:48.317735Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-04-17T18:16:48.322700Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2616] 2025-04-17T18:16:48.322970Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1903:2616], schemeshard id = 72075186224037897 2025-04-17T18:16:48.339245Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:48.363260Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:48.363627Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:48.363705Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:48.374459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:48.388521Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:48.388707Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:48.617911Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:48.810089Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:48.855920Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:49.984829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:49.984992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.175823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:50.593349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.593519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.595131Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2450:3120]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:50.595353Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:16:50.595436Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2452:3122] 2025-04-17T18:16:50.595508Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2452:3122] 2025-04-17T18:16:50.596082Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2453:2925] 2025-04-17T18:16:50.596392Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2452:3122], server id = [2:2453:2925], tablet id = 72075186224037894, status = OK 2025-04-17T18:16:50.596616Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2453:2925], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:16:50.596679Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:16:50.596923Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:16:50.597017Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2450:3120], StatRequests.size() = 1 2025-04-17T18:16:50.605021Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2486:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:50.605211Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:16:50.605253Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2486:3131], StatRequests.size() = 1 2025-04-17T18:16:50.605449Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2488:3133]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:50.605605Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:16:50.605636Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2488:3133], StatRequests.size() = 1 2025-04-17T18:16:50.629420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2493:3138], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.629519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.630007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2498:3143], DatabaseI ... 7T18:18:57.750008Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:18:57.750048Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:18:58.538751Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6776:4776]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:58.539026Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:18:58.539067Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6776:4776], StatRequests.size() = 1 2025-04-17T18:18:59.273044Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:18:59.273171Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:18:59.273421Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:18:59.315993Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-04-17T18:18:59.316051Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 213.000000s, at schemeshard: 72075186224037897 2025-04-17T18:18:59.316284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-04-17T18:18:59.329019Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:18:59.900878Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6811:4794]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:18:59.901134Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:18:59.901174Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6811:4794], StatRequests.size() = 1 2025-04-17T18:19:00.557577Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:00.557647Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:00.557687Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:19:00.557723Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:00.557977Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:19:00.567615Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:00.570525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6834:4813], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:00.570605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6844:4818], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:00.570733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:00.581993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:19:00.625023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6848:4821], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:19:00.765714Z node 2 :TX_PROXY ERROR: Actor# [2:6943:4869] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:00.810413Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6972:4884]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:00.810698Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:19:00.810767Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6972:4884], StatRequests.size() = 1 2025-04-17T18:19:00.932550Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTUxYjI2ODMtOTJmYTk2ZTItMTNkYjAzNTktYTM0ZDdlNGU=, TxId: 2025-04-17T18:19:00.932626Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTUxYjI2ODMtOTJmYTk2ZTItMTNkYjAzNTktYTM0ZDdlNGU=, TxId: 2025-04-17T18:19:00.933251Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:00.947426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:00.947496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:01.465807Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7007:4903]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:01.466086Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:19:01.466125Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7007:4903], StatRequests.size() = 1 2025-04-17T18:19:02.689576Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7046:4925]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:02.689778Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:19:02.689812Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7046:4925], StatRequests.size() = 1 2025-04-17T18:19:03.331968Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:19:03.342847Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:03.342906Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:03.342944Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 6] is data table. 2025-04-17T18:19:03.342972Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 6] 2025-04-17T18:19:03.343237Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:19:03.345003Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:03.356288Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGRjNGEzYzktZTAzZjg1M2YtZDk3YzhhMWUtYjkxYzE0OTE=, TxId: 2025-04-17T18:19:03.356331Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGRjNGEzYzktZTAzZjg1M2YtZDk3YzhhMWUtYjkxYzE0OTE=, TxId: 2025-04-17T18:19:03.356891Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:03.369396Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 6] 2025-04-17T18:19:03.369435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:03.920164Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7114:4964]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:03.920408Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:19:03.920448Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7114:4964], StatRequests.size() = 1 2025-04-17T18:19:05.209086Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7157:4988]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:05.209356Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-17T18:19:05.209396Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7157:4988], StatRequests.size() = 1 2025-04-17T18:19:05.892732Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:19:05.893128Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:19:05.893361Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:05.904344Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:05.904405Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:05.904446Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T18:19:05.904486Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:19:05.904761Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:19:05.907435Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:05.918321Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGU1NjNmNTctZGI3MmFmNzItYWUzODMwNi1lMzQ1ZjY5ZA==, TxId: 2025-04-17T18:19:05.918378Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGU1NjNmNTctZGI3MmFmNzItYWUzODMwNi1lMzQ1ZjY5ZA==, TxId: 2025-04-17T18:19:05.918876Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:05.932589Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:19:05.932638Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:06.500694Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7222:5024]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:06.501008Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-17T18:19:06.501059Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7222:5024], StatRequests.size() = 1 >> AggregateStatistics::ShouldBePings >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } >> AggregateStatistics::ShouldBePings [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2025-04-17T18:19:08.498827Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T18:19:08.505763Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-04-17T18:19:08.510011Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.512133Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T18:19:08.512288Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2025-04-17T18:19:08.512349Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.512606Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-17T18:19:08.512788Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T18:19:08.512916Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [2:44:2057], tablet id = 4, status = OK 2025-04-17T18:19:08.512963Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.513038Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2025-04-17T18:19:08.513076Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.514318Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-17T18:19:08.514396Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-04-17T18:19:08.514531Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-17T18:19:08.514614Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:47:2057], server id = [3:47:2057], tablet id = 5, status = OK 2025-04-17T18:19:08.514674Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.514735Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-17T18:19:08.514777Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.514850Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-17T18:19:08.514870Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.514900Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-17T18:19:08.514991Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-04-17T18:19:08.515029Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-17T18:19:08.515145Z node 1 :STATISTICS 
DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-17T18:19:08.515165Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.515211Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2025-04-17T18:19:08.515258Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.515311Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:47:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2025-04-17T18:19:08.515332Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.515425Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-17T18:19:08.515449Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.515513Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-04-17T18:19:08.515550Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:19:08.515742Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-17T18:19:08.515847Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-17T18:19:08.515875Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.515950Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-17T18:19:08.515998Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-17T18:19:08.516172Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-04-17T18:19:08.516222Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-04-17T18:19:08.423887Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T18:19:08.434385Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T18:19:08.538791Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-04-17T18:19:08.538854Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-17T18:19:08.538896Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-17T18:19:08.539779Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-17T18:19:08.539819Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.539857Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-17T18:19:08.539885Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.539940Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-04-17T18:19:08.539965Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 
2025-04-17T18:19:08.520242Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-17T18:19:08.527068Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2025-04-17T18:19:08.527538Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.527663Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2025-04-17T18:19:08.527721Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.527804Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-17T18:19:08.527984Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2025-04-17T18:19:08.528037Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.528133Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2025-04-17T18:19:08.528178Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.528267Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-17T18:19:08.528295Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.528336Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2025-04-17T18:19:08.528397Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.528446Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-17T18:19:08.528581Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2025-04-17T18:19:08.528628Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.528688Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-04-17T18:19:08.528743Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-17T18:19:08.528766Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.528801Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2025-04-17T18:19:08.528873Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2025-04-17T18:19:08.529045Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2025-04-17T18:19:08.529071Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.529095Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2025-04-17T18:19:08.529158Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2025-04-17T18:19:08.529177Z node 1 :STATISTICS DEBUG: 
Skip EvClientConnected 2025-04-17T18:19:08.539485Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2025-04-17T18:19:08.539538Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2025-04-17T18:19:08.539567Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-04-17T18:19:08.539639Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2025-04-17T18:19:08.539662Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2025-04-17T18:19:08.539677Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-04-17T18:19:08.539739Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2025-04-17T18:19:08.539774Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2025-04-17T18:19:08.539797Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 2025-04-17T18:19:08.539816Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-17T18:19:08.539898Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-17T18:19:08.539924Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-17T18:19:08.539966Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-17T18:19:08.539989Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.540019Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-17T18:19:08.540034Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-17T18:19:08.540057Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-17T18:19:08.540072Z node 1 :STATISTICS DEBUG: Skip EvClientConnected >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-04-17T18:17:23.064329Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:23.147307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:23.166429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:23.166719Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:23.173394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:23.173582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:23.173803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:23.173911Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:23.174032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:23.174140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:23.174237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:23.174328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:23.174407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:23.174487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:23.174568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:23.174639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:23.199179Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:23.199321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:23.199397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:23.199566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:23.199719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:23.199792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:23.199830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:23.199913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
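The TTxUpdateSchema entries surrounding this point walk a fixed chain of normalizers (Granules and Chunks above; TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2 and RestoreV2Chunks below), each reporting normalizer_finished before the next is switched in, and the whole pass ends with normalization_finished. A minimal sketch of that control flow, assuming a simple sequential-pass interface rather than the shard's actual API:

    #include <memory>
    #include <vector>

    // Assumed illustration: each normalizer repairs one class of legacy
    // on-disk data. In this run every pass reports "0 chunks found",
    // i.e. a no-op on a freshly bootstrapped tablet.
    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual void Apply() = 0;
    };

    void RunNormalizers(const std::vector<std::unique_ptr<INormalizer>>& chain) {
        for (const auto& normalizer : chain) {
            normalizer->Apply();  // logged as normalizer_finished; the next
                                  // pass is then logged as normalizer_switched
        }
        // logged as normalization_finished once the chain is exhausted
    }

Note the ordering visible in the log: the cleanup passes (TablesCleaner through GCCountersNormalizer) run before the restore and sync passes.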
2025-04-17T18:17:23.199970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:23.200007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:23.200033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:23.200202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:23.200290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:23.200330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:23.200360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:23.200449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:23.200530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:23.200578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:23.200606Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:23.200704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:23.200742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:23.200771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:23.200817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:23.200859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:23.200887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:23.201307Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-17T18:17:23.201405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-17T18:17:23.201487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:17:23.201556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-17T18:17:23.201771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:23.201832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:23.201878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:23.202048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:23.202089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:23.202119Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:23.202306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:23.202358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:23.202387Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:23.202592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:23.202632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:23.202659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:23.202771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:23.202840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:23.202888Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ge:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_
idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_i
dx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913844117;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_co
unt:25002;min_schema_snapshot:(plan_step=1744913844117;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913844117;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););;
2025-04-17T18:19:07.626435Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5507:7499];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3;
2025-04-17T18:19:07.627685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5507:7499];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD]
Test command err:
2025-04-17T18:19:09.086105Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-17T18:19:09.090164Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2
2025-04-17T18:19:09.090227Z node 1 :STATISTICS DEBUG: Tablet 2 is not local.
2025-04-17T18:19:09.090289Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3
2025-04-17T18:19:09.090743Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR
2025-04-17T18:19:09.090785Z node 1 :STATISTICS DEBUG: Tablet 1 is not local.
2025-04-17T18:19:09.090870Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR
2025-04-17T18:19:09.090882Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-17T18:19:09.090903Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR
2025-04-17T18:19:09.090913Z node 1 :STATISTICS DEBUG: Tablet 4 is not local.
2025-04-17T18:19:09.090948Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5
2025-04-17T18:19:09.090966Z node 1 :STATISTICS DEBUG: Tablet 5 is not local.
2025-04-17T18:19:09.091035Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6
2025-04-17T18:19:09.091076Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR
2025-04-17T18:19:09.091087Z node 1 :STATISTICS DEBUG: Tablet 7 is not local.
2025-04-17T18:19:09.091104Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR
2025-04-17T18:19:09.091113Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-17T18:19:09.091130Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8
2025-04-17T18:19:09.091139Z node 1 :STATISTICS DEBUG: Tablet 8 is not local.
2025-04-17T18:19:09.091151Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest
>> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD]
Test command err:
2025-04-17T18:19:09.554157Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-17T18:19:09.559043Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK
2025-04-17T18:19:09.559292Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 }
2025-04-17T18:19:09.559480Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-17T18:19:09.559568Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1
2025-04-17T18:19:09.559665Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR
2025-04-17T18:19:09.559681Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-17T18:19:09.559725Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-17T18:19:09.559818Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0
2025-04-17T18:19:09.559867Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [3:45:2057], tablet id = 3, status = OK
2025-04-17T18:19:09.559890Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:45:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-17T18:19:09.559919Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3
2025-04-17T18:19:09.559945Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-17T18:19:09.560025Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK
2025-04-17T18:19:09.560054Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-17T18:19:09.560078Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [0:0:0], tablet id = 3, status = ERROR
2025-04-17T18:19:09.560087Z node 3 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-17T18:19:09.560115Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4
2025-04-17T18:19:09.560127Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2
2025-04-17T18:19:09.560192Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3
2025-04-17T18:19:09.560235Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR
2025-04-17T18:19:09.560258Z node 4 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-17T18:19:09.560325Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4
2025-04-17T18:19:09.570620Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-17T18:19:09.570666Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-17T18:19:09.570710Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-17T18:19:09.570723Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-17T18:19:09.581222Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout
2025-04-17T18:19:09.581312Z node 1 :STATISTICS INFO: Node 2 is unavailable
2025-04-17T18:19:09.581344Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-17T18:19:09.581471Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-17T18:19:09.581530Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout
2025-04-17T18:19:09.581665Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-17T18:19:09.581695Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-17T18:19:09.581745Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-17T18:19:09.581779Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD]
Test command err:
2025-04-17T18:17:18.766066Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-17T18:17:18.843766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:17:18.866009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-17T18:17:18.866382Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-17T18:17:18.874232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:17:18.874445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:17:18.874662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:17:18.874774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:17:18.874891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:17:18.874990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-17T18:17:18.875087Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:18.875217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:18.875313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:18.875427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.875519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:18.875610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:18.898137Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:18.898293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:18.898348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:18.898518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:18.898650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:18.898716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:18.898747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:18.898811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:18.898864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:18.898902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:18.898929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:18.899106Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:18.899161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:18.899188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:18.899214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:18.899292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:18.899348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:18.899405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:18.899426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:18.899487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:18.899523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:18.899548Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:18.899592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:18.899624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:18.899647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:18.899953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-17T18:17:18.900030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T18:17:18.900119Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-17T18:17:18.900192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-17T18:17:18.900318Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:18.900358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:18.900391Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:18.900520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:18.900546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.900601Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:18.900700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:18.900733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:18.900753Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:18.900878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:18.900905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:18.900923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:18.900997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:18.901020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:18.901054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:
[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[
NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913839814;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913839814;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_
size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913839814;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););;
2025-04-17T18:19:08.798400Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5648:7640];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3;
2025-04-17T18:19:08.799670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5648:7640];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest
>> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD]
|94.4%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log}
|94.4%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142]
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142]
2025-04-17T18:18:24.506155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-17T18:18:24.506250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-17T18:18:24.506285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-17T18:18:24.506317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
OperationsProcessing config: using default configuration 2025-04-17T18:18:24.506356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:24.506403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:24.506461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:24.506539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:24.506874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:24.589688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:24.589754Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:24.602336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:24.603017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:24.603200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:24.614201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:24.614837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:24.615488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.615718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:24.618828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.620107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:24.620158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.620359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:24.620406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:24.620450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:24.620604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: 
[1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:24.627415Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:24.746150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:24.746382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.746547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:24.746767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:24.746819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.748844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.748980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:24.749187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.749255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:24.749302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:24.749334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:24.751102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.751151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:24.751184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:24.752827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.752879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.752920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:24.752976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:24.756538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 
18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:24.758090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:24.758268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:24.759107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.759228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:24.759288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:24.759531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:24.759594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:24.759750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:24.759812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:24.761534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:24.761572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:24.761710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.761746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:24.762062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:24.762120Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:24.762217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:24.762251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:24.762281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:24.762322Z no ... 
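The trace above walks sub-operation 1:0 (ESchemeOpAlterSubDomain) through its whole lifecycle: TCreateParts finds no shards to create and advances 2 -> 3, TConfigureParts advances 3 -> 128, TPropose hands the transaction to coordinator 72057594046316545, and the planned step 5000001 drives 128 -> 240, where TDone reports progress 1/1. A minimal sketch of that progression, with the state names and numbers read off the "Change state for txid 1:0" lines; the dispatch code is an illustration, not YDB's implementation:

    // Illustrative sketch only: mirrors the sub-operation transitions
    // visible in the trace above (2 -> 3 -> 128 -> 240).
    #include <cstdio>

    enum ETxState {
        CreateParts    = 2,   // TCreateParts: no shards to create, do next state
        ConfigureParts = 3,   // NSubDomainState::TConfigureParts ProgressState
        Propose        = 128, // NSubDomainState::TPropose -> coordinator
        Done           = 240, // TDone: part operation is done, progress 1/1
    };

    ETxState Next(ETxState s) {
        switch (s) {
            case CreateParts:    return ConfigureParts;
            case ConfigureParts: return Propose;
            case Propose:        return Done; // after TEvOperationPlan at step 5000001
            default:             return Done;
        }
    }

    int main() {
        for (ETxState s = CreateParts; s != Done; ) {
            ETxState n = Next(s);
            std::printf("Change state for txid 1:0 %d -> %d\n", (int)s, (int)n);
            s = n;
        }
    }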
2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:09.999754Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:09.999969Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 240us result status StatusSuccess 2025-04-17T18:19:10.000679Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:10.011539Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:849:2681] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:19:10.011619Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:789:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:19:10.011729Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:849:2681] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744913949989962 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744913949989962 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744913949989962 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:19:10.013607Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:849:2681] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:19:10.013673Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:789:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> ReadOnlyVDisk::TestReads >> ReadOnlyVDisk::TestGarbageCollect >> ReadOnlyVDisk::TestStorageLoad >> ReadOnlyVDisk::TestDiscover |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> BasicStatistics::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 2025-04-17T18:16:41.921323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:41.921466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:41.921514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001531/r3tmp/tmpNex5P8/pdisk_1.dat 2025-04-17T18:16:42.379967Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28711, node 1 2025-04-17T18:16:43.098021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:43.098091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:43.098130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:43.098651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:43.106618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:43.196128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:43.196696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:43.212421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18677 2025-04-17T18:16:43.832843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:47.194121Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:47.240784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:47.240897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:47.272661Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:47.274997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:47.522720Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.524874Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.525883Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.526045Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.526288Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.526413Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.526538Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.526607Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.526675Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:47.708062Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:47.708210Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:47.735845Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:47.942989Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:47.997505Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:47.997603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:48.036184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:48.036388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:48.036591Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:48.036666Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:48.036719Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:48.036769Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:48.036922Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:48.036990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:48.037420Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:48.115026Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:48.115174Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1873:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:48.119916Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2607] 2025-04-17T18:16:48.132763Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:48.134383Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2625] 2025-04-17T18:16:48.135387Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2625], schemeshard id = 72075186224037897 2025-04-17T18:16:48.159084Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:48.159145Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:48.159223Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:48.176254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:48.184489Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:48.184622Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:48.378213Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:48.589746Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:48.685523Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:49.984417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2235:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:49.984579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.126607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:50.564949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2375:3104], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.565089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.566531Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2380:3108]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:50.566756Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:16:50.566866Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2382:3110] 2025-04-17T18:16:50.567846Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2382:3110] 2025-04-17T18:16:50.568564Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2383:2877] 2025-04-17T18:16:50.568946Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2382:3110], server id = [2:2383:2877], tablet id = 72075186224037894, status = OK 2025-04-17T18:16:50.569234Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2383:2877], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:16:50.569316Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:16:50.569792Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:16:50.569953Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2380:3108], StatRequests.size() = 1 2025-04-17T18:16:50.625547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2387:3114], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.625672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.626101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2392:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.634010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:16:50.850388Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:16:50.850474Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:16:50.980533Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2382:3110], schemeshard count = 1 2025-04-17T18:16:51.405511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... TATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6394:4625], StatRequests.size() = 1 2025-04-17T18:19:00.404934Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6427:4639]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:00.405144Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-17T18:19:00.405172Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6427:4639], StatRequests.size() = 1 2025-04-17T18:19:01.107747Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:19:01.679467Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6462:4655]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:01.679791Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-17T18:19:01.679837Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6462:4655], StatRequests.size() = 1 2025-04-17T18:19:02.486496Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:19:02.486598Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:19:02.486646Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:19:02.486686Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:19:03.213843Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6502:4672]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:03.214050Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:19:03.214082Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6502:4672], StatRequests.size() = 1 2025-04-17T18:19:03.953769Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:19:03.954034Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:19:03.954270Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:03.997369Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T18:19:03.997454Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 190.000000s, at schemeshard: 72075186224037897 2025-04-17T18:19:03.997817Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 
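Right above, node 1 reschedules SendBaseStatsToSA after 30 seconds because it has no StatisticsAggregatorId resolved, while node 2, having shipped base stats for 2 paths to aggregator 72075186224037894, reschedules after 190 seconds. A minimal sketch of that rescheduling rule; the two constants are read off the log, everything else is assumed for illustration:

    // Minimal sketch (not YDB code) of the rule implied by the two
    // "Schedule next SendBaseStatsToSA" lines above.
    #include <cstdio>
    #include <cstdint>
    #include <optional>

    int NextSendBaseStatsDelaySeconds(std::optional<uint64_t> aggregatorId) {
        if (!aggregatorId)
            return 30;  // "no StatisticsAggregatorId" -- retry resolution soon
        return 190;     // stats delivered -- fall back to the long interval
    }

    int main() {
        std::printf("unresolved: %ds\n", NextSendBaseStatsDelaySeconds(std::nullopt));
        std::printf("resolved:   %ds\n",
                    NextSendBaseStatsDelaySeconds(uint64_t(72075186224037894ULL)));
    }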
2025-04-17T18:19:04.011663Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:04.540936Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6533:4686]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:04.541241Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:19:04.541286Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6533:4686], StatRequests.size() = 1 2025-04-17T18:19:05.255781Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:05.255845Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:05.255890Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:19:05.255930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:05.256240Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:19:05.279819Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:05.282975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6558:4707], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:05.283061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6568:4712], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:05.283182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:05.293034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:19:05.347086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6572:4715], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:19:05.509938Z node 2 :TX_PROXY ERROR: Actor# [2:6670:4763] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:05.541482Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6699:4778]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:05.541664Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:19:05.541704Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6699:4778], StatRequests.size() = 1 2025-04-17T18:19:05.671932Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjVkM2MzMGYtNWQwNTc2MjUtOGU5ZWY3Yy1hZTJmMTFkOQ==, TxId: 2025-04-17T18:19:05.672030Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjVkM2MzMGYtNWQwNTc2MjUtOGU5ZWY3Yy1hZTJmMTFkOQ==, TxId: 2025-04-17T18:19:05.672922Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:05.688805Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:05.688912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:06.135639Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6731:4797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:06.135979Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:19:06.136031Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6731:4797], StatRequests.size() = 1 2025-04-17T18:19:07.399866Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6770:4819]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:07.400070Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:19:07.400102Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6770:4819], StatRequests.size() = 1 2025-04-17T18:19:08.076733Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:19:08.087733Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:08.087797Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:08.087839Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T18:19:08.087876Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:19:08.088262Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:19:08.090976Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:08.103222Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-17T18:19:06.000000Z 2025-04-17T18:19:08.103802Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODY5Nzc3MjYtYWI1NmUyMDctNTc0OWE5ZGItMWVjMmM0NWY=, TxId: 2025-04-17T18:19:08.103854Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODY5Nzc3MjYtYWI1NmUyMDctNTc0OWE5ZGItMWVjMmM0NWY=, TxId: 2025-04-17T18:19:08.104360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:08.118074Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:19:08.118133Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:08.675810Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6838:4858]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:08.676118Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:19:08.676165Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6838:4858], StatRequests.size() = 1 2025-04-17T18:19:09.994334Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6881:4882]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:09.994530Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-17T18:19:09.994568Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6881:4882], StatRequests.size() = 1 2025-04-17T18:19:10.671714Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:19:10.672448Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:19:10.673132Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:10.684768Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:10.684837Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
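Each traversal above runs the same parameterized cleanup through TQueryBase before TTxFinishTraversal completes. A minimal sketch of issuing that query with the YDB C++ SDK; the query text is copied from the "[TQueryBase] RunDataQuery" lines, while the include path and session wiring are assumptions quoted from memory:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h> // assumed SDK path

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Deletes the stored base statistics for one table, mirroring the
    // cleanup query run by the traversal in the trace above.
    TDataQueryResult DeleteBaseStats(TSession& session, ui64 ownerId, ui64 localPathId) {
        static const TString query = R"(
            DECLARE $owner_id AS Uint64;
            DECLARE $local_path_id AS Uint64;
            DELETE FROM `.metadata/_statistics`
            WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
        )";
        auto params = TParamsBuilder()
            .AddParam("$owner_id").Uint64(ownerId).Build()
            .AddParam("$local_path_id").Uint64(localPathId).Build()
            .Build();
        return session.ExecuteDataQuery(
            query,
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx(),
            params).GetValueSync();
    }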
2025-04-17T18:19:11.229505Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6914:4898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:11.229815Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-17T18:19:11.229860Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6914:4898], StatRequests.size() = 1 >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> AnalyzeColumnshard::Analyze [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 18026004737892615465 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-04-17T18:19:12.847878Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-04-17T18:19:12.851245Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-17T18:19:12.854565Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-17T18:19:12.856273Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-04-17T18:19:12.861194Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: 
(2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-04-17T18:19:12.862854Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-04-17T18:19:12.864954Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-04-17T18:19:12.866588Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-04-17T18:19:14.296635Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.296719Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 
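A sketch for decoding the "Part situations#" strings in the TEvPutResult errors below: one string per disk OrderNumber, one character per part of the 6-part block-4-2 blob. From context, 'E' lines up with the read-only VDisks' "VDisk is in read-only mode" reasons and 'S'/'P' with parts that were placed; the exact letter semantics are an assumption here, not quoted from YDB.

    #include <cstdio>
    #include <string>
    #include <vector>

    int main() {
        // Situations reported for blob [1:1:11:0:0:32768:0], copied from the
        // log and reordered by OrderNumber 0..7.
        std::vector<std::string> situations = {
            "UUUEUU", "UUUUEU", "UUUUUE", "UUUSUU",
            "UUUUUS", "SUUUUU", "USUUUU", "UUSUUU",
        };
        int placed = 0, failed = 0;
        for (const auto& s : situations) {
            for (char c : s) {
                if (c == 'S' || c == 'P') ++placed;
                if (c == 'E') ++failed;
            }
        }
        // 5 parts placed, 3 attempts failed on the read-only disks; part 4
        // never found a writable disk, so the proxy reports the group as
        // EBS_DISINTEGRATED and the put returns ERROR.
        std::printf("placed=%d failed=%d\n", placed, failed);
    }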
2025-04-17T18:19:14.296810Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.297327Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [461d77f6d6574769] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-04-17T18:19:14.298361Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.298650Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.299440Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND 
TEvPut with key [1:1:13:0:0:32768:0] 2025-04-17T18:19:14.300600Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.301089Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.301616Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-04-17T18:19:14.302369Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:14.303037Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.303406Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-04-17T18:19:14.304181Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:14.304237Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.304726Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw 
optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:15:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2025-04-17T18:19:14.305843Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:14.305901Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.306517Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:16:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2025-04-17T18:19:14.307528Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.307673Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:14.307708Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 
Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2025-04-17T18:19:14.308910Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.309027Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.309089Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2025-04-17T18:19:14.310566Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.310696Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:14.310744Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2025-04-17T18:19:14.312089Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.312158Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:14.312225Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only 
Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2025-04-17T18:19:14.315797Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:699] 2025-04-17T18:19:14.315918Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5317:706] 2025-04-17T18:19:14.315953Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5324:713] 2025-04-17T18:19:14.316332Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [7c877e46e4a1d614] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2025-04-17T18:19:14.316413Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5317:706] 2025-04-17T18:19:14.316453Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5324:713] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 
1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "}
>> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD]
>> TSchemeShardTopicSplitMergeTest::DisableSplitMerge
>> KqpWorkloadService::TestHandlerActorCleanup [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD]
>> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD]
>> ReadOnlyVDisk::TestDiscover [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:14.267770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:14.267859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.267916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:14.267957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:14.268793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:14.268868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:14.268945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.269017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:14.270046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:14.336818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:14.336860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:14.348761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:14.349935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:14.350081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:14.358759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:14.359069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:14.361444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-17T18:19:14.361731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:14.370066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.374851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.374921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.375599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:14.375652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.375704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:14.375857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.381945Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:14.520431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:14.520645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.520828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:14.521073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:14.521126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.523236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.523361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:14.523563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.523612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:14.523642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:14.523683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:14.525369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.525434Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:14.525465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:14.526973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.527020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.527057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.527110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.530728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:14.532571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:14.532725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:14.533584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.533699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.533742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.534023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:14.534079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.534268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:14.534369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:14.536177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.536216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.536378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.536421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:14.536763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.536823Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:14.536918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.536970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.537003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.537041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.537080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:14.537114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.537149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:14.537173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:14.537223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:14.537253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:14.537283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:14.538871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.538964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.538995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
57594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-17T18:19:14.779523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-17T18:19:14.779571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:19:14.781884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T18:19:14.782106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T18:19:14.782148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T18:19:14.782600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-17T18:19:14.782651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T18:19:14.782692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-17T18:19:14.817318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.817496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.817563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-17T18:19:14.817659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:19:14.844495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-17T18:19:14.844675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-17T18:19:14.844748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-17T18:19:14.844798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.844852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-17T18:19:14.845022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T18:19:14.845197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:19:14.845282Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:14.848124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.848623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.848677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:19:14.848828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:19:14.849026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.849068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-17T18:19:14.849110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T18:19:14.849568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.849633Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T18:19:14.849735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:19:14.849767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:19:14.849807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:19:14.849836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:19:14.849891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T18:19:14.849926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:19:14.849961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:19:14.849987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:19:14.850136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:19:14.850197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-17T18:19:14.850232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T18:19:14.850261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T18:19:14.851290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:19:14.851400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2025-04-17T18:19:14.851434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:19:14.851466Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:19:14.851500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:19:14.852629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:19:14.852691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:19:14.852708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:19:14.852726Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T18:19:14.852743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:14.852795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-17T18:19:14.852823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:409:2376] 2025-04-17T18:19:14.856728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:19:14.856861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:19:14.856941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:19:14.856992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:543:2479] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-04-17T18:19:14.866864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:14.867066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.867241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-04-17T18:19:14.870842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.870984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:19:14.871232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:19:14.871273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:19:14.871739Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:14.871825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:19:14.871856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:639:2564] TestWaitNotification: OK eventTxId 105
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD]
Test command err:
2025-04-17T18:16:55.712491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:55.712636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:55.712784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bed/r3tmp/tmp3TEV44/pdisk_1.dat 2025-04-17T18:16:56.123653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18453, node 1 2025-04-17T18:16:56.377043Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:56.377099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:56.377143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:56.377857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:56.380091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:56.467842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:56.468004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:56.483355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16807 2025-04-17T18:16:57.041362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:17:00.489619Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:17:00.562770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:00.562901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:00.611717Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:17:00.614880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:00.883851Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.884386Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.884936Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.885271Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.885354Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.885439Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.885518Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.885587Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:00.885679Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:01.068946Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:01.069067Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:01.089927Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:01.237045Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:01.291548Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:17:01.291653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:17:01.326334Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:17:01.328024Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:17:01.328230Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:17:01.328284Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:17:01.328389Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:17:01.328449Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:17:01.328508Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:17:01.328559Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:17:01.329838Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:17:01.369768Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:17:01.369878Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:17:01.380574Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:17:01.390792Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:17:01.390960Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:17:01.405023Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:17:01.427483Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:17:01.427564Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:17:01.427638Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:17:01.440177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:17:01.449137Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:17:01.449307Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:17:01.691033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:17:01.875624Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:17:01.941585Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:17:02.717016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:02.717197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:02.736902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:17:02.862775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:02.863048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:02.863366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:02.863564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:02.863757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:02.863960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:02.864109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:02.864358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:02.864526Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:02.864671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:02.864857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:02.864972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:02.901045Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:02.901151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 04-17T18:17:14.484870Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:08.223903Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:19:08.223979Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:19:08.224014Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:19:08.224054Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:19:09.685855Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T18:19:09.685940Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 183.000000s, at schemeshard: 72075186224037897 2025-04-17T18:19:09.686293Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-17T18:19:09.700074Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:10.797085Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:10.797195Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T18:19:10.797237Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T18:19:10.797276Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:19:10.797313Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:10.797657Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-17T18:19:10.800514Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:10.803132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6985:5166], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:10.803197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6995:5171], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:10.803255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:10.812065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:19:10.853741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6999:5174], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:19:11.053065Z node 2 :TX_PROXY ERROR: Actor# [2:7095:5220] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:11.100074Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7124:5235]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:11.100245Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:19:11.100304Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7126:5237] 2025-04-17T18:19:11.100359Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7126:5237] 2025-04-17T18:19:11.100620Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7127:5238] 2025-04-17T18:19:11.100699Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7127:5238], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:19:11.100753Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:19:11.100897Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7126:5237], server id = [2:7127:5238], tablet id = 72075186224037894, status = OK 2025-04-17T18:19:11.100959Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:11.101015Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7124:5235], StatRequests.size() = 1 2025-04-17T18:19:11.202229Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzNiZTBlZDUtY2VjMmM0MDMtOTQ3ZDc3NGEtODY4OTJlOWM=, TxId: 2025-04-17T18:19:11.202298Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzNiZTBlZDUtY2VjMmM0MDMtOTQ3ZDc3NGEtODY4OTJlOWM=, TxId: 2025-04-17T18:19:11.202666Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:11.216417Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:11.216467Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:11.258721Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:19:11.258799Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:19:11.333030Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7126:5237], schemeshard count = 1 2025-04-17T18:19:12.287423Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T18:19:12.287508Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:19:12.290508Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:19:12.306676Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:19:12.307178Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:19:12.307236Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-17T18:19:12.320085Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:19:12.330912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-17T18:19:12.331586Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-17T18:19:12.331669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-17T18:19:12.345215Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:13.500196Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:13.500318Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-17T18:19:13.500359Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:19:13.500873Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:19:13.514362Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:19:13.514703Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:19:13.514787Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:19:13.515588Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:19:13.529238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:19:13.529445Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-17T18:19:13.529949Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7253:5314], server id = [2:7254:5315], tablet id = 72075186224037899, status = OK 2025-04-17T18:19:13.530052Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7253:5314], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:13.533899Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:19:13.534016Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:19:13.534276Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:19:13.534468Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:19:13.534651Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7253:5314], server id = [2:7254:5315], tablet id = 72075186224037899 2025-04-17T18:19:13.534690Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:13.534900Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:19:13.537447Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:19:13.570454Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7274:5334]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:13.570659Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:19:13.570705Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7274:5334], StatRequests.size() = 1 2025-04-17T18:19:13.674369Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmM0OTBkMjctN2M4ZDJlNWMtYTZlOGEzMjYtYjllODY5Nzg=, TxId: 2025-04-17T18:19:13.674431Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmM0OTBkMjctN2M4ZDJlNWMtYTZlOGEzMjYtYjllODY5Nzg=, TxId: 2025-04-17T18:19:13.675021Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:13.688842Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:19:13.688902Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2806:3224]
>> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargePartitions2
>> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD]
Test command err:
RandomSeed# 12291482487646794825 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-17T18:19:12.854105Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK
StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-17T18:19:13.037407Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:699] 2025-04-17T18:19:13.038214Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-04-17T18:19:13.204726Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:713] 2025-04-17T18:19:13.205428Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:699] 2025-04-17T18:19:13.205917Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:706] 2025-04-17T18:19:13.206095Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [9d9071126f95831e] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key 
[1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 
RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:14.267714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:14.267794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.267842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:14.267870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:14.268735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:14.268775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:14.268839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.268900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-17T18:19:14.270017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:14.342830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:14.342873Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:14.352578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:14.352850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:14.352984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:14.359154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:14.359321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:14.361440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.361686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:14.369615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.374727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.374792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.375551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:14.375588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.375648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:14.375744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.380371Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:14.502132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:14.503133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.503944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:14.504890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:14.504963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.507588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-17T18:19:14.507726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:14.507916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.507982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:14.508036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:14.508080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:14.509887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.509934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:14.509967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:14.511630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.511677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.511722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.511765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.521880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:14.523697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:14.523866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:14.524798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.524914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.524958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.525188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:14.525230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.525386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:14.525477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:14.527376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.527420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.527581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.527623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:14.527946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.527998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:14.528082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.528118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.528171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.528224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.528271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:14.528328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.528361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:14.528387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:14.528438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:14.528480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:14.528508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:14.539921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.540045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.540079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
} } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.961692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:737:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:740:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:741:2058] recipient: [1:739:2655] Leader for TabletID 72057594046678944 is [1:742:2656] sender: [1:743:2058] recipient: [1:739:2655] 2025-04-17T18:19:15.001868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:15.001973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:15.002011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:15.002047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:15.002094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:15.002139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:15.002218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:15.002282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:15.002577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:15.020090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:15.021552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:15.021783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:15.021902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:15.021943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:15.022090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:15.022862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:19:15.022950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:19:15.022998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:19:15.023083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at 
schemeshard: 72057594046678944 2025-04-17T18:19:15.023197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.023427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:19:15.023727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.023846Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:19:15.024038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.024161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.024268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-17T18:19:15.024315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:19:15.024342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:19:15.024362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-17T18:19:15.024382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:19:15.024497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.024580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.024783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-17T18:19:15.024973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:19:15.025364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.025483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.025905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.025995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.026254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.026354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.026445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.026648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.026793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2025-04-17T18:19:15.026993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.027213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.027393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.027444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.027501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.034345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:15.034408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:15.034952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:15.035005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:15.035056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:15.036793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:742:2656] sender: [1:801:2058] recipient: [1:15:2062] 2025-04-17T18:19:15.100509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:15.100818Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 282us result status StatusSuccess 2025-04-17T18:19:15.101429Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:14.267723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:14.267822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.267877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:14.267920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:14.268780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:14.268826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:14.268898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.268967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:14.270066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:14.353559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:14.353607Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-17T18:19:14.369037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:14.369401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:14.369570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:14.377692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:14.377892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:14.378548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.378787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:14.381611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.382915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.382969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.383174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:14.383220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.383268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:14.383386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.389614Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:14.517219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:14.517442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.517650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:14.517859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:14.517907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.519936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.520075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:14.520246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.520291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:14.520322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:14.520380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:14.522079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.522125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:14.522158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:14.523582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.523620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.523658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.523702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.527013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:14.528646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:14.528831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:14.529725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.529829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.529874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.530133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:14.530208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.530381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:14.530471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:14.532250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.532296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.532473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.532514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:14.532874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.532933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:14.533015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.533054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.533106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.533137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.533185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:14.533221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.533257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:14.533282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:14.533336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:14.533369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:14.533401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:14.535186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.535291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.535329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
678944 is [2:125:2151] sender: [2:640:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:125:2151] sender: [2:641:2058] recipient: [2:639:2561] Leader for TabletID 72057594046678944 is [2:642:2562] sender: [2:643:2058] recipient: [2:639:2561] 2025-04-17T18:19:15.429624Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:15.429713Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:15.429755Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:15.429791Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:15.429825Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:15.429853Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:15.429912Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:15.430000Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:15.430299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:15.443765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:15.444671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:15.444787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:15.444909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:15.444932Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:15.445073Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:15.445491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:19:15.445540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:19:15.445567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:19:15.445607Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.445647Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.445790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:19:15.445983Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.446052Z node 2 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:19:15.446223Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.446310Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.446403Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-17T18:19:15.446441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:19:15.446470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:19:15.446490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-17T18:19:15.446511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:19:15.446610Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.446685Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.446883Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-17T18:19:15.447057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:19:15.447335Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.447462Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.447770Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.447844Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.448026Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.448112Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.448183Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.448324Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.448415Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.448595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.448800Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
2025-04-17T18:19:15.448965Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.449016Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.449061Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.455859Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:15.455927Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:15.456221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:15.456275Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:15.456321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:15.456445Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:642:2562] sender: [2:700:2058] recipient: [2:15:2062] 2025-04-17T18:19:15.509808Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:15.510131Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 358us result status StatusSuccess 2025-04-17T18:19:15.510792Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 
GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-04-17T18:17:26.445850Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:26.538507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:26.560637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:26.560919Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:26.567565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:26.567753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:26.567964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:26.568085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:26.568195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:26.568284Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:26.568400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:26.568499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:26.568607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:26.568694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:26.568820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:26.568921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:26.595463Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:26.595598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:26.595654Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:26.595869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:26.596044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:26.596124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:26.596166Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:26.596292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:26.596359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:26.596402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:26.596431Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:26.596597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:26.596661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:26.596698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:26.596740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:26.596849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:26.596908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:26.596947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:26.596977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:26.597063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:26.597100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:26.597126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:26.597170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:26.597212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:26.597240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:26.597611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-17T18:17:26.597690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-17T18:17:26.597775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-17T18:17:26.597858Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-17T18:17:26.598055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:26.598127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:26.598169Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:26.598515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:26.598563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:26.598594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:26.598733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:26.598776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:26.598803Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:26.598988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:26.599029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:26.599056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:26.599184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:26.599226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:26.599268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ge:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_rang
e:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range
:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913847509;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913847509;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28
;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913847509;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-17T18:19:14.969258Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5507:7499];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-17T18:19:14.970677Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5507:7499];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2025-04-17T18:16:26.950068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494347648790378051:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:16:26.954669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0013a1/r3tmp/tmprMdtES/pdisk_1.dat 2025-04-17T18:16:27.536082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:27.536173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:27.543612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:27.575786Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6295, node 1 2025-04-17T18:16:27.735377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:27.735396Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:27.735406Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:27.735516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10830 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:16:28.030059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:16:28.063503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:16:30.074368Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-17T18:16:30.075024Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-17T18:16:30.075056Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-17T18:16:30.075069Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-17T18:16:30.075204Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665970247830:2327], Start check tables existence, number paths: 2 2025-04-17T18:16:30.079064Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzNlYmY5YTYtODI5ZWM1OS1lZTk2NTE5MC04MDQyZDBiMQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzNlYmY5YTYtODI5ZWM1OS1lZTk2NTE5MC04MDQyZDBiMQ== 2025-04-17T18:16:30.079676Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzNlYmY5YTYtODI5ZWM1OS1lZTk2NTE5MC04MDQyZDBiMQ==, ActorId: [1:7494347665970247856:2330], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:30.079788Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665970247830:2327], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-17T18:16:30.079832Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665970247830:2327], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-17T18:16:30.079861Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7494347665970247830:2327], Successfully finished 2025-04-17T18:16:30.083047Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-17T18:16:30.098067Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665970247858:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:30.101689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:16:30.105491Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665970247858:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-17T18:16:30.106664Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665970247858:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-17T18:16:30.112349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665970247858:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:16:30.183513Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665970247858:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-17T18:16:30.188057Z node 1 :TX_PROXY ERROR: Actor# [1:7494347665970247909:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:16:30.188267Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494347665970247858:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-17T18:16:30.191273Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE= 2025-04-17T18:16:30.191560Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE=, ActorId: [1:7494347665970247917:2331], ActorState: unknown state, session actor bootstrapped 2025-04-17T18:16:30.191760Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE=, ActorId: [1:7494347665970247917:2331], ActorState: ReadyState, TraceId: 01js2fex7f2bkhkhkjrtegqbnf, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7494347665970247916:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-17T18:16:30.191800Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:16:30.191825Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-17T18:16:30.191874Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7494347665970247917:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE= 2025-04-17T18:16:30.191911Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665970247919:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:30.191982Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347665970247920:2333], Database: /Root, Start database fetching 2025-04-17T18:16:30.195131Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7494347665970247920:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-17T18:16:30.195248Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-17T18:16:30.195357Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347665970247930:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE=, Start pool fetching 2025-04-17T18:16:30.195387Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7494347665970247919:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:30.195391Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665970247931:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:16:30.195446Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-17T18:16:30.195458Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-17T18:16:30.195723Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494347665970247931:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-17T18:16:30.195798Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347665970247933:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-17T18:16:30.195804Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7494347665970247930:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE=, Pool info successfully resolved 2025-04-17T18:16:30.195832Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE= 2025-04-17T18:16:30.195869Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE= 2025-04-17T18:16:30.195890Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7494347665970247933:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7494347665970247917:2331], session id: ydb://session/3?node_id=1&id=ZTA1NjExNTQtNWZlOWE0ZWItZDU3Y2UwOTEtM2I0ZTM2ZjE= 2025-04-17T18:16:30.195942Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] ... 
yed: 0 2025-04-17T18:17:47.596431Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTc4ZjcxMDMtOWRkYzM1OTMtNGI4N2JmZjEtYWEyYjY2NDA=, ActorId: [6:7494347994378462308:2438], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:17:47.596467Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTc4ZjcxMDMtOWRkYzM1OTMtNGI4N2JmZjEtYWEyYjY2NDA=, ActorId: [6:7494347994378462308:2438], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:17:47.596495Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTc4ZjcxMDMtOWRkYzM1OTMtNGI4N2JmZjEtYWEyYjY2NDA=, ActorId: [6:7494347994378462308:2438], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:17:47.596516Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTc4ZjcxMDMtOWRkYzM1OTMtNGI4N2JmZjEtYWEyYjY2NDA=, ActorId: [6:7494347994378462308:2438], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:17:47.596579Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTc4ZjcxMDMtOWRkYzM1OTMtNGI4N2JmZjEtYWEyYjY2NDA=, ActorId: [6:7494347994378462308:2438], ActorState: unknown state, Session actor destroyed 2025-04-17T18:17:47.596699Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: ReadyState, TraceId: 01js2fh8tb549pm4x8t6226dqm, received request, proxyRequestId: 18 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; DROP RESOURCE POOL default; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-17T18:17:47.622594Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7494347981493559945:2336], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2025-04-17T18:17:47.622703Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:17:47.622784Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347994378462382:2454], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-17T18:17:47.623079Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347994378462382:2454], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions }
2025-04-17T18:17:47.623227Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions }
2025-04-17T18:17:47.628770Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7494347981493560143:2359], DatabaseId: /Root, PoolId: default, Got delete notification
2025-04-17T18:17:47.628866Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default
2025-04-17T18:17:47.628926Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347994378462402:2455], DatabaseId: /Root, PoolId: default, Start pool fetching
2025-04-17T18:17:47.629226Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494347994378462402:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:17:47.629336Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:47.638604Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: ExecuteState, TraceId: 01js2fh8tb549pm4x8t6226dqm, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7494347994378462370:2329] WorkloadServiceCleanup: 0 2025-04-17T18:17:47.640901Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: CleanupState, TraceId: 01js2fh8tb549pm4x8t6226dqm, EndCleanup, isFinal: 0 2025-04-17T18:17:47.640966Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: CleanupState, TraceId: 01js2fh8tb549pm4x8t6226dqm, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7494347968608657544:2267] Wait pool handlers 0.000014s: number handlers = 2 Wait pool handlers 1.000192s: number handlers = 2 Wait pool handlers 2.000351s: number handlers = 2 Wait pool handlers 3.000477s: number handlers = 2 Wait pool handlers 4.000708s: number handlers = 2 Wait pool handlers 5.000833s: number handlers = 2 Wait pool handlers 6.000973s: number handlers = 2 Wait pool handlers 7.001104s: number handlers = 2 Wait pool handlers 8.001232s: number handlers = 2 2025-04-17T18:17:56.406750Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:17:56.406778Z node 6 :IMPORT WARN: Table profiles were not loaded Wait pool handlers 9.001366s: number handlers = 2 Wait pool handlers 10.001492s: number handlers = 2 Wait pool handlers 11.001613s: number handlers = 2 Wait pool handlers 12.001744s: number handlers = 2 Wait pool handlers 13.001883s: number handlers = 2 2025-04-17T18:18:01.168914Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7494347981493559945:2336], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 14.002896s: number handlers = 2 Wait pool handlers 15.003031s: number handlers = 2 Wait pool handlers 16.003182s: number handlers = 2 Wait pool handlers 17.003330s: number handlers = 2 Wait pool handlers 18.003477s: number handlers = 2 Wait pool handlers 19.003631s: number handlers = 2 Wait pool handlers 20.003794s: number handlers = 2 Wait pool handlers 21.003935s: number handlers = 2 Wait pool handlers 22.004082s: number handlers = 2 Wait pool handlers 23.004217s: number handlers = 2 Wait pool handlers 24.004336s: number handlers = 2 Wait pool handlers 25.006808s: number handlers = 2 Wait pool handlers 26.006926s: number handlers = 2 Wait pool handlers 27.007617s: number handlers = 2 Wait pool handlers 28.007752s: number handlers = 2 Wait pool handlers 29.007862s: number handlers = 2 Wait pool handlers 30.007972s: number handlers = 2 Wait pool handlers 31.008104s: number handlers = 2 Wait pool handlers 32.008223s: number handlers = 2 Wait pool handlers 33.008326s: number handlers = 2 Wait pool handlers 34.008459s: number handlers = 2 Wait pool handlers 35.008584s: number handlers = 2 Wait pool handlers 36.008688s: number handlers = 2 Wait pool handlers 37.008795s: number handlers = 2 Wait pool handlers 38.008905s: number handlers = 2 Wait pool handlers 39.009032s: number handlers = 2 Wait pool handlers 40.009161s: number handlers = 2 Wait pool 
handlers 41.009301s: number handlers = 2 Wait pool handlers 42.009415s: number handlers = 2 Wait pool handlers 43.009540s: number handlers = 2 Wait pool handlers 44.009666s: number handlers = 2 Wait pool handlers 45.009779s: number handlers = 2 Wait pool handlers 46.009883s: number handlers = 2 Wait pool handlers 47.010005s: number handlers = 2 Wait pool handlers 48.010150s: number handlers = 2 Wait pool handlers 49.010273s: number handlers = 2 Wait pool handlers 50.010399s: number handlers = 2 Wait pool handlers 51.010503s: number handlers = 2 Wait pool handlers 52.010624s: number handlers = 2 Wait pool handlers 53.010743s: number handlers = 2 Wait pool handlers 54.010863s: number handlers = 2 Wait pool handlers 55.011003s: number handlers = 2 Wait pool handlers 56.011158s: number handlers = 2 Wait pool handlers 57.011278s: number handlers = 2 Wait pool handlers 58.011397s: number handlers = 2 Wait pool handlers 59.011513s: number handlers = 2 Wait pool handlers 60.011629s: number handlers = 2 Wait pool handlers 61.011766s: number handlers = 2 Wait pool handlers 62.011889s: number handlers = 2 Wait pool handlers 63.012003s: number handlers = 2 Wait pool handlers 64.012214s: number handlers = 2 Wait pool handlers 65.012327s: number handlers = 2 Wait pool handlers 66.012440s: number handlers = 2 Wait pool handlers 67.012573s: number handlers = 2 Wait pool handlers 68.012681s: number handlers = 2 Wait pool handlers 69.012837s: number handlers = 2 Wait pool handlers 70.012952s: number handlers = 2 Wait pool handlers 71.013076s: number handlers = 2 Wait pool handlers 72.013185s: number handlers = 2 Wait pool handlers 73.013316s: number handlers = 2 Wait pool handlers 74.013464s: number handlers = 2 Wait pool handlers 75.013602s: number handlers = 2 Wait pool handlers 76.013740s: number handlers = 2 Wait pool handlers 77.013874s: number handlers = 2 Wait pool handlers 78.014006s: number handlers = 2 Wait pool handlers 79.014115s: number handlers = 2 Wait pool handlers 80.014220s: number handlers = 2 Wait pool handlers 81.014335s: number handlers = 2 Wait pool handlers 82.014450s: number handlers = 2 Wait pool handlers 83.014570s: number handlers = 2 Wait pool handlers 84.014712s: number handlers = 2 Wait pool handlers 85.014837s: number handlers = 2 Wait pool handlers 86.014952s: number handlers = 2 2025-04-17T18:19:14.372165Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7494347981493559945:2336], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-04-17T18:19:14.372165Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7494347981493560143:2359], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2025-04-17T18:19:14.372355Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-17T18:19:14.372399Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2025-04-17T18:19:14.664719Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-17T18:19:14.664799Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: 
[6:7494347981493559852:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-17T18:19:14.664837Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-17T18:19:14.664878Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-04-17T18:19:14.664983Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2QzMDVmZjQtNzcxZTRmZjEtOGMxNGQxNWYtOTJjYWJmYTU=, ActorId: [6:7494347981493559852:2329], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:14.269085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:14.269170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.269232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:14.269270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:14.269348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:14.269382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:14.269442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.269537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:14.270027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:14.359830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:14.359888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:14.376497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:14.376941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:14.377144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:14.385180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:14.385366Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:14.386044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.386289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:14.389186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.390551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.390615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.390840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:14.390894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.390948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:14.391111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.398202Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:14.538483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:14.538710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.538931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:14.539145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:14.539196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.541262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.541392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:14.541567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.541616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:14.541653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:14.541691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-17T18:19:14.543406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.543459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:14.543497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:14.545042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.545105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.545144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.545212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.559048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:14.561219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:14.561408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:14.562331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.562462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.562505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.562757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:14.562825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.562995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:14.563079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:14.564835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.564884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.565050Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.565093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:14.565452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.565499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:14.565589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.565628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.565677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.565710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.565745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:14.565782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.565818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:14.565847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:14.565919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:14.565956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:14.565987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:14.567720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.567823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.567855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
essage# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-17T18:19:15.899607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-17T18:19:15.899666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:19:15.900895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-17T18:19:15.901122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-17T18:19:15.901166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-17T18:19:15.901515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-17T18:19:15.901555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-17T18:19:15.901593Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-17T18:19:15.935846Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:15.935998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:15.936063Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-17T18:19:15.936150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:19:15.961194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-17T18:19:15.961368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-17T18:19:15.961443Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-17T18:19:15.961495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.961528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-17T18:19:15.961704Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-17T18:19:15.961860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-17T18:19:15.961918Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:15.963977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.964418Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:15.964469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:19:15.964635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:19:15.964807Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:15.964847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-17T18:19:15.964890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-17T18:19:15.965245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.965286Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-17T18:19:15.965388Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:19:15.965420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:19:15.965455Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-17T18:19:15.965484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:19:15.965518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-17T18:19:15.965553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-17T18:19:15.965588Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-17T18:19:15.965617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-17T18:19:15.965747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:19:15.965790Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-17T18:19:15.965822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-17T18:19:15.965848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-17T18:19:15.966674Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:19:15.966779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2025-04-17T18:19:15.966815Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:19:15.966852Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:19:15.966893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-17T18:19:15.967708Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:19:15.967773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-17T18:19:15.967798Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-17T18:19:15.967821Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-17T18:19:15.967847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:15.967905Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-17T18:19:15.967942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-17T18:19:15.972002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:19:15.972301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-17T18:19:15.972391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-17T18:19:15.972428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:545:2480] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-04-17T18:19:15.980799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:15.980995Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.981190Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-04-17T18:19:15.982933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous 
partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:15.983100Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:19:15.983352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:19:15.983396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:19:15.983749Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:15.983825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:19:15.983872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:639:2563] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-04-17T18:17:25.467063Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:25.559721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:25.578625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:25.578926Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:25.586933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:25.587102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:25.587303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:25.587412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:25.587494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:25.587565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:25.587634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:25.587734Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:25.587820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:25.587896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:25.587959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:25.588020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:25.608968Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:25.609132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:25.609199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:25.609364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:25.609545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:25.609638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:25.609696Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:25.609810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:25.609890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:25.609925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:25.609949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:25.610079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:25.610134Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:25.610170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:25.610212Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:25.610281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:25.610337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:25.610377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:25.610403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:25.610488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:25.610534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:25.610556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:25.610589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:25.610618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:25.610639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:25.610967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-17T18:17:25.611045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-17T18:17:25.611113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:17:25.611197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-17T18:17:25.611351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:25.611397Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:25.611427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:25.611579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:25.611619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:25.611642Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:25.611746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:25.611781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:25.611801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:25.611979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:25.612026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:25.612050Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:25.612143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:25.612176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:25.612214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:
[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[
NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913846529;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913846529;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_
size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913846529;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-17T18:19:15.401247Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5648:7640];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-17T18:19:15.402575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5648:7640];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:14.850734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:14.850834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.850899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:14.850935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:14.850992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:14.851021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:14.851087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.851164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:14.851465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:14.932121Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-17T18:19:14.932172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:14.946212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:14.946566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:14.946791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:14.954356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:14.954528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:14.955111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.955304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:14.957777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.959066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.959122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.959336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:14.959379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.959423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:14.959567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.965686Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:15.090228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:15.090409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.090560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:15.090755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:15.090817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.092610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:15.092718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-04-17T18:19:15.092867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.092910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:15.092947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:15.092985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:15.094486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.094528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:15.094551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:15.095900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.095934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.095962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:15.096002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:15.098630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:15.099968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:15.100098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:15.100905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:15.100995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:15.101027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:15.101236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:15.101271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:15.101404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:15.101471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:15.102959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:15.103001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:15.103149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:15.103185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:15.103447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:15.103475Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:15.103548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:15.103572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:15.103606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:15.103633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:15.103665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:15.103692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:15.103715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:15.103740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:15.103783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:15.103807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:15.103827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:15.105016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:15.105102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:15.105133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
iber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:19:16.450208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:19:16.450540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:16.450578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-17T18:19:16.450611Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:16.484104Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:16.484227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:16.484286Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-17T18:19:16.484323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:19:16.512488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-17T18:19:16.512654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-17T18:19:16.512722Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-17T18:19:16.512767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.512804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-17T18:19:16.512957Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-17T18:19:16.513117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:16.515303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.515639Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:16.515684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:19:16.515937Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:16.515976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 
2025-04-17T18:19:16.516561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.516610Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-17T18:19:16.516707Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:19:16.516747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:16.516784Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:19:16.516815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:16.516854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-17T18:19:16.516893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:16.516929Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:19:16.516960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T18:19:16.517107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:19:16.517147Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-17T18:19:16.517178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:19:16.517697Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:16.517785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:16.517822Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:19:16.517863Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:19:16.517895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:16.517963Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-17T18:19:16.517994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-17T18:19:16.521295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:19:16.521378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:19:16.521412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:676:2597] TestWaitNotification: OK eventTxId 105 2025-04-17T18:19:16.528750Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:16.528961Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 254us result status StatusSuccess 2025-04-17T18:19:16.529578Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 
OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex >> KqpJoinOrder::TPCDS88-ColumnStore |94.5%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:14.267729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:14.267830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.267888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:14.267926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:14.268755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:14.268785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:14.268847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:14.268912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-17T18:19:14.269995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:14.348142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:14.348187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:14.358299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:14.358596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:14.358801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:14.365264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:14.365441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:14.366033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.366280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:14.369238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.374907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.374976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.375596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:14.375654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.375716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:14.375852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.382250Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:14.502122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:14.503131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.503923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:14.504893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:14.504965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.507590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.507726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:14.507917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.507992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:14.508037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:14.508085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:14.509887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.509941Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:14.509977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:14.511559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.511604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.511652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.511710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.516428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:14.518498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:14.519499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:14.520540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:14.520659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:14.520713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.521919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:14.521984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:14.522977Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:14.523085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:14.525533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:14.525586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:14.525770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:14.525812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:14.526202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:14.526332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:14.526434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.526483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.526535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:14.526571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.526606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:14.526645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:14.526698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:14.526732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:14.526808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:14.526844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:14.526878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:14.528620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.528726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:14.528761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
arts: 1/1 2025-04-17T18:19:16.396015Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:19:16.396051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:16.396088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-17T18:19:16.396126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:16.396165Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:19:16.396195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T18:19:16.396327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:19:16.396368Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-17T18:19:16.396403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:19:16.397587Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:16.397666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:16.397696Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:19:16.397735Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:19:16.397774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:16.397857Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:19:16.402011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:19:16.408004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:19:16.408055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:19:16.408556Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:16.408648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:19:16.408684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:756:2669] TestWaitNotification: OK eventTxId 105 2025-04-17T18:19:16.979106Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:16.979389Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
pathId 3 took 297us result status StatusSuccess 2025-04-17T18:19:16.980018Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:17.063683Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:17.063971Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 319us result status StatusSuccess 2025-04-17T18:19:17.064586Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-04-17T18:19:17.068046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:17.068285Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.068437Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
2025-04-17T18:19:17.071126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:17.071314Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T18:19:17.071655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T18:19:17.071695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T18:19:17.072181Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T18:19:17.072300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T18:19:17.072341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:771:2683] TestWaitNotification: OK eventTxId 106 |94.5%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::CanonizedJoinOrderTPCH5 >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:31.549647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:31.549706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 
10 2025-04-17T18:18:31.549734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:31.549756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:31.549790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:31.549823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:31.549876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:31.549934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:31.550214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:31.605931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:31.605975Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:31.614739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:31.615343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:31.615481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:31.623151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:31.623676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:31.624085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.624218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:31.626625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.627554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:31.627591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.627723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:31.627753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:31.627779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:31.627871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:31.632894Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:31.726803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:31.727014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.727176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:31.727377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:31.727424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.729310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.729432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:31.729618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.729682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:31.729742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:31.729772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:31.731342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.731393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:31.731423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:31.732880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.732929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.732967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.733018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-04-17T18:18:31.736411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:31.737963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:31.738096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:31.738958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.739083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:31.739125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.739358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:31.739424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.739574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:31.739638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:31.741456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:31.741495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:31.741633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.741668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:31.741998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.742056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:31.742144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:31.742196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:31.742230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:31.742297Z no ... 
tusSuccess 2025-04-17T18:19:17.765314Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 
67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:17.769900Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { 
ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:17.770110Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 233us result status StatusSuccess 2025-04-17T18:19:17.770724Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 
1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:17.549804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:17.549883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:17.549938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:17.549972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:17.550011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:17.550050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:17.550104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:17.550166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:17.550496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:17.633380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:17.633430Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-17T18:19:17.648379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:17.648813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:17.648979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:17.657242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:17.657437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:17.658070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:17.658291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:17.661244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:17.662521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:17.662575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:17.662783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:17.662830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:17.662870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:17.663010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.669581Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:17.760269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:17.760467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.760607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:17.760767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:17.760815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.762646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:17.762758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:17.762971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.763020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:17.763043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:17.763067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:17.764636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.764687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:17.764713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:17.766322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.766378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.766431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:17.766490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.771013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:17.773202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:17.773388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:17.774334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:17.774445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:17.774492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:17.774754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:17.774839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:17.775027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:17.775120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:17.777264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:17.777333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:17.777545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:17.777594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:17.777941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.777977Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:17.778055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:17.778080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.778119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:17.778150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.778198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:17.778247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.778272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:17.778294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:17.778338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:17.778364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:17.778387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:17.779565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:17.779642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:17.779669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:18.076474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:19:18.076725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:19:18.076757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:19:18.077145Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:18.077216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:19:18.077241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:639:2564] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-04-17T18:19:18.079494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:18.079652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.081047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-17T18:19:18.083112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:18.083251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-17T18:19:18.083544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-17T18:19:18.083622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-17T18:19:18.084107Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-17T18:19:18.084226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-17T18:19:18.084264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:646:2571] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-04-17T18:19:18.087168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:18.087371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.087559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-17T18:19:18.090109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:18.090335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-17T18:19:18.090656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-17T18:19:18.090700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-17T18:19:18.091093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-17T18:19:18.091164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-17T18:19:18.091188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:653:2578] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-04-17T18:19:18.093611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:18.093791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.093951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-17T18:19:18.096443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:18.096648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-17T18:19:18.096999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-17T18:19:18.097042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-17T18:19:18.097501Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-17T18:19:18.097595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-17T18:19:18.097647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:660:2585] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-04-17T18:19:18.100896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:18.101142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.101365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-17T18:19:18.103711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:18.103952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-04-17T18:19:18.104293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-04-17T18:19:18.104335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-04-17T18:19:18.104839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-04-17T18:19:18.104944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-17T18:19:18.105020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:667:2592] TestWaitNotification: OK eventTxId 109
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:17.471272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:17.471352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:17.471414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:17.471449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:17.471488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:17.471527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:17.471581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:17.471645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:17.471945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:17.556665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:17.556738Z node 1 :IMPORT WARN: Table profiles were not loaded
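
For reference, the five rejections above (txIds 105-109) all trip the same schemeshard invariant: a topic split boundary must be non-empty and must lie strictly inside the parent partition's key range, i.e. FromBound < SplitBoundary < ToBound under lexicographic comparison of the raw key bytes. The sketch below is a minimal stand-alone restatement of that check for readers following the log; it is not YDB's actual implementation, and the function name and message strings are illustrative only.

    #include <initializer_list>
    #include <iostream>
    #include <optional>
    #include <string>

    // Hedged sketch of the split-boundary validation seen in the log.
    // Bounds are raw byte strings; std::string compares them via
    // char_traits<char>, i.e. lexicographically as unsigned bytes.
    // An empty optional models an unbounded edge of the key range.
    static bool IsValidSplitBoundary(const std::string& boundary,
                                     const std::optional<std::string>& fromBound,
                                     const std::optional<std::string>& toBound,
                                     std::string& reason) {
        if (boundary.empty()) {
            reason = "split boundary is empty";        // cf. txId 105
            return false;
        }
        if (fromBound && boundary <= *fromBound) {
            reason = "split boundary <= FromBound";    // cf. txIds 106, 107
            return false;
        }
        if (toBound && boundary >= *toBound) {
            reason = "split boundary >= ToBound";      // cf. txIds 108, 109
            return false;
        }
        return true;
    }

    int main() {
        // Partition 1 from the log: FromBound 0x55..54, ToBound 0xAA..A9.
        const std::string from = std::string(15, '\x55') + '\x54'; // "UUUUUUUUUUUUUUUT"
        const std::string to   = std::string(15, '\xAA') + '\xA9'; // the "\252...\251" bound
        std::string reason;
        // "\x01", from, "\xAD" and to reproduce the four non-empty rejections;
        // "\x7F" is an example of a boundary that would pass the check.
        for (const std::string& b :
             {std::string("\x01"), from, std::string("\xAD"), to, std::string("\x7F")}) {
            if (IsValidSplitBoundary(b, from, to, reason)) {
                std::cout << "accepted\n";
            } else {
                std::cout << "rejected: " << reason << "\n";
            }
        }
    }
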
2025-04-17T18:19:17.574306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:17.574785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:17.574953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:17.584169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:17.584399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:17.585112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:17.585356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:17.588491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:17.589749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:17.589803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:17.590002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:17.590046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:17.590088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:17.590230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.596400Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:17.695358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:17.695561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.695703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:17.695890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:17.695941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.698095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:17.698266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:17.698469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.698525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:17.698557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:17.698586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:17.700139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.700178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:17.700209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:17.701471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.701510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.701542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:17.701585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.708253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:17.709905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:17.710035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:17.710821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:17.710916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:17.710973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:17.711184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:17.711221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:17.711360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:17.711437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:17.712914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:17.712948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:17.713113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:17.713144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:17.713401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:17.713444Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:17.713504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:17.713544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.713576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:17.713610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.713637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:17.713671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:17.713704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:17.713729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:17.713788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:17.713825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:17.713854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:17.715123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:17.715194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:17.715216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
y Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:18.126070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:755:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:758:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:759:2058] recipient: [1:757:2667] Leader for TabletID 72057594046678944 is [1:760:2668] sender: [1:761:2058] recipient: [1:757:2667] 2025-04-17T18:19:18.169307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:18.169388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:18.169428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:18.169463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:18.169519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:18.169546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:18.169603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:18.169668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:18.170032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:18.186358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:18.187782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:18.187955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:18.188150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:18.188184Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:18.188296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:18.188956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-17T18:19:18.189075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-17T18:19:18.189121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:19:18.189190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.189252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 
72057594046678944 2025-04-17T18:19:18.189423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-17T18:19:18.189709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.189784Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-17T18:19:18.189966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.190065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.190168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-17T18:19:18.190252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-17T18:19:18.190296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-17T18:19:18.190313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-17T18:19:18.190327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:19:18.190417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.190487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.190700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-17T18:19:18.190890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:19:18.191236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.191372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.191755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.191858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.192065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.192183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.192284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.192450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.192551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.192752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
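
A reading aid for the describe-result a few entries above: KeyRange bounds are printed as escaped byte strings, so "UUUUUUUUUUUUUUUT" is fifteen 0x55 bytes followed by 0x54, the \252...\251 bound is fifteen 0xAA bytes followed by 0xA9, and "\177" is the single byte 0x7F. Inactive partition 1 therefore covered [0x55..54, 0xAA..A9), and its children 3 and 4 tile exactly that range on either side of the split point 0x7F. A small stand-alone check of this tiling (illustrative only, not YDB code):

    #include <cassert>
    #include <cstdio>
    #include <string>

    int main() {
        // KeyRange bounds decoded from the describe-result in the log.
        const std::string parentFrom = std::string(15, '\x55') + '\x54'; // "UUUUUUUUUUUUUUUT"
        const std::string parentTo   = std::string(15, '\xAA') + '\xA9'; // "\252...\251"
        const std::string splitPoint = "\x7F";                           // "\177"

        // Children of partition 1: partition 3 = [parentFrom, splitPoint),
        // partition 4 = [splitPoint, parentTo). They tile the parent range
        // without gap or overlap exactly when parentFrom < splitPoint < parentTo.
        assert(parentFrom < splitPoint && splitPoint < parentTo);

        for (unsigned char c : parentFrom) std::printf("%02X ", c);
        std::printf("| 7F | ");
        for (unsigned char c : parentTo) std::printf("%02X ", c);
        std::printf("\n");
    }
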
2025-04-17T18:19:18.192992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.193143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.193206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.193255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.201082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:18.201159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:18.201972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:18.202022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:18.202080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:18.203160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
>> ReadOnlyVDisk::TestGarbageCollect [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitInactivePartition
>> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD]
Test command err:
RandomSeed# 14352770786793652320 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-04-17T18:19:12.929241Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-04-17T18:19:12.932641Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-17T18:19:13.575202Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in
read-only Sender# [1:5309:698] 2025-04-17T18:19:13.576018Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-04-17T18:19:13.931083Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:13.931331Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-17T18:19:14.170710Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.171491Z 2 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.172157Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:14.172365Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [ea828ada4a8a2ffb] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-04-17T18:19:14.484098Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:14.484265Z 2 00h06m00.210512s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:14.484308Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-04-17T18:19:15.027684Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:15.027901Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:15.027954Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:15.027990Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5330:719] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-04-17T18:19:15.233792Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:15.233937Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:15.233975Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:15.234011Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5330:719] 2025-04-17T18:19:15.234045Z 5 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5337:726] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-04-17T18:19:15.419858Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:15.420050Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:15.420113Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:15.420167Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5330:719] 2025-04-17T18:19:15.420219Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5337:726] 2025-04-17T18:19:15.420273Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5344:733] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-04-17T18:19:15.580349Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-04-17T18:19:15.580489Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:15.580533Z 3 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:15.580569Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5330:719] 2025-04-17T18:19:15.580604Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5337:726] 2025-04-17T18:19:15.580638Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5344:733] 2025-04-17T18:19:15.580673Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5351:740] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-04-17T18:19:15.768839Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-04-17T18:19:15.768925Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:15.768974Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5330:719] 2025-04-17T18:19:15.769021Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5337:726] 2025-04-17T18:19:15.769067Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5344:733] 2025-04-17T18:19:15.769112Z 7 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5351:740] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-04-17T18:19:16.008308Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-04-17T18:19:16.008373Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5330:719] 2025-04-17T18:19:16.008407Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5337:726] 2025-04-17T18:19:16.008438Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5344:733] 2025-04-17T18:19:16.008468Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5351:740] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-04-17T18:19:16.296208Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5330:719] 2025-04-17T18:19:16.296276Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5337:726] 2025-04-17T18:19:16.296315Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5344:733] 2025-04-17T18:19:16.296351Z 7 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5351:740] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-04-17T18:19:16.570259Z 5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5337:726] 2025-04-17T18:19:16.570326Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5344:733] 2025-04-17T18:19:16.570361Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5351:740] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-04-17T18:19:17.347836Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5344:733] 2025-04-17T18:19:17.347902Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5351:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-04-17T18:19:17.745196Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5351:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}}
>> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD]
>> KqpJoinOrder::TPCDS87+ColumnStore
>> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull
>> ReadOnlyVDisk::TestReads [GOOD]
>> KqpJoinOrder::TPCDS61+ColumnStore
>> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:18.229940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:18.230007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval#
1.000000s, InflightLimit# 10 2025-04-17T18:19:18.230050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:18.230078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:18.230126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:18.230156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:18.230230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:18.230296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:18.230624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:18.312496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:18.312555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:18.327651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:18.328073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:18.328238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:18.336272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:18.336475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:18.337108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:18.337327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:18.340025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:18.341297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:18.341358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:18.341578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:18.341625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:18.341665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:19:18.341796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.348188Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:18.484217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:18.484467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.484686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:18.484956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:18.485012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.487095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:18.487258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:18.487484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.487536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:18.487571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:18.487603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:18.489245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.489318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:18.489353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:18.490854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.490906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.490948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:18.491010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:18.500435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:18.502253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:18.502413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:18.503433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:18.503562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:18.503625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:18.503900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:18.503951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:18.504140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:18.504230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:18.506119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:18.506164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:18.506367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:18.506413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:18.506816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:18.506881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:18.506975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:18.507031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:18.507083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:18.507115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:18.507149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:18.507191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:18.507223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:18.507251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:18.507311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:18.507346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:18.507380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:18.509072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:18.509183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:18.509223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 2057594046678944 2025-04-17T18:19:19.861395Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:19.861462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-17T18:19:19.861567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2025-04-17T18:19:19.862516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2025-04-17T18:19:19.862572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2025-04-17T18:19:19.862639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2025-04-17T18:19:19.862770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-17T18:19:19.862884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-17T18:19:19.862935Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-17T18:19:19.862971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-17T18:19:19.863000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:19:19.864002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:19:19.864162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:19:19.864191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:19:19.864457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:19.864484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 105, ready parts: 0/1, is published: true 2025-04-17T18:19:19.864525Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:19.898112Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:19.898253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:19.898299Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-17T18:19:19.898333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-17T18:19:19.927994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-17T18:19:19.928172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-17T18:19:19.928248Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-17T18:19:19.928296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-17T18:19:19.928331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-17T18:19:19.928650Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-17T18:19:19.928837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:19.931461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:19.932036Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:19.932103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:19:19.932350Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:19.932393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-17T18:19:19.932745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:19.932785Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-17T18:19:19.932882Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:19:19.932913Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:19.932950Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:19:19.932982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:19.933035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-17T18:19:19.933076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:19.933111Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:19:19.933161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T18:19:19.933282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:19:19.933321Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-17T18:19:19.933350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:19:19.933789Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:19.933866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:19.933900Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:19:19.933930Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:19:19.933955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:19.934010Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-17T18:19:19.934040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-17T18:19:19.938362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-17T18:19:19.938470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:19:19.938509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:668:2591] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-04-17T18:19:19.945682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944
2025-04-17T18:19:19.945843Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944
2025-04-17T18:19:19.945964Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944
2025-04-17T18:19:19.947581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-17T18:19:19.947701Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-04-17T18:19:19.947896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-04-17T18:19:19.947930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-04-17T18:19:19.948223Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-04-17T18:19:19.948284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-04-17T18:19:19.948309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:759:2670]
TestWaitNotification: OK eventTxId 106
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD]
Test command err: RandomSeed# 10995124872160487061
=== Trying to put and get a blob ===
SEND TEvPut with key [1:1:0:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #0 to read-only ===
Setting VDisk read-only to 1 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #1 to read-only ===
Setting VDisk read-only to 1 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #2 to read-only ===
Setting VDisk read-only to 1 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #3 to read-only ===
Setting VDisk read-only to 1 for position 3
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #4 to read-only ===
Setting VDisk read-only to 1 for position 4
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #5 to read-only ===
Setting VDisk read-only to 1 for position 5
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #6 to read-only ===
Setting VDisk read-only to 1 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Restoring to normal VDisk #0 ===
Setting VDisk read-only to 0 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Restoring to normal VDisk #1 ===
Setting VDisk read-only to 0 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Restoring to normal VDisk #2 ===
Setting VDisk read-only to 0 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Restoring to normal VDisk #3 ===
Setting VDisk read-only to 0 for position 3
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Restoring to normal VDisk #4 ===
Setting VDisk read-only to 0 for position 4
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Restoring to normal VDisk #5 ===
Setting VDisk read-only to 0 for position 5
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Restoring to normal VDisk #6 ===
Setting VDisk read-only to 0 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader
for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:19:16.604909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:19:16.604996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:16.605045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:19:16.605090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:19:16.605151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:19:16.605193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:19:16.605251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:19:16.605311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:19:16.605613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:19:16.659992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:19:16.660042Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:16.670225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:19:16.670684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:19:16.670867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:19:16.677691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:19:16.677829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:19:16.678267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:16.678426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:19:16.680363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:16.681476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:16.681521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:16.681660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:19:16.681698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:16.681725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-17T18:19:16.681806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.686920Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:19:16.779887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:19:16.780058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.780212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:19:16.780372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:19:16.780411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.782335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:16.782450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:19:16.782606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.782644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:19:16.782666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:19:16.782686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:19:16.784096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.784134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:19:16.784156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:19:16.785336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.785378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.785404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:16.785434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:19:16.787785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:19:16.789065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:19:16.789196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:19:16.789946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:19:16.790040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:16.790072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:16.790294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:19:16.790328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:19:16.790447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:19:16.790516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:19:16.791960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:16.791999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:19:16.792142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:16.792181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:19:16.792460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:19:16.792490Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:19:16.792552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:16.792575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:16.792610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:19:16.792631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:19:16.792655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:19:16.792686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T18:19:16.792714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:19:16.792736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:19:16.792771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:19:16.792813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:19:16.792842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:19:16.793955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:16.794022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:19:16.794044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... AT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-17T18:19:19.556916Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-17T18:19:19.557115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:19.560155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:19.560437Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:19:19.560477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:19:19.560704Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:19:19.560740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-17T18:19:19.561139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-17T18:19:19.561186Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-17T18:19:19.561333Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:19:19.561377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:19.561424Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-17T18:19:19.561458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:19.561498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-17T18:19:19.561548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-17T18:19:19.561609Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-17T18:19:19.561648Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-17T18:19:19.561791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-17T18:19:19.561839Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-17T18:19:19.561879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-17T18:19:19.562396Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:19.562476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-17T18:19:19.562512Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-17T18:19:19.562561Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-17T18:19:19.562604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:19:19.562689Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-17T18:19:19.567246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-17T18:19:19.573196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-17T18:19:19.573254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-17T18:19:19.573734Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-17T18:19:19.573838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-17T18:19:19.573883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:758:2669] TestWaitNotification: OK eventTxId 105 2025-04-17T18:19:20.145483Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:20.145773Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 319us result status StatusSuccess 2025-04-17T18:19:20.146486Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: 
false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:20.240641Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:20.240939Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 339us result status StatusSuccess 2025-04-17T18:19:20.241440Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false 
BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>>>>> Verify partition 0
>>>>> Verify partition 1
>>>>> Verify partition 2
>> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin
|94.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log}
|94.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup
>> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore
>> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore
>> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull
>> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD]
>> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD]
Test command err: Trying to start YDB, gRPC: 28122, MsgBus: 9342
2025-04-17T18:19:17.483716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348379974417250:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:19:17.483811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fc6/r3tmp/tmpzSKEYi/pdisk_1.dat
2025-04-17T18:19:17.772799Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28122, node 1
2025-04-17T18:19:17.827830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:19:17.827850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:19:17.827859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:19:17.828004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:19:17.856844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:19:17.856956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:19:17.858690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:9342
TClient is connected to server localhost:9342
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:19:18.274877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:18.302890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:18.417463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:18.556883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:18.609652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:19.939486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348388564353631:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:19.939607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:20.212472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:19:20.242578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:19:20.270602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:19:20.296703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:19:20.336258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:19:20.363262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:19:20.431287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:19:20.508826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348392859321595:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:20.508895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348392859321600:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:20.508907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:20.512054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:19:20.521578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348392859321602:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:19:20.576194Z node 1 :TX_PROXY ERROR: Actor# [1:7494348392859321658:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:19:21.492790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:19:21.519461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-17T18:19:21.543871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-17T18:19:21.567325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-17T18:19:21.592273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-17T18:19:22.483736Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348379974417250:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:19:22.483802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Execution, code: 1060
:4:47: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] >> ReadOnlyVDisk::TestSync [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 9211643229070157374 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2025-04-17T18:19:13.117317Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8807:940] 2025-04-17T18:19:13.117595Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8814:947] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-04-17T18:19:14.716332Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8821:954] 2025-04-17T18:19:14.716504Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8814:947] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-17T18:19:18.444864Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8835:968] 2025-04-17T18:19:18.444957Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: 
(2181038080) Unavailable in read-only Sender# [1:8828:961] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-17T18:19:20.539136Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8842:975] 2025-04-17T18:19:20.539234Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8835:968] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-04-17T18:19:22.759151Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8849:982] 2025-04-17T18:19:22.759242Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8842:975] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2025-04-17T18:19:25.256129Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8849:982] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 18405, MsgBus: 26186 2025-04-17T18:19:21.296475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348400700699168:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:21.296761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fa7/r3tmp/tmpbZVJdI/pdisk_1.dat 2025-04-17T18:19:21.540532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:21.545457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:21.545555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:21.548753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18405, node 1 2025-04-17T18:19:21.594922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:21.594938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:21.594945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:21.595101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26186 TClient is connected to server localhost:26186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:21.975314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:19:22.003719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:22.114506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:22.255039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.327945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:23.733046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348409290635550:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.733182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:24.024492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.051568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.079106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.104541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.160886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.229092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.264889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.319463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348413585603510:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:24.319518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:24.319599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348413585603515:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:24.323359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:24.334374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348413585603517:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:19:24.422141Z node 1 :TX_PROXY ERROR: Actor# [1:7494348413585603572:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:25.426556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.451904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:26.296482Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348400700699168:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:26.296588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2025-04-17T18:16:58.259260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:58.259394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:58.259533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bdd/r3tmp/tmpOrVVWW/pdisk_1.dat 2025-04-17T18:16:58.707136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23232, node 1 2025-04-17T18:16:58.965330Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:58.965388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:58.965433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:58.966161Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:58.968339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:59.054138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:59.054284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:59.072198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5932 2025-04-17T18:16:59.685373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:17:02.979724Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:17:03.039110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:03.039228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:03.080457Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:17:03.085483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:03.360205Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.360720Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.361359Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.361713Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.361814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.361896Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.361972Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.362045Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.362128Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:03.537006Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:03.537117Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:03.554390Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:03.690528Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:03.733548Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:17:03.733728Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:17:03.774624Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:17:03.776447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:17:03.776699Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:17:03.776767Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:17:03.776833Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:17:03.776907Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:17:03.776973Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:17:03.777030Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:17:03.778545Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:17:03.820532Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:17:03.820682Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:17:03.830022Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:17:03.835085Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:17:03.835245Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:17:03.845613Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:17:03.867541Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:17:03.867598Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:17:03.867670Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:17:03.915830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:17:03.923817Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:17:03.923959Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:17:04.112359Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:17:04.302923Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:17:04.372395Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:17:05.141592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:05.141789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:05.161111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:17:05.511156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:05.511398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:05.511837Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:05.511995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:05.512118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:05.512238Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:05.512362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:05.512487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:05.512613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:05.512773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:05.512924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:05.513037Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2400:2892];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:05.545510Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:05.545606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... server id = [2:8573:6493], tablet id = 72075186224037901, status = OK 2025-04-17T18:19:22.837997Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8567:6487], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.839504Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8568:6488], server id = [2:8572:6492], tablet id = 72075186224037902, status = OK 2025-04-17T18:19:22.839580Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8568:6488], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.839758Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8569:6489], server id = [2:8575:6495], tablet id = 72075186224037903, status = OK 2025-04-17T18:19:22.839801Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8569:6489], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.845657Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-17T18:19:22.846309Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8565:6485], server id = [2:8570:6490], tablet id = 72075186224037899 2025-04-17T18:19:22.846355Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.847801Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:19:22.848745Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8566:6486], server id = [2:8571:6491], tablet id = 72075186224037900 2025-04-17T18:19:22.848782Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.849280Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8587:6505], server id = [2:8590:6506], tablet id = 72075186224037904, status = OK 2025-04-17T18:19:22.849379Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8587:6505], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.851664Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-17T18:19:22.852257Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8591:6507], server id = [2:8593:6508], tablet id = 72075186224037905, status = OK 2025-04-17T18:19:22.852339Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8591:6507], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.852509Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8568:6488], server id = [2:8572:6492], tablet id = 72075186224037902 2025-04-17T18:19:22.852535Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.853193Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-17T18:19:22.854572Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8567:6487], server id = [2:8573:6493], tablet id = 72075186224037901 2025-04-17T18:19:22.854604Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.854964Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-17T18:19:22.855583Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, 
client id = [2:8595:6510], server id = [2:8598:6513], tablet id = 72075186224037906, status = OK 2025-04-17T18:19:22.855674Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8595:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.856162Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8569:6489], server id = [2:8575:6495], tablet id = 72075186224037903 2025-04-17T18:19:22.856190Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.856777Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8597:6512], server id = [2:8603:6517], tablet id = 72075186224037907, status = OK 2025-04-17T18:19:22.856840Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8597:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.857240Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8601:6516], server id = [2:8604:6518], tablet id = 72075186224037908, status = OK 2025-04-17T18:19:22.857297Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8601:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:22.860392Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-17T18:19:22.860808Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8587:6505], server id = [2:8590:6506], tablet id = 72075186224037904 2025-04-17T18:19:22.860833Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.861975Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:19:22.862640Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8591:6507], server id = [2:8593:6508], tablet id = 72075186224037905 2025-04-17T18:19:22.862667Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.864907Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:19:22.865712Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8595:6510], server id = [2:8598:6513], tablet id = 72075186224037906 2025-04-17T18:19:22.865763Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.866035Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:19:22.866510Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8597:6512], server id = [2:8603:6517], tablet id = 72075186224037907 2025-04-17T18:19:22.866554Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.866907Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:19:22.866968Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:19:22.867170Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:19:22.867564Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8601:6516], server id = [2:8604:6518], tablet id = 72075186224037908 2025-04-17T18:19:22.867592Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:22.905967Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:19:22.906229Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:19:23.378913Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 3 
2025-04-17T18:19:23.379001Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:19:26.451973Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:19:26.452211Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:26.567674Z node 2 :STATISTICS INFO: Node 3 is unavailable 2025-04-17T18:19:26.567740Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:19:26.567857Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-17T18:19:26.567885Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:19:26.567957Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:19:26.568023Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:19:26.568459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:19:26.581887Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:19:26.582061Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-17T18:19:26.582511Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8729:6582], server id = [2:8730:6583], tablet id = 72075186224037900, status = OK 2025-04-17T18:19:26.582578Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8729:6582], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-17T18:19:26.583606Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-17T18:19:26.583664Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:19:26.583871Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8729:6582], server id = [2:8730:6583], tablet id = 72075186224037900 2025-04-17T18:19:26.583896Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:26.583959Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:19:26.584145Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:19:26.584555Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-17T18:19:26.587071Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:19:26.618261Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8748:6601]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:26.618410Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:19:26.618442Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8748:6601], StatRequests.size() = 1 2025-04-17T18:19:26.708664Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-04-17T18:19:24.000000Z 2025-04-17T18:19:26.709165Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mzk2ZmNjOWYtZjI5MDczMTMtZTBhMDVkNzktYmYzODkzNDE=, TxId: 2025-04-17T18:19:26.709214Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mzk2ZmNjOWYtZjI5MDczMTMtZTBhMDVkNzktYmYzODkzNDE=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-17T18:19:26.709896Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8762:6607]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:26.710205Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:19:26.710257Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:19:26.710459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:26.713826Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:19:26.713894Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-17T18:19:26.713955Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:19:26.720351Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] |94.6%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> KqpJoinOrder::Chain65Nodes >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 >> BasicStatistics::TwoServerlessDbs [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 21521, MsgBus: 2092 2025-04-17T18:19:18.668982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348387889560111:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:18.669121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fb7/r3tmp/tmpsdlwNi/pdisk_1.dat 2025-04-17T18:19:18.980432Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21521, node 1 2025-04-17T18:19:18.989981Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:19:19.014909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:19.014927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:19.014935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:19.015030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:19.048959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:19.049130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:19.050795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2092 TClient is connected to server localhost:2092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:19.412644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:19:19.427031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:19.565704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:19.683751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:19.749308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:21.017734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348400774463797:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.017886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.305677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.342279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.368497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.392272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.444902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.508853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.538846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.617268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348400774464463:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.617345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348400774464468:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.617363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.620904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:21.630650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348400774464470:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:21.714252Z node 1 :TX_PROXY ERROR: Actor# [1:7494348400774464529:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:22.561796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.604426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7051, MsgBus: 29838 2025-04-17T18:19:23.969496Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348405419937355:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:23.969632Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fb7/r3tmp/tmpy6SG9w/pdisk_1.dat 2025-04-17T18:19:24.073447Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7051, node 2 2025-04-17T18:19:24.105929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:24.106003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:24.108784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:24.140948Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:24.141008Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:24.141015Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:24.141123Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29838 TClient is connected to server localhost:29838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:19:24.526263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:24.537767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:24.592115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:24.776872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:24.856894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:26.929925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348418304841021:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:26.929974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:26.983986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.015760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.041455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.111188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.142162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.206873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.246413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.329080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348422599808990:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.329162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.329336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348422599808995:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.332403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:27.339955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348422599808997:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:19:27.430300Z node 2 :TX_PROXY ERROR: Actor# [2:7494348422599809054:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:28.282985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:28.329372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:28.969853Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348405419937355:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:28.969937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2025-04-17T18:17:40.549577Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:40.635674Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:40.658911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:40.659243Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:40.666926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:40.667157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:40.667398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:40.667549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:40.667664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:40.667765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:40.667885Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:40.668026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:40.668154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:40.668261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:40.668356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:40.668476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:40.693781Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:40.693935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:40.694012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:40.694228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:40.694395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:40.694474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:40.694529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:40.694615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:40.694707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:40.694765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:40.694798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:40.694982Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:40.695053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:40.695091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:40.695123Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:40.695228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:40.695320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:40.695368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:40.695406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:40.695511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:40.695539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:40.695562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:40.695591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:40.695618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:40.695641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:40.695928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-17T18:17:40.695991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-17T18:17:40.696049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-04-17T18:17:40.696130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-17T18:17:40.696302Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:40.696361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:40.696403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:40.696633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:40.696689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:40.696724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:40.696887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:40.696934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:40.696963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:40.697159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:40.697217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:40.697255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:40.697401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:40.697451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:40.697505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ge:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_rang
e:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range
:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913861602;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913861602;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28
;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913861602;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-17T18:19:28.918969Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5508:7500];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-17T18:19:28.920811Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5508:7500];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2025-04-17T18:16:45.134016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:45.134154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:45.134335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001518/r3tmp/tmpp1QLrc/pdisk_1.dat 2025-04-17T18:16:45.550826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18151, node 1 2025-04-17T18:16:45.861818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:45.861883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:45.861914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:45.863007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:45.865430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:45.963037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:45.963159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:45.985582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18530 2025-04-17T18:16:46.613765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:50.039205Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:50.103937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:50.104059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:50.159675Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:50.167051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:50.467373Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.467949Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.468600Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.468973Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.469081Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.469180Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.469272Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.469387Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.469480Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:50.707650Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:50.707780Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:50.721217Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:50.917267Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:50.963988Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:50.964087Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:50.996552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:50.998464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:50.998700Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:50.998757Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:50.998825Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:50.998888Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:50.998952Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:50.999030Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:51.000256Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:51.041581Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:51.041703Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:51.051374Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:16:51.056857Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:16:51.057006Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:16:51.076118Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:16:51.109958Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:51.110022Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:51.110093Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:16:51.121619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:51.135937Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:51.136465Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:51.369782Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:51.615415Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:51.691353Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:52.271863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:16:52.859350Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:53.053218Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:16:53.053282Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:16:53.053369Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:16:53.054254Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2599:2951] 2025-04-17T18:16:53.054393Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2599:2951], schemeshard id = 72075186224037899 2025-04-17T18:16:53.946547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:16:54.459519Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:54.749790Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-17T18:16:54.749866Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-17T18:16:54.749938Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3092:3156], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-04-17T18:16:54.750770Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3093:3157] 2025-04-17T18:16:54.751279Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3093:3157], schemeshard id = 72075186224037905 2025-04-17T18:16:55.858508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3215:3416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:55.858631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:55.874308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-17T18:16:56.112243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3366:3451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.112504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:56.114285Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3371:3455]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:56.114570Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:16:56.114751Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-17T18:16:56.114814Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3374:3458] 2025-04-17T18:16:56.114887Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3 ... 25-04-17T18:19:22.138558Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:19:22.138607Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:22.139015Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:19:22.151444Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:22.155324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8952:6409], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.155430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8962:6414], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.155566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.168848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:19:22.232207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8966:6417], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:19:22.428776Z node 2 :TX_PROXY ERROR: Actor# [2:9064:6465] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:22.486150Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9093:6480]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:22.486704Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-17T18:19:22.487050Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-17T18:19:22.487136Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:19:22.487462Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:22.487559Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9093:6480], StatRequests.size() = 1 2025-04-17T18:19:22.602775Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODEyNTYwODgtOTNhZDJhM2EtYWU0MGI5NDEtYTFhNDIwODY=, TxId: 2025-04-17T18:19:22.602865Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODEyNTYwODgtOTNhZDJhM2EtYWU0MGI5NDEtYTFhNDIwODY=, TxId: 2025-04-17T18:19:22.603536Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:22.618345Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:22.618419Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:19:22.663145Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:19:22.663209Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:19:22.729986Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3735:3401], schemeshard count = 1 2025-04-17T18:19:22.971187Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-17T18:19:22.971250Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 197.000000s, at schemeshard: 72075186224037899 2025-04-17T18:19:22.971543Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-17T18:19:22.986039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:23.202035Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9131:6505]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:23.202407Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:19:23.202453Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9131:6505], StatRequests.size() = 1 2025-04-17T18:19:24.488283Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9182:6537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:24.488566Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:19:24.488605Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9182:6537], StatRequests.size() = 1 2025-04-17T18:19:24.999375Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-17T18:19:24.999463Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 216.000000s, at schemeshard: 72075186224037905 2025-04-17T18:19:24.999715Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2025-04-17T18:19:25.015476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:25.211010Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:19:25.223002Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:25.223068Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:25.223108Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-17T18:19:25.223148Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:25.223506Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-17T18:19:25.226322Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:25.251329Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTE0OTRmNzYtYTQ0NWQ2OWUtMzVkOTgxODMtZTU4MTY4N2Q=, TxId: 2025-04-17T18:19:25.251386Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTE0OTRmNzYtYTQ0NWQ2OWUtMzVkOTgxODMtZTU4MTY4N2Q=, TxId: 2025-04-17T18:19:25.251751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:25.268116Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:25.268182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:25.897654Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9262:6590]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:25.897945Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:19:25.897986Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9262:6590], StatRequests.size() = 1 2025-04-17T18:19:27.327547Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9316:6622]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:27.327862Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:19:27.327902Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9316:6622], StatRequests.size() = 1 2025-04-17T18:19:27.997547Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-17T18:19:27.997822Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:27.998258Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:19:28.009761Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:28.009816Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:28.009853Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2025-04-17T18:19:28.009881Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-17T18:19:28.010223Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-17T18:19:28.012805Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:28.025246Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yjk1N2ZmNmYtZGZiNWRjNWYtYTc1YzlmMjQtYmYxOTdmMw==, TxId: 2025-04-17T18:19:28.025302Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yjk1N2ZmNmYtZGZiNWRjNWYtYTc1YzlmMjQtYmYxOTdmMw==, TxId: 2025-04-17T18:19:28.025691Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:28.040688Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-17T18:19:28.040737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:19:28.697775Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9393:6671]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:28.698220Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:19:28.698268Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9393:6671], StatRequests.size() = 1 2025-04-17T18:19:28.699164Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9395:6673]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:28.704174Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:19:28.704241Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9395:6673], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 2381, MsgBus: 24260 2025-04-17T18:19:19.492674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348390767365911:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:19.492836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fb1/r3tmp/tmpUWI6qt/pdisk_1.dat 2025-04-17T18:19:19.731122Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2381, node 1 2025-04-17T18:19:19.790874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:19.790900Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:19.790909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:19.791167Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:19.829651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:19.829754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:19.831469Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24260 TClient is connected to server localhost:24260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:20.257571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:20.274254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:20.373591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:20.484682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:20.556866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:21.964906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348399357302282:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.964977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.204236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.231149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.260576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.288409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.312127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.377211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.402138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.454258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348403652270235:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.454395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.454550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348403652270240:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.458210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:22.466830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348403652270242:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:19:22.547304Z node 1 :TX_PROXY ERROR: Actor# [1:7494348403652270298:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:23.418485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.463983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28259, MsgBus: 27883 2025-04-17T18:19:24.659851Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348412819228111:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:24.661372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fb1/r3tmp/tmpLcnJ68/pdisk_1.dat 2025-04-17T18:19:24.773227Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28259, node 2 2025-04-17T18:19:24.809609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:24.809667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:24.810894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:24.854119Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:24.854151Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:24.854162Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:24.854348Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27883 TClient is connected to server localhost:27883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:19:25.290694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:25.307098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.364696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:25.532003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:25.606147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:27.462312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348425704131770:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.462400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.515550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.543829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.572866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.603933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.638239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.678474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.748718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.805969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348425704132437:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.806055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.806338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348425704132442:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:27.810397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:27.820098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348425704132444:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:19:27.910064Z node 2 :TX_PROXY ERROR: Actor# [2:7494348425704132499:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:28.819420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:28.864698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] >> KqpJoinOrder::TPCDS90-ColumnStore >> KqpJoin::ExclusionJoin >> KqpJoinOrder::CanonizedJoinOrderLookupBug ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 16078, MsgBus: 16559 2025-04-17T18:19:20.506550Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348393515161973:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:20.506619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fac/r3tmp/tmpHo40da/pdisk_1.dat 2025-04-17T18:19:20.786173Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16078, node 1 2025-04-17T18:19:20.852483Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:20.852510Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:20.852522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:20.852664Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:20.892203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:20.892337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:20.893981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16559 TClient is connected to server localhost:16559 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:21.257296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:21.286667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:21.410400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:21.532247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:21.591851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:23.394706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348406400065632:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.394802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.682734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.707293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.735536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.757619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.784095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.850541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.880789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.976499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348406400066298:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.976627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.976832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348406400066303:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.980770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:23.990035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348406400066305:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:24.066667Z node 1 :TX_PROXY ERROR: Actor# [1:7494348410695033657:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:24.916207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:24.969880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.506809Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348393515161973:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:25.506892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23859, MsgBus: 16876 2025-04-17T18:19:26.376158Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348422244516177:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:26.376310Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fac/r3tmp/tmpfEs9G4/pdisk_1.dat 2025-04-17T18:19:26.452563Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23859, node 2 2025-04-17T18:19:26.505948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:26.506025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:26.507325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:26.508192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:26.508214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:26.508221Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:26.508337Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16876 TClient is connected to server localhost:16876 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:26.895492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:26.907180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:26.952018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:27.075536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:27.159131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:29.006744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348435129419823:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:29.006833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:29.066077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.094308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.121575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.149969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.178948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.253769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.291195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.385848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348435129420489:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:29.385930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:29.386289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348435129420494:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:29.391590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:29.403092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348435129420496:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:19:29.460764Z node 2 :TX_PROXY ERROR: Actor# [2:7494348435129420555:3543] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:30.380129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:30.449486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.377314Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348422244516177:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:31.377379Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |94.6%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] >> HttpRequest::ProbeServerless [GOOD] >> KqpJoin::FullOuterJoin2 >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2305, MsgBus: 27444 2025-04-17T18:19:22.406115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348404454756273:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:22.406246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fa6/r3tmp/tmpFmI7j0/pdisk_1.dat 2025-04-17T18:19:22.719466Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2305, node 1 2025-04-17T18:19:22.782209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:22.782313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:22.782709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:22.782721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:22.782727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:22.782844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:22.784047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27444 TClient is connected to 
server localhost:27444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:23.243429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:23.268232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:23.384631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:23.512209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:23.568133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:25.114198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348417339659936:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.114325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.421175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.452572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.478986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.509310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.535934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.601832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.631006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:25.708727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348417339660606:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.708810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.708909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348417339660611:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.730431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:25.739645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348417339660613:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:25.806472Z node 1 :TX_PROXY ERROR: Actor# [1:7494348417339660669:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:26.838683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:26.908941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:26.935558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.001069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.025936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.069312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:19:27.406099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348404454756273:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:27.406209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64385, MsgBus: 18477 2025-04-17T18:19:28.640002Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348430662300386:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:28.640083Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fa6/r3tmp/tmpxvFXvu/pdisk_1.dat 2025-04-17T18:19:28.750205Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64385, node 2 2025-04-17T18:19:28.783141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:28.783225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:28.785211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:28.812526Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:28.812546Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:28.812556Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:28.812705Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18477 TClient is connected to server localhost:18477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:29.172945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:29.178139Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:29.190133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:29.301783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:29.446875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:29.529397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:31.454531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348443547204058:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:31.454616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:31.537135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.570767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.597243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.664629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.698816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.736395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.806243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.861088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348443547204724:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:31.861177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:31.861182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348443547204729:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:31.864544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:31.873786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348443547204731:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:31.971591Z node 2 :TX_PROXY ERROR: Actor# [2:7494348443547204788:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:32.875562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.907225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.938895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.000309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.029248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.061175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.640824Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348430662300386:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:33.640885Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2025-04-17T18:16:52.983548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:52.983902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:52.984052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001509/r3tmp/tmpVcgFXZ/pdisk_1.dat 2025-04-17T18:16:53.370465Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29707, node 1 2025-04-17T18:16:53.641440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:53.641513Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:53.641549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:53.642041Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:53.644504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:53.737828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:53.737944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:53.753790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25256 2025-04-17T18:16:54.257550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:57.497861Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:57.532792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:57.532908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:57.561289Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:57.563808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:57.797498Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.797972Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.798581Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.798698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.798881Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.798984Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.799070Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.799149Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.799231Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.969302Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:57.969430Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:57.993580Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:58.126737Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:58.173866Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:58.173952Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:58.206548Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:58.206986Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:58.207143Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:58.207189Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:58.207236Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:58.207280Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:58.207312Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:58.207353Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:58.207772Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:58.237525Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:58.237649Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1869:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:58.243563Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2606] 2025-04-17T18:16:58.247156Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2616] 2025-04-17T18:16:58.247234Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2616], schemeshard id = 72075186224037897 2025-04-17T18:16:58.254464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:16:58.288458Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:58.288529Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:58.288602Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:16:58.361745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:58.368403Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:58.368526Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:58.589497Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:58.791978Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:58.905156Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:59.618081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:17:00.334068Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:00.583529Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:17:00.583611Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:17:00.583717Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2591:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:17:00.584964Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2592:2949] 2025-04-17T18:17:00.585700Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2592:2949], schemeshard id = 72075186224037899 2025-04-17T18:17:01.971870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2722:3243], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:01.972052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:01.996598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T18:17:02.385298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:02.385579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:02.385904Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:02.386051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:02.386203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:02.386357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:02.386536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:02.386666Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:02.386804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2866:3084];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18 ... 37894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291088Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291127Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291152Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291173Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291220Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291245Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-17T18:19:32.291269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-04-17T18:19:33.550566Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:33.550698Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-17T18:19:33.550734Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:33.551458Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:19:33.571754Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:19:33.572120Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:19:33.572191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:19:33.572580Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:19:33.600576Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:19:33.600750Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-17T18:19:33.601562Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9853:7398], server id = [2:9858:7403], tablet id = 72075186224037905, status = OK 2025-04-17T18:19:33.601658Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9853:7398], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.602375Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9854:7399], server id = [2:9859:7404], tablet id = 72075186224037906, status = OK 2025-04-17T18:19:33.602439Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9854:7399], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.602575Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9855:7400], server id = [2:9860:7405], tablet id = 72075186224037907, status = OK 2025-04-17T18:19:33.602603Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9855:7400], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.602885Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9856:7401], server id = [2:9861:7406], tablet id = 72075186224037908, status = OK 2025-04-17T18:19:33.602922Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9856:7401], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.604073Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9857:7402], server id = [2:9862:7407], tablet id = 72075186224037909, status = OK 2025-04-17T18:19:33.604115Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9857:7402], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.604235Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-17T18:19:33.605051Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9853:7398], server id = [2:9858:7403], tablet id = 72075186224037905 2025-04-17T18:19:33.605075Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.605658Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-17T18:19:33.606333Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-17T18:19:33.606539Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = 
[2:9854:7399], server id = [2:9859:7404], tablet id = 72075186224037906 2025-04-17T18:19:33.606564Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.606647Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-17T18:19:33.606998Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9867:7412], server id = [2:9869:7414], tablet id = 72075186224037910, status = OK 2025-04-17T18:19:33.607044Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9867:7412], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.607129Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-17T18:19:33.614631Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9856:7401], server id = [2:9861:7406], tablet id = 72075186224037908 2025-04-17T18:19:33.614682Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.615144Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9855:7400], server id = [2:9860:7405], tablet id = 72075186224037907 2025-04-17T18:19:33.615177Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.615644Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9870:7415], server id = [2:9872:7417], tablet id = 72075186224037911, status = OK 2025-04-17T18:19:33.615732Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9870:7415], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.615926Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9857:7402], server id = [2:9862:7407], tablet id = 72075186224037909 2025-04-17T18:19:33.615956Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.616502Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9871:7416], server id = [2:9874:7419], tablet id = 72075186224037912, status = OK 2025-04-17T18:19:33.616570Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9871:7416], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.616755Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9873:7418], server id = [2:9876:7421], tablet id = 72075186224037913, status = OK 2025-04-17T18:19:33.616816Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9873:7418], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.617615Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9875:7420], server id = [2:9878:7423], tablet id = 72075186224037914, status = OK 2025-04-17T18:19:33.617655Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9875:7420], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:19:33.618281Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-17T18:19:33.618766Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9867:7412], server id = [2:9869:7414], tablet id = 72075186224037910 2025-04-17T18:19:33.618796Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.619188Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-17T18:19:33.619306Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-17T18:19:33.619723Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9870:7415], server id = [2:9872:7417], tablet id = 
72075186224037911 2025-04-17T18:19:33.619760Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.620137Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9871:7416], server id = [2:9874:7419], tablet id = 72075186224037912 2025-04-17T18:19:33.620164Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.620207Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-17T18:19:33.620411Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-17T18:19:33.620452Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-17T18:19:33.620618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:19:33.620767Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:19:33.620986Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:19:33.623204Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9873:7418], server id = [2:9876:7421], tablet id = 72075186224037913 2025-04-17T18:19:33.623236Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.623675Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:19:33.624201Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9875:7420], server id = [2:9878:7423], tablet id = 72075186224037914 2025-04-17T18:19:33.624226Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:19:33.651179Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDI4NTRiNTYtMzk4ZmRjMGEtMzI1YTIzYjItODQyZGM1Y2Q=, TxId: 2025-04-17T18:19:33.651245Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDI4NTRiNTYtMzk4ZmRjMGEtMzI1YTIzYjItODQyZGM1Y2Q=, TxId: 2025-04-17T18:19:33.651991Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:33.667923Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:33.667976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=D2[i, ActorId=[1:4594:3495] 2025-04-17T18:19:33.668699Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9905:5735]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:33.668886Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:19:33.668946Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:19:33.669162Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:19:33.669213Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:19:33.669263Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-17T18:19:33.683135Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> KqpJoinOrder::CanonizedJoinOrderTPCH10 >> KqpJoinOrder::TPCDS96+ColumnStore >> KqpJoin::ExclusionJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ExclusionJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2236, MsgBus: 23887 2025-04-17T18:19:32.873512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348445887684873:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:32.873590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f97/r3tmp/tmpCtUAWZ/pdisk_1.dat 2025-04-17T18:19:33.233389Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2236, node 1 2025-04-17T18:19:33.278609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:33.278757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:33.285756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:33.325644Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:33.325675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:33.325685Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:33.325786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23887 TClient is connected to server localhost:23887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:33.817029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:33.838519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:33.985203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:34.144437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:34.221613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.111306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348463067555852:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.111437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.480391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.511981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.551896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.580131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.650372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.686716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.718836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.812136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348463067556518:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.812242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.812437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348463067556524:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.816096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:36.830085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348463067556526:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:36.919679Z node 1 :TX_PROXY ERROR: Actor# [1:7494348463067556585:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:37.828188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.865668Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348445887684873:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:37.865745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:19:37.893408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.924785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpJoin::FullOuterJoin2 [GOOD] >> BasicStatistics::Serverless [GOOD] >> KqpJoin::IdxLookupLeftPredicate >> ReadOnlyVDisk::TestStorageLoad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin2 [GOOD] Test command err: Trying to start YDB, gRPC: 5987, MsgBus: 25943 2025-04-17T18:19:35.151344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348457765885672:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:35.151399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f8f/r3tmp/tmp2ZeVQ4/pdisk_1.dat 2025-04-17T18:19:35.506165Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5987, node 1 2025-04-17T18:19:35.574310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:35.574446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:35.575959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:35.585953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:35.585978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:35.585993Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:35.586107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25943 TClient is connected to server localhost:25943 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:36.005050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:36.038938Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:36.049011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:36.196040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:36.345312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:36.429302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:38.080350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348470650789360:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.080446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.471118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.502499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.541036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.568596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.595941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.641034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.672899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.776154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348470650790026:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.776239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.776375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348470650790031:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.779544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:38.787655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348470650790033:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:38.873775Z node 1 :TX_PROXY ERROR: Actor# [1:7494348470650790089:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:39.834337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.864326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.901192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.152664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348457765885672:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:40.152745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2025-04-17T18:16:51.206425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:51.206563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:51.206714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00150f/r3tmp/tmpN4p1v6/pdisk_1.dat 2025-04-17T18:16:51.648494Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32410, node 1 2025-04-17T18:16:51.933013Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:51.933072Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:51.933106Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:51.933845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:51.936391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:52.027931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:52.028050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:52.043749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8387 2025-04-17T18:16:52.653480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:55.761059Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:55.807156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:55.807269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:55.847662Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:55.850097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:56.097722Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.098141Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.098891Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.099080Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.099167Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.099399Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.099492Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.099557Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.099655Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.291867Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:56.291996Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:56.309287Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:56.478673Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:56.520177Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:56.520290Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:56.560216Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:56.561506Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:56.561730Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:56.561798Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:56.561857Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:56.561928Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:56.561988Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:56.562051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:56.562576Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:56.592353Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:56.592468Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1866:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:56.599925Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1879:2606] 2025-04-17T18:16:56.605221Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1898:2615] 2025-04-17T18:16:56.606958Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1898:2615], schemeshard id = 72075186224037897 2025-04-17T18:16:56.612896Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:16:56.642202Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:56.642279Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:56.642363Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:16:56.701817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:56.709222Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:56.709376Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:56.910300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:57.086971Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:57.132686Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:57.756815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:16:58.429645Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:58.662791Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:16:58.662853Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:16:58.662946Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2586:2943], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:16:58.663848Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2587:2944] 2025-04-17T18:16:58.664618Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2587:2944], schemeshard id = 72075186224037899 2025-04-17T18:16:59.784783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2715:3241], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:59.784971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:59.802490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T18:16:59.960790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2867:3276], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:59.960976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:00.009270Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2872:3280]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:17:00.009446Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:17:00.009667Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-17T18:17:00.009739Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2875:3283] 2025-04-17T18:17:00.009810Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2875:3283] 2025-04-17T18:17:00.010541Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2876:3074] 2025-04-17T18:17:00.010939Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2875:3283], server id = [2:2876:3074], tablet id = 72075186224037894, status = OK 2025-04-17T18:17:00.011141Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2876:3074], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:17:00.011223Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:17:00.011497Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:17:00.011585Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2872:3280], StatRequests.size() = 1 2025-04-17T18:17:00.033773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2880:3287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... plyToActorId = [2:7531:5416], StatRequests.size() = 1 2025-04-17T18:19:30.382967Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:19:30.383054Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:19:30.383101Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:19:30.383145Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:19:30.680508Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7575:5436]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:30.680763Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-17T18:19:30.680793Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7575:5436], StatRequests.size() = 1 2025-04-17T18:19:32.133191Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:19:32.133585Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:32.134023Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:19:32.193954Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-17T18:19:32.194138Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 221.000000s, at schemeshard: 72075186224037897 2025-04-17T18:19:32.194573Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-17T18:19:32.208744Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:32.273209Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7616:5459]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:32.273574Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-17T18:19:32.273637Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7616:5459], StatRequests.size() = 1 2025-04-17T18:19:33.596216Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:33.596312Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:33.596378Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:19:33.596432Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:33.596944Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:19:33.630626Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:33.635846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7652:5485], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.635970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7661:5490], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.636213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.653682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:19:33.724692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7666:5493], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:19:33.837758Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7761:5541]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:33.838051Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:19:33.838094Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7761:5541], StatRequests.size() = 1 2025-04-17T18:19:33.928139Z node 2 :TX_PROXY ERROR: Actor# [2:7766:5543] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:33.973371Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7795:5558]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:33.973633Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:19:33.973863Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-17T18:19:33.973911Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:19:33.974039Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:33.974111Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7795:5558], StatRequests.size() = 1 2025-04-17T18:19:34.114358Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2RiMDc0NGUtZmI1ZTQzNjAtYjRlNTcxNDEtNDY0Y2MxZTc=, TxId: 2025-04-17T18:19:34.114453Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2RiMDc0NGUtZmI1ZTQzNjAtYjRlNTcxNDEtNDY0Y2MxZTc=, TxId: 2025-04-17T18:19:34.115596Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:34.132696Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:34.132767Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
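Editor's note: for readability, the two data queries that the statistics actors issue via RunDataQuery in this section are reproduced below exactly as captured, only re-broken across lines — the column-statistics UPSERT from the force traversal and the cleanup DELETE from the scheduled traversal. The element types of the two List declarations appear truncated in the captured log and are left as logged:

DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $stat_type AS Uint32;
DECLARE $column_tags AS List;  -- element type truncated in the captured log
DECLARE $data AS List;         -- element type truncated in the captured log
UPSERT INTO `.metadata/_statistics`
    (owner_id, local_path_id, stat_type, column_tag, data)
VALUES
    ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
    ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DELETE FROM `.metadata/_statistics`
WHERE owner_id = $owner_id AND local_path_id = $local_path_id;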
2025-04-17T18:19:34.202275Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:19:34.202346Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:19:34.275805Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3053:3108], schemeshard count = 1 2025-04-17T18:19:34.566962Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-17T18:19:34.567031Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 236.000000s, at schemeshard: 72075186224037899 2025-04-17T18:19:34.567479Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-17T18:19:34.583636Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:35.667372Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7861:5600]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:35.667653Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:19:35.667694Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7861:5600], StatRequests.size() = 1 2025-04-17T18:19:37.066584Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:19:37.077646Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:37.077700Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:37.077740Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-17T18:19:37.077771Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:37.078035Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:19:37.080730Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:37.095771Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGQwMmIwNDctZDQ2NjlhZTYtNzVjODMxZWEtYzQxNTBhZDg=, TxId: 2025-04-17T18:19:37.095836Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGQwMmIwNDctZDQ2NjlhZTYtNzVjODMxZWEtYzQxNTBhZDg=, TxId: 2025-04-17T18:19:37.096271Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:37.111882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:37.111937Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:19:37.170553Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7930:5642]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:37.170855Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:19:37.170898Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7930:5642], StatRequests.size() = 1 2025-04-17T18:19:38.753771Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7982:5673]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:38.754077Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:19:38.754125Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7982:5673], StatRequests.size() = 1 2025-04-17T18:19:40.119131Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-17T18:19:40.119647Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:19:40.120064Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:40.135125Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:40.135192Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:40.240274Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8022:5695]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:40.240540Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:19:40.240581Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8022:5695], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 6559434859534907140 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-04-17T18:19:16.955385Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:16.958156Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:16.960964Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:16.964978Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:16.965041Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.139308Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.154861Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.265242Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.275710Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: 
(2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.296866Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.310124Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.358860Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.359620Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.508264Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.527313Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.538024Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.753921Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.831970Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.912873Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.933648Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.934093Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:17.945408Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.033389Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.047713Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.062674Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.075566Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.204978Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.337816Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.355528Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.387688Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-04-17T18:19:18.503825Z 1 
00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700]
[2025-04-17T18:19:18.504606Z through 2025-04-17T18:19:20.406938Z (simulated time 00h02m41.100000s–00h02m46.100000s): ~40 repetitions of the same BS_SKELETON ERROR for PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700]; the last entry is cut off at the log's elision marker " ... "]
k [82000000:1:0:5:0]
Setting VDisk read-only to 0 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
2025-04-17T18:19:31.286924Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749]
[2025-04-17T18:19:31.291132Z through 2025-04-17T18:19:35.813825Z (simulated time 00h20m54.412560s–00h21m02.412560s): ~70 repetitions of the same BS_SKELETON ERROR for PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749]]
|94.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|94.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull
>> KqpJoinOrder::CanonizedJoinOrderTPCH13
>> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup
>> KqpJoin::IdxLookupLeftPredicate [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupLeftPredicate [GOOD]
Test command err: Trying to start YDB, gRPC: 1377, MsgBus: 14692
2025-04-17T18:19:42.119203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348489672022768:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:19:42.119281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f85/r3tmp/tmplSWHdR/pdisk_1.dat
2025-04-17T18:19:42.568100Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:19:42.571180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:19:42.571279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:19:42.573938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1377, node 1
2025-04-17T18:19:42.718739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:19:42.718764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:19:42.718770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:19:42.718881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14692
TClient is connected to server localhost:14692
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:19:43.369116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:43.390882Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:19:43.408026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:43.537819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:43.688800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:43.754363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:45.414306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348502556926429:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:45.414428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:45.713050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
[2025-04-17T18:19:45.753584Z through 2025-04-17T18:19:45.974865Z: six further identical FLAT_TX_SCHEMESHARD warnings for ESchemeOpCreateTable, opId: 281474976710663:0 through 281474976710668:0]
2025-04-17T18:19:46.021239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348506851894388:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:46.021300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:46.021502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348506851894393:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:46.024712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:19:46.032721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348506851894395:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:19:46.108432Z node 1 :TX_PROXY ERROR: Actor# [1:7494348506851894450:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:19:47.029640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:19:47.060422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-17T18:19:47.091425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-17T18:19:47.120366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348489672022768:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:19:47.120620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull
>> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD]
>> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup
>> KqpJoin::ComplexJoin
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 12280, MsgBus: 23046
2025-04-17T18:19:18.513138Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348384182912944:2061];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:19:18.513293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fb9/r3tmp/tmpT0S1EC/pdisk_1.dat
2025-04-17T18:19:18.813594Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 12280, node 1
2025-04-17T18:19:18.869549Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:19:18.869585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:19:18.869594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:19:18.869723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:19:18.922855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:19:18.922978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:19:18.924779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:23046
TClient is connected to server localhost:23046
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:19:19.315998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:20.722790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348392772848209:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:20.722796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348392772848201:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:20.722859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:20.726130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:19:20.735241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348392772848215:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-17T18:19:20.810978Z node 1 :TX_PROXY ERROR: Actor# [1:7494348392772848266:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:19:21.078105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
[2025-04-17T18:19:21.187443Z through 2025-04-17T18:19:22.462490Z: 31 further identical FLAT_TX_SCHEMESHARD warnings for ESchemeOpCreateTable, opId: 281474976710661 through 281474976710691 (281474976710673 as suboperation :2, the rest as :0)]
2025-04-17T18:19:22.487681Z node 1 :FLAT_T ... oller.cpp:214;event=finished_tx;tx_id=281474976710714;
[2025-04-17T18:19:45.121024Z through 2025-04-17T18:19:45.279451Z: ~45 TX_COLUMNSHARD_TX WARN entries of the form "tablet_id=...;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;" for tablet_ids in the range 72075186224038492–72075186224038660]
2025-04-17T18:19:45.354087Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fm6aqcfwrh445fy4djvam", SessionId: ydb://session/3?node_id=1&id=ZWYzNWRiNDUtMjQ0OWI2MDQtMTcwOWMwMjAtYTI3ZDNlZjU=, Slow query, duration: 22.002577s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
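For readability, the query text embedded above as a single escaped string is the following YQL, shown here with only its whitespace restored (verbatim content from the slow-query entry, not an added example):

CREATE TABLE t1 (
  id1 Int32 NOT NULL,
  PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
  id2 Int64 NOT NULL,
  t1_id1 Int64 NOT NULL,
  -- random_field2 Int32
  PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
  id3 Int16 NOT NULL,
  -- random_field3 Int32
  PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);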
2025-04-17T18:19:45.599065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:19:45.599083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:19:45.599743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7494348444312475269:4973];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629;
2025-04-17T18:19:45.600100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::GeneralPrioritiesBug4
>> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD]
>> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD]
>> BasicStatistics::ServerlessGlobalIndex [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 7468, MsgBus: 5609
2025-04-17T18:19:22.659069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348401430026315:2065];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:19:22.659113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fa3/r3tmp/tmpXYcpOL/pdisk_1.dat
2025-04-17T18:19:22.962418Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7468, node 1
2025-04-17T18:19:23.032093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:19:23.032128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:19:23.032145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:19:23.032265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-17T18:19:23.033766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:19:23.033896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:19:23.035706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:5609
TClient is connected to server localhost:5609
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:19:23.488975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:19:25.247714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348414314928864:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:25.247720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348414314928872:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:25.247908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:25.251542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:19:25.263815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348414314928878:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-17T18:19:25.329056Z node 1 :TX_PROXY ERROR: Actor# [1:7494348414314928929:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:19:25.585449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
[2025-04-17T18:19:25.694386Z through 2025-04-17T18:19:27.224387Z: 31 further identical FLAT_TX_SCHEMESHARD warnings for ESchemeOpCreateTable, opId: 281474976710661 through 281474976710691 (281474976710673 as suboperation :2, the rest as :0)]
2025-04-17T18:19:27.253797Z node 1 :FLAT_TX_SCH ... COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
[2025-04-17T18:19:50.477034Z through 2025-04-17T18:19:50.510756Z: 12 further TX_COLUMNSHARD_TX WARN entries of the same finished_tx form for tx_id 281474976710714, tablet_ids in the range 72075186224038531–72075186224038660]
2025-04-17T18:19:50.512079Z node 1 :TX_COLUMNSHARD_TX WARN:
tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.516402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.517064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.522029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.522758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.527572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.528317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.533069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.533570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.538510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.538536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.544189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.544216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.549826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.549925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.555947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.555948Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.561540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.561543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.565597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.566984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.570994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.571845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.576527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.577077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.581930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.582309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.587314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.587483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.593125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.593125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.598318Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.618150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:50.702412Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fmaz6d2ewvbpstq4vwwfx", SessionId: ydb://session/3?node_id=1&id=MzZhM2ExODEtZjZhZDQ1N2EtNjdjMTkxNTYtMTZmYTZjZDQ=, Slow query, duration: 22.599492s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:19:50.929562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:19:50.929571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:19:50.930341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
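(Note: the escaped text: field in the KQP_SLOW_LOG record above decodes to the following DDL, reproduced verbatim from the log with \n unescaped; indentation is approximated for readability. It shows the 22.6s slow query was three column-store CREATE TABLE statements, each requesting 240 minimum partitions:)

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);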
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:40.935426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:40.935487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:40.935522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:40.935545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:40.935575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:40.935604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:40.935639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:40.935690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:40.935910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:40.989164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:40.989207Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:41.000824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:41.001416Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:41.001544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:41.008659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:41.009092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:41.009523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.009660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:41.011662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.012483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:41.012519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.012648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:41.012685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:41.012715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:41.012799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:41.017330Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:41.106275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:41.106472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.106617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:41.106792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:41.106828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.108682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.108783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:41.108930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.108977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:41.109029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:41.109052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:41.110438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.110477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:41.110500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:41.111686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.111726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.111754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.111794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.114279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:41.115467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:41.115599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:41.116238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.116331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:41.116364Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.116560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:41.116631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.116752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:41.116804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:41.118227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:41.118258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:41.118362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.118390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:41.118624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.118666Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:41.118759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:41.118785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.118807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:41.118839Z no ... BrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:56.569899Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:56.570210Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 324us result status StatusSuccess 2025-04-17T18:19:56.571188Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: 
"indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:56.586227Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1095:2883] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:19:56.586349Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1053:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:19:56.586525Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1095:2883] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744913996531811 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744913996531811 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744913996531811 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:19:56.589196Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1095:2883] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-17T18:19:56.589326Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1053:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 13700, MsgBus: 20144 2025-04-17T18:19:44.674535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348498820341874:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:44.678853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f84/r3tmp/tmpZaWt6H/pdisk_1.dat 2025-04-17T18:19:45.179810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:45.179911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:45.183772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:45.255389Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13700, node 1 2025-04-17T18:19:45.421762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:45.421792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:45.421815Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:45.421967Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20144 TClient is connected to server localhost:20144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:46.029927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:46.066983Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:46.080113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:46.243798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:46.424585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:46.503997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:19:48.017792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348516000212651:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:48.017919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:48.355657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.385627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.412696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.440326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.474958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.507314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.547023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.626171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348516000213308:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:48.626288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:48.626540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348516000213313:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:48.630040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:48.645622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348516000213315:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:48.721676Z node 1 :TX_PROXY ERROR: Actor# [1:7494348516000213374:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:49.595266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:49.653197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:49.659284Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348498820341874:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:49.659345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10849, MsgBus: 8968 2025-04-17T18:19:51.118496Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348527592450493:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:51.122266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f84/r3tmp/tmpKM0j1W/pdisk_1.dat 2025-04-17T18:19:51.291857Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:51.304587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:51.304675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:51.305828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10849, node 2 2025-04-17T18:19:51.378732Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:51.378757Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:51.378763Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:51.378880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8968 TClient is connected to server localhost:8968 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:51.817407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:51.825091Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:51.834095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:51.905468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:52.080888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:52.172864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:54.338299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348540477354160:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:54.338387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:54.391230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.429265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.459476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.491807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.523320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.564595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.601785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.685444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348540477354819:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:54.685539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:54.685812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348540477354824:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:54.688870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:54.702171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348540477354826:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:54.760452Z node 2 :TX_PROXY ERROR: Actor# [2:7494348540477354879:3543] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:55.600821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.661947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:56.119615Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348527592450493:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:56.119686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: 2025-04-17T18:17:02.544535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:17:02.544688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:17:02.544849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014e0/r3tmp/tmpPtColz/pdisk_1.dat 2025-04-17T18:17:02.936650Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28922, node 1 2025-04-17T18:17:03.225518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:17:03.225578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:17:03.225615Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:17:03.226501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:17:03.234470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:17:03.354875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:03.355002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:03.373102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11745 2025-04-17T18:17:03.959156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:17:06.980938Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:17:07.025065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:07.025194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:07.063330Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:17:07.066524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:07.320007Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.320524Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.321125Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.321495Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.321575Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.321676Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.321790Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.321891Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.321977Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:17:07.511612Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:07.511698Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:07.524949Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:07.674914Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:07.722524Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:17:07.722624Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:17:07.757451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:17:07.759483Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:17:07.759735Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:17:07.759802Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:17:07.759876Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:17:07.759961Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:17:07.760024Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:17:07.760083Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:17:07.761629Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:17:07.797397Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:17:07.797505Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:17:07.805058Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:17:07.809345Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:17:07.809480Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:17:07.821147Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:17:07.886640Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:17:07.886704Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:17:07.886772Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:17:07.897254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:17:07.904782Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:17:07.904916Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:17:08.080967Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:17:08.258273Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:17:08.319054Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:17:08.870050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:17:09.418224Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:09.559492Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:17:09.559563Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:17:09.559674Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:17:09.560630Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2599:2951] 2025-04-17T18:17:09.560760Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2599:2951], schemeshard id = 72075186224037899 2025-04-17T18:17:10.688542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2734:3250], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:10.688744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:10.708439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-17T18:17:10.976342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2964:3295], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:10.976512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:17:11.018349Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2969:3299]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:17:11.018651Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:17:11.018847Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-17T18:17:11.018919Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2972:3302] 2025-04-17T18:17:11.018987Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2972:3302] 2025-04-17T18:17:11.019527Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2973:3141] 2025-04-17T18:17:11.019800Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2972:3302], server id = [2:2973:3141], tablet id = 72075186224037894, status = OK 2025-04-17T18:17:11.019993Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2973:3141], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:17:11.020050Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:17:11.020274Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:17:11.020360Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2969:3299], StatRequests.size() = 1 2025-04-17T18:17:11.028384Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3006:3311]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:17:11.028530Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] R ... .787669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:47.847857Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7854:5568]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:47.848136Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-17T18:19:47.848179Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7854:5568], StatRequests.size() = 1 2025-04-17T18:19:49.212480Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:49.212586Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:49.212653Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-17T18:19:49.212703Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:49.213091Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:19:49.233204Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:49.240341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7892:5595], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:49.240459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7902:5600], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:49.241068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:49.256850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-17T18:19:49.322633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7906:5603], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-17T18:19:49.441669Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:8001:5651]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:49.441998Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:19:49.442039Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:8001:5651], StatRequests.size() = 1 2025-04-17T18:19:49.504245Z node 2 :TX_PROXY ERROR: Actor# [2:8003:5653] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:49.562522Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:8032:5668]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:49.562808Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:19:49.563088Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-17T18:19:49.563136Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:19:49.563290Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:49.563373Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:8032:5668], StatRequests.size() = 1 2025-04-17T18:19:49.687789Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmU3NGM5MTMtMzU1YTJkYjItODI4YWY5YzctNjFmZDc4ZjE=, TxId: 2025-04-17T18:19:49.687869Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmU3NGM5MTMtMzU1YTJkYjItODI4YWY5YzctNjFmZDc4ZjE=, TxId: 2025-04-17T18:19:49.688462Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:49.703876Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:19:49.703958Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:19:49.750328Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:19:49.750398Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:19:49.829991Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3245:3186], schemeshard count = 1 2025-04-17T18:19:50.217553Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037899 2025-04-17T18:19:50.217622Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 197.000000s, at schemeshard: 72075186224037899 2025-04-17T18:19:50.217916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 50 2025-04-17T18:19:50.234766Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:19:51.312105Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:8105:5713]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:51.312419Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:19:51.312486Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:8105:5713], StatRequests.size() = 1 2025-04-17T18:19:52.858994Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:19:52.871233Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:52.871292Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:52.871331Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 4] is data table. 2025-04-17T18:19:52.871365Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-17T18:19:52.871724Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:19:52.878641Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:52.902919Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTdhZWIwY2MtZDNmZmRmMWMtNDJkYjdmMWEtN2Q5ZDVmZA==, TxId: 2025-04-17T18:19:52.902985Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTdhZWIwY2MtZDNmZmRmMWMtNDJkYjdmMWEtN2Q5ZDVmZA==, TxId: 2025-04-17T18:19:52.903500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:52.919406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-17T18:19:52.919462Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:19:53.000580Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:8174:5755]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:53.000876Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:19:53.000922Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:8174:5755], StatRequests.size() = 1 2025-04-17T18:19:54.676191Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8228:5787]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:54.676478Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:19:54.676520Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8228:5787], StatRequests.size() = 1 2025-04-17T18:19:56.044669Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-17T18:19:56.044897Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:19:56.045249Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:19:56.056495Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:19:56.056573Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:19:56.056612Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-17T18:19:56.056643Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:56.056923Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:19:56.060134Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:19:56.072583Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjI0MDQ2MzgtYjRiZjVlNzMtNWNiYzcyNjgtZGYwMzU2MWQ=, TxId: 2025-04-17T18:19:56.072647Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjI0MDQ2MzgtYjRiZjVlNzMtNWNiYzcyNjgtZGYwMzU2MWQ=, TxId: 2025-04-17T18:19:56.073113Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-04-17T18:19:54.000000Z 2025-04-17T18:19:56.073363Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:19:56.088753Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:19:56.088819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:19:56.192591Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8292:5824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:19:56.192885Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:19:56.192949Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8292:5824], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2074, MsgBus: 9060 2025-04-17T18:19:45.187073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348502330388417:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f7f/r3tmp/tmpqpg9kN/pdisk_1.dat 2025-04-17T18:19:45.468554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:19:45.641730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:45.661022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:45.661145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:45.663150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2074, node 1 2025-04-17T18:19:45.780151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:45.780185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:45.780192Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:45.780385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9060 TClient is connected to server localhost:9060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:46.349795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:19:46.379044Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:46.395143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:46.545775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:19:46.705496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:46.789678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:48.382626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348515215291936:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:48.382708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:48.690164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.719028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.787009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.879123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.916907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.948849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.983103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:49.078552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348519510259905:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:49.078636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:49.078968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348519510259910:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:49.083074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:49.095149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348519510259912:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:49.162950Z node 1 :TX_PROXY ERROR: Actor# [1:7494348519510259968:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:50.103527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:50.142075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:50.146739Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348502330388417:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:50.146851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:19:50.181821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:50.218441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:50.241057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:50.267138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28561, MsgBus: 13084 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f7f/r3tmp/tmpLAZEyG/pdisk_1.dat 2025-04-17T18:19:52.031757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:19:52.045441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:52.045519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:52.047639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:52.066537Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28561, node 2 2025-04-17T18:19:52.134511Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:52.134530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:52.134538Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:52.134646Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13084 
TClient is connected to server localhost:13084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:52.596514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:52.603394Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:52.609272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:19:52.696275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:19:52.869433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:52.932750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:54.862307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348540702333625:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:54.862399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:54.909413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.937225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:54.967326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.027509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.067126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.120256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.195197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.260263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348544997301586:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.260351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.260408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348544997301591:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.264363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:55.279007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348544997301593:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:55.377119Z node 2 :TX_PROXY ERROR: Actor# [2:7494348544997301651:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:56.308352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:56.353120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:56.389584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:56.430165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:56.485556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:56.516645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:21.844446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:21.844542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:21.844580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:21.844621Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:21.844665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:21.844710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:21.844774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:21.844861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:21.845217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:21.924556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:21.924612Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:21.936920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:21.937578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:21.937740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:21.949068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:21.949731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:21.950442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:21.950689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:21.953976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:21.955458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:21.955520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:21.955750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:21.955800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:21.955846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:21.955989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] 
sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:21.962849Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:22.091161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:22.091477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:22.091732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:22.092039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:22.092106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:22.094726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:22.094873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:22.095182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:22.095259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:22.095332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:22.095369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:22.097744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:22.097810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:22.097883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:22.100047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:22.100113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:22.100165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:22.100241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:22.104752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:22.107178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:22.107433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:22.108542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:22.108670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:22.108714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:22.108937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:22.109013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:22.109190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:22.109317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:22.111949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:22.112006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:22.112210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:22.112269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:22.112786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:22.112866Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:22.112989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:22.113053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:22.113107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:22.113175Z no ... 
nId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:58.022958Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:19:58.023274Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 343us result status StatusSuccess 2025-04-17T18:19:58.024265Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:19:58.036410Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1040:2805] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:19:58.036499Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1041:2805] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:19:58.036575Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-17T18:19:58.036650Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-17T18:19:58.036773Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1040:2805] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744913998004638 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744913998004638 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:19:58.036952Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1041:2805] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ 
Order: 3 Group: 1744913998004638 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:19:58.041235Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1040:2805] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-04-17T18:19:58.041521Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-17T18:19:58.042210Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1041:2805] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:19:58.042291Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> KqpJoinOrder::CanonizedJoinOrderTPCH17 >> KqpJoinOrder::FiveWayJoin-ColumnStore >> KqpJoinOrder::TPCDS34-ColumnStore >> KqpJoinOrder::TPCH12_100 >> KqpJoin::ComplexJoin [GOOD] >> KqpJoinOrder::TPCDS16-ColumnStore >> KqpJoinOrder::TPCH9_100 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] Test command err: Trying to start YDB, gRPC: 6273, MsgBus: 27599 2025-04-17T18:19:51.896585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348528763274505:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:51.897003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f7e/r3tmp/tmpbehw7P/pdisk_1.dat 2025-04-17T18:19:52.341035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:52.344859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:52.344974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:52.348558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6273, node 1 2025-04-17T18:19:52.510555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:52.510575Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:52.510584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:52.510685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27599 TClient is connected to server localhost:27599 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:53.117331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:53.140642Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:53.145056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:53.285096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:53.441500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:53.503511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:55.124368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348545943145310:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.124486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.457859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.485426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.562958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.602573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.672039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.705821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.738157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:55.824806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348545943145973:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.824892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.824956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348545943145978:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:55.828492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:19:55.843162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348545943145980:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:19:55.944309Z node 1 :TX_PROXY ERROR: Actor# [1:7494348545943146039:3542] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:56.889273Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348528763274505:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:56.889347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:19:57.004131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:57.074773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:19:57.119195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:57.147895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:57.276239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH3 >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13472, MsgBus: 20052 2025-04-17T18:19:27.450868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348425153367262:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:27.451768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fa1/r3tmp/tmp8Wx9Ni/pdisk_1.dat 2025-04-17T18:19:27.780506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:27.780988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:27.781067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:27.785894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13472, node 1 2025-04-17T18:19:27.841045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:27.841076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-17T18:19:27.841082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:27.841205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20052 TClient is connected to server localhost:20052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:28.333735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:30.159969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348438038269819:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:30.159971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348438038269811:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:30.160046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:30.164316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:30.176966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348438038269825:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:19:30.279039Z node 1 :TX_PROXY ERROR: Actor# [1:7494348438038269876:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:30.586891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:30.700249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:30.731073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:30.762346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:30.789691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:30.964624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:30.991917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.059556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.084453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.152419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.177556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.200755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.231603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.783581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:19:31.812333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.839571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.864869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.887285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.915225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:19:31.937627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.003418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.040215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.069141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.091347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.122367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.149296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.183953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.221937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.251768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.319119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.347378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.378021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-17T18:19:32.409264Z node 1 :FLAT_T ... tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:57.987099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:57.993301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:57.999784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.006062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.012233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.018311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.024343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.029055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.030097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.034468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.035573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.039429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.040643Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.044662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.046118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.050287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.051855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.055679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.057542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.061130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.063434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.066679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.069269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.072360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.075288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.078027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.081275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.083862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.087444Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.090348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.094092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.096191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.100454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.101822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.106701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.107431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.112762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.118112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.118160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.124018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.124293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.130067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.130068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.136269Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.138970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.141922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:19:58.184641Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fmg414fderpwctb9808ah", SessionId: ydb://session/3?node_id=1&id=MTA3Y2E1NTQtM2YwYWM2NmYtMWRlMGJiYmItZDE5MGJlMDI=, Slow query, duration: 24.807039s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:19:58.468302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; 2025-04-17T18:19:58.468356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; 2025-04-17T18:19:58.468896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; >> KqpJoin::LeftJoinWithNull+StreamLookupJoin >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2025-04-17T18:16:52.140758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:603:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:52.141054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:52.141097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00150a/r3tmp/tmpK950mS/pdisk_1.dat 2025-04-17T18:16:52.545853Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2619, node 1 2025-04-17T18:16:52.779719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:52.779791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:52.779827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:52.779997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:52.781617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:52.870892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:52.871018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:52.886090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3525 2025-04-17T18:16:53.453003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:56.640275Z node 4 :STATISTICS INFO: Subscribed for config changes on node 4 2025-04-17T18:16:56.685344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:56.685453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:56.714234Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T18:16:56.715947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:56.947928Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.948706Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.948951Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.949095Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.949385Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.949506Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.949618Z node 4 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.949698Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:56.949791Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:57.108660Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:57.108774Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:57.132755Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:57.290963Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:57.334307Z node 4 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:57.334419Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:57.365467Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:57.366938Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:57.367143Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:57.367195Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:57.367278Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:57.367333Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:57.367383Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:57.367428Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:57.375117Z node 4 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:57.412873Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:57.412985Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2034:2602], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:57.455890Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2052:2614] 2025-04-17T18:16:57.462500Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2080:2626] 2025-04-17T18:16:57.462978Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2080:2626], schemeshard id = 72075186224037897 2025-04-17T18:16:57.471047Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-17T18:16:57.489497Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:57.489543Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:57.489601Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-17T18:16:57.501267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:57.513557Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:57.513699Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:57.696787Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:57.856285Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:57.951232Z node 4 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:58.714305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:17:02.066408Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-17T18:17:02.116973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:02.117086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:02.117392Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:02.117481Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:02.174055Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:17:02.178822Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:02.322927Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:02.396622Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:17:02.397422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:02.520149Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-17T18:17:02.520210Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:17:02.520329Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3090:2949], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-17T18:17:02.521761Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:3092:2951] 2025-04-17T18:17:02.524941Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:3092:2951], schemeshard id = 72075186224037899 2025-04-17T18:17:03.665819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:17:07.132010Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:17:07.185504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:07.185627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:07.185800Z node 4 :HIVE WARN: HIVE#72075186224037888 
Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:17:07.185857Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:17:07.221924Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:17:07.225877Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:07.343005Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:17:07.437114Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:17:07.437869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:17:07.661889Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-17T18:17:07.661963Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-17T18:17:07.662053Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3924:3151], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224 ... Dc2ZDc=, TxId: 2025-04-17T18:20:01.610644Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:20:01.631808Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-17T18:20:01.631875Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:20:01.707206Z node 4 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:20:01.707291Z node 4 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:20:01.759098Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11611:7140], schemeshard count = 1 2025-04-17T18:20:03.739199Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-17T18:20:03.739407Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 198.000000s, at schemeshard: 72075186224037899 2025-04-17T18:20:03.739651Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-17T18:20:03.756367Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:20:04.843384Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:20:04.843447Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:20:04.843487Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2025-04-17T18:20:04.843555Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:20:04.848017Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:20:04.884764Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:20:04.886076Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:20:04.886193Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:20:04.888683Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:20:04.926919Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:20:04.927165Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2025-04-17T18:20:04.927969Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11785:7237], server id = [4:11786:7238], tablet id = 72075186224037911, status = OK 2025-04-17T18:20:04.928480Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11785:7237], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-17T18:20:04.933704Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-17T18:20:04.933833Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-17T18:20:04.934049Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:20:04.934310Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:20:04.934648Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-17T18:20:04.936969Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11785:7237], server id = [4:11786:7238], tablet id = 72075186224037911 2025-04-17T18:20:04.937020Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:20:04.937625Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:20:04.982027Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:11806:7257]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:20:04.982408Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:20:04.982467Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:11806:7257], StatRequests.size() = 1 2025-04-17T18:20:05.109675Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OWVhNjQ5NjEtMzE4N2Y0NmItZTNiMTE5MGItY2Q0OThkNTY=, TxId: 2025-04-17T18:20:05.109744Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OWVhNjQ5NjEtMzE4N2Y0NmItZTNiMTE5MGItY2Q0OThkNTY=, TxId: 2025-04-17T18:20:05.110614Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:20:05.128236Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path 
[OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:20:05.128312Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:20:05.742885Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-17T18:20:05.742964Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-17T18:20:06.239278Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-17T18:20:06.239354Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 203.000000s, at schemeshard: 72075186224037905 2025-04-17T18:20:06.239563Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2025-04-17T18:20:06.259850Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:20:08.179217Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:20:08.179292Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:20:08.179337Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2025-04-17T18:20:08.179375Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-17T18:20:08.188049Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-17T18:20:08.218526Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-17T18:20:08.219311Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-17T18:20:08.219386Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-17T18:20:08.220105Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-17T18:20:08.255912Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-17T18:20:08.256183Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2025-04-17T18:20:08.256964Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11954:7330], server id = [4:11955:7331], tablet id = 72075186224037912, status = OK 2025-04-17T18:20:08.257067Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11954:7330], path = { OwnerId: 72075186224037905 LocalId: 2 } 2025-04-17T18:20:08.261371Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-17T18:20:08.261484Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-17T18:20:08.261649Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-17T18:20:08.261838Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-17T18:20:08.262106Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-17T18:20:08.273836Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11954:7330], server id = [4:11955:7331], tablet id = 72075186224037912 2025-04-17T18:20:08.273891Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-17T18:20:08.275002Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-17T18:20:08.314099Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZDYzNDAzMWMtMWI0ZTgzM2ItOWViYjI0YmMtNjY3YTI5MjI=, TxId: 2025-04-17T18:20:08.314166Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZDYzNDAzMWMtMWI0ZTgzM2ItOWViYjI0YmMtNjY3YTI5MjI=, TxId: 2025-04-17T18:20:08.315309Z node 4 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 4, interval end# 1970-01-01T00:02:05.000000Z, event interval end# 2025-04-17T18:20:06.000000Z 2025-04-17T18:20:08.315739Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:20:08.317101Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:11978:6047]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:20:08.317617Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:20:08.317691Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:20:08.325673Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:20:08.325762Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:20:08.325823Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:20:08.369962Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-17T18:20:08.375636Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:11978:6047]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-17T18:20:08.376138Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:20:08.376195Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-17T18:20:08.376428Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:20:08.376484Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-17T18:20:08.376541Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-17T18:20:08.385403Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> KqpJoinOrder::TPCDS23-ColumnStore >> 
KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] Test command err: Trying to start YDB, gRPC: 20088, MsgBus: 27412 2025-04-17T18:19:33.066098Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348449104611759:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:33.066170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f93/r3tmp/tmpLHxVd8/pdisk_1.dat 2025-04-17T18:19:33.439713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20088, node 1 2025-04-17T18:19:33.514777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:33.514934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:33.519211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:33.530722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:33.530745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:33.530752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:33.530880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27412 TClient is connected to server localhost:27412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:34.005731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:34.018435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:19:35.967369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348457694547019:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:35.967382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348457694547027:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:35.967498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:35.971299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:35.980476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348457694547033:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:19:36.062823Z node 1 :TX_PROXY ERROR: Actor# [1:7494348461989514380:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:36.360206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.485794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.523534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.554877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.584060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.729775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.757649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.781933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.815527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.846627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.874005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.900582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.926799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.495602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:19:37.528367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.555671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.591058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.621388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.653518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.678590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.704186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.730522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.762579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.795827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.828218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.858676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.903102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.971938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.010898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.044917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.068195Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348449104611759:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:38.068271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... 15714; 2025-04-17T18:20:06.249760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.257418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.262228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.267827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.272873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.277701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.282371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.284320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.287283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.289237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.292543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.294531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.297675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.301455Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.302693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.307134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.308202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.313192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.313935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.318884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.319327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.324100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.324436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.330281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.330281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.336065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.336065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.341924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.341982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.347984Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.348099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.353676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.353676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.359785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.359794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.365836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.365863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.371852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038482;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.372170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.377935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.377935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.383555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.383610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:06.453259Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fmnvy6v2324wtdxbyg0na", SessionId: ydb://session/3?node_id=1&id=ZjU3ZGU1MzYtNzdmZjNlM2MtMTg0OTQ0YWUtZjFjOTYyMDI=, Slow query, duration: 27.190658s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 
(\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:06.790693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; 2025-04-17T18:20:06.791034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348565068758610:6406];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-17T18:20:06.791978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; 2025-04-17T18:20:06.792459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716;
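The two query texts embedded above in escaped form read more easily as plain YQL; the reconstructions below are derived from the log and are readability aids only, not additional test output. First, the statistics write issued by the [TQueryBase] RunDataQuery records. The logged text declares $column_tags and $data as bare "List", which looks like angle-bracketed type parameters lost in capture, so the element types shown here are assumptions:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed; logged as bare "List"
    DECLARE $data AS List<String>;         -- element type assumed; logged as bare "List"

    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

Second, the DDL captured in the KQP_SLOW_LOG record (duration 27.190658s), unescaped literally from the logged query text with indentation approximated. Three column-store tables each requesting AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 means on the order of 720 column shards are created up front, which is consistent with the long DDL duration under asan:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);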
:3:9: Warning: Symbol $limit is not used, code: 4527
:2:9: Warning: Symbol $browserGroup is not used, code: 4527
:1:9: Warning: Symbol $quotaName is not used, code: 4527
:4:9: Warning: Symbol $offset is not used, code: 4527
:3:9: Warning: Symbol $limit is not used, code: 4527
:2:9: Warning: Symbol $browserGroup is not used, code: 4527
:1:9: Warning: Symbol $quotaName is not used, code: 4527
:4:9: Warning: Symbol $offset is not used, code: 4527 >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3301, MsgBus: 32188 2025-04-17T18:19:35.637833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348457849247335:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:35.638367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f8b/r3tmp/tmprhVohU/pdisk_1.dat 2025-04-17T18:19:35.987783Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3301, node 1 2025-04-17T18:19:36.030913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:36.030982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:36.034500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:36.058567Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:36.058586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:36.058592Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:36.058660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32188 TClient is connected to server localhost:32188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:36.577746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
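The eight ":N:9: Warning: Symbol $... is not used, code: 4527" lines above are YQL compile warnings for declared parameters that the query body never references; column 9 is where the symbol begins after "DECLARE ", and the block repeats because the same query text was apparently compiled twice. A minimal sketch that would reproduce them follows; only the symbol names come from the log, while the declared types and the query body are hypothetical:

    DECLARE $quotaName AS Utf8;     -- :1:9, type assumed
    DECLARE $browserGroup AS Utf8;  -- :2:9, type assumed
    DECLARE $limit AS Uint64;       -- :3:9, type assumed
    DECLARE $offset AS Uint64;      -- :4:9, type assumed
    SELECT 1;  -- no declared symbol is referenced, so each DECLARE yields warning 4527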
2025-04-17T18:19:36.591632Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:38.475876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348470734149889:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.475950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348470734149897:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.476002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:38.479951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:38.491263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348470734149903:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:38.595316Z node 1 :TX_PROXY ERROR: Actor# [1:7494348470734149954:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:38.869919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:38.988957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.022254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.054320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.088455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.248614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.277650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.309700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.337548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.365296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.394455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.422303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:19:39.446533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.019620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:19:40.059449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.098432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.169321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.218109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.250734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.287120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.321884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.356231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.395833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.469920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.504173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.539858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.607519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.637834Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348457849247335:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:40.637910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:19:40.676890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at 
schemeshard: 72057594046644480 2025-04-17T18:19:40.702112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:19:40.757824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.877973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.882725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.883211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.888754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.889929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.893981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.895386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.899672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.902813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.905903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.908070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.912677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.914561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.920608Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.925850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.927672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.933106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.939770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.946280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.957129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.963615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.969097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.974574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.982458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.987926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.991807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:08.997771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.001749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.007720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.010976Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.016643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.021540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.031097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.034420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.043799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.047369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.053538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.059447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.069208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.072015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.078504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.082891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.088992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.094095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.102520Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:09.222899Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fmrfs4mwe3d0srzqdb9d2", SessionId: ydb://session/3?node_id=1&id=ZjQ4YzE1MzktOWIzZTYxMGMtOWEyMGQ4MjctNWQ0YmIxYWI=, Slow query, duration: 27.276982s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:09.432278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:09.432280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:09.432549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348569518423993:5875];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-17T18:20:09.433462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 25089, MsgBus: 63523 2025-04-17T18:20:09.972559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348604430733427:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:09.973061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f6c/r3tmp/tmpjk2c4N/pdisk_1.dat 2025-04-17T18:20:10.585374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:10.585495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:10.593573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:10.686889Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25089, node 1 2025-04-17T18:20:10.854811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:10.854836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:10.854844Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-17T18:20:10.854981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63523 TClient is connected to server localhost:63523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:11.901218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:11.934942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:11.953693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:12.164199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:12.363293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:12.446566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:14.421253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348625905571514:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:14.421404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:14.735460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:14.778101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:14.868097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:14.914139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:14.956864Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348604430733427:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:14.956984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:14.959033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:15.002739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:15.081424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:15.210601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348630200539483:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:15.210710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:15.211043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348630200539488:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:15.215113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:15.230145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348630200539490:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:15.313916Z node 1 :TX_PROXY ERROR: Actor# [1:7494348630200539547:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:16.461049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:16.495420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:16.537572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup >> KqpJoinOrder::ShuffleEliminationTpcdsMapJoinBug >> KqpJoinOrder::TPCDS90-ColumnStore [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8826, MsgBus: 64035 2025-04-17T18:19:32.709755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348445052564524:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:32.710234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f98/r3tmp/tmpP3ryrt/pdisk_1.dat 2025-04-17T18:19:33.085358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:33.100504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:33.100631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:33.104562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8826, node 1 2025-04-17T18:19:33.191184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:33.191209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:33.191216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:33.191337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64035 TClient is connected to server localhost:64035 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:33.708058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:35.445026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348457937467078:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:35.445047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348457937467070:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:35.445157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:35.448223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:35.458134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348457937467084:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:35.543039Z node 1 :TX_PROXY ERROR: Actor# [1:7494348457937467135:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:35.838540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:35.946102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.014591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.046283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.076756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.209935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.276085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.309076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.379349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.448642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.482134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.525358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.549846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.043195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:19:37.104002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.128852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.156459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.185630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.213090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.239610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.267504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.300152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.339084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.371349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.405757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.436691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.471425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.501623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.561315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.589768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.621016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.648849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.677493Z node 1 :FLAT_TX_ ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.699454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.699454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.705551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.705728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.711875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.711885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.717870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.717873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.724185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.724284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.730199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.730202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.735025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:20:05.735356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.740949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.741150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.746686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.748176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.752167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.752953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.757822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.758308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.762801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.766490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.768144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.771330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.773380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.777434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.778474Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.783026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.783047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.789115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.789257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.794366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.794974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.800190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.800352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.805190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.805798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.814410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.819451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.820182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.827130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.827129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.873343Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:05.922714Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fmne09hgxb8hytc5as9sk", SessionId: ydb://session/3?node_id=1&id=Y2VkMmMwNDktZTAwNzVmMDctZTg1NGE2ZjctYTExZGU4OWM=, Slow query, duration: 27.105415s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:06.417951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:06.418400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:06.419264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7494348518067025921:4435];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-04-17T18:20:06.419648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:15.521526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:15.521625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:15.521663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:15.521700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:15.521745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:15.521793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:15.521858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:15.521939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:15.522298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:15.606661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:15.606741Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:15.619544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:15.620236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:15.620423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:15.631876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:15.632449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:15.633121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:15.633334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:15.636689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:15.638051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:15.638109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:15.638342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:15.638401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:15.638452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:15.638588Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:15.646513Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:15.777776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:15.778022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:15.778235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:15.778461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:15.778527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:15.780683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:15.780818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:15.781052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:15.781120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:15.781173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:15.781206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:15.783364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:15.783412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:15.783449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:15.786832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:15.786892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:15.786931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:15.786985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-04-17T18:18:15.790680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:15.792462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:15.792646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:15.793549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:15.793685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:15.793733Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:15.793992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:15.794058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:15.794249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:15.794321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:15.796288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:15.796331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:15.796481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:15.796517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:15.796874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:15.796935Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:15.797037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:15.797071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:15.797110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:15.797167Z no 
... nd_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:20:24.106150Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:20:24.106454Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 308us result status StatusSuccess 2025-04-17T18:20:24.107131Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:20:24.129466Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1169:2953] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:20:24.129571Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1139:2953] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:20:24.129722Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1169:2953] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914024055658 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744914024055658 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744914024055658 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:20:24.135877Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1169:2953] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-17T18:20:24.135995Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1139:2953] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpJoin::IdxLookupPartialLeftPredicate >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 17531, MsgBus: 23103 2025-04-17T18:20:21.876535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348658671840891:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:21.876582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f66/r3tmp/tmpB8icba/pdisk_1.dat 2025-04-17T18:20:22.607270Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:22.614084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:22.614172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:22.617623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17531, node 1 2025-04-17T18:20:22.769396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:22.769430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:22.769441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:22.769572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23103 TClient is connected to server localhost:23103 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:23.396911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:23.426743Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:23.441107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:20:23.597141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:23.800736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:23.872022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:25.745398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348675851711723:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:25.745529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:26.130839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:20:26.205794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:20:26.243927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:20:26.339594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:20:26.398342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:20:26.469362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:20:26.522708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:20:26.618972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348680146679697:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:26.619070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:26.619446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348680146679702:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:26.623603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:20:26.643769Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480
2025-04-17T18:20:26.644723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348680146679704:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:26.730130Z node 1 :TX_PROXY ERROR: Actor# [1:7494348680146679763:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:26.876629Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348658671840891:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:26.876705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:28.026010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:28.086527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:28.124056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:28.202790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:28.260457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:28.298773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH16 >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:31.367414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:31.367509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:31.367549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:31.367583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:31.367629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:31.367671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:31.367728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:31.367810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:31.368146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:31.430471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:31.430538Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:31.440297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:31.440963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:31.441146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:31.449974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:31.450512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:31.451000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.451144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:31.453363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.454271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:31.454314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.454468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:31.454503Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:31.454531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:31.454617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:31.459648Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:31.565857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:31.566041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.566169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:31.566342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:31.566384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.568181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.568281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:31.568433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.568477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:31.568515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:31.568537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:31.570053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.570090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:31.570112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:31.571316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.571358Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.571386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.571431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:31.573808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:31.575212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:31.575347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:31.575987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:31.576070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:31.576112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.576309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:31.576352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:31.576482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:31.576532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:31.578001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:31.578033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:31.578139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:31.578162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:31.578430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:31.578469Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-04-17T18:18:31.578535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:31.578562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:31.578587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:31.578616Z no ... athId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.137917Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.137950Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-17T18:20:32.139574Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-04-17T18:20:32.139636Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:20:32.139903Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-17T18:20:32.140030Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-17T18:20:32.140065Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-17T18:20:32.140101Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-17T18:20:32.140131Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-17T18:20:32.140167Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-17T18:20:32.143455Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.143587Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.143622Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-17T18:20:32.143669Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-17T18:20:32.143707Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-17T18:20:32.143808Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-17T18:20:32.144426Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-17T18:20:32.144476Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:20:32.144764Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-17T18:20:32.144888Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-17T18:20:32.144922Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-17T18:20:32.144966Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-17T18:20:32.144996Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-17T18:20:32.145033Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-17T18:20:32.145068Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-17T18:20:32.145113Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-17T18:20:32.145146Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-17T18:20:32.145242Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:20:32.145283Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-17T18:20:32.145309Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-17T18:20:32.145342Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-17T18:20:32.145370Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-17T18:20:32.145395Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-17T18:20:32.145440Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-17T18:20:32.146791Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.147042Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.151692Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.151770Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.151816Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.160083Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-17T18:20:32.163580Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 489626274075 } TabletId: 72075186233409546 State: 4 2025-04-17T18:20:32.163673Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-17T18:20:32.167335Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:20:32.167871Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 
72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-17T18:20:32.171515Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-17T18:20:32.172117Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-17T18:20:32.172822Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:20:32.172878Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-17T18:20:32.172956Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-17T18:20:32.173005Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-17T18:20:32.173129Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:20:32.182838Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-17T18:20:32.182933Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-17T18:20:32.183666Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-17T18:20:32.183933Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-17T18:20:32.183980Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-17T18:20:32.185193Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-17T18:20:32.185291Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-17T18:20:32.185329Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:630:2556] 2025-04-17T18:20:32.193150Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 489626274078 } TabletId: 72075186233409547 State: 4 2025-04-17T18:20:32.193245Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-17T18:20:32.199518Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-17T18:20:32.200116Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-04-17T18:20:32.200348Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, 
message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-17T18:20:32.200642Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-17T18:20:32.204173Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-17T18:20:32.204229Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-17T18:20:32.204313Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:20:32.210634Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-17T18:20:32.210731Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-17T18:20:32.211200Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-17T18:20:32.211620Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-17T18:20:32.211701Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] >> TGroupMapperTest::MonteCarlo [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull >> KqpJoinOrder::TPCDS88+ColumnStore |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26810, MsgBus: 5844 2025-04-17T18:20:19.566099Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348649538546791:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:19.566132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f67/r3tmp/tmpJPZRYW/pdisk_1.dat 2025-04-17T18:20:20.272038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:20.318091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:20.319319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:20.327592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26810, node 1 2025-04-17T18:20:20.642793Z node 1 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:20.642836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:20.642851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:20.643079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5844 TClient is connected to server localhost:5844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:21.284991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:21.299283Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:21.314076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:21.456983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:21.634634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:21.729014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:23.432419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348666718417690:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:23.432619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:23.785110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:20:23.818355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:20:23.850385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:20:23.881825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:20:23.930478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:20:23.999090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:20:24.034898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:20:24.107882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348671013385650:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:24.107962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:24.108220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348671013385655:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:24.112130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:20:24.123224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348671013385657:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:24.181282Z node 1 :TX_PROXY ERROR: Actor# [1:7494348671013385711:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:24.567033Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348649538546791:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:24.567104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:25.289689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:25.332856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:25.369038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:25.423311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:25.506619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:25.586916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30338, MsgBus: 7351 2025-04-17T18:20:27.047441Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348681787136094:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:27.047482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f67/r3tmp/tmpzxEbii/pdisk_1.dat 2025-04-17T18:20:27.362598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:27.362671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:27.364039Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:27.402742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30338, node 2 2025-04-17T18:20:27.568926Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:27.568946Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:27.568954Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:27.569056Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7351 TClient is connected to server localhost:7351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:28.088240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:28.098903Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:20:28.123361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:28.199941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:28.361981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:28.441934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:30.624029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348694672039743:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:30.624092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:30.686500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-17T18:20:30.723610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-17T18:20:30.793467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-17T18:20:30.837893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-17T18:20:30.882558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-17T18:20:30.964147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-17T18:20:31.042461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-04-17T18:20:31.148884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348698967007717:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:31.148955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:31.149338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348698967007722:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:31.153305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-04-17T18:20:31.165282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348698967007724:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:20:31.238385Z node 2 :TX_PROXY ERROR: Actor# [2:7494348698967007780:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:32.048286Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348681787136094:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:32.048353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:32.339205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.427838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.463318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.493087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.526962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.563975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] Test command err: Trying to start YDB, gRPC: 4326, MsgBus: 12991 2025-04-17T18:19:54.620887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348540646988080:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:54.621994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f7a/r3tmp/tmpH4k4dK/pdisk_1.dat 2025-04-17T18:19:55.050609Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:55.069823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:55.070102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:55.072591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4326, node 1 2025-04-17T18:19:55.178851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T18:19:55.178871Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:55.178878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:55.178991Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12991 TClient is connected to server localhost:12991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:55.729994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:55.754029Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:57.892316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348553531890620:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:57.892444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:57.892861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348553531890632:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:19:57.896748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:19:57.911214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348553531890634:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:58.005640Z node 1 :TX_PROXY ERROR: Actor# [1:7494348557826857981:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:58.355386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.482147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.511979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.543311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.621583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.781833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.811368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.852826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.934875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:58.969570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:19:59.002612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:19:59.039523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:19:59.081351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:59.621419Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348540646988080:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:59.621504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:19:59.760729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:19:59.840936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:59.896959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:59.961448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:59.992417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.027759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.060492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.087401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.119018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.159583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.192300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.228396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.285382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.354139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.385933Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.418327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:00.450395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.090941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.091165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.096945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.097032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.103248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.103442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.109066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.109088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.115199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.117450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.121075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.124986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.132559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.135171Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.141547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.143706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.149876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.152122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.155537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.156971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.164608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.170850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.170861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.176519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.180966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.188748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.190428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.194154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.195962Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.199942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.205499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.210838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.211168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.226068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.230355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.234754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.240871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.248286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.253692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.258219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.263357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.268076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.272886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.277067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.286547Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.390793Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fnbtm0e7ddsnyc0spzsdf", SessionId: ydb://session/3?node_id=1&id=ZjYxZjFmNjAtMWI5MmVmZDgtN2UxNDI2YjgtZjYzMjc1NDE=, Slow query, duration: 25.642246s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:27.655258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:27.655794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348656611132426:6015];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-17T18:20:27.655847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:27.656405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 32415, MsgBus: 19672 2025-04-17T18:20:27.040068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348679556250555:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:27.044755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f60/r3tmp/tmpfrqJdw/pdisk_1.dat 2025-04-17T18:20:27.615267Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:27.621360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:27.621468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:27.624390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32415, node 1 2025-04-17T18:20:27.854577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:27.854593Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:27.854598Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:27.854671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19672 TClient is connected to server localhost:19672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:28.550450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:28.613590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:28.796137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:29.110374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:29.209751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:31.114944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348701031088670:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:31.115045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:31.500196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:31.579499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:31.636489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:31.681929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:31.724765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:31.811767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:31.873825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:31.955844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348701031089332:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:31.955929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:31.956123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348701031089337:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:31.960172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:31.977102Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:20:31.977743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348701031089339:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:32.004380Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348679556250555:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:32.004493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:32.073365Z node 1 :TX_PROXY ERROR: Actor# [1:7494348705326056695:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:33.107546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:33.147802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:33.191601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 query_phases { duration_us: 9411 table_access { name: "/Root/Join1_1" reads { rows: 8 bytes: 136 } partitions_count: 1 } cpu_time_us: 5012 affected_shards: 1 } query_phases { duration_us: 12624 table_access { name: "/Root/Join1_2" reads { rows: 3 bytes: 57 } partitions_count: 1 } cpu_time_us: 9996 affected_shards: 1 } compilation { duration_us: 590408 cpu_time_us: 586650 } process_cpu_time_us: 559 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"E-Size\":\"No estimate\",\"PlanNodeId\":8,\"LookupKeyColumns\":[\"Key1\"],\"Node Type\":\"TableLookup\",\"Path\":\"\\/Root\\/Join1_2\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Rows\":\"No estimate\",\"Table\":\"Join1_2\",\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Iterator\":\"PartitionByKey\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"PartitionByKey\",\"Input\":\"precompute_0_0\"}],\"Node 
Type\":\"ConstantExpr-Aggregate\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1744914034182,\"Host\":\"ghrun-gkdwnlcfnq\",\"ResultRows\":2,\"ResultBytes\":7,\"OutputRows\":2,\"ComputeTimeUs\":78,\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":1,\"Rows\":2,\"DstStageId\":0,\"Bytes\":7}],\"TaskId\":1,\"OutputBytes\":7}],\"PeakMemoryUsageBytes\":131072,\"CpuTimeUs\":1598}],\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7}},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"ResultRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FinishedTasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1744914034179,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1042,\"Max\":1042,\"Min\":1042},\"OutputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"UpdateTimeMs\":4},\"CTE Name\":\"precompute_0_0\"}],\"PlanNodeType\":\"Connection\",\"E-Cost\":\"No estimate\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"},{\"Inputs\":[{\"InternalOperatorId\":3},{\"InternalOperatorId\":2}],\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"},{\"Inputs\":[],\"ToFlow\":\"precompute_0_0\",\"Name\":\"ToFlow\"},{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist ... 
"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1112,\"Max\":1112,\"Min\":1112},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"UpdateTimeMs\":3,\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Node Type\":\"Precompute_0\",\"Parent Relationship\":\"InitPlan\",\"PlanNodeType\":\"Materialize\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":590408,\"CpuTimeUs\":586650},\"ProcessCpuTimeUs\":559,\"TotalDurationUs\":641688,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Rows\":\"No estimate\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\",\"Name\":\"TableLookup\",\"Table\":\"Join1_2\",\"LookupKeyColumns\":[\"Key1\"]}],\"Node Type\":\"TableLookup\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist(item.Key1)\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Join1_1\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Join1_1\",\"ReadColumns\":[\"Fk21\",\"Fk22\",\"Key\",\"Value\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Size\":\"No estimate\",\"A-SelfCpu\":1.037,\"Name\":\"Filter\",\"Predicate\":\"Exist(item.Fk21) AND item.Value == \\\"Value3\\\"\",\"A-Rows\":2,\"E-Rows\":\"No estimate\",\"A-Cpu\":1.037,\"E-Cost\":\"No estimate\",\"A-Size\":34}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"InnerJoin (MapJoin)\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":2.528,\"A-Cpu\":3.565,\"A-Size\":108,\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"}],\"Node Type\":\"TopSort\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":1.01,\"A-Cpu\":4.575,\"A-Size\":108,\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"Fk21\" (OptionalType (DataType \'Int32))) \'(\'\"Fk22\" 
(OptionalType (DataType \'String))) \'(\'\"Key\" (OptionalType (DataType \'Int32))) \'(\'\"Value\" (OptionalType (DataType \'String))))))\n(let $1 (KqpTable \'\"/Root/Join1_1\" \'\"72057594046644480:17\" \'\"\" \'1))\n(let $2 \'(\'\"Fk21\" \'\"Fk22\" \'\"Key\" \'\"Value\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'() (Void) \'()))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($32) (FromFlow (Filter (ToFlow $32) (lambda \'($33) (And (Exists (Member $33 \'\"Fk21\")) (Coalesce (== (Member $33 \'\"Value\") (String \'\"Value3\")) (Bool \'false))))))) \'(\'(\'\"_logical_id\" \'1035) \'(\'\"_id\" \'\"4d23bc99-a5179e70-56fcea66-b571a220\"))))\n(let $5 (DqCnUnionAll (TDqOutput $4 \'0)))\n(let $6 (DqPhyStage \'($5) (lambda \'($34) $34) \'(\'(\'\"_logical_id\" \'1413) \'(\'\"_id\" \'\"74a481f5-60bd3099-3a1b1cb5-ea30131\"))))\n(let $7 (DqCnResult (TDqOutput $6 \'0) \'()))\n(let $8 \'(\'(\'\"type\" \'\"data\")))\n(let $9 (KqpPhysicalTx \'($4 $6) \'($7) \'() $8))\n(let $10 \'\"%kqp%tx_result_binding_0_0\")\n(let $11 (DataType \'Int32))\n(let $12 (OptionalType $11))\n(let $13 (OptionalType (DataType \'String)))\n(let $14 (StructType \'(\'\"Fk21\" $12) \'(\'\"Fk22\" $13) \'(\'\"Key\" $12) \'(\'\"Value\" $13)))\n(let $15 (ListType $14))\n(let $16 %kqp%tx_result_binding_0_0)\n(let $17 \'(\'(\'\"_logical_id\" \'1078) \'(\'\"_id\" \'\"3f6d60c2-19d81f1e-6261cc61-67c8987c\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $18 (DqPhyStage \'() (lambda \'() (Iterator (PartitionByKey $16 (lambda \'($35) (Member $35 \'\"Fk21\")) (Void) (Void) (lambda \'($36) (Map (Filter (FlatMap $36 (lambda \'($37) (Map (Take (Nth $37 \'1) (Uint64 \'1)) (lambda \'($38) (AsStruct \'(\'\"Fk21\" (Member $38 \'\"Fk21\"))))))) (lambda \'($39) (Exists (Member $39 \'\"Fk21\")))) (lambda \'($40) (AsStruct \'(\'\"Key1\" (Member $40 \'\"Fk21\"))))))))) $17))\n(let $19 (KqpTable \'\"/Root/Join1_2\" \'\"72057594046644480:18\" \'\"\" \'1))\n(let $20 \'(\'\"Fk3\" \'\"Key1\" \'\"Key2\" \'\"Value\"))\n(let $21 (KqpCnStreamLookup (TDqOutput $18 \'0) $19 $20 (ListType (StructType \'(\'\"Key1\" $12))) \'(\'(\'\"Strategy\" \'\"LookupRows\"))))\n(let $22 (Uint64 \'\"1001\"))\n(let $23 (StructType \'(\'\"t1.Fk21\" $12) \'(\'\"t1.Fk22\" $13) \'(\'\"t1.Key\" $12) \'(\'\"t1.Value\" $13) \'(\'\"t2.Fk3\" $13) \'(\'\"t2.Key1\" $12) \'(\'\"t2.Key2\" $13) \'(\'\"t2.Value\" $13)))\n(let $24 \'(\'(\'\"_logical_id\" \'1298) \'(\'\"_id\" \'\"7f7081d0-b40ae9c5-99c368f4-bbc87023\") \'(\'\"_wide_channels\" $23)))\n(let $25 (DqPhyStage \'($21) (lambda \'($41) (block \'(\n (let $42 \'(\'Many \'Hashed \'Compact))\n (let $43 (SqueezeToDict (FlatMap (ToFlow $16) (lambda \'($46) (block \'(\n (let $47 (Member $46 \'\"Fk21\"))\n (let $48 (Nothing (OptionalType (TupleType $11 $14))))\n (let $49 (IfPresent $47 (lambda \'($50) (Just \'($50 $46))) $48))\n (return (If (Exists $47) $49 $48))\n )))) (lambda \'($51) (Nth $51 \'0)) (lambda \'($52) (Nth $52 \'1)) $42))\n (let $44 (TopSort (FlatMap $43 (lambda \'($53) (block \'(\n (let $54 \'(\'\"Fk3\" \'\"t2.Fk3\" \'\"Key1\" \'\"t2.Key1\" \'\"Key2\" \'\"t2.Key2\" \'\"Value\" \'\"t2.Value\"))\n (let $55 \'(\'\"Fk21\" \'\"t1.Fk21\" \'\"Fk22\" \'\"t1.Fk22\" \'\"Key\" \'\"t1.Key\" \'\"Value\" \'\"t1.Value\"))\n (return (MapJoinCore (OrderedFilter (ToFlow $41) (lambda \'($56) (Exists (Member $56 \'\"Key1\")))) $53 \'\"Inner\" \'(\'\"Key1\") \'(\'\"Fk21\") $54 $55 \'(\'\"t2.Key1\") \'(\'\"t1.Fk21\")))\n )))) $22 (Bool \'true) (lambda \'($57) (Member $57 \'\"t2.Value\"))))\n (let $45 (lambda \'($58) 
(Member $58 \'\"t1.Fk21\") (Member $58 \'\"t1.Fk22\") (Member $58 \'\"t1.Key\") (Member $58 \'\"t1.Value\") (Member $58 \'\"t2.Fk3\") (Member $58 \'\"t2.Key1\") (Member $58 \'\"t2.Key2\") (Member $58 \'\"t2.Value\")))\n (return (FromFlow (ExpandMap $44 $45)))\n))) $24))\n(let $26 (DqCnMerge (TDqOutput $25 \'0) \'(\'(\'\"7\" \'\"Asc\"))))\n(let $27 (DqPhyStage \'($26) (lambda \'($59) (FromFlow (NarrowMap (Take (ToFlow $59) $22) (lambda \'($60 $61 $62 $63 $64 $65 $66 $67) (AsStruct \'(\'\"t1.Fk21\" $60) \'(\'\"t1.Fk22\" $61) \'(\'\"t1.Key\" $62) \'(\'\"t1.Value\" $63) \'(\'\"t2.Fk3\" $64) \'(\'\"t2.Key1\" $65) \'(\'\"t2.Key2\" $66) \'(\'\"t2.Value\" $67)))))) \'(\'(\'\"_logical_id\" \'1311) \'(\'\"_id\" \'\"3c530631-95e1d3a1-b3c295f5-51e9b79\"))))\n(let $28 \'($18 $25 $27))\n(let $29 (DqCnResult (TDqOutput $27 \'0) \'()))\n(let $30 (KqpTxResultBinding $15 \'0 \'0))\n(let $31 (KqpPhysicalTx $28 \'($29) \'(\'($10 $30)) $8))\n(return (KqpPhysicalQuery \'($9 $31) \'((KqpTxResultBinding (ListType $23) \'1 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 641688 total_cpu_time_us: 602217 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_2\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":18},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key1\\\",\\\"Key2\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\",\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_1\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":17},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk21\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Fk22\\\",\\\"Id\\\":3,\\\"
Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744914034\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"eb7dbdd6-36317b7d-7270ee46-e78348f4\",\"version\":\"1.0\"}" >> OlapEstimationRowsCorrectness::TPCH3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13871, MsgBus: 9907 2025-04-17T18:19:17.965670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348380801461094:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:17.965845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fc0/r3tmp/tmpwpKy4v/pdisk_1.dat 2025-04-17T18:19:18.233785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:18.241998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:18.242098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:18.244315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13871, node 1 2025-04-17T18:19:18.310981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:18.311006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:18.311022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:18.311184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9907 TClient is connected to server localhost:9907 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:18.788849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:20.308495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348393686363652:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.308526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348393686363643:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.308642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.312536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:20.321557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348393686363657:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:20.391547Z node 1 :TX_PROXY ERROR: Actor# [1:7494348393686363708:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:20.624338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.823522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:20.823524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:20.823676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:20.823895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:20.823970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:20.824036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:20.824103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:20.824180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:20.824211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:20.824249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:20.824318Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:20.824373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:20.824400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:20.824489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:20.824505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:20.824597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348393686363977:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:20.824629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:20.824741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:20.824847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:20.824979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:20.825110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:20.825234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:20.825361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:20.825484Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348393686363967:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:20.853496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348393686363973:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:20.853562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348393686363973:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:20.853807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_i ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.334580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.335063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.340216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.340523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.345683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.345972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.351343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.351344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.356838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.356837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.366214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.370764Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.372409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.378091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.380905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039224;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.390083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.391405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.396411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.402314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.404831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.407575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.412826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.420476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.424644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.430060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.433876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.435285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.440316Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.443380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.445836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.451905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.455962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.457431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.461523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.463221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.468934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.474773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.475120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.481107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.486232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.496308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.500502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.505493Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.508907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.522367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:19.698848Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fn2635wxv0rc3qmy9hcpc", SessionId: ydb://session/3?node_id=1&id=ZDJhYzViN2ItZWFlNjliYTMtYjE4YzRkMzItYmZjZGQ0MGU=, Slow query, duration: 27.823258s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:19.993208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:19.993599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:19.994378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494348625614656581:11136];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:20:19.994698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH21
>> KqpJoinOrder::CanonizedJoinOrderTPCC
>> OlapEstimationRowsCorrectness::TPCH9
>> KqpJoin::RightSemiJoin_KeyPrefix
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient:
[1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:23.291261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:23.291361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:23.291400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:23.291440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:23.291484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:23.291524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:23.291582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:23.291663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:23.291979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:23.377533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:23.377594Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:23.389420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:23.390011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:23.390212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:23.400913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:23.401500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:23.402110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:23.402347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:23.405330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:23.406675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-17T18:18:23.406734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:23.406939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:23.407016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:23.407062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:23.407200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:23.414023Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:23.535804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:23.536031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:23.536234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:23.536462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:23.536522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:23.538835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:23.538976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:23.539198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:23.539267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:23.539323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:23.539356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:23.541310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:23.541359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-17T18:18:23.541394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:23.543106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:23.543171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:23.543210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:23.543265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:23.547105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:23.548963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:23.549118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:23.550043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:23.550204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:23.550254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:23.550520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:23.550598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:23.550783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:23.550853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:23.552880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:23.552925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:23.553072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:23.553116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:23.553503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:23.553557Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:23.553646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:23.553684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:23.553719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:23.553763Z no ... Size: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:20:36.523850Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:20:36.524142Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 317us result status StatusSuccess 2025-04-17T18:20:36.525028Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:20:36.538658Z node 54 :CHANGE_EXCHANGE DEBUG: 
[TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1115:2883] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:20:36.538768Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1062:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-17T18:20:36.538939Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1115:2883] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914036509983 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744914036509983 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744914036509983 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:20:36.541371Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1115:2883] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:20:36.541467Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1062:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
>> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD]
>> KqpJoinOrder::TestJoinHint1+ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 24907, MsgBus: 20413 2025-04-17T18:19:18.838639Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348384233213953:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:18.838854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fb3/r3tmp/tmpa8L4zG/pdisk_1.dat 2025-04-17T18:19:19.075822Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24907, node 1 2025-04-17T18:19:19.131938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:19.131957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:19.131962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:19.132049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:19.164463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown ->
Disconnected 2025-04-17T18:19:19.164573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:19.166734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20413 TClient is connected to server localhost:20413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:19.496092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:21.418732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348397118116507:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.418743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348397118116515:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.418873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:21.422212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:21.431427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348397118116521:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:21.494669Z node 1 :TX_PROXY ERROR: Actor# [1:7494348397118116572:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:21.774318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.974532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:21.974532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:21.974676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:21.974876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:21.974953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:21.975015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:21.975081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:21.975135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:21.975182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:21.975261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:21.975287Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:21.975379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:21.975385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:21.975474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:21.975553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:21.975602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:21.975697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:21.975704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:21.975824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348397118116834:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:21.975832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:21.975931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:21.976037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:21.976142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:21.976240Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037893;self_id=[1:7494348397118116842:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:22.004569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348397118116864:2359];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:22.004569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348397118116862:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:22.004617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:74943483 ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.158723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.158740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.165039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.165101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.171813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.176975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.178451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.184240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.187601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.200733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.203598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.210860Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.216767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.224311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.229460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.230838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.236708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.247261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.260904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.274600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.283672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.292453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.301351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.308913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.308956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.313793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.320082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.322215Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.331104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.335764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.340579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.345215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.349927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.359144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.368749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.378065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.378318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.388036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.391820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.393958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.400115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.405815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.407340Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.417991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.419993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.622218Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fn3jb9dhccndgrvaymtwm", SessionId: ydb://session/3?node_id=1&id=ZGIyMmZiYWYtOWMxNzM2ZTktMmQyODIwYjYtZTcwODZkZTc=, Slow query, duration: 29.330347s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:22.979860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:22.980297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:22.981081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494348581801756659:9135];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:20:22.981450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD]
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest
|94.8%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|94.8%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpJoin::LeftJoinPushdownPredicate_NestedJoin
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD]
Test command err:
Trying to start YDB, gRPC: 18185, MsgBus: 17403 2025-04-17T18:19:18.374286Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348384677316843:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:18.374399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fbc/r3tmp/tmpF9UHwX/pdisk_1.dat 2025-04-17T18:19:18.667198Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18185, node 1 2025-04-17T18:19:18.697309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:18.697331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:18.697343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:18.697464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:18.747811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:18.747990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:18.749839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17403 TClient is connected to server localhost:17403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:19.136149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:20.581204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348393267252111:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.581215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348393267252102:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.581301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.584683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:20.594012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348393267252116:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:20.681731Z node 1 :TX_PROXY ERROR: Actor# [1:7494348393267252167:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:20.967966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.126572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:21.126823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:21.127082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:21.127200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:21.127293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:21.127417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:21.127527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:21.127652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:21.127784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:21.127912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:21.128035Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:21.128139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348397562219738:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:21.134356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:21.134424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:21.134678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:21.134817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:21.134997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:21.135137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:21.135258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:21.135394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:21.135525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:21.135656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:21.135800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:21.135920Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348397562219748:2360];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:21.154799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348397562219732:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:21.154851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348397562219732:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:21.154983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;sel ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.179494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.181206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.187413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.188591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.193369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.194058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.199308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.199898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.206317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.206382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.212282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.212655Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.219277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.219473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.225590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.226149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.231770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.232186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.237888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.237946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.243808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.244785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.250037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.250776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.256148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.256722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.262094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.262254Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.268463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.269928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.274673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.275585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.280898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.281482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.286880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.288486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.293491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.298235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.302977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.307940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.313253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.318704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.324705Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.330314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.340726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:20.473834Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fn2mj5ebm5hbbbbwn2f72", SessionId: ydb://session/3?node_id=1&id=YjEwNjM5ZDgtY2FkYTYzMWEtYmMzOTFiNDMtZWU1OTgzZGY=, Slow query, duration: 28.134593s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:20.730970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:20.731094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:20.731371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348539296177384:7803];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:20:20.731753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple >> KqpJoinOrder::TPCDS61-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 31677, MsgBus: 64241 2025-04-17T18:20:28.057418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348687147447169:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:28.057466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f5f/r3tmp/tmpKMLU57/pdisk_1.dat 2025-04-17T18:20:28.604410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:28.608173Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:28.608245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:28.611034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31677, node 1 2025-04-17T18:20:28.827957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:28.827981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:28.827989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:28.828202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64241 TClient is connected to server localhost:64241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:29.609032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:29.684964Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:29.704858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:29.896719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:30.100435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:30.198364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:31.972832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348700032350628:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:31.972917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:32.293285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.329097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.364512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.399873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.444945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.531814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.574623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:32.664873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348704327318595:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:32.664943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:32.665196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348704327318600:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:32.668891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:32.683286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348704327318602:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:32.740375Z node 1 :TX_PROXY ERROR: Actor# [1:7494348704327318658:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:33.058291Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348687147447169:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:33.058398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:33.900696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:33.948945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29697, MsgBus: 17243 2025-04-17T18:20:35.506360Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348717156280806:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:35.506504Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f5f/r3tmp/tmpSmLXHr/pdisk_1.dat 2025-04-17T18:20:35.637928Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29697, node 2 2025-04-17T18:20:35.679675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:35.679750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:35.681120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:35.698643Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:35.698662Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:35.698668Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:35.698773Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17243 TClient is connected to server localhost:17243 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:36.118450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:36.131329Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:20:36.147107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:36.229673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:20:36.380936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:36.445313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:38.755365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348730041184388:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:38.755435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:38.825397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:38.891285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:38.930056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:38.969755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:39.007595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:39.067304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:39.102379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:39.158529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348734336152338:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:39.158666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:39.158779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348734336152343:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:39.162573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:39.173179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348734336152345:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:20:39.268591Z node 2 :TX_PROXY ERROR: Actor# [2:7494348734336152403:3542] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:40.107088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:40.200433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:40.506465Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348717156280806:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:40.506522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11319, MsgBus: 6098 2025-04-17T18:19:22.827477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348405048936879:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:22.827555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fa2/r3tmp/tmpqAJSP1/pdisk_1.dat 2025-04-17T18:19:23.149712Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11319, node 1 2025-04-17T18:19:23.230882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:23.230916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:23.230929Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:23.231054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:23.234380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:23.234487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:23.236313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6098 TClient is connected to server localhost:6098 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:23.713115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:25.667454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348417933839434:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.667506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348417933839443:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.667615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:25.671938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:25.681690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348417933839448:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:25.757668Z node 1 :TX_PROXY ERROR: Actor# [1:7494348417933839499:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:26.047966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:26.238099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:26.238383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:26.238701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:26.238902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:26.239057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:26.239203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:26.239369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:26.239519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:26.239665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:26.239848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:26.239958Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:26.240087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348422228807062:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:26.242775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:26.242921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:26.243225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:26.243563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:26.244353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:26.244530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:26.244722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:26.244912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:26.245062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:26.245212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:26.245398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:26.245550Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494348422228807051:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:26.285364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348422228807108:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:26.285434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348422228807108:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:26.285682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_i ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:26.985912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:26.987127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:26.990652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:26.991756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:26.995483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:26.996354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.000347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.001081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.010455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.011970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.015142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.016541Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.020450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.021677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.025982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.026998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.031822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.032490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.037226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.037886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.042650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.042649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.048128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.048183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.053413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.053413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.059108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.059109Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.064732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.070591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.070605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.076520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.077022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.082960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.141312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.141312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.146728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.146733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.152855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.152855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.158826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.166347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.176749Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.185316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.188236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:27.301989Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fn8k66kqbzgkg1j7x2eyq", SessionId: ydb://session/3?node_id=1&id=NDk3MDFjYTQtMTE2ZTNmZGItMTNiN2Q2ZWMtNmZjM2FjMmY=, Slow query, duration: 28.863002s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:27.559737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:27.560220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494348611207415906:9380];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:20:27.567483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:27.568046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10149, MsgBus: 19677 2025-04-17T18:20:00.320111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348566806068760:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:00.320172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f74/r3tmp/tmp1IEW0j/pdisk_1.dat 2025-04-17T18:20:00.821003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:00.821064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:00.827066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-04-17T18:20:00.841639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10149, node 1 2025-04-17T18:20:00.984033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:00.984049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:00.984054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:00.984164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19677 TClient is connected to server localhost:19677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:01.782629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:01.803209Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:03.625366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348579690971177:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:03.625508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:03.625974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348579690971189:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:03.630260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:03.650712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348579690971191:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:03.718537Z node 1 :TX_PROXY ERROR: Actor# [1:7494348579690971242:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:03.989328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.093780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.124893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.205461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.236308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.405272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.433975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.505783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.539899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.579746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.629200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.687509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.725543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.264445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:20:05.298502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.316148Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348566806068760:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:05.316193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:05.323284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.351067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.383057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.409145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.446664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.468200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.508202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.542787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.586197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.611730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.651306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.701413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.738880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at 
schemeshard: 72057594046644480 2025-04-17T18:20:05.803104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.876114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.931691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.936704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.936703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.941913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.941913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.946701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.946701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.951660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.951660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.956603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.956603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.961574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.961574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.966764Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.966764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.971933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.971933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.977160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.977160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.982341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.982341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.987744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.987744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.992959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.992959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.998429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:34.998902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.004489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.005871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.009693Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.012095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.015011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.017220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.020174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.022128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.024933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.026920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.030005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.031831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.035026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.036826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.040072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.041763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.045085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.046865Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.127751Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fnh3hasasnragg835ewys", SessionId: ydb://session/3?node_id=1&id=Y2EzNmViOTAtYjQxMDA5NDgtYjQzZTg1MTItZmIzNjQ2NjQ=, Slow query, duration: 27.973479s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:35.345586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:35.345778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:35.346051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494348605460781926:2976];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-17T18:20:35.346677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoin::FullOuterJoinSizeCheck >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 14942, MsgBus: 22678 2025-04-17T18:20:38.344238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348730399042326:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:38.344321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f55/r3tmp/tmpSm2xeR/pdisk_1.dat 2025-04-17T18:20:38.785347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:38.789747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:38.789829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:38.791404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14942, node 1 2025-04-17T18:20:38.856708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:38.856737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:38.856747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:38.856869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22678 TClient is connected to server localhost:22678 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:39.503336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:39.559748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:39.711850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:39.848862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:39.941996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:41.703876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348743283945987:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:41.703961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:42.055139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.137598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.170982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.213477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.241854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.273758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.304633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.378113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348747578913945:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:42.378258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:42.378516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348747578913951:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:42.390569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:42.403164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348747578913953:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:42.495239Z node 1 :TX_PROXY ERROR: Actor# [1:7494348747578914012:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:43.346580Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348730399042326:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:43.346685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:43.560980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:43.594987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:43.624282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:43.660269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:43.760109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1692, MsgBus: 1332 2025-04-17T18:19:17.696682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348381957018642:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:17.696756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fc2/r3tmp/tmpIjZifd/pdisk_1.dat 2025-04-17T18:19:17.969739Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1692, node 1 2025-04-17T18:19:18.039742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:18.039828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:18.039834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:18.039845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:18.039854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:18.039955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:19:18.041525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1332 TClient is connected to server localhost:1332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:18.483004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:20.217090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348394841921205:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.217094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348394841921198:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.217215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:20.220720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:20.230402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348394841921212:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:20.329643Z node 1 :TX_PROXY ERROR: Actor# [1:7494348394841921263:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:20.612380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.727533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.753727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.776366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.815577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.921442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.943579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.966987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:20.992746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.018007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.043512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.068479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.094348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.570633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:19:21.598611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.620840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.648115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.677490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.711107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.737427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.762993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.787836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.814478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.840820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.866734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.892991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.916223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.942021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.968388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:19:21.996220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.021726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.045772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-17T18:19:22.071383Z node 1 :FLAT_TX_SCH ... id=281474976710714; 2025-04-17T18:19:46.488525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038468;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.489209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038472;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.493078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.493148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038448;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.497265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038476;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.498084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.502142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.503909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.506548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.509660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038456;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.511032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.514602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038506;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.516160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.519587Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.521305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.525080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.526196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.529994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.530928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.534890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:19:46.597148Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fm5xb1bhjvxkqxyxcgk9b", SessionId: ydb://session/3?node_id=1&id=OGUyZDAyMGQtMjY1NWUzYS02MTk1NDI0ZS03M2JkZWM2Yg==, Slow query, duration: 23.672813s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:19:47.017298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:19:47.017430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:19:47.017950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:42.440885Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fnpdn3rdybdmsvm18kfac", SessionId: ydb://session/3?node_id=1&id=OGUyZDAyMGQtMjY1NWUzYS02MTk1NDI0ZS03M2JkZWM2Yg==, Slow query, duration: 29.842478s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join 
household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and 
household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] >> KqpJoinOrder::TPCDS87+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH11 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] Test command err: Trying to start YDB, gRPC: 20397, MsgBus: 64885 2025-04-17T18:20:41.381225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348740456867225:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:41.383392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f4d/r3tmp/tmpMQGcWb/pdisk_1.dat 2025-04-17T18:20:41.809310Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:41.813463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:41.813548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:41.819893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20397, node 1 2025-04-17T18:20:41.941849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:41.941880Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:41.941887Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:41.942051Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:64885 TClient is connected to server localhost:64885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:42.725567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:42.761932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:42.781925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:42.973394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:43.148330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:43.224579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:44.900352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348753341770772:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:44.900480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:45.239949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:45.268910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:45.334511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:45.379459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:45.411096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:45.459446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:45.494569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:45.579605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348757636738733:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:45.579666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:45.580070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348757636738738:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:45.583650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:45.598747Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:20:45.599105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348757636738740:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:45.674055Z node 1 :TX_PROXY ERROR: Actor# [1:7494348757636738797:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:46.382259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348740456867225:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:46.382407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:46.732878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.762425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.799058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpJoin::RightTableIndexPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] Test command err: Trying to start YDB, gRPC: 13217, MsgBus: 20555 2025-04-17T18:20:42.704386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348746068894603:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:42.704818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f4b/r3tmp/tmpkyuQfd/pdisk_1.dat 2025-04-17T18:20:43.130056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:43.158159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:43.159986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:43.162105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13217, node 1 2025-04-17T18:20:43.282729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:43.282762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:43.282770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:43.283974Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20555 TClient is connected to server localhost:20555 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:43.910103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:43.938619Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:43.961606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:44.170507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:44.384791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:44.521214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:46.414950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348763248765413:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:46.415085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:46.732476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.781056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.817898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.860871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.903815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.949777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:46.998152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:47.085610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348767543733372:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:47.085695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:47.085928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348767543733377:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:47.089878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:47.104561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348767543733379:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:47.200444Z node 1 :TX_PROXY ERROR: Actor# [1:7494348767543733437:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:47.678152Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348746068894603:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:47.678234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:48.107956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.136400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.176595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24953, MsgBus: 8867 2025-04-17T18:20:00.464344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348566992069587:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:00.464392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f78/r3tmp/tmpwhkVmy/pdisk_1.dat 2025-04-17T18:20:01.083881Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:01.094982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:01.095070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:01.110491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24953, node 1 2025-04-17T18:20:01.276837Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:01.276860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:01.276865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:01.276960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8867 TClient is connected to server localhost:8867 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:01.980955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:04.200095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348584171939434:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.200111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348584171939446:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.200221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.204281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:04.216109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348584171939448:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:04.314488Z node 1 :TX_PROXY ERROR: Actor# [1:7494348584171939499:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:04.608839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.741923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.780565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.811895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.853874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.005794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.076700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.109925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.135437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.181880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.205914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.243355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.277827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.466680Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348566992069587:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:05.466736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:05.901582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:20:05.944695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.979375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.013141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.044334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.081265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.122109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.190114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.231083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.299709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.345522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.380344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.413638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.457334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.527175Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.565029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.599471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.630588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.503594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.503595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.509535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.509535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.514957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.514958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.520884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.520884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.526272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.526559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.531807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.531853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.535750Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.543495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.548210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.549796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.553216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.555474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.558469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.560763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.563446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.565457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.568153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.570580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.574134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.575505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.579040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.581538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.584143Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.586311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.589244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.591071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.594932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.596093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.600209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.600951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.606529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.606525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.612424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.617959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.623226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.628467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.633748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.638764Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.655695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:33.731735Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fnhrr5z45srf39q9hajqb", SessionId: ydb://session/3?node_id=1&id=YzA1ZmJiOTAtNzZkNTNlMjUtYzM1ZjA0ZDgtYTRmYmY1NGM=, Slow query, duration: 25.896580s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:34.299483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:34.299934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:34.300249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348687251182727:6328];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-17T18:20:34.300603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29524, MsgBus: 13857 2025-04-17T18:19:30.835654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348439009417382:2123];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:30.835701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f99/r3tmp/tmpuUjNh9/pdisk_1.dat 2025-04-17T18:19:31.177764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:31.181914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:31.182028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:31.185152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29524, node 1 2025-04-17T18:19:31.265103Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:31.265128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:31.265134Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:31.265222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13857 TClient is connected to server localhost:13857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:31.755156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:31.771234Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:33.644803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348451894319876:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.644880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348451894319868:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.645155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.650086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:33.662070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348451894319882:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:33.724230Z node 1 :TX_PROXY ERROR: Actor# [1:7494348451894319933:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:33.997897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:34.209028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:34.209245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:34.209242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:34.209286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:34.209767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:34.209798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:34.209877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:34.209945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:34.209944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:34.210000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:34.210062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:19:34.210070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:34.210245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:34.210346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:34.210522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:34.210637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:34.210740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348456189287460:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:34.212229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:34.212369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:34.212533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:34.212677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:34.212807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:34.212926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:34.213043Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494348456189287486:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:34.247421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348456189287544:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:34.247473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348456189287544:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.850772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.856486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.856492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.861944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.861951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.867284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.867324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.872624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.872787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.878029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.878344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.883108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.883428Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.888650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.888723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.894209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.894430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.900107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.900117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.905942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.905943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.911721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.911721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.917711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.918467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.924249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.924249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.929261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.929614Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.934697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.935588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.939946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.941519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.945314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.948174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.951340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.955377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.958412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.963449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.969207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.969299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.975532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.975971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.980822Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:36.003179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:36.139535Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fnheb0y18zbrr1bagdw99", SessionId: ydb://session/3?node_id=1&id=NTdhODc0NmUtMzJjM2ZiNDUtMzQzMGIxMTMtN2ZmYjBmMGU=, Slow query, duration: 28.639978s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:36.395360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:36.396000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:36.396881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494348653757828744:9125];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:20:36.397234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 15659, MsgBus: 5217 2025-04-17T18:19:20.433910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348396217274380:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:20.434041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fad/r3tmp/tmpKM7A9P/pdisk_1.dat 2025-04-17T18:19:20.745913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15659, node 1 2025-04-17T18:19:20.800410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:20.800437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:20.800457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:20.800606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable
configuration 2025-04-17T18:19:20.818505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:20.818603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:20.820409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5217 TClient is connected to server localhost:5217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:21.223788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:22.976448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348404807209648:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.976460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348404807209640:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.976552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:22.979338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:22.987605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348404807209654:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:23.054191Z node 1 :TX_PROXY ERROR: Actor# [1:7494348409102177001:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:23.344505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.544142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:23.544170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:23.544285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:23.544460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:23.544538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:23.544605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:23.544672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:23.544732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:23.544808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:23.544838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:23.544899Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:23.544977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:23.544999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:23.545042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:23.545081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:23.545098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348409102177268:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:23.545178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:23.545282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:23.545378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:23.545470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:23.545556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:23.545643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:23.545726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:23.545837Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348409102177259:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:23.568912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348409102177324:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:23.569008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348409102177324:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:23.569195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_i ... WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.490894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.497402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.497506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.503816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.503816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.510039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.510072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.516494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.516506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.522687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.523064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.533098Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.533460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.537919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.541216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.543604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.557028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.558268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.563309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.567005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.576356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.577741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.582545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.583480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.589386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.591901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.601983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.608245Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.614945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.622922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.628833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.634434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.639830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.646707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.652246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.803170Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fn57q2qcqw94pmbbzrk0b", SessionId: ydb://session/3?node_id=1&id=MWU2MmJiYzgtZGFmOTlhZjUtMWRmNTJkMjctMzVmMGNlOGQ=, Slow query, duration: 27.803261s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:23.357278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:23.357593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348550836134750:7820];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-17T18:20:23.357643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:23.357887Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:42.305492Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fp9m0bxayzw4z74r13bbe", SessionId: ydb://session/3?node_id=1&id=MWU2MmJiYzgtZGFmOTlhZjUtMWRmNTJkMjctMzVmMGNlOGQ=, Slow query, duration: 10.048832s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n$bla1 = (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from store_sales as store_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11);\n\n$bla2 = ((select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from catalog_sales as catalog_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11)\n union all\n (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from web_sales as web_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where web_sales.ws_sold_date_sk = date_dim.d_date_sk\n and web_sales.ws_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11));\n\n-- start query 1 in stream 0 using template query87.tpl and seed 1819994127\nselect count(*)\nfrom $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)\n;\n\n-- end query 1 in stream 0 using template query87.tpl", parameters: 0b
>> KqpJoin::FullOuterJoinSizeCheck [GOOD]
>> KqpJoin::IdxLookupSelf
>> KqpJoinOrder::TPCDS90+ColumnStore
>> KqpIndexLookupJoin::LeftOnly+StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinSizeCheck [GOOD]
Test command err:
Trying to start YDB, gRPC: 19743, MsgBus: 23524 2025-04-17T18:20:45.498652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348759270901562:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:45.498697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f47/r3tmp/tmpKUYQOR/pdisk_1.dat 2025-04-17T18:20:46.024295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:46.024376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:46.033285Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:46.044346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19743, node 1 2025-04-17T18:20:46.278648Z node 1 :NET_CLASSIFIER WARN:
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:46.278674Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:46.278682Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:46.278768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23524 TClient is connected to server localhost:23524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:47.074273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:47.091367Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:47.107047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:20:47.290631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:20:47.485355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:20:47.622777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.127130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348776450772534:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.127221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.473273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.505091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.529137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.598772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.632742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.683685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.758263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.852261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348776450773208:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.852332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.852596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348776450773213:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.856408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:49.866960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348776450773215:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:49.949078Z node 1 :TX_PROXY ERROR: Actor# [1:7494348776450773270:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:50.495574Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348759270901562:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:50.495627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:50.954874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.996356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.036424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::TestJoinHint1-ColumnStore
>> OlapEstimationRowsCorrectness::TPCH5
>> KqpJoin::RightTableIndexPredicate [GOOD]
>> KqpJoinOrder::TPCDS16-ColumnStore [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableIndexPredicate [GOOD]
Test command err:
Trying to start YDB, gRPC: 24146, MsgBus: 3244 2025-04-17T18:20:50.951005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348780358060868:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:50.953952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f3e/r3tmp/tmpBaT4bb/pdisk_1.dat 2025-04-17T18:20:51.512633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:51.512798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:51.513983Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:51.516748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24146, node 1 2025-04-17T18:20:51.646793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:51.646816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:51.646824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:51.646954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3244 TClient is connected to server localhost:3244 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:52.314902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:52.331200Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:52.350364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:52.516925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:52.672437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:20:52.731413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.162036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348797537931797:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:54.162133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:54.544090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.596491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.643465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.678741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.716312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.780558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.860631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.977285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348797537932473:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:54.977388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:54.977464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348797537932479:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:54.981042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:54.997930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348797537932481:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:55.086845Z node 1 :TX_PROXY ERROR: Actor# [1:7494348801832899832:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:55.951108Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348780358060868:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:55.951164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:56.280881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD]
>> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD]
>> KqpJoin::IdxLookupSelf [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16-ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 21058, MsgBus: 3681 2025-04-17T18:20:01.790456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348571826045045:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:01.790931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f6e/r3tmp/tmpeC2Nfz/pdisk_1.dat 2025-04-17T18:20:02.306637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:02.306735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:02.309157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:02.328614Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21058, node 1 2025-04-17T18:20:02.478758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:02.478789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:02.478804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:02.478952Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3681 TClient is connected to server localhost:3681 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:03.037431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:03.051048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:05.020037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348589005914763:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:05.020123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:05.020376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348589005914775:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:05.024808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:05.043137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348589005914777:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:05.101364Z node 1 :TX_PROXY ERROR: Actor# [1:7494348589005914828:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:05.437915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.553270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.595360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.668937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.698056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.888789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.920648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.949852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.978350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.020727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.057338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.127721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.212924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.778582Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348571826045045:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:06.778652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:06.839261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:20:06.879519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.956212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:06.992183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.059354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.095897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.130307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.177327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.247832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.281933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.355093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.396225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.475489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.510703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.549108Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.584094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:07.631221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.621377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.621669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.626989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.627075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.632257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.632535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.637230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.637429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.642128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.642161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.647226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.647247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.652328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.652338Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.657183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.657187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.661416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.661868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.665919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.666319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.670740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.670986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.675773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.675906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.680761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.681082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.685859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.686266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.690619Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.690826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.695913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.695952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.701202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.701207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.706361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.706379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.711354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.711354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.716498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.716503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.721792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.721796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.726942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.726948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.732395Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:35.822293Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fnk1va3671mpsp0s0akyf", SessionId: ydb://session/3?node_id=1&id=YjdhYzY0MTEtMTM2NjA4MzctNTM3OWE3ZjMtNzYxYjRjZjA=, Slow query, duration: 26.673979s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:36.306789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:36.307212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:36.308036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348687790188201:5803];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-17T18:20:36.308435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 1897, MsgBus: 2459 2025-04-17T18:20:52.647729Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348791792557658:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:52.647867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f3c/r3tmp/tmpk6y33x/pdisk_1.dat 2025-04-17T18:20:53.240218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:53.240355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:53.242778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1897, node 1 2025-04-17T18:20:53.264030Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:53.378628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:53.378647Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-04-17T18:20:53.378652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:53.378758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2459 TClient is connected to server localhost:2459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:54.104949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:54.119395Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:54.136414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:54.301916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:54.506915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:54.597608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:56.420333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348808972428538:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:56.420446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:56.732060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:56.810094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:56.835816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:56.882423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:56.945869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.015504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.093435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.172590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348813267396503:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.172800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.173544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348813267396508:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.177461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:57.189363Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:20:57.189981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348813267396512:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:57.277441Z node 1 :TX_PROXY ERROR: Actor# [1:7494348813267396567:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:57.642306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348791792557658:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:57.642380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:58.425371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.458539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.525911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.553964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.622909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.664948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH15 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-04-17T18:17:13.637556Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:13.709729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:13.728520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:13.728759Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:13.735774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:13.735986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:13.736208Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:13.736308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:13.736444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:13.736604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:13.736726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:13.736844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:13.736986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:13.737100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.737225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:13.737388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:13.758577Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:13.758683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:13.758735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:13.758899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:13.759038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:13.759115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:13.759184Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:13.759276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:13.759340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:13.759380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:13.759408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:13.759569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:13.759630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:13.759667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:13.759701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:13.759783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:13.759829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:13.759867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:13.759895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:13.759970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:13.760002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:13.760026Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:13.760064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:13.760100Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:13.760126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:13.760450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-17T18:17:13.760512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-04-17T18:17:13.760581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-17T18:17:13.760645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-17T18:17:13.760766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:13.760806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:13.760832Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:13.760960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:13.760996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.761023Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:13.761183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:13.761238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:13.761267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:13.761435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:13.761472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:13.761496Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:13.761603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:13.761638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:13.761678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... d:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62
;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;
blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:
[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=1744913834683;tx_id=10;);schema_version:1;level:0;column_size:2117960;index_size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=1744913834683;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-17T18:21:00.022657Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:10775:12402];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T18:21:00.024739Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10775:12402];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupSelf [GOOD] Test command err: Trying to start YDB, gRPC: 63153, MsgBus: 65249 2025-04-17T18:20:53.326694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348792072564562:2257];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:53.327000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f38/r3tmp/tmpe1Ko7I/pdisk_1.dat 2025-04-17T18:20:53.845367Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:53.848528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:53.848625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:53.852684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63153, node 1 2025-04-17T18:20:54.018640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:54.018667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:54.018673Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-17T18:20:54.018811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65249 TClient is connected to server localhost:65249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:54.649202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:54.662205Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:54.672739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:54.828508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:55.005321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:55.070109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:56.767329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348804957468028:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:56.767446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.172426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.250495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.295534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.333452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.414017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.505030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.579701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.649349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348809252436000:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.649414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.649641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348809252436005:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.652751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:57.662444Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:20:57.662843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348809252436007:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:57.742988Z node 1 :TX_PROXY ERROR: Actor# [1:7494348809252436063:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:58.313867Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348792072564562:2257];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:58.313921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:58.779988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.819334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.867491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:29: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 28582, MsgBus: 6331 2025-04-17T18:20:53.962147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348795035741732:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:53.962205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f34/r3tmp/tmpEz1eME/pdisk_1.dat 2025-04-17T18:20:54.453817Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28582, node 1 2025-04-17T18:20:54.488133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:54.488233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:54.503023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:54.657468Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:54.657493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:54.657500Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:54.657614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6331 TClient is connected to server localhost:6331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:55.575669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:55.601906Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:55.620655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:20:55.773259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:55.970286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:56.045217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:57.716622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348812215612459:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.716706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:58.031510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.065527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.097704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.168438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.206477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.276659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.317579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.375397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348816510580424:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:58.375487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:58.375715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348816510580429:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:58.379397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:20:58.392808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348816510580431:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:20:58.466094Z node 1 :TX_PROXY ERROR: Actor# [1:7494348816510580486:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:58.963562Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348795035741732:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:58.963662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:59.519795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.595994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.625533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.655047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.694567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.740682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD] Test command err: Trying to start YDB, gRPC: 1684, MsgBus: 7000 2025-04-17T18:19:37.854235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348467149835621:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:37.854680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f8a/r3tmp/tmp2Oekjg/pdisk_1.dat 2025-04-17T18:19:38.231185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:38.231266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:38.280117Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:38.289818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1684, node 1 2025-04-17T18:19:38.349387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T18:19:38.349406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:38.349421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:38.349546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7000 TClient is connected to server localhost:7000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:38.876013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:38.892870Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:40.795075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348480034738088:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:40.795173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:40.795253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348480034738096:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:40.800142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:40.809606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348480034738102:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:40.878980Z node 1 :TX_PROXY ERROR: Actor# [1:7494348480034738153:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:41.195633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:41.404873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:41.405067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:41.405336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:41.405457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:41.405859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:41.405996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:41.406102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:41.406234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:41.406369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:41.406495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:41.406621Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:41.406722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348484329705733:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:41.431808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:41.431865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:41.432077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:41.432195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:41.432328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:41.432431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:41.432535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:41.432628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:41.432737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:41.432846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:41.432951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:41.433090Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7494348484329705723:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:41.436811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348484329705737:2361];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:41.436858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348484329705737:2361];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.772209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.773561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.777572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.780076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.783476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.785671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.787434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.791572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.792642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.797414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.797893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.803391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.803446Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.808204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.808801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.813488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.813584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.818611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.818985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.823926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.824413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.829491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.830950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.835321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.836480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.840695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.845966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.849895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.851444Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.855161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.856551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.860611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.865372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.865560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.869663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.873030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.875813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.877709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.881663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.883309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.888088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.893405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.894369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.903639Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:42.911008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:43.088564Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fnspt0av22fn06sde6vzw", SessionId: ydb://session/3?node_id=1&id=ZTM1ODQ5YTQtYjRkNmY0NTYtYWI2ODg4MGUtNTg4OGViMTg=, Slow query, duration: 27.125649s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:43.339827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:43.340241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:43.340779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494348690488185534:9716];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:20:43.341137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCDS96+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92-ColumnStore >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore >> KqpJoin::RightSemiJoin_ComplexKey >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup >> KqpJoin::JoinAggregate >> KqpJoinOrder::CanonizedJoinOrderTPCH18 >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24002, MsgBus: 26302 2025-04-17T18:19:38.064478Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348470528183787:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:38.064628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f86/r3tmp/tmpE0hbIX/pdisk_1.dat 2025-04-17T18:19:38.457270Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24002, node 1 2025-04-17T18:19:38.463413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-17T18:19:38.463504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:38.469398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:38.531845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:38.531866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:38.531876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:38.531987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26302 TClient is connected to server localhost:26302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:39.068634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:39.084469Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:40.940385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348479118119029:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:40.940409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348479118119021:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:40.940559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:40.943979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:40.956090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348479118119035:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:41.031916Z node 1 :TX_PROXY ERROR: Actor# [1:7494348483413086382:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:41.329758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:41.522128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:41.522128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:41.522374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:41.522632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:41.522765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:41.522879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:41.522989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:41.523012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:41.523140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:41.523167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:41.523246Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:41.523269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:41.523394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:41.523469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:41.523491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:41.523568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:41.523605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:41.523669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:41.523723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348483413086657:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:41.523782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:41.523877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:41.523961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:41.524055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:41.524146Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037892;self_id=[1:7494348483413086724:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:41.554046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348483413086645:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:41.554049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348483413086852:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.298666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.303435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.306564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.309842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.313590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.316457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.320903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.323138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.328307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.329313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.335128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.335642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.341439Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.341506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.347399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.348576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.353789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.355415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.360413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.361967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.367431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.370003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.374236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.374526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.381516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.382232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.389148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.390130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.396418Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.396934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.403004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.404540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.409890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.412007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.416621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.418844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.423279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.426165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.429536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.430530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.449170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.452464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.455359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.459017Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.460675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:44.580900Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fntkn3hv8zkyt2se8jrm4", SessionId: ydb://session/3?node_id=1&id=NDEwOTQzOWUtMzI0ZjQ3MmUtNGZiMjJhNjAtODFjY2I0YjE=, Slow query, duration: 27.694734s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:45.049459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:45.049926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:45.050765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348642326912246:7619];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-17T18:20:45.051213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 30548, MsgBus: 10934 2025-04-17T18:19:20.603041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348395677518780:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:20.603167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000fab/r3tmp/tmpJuDJ8d/pdisk_1.dat 2025-04-17T18:19:20.886967Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30548, node 1 2025-04-17T18:19:20.936494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:20.936514Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:20.936520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:20.936618Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration 2025-04-17T18:19:20.969658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:20.969783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:20.971596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10934 TClient is connected to server localhost:10934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:21.368716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:23.052617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348408562421341:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.052617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348408562421333:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.052682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:23.056219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:23.065113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348408562421347:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:23.143703Z node 1 :TX_PROXY ERROR: Actor# [1:7494348408562421398:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:23.375367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:23.566056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:23.566058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:23.566239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:23.566453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:23.566525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:23.566613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:23.566649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:23.566725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:23.566760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:23.566844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:23.566862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-17T18:19:23.566984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:23.567002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:23.567094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:23.567110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:23.567205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:23.567212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:23.567368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:23.567379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:23.567517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:23.567548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348408562421713:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:23.567657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:23.567777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:23.567881Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494348408562421664:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:23.597989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348408562421676:2354];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:23.598052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348408562421676:2354];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:23.598292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;sel ... 6710714; 2025-04-17T18:20:22.861654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.870734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.874276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.879321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.883047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.888028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.891204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.897659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.901990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.903027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.907844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.916079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.917164Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.920910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.924827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.931552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.934506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.941188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.945574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.954839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.956886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.961793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.966151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.976362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.977892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.986193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.990887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:22.995789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:20:22.996830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.006279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.010835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.015364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.018393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.028474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.032193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.041283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.048049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:23.210405Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fn5m8d887q8vb4mvz2j4w", SessionId: ydb://session/3?node_id=1&id=ZWVlM2RhMTAtZmNiODk1OGYtMjQ3YmRhODMtNDY2MTM2MGQ=, Slow query, duration: 27.809130s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:23.716478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:23.716824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:23.717203Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;self_id=[1:7494348588951093926:9086];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:20:23.717488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:53.824096Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fpezjdqgp87tq0p7k4k7j", SessionId: ydb://session/3?node_id=1&id=ZWVlM2RhMTAtZmNiODk1OGYtMjQ3YmRhODMtNDY2MTM2MGQ=, Slow query, duration: 16.077103s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-04-17T18:17:15.819880Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:15.896586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:15.912490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:15.912811Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:15.918613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:15.918753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:15.918930Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:15.919012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:15.919094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:15.919157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:15.919219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:15.919287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:15.919357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:15.919425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:15.919512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:15.919626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:15.939380Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:15.939481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:15.939521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:15.939638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:15.939786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:15.939847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:15.939900Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:15.939970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:15.940023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:15.940058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:15.940077Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:15.940237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:15.940280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:15.940307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:15.940336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:15.940417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:15.940457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:15.940493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:15.940530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:15.940604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:15.940641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:15.940666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:15.940705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:15.940741Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:15.940765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:15.941083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-17T18:17:15.941141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=22; 2025-04-17T18:17:15.941200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-17T18:17:15.941270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-17T18:17:15.941406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:15.941444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:15.941467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:15.941605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:15.941632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:15.941672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:15.941779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:15.941805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:15.941821Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:15.941939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:15.941964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:15.941983Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:15.942091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:15.942120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:15.942151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... d:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62
;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;
blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:
[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=1744913836872;tx_id=10;);schema_version:1;level:0;column_size:2117960;index_size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=1744913836872;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-17T18:21:06.134700Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:10775:12402];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T18:21:06.136599Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10775:12402];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] Test command err: Trying to start YDB, gRPC: 19707, MsgBus: 11705 2025-04-17T18:19:44.830792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348496456460556:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:44.831397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f80/r3tmp/tmpiL69pz/pdisk_1.dat 2025-04-17T18:19:45.403635Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:19:45.406794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:45.406903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:45.409929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19707, node 1 2025-04-17T18:19:45.534450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:45.534477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:19:45.534499Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-17T18:19:45.534645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11705 TClient is connected to server localhost:11705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:46.159856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:47.993992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348509341363110:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:47.994072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348509341363099:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:47.994218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:47.998028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:48.010678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348509341363113:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:19:48.097245Z node 1 :TX_PROXY ERROR: Actor# [1:7494348513636330460:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:48.381132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:48.620493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:48.620733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:48.620884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:48.620929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:48.621019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:48.621090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:48.621127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:48.621184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:48.621216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:48.621288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:48.621352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-17T18:19:48.621414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:48.621451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:48.621524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:48.621576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:48.621637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:48.621703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:48.621752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:48.621800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:48.621854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:48.621917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:48.621993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:48.622036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348513636330748:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:48.622089Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7494348513636330722:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:48.659000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348513636330785:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:48.659060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348513636330785:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:48.659261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;sel ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.944822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.949772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.950649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.958159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.958479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.966866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.967054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.972040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.977394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.978559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.982108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.984720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:20:51.986317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.990035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.994491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:51.994978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.000018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.000098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.005664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.005899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.010369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.012113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.017057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.017789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.023053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.023716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.030311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.030559Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.036259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.036959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.042573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.043165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.048998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.051473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.055213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.060103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.060806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.066854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.067522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.073509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.075308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.079195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.086433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.086450Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.226089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:52.250384Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fp1r3et2ckg1g23rfk885", SessionId: ydb://session/3?node_id=1&id=MzQxZmM0NTItNmEzNGY1OTktOGUyZWI5ZDEtMTk3ZTZhYzA=, Slow query, duration: 28.054078s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:52.567222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:52.567260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:52.567872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494348758449522661:10777];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:20:52.568301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH20 >> KqpJoin::JoinAggregateSingleRow >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14252, MsgBus: 23247 2025-04-17T18:21:04.425907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348842781846748:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:04.430257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f24/r3tmp/tmpyrNv3V/pdisk_1.dat 2025-04-17T18:21:04.954580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:04.954680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:04.961721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-04-17T18:21:05.002798Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14252, node 1 2025-04-17T18:21:05.204057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:05.204080Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:05.204108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:05.204212Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23247 TClient is connected to server localhost:23247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:05.982010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.034640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:06.058359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.240334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.465572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.548456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:08.276330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348859961717691:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.276450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.589538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.622092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.663783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.706279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.777436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.813388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.851273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.908557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348859961718358:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.908627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.908848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348859961718363:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.912582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:08.928366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348859961718365:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:09.007145Z node 1 :TX_PROXY ERROR: Actor# [1:7494348864256685717:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:09.432099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348842781846748:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:09.432193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:10.137545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.182589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.227910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.262829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.301562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.329944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] >> KqpJoin::JoinAggregate [GOOD] >> KqpJoinOrder::TPCDS92+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] Test command err: Trying to start YDB, gRPC: 7304, MsgBus: 3057 2025-04-17T18:21:04.446497Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348843073149869:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:04.446575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f28/r3tmp/tmpV1Xd1f/pdisk_1.dat 2025-04-17T18:21:05.146782Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:05.164414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:05.164517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:05.171273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7304, node 1 2025-04-17T18:21:05.514632Z node 
1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:05.514650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:05.514656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:05.514745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3057 TClient is connected to server localhost:3057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:06.331997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.346856Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:06.356481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.548571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.743226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.859555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:08.739215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348860253020695:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.739314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.047441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.089828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.129567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.164903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.220526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.259330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.295383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.373203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348864547988649:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.373301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.373661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348864547988654:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.378713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:09.395582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348864547988656:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:09.413860Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348843073149869:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:09.413954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:09.461698Z node 1 :TX_PROXY ERROR: Actor# [1:7494348864547988711:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:10.486976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.528482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.576241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.617187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.652525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:56: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregate [GOOD] Test command err: Trying to start YDB, gRPC: 4907, MsgBus: 17833 2025-04-17T18:21:04.801595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348840757614897:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:04.830500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f23/r3tmp/tmpwVFkV6/pdisk_1.dat 2025-04-17T18:21:05.343605Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:05.366107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:05.366249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:05.369616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4907, node 1 2025-04-17T18:21:05.582721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:05.582739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:05.582746Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:05.586218Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17833 TClient is connected to server localhost:17833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:06.357041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.370982Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:06.382461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
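Note: the KQP_SLOW_LOG record earlier in this output (duration 28.054078s, TraceId 01js2fp1r3et2ckg1g23rfk885) embeds its query text with escaped newlines. Unescaped, the logged YQL DDL reads as follows; this is the same text verbatim, only reflowed for readability:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The 28s duration is consistent with the preceding TX_COLUMNSHARD records: each CREATE TABLE requests at least 240 partitions (STORE = COLUMN), so the transaction waits for finished_tx acknowledgements from several hundred column-shard tablets under the asan build.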
2025-04-17T18:21:06.631749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.882065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.986033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:08.826987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348857937485718:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.827108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.129546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.159118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.187079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.259097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.295123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.393359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.434314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.512459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348862232453681:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.512561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.515214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348862232453686:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:09.519839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:09.541141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348862232453689:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:09.641945Z node 1 :TX_PROXY ERROR: Actor# [1:7494348862232453747:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:09.787680Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348840757614897:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:09.787793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:10.753383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.795791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.826422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 16916, MsgBus: 16717 2025-04-17T18:21:08.576787Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348859127894036:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:08.577893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f1e/r3tmp/tmpqyNXNa/pdisk_1.dat 2025-04-17T18:21:09.076819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:09.079782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:09.079872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:09.081485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16916, node 1 2025-04-17T18:21:09.190882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:09.190914Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:09.190921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:09.191044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16717 TClient is connected to server localhost:16717 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:10.043080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:10.067066Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:10.081858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:10.248361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:10.444688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:21:10.526664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.057730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348876307764767:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:12.057826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:12.380689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.457586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.496460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.534532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.571607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.638015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.723091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:12.812267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348876307765439:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:12.812335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:12.812895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348876307765444:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:12.816230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:12.829723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348876307765446:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:12.928766Z node 1 :TX_PROXY ERROR: Actor# [1:7494348876307765502:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:13.578265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348859127894036:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:13.615130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:14.097116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:14.151029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:14.192647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:14.270578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:14.303570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:14.371194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftSemi >> KqpJoinOrder::CanonizedJoinOrderTPCH8 >> KqpJoin::JoinAggregateSingleRow [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregateSingleRow [GOOD] Test command err: Trying to start YDB, gRPC: 12360, MsgBus: 29313 2025-04-17T18:21:11.150754Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348872302316718:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:11.163621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f18/r3tmp/tmpp8HEod/pdisk_1.dat 2025-04-17T18:21:11.691139Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:11.696614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:11.696704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:11.697807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12360, node 1 2025-04-17T18:21:11.881900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:11.881925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:11.881938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:11.882048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29313 TClient is connected to server localhost:29313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:12.539620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:12.578377Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:12.594069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:12.731253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:12.900499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:12.978977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:15.004097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348885187220228:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:21:15.004198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:21:15.404436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:21:15.459826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:21:15.535260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:21:15.578806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:21:15.615584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:21:15.671933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:21:15.732930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:21:15.823647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348889482188186:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:21:15.823718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:21:15.824022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348889482188191:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:21:15.827892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T18:21:15.843370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348889482188193:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:21:15.920042Z node 1 :TX_PROXY ERROR: Actor# [1:7494348889482188250:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:21:16.126289Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348872302316718:2205];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:21:16.126359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:21:17.102881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:21:17.137405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-17T18:21:17.172161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD]
Test command err:
2025-04-17T18:17:09.420756Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-17T18:17:09.499137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-17T18:17:09.521269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-17T18:17:09.521558Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-17T18:17:09.528660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-17T18:17:09.528881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-17T18:17:09.529081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-17T18:17:09.529172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-17T18:17:09.529248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-17T18:17:09.529323Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:09.529410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:09.529481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:09.529556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:09.529621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:09.529685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:09.529756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:09.552192Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:09.552331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:09.552386Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:09.552549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:09.552689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:09.552759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:09.552802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:09.552899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:09.552994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:09.553040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:09.553067Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:09.553253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:09.553315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:09.553360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:09.553405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:09.553496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:09.553555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:09.553598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:09.553644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:09.553747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:09.553786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:09.553815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:09.553857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:09.553894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:09.553919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:09.554340Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=68; 2025-04-17T18:17:09.554436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-17T18:17:09.554505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-17T18:17:09.554578Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T18:17:09.554775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:09.554835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:09.554871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:09.555093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:09.555143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:09.555170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:09.555302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:09.555344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:09.555371Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:09.555586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:09.555629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:09.555655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:09.555806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:09.555853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:09.555897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;
chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;c
hunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913830469;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;met
a:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913830469;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););;
2025-04-17T18:21:19.274837Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11152:12779];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2;
2025-04-17T18:21:19.277036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11152:12779];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpFlipJoin::RightSemi_2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD]
Test command err:
Trying to start YDB, gRPC: 11383, MsgBus: 16797
2025-04-17T18:20:38.014662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348730486919793:2078];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:20:38.014728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f59/r3tmp/tmpGELHJ5/pdisk_1.dat
2025-04-17T18:20:38.537116Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:20:38.541622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:20:38.541708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:20:38.544752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11383, node 1
2025-04-17T18:20:38.641889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:20:38.641909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:20:38.641916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:20:38.642013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16797
TClient is connected to server localhost:16797
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T18:20:39.255778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T18:20:39.297120Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T18:20:41.328328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348743371822319:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:41.328331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348743371822327:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:41.328448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T18:20:41.331411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-17T18:20:41.339638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348743371822333:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-17T18:20:41.397768Z node 1 :TX_PROXY ERROR: Actor# [1:7494348743371822384:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:20:41.718022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-17T18:20:41.830858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-17T18:20:41.913540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T18:20:41.957032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.011509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.169368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.241287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.311635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.341477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.415448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.447254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.486764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-17T18:20:42.519040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.018275Z node 1 :METADATA_PROVIDER ERROR:
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348730486919793:2078];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:20:43.018341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:20:43.089323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480
2025-04-17T18:20:43.167702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.203052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.272818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.305823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.339535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.414597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.454861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.493260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.525732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.554515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.597221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.671030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.745054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-04-17T18:20:43.777154Z node 1 :FLAT_TX_SCHEMESHARD WARN:
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:20:43.810876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:43.856806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.097525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.103070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.103342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.109133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.114151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.116388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.123975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.125254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.128775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.129651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.132329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.135524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.137311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.141740Z node 
1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038427;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.144519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.146789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.151699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.151813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.157166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.157376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.165758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.166371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.170706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.170706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.175933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.175990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.181498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.181716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.186454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:21:12.186454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.191467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.192240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.196989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.197123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.202455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.207439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.208187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.212643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.212729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.217415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.221960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.226545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.228009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.231351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:12.257798Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:21:12.331820Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fppax2ae4bh28s55qj6ek", SessionId: ydb://session/3?node_id=1&id=ODliNjViZDUtZGM5ZjAyMi01NjliZWZkZC0yMDRhNGVkYg==, Slow query, duration: 27.054147s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:21:12.616021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:21:12.616411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:21:12.617747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494348777731569480:3324];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331;
2025-04-17T18:21:12.618201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore
>> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD]
>> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD]
>> KqpJoinOrder::TPCH12_100 [GOOD]
>> KqpIndexLookupJoin::Left+StreamLookup [GOOD]
>> KqpIndexLookupJoin::Left-StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142]
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142]
2025-04-17T18:18:20.386012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:20.386086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:20.386120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:20.386154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:20.386209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:20.386248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:20.386306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:20.386393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:20.386694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:20.466747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:20.466812Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:20.478572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:20.479228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:20.479399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:20.489345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:20.489883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:20.490500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:20.490757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:20.493693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:20.494908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:20.494948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:20.495106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:20.495139Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:20.495166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:20.495292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:20.501177Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:20.591123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:20.591353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:20.591558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:20.591780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:20.591831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:20.593908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:20.594049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:20.594351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:20.594434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:20.594499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:20.594535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:20.596787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:20.596863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:20.596901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:20.599513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:20.599586Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:20.599627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:20.599700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:20.603467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:20.607133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:20.607362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:20.608340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:20.608484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:20.608535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:20.608799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:20.608864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:20.609046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:20.609119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:20.611592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:20.611634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:20.611810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:20.611848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:20.612207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:20.612263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-04-17T18:18:20.612371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:20.612401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:20.612442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:20.612483Z no ... CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:21:23.547368Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:21:23.547689Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 358us result status StatusSuccess 2025-04-17T18:21:23.548718Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:24.811930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:24.812026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:24.812077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, 
StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:24.812122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:24.812169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:24.812220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:24.812297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:24.812418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:24.812870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:24.874318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:24.874368Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:24.883136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:24.883741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:24.883909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:24.894357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:24.894956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:24.895538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:24.895738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:24.898532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.899755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:24.899806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:24.899992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:24.900028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:24.900065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:24.900183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: 
[1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:24.906398Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:25.035430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:25.035670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.035854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:25.036101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:25.036156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.038084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:25.038238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:25.038434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.038502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:25.038546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:25.038578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:25.040168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.040217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:25.040246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:25.041596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.041645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.041684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:25.041737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:25.045190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:25.046665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:25.046844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:25.047686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:25.047808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:25.047847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:25.048076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:25.048140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:25.048287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:25.048354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:25.050087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:25.050125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:25.050286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:25.050326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:25.050697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:25.050749Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:25.050837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:25.050875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:25.050908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:25.050951Z no ... 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:21:23.392686Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:21:23.392983Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 352us result status StatusSuccess 2025-04-17T18:21:23.394644Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:21:23.406448Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:797:2614] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:21:23.406564Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:717:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:21:23.406737Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:797:2614] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914083366954 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744914083366954 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744914083366954 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:21:23.409764Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:797:2614] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:21:23.409893Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:717:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpNewEngine::KeyColumnOrder >> KqpIndexLookupJoin::LeftSemi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD] Test command err: Trying to start YDB, gRPC: 24255, MsgBus: 19935 2025-04-17T18:20:00.136774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348565613250163:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:00.136808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f79/r3tmp/tmpvkY1K4/pdisk_1.dat 2025-04-17T18:20:00.669646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:00.673494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:00.673581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:00.676387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24255, node 1 2025-04-17T18:20:00.817315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:00.817330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:00.817336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:00.817423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19935 TClient is connected to server localhost:19935 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:01.544826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:01.567021Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:03.489068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348578498152717:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:03.489177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:03.492146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348578498152729:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:03.498092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:03.514403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348578498152731:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:03.584041Z node 1 :TX_PROXY ERROR: Actor# [1:7494348578498152782:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:03.950356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.185692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:04.185901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:04.186162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:04.186713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:04.186829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:04.186987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:04.187097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:04.187207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:04.187325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:04.187444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:04.187567Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:04.187709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348582793120331:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:04.188034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:04.188074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:04.188247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:04.188351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:04.188445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:04.188538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:04.188635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:04.188761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:04.188863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:04.188974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:04.189078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:04.189168Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348582793120325:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:04.228231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348582793120339:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:04.228304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348582793120339:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.738193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.739769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.744645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.752516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.753985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.761025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.766687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.766738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.771410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.772137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.776425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.777186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.783190Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.784551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.788727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.788729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.794950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.795235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.800452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.800682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.804376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.806068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.809370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.811287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.816054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.816066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.820658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.820768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.825760Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.825963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.830327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.831195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.835107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.836423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.840926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.847345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.847736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.852219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.852298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.856832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.857067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.863156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.868361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.900907Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.943643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:08.966597Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fpj198wcjv3br3f3kgmtt", SessionId: ydb://session/3?node_id=1&id=M2NmNTVkMDMtOWMwZjcyM2UtOGFiZWVjNmEtMzFlNmNiZg==, Slow query, duration: 28.093072s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:09.281530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:09.281580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:09.282122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348746001913947:7828];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-17T18:21:09.282711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> KqpFlipJoin::Inner_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemi [GOOD] Test command err: Trying to start YDB, gRPC: 30201, MsgBus: 61661 2025-04-17T18:21:18.210507Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348901394819307:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:18.210638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f12/r3tmp/tmp0renvs/pdisk_1.dat 2025-04-17T18:21:18.799479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:18.804461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:18.804579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:18.806031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30201, node 1 
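The KQP_SLOW_LOG entry above flags a 28.09 s DDL batch (status STATUS_CODE_UNSPECIFIED, user UNAUTHENTICATED). Unescaped for readability, the query text embedded in that entry is:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Each table is a column store pre-split into at least 240 partitions, which is why the same finished_tx warning above repeats once per column shard tablet for the one tx_id.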
2025-04-17T18:21:18.914723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:18.914751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:18.914763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:18.914889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61661 TClient is connected to server localhost:61661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:19.575439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:19.588944Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:19.594402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:21:19.744484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:19.938366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:20.020178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:21.719867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348914279722917:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:21.719962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:22.092230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:22.126739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:22.160683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:22.211034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:22.240949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:22.288418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:22.345990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:22.447341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348918574690882:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:22.447455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:22.447687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348918574690888:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:22.451276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:22.471739Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:21:22.472413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348918574690890:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:22.557234Z node 1 :TX_PROXY ERROR: Actor# [1:7494348918574690947:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:23.210309Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348901394819307:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:23.255963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:23.593457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.635833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.712376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.799619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.844864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.924767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23080, MsgBus: 19783 2025-04-17T18:20:14.902010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348627897023715:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:14.906366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f68/r3tmp/tmp26y77Y/pdisk_1.dat 2025-04-17T18:20:15.487670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:15.487800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:15.499176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:15.506330Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23080, node 1 2025-04-17T18:20:15.738747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
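The bulk of these test logs is the per-shard finished_tx warning from tx_controller.cpp:214, emitted once by every column shard that acknowledges a DDL transaction (the tables above request at least 240 partitions each). A minimal sketch for tallying those entries offline, assuming the run's log has been saved locally as ya_log.txt (a hypothetical file name) and relying only on the semicolon-separated key=value entry format visible in the lines above:

    import re
    from collections import Counter

    # One log entry is a whitespace-free run of "key=value;" pairs, e.g.
    # tablet_id=...;tx_state=TTxProgressTx::Execute;...;event=finished_tx;tx_id=...;
    # \S*? keeps each match inside a single entry (entries are space-separated).
    ENTRY = re.compile(r"tablet_id=(\d+);\S*?event=finished_tx;tx_id=(\d+);")

    def tally(path):
        """Count finished_tx acknowledgements per tx_id and the distinct tablets involved."""
        per_tx = Counter()
        tablets = {}
        with open(path, encoding="utf-8", errors="replace") as fh:
            for line in fh:
                for tablet_id, tx_id in ENTRY.findall(line):
                    per_tx[tx_id] += 1
                    tablets.setdefault(tx_id, set()).add(tablet_id)
        return per_tx, tablets

    if __name__ == "__main__":
        counts, shards = tally("ya_log.txt")
        for tx_id, n in counts.most_common():
            print(f"tx {tx_id}: {n} finished_tx events from {len(shards[tx_id])} tablets")

In the excerpts here, tx_id 281474976710714 dominates, matching the CREATE TABLE batch shown earlier.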
2025-04-17T18:20:15.738767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:15.738774Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:15.738913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19783 TClient is connected to server localhost:19783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:16.408141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:16.437405Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:18.744257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348645076893552:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:18.744376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:18.744714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348645076893564:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:18.749686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:18.762016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348645076893566:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:18.825097Z node 1 :TX_PROXY ERROR: Actor# [1:7494348645076893617:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:19.115879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.258257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.299142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.331163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.366173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.529505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.571348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.614523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.659823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.708963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.796517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.851531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.883149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:19.901006Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348627897023715:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:19.901066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:20.703383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:20:20.748122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:20.821867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:20.860227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:20.934746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:20:20.978332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.008112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.037109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.076565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.113255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.146597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.176733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.211168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.250029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.288507Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.324503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:21.364092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.385220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.390325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.390325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.395506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.395506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.400670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.400674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.405941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.405941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.411320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.411320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.416567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.416567Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.421884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.421884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.427488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.427487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.433052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.433052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.438261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.438261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.443580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.443580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.448725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.448725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.454516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.454516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.460085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.460085Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.465451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.465452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.470928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.470929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.476612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.478897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.481828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.484966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.487242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.490672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.492414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.496076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.497684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.501350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.502771Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.506925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.507903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:20:47.621378Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fp09j6scts1mmp07h63jg", SessionId: ydb://session/3?node_id=1&id=MWQ0YzI1MWYtNDJmMDcwZWQtOTU2YTNkZTEtMTQ4ZGM5YTQ=, Slow query, duration: 24.914653s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:48.048740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:48.048753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:20:48.049273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH12_100 [GOOD] Test command err: Trying to start YDB, gRPC: 9784, MsgBus: 14715 2025-04-17T18:20:00.723270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348564718803164:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:00.723593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f73/r3tmp/tmpGZGL7Z/pdisk_1.dat 2025-04-17T18:20:01.221141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:01.221269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:01.226417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:01.226681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9784, node 1 2025-04-17T18:20:01.371266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:01.371308Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-04-17T18:20:01.371319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:01.371447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14715 TClient is connected to server localhost:14715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:02.033073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:02.046809Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:04.054302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348581898672886:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.054410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348581898672875:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.054570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.058953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:04.074090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:20:04.074393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348581898672889:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:04.166680Z node 1 :TX_PROXY ERROR: Actor# [1:7494348581898672940:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:04.484885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:04.737196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:04.737493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:04.737831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:04.738045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:04.738226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:04.738421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:04.738567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:04.738748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:04.738948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:04.739164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:04.739331Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:04.739488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348581898673198:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:04.743900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:04.744019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:04.744249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:04.744397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:04.744524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:04.744618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:04.744776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:04.744940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:04.745180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:04.745320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:04.748460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:04.748725Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494348581898673235:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:04.791906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348581898673214:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:04.791972Z node ... tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.144344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.148600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.150979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.153734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.156062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.159501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.161070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.164283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.166158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.169493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.170820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.174251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.175458Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.178718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.179930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.183381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.184345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.188331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.189168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.193363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.194106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.197927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.198644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.202453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.202887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.207049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.207227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.211814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.211838Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.216521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.216525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.221639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.221639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.226762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.227096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.232065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.232065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.237236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.237237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.242631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.244175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.247963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.249459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.253527Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.255198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.321401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.394455Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fpjee52kn3tnabbnj5v51", SessionId: ydb://session/3?node_id=1&id=ODhmZTZjOTgtNzAwMzA4MzQtMzFlMzQ0NzEtNTgzNDliZGM=, Slow query, duration: 29.100066s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:10.681154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:10.681152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:10.682019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpFlipJoin::RightSemi_1 >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD] Test command err: Trying to start YDB, gRPC: 20427, MsgBus: 2108 2025-04-17T18:20:04.200634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348582367266761:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:04.200685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f6d/r3tmp/tmp0UHtKN/pdisk_1.dat 2025-04-17T18:20:04.615841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:04.638706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:04.638796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:04.640340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 20427, node 1 2025-04-17T18:20:04.745041Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:04.745059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:04.745065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:04.745156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2108 TClient is connected to server localhost:2108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:05.464247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:05.486720Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:07.444364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348595252169231:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:07.444366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348595252169222:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:07.444447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:07.447613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:07.459878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348595252169236:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:07.523041Z node 1 :TX_PROXY ERROR: Actor# [1:7494348595252169287:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:07.903438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:08.164075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:08.164226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:08.164446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:08.164537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:08.164620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:08.164713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:08.164798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:08.164893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:08.165013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:08.165104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:08.165211Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:08.165302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348599547136834:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:08.200539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:08.200602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:08.200826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:08.200932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:08.201022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:08.201112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:08.201201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:08.201296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:08.201393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:08.201493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:08.201588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:08.201676Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7494348599547136840:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:08.202009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348599547136748:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:08.202090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348599547136748:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstra ... ontroller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.676391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.677699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.681832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.682712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.687068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.688045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.692513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.693746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.697002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.698811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.701917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.704188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.708018Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.709414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.712909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.714336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.718085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.719974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.724448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.725823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.729995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.737528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.738486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.743740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.744013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.749467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.749885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.756200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.756231Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.761888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.761930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.767693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.768806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.772231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.772805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.776180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.777096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.780463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.782845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.784415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.788206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.788412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.792230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.794223Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.796362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:10.927972Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fpnztfp3j8ndmkmw5wvzn", SessionId: ydb://session/3?node_id=1&id=NjU5ODllYmItNGU4MWNlLTk1OTZlODRjLTExYjM0MTg0, Slow query, duration: 26.005078s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:11.187909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:11.188315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:11.188817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494348805705614753:9490];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:21:11.189114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-04-17T18:17:18.947627Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:19.029510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:19.051510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:19.051794Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:19.059193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:19.059417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:19.059635Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:19.059760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:19.059867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:19.059970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:19.060079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:19.060190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:19.060301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:19.060398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:19.060506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:19.060609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:19.086334Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:19.086470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:19.086523Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:19.086682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:19.086822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:19.086899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:19.086939Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:19.087023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:19.087091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:19.087128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:19.087153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:19.087324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:19.087383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:19.087418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:19.087446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:19.087551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:19.087600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:19.087634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:19.087659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:19.087736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:19.087773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:19.087813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:19.087858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:19.087894Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:19.087918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:19.088268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-17T18:17:19.088336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-17T18:17:19.088411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-17T18:17:19.088483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T18:17:19.088652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:19.088701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:19.088736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:19.088904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:19.088944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:19.088969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:19.089097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:19.089137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:19.089166Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:19.089355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:19.089394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:19.089418Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:19.089539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:19.089573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:19.089610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:
[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_rang
e:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:
[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913839998;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=1744913839998;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-17T18:21:26.712234Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11157:12784];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T18:21:26.714146Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11157:12784];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpJoinOrder::TPCH9_100 [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup >> KqpJoin::FullOuterJoinNotNullJoinKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-04-17T18:17:34.378100Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:34.464783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:34.479933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:34.480139Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:34.486088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:34.486246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:34.486431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-04-17T18:17:34.486523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:34.486606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:34.486671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:34.486737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:34.486813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:34.486884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:34.486952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:34.487047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:34.487105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:34.504754Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:34.504936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:34.505006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:34.505168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:34.505351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:34.505431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:34.505473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:34.505561Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:34.505623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:34.505662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:34.505692Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:34.505882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:34.505955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:34.505994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:34.506025Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:34.506112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:34.506159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:34.506217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:34.506243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:34.506333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:34.506375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:34.506413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:34.506458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:34.506510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:34.506537Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:34.506908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-17T18:17:34.506996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-17T18:17:34.507091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:17:34.507175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-17T18:17:34.507374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:34.507426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:34.507482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:34.507661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:34.507704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:34.507732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:34.507870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:34.507912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:34.507943Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:34.508127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:34.508169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:34.508199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:34.508351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:34.508389Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:34.508433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... d:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9
;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chu
nk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:3
3;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=1744913855429;tx_id=10;);schema_version:1;level:0;column_size:2117960;index_size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=1744913855429;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-17T18:21:27.176463Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:10775:12402];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T18:21:27.178387Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10775:12402];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpIndexLookupJoin::MultiJoins >> KqpJoinOrder::CanonizedJoinOrderTPCH1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-04-17T18:17:21.624077Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:21.715469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:21.739893Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:21.740190Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:21.748598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:21.748810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:21.749046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:21.749171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:21.749300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:21.749417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:21.749530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:21.749660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:21.749802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:21.749906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:21.750024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:21.750197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:21.777501Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:21.777656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:21.777718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:21.777944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:21.778132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:21.778238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:21.778317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:21.778429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:21.778518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:21.778578Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:21.778619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:21.778795Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:21.778863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:21.778904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:21.778938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:21.779038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:21.779096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:21.779141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:21.779169Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:21.779253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:21.779293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:21.779326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:21.779374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:21.779416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:21.779444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:21.779829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-04-17T18:17:21.779912Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-17T18:17:21.779987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-17T18:17:21.780105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=72; 2025-04-17T18:17:21.780277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:21.780338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:21.780382Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:21.780580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:21.780629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:21.780665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:21.780805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:21.780872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:21.780910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:21.781103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:21.781147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:21.781176Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:21.781292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:21.781334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:21.781380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:
9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7
;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913842683;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;met
a:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913842683;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-17T18:21:27.696838Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11069:12696];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T18:21:27.698984Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11069:12696];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3872, MsgBus: 29668 2025-04-17T18:20:45.618726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348761473228077:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:45.619108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f42/r3tmp/tmp48Oqw3/pdisk_1.dat 2025-04-17T18:20:46.093863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:46.093953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:46.100019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:46.132835Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3872, node 1 2025-04-17T18:20:46.310636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:46.310656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:46.310665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:46.310779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29668 TClient is connected to server localhost:29668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:47.137643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:49.093775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348778653097792:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.093875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.094234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348778653097804:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.098007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:49.106923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348778653097806:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:49.205840Z node 1 :TX_PROXY ERROR: Actor# [1:7494348778653097858:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:49.488738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.625988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.657602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.695228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.761960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.940351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.017070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.063057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.113885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.141655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.174961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.210310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.246757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.610297Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348761473228077:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:50.610379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:50.857962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:20:50.892586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:50.970796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.007673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.032908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.068250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.093865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.119962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.147857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.185499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.222264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.280268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.318578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.349714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.381972Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.417767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.444031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:20:51.471983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.056026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.057790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.061290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.064122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.067451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.070021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.073262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.077182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.079811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.083680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.087623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.089628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.094386Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.095203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.102197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.125943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.130527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.135628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.139299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.140910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.148804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.150347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.156052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.161333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.161584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.171334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.174761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.177039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.180496Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.186884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.191576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.203290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.205243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.210410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.213927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.215476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.218845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.220497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.223764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.225533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.229031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.234736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.239642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:20.245729Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:21:20.251174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:21:20.326449Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fpxkz99ndrnpdbbcb09yx", SessionId: ydb://session/3?node_id=1&id=NTYyNGUzMTctYjBlZGJhMzEtMmQzYTI3MmItNzg1MmE5NjM=, Slow query, duration: 27.590634s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:21:20.568175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:21:20.568575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:21:20.569705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348877437372240:6025];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331;
2025-04-17T18:21:20.570047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::CanonizedJoinOrderTPCH19
>> KqpJoinOrder::CanonizedJoinOrderTPCH7
>> KqpFlipJoin::RightSemi_2 [GOOD]
>> KqpFlipJoin::RightSemi_3
>> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup
>> KqpFlipJoin::RightOnly_1
>> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH9_100 [GOOD]
Test command err: Trying to start YDB, gRPC: 4552, MsgBus: 3583
2025-04-17T18:20:01.721187Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348571224589755:2059];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:20:01.721233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f72/r3tmp/tmp74hcFi/pdisk_1.dat
2025-04-17T18:20:02.180027Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 4552, node 1
2025-04-17T18:20:02.264513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:20:02.265718Z node 1 :HIVE WARN: HIVE#72057594037968897
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:02.283072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:02.310836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:02.310874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:02.310881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:02.310990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3583 TClient is connected to server localhost:3583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:02.941694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:02.966999Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:04.991418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348584109492316:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.991488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.991704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348584109492328:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:04.994271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:05.004622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348584109492330:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:05.065935Z node 1 :TX_PROXY ERROR: Actor# [1:7494348588404459677:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:05.396429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:05.652933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:05.653122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:05.653338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:05.653440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:05.653536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:05.653632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:05.653717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:05.653819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:05.653913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:05.654007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:05.654099Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:05.654207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348588404459939:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:05.665892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:05.665939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:05.666111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:05.666241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:05.666350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:05.666471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:05.666564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:05.666653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:05.666743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:05.666835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:05.666943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:05.667046Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7494348588404459918:2348];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:05.696190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348588404459949:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:05.696238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348588404459949:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.145733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.147770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.151720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.153902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.157588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.160520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.163071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.169306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.176135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.181513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.188712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.194310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.199587Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.204679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.210106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.211617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.215839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.216841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.223504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.228908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.232829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.236404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.238378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.242114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.246379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.251089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.256243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.259923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.262581Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.268265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.268703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.274351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.275755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.281313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.281974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.287569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.289658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.296073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.297813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.306445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.311632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.312586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.322110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:11.327346Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:21:11.401358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:21:11.512361Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fpkfqcaqazja0cz5f5yw5", SessionId: ydb://session/3?node_id=1&id=YzAwMTVhZjUtZTdjNGY1OWQtMmU2Zjk1MDAtNGFjYjlmNTE=, Slow query, duration: 29.152404s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:21:11.785605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:21:11.785976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:21:11.786724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348755908219865:7593];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392;
2025-04-17T18:21:11.787080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpIndexLookupJoin::Left-StreamLookup [GOOD]
>> KqpNewEngine::KeyColumnOrder [GOOD]
>> KqpNewEngine::KeyColumnOrder2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Left-StreamLookup [GOOD]
Test command err: Trying to start YDB, gRPC: 19248, MsgBus: 12894
2025-04-17T18:21:17.446818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348896629963481:2064];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:21:17.447422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f16/r3tmp/tmpZtlVku/pdisk_1.dat
2025-04-17T18:21:17.996905Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:21:18.000080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:21:18.000160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:21:18.003592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 19248, node 1 2025-04-17T18:21:18.235463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:18.235486Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:18.235493Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:18.235590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12894 TClient is connected to server localhost:12894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:18.919919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:18.946991Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:18.968960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:19.149478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:19.316348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:19.402309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:21.140608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348913809834431:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:21.140755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:21.496251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:21.540258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:21.575410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:21.644870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:21.680185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:21.725016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:21.798591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:21.887769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348913809835095:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:21.887909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:21.889602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348913809835100:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:21.894009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:21.910343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348913809835104:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:21.973733Z node 1 :TX_PROXY ERROR: Actor# [1:7494348913809835158:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:22.449228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348896629963481:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:22.449313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:23.051184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.087844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.127420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.164049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.198994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.230292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26698, MsgBus: 14043 2025-04-17T18:21:25.190582Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348931873810794:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:25.191067Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f16/r3tmp/tmpF53Ooy/pdisk_1.dat 2025-04-17T18:21:25.497030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:25.497106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:25.502529Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:25.504112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26698, node 2 2025-04-17T18:21:25.678734Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:25.678753Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:25.678760Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:25.678872Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14043 TClient is connected to server localhost:14043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:26.336611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:26.347193Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:26.357584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:26.448653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:26.604284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:26.683251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:28.766307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348944758714451:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:28.766396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:28.840361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.893440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.938717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.976030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.018328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.098657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.142673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.218924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348949053682405:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:29.219014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:29.219880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348949053682410:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:29.224123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:29.239275Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:21:29.239459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348949053682412:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T18:21:29.314019Z node 2 :TX_PROXY ERROR: Actor# [2:7494348949053682472:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:21:30.194393Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348931873810794:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:21:30.194472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:21:30.560218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-17T18:21:30.609462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-17T18:21:30.682879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-17T18:21:30.767779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-17T18:21:30.821924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-17T18:21:30.896314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::GeneralPrioritiesBug1
>> KqpFlipJoin::RightSemi_1 [GOOD]
>> KqpFlipJoin::RightOnly_3
>> KqpFlipJoin::Inner_1 [GOOD]
>> KqpFlipJoin::Inner_2
>> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1
>> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD]
>> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD]
>> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD]
>> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD]
Test command err: Trying to start YDB, gRPC: 6130, MsgBus: 26116
2025-04-17T18:21:29.619037Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348948563414050:2200];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:21:29.626015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f00/r3tmp/tmpz5puvP/pdisk_1.dat
2025-04-17T18:21:30.261377Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:21:30.281065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState:
Unknown -> Disconnected 2025-04-17T18:21:30.281159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:30.287976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6130, node 1 2025-04-17T18:21:30.472377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:30.472395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:30.472402Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:30.472497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26116 TClient is connected to server localhost:26116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:31.405462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.453024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.667110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.947080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.063256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:33.848303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348965743284886:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:33.848398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.119295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.158761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.191348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.243998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.305722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.350162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.407282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.510549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348970038252847:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.510647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.511246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348970038252852:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.516001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:34.531208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348970038252855:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:34.595101Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348948563414050:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:34.595151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:34.598610Z node 1 :TX_PROXY ERROR: Actor# [1:7494348970038252911:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:35.785514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.817053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::MultiJoins [GOOD] >> KqpFlipJoin::RightSemi_3 [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::LocksEffects >> KqpFlipJoin::RightOnly_1 [GOOD] >> KqpFlipJoin::RightOnly_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 14462, MsgBus: 14379 2025-04-17T18:20:54.674459Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348797472636705:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:54.678656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f30/r3tmp/tmpkFe8VL/pdisk_1.dat 2025-04-17T18:20:55.210449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:55.210533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:55.253747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:55.273162Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14462, node 1 2025-04-17T18:20:55.494681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:55.494701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:55.494707Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:55.494806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14379 TClient is connected to server localhost:14379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:56.367006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:56.401037Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:58.413382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348814652506419:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:58.413408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348814652506427:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:58.413478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:58.416772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:58.428192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348814652506433:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:58.520177Z node 1 :TX_PROXY ERROR: Actor# [1:7494348814652506484:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:58.834448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.926797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:58.993782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.067417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.168796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.345259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.381537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.415205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.446683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.480414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.513798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.561034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.611958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:59.639362Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348797472636705:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:59.639694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:00.266481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:21:00.343414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.370435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.427839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.464226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.499978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.544277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.581097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.614112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.647282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.752234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.788032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.825075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.862126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.892640Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.971043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:21:01.009966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.074494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.090848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.100992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.113156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.115016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.127331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.131172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.140987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.143437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.155085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.161486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.162559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.167426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.172169Z node 
1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.177717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.180843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.190172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.192016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.195922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.205735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.207212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.218819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.222452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.228541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.232517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.246894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.250968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.265248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.268031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:21:32.277809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.279974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.289708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.291966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.301574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.304162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.314533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.318663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.324106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.328121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.333790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.337623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.348040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.351822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.360436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.408892Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.486421Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fq70271b3fjts880x5g8p", SessionId: ydb://session/3?node_id=1&id=MmJlYmFmMjAtMWM4NDc0ZTAtOGMwYTM1MzMtNzA3MTE2Mw==, Slow query, duration: 30.147213s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:32.732557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:32.732927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:32.733498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348926321682571:5941];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-17T18:21:32.733872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::MultiJoins [GOOD] Test command err: Trying to start YDB, gRPC: 30868, MsgBus: 11197 2025-04-17T18:21:29.820189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348948081000714:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:29.829230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000efb/r3tmp/tmpmeO8Bo/pdisk_1.dat 2025-04-17T18:21:30.320875Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:30.322700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:30.322818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:30.327752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30868, node 1 2025-04-17T18:21:30.515416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:30.515439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:30.515445Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-17T18:21:30.515540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11197 TClient is connected to server localhost:11197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:31.411269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.426014Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:31.431548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.624473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.883671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.990065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:33.910306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348965260871537:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:33.910405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.265496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.310425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.382150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.420739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.465940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.512257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.582821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.685971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348969555839509:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.686071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.687533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348969555839514:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.691805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:34.705306Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:21:34.705556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348969555839516:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:34.794771Z node 1 :TX_PROXY ERROR: Actor# [1:7494348969555839571:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:34.810304Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348948081000714:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:34.810388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:35.941665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.981309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.022326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.064045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.118105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.157194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpJoin::JoinLeftPureExclusion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 6067, MsgBus: 7119 2025-04-17T18:21:23.002648Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348918023740302:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:23.003171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f10/r3tmp/tmplhqwUc/pdisk_1.dat 2025-04-17T18:21:23.602290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:23.608632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:23.608720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:23.616000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6067, node 1 2025-04-17T18:21:23.810764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-04-17T18:21:23.810784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:23.810790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:23.819323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7119 TClient is connected to server localhost:7119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:24.536446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:24.568433Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:24.601074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:24.754136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:21:24.928473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:25.077460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:26.908606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348935203611118:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:26.908732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:27.273160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.325974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.363184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.431140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.472030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.520577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.595807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.712220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348939498579085:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:27.712292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:27.712652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348939498579090:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:27.716535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:27.737030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348939498579092:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:27.842337Z node 1 :TX_PROXY ERROR: Actor# [1:7494348939498579148:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:28.000689Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348918023740302:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:28.000837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:29.290700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.361177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.388180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.420812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18858, MsgBus: 4565 2025-04-17T18:21:31.521349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348956808916680:2205];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f10/r3tmp/tmpE6ODFu/pdisk_1.dat 2025-04-17T18:21:31.572546Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:21:31.680319Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:31.704493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:31.704585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:31.707051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18858, node 2 2025-04-17T18:21:31.810700Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:31.810721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:31.810729Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:31.810844Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4565 TClient is connected to server localhost:4565 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:32.500081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.509715Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.552776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.654343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.840433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.939837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:35.433005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348973988787487:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.433090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.500303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.543838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.586257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.637618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.700411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.756145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.813766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.881932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348973988788148:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.881997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.882197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348973988788153:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.886155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:35.905866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348973988788155:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:21:36.006075Z node 2 :TX_PROXY ERROR: Actor# [2:7494348978283755508:3539] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:36.438275Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348956808916680:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:36.438335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:37.093062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.135795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.170066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.209699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH22 >> KqpFlipJoin::RightOnly_3 [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |95.0%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpFlipJoin::Inner_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 64990, MsgBus: 12509 2025-04-17T18:21:27.674799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348938312779235:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:27.701443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f06/r3tmp/tmpTHbVCB/pdisk_1.dat 2025-04-17T18:21:28.210352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:28.210439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:28.214685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:28.219615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64990, node 1 2025-04-17T18:21:28.352505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:28.352525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:28.352533Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:28.352671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12509 TClient is connected to server localhost:12509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:29.122001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:29.151109Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:29.168729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:21:29.336252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:29.556777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:21:29.653291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.314063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348955492650190:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:31.314168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:31.710698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.750551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.816633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.856376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.891486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.929825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.992623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.070337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348959787618148:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.070461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.074579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348959787618153:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.079539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:32.100122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348959787618155:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:32.182060Z node 1 :TX_PROXY ERROR: Actor# [1:7494348959787618212:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:32.677281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348938312779235:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:32.693091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:33.426695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:33.483699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:33.519562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:33.562312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5785, MsgBus: 1128 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f06/r3tmp/tmpS0oFdD/pdisk_1.dat 2025-04-17T18:21:35.378433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:21:35.417844Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:35.422014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:35.422093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:35.423905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5785, node 2 2025-04-17T18:21:35.586829Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:35.586849Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:35.586858Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:35.586999Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1128 TClient is connected to server localhost:1128 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:36.180014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:36.211993Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:21:36.232943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:36.321795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:36.507043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:36.620804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:38.908386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348988851571634:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:38.908464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:39.010669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.063427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.118112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.149876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.190796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.274920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.353255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.460476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348993146539600:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:39.460576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:39.460856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348993146539605:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:39.464897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:39.483547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348993146539607:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:21:39.552643Z node 2 :TX_PROXY ERROR: Actor# [2:7494348993146539664:3549] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:40.755532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.805548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.855588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.893989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::Inner+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Inner_2 [GOOD] Test command err: Trying to start YDB, gRPC: 25667, MsgBus: 11824 2025-04-17T18:21:27.489364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348940167621175:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:27.523157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f0a/r3tmp/tmpPDQxpE/pdisk_1.dat 2025-04-17T18:21:28.296067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:28.320900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:28.321005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:28.323700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25667, node 1 2025-04-17T18:21:28.583432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:28.583457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:28.583465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:28.583572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11824 TClient is connected to server localhost:11824 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:29.422948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:29.482605Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:29.500533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:29.744553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:21:30.074633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.272439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.163184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348961642459281:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.163289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.478305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348940167621175:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:32.482272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:32.544286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.582978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.668372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.699521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.727242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.776775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.831014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.953569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348961642459951:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.953680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.953974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348961642459956:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:32.958719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:32.975868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348961642459958:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:33.032708Z node 1 :TX_PROXY ERROR: Actor# [1:7494348965937427309:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:34.031587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.078668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.118338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.192717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"Tables":["FJ_Table_1"],"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1744914095015,"Host":"ghrun-gkdwnlcfnq","OutputRows":2,"StartTimeMs":1744914095015,"IngressRows":2,"ComputeTimeUs":125,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":1,"Bytes":24}],"TaskId":1,"OutputBytes":24}],"PeakMemoryUsageBytes":65536,"CpuTimeUs":1215}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":2053,"Max":2053,"Min":2053},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/FJ_Table_2","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":22,"Max":22,"Min":22}}],"BaseTimeMs":1744914095012,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":509,"Max":509,"Min":509},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":64,"Max":6 ... 
}}}],"CpuTimeUs":{"Count":1,"Sum":802,"Max":802,"Min":802},"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResultBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"OutputBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38}},"Name":"7","Push":{"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"WaitTimeUs":{"Count":1,"Sum":7559,"Max":7559,"Min":7559},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":9,"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":485743,"CpuTimeUs":481417},"ProcessCpuTimeUs":305,"TotalDurationUs":514153,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"4","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":12,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"2","Condition":"t1.Key = t2.Fk1","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"14.4"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"A-Rows":2,"A-SelfCpu":4.844,"A-Cpu":4.844,"A-Size":38,"Name":"TopSort","Limit":"1001","TopSortBy":"[row.t1.Value,row.t2.Value]"}],"Node Type":"TopSort"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.802,"A-Cpu":5.646,"A-Size":38,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 31512, MsgBus: 26489 2025-04-17T18:21:36.128901Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348980216079693:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:36.129350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f0a/r3tmp/tmpLElzOh/pdisk_1.dat 2025-04-17T18:21:36.520427Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:36.538661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:36.538736Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:36.539889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31512, node 2 2025-04-17T18:21:36.701272Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:36.701290Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:36.701297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:36.701407Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26489 TClient is connected to server localhost:26489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:37.523632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:37.530409Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:37.548771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:37.633941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:37.852323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:37.925571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:40.311347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348997395950506:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.311443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.421323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.465323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.513110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.556722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.591458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.675945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.725768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.810546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348997395951168:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.810636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.810852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494348997395951173:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.814702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:40.826126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494348997395951175:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:40.923596Z node 2 :TX_PROXY ERROR: Actor# [2:7494348997395951232:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:41.118829Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348980216079693:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:41.118897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:42.127998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.168632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.203625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.285848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::JoinWithParams >> KqpJoin::JoinWithDuplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 5456, MsgBus: 15059 2025-04-17T18:21:29.371292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348948032003300:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:29.371322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f05/r3tmp/tmpBptGzy/pdisk_1.dat 2025-04-17T18:21:29.988827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:29.988936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:29.995578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5456, node 1 2025-04-17T18:21:30.126885Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:21:30.130410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:30.156166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:30.156191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:30.156198Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-17T18:21:30.156308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15059 TClient is connected to server localhost:15059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:30.909570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:30.954630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:31.077024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:21:31.292990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:31.424291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:33.487248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348965211874269:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:33.487356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:33.972452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.024955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.067532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.166036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.204554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.259250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.339091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.380964Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348948032003300:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:34.381158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:34.439352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348969506842232:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.439484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.440486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348969506842237:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.444834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:34.476724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348969506842239:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:34.577876Z node 1 :TX_PROXY ERROR: Actor# [1:7494348969506842302:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:35.838422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.882450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.965415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.001314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.075380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.111255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29133, MsgBus: 12249 2025-04-17T18:21:38.134993Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348987631539053:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:38.135028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f05/r3tmp/tmp8ge3Sx/pdisk_1.dat 2025-04-17T18:21:38.479851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:38.479930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:38.487188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:38.510423Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29133, node 2 2025-04-17T18:21:38.634683Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:38.634706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:38.634713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:38.634815Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12249 TClient is connected to server localhost:12249 WaitRootIsUp 'Root'... 
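The sequence just above repeats throughout this run: TPoolCreatorActor schedules a retry for "Transaction ... completed, doublechecking" (most recently at 18:21:34.476724 on node 1), and the follow-up TX_PROXY "path exist, request accepts it" message is then treated as acceptance rather than failure, since a concurrent creator has already materialized /Root/.metadata/workload_manager/pools/default. A minimal C++ sketch of that idempotent-create handshake follows; the CreateStatus enum and the TryCreateDefaultPool stub are editorial assumptions for illustration only and do not reflect YDB's actual internal API.

    #include <chrono>
    #include <iostream>
    #include <thread>

    // Hypothetical status codes standing in for the replies seen in the log:
    // Ok for a clean create, AlreadyExists for "path exist, request accepts it",
    // Retryable for "completed, doublechecking".
    enum class CreateStatus { Ok, AlreadyExists, Retryable };

    // Stub standing in for the actual create call (assumption, not YDB code).
    CreateStatus TryCreateDefaultPool() { return CreateStatus::AlreadyExists; }

    // Idempotent create: losing the race to a concurrent creator is success too.
    bool EnsureDefaultPool(int maxAttempts = 5) {
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            switch (TryCreateDefaultPool()) {
                case CreateStatus::Ok:
                case CreateStatus::AlreadyExists:  // "path exist" -> accepted
                    return true;
                case CreateStatus::Retryable:      // "doublechecking" -> back off, retry
                    std::this_thread::sleep_for(
                        std::chrono::milliseconds(50 * attempt));
                    break;
            }
        }
        return false;
    }

    int main() { std::cout << EnsureDefaultPool() << "\n"; }

Under this reading, the ERROR-severity "path exist" lines scattered through this log are expected noise from racing pool creators during test setup, not test failures; the [GOOD] verdicts above are consistent with that.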
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:39.331284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:39.348490Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:39.368730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:39.613637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:39.827861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:21:39.977460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.071225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349004811409998:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:42.071306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:42.170566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.229762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.266168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.323255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.370937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.426375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.479878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.585301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349004811410665:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:42.585385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:42.585571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349004811410670:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:42.589468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:42.618536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349004811410672:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:42.676062Z node 2 :TX_PROXY ERROR: Actor# [2:7494349004811410727:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:43.138279Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494348987631539053:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:43.138338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:43.962317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.041741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.103557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.192893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.269737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.359862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpFlipJoin::RightOnly_2 [GOOD] >> KqpJoin::RightSemiJoin_SimpleKey >> KqpJoin::JoinLeftPureExclusion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 62563, MsgBus: 4349 2025-04-17T18:21:32.054662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348959631061310:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:32.054740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef5/r3tmp/tmpGNd1rH/pdisk_1.dat 2025-04-17T18:21:32.691839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:32.691940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:32.695246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:32.735945Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 62563, node 1 2025-04-17T18:21:32.801062Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:32.801082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:32.801092Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:32.801201Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4349 TClient is connected to server localhost:4349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:33.401277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:33.415104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:33.435040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:33.591133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:33.737423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:33.816385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:35.739836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348972515964751:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.740017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:36.079760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.162552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.205100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.243646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.331293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.422775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.495862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.607564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348976810932727:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:36.607625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:36.607896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348976810932732:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:36.611042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:36.626106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348976810932734:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:36.706331Z node 1 :TX_PROXY ERROR: Actor# [1:7494348976810932790:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:37.058194Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348959631061310:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:37.058295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:37.767548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.801828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.831876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.865918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.898194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.969121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64316, MsgBus: 20753 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef5/r3tmp/tmpOnBPuB/pdisk_1.dat 2025-04-17T18:21:39.892881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:21:40.054442Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:40.055478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:40.055551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:40.060031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64316, node 2 2025-04-17T18:21:40.226594Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:40.226612Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:40.226620Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:40.226720Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20753 
TClient is connected to server localhost:20753 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-17T18:21:40.822356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.844863Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:40.859655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:40.960765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:41.266262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:41.383189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:43.666153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349008934703942:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:43.666239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:43.712144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.753195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.795149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.828451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.865429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.913569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.953387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.046790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349013229671903:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.046849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.047008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349013229671908:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.050394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:44.065481Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:21:44.065834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349013229671910:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:44.145111Z node 2 :TX_PROXY ERROR: Actor# [2:7494349013229671967:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:45.213557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.255082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.343714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.390955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.425334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.501304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_2 [GOOD] Test command err: Trying to start YDB, gRPC: 5716, MsgBus: 16048 2025-04-17T18:21:32.713772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348961950647148:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:32.713902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef4/r3tmp/tmpgyZw9h/pdisk_1.dat 2025-04-17T18:21:33.234562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:33.235760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:33.235822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:33.240267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5716, node 1 2025-04-17T18:21:33.391889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:33.391916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:33.391929Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:33.392040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16048 TClient is connected to server localhost:16048 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:34.015287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:34.027770Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:34.039602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:34.216941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:34.407920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:34.486759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:36.396199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348979130517902:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:36.396287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:36.762240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.822134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.872791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.930787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:36.986136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.040554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.081107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.144938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348983425485858:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:37.145028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:37.146164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348983425485864:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:37.152705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:37.165227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348983425485866:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:37.259074Z node 1 :TX_PROXY ERROR: Actor# [1:7494348983425485923:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:37.712925Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348961950647148:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:37.712983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:38.282649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:38.337829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:38.378706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:38.456236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20970, MsgBus: 62870 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef4/r3tmp/tmpDGGlHG/pdisk_1.dat 2025-04-17T18:21:40.314465Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:21:40.346782Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:40.364631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:40.364711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:40.370956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20970, node 2 2025-04-17T18:21:40.554626Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:40.554648Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:40.554655Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:40.554755Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62870 TClient is connected to server localhost:62870 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:41.081535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:41.096756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:21:41.104944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:41.193095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:41.401668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:41.480985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:43.790380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349009364301908:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:43.790488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:43.842013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.877244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.926246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.983432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.037820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.090747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.168391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.306684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349013659269873:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.306798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.314315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349013659269878:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.322403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:44.344067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349013659269880:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:21:44.417418Z node 2 :TX_PROXY ERROR: Actor# [2:7494349013659269938:3545] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:45.533973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.574877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.617559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.683759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureExclusion [GOOD] Test command err: Trying to start YDB, gRPC: 63953, MsgBus: 19409 2025-04-17T18:21:40.916276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348994493693951:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:40.916313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eea/r3tmp/tmpqNUL8j/pdisk_1.dat 2025-04-17T18:21:41.588953Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:41.620032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:41.620161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:41.623569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63953, node 1 2025-04-17T18:21:41.705533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:41.705551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:41.705558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:41.705656Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19409 TClient is connected to server localhost:19409 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:42.382675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:42.397695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:42.408302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:42.555244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:42.757644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:21:42.864107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.576873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349011673564919:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.576996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:44.890533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:44.944286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.053179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.102498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.149155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.213117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.294466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:45.395942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349015968532891:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:45.396026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:45.396174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349015968532896:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:45.400114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:45.415279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349015968532898:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:45.510143Z node 1 :TX_PROXY ERROR: Actor# [1:7494349015968532954:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:45.918283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348994493693951:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:45.927462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> KqpJoinOrder::TPCDS61-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2+ColumnStore >> KqpJoinOrder::TPCDS23+ColumnStore >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-04-17T18:17:38.100279Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:38.193461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:38.216033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:38.216317Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:38.224131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:38.224322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:38.224534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:38.224639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:38.224769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:38.224867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:38.224966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:38.225085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-17T18:17:38.225193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:38.225292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.225400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:38.225559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:38.252732Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:38.252875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:38.252932Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:38.253128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:38.253303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:38.253374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:38.253417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:38.253476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:38.253518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:38.253542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:38.253558Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:38.253726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:38.253779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-17T18:17:38.253805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:38.253825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:38.253901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:38.253935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:38.253991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:38.254007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:38.254087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:38.254128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:38.254160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:38.254225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:38.254270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:38.254306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:38.254703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-17T18:17:38.254779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-17T18:17:38.254876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-04-17T18:17:38.254990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-17T18:17:38.255187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:38.255260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-17T18:17:38.255304Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:38.255501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:38.255559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.255621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:38.255790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:38.255837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:38.255865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:38.256071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:38.256119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:38.256148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:38.256261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:38.256298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:38.256341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ...
:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:
9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7
;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913859154;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;met
a:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=1744913859154;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-17T18:21:50.071558Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11069:12696];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-17T18:21:50.073383Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11069:12696];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpJoinOrder::ShuffleEliminationTpcdsMapJoinBug [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10337, MsgBus: 5680 2025-04-17T18:20:43.466155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348750054522949:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:43.476900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f49/r3tmp/tmpnpItkz/pdisk_1.dat 2025-04-17T18:20:43.927897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:43.928006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:43.930797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:43.932154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10337, node 1 2025-04-17T18:20:44.246808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:44.246836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:44.246845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:44.246998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5680 TClient is connected to server localhost:5680 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:45.054525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:45.091857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:47.059821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348767234392661:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:47.059978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:47.060481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348767234392673:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:47.067356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:47.090930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348767234392675:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:47.175384Z node 1 :TX_PROXY ERROR: Actor# [1:7494348767234392726:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:47.542239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:47.709194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:20:47.745012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:20:47.785843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:20:47.817295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:20:47.986897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.055367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.092621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.125616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.159768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.200037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.244440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.283398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:20:48.515155Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348750054522949:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:48.515330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:20:48.914614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:20:48.962678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.030394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.062208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.095325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.127841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.160090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.225875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.252609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.295689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.332113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.366315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.402253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.432533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.462560Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.498766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.568704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... 6710714; 2025-04-17T18:21:19.142641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.145294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.148245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.150854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.153940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.156296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.159704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.162011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.165761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.167963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.171686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.173408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.177141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.178673Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.182459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.184216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.188191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.189941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.193947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.199868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.199869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.205747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.209432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.211677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.215037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.216717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.220372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.221961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.226104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.227481Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.231607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038456;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.232821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038460;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.237313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.238493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.243043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.243813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.248569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:19.280787Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fpvr84qahz5sb9fm9enfb", SessionId: ydb://session/3?node_id=1&id=NmI1OTMyMTAtYTcxMzljMS05MjA1OTNjMi1lMTM1MDcxOA==, Slow query, duration: 28.456087s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:19.827171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:19.827619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:19.828138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7494348831658918235:4348];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-17T18:21:19.828468Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:46.731676Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fr4epbmfjx4wxks8spyjc", SessionId: ydb://session/3?node_id=1&id=NmI1OTMyMTAtYTcxMzljMS05MjA1OTNjMi1lMTM1MDcxOA==, Slow query, duration: 14.228619s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b >> KqpIndexLookupJoin::Inner+StreamLookup [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup >> KqpJoinOrder::TPCDS94+ColumnStore >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore >> KqpNewEngine::JoinWithParams [GOOD] >> KqpNewEngine::LeftSemiJoin >> KqpJoin::JoinWithDuplicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationTpcdsMapJoinBug [GOOD] Test command err: Trying to start YDB, gRPC: 29972, MsgBus: 29518 2025-04-17T18:20:22.204812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348662916267942:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:22.204845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f62/r3tmp/tmpkySVob/pdisk_1.dat 2025-04-17T18:20:22.894546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:22.894623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:22.901272Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:22.927846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29972, node 1 2025-04-17T18:20:23.165546Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:23.165576Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:23.165584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:23.165682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29518 TClient is connected to server localhost:29518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:23.863680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:25.820491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348675801170493:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:25.820717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:25.820794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348675801170505:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:25.825255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:25.849417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348675801170507:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:25.945432Z node 1 :TX_PROXY ERROR: Actor# [1:7494348675801170558:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:26.271662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:26.521788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:26.521984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:26.522263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:26.522374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:26.522491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:26.522618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:26.522709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:26.522796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:26.522899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:26.522986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:26.523071Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:26.523159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348680096138069:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:26.523644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:26.523679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:26.523864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:26.523957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:26.524049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:26.524134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:26.524224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:26.524333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:26.524439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:26.524535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:26.524624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:26.524724Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494348680096138110:2361];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:26.551405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348680096138059:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:26.551468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348680096138059:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:26.551679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;sel ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.359811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.364020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.365438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.371506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.371873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.377392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.380502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.383671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.389161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.389171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.396147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.396322Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.401743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.403586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.407529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.413132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.414553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.419182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.421576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.425234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.429740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.431349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.437030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.441599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.443221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.450278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.459411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.464577Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.470610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.477537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.483336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.488920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.494067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.499263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.505033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.510874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.516302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.522925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.525249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.529696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.532835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.535025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.537617Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.540589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.546651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:32.819211Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fq76t9wssk2ehvry1x21q", SessionId: ydb://session/3?node_id=1&id=MTJhMmQwODktMTY4ZWM3YWYtM2E2NmEyYy1lNjJhNjlmYw==, Slow query, duration: 30.264339s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:33.324191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:33.324707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:33.326606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348847599899395:7830];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-17T18:21:33.327050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinWithDuplicates [GOOD] Test command err: Trying to start YDB, gRPC: 64034, MsgBus: 61192 2025-04-17T18:21:47.478760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349024281220743:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:47.479896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000edd/r3tmp/tmpKSXWcM/pdisk_1.dat 2025-04-17T18:21:48.039767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:48.039860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:48.045659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
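The KQP_SLOW_LOG entry above (TraceId "01js2fq76t9wssk2ehvry1x21q", duration 30.264339s) carries the offending statement as a single escaped string in its text: field. The same DDL, unescaped for readability (content taken verbatim from that field; indentation approximated, since the log collapses whitespace):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The identical statement text recurs in the TPCDS92 slow-query entry later in this output (TraceId "01js2fqfxb9wn20sen7b7hn99q", duration 29.470182s), so both slow-query reports refer to the same column-store table setup with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240.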
TServer::EnableGrpc on GrpcPort 64034, node 1 2025-04-17T18:21:48.071683Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:48.086814Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:21:48.086830Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:21:48.250859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:48.250879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:48.250886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:48.250996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61192 TClient is connected to server localhost:61192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:49.058163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:49.094516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:49.114034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:49.357869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:49.555742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:49.638468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:51.689122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349041461091680:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:51.689700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:51.992919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.024980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.094371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.142479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.241174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.329932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.390470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.487325Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349024281220743:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:52.487531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:52.493790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349045756059658:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:52.493852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:52.494154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349045756059663:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:52.507281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:52.528145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349045756059665:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:52.601482Z node 1 :TX_PROXY ERROR: Actor# [1:7494349045756059722:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:53.904683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.951930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] Test command err: Trying to start YDB, gRPC: 24961, MsgBus: 12931 2025-04-17T18:21:48.415151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349029266375395:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:48.415809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed8/r3tmp/tmpTZHSmX/pdisk_1.dat 2025-04-17T18:21:49.102249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:49.102385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:49.102680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:49.111993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24961, node 1 2025-04-17T18:21:49.399328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:49.399350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:49.399359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:49.399463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12931 TClient is connected to server localhost:12931 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:50.262566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:50.300245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:50.321422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:50.508959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:50.696052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:50.798786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:52.615843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349046446246220:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:52.615927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:52.976909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.022041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.073536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.128780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.166327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.256885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.336725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:53.410473Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349029266375395:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:53.410821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:53.432090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349050741214192:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:53.432171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:53.432492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349050741214197:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:53.437037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:53.456787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349050741214199:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:53.518108Z node 1 :TX_PROXY ERROR: Actor# [1:7494349050741214256:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:54.639963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.694221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.747387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.790302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.828129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> OlapEstimationRowsCorrectness::TPCH2 >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] >> KqpFlipJoin::Right_3 >> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1090, MsgBus: 25329 2025-04-17T18:21:03.796378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348835474418708:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:03.796790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f2a/r3tmp/tmpeJxDDy/pdisk_1.dat 2025-04-17T18:21:04.390245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:04.390329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:04.395726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:04.398839Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1090, node 1 2025-04-17T18:21:04.543259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:04.543280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:04.543291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:04.543387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25329 TClient is connected to server localhost:25329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:05.497990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:07.619566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348852654288426:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:07.619963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348852654288414:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:07.620026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:07.623759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:07.643026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348852654288428:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:07.733666Z node 1 :TX_PROXY ERROR: Actor# [1:7494348852654288479:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:08.038307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.134741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.165970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.251494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.278839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.433820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.488119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.534193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.568413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.608657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.675478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.705765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.738199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.783903Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348835474418708:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:08.783963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:09.393705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:21:09.432338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.472831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.515230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.556181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.588785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.658116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.697478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.738777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.827206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.859392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.894660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.935056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.969990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.001950Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.044692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.078643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:21:10.110655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.671638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.676830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.683611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.685732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.689743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.691272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.696364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.701657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.701910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.707818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.713156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.718395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.721122Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.723805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.728107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.728982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.734247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.738979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.743472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.746596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.748623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.751630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.753810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.756636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.758690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.761718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.763666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.766661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.768809Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.771987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.774031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.777210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.779379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.782611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.784489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.788037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.791519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.795038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.796018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.801305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.801547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.833042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.836853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.837885Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.843409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:40.937707Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqfxb9wn20sen7b7hn99q", SessionId: ydb://session/3?node_id=1&id=MWM4ZWQzNTAtNTY5YjcwZTktNTMwNTQ2MDMtNDBlYTRjMDA=, Slow query, duration: 29.470182s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:41.478543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:41.479073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:41.479414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494348964323464378:5900];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-17T18:21:41.480187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 31900, MsgBus: 29362 2025-04-17T18:20:38.911364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348729742945888:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:38.911413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f51/r3tmp/tmpPLqat5/pdisk_1.dat 2025-04-17T18:20:39.389006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:39.390708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:39.390825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:39.394872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31900, node 1 2025-04-17T18:20:39.522701Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:39.522757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:39.522765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:39.522904Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29362 TClient is connected to server localhost:29362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:40.190510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:42.254553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348746922815666:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:42.254668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:42.254736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348746922815678:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:42.258279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:42.268815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348746922815680:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:42.350138Z node 1 :TX_PROXY ERROR: Actor# [1:7494348746922815731:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:42.652931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:42.880734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:42.880747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:42.880982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:42.881277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:42.881438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:42.881550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:42.881562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:42.881739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:42.881747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:42.881866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:42.881865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-17T18:20:42.881986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:42.881987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:42.882098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:42.882127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:42.882223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:42.882248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:42.882319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:42.882423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:42.882464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:42.882538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:42.882589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348746922815994:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:42.882652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:42.882761Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494348746922816009:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:42.937297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348746922816011:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:42.938360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348746922816011:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:42.938573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;sel ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.865267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.873387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.886889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.896180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.901773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.906930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.909501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.916216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.919082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.924566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.925021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.931371Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.931902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.937118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.937357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.942841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.942920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.948960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.950422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.955881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.956613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.962194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.965582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.975124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.979389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.987093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.990606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.996784Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.999984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.005661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.008853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.011380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.017384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.024461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.026956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.030410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.038213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.045695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.052264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.057006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.064973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.076407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.082030Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.082051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.090142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.232957Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqqny5ksen4tj0vfy32nk", SessionId: ydb://session/3?node_id=1&id=ZjEzMmU4ZDctOWVlNzYwNjctZDlmZWNkYTUtZmQ3N2E0Mzk=, Slow query, duration: 30.810078s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:50.501856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:50.502307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:50.502821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348914426575964:7563];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:21:50.503222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 25498, MsgBus: 1282 2025-04-17T18:21:45.919725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349016585576464:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:45.919766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ede/r3tmp/tmpprmj1L/pdisk_1.dat 2025-04-17T18:21:46.671447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:46.705442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:46.705537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-17T18:21:46.707674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25498, node 1 2025-04-17T18:21:46.848436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:46.848452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:46.848459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:46.848547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1282 TClient is connected to server localhost:1282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:47.730858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:47.754607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:47.772368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:47.998777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:48.201070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:48.313160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:50.244339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349038060414738:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:50.244476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:50.578851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:50.615499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:50.649806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:50.687964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:50.736353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:50.809394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:50.881349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:50.927628Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349016585576464:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:50.927888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:50.954648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349038060415406:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:50.954721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:50.955026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349038060415411:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:50.958373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:50.973757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349038060415413:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:51.045018Z node 1 :TX_PROXY ERROR: Actor# [1:7494349042355382767:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:52.375158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.434928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.471930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.508205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.549472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:52.591838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28202, MsgBus: 14606 2025-04-17T18:21:54.370533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349057421835440:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:54.370584Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ede/r3tmp/tmpjvMDj3/pdisk_1.dat 2025-04-17T18:21:54.557883Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:54.567691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:54.567769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:54.568929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28202, node 2 2025-04-17T18:21:54.684866Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:54.684883Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:54.684890Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:54.684988Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14606 TClient is connected to server localhost:14606 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:55.200233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:55.207142Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:21:55.224691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:55.276155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:55.461327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:55.544156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:58.370881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349074601706370:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:58.370959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:58.451471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.485107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.516201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.552683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.596164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.657809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.713522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.808474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349074601707033:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:58.808529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:58.808692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349074601707038:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:58.811703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:58.830006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349074601707040:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:21:58.902337Z node 2 :TX_PROXY ERROR: Actor# [2:7494349074601707098:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:59.373651Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494349057421835440:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:59.373706Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:00.008828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:00.075749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:00.122111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:00.155787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:00.194880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:00.246808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480
>> KqpNewEngine::LeftSemiJoin [GOOD]
>> KqpNewEngine::JoinWithPrecompute
>> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup
>> KqpJoinOrder::TPCDS95+ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD]
Test command err:
Trying to start YDB, gRPC: 3828, MsgBus: 61887 2025-04-17T18:20:33.266693Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348706538193680:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:33.282779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f5d/r3tmp/tmpp2jJBK/pdisk_1.dat 2025-04-17T18:20:33.786250Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:33.796795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:33.796921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:33.803416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 3828, node 1 2025-04-17T18:20:33.903523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:33.903540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:33.903546Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:33.903669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61887 TClient is connected to server localhost:61887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:34.433978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:36.527201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348719423096230:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:36.527305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:36.534307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348719423096242:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:36.542360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:36.557179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348719423096244:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:36.625050Z node 1 :TX_PROXY ERROR: Actor# [1:7494348719423096295:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:36.959910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:37.237732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:37.237932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:37.247205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:37.247369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:37.247469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:37.247583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:37.247684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:37.247787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:37.247886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:37.247991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:37.248090Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:37.248193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348723718063827:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:37.251431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:37.251488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:37.251661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:37.251783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:37.251888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:37.251991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:37.252101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:37.252211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:37.252332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:37.252459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:37.252605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:37.252739Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7494348723718063825:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:37.280200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348723718063829:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:37.280260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348723718063829:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:37.280511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_ ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.495530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.498710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.499407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.504856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.504943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.511459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.511580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.517340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.517343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.523460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.523619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.529481Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.529794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.536106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.536362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.541580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.541801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039220;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.547617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.547740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.553432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.555074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.559080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.560302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.564406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.565765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.572259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.573073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.577476Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.578388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.583516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.583804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.589271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.594944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.600664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.605472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.610582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.615277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.620178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.625242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.637336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.640173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.643608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.645877Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.712305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.796979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:45.852164Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqhyva2m273pyya0v0skg", SessionId: ydb://session/3?node_id=1&id=ZmU1NmMyNWYtYjIxYmVmNzktNzQwYTU1MTgtYWU3M2YzMWU=, Slow query, duration: 32.288709s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:46.208952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:46.209271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494348942761445205:9771];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:21:46.209369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:46.209871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD]
>> OlapEstimationRowsCorrectness::TPCH9 [GOOD]
>> TFlatTest::WriteMergeAndRead
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 16847, MsgBus: 29253 2025-04-17T18:21:50.139470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349038053841256:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:50.166667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed7/r3tmp/tmpaR8HqY/pdisk_1.dat 2025-04-17T18:21:50.821706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:50.827000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
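For readability: the query text carried by the two KQP_SLOW_LOG records above (durations 30.810078s and 32.288709s) is the same escaped YQL DDL. Unescaping the \n sequences, verbatim from the log, it reads:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);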
2025-04-17T18:21:50.827073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:50.832355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16847, node 1 2025-04-17T18:21:51.102696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:51.102712Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:51.102718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:51.102812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29253 TClient is connected to server localhost:29253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:51.969901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:52.014801Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:52.038371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:52.236962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:52.413758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:52.503959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:54.055587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349055233712149:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:54.055715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:54.451373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.534492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.607862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.641626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.710891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.766523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.809152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:54.876812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349055233712814:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:54.876949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:54.877395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349055233712820:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:54.882239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:54.901348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349055233712822:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:54.992022Z node 1 :TX_PROXY ERROR: Actor# [1:7494349055233712879:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:55.194406Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349038053841256:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:55.194665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:56.176346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.233147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.320432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.384216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.456295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.494225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8052, MsgBus: 23137 2025-04-17T18:21:58.493909Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349073969917476:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:58.510422Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed7/r3tmp/tmp4l0HPt/pdisk_1.dat 2025-04-17T18:21:58.665322Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:58.691282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:58.691359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:58.695543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8052, node 2 2025-04-17T18:21:58.878676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:58.878698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:58.878707Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:58.878833Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23137 TClient is connected to server localhost:23137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:59.495420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:59.514566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:59.580911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:59.773943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:59.897581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:02.404183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349091149788258:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:02.404275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:02.502016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:02.553652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:02.606242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:02.692414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:02.756590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:02.817258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:02.879476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:03.012674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349095444756220:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:03.012780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:03.012868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349095444756225:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:03.022080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:03.040709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349095444756227:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:03.132934Z node 2 :TX_PROXY ERROR: Actor# [2:7494349095444756284:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:03.414603Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494349073969917476:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:03.414657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:04.395075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.467442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.515531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.559157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.613917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.692304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::Right_3 [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> KqpJoinOrder::CanonizedJoinOrderTPCH4 >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 27233, MsgBus: 28255 2025-04-17T18:20:38.287889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348729812826085:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:38.287928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f53/r3tmp/tmpYfb4C4/pdisk_1.dat 2025-04-17T18:20:38.711155Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:38.755328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:38.755423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T18:20:38.756288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27233, node 1 2025-04-17T18:20:38.830515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:38.830541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:38.830548Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:38.830658Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28255 TClient is connected to server localhost:28255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:39.438365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:39.458390Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:41.315667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348742697728512:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:41.315760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348742697728517:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:41.315831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:41.320778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:41.329643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348742697728526:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:41.391193Z node 1 :TX_PROXY ERROR: Actor# [1:7494348742697728577:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:41.643723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:41.885110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:41.885340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:41.885636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:41.885823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:41.885961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:41.886123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:41.886293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:41.886448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:41.886599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:41.886745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:41.886895Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:41.887031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348742697728845:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:41.911169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:41.911222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:41.911391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:41.911485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:41.911591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:41.911702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:41.911790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:41.911876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:41.911967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:41.912059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:41.912149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:41.912237Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494348742697728847:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:41.938639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348742697728872:2360];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:41.938691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348742697728872:2360];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.000892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.002789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.007512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.007754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.012905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.013174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.018648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.018665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.023422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.023725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.028857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.029154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.034002Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.034005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.039096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.039234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.044575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.045198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.050456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.050526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.055627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.055671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.060633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.060729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.065891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.065891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.071002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.071002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.075927Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.076014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.080895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.080901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.085792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.086023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.091134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.091346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.096485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.096779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.101330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.101601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.107130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.107145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.112915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.225496Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.267457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:50.309597Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqp6acz5xrj38rdbvgfkz", SessionId: ydb://session/3?node_id=1&id=NWJjODQ1YzItNGRmMzhkNDYtMTYzZWUxNzAtMjZiZWY5MGI=, Slow query, duration: 32.410789s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:50.664221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:50.664578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:50.665185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494348996100854928:10612];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:21:50.665517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20356, MsgBus: 17926 2025-04-17T18:21:23.722811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348921210162902:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:23.723232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f0c/r3tmp/tmpn39z52/pdisk_1.dat 2025-04-17T18:21:24.297321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:24.297427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:24.299649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:24.302553Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20356, node 1 2025-04-17T18:21:24.482717Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:24.482755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:24.482767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:24.482879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17926 TClient is connected to server localhost:17926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:25.279491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:25.338326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:27.342766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348938390032617:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:27.342838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:27.346296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348938390032629:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:27.350312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:27.362140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348938390032631:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:27.463709Z node 1 :TX_PROXY ERROR: Actor# [1:7494348938390032682:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:27.795102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.908679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.947910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:27.979443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.023535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.207817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.269658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.336046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.385975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.433539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.469236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.521566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.597674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:28.722409Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348921210162902:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:28.722486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:29.190995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:21:29.238384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.282099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.359739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.399264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.483989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.529775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.570007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.600493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.628031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.657081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.687242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.758703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.802723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.878664Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.925108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:21:29.967652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.903222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.909937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.916082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.929302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.930063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.944752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.947640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.951904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.952910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.958011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.958946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.963700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.966691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.976401Z node 
1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.979790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.982122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.985711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.992296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.997235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:01.998379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.003720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.005112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.008892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.010019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.014072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.014859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.018991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.020326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.024008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:22:02.025215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.028936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.029990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.038605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.044163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.047100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.052496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.056822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038427;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.061101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.064187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.069484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.073748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.079000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.079576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.084339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.085207Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:02.180956Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fr3ft68qkfkcfa6pz55p9", SessionId: ydb://session/3?node_id=1&id=ODRiNzU3OTItM2E5NDEzMmYtOTAzOTQwMmItNGFkMmQzNjk=, Slow query, duration: 30.665912s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:02.515435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:02.515824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:02.516414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494348964159842593:2819];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-17T18:22:02.516776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD]
Test command err:
Trying to start YDB, gRPC: 23657, MsgBus: 22775 2025-04-17T18:20:45.670629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348758611854206:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:45.670684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f44/r3tmp/tmpAX7eDf/pdisk_1.dat 2025-04-17T18:20:46.180180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:46.180277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:46.187038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:46.205480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23657, node 1 2025-04-17T18:20:46.452959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:46.452979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:46.452994Z node 1 :NET_CLASSIFIER WARN: failed to
initialize from file: (empty maybe) 2025-04-17T18:20:46.453126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22775 TClient is connected to server localhost:22775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:47.181589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:47.228176Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:49.182624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348775791724035:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.182731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.183022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348775791724047:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:49.186749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:49.199970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348775791724049:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:49.298414Z node 1 :TX_PROXY ERROR: Actor# [1:7494348775791724100:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:49.562616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:49.807624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:49.807847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:49.808080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:49.808169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:49.808271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:49.808381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:49.808463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:49.808555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:49.808620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:49.808673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:49.808725Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:49.808798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348775791724397:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:49.810262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:49.810305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:49.810486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:49.810582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:49.810668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:49.810998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:49.811099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:49.811217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:49.811315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:49.811417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:49.811510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:49.811591Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348775791724348:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:49.842903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348775791724402:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:49.842944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348775791724402:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abs ... 0714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.564534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.564672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.570967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.575970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.580626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.585808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.591025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.596185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.601459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.606477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.611649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.616690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.621354Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.626331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.628825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.631689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.637180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.638378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.643382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.650226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.656116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.659099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.661759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.664369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.668037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.669260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.672776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.675469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.680390Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.682239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.691289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.695302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.700872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.703226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.710833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.713243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.716202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.721678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.727246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.728642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.732432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.738719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.744599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.749929Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:55.993978Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqydf6enmj440fd9p3qr7", SessionId: ydb://session/3?node_id=1&id=Njg3MDhlY2QtNjczZDBlYy03NjMzYTdiOS0yODhhZjk4OA==, Slow query, duration: 29.674360s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:56.253389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:56.253809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:56.254103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494349024899883003:10663];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-04-17T18:21:56.254878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
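[Editor's note: the KQP_SLOW_LOG entries above and below carry the offending DDL as a single escaped string. For readability, the same statements with the \n escapes expanded — content taken verbatim from the log text, indentation reconstructed:]

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);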
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
>> TFlatTest::WriteMergeAndRead [GOOD]
>> TFlatTest::WriteSplitAndRead
>> KqpJoin::LeftJoinWithNull-StreamLookupJoin
>> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD]
Test command err:
Trying to start YDB, gRPC: 10872, MsgBus: 11831 2025-04-17T18:20:37.864370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348725714836986:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:37.870668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f57/r3tmp/tmpU6RzVc/pdisk_1.dat 2025-04-17T18:20:38.398894Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:38.405638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:38.405767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:38.414038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10872, node 1 2025-04-17T18:20:38.562763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:38.562786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:38.562798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:38.562921Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11831 TClient is connected to server localhost:11831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:39.255037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:20:39.269031Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:41.062468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348742894706837:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:41.062581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:41.062837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348742894706849:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:41.066520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:41.077618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348742894706851:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:41.182933Z node 1 :TX_PROXY ERROR: Actor# [1:7494348742894706902:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:41.513284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:41.776184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:41.776184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:41.776424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:41.776436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:41.776676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:41.776688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:41.776804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:41.776815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:41.777093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:41.777207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:41.777309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:20:41.777419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:41.777544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:41.777646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:41.777758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:41.777853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348742894707182:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:41.778009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:41.778165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:41.778286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:41.778416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:41.778548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:41.778661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:41.778775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:41.778921Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348742894707146:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:41.829203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742894707184:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:41.829696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348742894707184:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:48.989023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:48.989025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:48.994363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:48.996380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.000015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.001430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.005449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.008607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.010887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.019925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.021762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.025404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.027104Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.030856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.032472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.036279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.037698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.041816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.043317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.052367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.057355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.057727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.062679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.063022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.068647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.068655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.074632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.075869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.080392Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.086127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.095638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.099324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.105109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.115442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.120146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.125126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.129878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.139357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.144436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.149085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.155571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.157918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.161698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.167522Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.169600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:49.450160Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqpqk3rp7sxs2cqbx02sq", SessionId: ydb://session/3?node_id=1&id=NDgwMDdkNjQtNTdlYzhmNi1mYTNhNDczNy01ZWFiZjY3ZQ==, Slow query, duration: 30.996187s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:49.734912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:49.735408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:49.735733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348927578342236:8771];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-17T18:21:49.736033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
>> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup
>> KqpJoinOrder::CanonizedJoinOrderTPCH12
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 26556, MsgBus: 23331 2025-04-17T18:22:04.755334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349099838768353:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:04.758445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eb6/r3tmp/tmpYEVWKR/pdisk_1.dat 2025-04-17T18:22:05.422050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:05.424836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:05.426305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:05.463207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26556, node 1 2025-04-17T18:22:05.698665Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:05.698690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:05.698698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:05.698791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23331 TClient is connected to server localhost:23331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:06.674837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:06.724137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:06.912630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:07.184167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:07.299645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:09.087853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349121313606586:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.087945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.435451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.472290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.542219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.593372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.635327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.674724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.756766Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349099838768353:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:09.756817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:09.775962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.860631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349121313607252:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.860702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.860911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349121313607257:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.864574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:09.888893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349121313607259:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:09.970082Z node 1 :TX_PROXY ERROR: Actor# [1:7494349121313607314:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:11.335304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:11.394620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:11.448089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:11.518501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:11.551978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:11.591753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpNewEngine::LiteralKeys >> KqpJoin::JoinConvert >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup >> TFlatTest::WriteSplitAndRead [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-04-17T18:22:08.439960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349115719096703:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:08.440374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001da5/r3tmp/tmpAi8UpW/pdisk_1.dat 2025-04-17T18:22:09.099404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:09.099504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:09.101273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:09.107667Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25500 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:22:09.654403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:09.673112Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:09.688948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:09.928663Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.009s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-17T18:22:09.931943Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.021s,wait=0.010s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-17T18:22:09.979842Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.009s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1605 647 6413)b }, ecr=1.000 2025-04-17T18:22:09.985565Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2358 1432 5183)b }, ecr=1.000 2025-04-17T18:22:10.020635Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.022305Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:22:10.022368Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:22:10.025930Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.028907Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3527 2180 6413)b }, ecr=1.000 2025-04-17T18:22:10.030727Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:22:10.030753Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-17T18:22:10.031138Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 
72075186224037888 2025-04-17T18:22:10.031202Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:22:10.033787Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.036850Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:22:10.036907Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:22:10.042237Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.048115Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4153 2983 5183)b }, ecr=1.000 2025-04-17T18:22:10.049883Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-17T18:22:10.049905Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-04-17T18:22:10.050484Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-17T18:22:10.050549Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744914129805 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-04-17T18:22:10.074351Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.075773Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.075958Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.077169Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.082311Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.082826Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-17T18:22:10.083497Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.083618Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.084030Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-17T18:22:10.084497Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.090288Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.090862Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-17T18:22:10.091470Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.091580Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.091902Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-17T18:22:10.092368Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.098321Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.098915Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-17T18:22:10.099743Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.099883Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.100422Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-17T18:22:10.101220Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.101771Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.102198Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-17T18:22:10.103231Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.103361Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.103720Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-17T18:22:10.104400Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.104976Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.105373Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-17T18:22:10.106105Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.106212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.106638Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-17T18:22:10.107245Z node 1 
:TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.107842Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.108288Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-17T18:22:10.108921Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.109023Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.109427Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-17T18:22:10.110102Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.110211Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:22:10.110677Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-17T18:22:10.111434Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-17T18:22:10.112117Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-17T18:22:10.112530Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 7207518622403788 ... T_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:22:14.358268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494349138276388043 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-04-17T18:22:14.358290Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:22:14.358392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494349138276388043 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-04-17T18:22:14.358411Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:22:14.358499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494349142571355696 RawX2: 4503608217307458 } TabletId: 72075186224037890 State: 4 2025-04-17T18:22:14.358519Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:22:14.358666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:22:14.358724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:22:14.358774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:22:14.358803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:22:14.358836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:22:14.359492Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 
72075186224037890 state Offline 2025-04-17T18:22:14.359560Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-17T18:22:14.359571Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-17T18:22:14.359580Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-17T18:22:14.359588Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-17T18:22:14.359803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494349138276388044 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-17T18:22:14.359832Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:22:14.359950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7494349142571355701 RawX2: 4503608217307459 } TabletId: 72075186224037891 State: 4 2025-04-17T18:22:14.359967Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-17T18:22:14.360209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:22:14.360300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:22:14.360464Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-17T18:22:14.360480Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-17T18:22:14.362819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-17T18:22:14.363035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-17T18:22:14.363205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:22:14.363368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:22:14.363472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:22:14.363590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:22:14.363679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:22:14.363789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:22:14.365226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-17T18:22:14.365241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-17T18:22:14.365273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:22:14.365280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:22:14.365303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:22:14.365319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:22:14.365333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:22:14.365340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:22:14.365587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:22:14.366471Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-17T18:22:14.366497Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-17T18:22:14.366545Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7494349142571355833:2624], serverId# [2:7494349142571355836:2627], sessionId# [0:0:0] 2025-04-17T18:22:14.366565Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-17T18:22:14.366581Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7494349138276388155:2386], serverId# [2:7494349138276388156:2387], sessionId# [0:0:0] 2025-04-17T18:22:14.366593Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-17T18:22:14.366607Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7494349142571355831:2622], serverId# [2:7494349142571355834:2625], sessionId# [0:0:0] 2025-04-17T18:22:14.366618Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-17T18:22:14.366950Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-17T18:22:14.367020Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-17T18:22:14.368296Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-17T18:22:14.368339Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-17T18:22:14.368535Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-17T18:22:14.368549Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-17T18:22:14.368559Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-17T18:22:14.368575Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-17T18:22:14.368585Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 
not found 2025-04-17T18:22:14.369130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:22:14.369277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-17T18:22:14.369378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:22:14.369546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:22:14.369558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:22:14.369588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:22:14.369992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:22:14.370019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:22:14.370055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-17T18:22:14.370071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-17T18:22:14.370094Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:22:14.374545Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-17T18:22:14.374603Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-17T18:22:14.375828Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-17T18:22:14.375882Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-17T18:22:14.376736Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7494349142571355832:2623], serverId# [2:7494349142571355835:2626], sessionId# [0:0:0] 2025-04-17T18:22:14.376918Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-17T18:22:14.376950Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 |95.1%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] >> KqpJoinOrder::TPCDS96-ColumnStore >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] Test command err: Trying to start YDB, gRPC: 26757, MsgBus: 9987 2025-04-17T18:21:34.939267Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348968974829712:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:34.939676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eee/r3tmp/tmpnsrFDi/pdisk_1.dat 2025-04-17T18:21:35.627272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:35.627364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:35.641894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:35.645669Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26757, node 1 2025-04-17T18:21:35.880558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:35.880573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:35.880581Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:35.880673Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9987 TClient is connected to server localhost:9987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:36.646159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:38.949590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348986154699424:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:38.949694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:38.949926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348986154699436:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:38.953628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:38.997345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348986154699438:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:39.085700Z node 1 :TX_PROXY ERROR: Actor# [1:7494348990449666786:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:39.456588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.674581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.717932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.801891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.874072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:39.942222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348968974829712:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:39.942285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:40.021046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.105046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.176732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.223357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.282197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.320655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:21:40.394896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-17T18:21:40.468835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.123712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:21:41.171748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.238249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.289926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.339185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.395758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.432046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.528992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.561251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.609049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.652890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.687881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.726409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.769568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.798623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.836334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.914721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.957170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.373019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.378669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.378670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.382595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.387294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.387746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.393010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.393013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.398215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.398215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.403104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.407689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.424157Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.425727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.429514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.434786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.436587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.447161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.448441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.451116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.453749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.456511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.462060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.463056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.468026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.469465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.473419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.475224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.479064Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.480670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.484522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.490689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.493466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.495806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.499409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.501070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.506631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.507044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.513126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.515389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.520867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.526364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.526397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.538974Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.574450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.644791Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frf9t2hsfdk7twvp07q61", SessionId: ydb://session/3?node_id=1&id=MjRlY2Q3YzEtZTdkNWRlMGEtNDMxYjJkYS1jNDkxNThjOA==, Slow query, duration: 30.030102s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:14.025700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:14.025795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:14.026030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494349110708780078:6374];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-17T18:22:14.026386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 5365, MsgBus: 9629 2025-04-17T18:22:13.182665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349138628269955:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:13.183265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eab/r3tmp/tmpojtewX/pdisk_1.dat 2025-04-17T18:22:13.873517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:13.876774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:13.876905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:13.879692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5365, node 1 
2025-04-17T18:22:14.110663Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:14.110684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:14.110690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:14.110793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9629 TClient is connected to server localhost:9629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:15.120247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:15.155134Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:15.179710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:15.436248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:15.607960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.683923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:17.296270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349155808140772:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:17.296481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:17.598792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.645437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.685205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.722195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.798846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.886407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.959156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:18.065328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349160103108749:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:18.065411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:18.065602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349160103108754:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:18.068717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:18.084510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349160103108756:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:18.164277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349138628269955:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:18.164343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:18.194247Z node 1 :TX_PROXY ERROR: Actor# [1:7494349160103108811:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:19.295496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.350771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.386925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 65343, MsgBus: 22374 2025-04-17T18:22:14.400143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349140933850101:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:14.400191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ea9/r3tmp/tmpYyLBvL/pdisk_1.dat 2025-04-17T18:22:15.231015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:15.243788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:15.250316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:15.258040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65343, node 1 2025-04-17T18:22:15.530589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:15.530614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:15.530621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:15.530707Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22374 TClient is connected to server localhost:22374 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:16.349572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:16.390337Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:16.403215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:16.621303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:22:16.807727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:16.909904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:18.791223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349158113721057:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:18.791351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:19.199981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.279728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.330479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.376701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.402664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349140933850101:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:19.402717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:19.417474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.454321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.493501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.583830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349162408689015:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:19.583903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:19.586454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349162408689021:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:19.590006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:19.603736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349162408689023:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:19.659857Z node 1 :TX_PROXY ERROR: Actor# [1:7494349162408689080:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:20.755797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:20.795422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:20.828814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:20.910956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:20.949458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:20.989160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD] Test command err: Trying to start YDB, gRPC: 3429, MsgBus: 22414 2025-04-17T18:21:36.684193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348976789795998:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:36.684235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eec/r3tmp/tmpD4bhKU/pdisk_1.dat 2025-04-17T18:21:37.323440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:37.329562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:37.329648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:37.337360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3429, node 1 2025-04-17T18:21:37.600679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:37.600703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:37.600710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:37.600814Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22414 TClient is 
connected to server localhost:22414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:38.389371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:38.440877Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:40.394370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348993969665724:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.394497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.394830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348993969665736:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:40.398893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:40.412239Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:21:40.412463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348993969665738:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:40.470319Z node 1 :TX_PROXY ERROR: Actor# [1:7494348993969665789:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:40.830395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.008971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.060752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.149746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.203518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.381534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.425001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.460874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.494733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.528053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.639690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.680495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:21:41.684292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348976789795998:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:41.684348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
2025-04-17T18:21:41.710166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.431217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:21:42.495100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.526971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.556361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.625908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.678387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.713951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.753809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.791101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.833056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.915285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.957031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:21:42.997098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.037481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.069552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:21:43.101276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 202 ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.791430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.797805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.800342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.809280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.810738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.819988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.823038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.829352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.832195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.837671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.842154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.851538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.855664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.861159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.874645Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.884234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.893594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.911425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.916900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.930691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.944622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.950153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.955323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.961300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.962615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.966785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.967757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.972064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.973350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.977362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.978724Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.982765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.983957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.989062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.989065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.994950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:13.997522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.000739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.006561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.011003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.012392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.017104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.018808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.024562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.026276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:14.141456Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frg9d7dzxghfzpqcnkpmy", SessionId: ydb://session/3?node_id=1&id=NzVjMDBhN2MtNjY0N2E5MDYtNWQxNWRhNmUtYzlkM2QyZDQ=, Slow query, duration: 29.519311s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:14.382994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:14.383232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:14.383468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494349019739476281:2975];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-17T18:22:14.383832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin >> KqpNewEngine::LiteralKeys [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup >> KqpJoin::JoinConvert [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] >> KqpJoin::JoinLeftPureInnerConverted >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: Trying to start YDB, gRPC: 17507, MsgBus: 2318 2025-04-17T18:21:26.197587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348937870700880:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:26.208603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158d/r3tmp/tmps2ujEC/pdisk_1.dat 2025-04-17T18:21:26.791650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:26.791769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:26.793674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:26.824748Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17507, node 1 2025-04-17T18:21:26.939227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:26.939258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:26.939273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-17T18:21:26.939439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2318 TClient is connected to server localhost:2318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:27.713179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:27.753656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:27.908685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:28.073595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:28.150708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:30.010844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348955050571694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:30.010975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:30.452194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.561240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.643187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.696631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.738893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.795986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.855784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:30.949295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348955050572358:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:30.949428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:30.949954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348955050572363:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:30.953995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:21:30.979674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348955050572365:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:21:31.081192Z node 1 :TX_PROXY ERROR: Actor# [1:7494348959345539722:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:31.243760Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348937870700880:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:31.243958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:32.396383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:32.671049Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 65439, MsgBus: 1932 2025-04-17T18:21:33.568711Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494348966496836700:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:33.568752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158d/r3tmp/tmpaBNTe5/pdisk_1.dat 2025-04-17T18:21:33.746117Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:33.788972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:33.789055Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 65439, node 2 2025-04-17T18:21:33.791426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:33.910687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:33.910706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:33.910714Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:33.910809Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1932 TClient is connected to server localhost:1932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:34.430799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:34.457368Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:21:34.468661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:34.590126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ... 349120174720009:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.059834Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.134095Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.194518Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.249048Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.298373Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.359445Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.420592Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.539900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:09.639283Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494349120174720676:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.639407Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.639844Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7494349120174720681:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:09.644620Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:09.659711Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7494349120174720683:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:09.750979Z node 6 :TX_PROXY ERROR: Actor# [6:7494349120174720740:3558] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20541, MsgBus: 30300 2025-04-17T18:22:15.814645Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494349146112721652:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:15.814702Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00158d/r3tmp/tmpWTdOoa/pdisk_1.dat 2025-04-17T18:22:16.173689Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:16.197236Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:16.197363Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:16.199031Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20541, node 7 2025-04-17T18:22:16.410857Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:16.410883Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:16.410905Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:16.411061Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30300 TClient is connected to server localhost:30300 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:17.371713Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:17.395943Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:22:17.520065Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:17.765125Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:17.895477Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:20.818610Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7494349146112721652:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:20.819173Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:20.847610Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494349167587559915:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:20.847727Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:20.970618Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.032144Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.094698Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.141413Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.197807Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.281383Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.405515Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.564268Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494349171882527886:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.564382Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.564629Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7494349171882527891:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.569537Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:21.598239Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7494349171882527893:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:21.698114Z node 7 :TX_PROXY ERROR: Actor# [7:7494349171882527951:3557] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinConvert [GOOD] Test command err: Trying to start YDB, gRPC: 4775, MsgBus: 1058 2025-04-17T18:22:16.320011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349151319234974:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:16.320044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ea3/r3tmp/tmp6XlLpt/pdisk_1.dat 2025-04-17T18:22:17.067420Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4775, node 1 2025-04-17T18:22:17.270139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:17.270260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:17.273466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:17.323309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:17.323331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:17.323338Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:17.334319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1058 TClient is connected to server localhost:1058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:18.154438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:22:18.175317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:18.198378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:18.406825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:18.576025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:18.671002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:20.715066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349168499105884:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:20.715177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.168588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.251454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.318548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.323429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349151319234974:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:21.323634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:21.383372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.455652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.501229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.547558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:21.614235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349172794073844:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.614390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.614650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349172794073850:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.618830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:21.628883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349172794073852:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:21.712138Z node 1 :TX_PROXY ERROR: Actor# [1:7494349172794073910:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:22.946505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.988661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.028629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.400829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 65020, MsgBus: 22119 2025-04-17T18:20:50.412264Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348780157371182:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:50.414310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f40/r3tmp/tmpfdIGRI/pdisk_1.dat 2025-04-17T18:20:50.810784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:50.813008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:50.813124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:50.818475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65020, node 1 2025-04-17T18:20:51.077988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:51.078017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:51.078023Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:51.078143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22119 TClient is connected to server localhost:22119 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:51.725903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:51.774849Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:53.827930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348793042273739:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:53.828047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:53.828353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348793042273751:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:53.832260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:53.844746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348793042273753:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:53.929302Z node 1 :TX_PROXY ERROR: Actor# [1:7494348793042273804:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:54.253322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:54.466270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:54.466550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:54.466839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:54.467053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:54.467184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:54.467294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:54.467393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:54.467485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:54.467651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:54.467813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:54.467958Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:54.468102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494348797337241364:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:54.469265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:54.469305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:54.469466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:54.469574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:54.469673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:54.469759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:54.469844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:54.469930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:54.470019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:54.470106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:54.470457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:54.470581Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7494348797337241358:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:54.515127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348797337241366:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:54.515181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348797337241366:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.098326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.103425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.103868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.109919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.115799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.116715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.122314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.126402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.128021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.134519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.140295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.140797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039210;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.148250Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.152863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.159454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.166858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.172770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.176401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.185818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.186339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.195926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.199868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.205350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.209373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.219147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.223176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.228947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.232670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.242632Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.246423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.251645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.257040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.260759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.266105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.269795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.279256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.283755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.288584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.292668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.298377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.305789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.307697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.317895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.319370Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.332476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.509783Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fr4mrafcpm7g86pvn5rgr", SessionId: ydb://session/3?node_id=1&id=Yzk2YTFhZjktZWZiMTNmMmUtYzg3Y2IzM2YtMTYyMzcyMzc=, Slow query, duration: 32.811152s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:05.951393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:05.951861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:05.962242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349024970556633:9706];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:22:05.962676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15553, MsgBus: 64945 2025-04-17T18:22:17.382763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349156598807912:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:17.383171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ea1/r3tmp/tmpTW9RkQ/pdisk_1.dat 2025-04-17T18:22:18.066969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:18.067057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:18.068572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:18.113535Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15553, node 1 2025-04-17T18:22:18.322622Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:18.322640Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:18.322647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:18.322753Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64945 TClient is connected to server localhost:64945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:19.181957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:19.214695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:19.238032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:19.425038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:19.609544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:19.695358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:21.699943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349173778678740:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:21.700052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:22.064433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.111337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.186500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.237217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.274471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.327366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.378431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:22.381367Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349156598807912:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:22.383112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:22.487737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349178073646703:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:22.487951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:22.488659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349178073646708:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:22.492860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:22.505749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349178073646712:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:22.606339Z node 1 :TX_PROXY ERROR: Actor# [1:7494349178073646767:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:23.617087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.659038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.690396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.740437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.806946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.849887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] |95.2%| [TA] $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 21621, MsgBus: 8140 2025-04-17T18:22:10.675797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349126600768811:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:10.676253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eac/r3tmp/tmp6Teogj/pdisk_1.dat 2025-04-17T18:22:11.240841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:11.243816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:11.265740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:11.269748Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21621, node 1 2025-04-17T18:22:11.533725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:11.533748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:11.533755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:11.533864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8140 TClient is connected to server localhost:8140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:12.400756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:12.421422Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:12.437401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:22:12.609377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:12.810388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:12.917302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:14.827645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349143780639622:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:14.828276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:15.219568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.266011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.328275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.360798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.438633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.489292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.537129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:15.595865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349148075607583:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:15.595948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:15.596006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349148075607588:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:15.600329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:15.621279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349148075607590:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:15.624406Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349126600768811:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:15.624484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:15.681815Z node 1 :TX_PROXY ERROR: Actor# [1:7494349148075607645:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:17.044204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.077517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.122836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.218747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.261067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:17.314004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13107, MsgBus: 2966 2025-04-17T18:22:19.104561Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349165535507717:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:19.104663Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eac/r3tmp/tmpXYtSj5/pdisk_1.dat 2025-04-17T18:22:19.333081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:19.333150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:19.339169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:19.362342Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13107, node 2 2025-04-17T18:22:19.486087Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:19.486107Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:19.486114Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:19.486228Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2966 TClient is connected to server localhost:2966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:20.205383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:20.210860Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:20.224073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:22:20.335871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:20.569150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:20.688817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:23.166323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349182715378635:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:23.182294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:23.260158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.307160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.351733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.435996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.473103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.516950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.571143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:23.650393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349182715379303:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:23.650501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:23.650865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349182715379308:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:23.654363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:23.669205Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:22:23.669787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349182715379310:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:23.762028Z node 2 :TX_PROXY ERROR: Actor# [2:7494349182715379366:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:24.164448Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494349165535507717:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:24.164872Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:24.818915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:24.856963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:24.887652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:24.914853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:24.956655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:24.996373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpJoin::RightTableValuePredicate >> KqpJoinOrder::GeneralPrioritiesBug3 >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] >> KqpJoin::JoinLeftPureInner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13769, MsgBus: 64801 2025-04-17T18:21:42.875510Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349002850178934:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:42.875555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ee7/r3tmp/tmpsmK4L7/pdisk_1.dat 2025-04-17T18:21:43.642153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:43.642255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:43.648059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-17T18:21:43.654390Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13769, node 1 2025-04-17T18:21:43.885329Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:43.885351Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:43.885363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:43.885464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64801 TClient is connected to server localhost:64801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:44.728608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:44.754515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:46.798123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349020030048759:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:46.798294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:46.798833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349020030048771:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:46.802984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:46.826719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349020030048773:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:46.926543Z node 1 :TX_PROXY ERROR: Actor# [1:7494349020030048826:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:47.298078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.431657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.472094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.516413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.581808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.855094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.877791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349002850178934:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:47.877854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:47.890847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.949051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:47.989078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:48.081435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:21:48.116941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:21:48.148056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-17T18:21:48.236732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.022980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:21:49.066411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.138890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.183372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.216494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.263300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.308693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.351473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.389574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.427031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.459508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.497701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.534216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.576718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.614448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.650147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:21:49.689865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.867644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.870935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.872403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.876763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.877215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.882951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.884796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.893760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.895411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.899618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.901466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.905501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.907633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.911086Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.913521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.917099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.919421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.921701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.925516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.926814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.932250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.933159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.938302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.941116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.944722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.950918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.952933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.957969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.959508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.963113Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.965018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.968068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.970410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.976733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.980064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.981406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.985306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.987234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.990537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.997686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:21.004021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:21.005463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:21.009865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:21.010615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:21.062099Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038454;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:21.138548Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frptecndfqv6e5syt4gcc", SessionId: ydb://session/3?node_id=1&id=MjY3NzAwZjQtNmQwYTc2MTktNDQ0YWRjYzAtODEwMzY1ZWY=, Slow query, duration: 29.827212s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:21.451267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:21.451679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:21.452156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494349050094826749:3001];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-17T18:22:21.452508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] Test command err: Trying to start YDB, gRPC: 21907, MsgBus: 2211 2025-04-17T18:20:56.561740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348805297849913:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:56.561791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f2f/r3tmp/tmptzTL7e/pdisk_1.dat 2025-04-17T18:20:57.120056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:57.120180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:57.123543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:20:57.124806Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21907, node 1 2025-04-17T18:20:57.234685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:57.234713Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-17T18:20:57.234720Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:57.234819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2211 TClient is connected to server localhost:2211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:57.932396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:59.960028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348818182752262:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:59.960132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:59.960245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348818182752274:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:59.967609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:59.980528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348818182752276:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:00.086505Z node 1 :TX_PROXY ERROR: Actor# [1:7494348822477719623:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:00.410155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:00.699340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:00.699510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:00.699771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:00.699867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:00.699943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:00.700029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:00.700097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:00.700190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:00.700279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:00.700338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:00.700402Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:00.700484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348822477719882:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:00.705392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:00.705440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:00.705604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:00.705702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:00.705803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:00.705902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:00.706003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:00.706092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:00.706228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:00.706322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:00.706424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:00.706517Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7494348822477719886:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:00.731672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348822477719876:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:00.731739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348822477719876:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:00.731963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_i ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.220938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.225318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.250061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.254665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.255977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.261815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.263122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.272352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.272737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.317375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.323688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.326157Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.333088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.339854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.341521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.348752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.351847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.362123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.364767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.369331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.371471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.376902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.383758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.384137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.389829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.390103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.395571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.396473Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.401632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.401636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.407484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.407599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.413275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.413275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.419296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.420517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.425044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.425705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.431019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.434074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.442720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.444926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.449213Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.449679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.456399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:12.641359Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frb0f31nvjkee6nkc0mkc", SessionId: ydb://session/3?node_id=1&id=M2MzOTM5NmYtZjZkMDU4ODUtYmU1MDA3NTMtOTNmMjk5Mjg=, Slow query, duration: 33.425509s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:12.920524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:12.920925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:12.921637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494349024341224771:8864];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:22:12.921993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoin::JoinLeftPureInnerConverted [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2063, MsgBus: 4800 2025-04-17T18:22:25.177954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349188895419080:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:25.177982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e9b/r3tmp/tmpx4gEeA/pdisk_1.dat 2025-04-17T18:22:25.784616Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:25.784684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:25.798604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:25.839021Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2063, node 1 2025-04-17T18:22:26.048715Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:26.048783Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:26.048792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:26.048900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4800 TClient is connected to server localhost:4800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:26.789991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:26.822573Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:26.841275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:27.060781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:27.301705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:27.376477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.119154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349206075290029:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.119268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.506277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.540294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.607974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.650256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.712031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.783484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.843979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.938174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349206075290690:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.938259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.938638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349206075290696:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.943510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:29.959540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:22:29.959722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349206075290698:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:30.042437Z node 1 :TX_PROXY ERROR: Actor# [1:7494349210370258050:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:30.178390Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349188895419080:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:30.178470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:31.233918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.308788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.349875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.410716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.457982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.499812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInnerConverted [GOOD] Test command err: Trying to start YDB, gRPC: 16431, MsgBus: 26081 2025-04-17T18:22:26.340758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349192939973714:2239];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:26.553024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e9a/r3tmp/tmpM4cFOn/pdisk_1.dat 2025-04-17T18:22:26.895685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:26.895839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:26.902521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:26.923993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16431, node 1 2025-04-17T18:22:27.058912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T18:22:27.058928Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:27.058937Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:27.059058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26081 TClient is connected to server localhost:26081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:27.723590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:27.753177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:27.908056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:22:28.093915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:28.192783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:29.920017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349205824877196:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.920139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:30.257289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.289366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.324628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.369033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.404325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.475502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.552867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.639570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349210119845163:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:30.639634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:30.639825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349210119845168:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:30.643498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:30.656609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349210119845170:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:30.741162Z node 1 :TX_PROXY ERROR: Actor# [1:7494349210119845225:3555] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:31.338272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349192939973714:2239];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:31.338348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:31.681467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.763240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.801430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:41.354622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:41.354709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:41.354742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:41.354779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:41.354818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-04-17T18:18:41.354861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:41.354915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:41.354998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:41.355297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:41.428593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:41.428637Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:41.439415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:41.439935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:41.440078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:41.447767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:41.448277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:41.448773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.448926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:41.450942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.451804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:41.451842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.451988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:41.452024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:41.452051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:41.452139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:41.456786Z node 1 :HIVE INFO: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:41.555856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:41.556055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.556232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:41.556395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:41.556436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.558064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.558211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:41.558393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.558483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:41.558523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:41.558549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:41.560165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.560220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:41.560256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:41.561939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.561999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.562045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.562107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.565750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:41.567487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:41.567686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:41.568659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:41.568799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:41.568861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.569108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:41.569179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:41.569363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:41.569437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:41.571418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:41.571459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:41.571600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:41.571638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:41.571998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:41.572058Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:41.572177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:41.572213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:41.572259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:41.572302Z no ... 
ct: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false 
DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:22:33.786517Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:22:33.786783Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 298us result status StatusSuccess 2025-04-17T18:22:33.787446Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:22:33.798358Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:847:2678] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:22:33.798452Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:786:2678] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:22:33.798593Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:847:2678] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914153774683 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744914153774683 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744914153774683 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:22:33.802632Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:847:2678] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:22:33.802724Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:786:2678] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 2639, MsgBus: 2026 2025-04-17T18:20:53.670499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348795489154336:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:53.670589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f36/r3tmp/tmpLa9ywV/pdisk_1.dat 2025-04-17T18:20:54.212853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:54.235235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:54.235323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:54.238039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2639, node 1 2025-04-17T18:20:54.398701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:54.398725Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:54.398732Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:54.398833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2026 TClient is connected to server localhost:2026 WaitRootIsUp 'Root'... 
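The DescribePath results above for the async-index impl tables encode a three-generation compaction policy; converting the byte values from the dump: gen0 compacts after 8 parts (force size 134217728 B = 128 MiB), gen1 after 41943040 B = 40 MiB or 5 parts (force 536870912 B = 512 MiB at 16 parts), gen2 after 419430400 B = 400 MiB or 5 parts (force 17179869184 B = 16 GiB at 16 parts), and SizeToSplit 2147483648 B = 2 GiB governs partition splits.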
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:55.137024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:55.156525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:57.270730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348812669024187:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.270745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348812669024199:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.270837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:57.275631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:57.289073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348812669024201:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:57.350250Z node 1 :TX_PROXY ERROR: Actor# [1:7494348812669024252:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:57.611616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:57.856369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:57.856369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:57.856549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:57.856824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:57.856939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:57.857033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:57.857038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:57.857370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:57.857406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:57.857520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:57.857536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:20:57.857618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:57.857653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:57.857706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:57.857756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:57.857790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:57.857857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:57.857880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:57.857990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:57.858281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:57.858319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348812669024484:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:57.858452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:57.858582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:57.858690Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494348812669024521:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:57.892753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348812669024494:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:57.892814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348812669024494:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract ... tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.982314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.988078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.992235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:05.998081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.001889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.007823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.013591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.019378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.020447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.024701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.029704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.030159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.034863Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.040260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.045319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.050650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.050825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.055862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.059975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.068987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.071346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.078018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.080589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.089947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.091085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.100377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.103363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.111328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
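The finished_tx warnings above are emitted once per column shard participating in transaction 281474976710714, so a single DDL statement against 240-partition column tables produces hundreds of near-identical records. A minimal aggregation sketch for logs of this shape (Python; the file name is hypothetical and the record layout is taken from the lines above, not from any documented YDB tooling):

    import re
    from collections import Counter

    # One record per shard, e.g.:
    #   tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;
    #   tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
    FINISHED_TX = re.compile(
        r"tablet_id=(\d+);tx_state=TTxProgressTx::Execute;tx_current=\d+;tx_id=\d+;"
        r"fline=tx_controller\.cpp:214;event=finished_tx;tx_id=(\d+);"
    )

    def shards_per_tx(path):
        # Read the whole file at once: records can be split across
        # physical lines in captured logs, as they are in this section.
        with open(path, encoding="utf-8", errors="replace") as fh:
            text = fh.read()
        return Counter(tx_id for _tablet, tx_id in FINISHED_TX.findall(text))

    if __name__ == "__main__":
        for tx_id, n in shards_per_tx("ya_test.log").most_common():  # hypothetical path
            print(f"tx {tx_id}: finished on {n} shard(s)")

Run over a full log, this would show how many shards reported completion for tx 281474976710714, which is what ties this wall of warnings to the 240-partition column tables created by the slow query recorded below.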
2025-04-17T18:22:06.114497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.120178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.123607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.129004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.133977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.142220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.146877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.146936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.151451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.155646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.165339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.166761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.191494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.193137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.199473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.205678Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.212450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.218680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:06.426047Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fr82a2f0fhfner51w2r6h", SessionId: ydb://session/3?node_id=1&id=N2NlM2RmMWItMmIwMjQxZS1mOGE5OTZhNC03NTBlODdlNQ==, Slow query, duration: 30.222572s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:06.997030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:06.997678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:06.997729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCDS87-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD] >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] >> KqpJoin::RightTableValuePredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19939, MsgBus: 26762 2025-04-17T18:22:26.738361Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349193954115251:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:26.984076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e99/r3tmp/tmpGFqlfR/pdisk_1.dat 2025-04-17T18:22:27.298663Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:27.306479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:27.306621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:27.311641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19939, node 1 
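The KQP_SLOW_LOG record earlier in this block (TraceId 01js2fr82a2f0fhfner51w2r6h, duration 30.222572s) stores the statement text with \n escapes. Expanded for readability, the query reproduced verbatim from that record is:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The 30-second duration is plausibly dominated by creating three column tables with 240 partitions each under the asan build, which matches the finished_tx fan-out above.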
2025-04-17T18:22:27.526638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:27.526658Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:27.526664Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:27.526781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26762 TClient is connected to server localhost:26762 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:28.374464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:28.386976Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:28.401871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:28.544420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:28.744494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:28.833529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:31.017895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349215428953348:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:31.018007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:31.296040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.340676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.391436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.437788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.513533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.601625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.657355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.730558Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349193954115251:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:31.730762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:31.756808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349215428954017:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:31.756879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:31.757091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349215428954022:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:31.760678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:31.775801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349215428954024:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:31.873634Z node 1 :TX_PROXY ERROR: Actor# [1:7494349215428954081:3553] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:33.156535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.232496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.275396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.314537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.385244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.425749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS95-ColumnStore >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> OlapEstimationRowsCorrectness::TPCDS96 >> KqpJoin::JoinLeftPureInner [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 27564, MsgBus: 5656 2025-04-17T18:22:28.900312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349201000678890:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:28.901311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e96/r3tmp/tmpxI4euu/pdisk_1.dat 2025-04-17T18:22:29.411115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:29.411676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:29.411751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:29.421737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27564, node 1 2025-04-17T18:22:29.637767Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:29.637792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:29.637799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:29.638934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5656 TClient is connected to server localhost:5656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:30.622669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:30.636089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:30.642617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:30.784515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:31.071246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:31.165319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:32.862326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349218180549706:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:32.862409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.170442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.206822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.283184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.347110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.430991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.469821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.502523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.563979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349222475517666:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.564047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.564298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349222475517671:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.568343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:33.586778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349222475517673:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:33.662745Z node 1 :TX_PROXY ERROR: Actor# [1:7494349222475517728:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:33.896878Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349201000678890:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:33.896951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:34.944643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpJoin::RightTableKeyPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] Test command err: Trying to start YDB, gRPC: 2509, MsgBus: 14358 2025-04-17T18:21:51.934385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349043876857171:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:51.934766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eca/r3tmp/tmpwCbS1i/pdisk_1.dat 2025-04-17T18:21:52.604655Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:52.607547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:52.607630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:52.620528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2509, node 1 2025-04-17T18:21:52.852962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:52.852987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:52.852995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:52.853108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14358 TClient is connected to server localhost:14358 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:53.801377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:53.825348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:56.257894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349065351694184:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:56.258001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:56.258695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349065351694196:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:56.263559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:56.279166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349065351694198:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:56.346543Z node 1 :TX_PROXY ERROR: Actor# [1:7494349065351694249:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:56.677999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.828846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.859867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.892078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.924466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.929074Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349043876857171:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:56.929213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:57.097681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:21:57.143162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:21:57.195146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:21:57.266127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:21:57.320771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:21:57.392979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:21:57.439430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
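Both long-running test blocks in this section end with a KQP_SLOW_LOG record of the same shape (one appears above at 18:22:06, another below at 18:22:29). A sketch for pulling those records out of a saved log (Python; the regex is derived from the records as printed here, not from a documented format):

    import re

    # Matches records like:
    #   :KQP_SLOW_LOG WARN: TraceId: "01js2fr82a2f0fhfner51w2r6h", SessionId: ...,
    #   Slow query, duration: 30.222572s, status: STATUS_CODE_UNSPECIFIED, ...,
    #   text: "CREATE TABLE t1 (\n ... );\n", parameters: 0b
    SLOW_QUERY = re.compile(
        r'KQP_SLOW_LOG WARN: TraceId: "(?P<trace>[^"]+)",.*?'
        r'duration: (?P<duration>[\d.]+)s, status: (?P<status>\w+),.*?'
        r'text: "(?P<text>(?:[^"\\]|\\.)*)"',
        re.DOTALL,
    )

    def slow_queries(log_text):
        """Yield (trace_id, seconds, status, query_text) per slow-query record."""
        for m in SLOW_QUERY.finditer(log_text):
            # The text field is logged with \n / \" escapes; expanding via
            # unicode_escape is fine for ASCII-only SQL like the queries here.
            query = bytes(m.group("text"), "utf-8").decode("unicode_escape")
            yield m.group("trace"), float(m.group("duration")), m.group("status"), query

Applied to this section it would yield two records, both CREATE TABLE batches of roughly 30 seconds (30.222572s and 29.356882s).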
2025-04-17T18:21:57.478497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.188756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:21:58.237838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.272847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.321071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.374155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.408918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.486633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.519975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.554260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.589502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.666721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.706812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.739483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.786411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.863099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.895205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:21:58.976440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.631478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.636041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.636151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.641863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.641880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.647445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.649156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.652595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.653883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.657908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.659354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.663651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.664373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.668899Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.671054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.673783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.675750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.678826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.680760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.683925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.685935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.688811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.690903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.693524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.695893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.698340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.700365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.702756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.703573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.707626Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.707889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.713151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.713196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.718431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.718430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.723427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.723441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.729136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.730400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.734088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.735591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.739139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.740525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.743936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.745926Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:29.825824Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frzrmfen4jkxqrqske0sg", SessionId: ydb://session/3?node_id=1&id=OTQ3N2E0NS01OWI4YzMzOS00MjlmZDJjNS0zNDUzZmQwZA==, Slow query, duration: 29.356882s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:30.059611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:30.060030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:30.060505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7494349091121504183:2838];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-17T18:22:30.060872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28806, MsgBus: 4629 2025-04-17T18:21:04.006866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348836757668124:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:04.038621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f29/r3tmp/tmpTAN1t4/pdisk_1.dat 2025-04-17T18:21:04.686946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:04.725331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:04.725430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:04.726971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28806, node 1 2025-04-17T18:21:04.950597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:04.950616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-17T18:21:04.950636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:04.950727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4629 TClient is connected to server localhost:4629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:05.940937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:05.982557Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:08.064113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348858232505266:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.064133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348858232505278:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.064227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.068567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:08.085133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348858232505280:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:08.150726Z node 1 :TX_PROXY ERROR: Actor# [1:7494348858232505331:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:08.483880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:08.696132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:08.696373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:08.696656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:08.696719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:08.696762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:08.696782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:08.696871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:08.696876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:08.696956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:08.696982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:08.697034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
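The normalizer_register warnings repeated above register the same per-shard chain during TTxInitSchema, in the order printed here for tablets 72075186224037898 and 72075186224037899: Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks. A sketch to check that every shard registered an identical ordered chain (Python; pattern copied from the records above):

    import re
    from collections import defaultdict

    # Matches one registration record, e.g.:
    #   tablet_id=72075186224037899;self_id=[1:...:2352];tablet_id=72075186224037899;
    #   process=TTxInitSchema::Execute;fline=abstract.cpp:11;
    #   event=normalizer_register;description=CLASS_NAME=Granules;
    NORMALIZER = re.compile(
        r"tablet_id=(\d+);self_id=\[[^\]]+\];tablet_id=\d+;process=TTxInitSchema::Execute;"
        r"fline=abstract\.cpp:11;event=normalizer_register;description=CLASS_NAME=(\w+);"
    )

    def normalizer_chains(log_text):
        """Map each tablet_id to the ordered list of normalizers it registered."""
        chains = defaultdict(list)
        for tablet_id, class_name in NORMALIZER.findall(log_text):
            chains[tablet_id].append(class_name)
        return chains

    # Sanity check: every shard should register an identical chain.
    # Shards whose records were truncated in the captured log (the "..."
    # elisions in this section) will show partial chains and must be excluded.
    # assert len({tuple(c) for c in normalizer_chains(text).values()}) == 1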
2025-04-17T18:21:08.697093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:08.697112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:08.697187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:08.697188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:08.697269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:08.697279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:08.697358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:08.697393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:08.697488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:08.697511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:08.697620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:08.697710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348858232505586:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:08.698204Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348858232505591:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:08.732626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348858232505578:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:08.732695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348858232505578:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstra ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.224438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.225996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.236731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.243341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.245258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.248651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.250881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.253617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.256656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.258561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.261334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.266339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.266371Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.271371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.277082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.282585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.289747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.295246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.300458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.305740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.310931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.316058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.321702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.327231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.333060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.333696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.340581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.340915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.346753Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.353595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.357952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.360048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.363739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.365258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.370107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.371723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.376625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.377437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.382504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.383229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.388845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.393481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.397931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.593788Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.596383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:22.713976Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frkeyd7ngsqgdsfwp0nj5", SessionId: ydb://session/3?node_id=1&id=Y2FhMjg0MjEtZjA2NWFjYWQtMjllYzkzMjctZDM1OTYwYmM=, Slow query, duration: 34.842919s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:22.975057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:22.975518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:22.981242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494349034326200781:7739];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:22:22.981632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] |95.3%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInner [GOOD] Test command err: Trying to start YDB, gRPC: 4950, MsgBus: 2203 2025-04-17T18:22:31.053347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349215010639744:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:31.053754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e8e/r3tmp/tmpTaU1Ne/pdisk_1.dat 2025-04-17T18:22:31.768504Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:31.797719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:31.797829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:31.800836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4950, node 1 2025-04-17T18:22:32.074401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:32.074420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:32.074426Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:32.074515Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2203 TClient is connected to server localhost:2203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:32.862320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:32.883075Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:32.899940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:22:33.057427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:33.229833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:33.319690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:34.917490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349227895543266:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:34.917667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:35.269088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.315188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.345716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.394217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.435488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.488062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.520270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.612695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349232190511225:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:35.612742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:35.612947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349232190511230:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:35.616200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:35.625843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349232190511232:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:35.732076Z node 1 :TX_PROXY ERROR: Actor# [1:7494349232190511290:3554] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:36.053672Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349215010639744:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:36.053767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD] Test command err: Trying to start YDB, gRPC: 4949, MsgBus: 15847 2025-04-17T18:21:02.074915Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348831513905162:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:02.093468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f2e/r3tmp/tmpwfL6ks/pdisk_1.dat 2025-04-17T18:21:02.747752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:02.748503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:02.748582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:02.752893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4949, node 1 2025-04-17T18:21:02.982693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:02.982712Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:02.982718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:02.982806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15847 TClient is connected to server localhost:15847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:03.838703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:03.866624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:05.990523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348844398807524:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:05.990606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:05.991024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348844398807536:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:05.995675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:06.010103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348844398807538:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:06.097605Z node 1 :TX_PROXY ERROR: Actor# [1:7494348848693774885:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:06.528144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:06.790445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:06.790445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:06.790622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:06.790716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:06.790982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:06.791134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:06.791247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:06.791354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:06.791447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:06.791545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:06.791642Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:06.791758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:06.791862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:06.791972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348848693775132:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:06.794589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:06.794766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:06.794862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:06.794971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:06.795059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:06.795186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:06.795298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:06.795397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:06.795489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:06.795576Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494348848693775146:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:06.831122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348848693775152:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:06.831187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494348848693775152:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstr ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.596229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.596550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.600937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.601729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.605547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.609166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.613538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.617113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.621131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.626045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.626045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.632441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.632441Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.637825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.642459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.642831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.648400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.648413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.654078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.654330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.665977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.666690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.672990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.675584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.678741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.688001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.691369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.696520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.699782Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.710011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.710733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.717565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039220;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.721875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.732081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039224;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.736949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.741918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.743431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.752231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.752483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.757066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.757928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.762299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.763108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.768462Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.768816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:20.966431Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frhrmf7xhere6y4phwjjf", SessionId: ydb://session/3?node_id=1&id=NTc0MzZkZWQtZWI2MThkYjYtN2U3YjcyNDQtY2ZkODVlYmI=, Slow query, duration: 34.833858s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:21.277166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:21.277589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:21.278914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349093506960586:9823];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:22:21.279328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28265, MsgBus: 21097 2025-04-17T18:21:10.018500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348867427137665:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:10.018649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f1d/r3tmp/tmpDB5rYC/pdisk_1.dat 2025-04-17T18:21:10.563086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:10.591468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:10.591547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:10.594877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28265, node 1 2025-04-17T18:21:10.670675Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:10.670692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:10.670697Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:10.670796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21097 TClient is connected to server localhost:21097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:11.328153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:11.343156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:13.504975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348880312040214:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:13.505049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348880312040225:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:13.505098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:13.508865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:13.521448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348880312040228:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:13.584907Z node 1 :TX_PROXY ERROR: Actor# [1:7494348880312040279:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:13.975991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:14.219006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:14.219176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:14.219229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:14.219356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:14.219451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:14.219529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:14.219551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:14.219739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:14.219854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:14.219946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:14.220049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:21:14.220144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:14.220242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:14.220327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:14.220438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:14.220550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348884607007867:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:14.221700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:14.221876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:14.221986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:14.222097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:14.222203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:14.222290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:14.222423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:14.222530Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348884607007864:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:14.256490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348884607007859:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:14.256557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348884607007859:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abs ... tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.626215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.626215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.632148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.632147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.638195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.638194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.643782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.643782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.649031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.649031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.654610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.654610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.661041Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.661498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.667967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.669204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.673913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.683368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.686666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.689203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.692808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.695182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.698268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.700668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.704049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.706533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.709997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.710637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:22:23.714121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.715422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.718920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.721124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.725762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.726720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.732034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.732658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.738076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.738089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.743976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.743982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.750328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.750328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.756549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.756627Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.762705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.763667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.882669Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frscaahwd8s3n859rmphe", SessionId: ydb://session/3?node_id=1&id=NTk4ZjE2MjYtYjUyOTZlMWItYTY3ZTY0ZS1iODA3ZTE2Zg==, Slow query, duration: 29.951238s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:24.258385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:24.258769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:24.259822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:39.682917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, 
MaxRate# 1 2025-04-17T18:18:39.682975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:39.683003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:39.683027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:39.683055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:39.683092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:39.683131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:39.683189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:39.683401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:39.749614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:39.749663Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:39.758308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:39.758913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:39.759037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:39.766579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:39.767174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:39.767649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:39.767833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:39.770149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:39.771079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:39.771116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:39.771247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:39.771275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 
1] 2025-04-17T18:18:39.771302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:39.771388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:39.776163Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:39.849766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:39.849933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:39.850071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:39.850237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:39.850274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:39.851874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:39.851967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:39.852123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:39.852171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:39.852205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:39.852225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:39.853436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:39.853466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:39.853490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:39.854593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:39.854632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:39.854658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:39.854717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:39.856939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:39.858030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:39.858156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:39.858784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:39.858872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:39.858902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:39.859073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:39.859114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:39.859238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:39.859282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:39.860919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:39.860951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:39.861088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:39.861127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:39.861480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:39.861541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:39.861634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:39.861664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-17T18:18:39.861701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:39.861740Z no ... uccess 2025-04-17T18:22:38.637248Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:22:38.643612Z node 104 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:22:38.643886Z node 104 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 301us result status StatusSuccess 2025-04-17T18:22:38.644784Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 
500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD] Test command err: Trying to start YDB, gRPC: 64616, MsgBus: 8686 2025-04-17T18:21:04.856556Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348842823256380:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:04.856600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f22/r3tmp/tmpiXpdde/pdisk_1.dat 2025-04-17T18:21:05.648921Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:05.653274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:05.653459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:05.660005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64616, node 1 2025-04-17T18:21:05.962658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:05.962686Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:05.962698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:05.962813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8686 TClient is connected to server localhost:8686 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:06.897070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:06.926985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:08.898205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348860003126088:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.898314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.898583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348860003126100:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:08.902586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:08.914117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348860003126102:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:08.974991Z node 1 :TX_PROXY ERROR: Actor# [1:7494348860003126156:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:09.323550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:09.578708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:09.578882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:09.579141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:09.579283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:09.579388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:09.579480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:09.579589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:09.579684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:09.579779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:09.579879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:09.579983Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:09.580084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348864298093742:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:09.583426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:09.583468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:09.583626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:09.583717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:09.583795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:09.583899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:09.584001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:09.584118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:09.584214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:09.584296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:09.584393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:09.584470Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494348864298093725:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:09.629472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348864298093748:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:09.629533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348864298093748:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstra ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.345377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.350657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.350970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.356089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.356356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.361842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.362500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.367717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.368342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.373541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.380295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.387015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.391198Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.398150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.403688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.404383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.409526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.412788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.422253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.427500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.427855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.433325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.433401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.439553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.439743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.445852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.445922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.452842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.465906Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.472890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.478074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.487660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.492587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.497649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.498627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.507543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.509827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.517940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.519067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.528678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.531986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.541048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.542295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:23.550118Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:22:23.551067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:22:23.730141Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frm71458f8b7bsxgc72kq", SessionId: ydb://session/3?node_id=1&id=M2ExZTljMWItNTdjMWQ5MzQtOTg0ZmNmYWEtMjkwY2NjNmE=, Slow query, duration: 35.088720s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:22:24.031757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:22:24.032284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:22:24.032703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494349139176056311:10754];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933;
2025-04-17T18:22:24.033081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD]
Test command err:
2025-04-17T18:16:43.356657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:43.356808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:43.356950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001529/r3tmp/tmptRoO16/pdisk_1.dat 2025-04-17T18:16:43.730744Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9927, node 1 2025-04-17T18:16:43.997174Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:43.997233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:43.997266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:43.997988Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:44.000397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:44.091209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:44.091337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:44.112338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5634 2025-04-17T18:16:44.803298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:48.288687Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:48.340807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:48.340915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:48.382626Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:48.385513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:48.650099Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.650700Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.651329Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.651601Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.651657Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.651708Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.651760Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.651820Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.651903Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.843469Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:48.843573Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:48.857816Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:49.006743Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:49.068890Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:49.068976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:49.108265Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:49.109962Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:49.110202Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:49.110261Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:49.110344Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:49.110420Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:49.110481Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:49.110532Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:49.114954Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:49.156220Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:49.156357Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:49.169314Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:16:49.179349Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:16:49.179494Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:16:49.195887Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:49.222342Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:49.222404Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:49.222470Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:49.234967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:49.242725Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:49.242861Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:49.492584Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:49.694603Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:49.765449Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:50.700180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.700336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.729380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:51.025341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:51.025591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:16:51.025906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:16:51.026050Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:16:51.026342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:16:51.026541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:16:51.026681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:16:51.026814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:16:51.026942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:16:51.027069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:16:51.027219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:16:51.027349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2363:2873];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:16:51.086797Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2366:2875];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:16:51.086893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2366:2875];tablet_id=72075186224037900;process=TT ... NextTraversal. No force traversals. 2025-04-17T18:21:22.940877Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:21:24.514838Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:24.514920Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:25.902828Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:21:25.903097Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:21:27.433552Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:27.433622Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:30.033505Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:21:30.082444Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:30.082515Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:32.945481Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:21:32.945643Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:21:32.996702Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:32.996763Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:35.795263Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:35.795342Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:37.082395Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:21:38.462493Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:38.462565Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:39.798863Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:21:39.799102Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:21:41.166890Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:41.166977Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:43.878822Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:21:43.919716Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:43.926370Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-17T18:21:45.301598Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:21:45.301676Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:21:45.301709Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:21:45.301740Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:21:46.911752Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:21:46.911969Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:21:46.962733Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:46.962810Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:49.758870Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:49.758959Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:50.926037Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:21:52.255229Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:52.255301Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:53.406848Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:21:53.407083Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:21:54.620618Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:54.620677Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:57.019000Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:21:57.067014Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:57.067097Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:21:59.805712Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:21:59.805867Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:21:59.877167Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:21:59.877229Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:02.329941Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:02.330007Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:03.523357Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:04.987069Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:04.987139Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-17T18:22:06.306768Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:22:06.307043Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:07.506918Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:07.506995Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:09.446883Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:09.510962Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:09.511033Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:12.159179Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:22:12.159370Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:12.239923Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:12.239999Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:14.770919Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:14.770993Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:15.898927Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:17.223069Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:17.223140Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:18.430810Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:22:18.431037Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:19.723074Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:19.723149Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:22.194882Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:22.269686Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:22.269750Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:23.570939Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:22:23.571004Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:22:23.571035Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:22:23.571065Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:22:25.146911Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:22:25.147133Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:25.232293Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:25.232358Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-17T18:22:27.884983Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:27.885055Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:29.034843Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:30.345620Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:30.345698Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:31.458844Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:22:31.459071Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:31.469990Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T18:22:31.470100Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 206.000000s, at schemeshard: 72075186224037897 2025-04-17T18:22:31.470389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 53 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-04-17T18:22:31.498219Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:22:32.706593Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:32.706692Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:35.139107Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:35.207923Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:35.207999Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:37.789635Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T18:22:37.789884Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... 
waiting for TEvPropagateStatistics (done)
2025-04-17T18:22:37.790310Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:14354:8939]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-04-17T18:22:37.798333Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ]
2025-04-17T18:22:37.798432Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [2:14354:8939], StatRequests.size() = 1
>> KqpFlipJoin::LeftSemi_2
>> KqpJoin::CrossJoinCount
>> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull
>> KqpFlipJoin::Right_1
>> KqpJoinOrder::GeneralPrioritiesBug2
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD]
>> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin
>> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate
>> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 12335, MsgBus: 19088
2025-04-17T18:22:25.047673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349189573011745:2070];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:22:25.047709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e9c/r3tmp/tmpVzqjIZ/pdisk_1.dat
2025-04-17T18:22:25.895665Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:22:25.942907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:22:25.943008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:22:25.978941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12335, node 1
2025-04-17T18:22:26.122867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:22:26.122907Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:22:26.122918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:22:26.123028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19088
TClient is connected to server localhost:19088
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:26.869983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:26.902928Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:26.924399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:27.167900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:27.394397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:27.482092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:29.352682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349206752882691:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.352769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:29.620805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.661650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.703064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.740475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.776024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.856017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:29.933222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:30.042721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349211047850663:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:30.042835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:30.043128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349211047850668:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:30.048017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:30.053067Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349189573011745:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:30.053517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:30.069776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349211047850670:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:30.130425Z node 1 :TX_PROXY ERROR: Actor# [1:7494349211047850727:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:31.564983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:31.610431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16510, MsgBus: 23646 2025-04-17T18:22:34.211037Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349229178911805:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:34.268733Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e9c/r3tmp/tmp1ZQMD0/pdisk_1.dat 2025-04-17T18:22:34.395053Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:34.416842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:34.417078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:34.419386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16510, node 2 2025-04-17T18:22:34.623752Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:34.623775Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:34.623782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:34.623890Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23646 TClient is connected to server localhost:23646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:22:35.263410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:35.283438Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:22:35.305181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:35.415160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:35.609493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:35.697744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:37.899204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349242063815286:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:37.899293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:37.990409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.034033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.116291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.164758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.263454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.321448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.373556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.464890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349246358783256:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:38.464987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:38.465243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349246358783261:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:38.469477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:38.519166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349246358783263:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-04-17T18:22:38.579876Z node 2 :TX_PROXY ERROR: Actor# [2:7494349246358783320:3540] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T18:22:39.193026Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494349229178911805:2215];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:22:39.193086Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-17T18:22:39.646972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-17T18:22:39.723919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD]
Test command err:
Trying to start YDB, gRPC: 6857, MsgBus: 11944
2025-04-17T18:19:29.812641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348431525749832:2062];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:19:29.813733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f9d/r3tmp/tmpDG0Lw3/pdisk_1.dat
2025-04-17T18:19:30.146781Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T18:19:30.163922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:19:30.164010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:19:30.166896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6857, node 1
2025-04-17T18:19:30.243734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T18:19:30.243753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T18:19:30.243767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T18:19:30.243892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11944
TClient is connected to server localhost:11944
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:30.767732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:30.783203Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:19:32.584631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348444410652380:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:32.584657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348444410652370:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:32.584791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:32.588668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:19:32.598633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348444410652384:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:19:32.680999Z node 1 :TX_PROXY ERROR: Actor# [1:7494348444410652435:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:33.002208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.238015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:33.238016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:33.238231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:33.238531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:33.238652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:19:33.238672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:33.238784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:33.238890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:19:33.238915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:33.239022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:19:33.239047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:19:33.239212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:19:33.239326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:19:33.239433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:19:33.239570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:33.239671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:33.239794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:33.239903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:33.239993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348448705619983:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:33.240129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:19:33.240252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:19:33.240347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:19:33.240452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:19:33.240532Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7494348448705620037:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:19:33.279880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348448705620006:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:19:33.279971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348448705620006:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstr ... 20:36.001773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.003083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.008648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.017424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.021291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.023765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.028518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.030344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.036385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.042526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.047906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.052278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.057899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 
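Note on the KQP_SLOW_LOG entries: the 29.592117s entry that follows (and the 35.088720s entry earlier in this output) carry the offending statement text as a single string with \n escapes, which makes it hard to read. Purely for readability, here is that logged DDL with the escapes expanded and indentation normalized; this is a re-rendering of the captured text, with no identifiers or settings added:

CREATE TABLE t1 (
  id1 Int32 NOT NULL,
  PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
  id2 Int64 NOT NULL,
  t1_id1 Int64 NOT NULL,
  -- random_field2 Int32
  PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
  id3 Int16 NOT NULL,
  -- random_field3 Int32
  PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);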
2025-04-17T18:20:36.062082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.071589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.077357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.083298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.089473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.097187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.105693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.109196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.111532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.114351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.185153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715714; 2025-04-17T18:20:36.295995Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fngnfd48k67hy3mh1q81k", SessionId: ydb://session/3?node_id=1&id=YWI4ZjA2MWYtOWI3NGJjMmItMWFiZWM1MjQtYjNmN2MzMDg=, Slow query, duration: 29.592117s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:20:36.814698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; 2025-04-17T18:20:36.815174Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; 2025-04-17T18:20:36.816544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494348599029512266:7833];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:20:36.816949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715716; 2025-04-17T18:22:32.566630Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqz6q1fsjcw9v8ema8ch6", SessionId: ydb://session/3?node_id=1&id=YWI4ZjA2MWYtOWI3NGJjMmItMWFiZWM1MjQtYjNmN2MzMDg=, Slow query, duration: 65.438510s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$cs_ui =\n\n (select catalog_sales.cs_item_sk cs_item_sk\n\n ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund\n\n from catalog_sales as catalog_sales\n\n cross join catalog_returns as catalog_returns\n\n where cs_item_sk = cr_item_sk\n\n and cs_order_number = cr_order_number\n\n group by catalog_sales.cs_item_sk\n\n having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit));\n\n$cross_sales =\n\n (select item.i_product_name product_name\n\n ,item.i_item_sk item_sk\n\n ,store.s_store_name store_name\n\n ,store.s_zip store_zip\n\n ,ad1.ca_street_number b_street_number\n\n ,ad1.ca_street_name b_street_name\n\n ,ad1.ca_city b_city\n\n ,ad1.ca_zip b_zip\n\n ,ad2.ca_street_number c_street_number\n\n ,ad2.ca_street_name c_street_name\n\n ,ad2.ca_city c_city\n\n ,ad2.ca_zip c_zip\n\n ,d1.d_year as syear\n\n ,d2.d_year as fsyear\n\n ,d3.d_year s2year\n\n ,count(*) cnt\n\n ,sum(ss_wholesale_cost) s1\n\n ,sum(ss_list_price) s2\n\n ,sum(ss_coupon_amt) s3\n\n FROM store_sales as store_sales\n\n cross join store_returns as store_returns\n\n cross join $cs_ui cs_ui\n\n cross join date_dim d1\n\n cross join date_dim d2\n\n cross join date_dim d3\n\n cross join store as store\n\n cross join customer as customer\n\n cross join customer_demographics cd1\n\n cross join customer_demographics cd2\n\n cross join promotion as promotion\n\n cross join household_demographics hd1\n\n cross join household_demographics hd2\n\n cross join customer_address ad1\n\n cross join customer_address ad2\n\n cross join income_band ib1\n\n cross join income_band ib2\n\n cross join item as item\n\n WHERE ss_store_sk = s_store_sk AND\n\n ss_sold_date_sk = d1.d_date_sk AND\n\n ss_customer_sk = c_customer_sk AND\n\n ss_cdemo_sk= cd1.cd_demo_sk AND\n\n ss_hdemo_sk = hd1.hd_demo_sk AND\n\n ss_addr_sk = ad1.ca_address_sk and\n\n ss_item_sk = i_item_sk and\n\n ss_item_sk = sr_item_sk and\n\n ss_ticket_number = sr_ticket_number and\n\n ss_item_sk = cs_ui.cs_item_sk and\n\n c_current_cdemo_sk = cd2.cd_demo_sk AND\n\n c_current_hdemo_sk = hd2.hd_demo_sk AND\n\n c_current_addr_sk = ad2.ca_address_sk and\n\n c_first_sales_date_sk = d2.d_date_sk and\n\n c_first_shipto_date_sk = d3.d_date_sk and\n\n ss_promo_sk = p_promo_sk and\n\n hd1.hd_income_band_sk = ib1.ib_income_band_sk and\n\n hd2.hd_income_band_sk = ib2.ib_income_band_sk and\n\n cd1.cd_marital_status <> 
cd2.cd_marital_status and\n\n i_color in ('azure','gainsboro','misty','blush','hot','lemon') and\n\n i_current_price between 80 and 80 + 10 and\n\n i_current_price between 80 + 1 and 80 + 15\n\ngroup by item.i_product_name\n\n ,item.i_item_sk\n\n ,store.s_store_name\n\n ,store.s_zip\n\n ,ad1.ca_street_number\n\n ,ad1.ca_street_name\n\n ,ad1.ca_city\n\n ,ad1.ca_zip\n\n ,ad2.ca_street_number\n\n ,ad2.ca_street_name\n\n ,ad2.ca_city\n\n ,ad2.ca_zip\n\n ,d1.d_year\n\n ,d2.d_year\n\n ,d3.d_year\n\n);\n\n-- start query 1 in stream 0 using template query64.tpl and seed 1220860970\n\nselect cs1.product_name\n\n ,cs1.store_name\n\n ,cs1.store_zip\n\n ,cs1.b_street_number\n\n ,cs1.b_street_name\n\n ,cs1.b_city\n\n ,cs1.b_zip\n\n ,cs1.c_street_number\n\n ,cs1.c_street_name\n\n ,cs1.c_city\n\n ,cs1.c_zip\n\n ,cs1.syear\n\n ,cs1.cnt\n\n ,cs1.s1 as s11\n\n ,cs1.s2 as s21\n\n ,cs1.s3 as s31\n\n ,cs2.s1 as s12\n\n ,cs2.s2 as s22\n\n ,cs2.s3 as s32\n\n ,cs2.syear\n\n ,cs2.cnt\n\nfrom $cross_sales cs1 cross join $cross_sales cs2\n\nwhere cs1.item_sk=cs2.item_sk and\n\n cs1.syear = 1999 and\n\n cs2.syear = 1999 + 1 and\n\n cs2.cnt <= cs1.cnt and\n\n cs1.store_name = cs2.store_name and\n\n cs1.store_zip = cs2.store_zip\n\norder by cs1.product_name\n\n ,cs1.store_name\n\n ,cs2.cnt\n\n ,s11\n\n ,s21\n\n ,s22;\n\n\n\n-- end query 1 in stream 0 using template query64.tpl\n", parameters: 0b |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD] >> KqpJoin::RightTableKeyPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 21727, MsgBus: 19347 2025-04-17T18:22:29.184435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349207628660155:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:29.184471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e93/r3tmp/tmpZpaQ2x/pdisk_1.dat 2025-04-17T18:22:29.806938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:29.807053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:29.809507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:29.812336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21727, node 1 2025-04-17T18:22:29.982719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:29.982744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:29.982751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:29.982858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19347 TClient is connected to server localhost:19347 WaitRootIsUp 'Root'... 
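[Editor's note] For readability, here is the DDL inside the 29.592117s KQP_SLOW_LOG entry above with its \n escapes expanded - the same statement text as logged, unchanged. The 65.438510s entry that follows it logs TPC-DS query 64 (per its own trailer comment, template query64.tpl) and is too long to usefully repeat.

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The same DDL appears again later in this log in a 33.735331s slow-query entry from the CanonizedJoinOrderTPCH20 test.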
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:30.701437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:30.737530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:30.911203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:31.072587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:31.185542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:32.823174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349220513563668:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:32.823277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.319403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.362377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.395510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.434321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.516213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.596131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.649236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.744355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349224808531638:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.744433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.744806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349224808531643:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:33.749043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:33.764325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349224808531645:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:33.826154Z node 1 :TX_PROXY ERROR: Actor# [1:7494349224808531703:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:34.186313Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349207628660155:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:34.186373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:34.970407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.010271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.092010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.130701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.204049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.259934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10722, MsgBus: 62042 2025-04-17T18:22:37.235138Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349242230303587:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:37.248120Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e93/r3tmp/tmpxQFzUy/pdisk_1.dat 2025-04-17T18:22:37.427480Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:37.430121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:37.430201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:37.431635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10722, node 2 2025-04-17T18:22:37.574769Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:37.574790Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:37.574796Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:37.574914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62042 TClient is connected to server localhost:62042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:38.158397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:38.180977Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:38.188270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:38.285883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:38.457422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.574015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:41.261507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349259410174420:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.261600Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.330022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.411694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.469473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.508412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.574150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.655735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.714893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.819393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349259410175095:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.819482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.819681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349259410175100:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.823120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:41.835689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349259410175102:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:41.924726Z node 2 :TX_PROXY ERROR: Actor# [2:7494349259410175157:3546] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:42.234434Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494349242230303587:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:42.234481Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:43.499591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.547328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.604648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.678845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.754639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.833149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableKeyPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 15955, MsgBus: 18163 2025-04-17T18:22:39.005175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349247769148234:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:39.007521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e82/r3tmp/tmpBGlijN/pdisk_1.dat 2025-04-17T18:22:39.682710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:39.686172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:39.690705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:39.699303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15955, node 1 2025-04-17T18:22:39.802829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T18:22:39.802857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:39.802864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:39.803016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18163 TClient is connected to server localhost:18163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:40.564105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:40.581415Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:40.588695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:40.816750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:41.023292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:41.120391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.042851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349264949019109:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:43.042953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:43.369791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.421197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.482531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.526263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.566932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.652317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.704085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.792800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349264949019782:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:43.792886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:43.793289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349264949019787:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:43.800651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:43.815739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349264949019789:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:43.881754Z node 1 :TX_PROXY ERROR: Actor# [1:7494349264949019846:3551] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:44.003425Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349247769148234:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:44.003478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:45.019509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> KqpJoin::CrossJoinCount [GOOD] >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD] Test command err: Trying to start YDB, gRPC: 62994, MsgBus: 65465 2025-04-17T18:21:10.630774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348868386605706:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:10.678086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f1c/r3tmp/tmpPNPt6H/pdisk_1.dat 2025-04-17T18:21:11.272467Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:11.309464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:11.309564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:11.311286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62994, node 1 2025-04-17T18:21:11.577369Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:11.577387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:11.577393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:11.577498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65465 TClient is connected to server localhost:65465 WaitRootIsUp 'Root'... 
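[Editor's note] The warning/error sequence repeated in each test above - TPoolFetcherActor NOT_FOUND for the default resource pool, a TPoolCreatorActor retry after "Transaction ... completed, doublechecking", then a TX_PROXY "path exist, request accepts it" error - recurs for every node start in this section. When scanning for the actual failure, a throwaway filter can mute it. The substrings below are copied verbatim from the log; treating them as benign startup noise is an assumption made for triage, not something the log itself states:

    import sys

    # Drop recurring startup noise so the remaining WARN/ERROR lines stand out.
    # Substrings are verbatim from the entries above; classifying them as
    # benign is an assumption made for triage only.
    NOISE = (
        "Resource pool default not found or you don't have access permissions",
        "completed, doublechecking",
        "path exist, request accepts it",
    )
    for line in sys.stdin:
        if not any(s in line for s in NOISE):
            sys.stdout.write(line)

Usage, with the same hypothetical file name as before: python3 filter_noise.py < ya_log.txt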
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:12.503268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:12.558376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:14.677262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348885566475565:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:14.677331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348885566475554:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:14.677441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:14.689047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:14.705624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348885566475568:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:14.794753Z node 1 :TX_PROXY ERROR: Actor# [1:7494348885566475621:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:15.132344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:15.414077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:15.414077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:15.414317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:15.414317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:15.414563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:15.414580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:15.414724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:15.414729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:15.414840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:15.414912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:15.414968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
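[Editor's note] Three KQP_SLOW_LOG entries appear in this section (29.592117s and 65.438510s above, plus a 33.735331s one further below); pulling them out first is usually the fastest way to see where the wall-clock time went. A sketch under the same ya_log.txt assumption as before; the quoting and "duration: <seconds>s" layout are taken from the entries in this log:

    import re

    # Extract (trace id, duration) pairs from KQP_SLOW_LOG entries.
    # Non-greedy .*? keeps each match within a single entry even when
    # several log entries share one physical line, as in this dump.
    slow = re.compile(r'KQP_SLOW_LOG WARN: TraceId: "([^"]+)".*?duration: ([0-9.]+)s')
    with open("ya_log.txt", encoding="utf-8", errors="replace") as f:
        for line in f:
            for trace_id, duration in slow.findall(line):
                print(f"{float(duration):10.3f}s  {trace_id}")

Sorting that output by duration points straight at the CREATE TABLE and query64 statements quoted above.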
2025-04-17T18:21:15.415012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:15.415062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:15.415106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:15.415186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:15.415219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:15.415359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:15.415511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:15.415625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:15.415736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494348889861443186:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:15.415965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:15.416060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:15.416150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:15.416246Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7494348889861443220:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:15.453506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348889861443180:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:15.453565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494348889861443180:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.782383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.785906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.787888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.791485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.793757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.797258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.799157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.802582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.803992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.807954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.808802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.811995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.817366Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.819603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.823198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.824937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.829263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.830302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.835033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.835813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.840837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.841463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.846876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.846977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.853077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.854133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.858633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.860266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.864349Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.871029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.875579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.877037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.881754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.882630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.888637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.888683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.894469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.895026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.900554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.901518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.908800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.920008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.926251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:28.934941Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:22:28.947481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:22:29.128845Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frtt06dg8wphqjma7h4ze", SessionId: ydb://session/3?node_id=1&id=Y2JlMzZkNTMtYzg1ZGIwZDEtNjI4OGQyMjUtNjgyN2JlZTI=, Slow query, duration: 33.735331s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:22:29.590365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:22:29.590794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:22:29.591530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494349160444438191:10699];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094;
2025-04-17T18:22:29.591900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD]
Test command err:
Trying to start YDB, gRPC: 22079, MsgBus: 2503
2025-04-17T18:22:33.961217Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349224516624796:2069];send_to=[0:7307199536658146131:7762515];
2025-04-17T18:22:33.961756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e8b/r3tmp/tmp754Wzd/pdisk_1.dat
2025-04-17T18:22:34.423733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T18:22:34.423798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T18:22:34.424870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T18:22:34.467561Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 22079, node 1
2025-04-17T18:22:34.519866Z node 1 :NET_CLASSIFIER WARN:
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:34.519883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:34.519889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:34.520020Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2503 TClient is connected to server localhost:2503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:35.261284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:35.286461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:35.304352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:35.441154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:35.618574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:35.713341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:37.354113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349241696495735:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:37.354256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:37.780749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:37.823292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:37.881414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:37.951880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.020325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.095447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.167441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:38.268896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349245991463704:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:38.269020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:38.269654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349245991463709:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:38.278407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:38.295578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349245991463711:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:38.358011Z node 1 :TX_PROXY ERROR: Actor# [1:7494349245991463768:3547] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:38.966514Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349224516624796:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:38.975708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:39.512324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:39.617633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24449, MsgBus: 27858 2025-04-17T18:22:41.459930Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349258223849085:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:41.463056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e8b/r3tmp/tmpIL1rex/pdisk_1.dat 2025-04-17T18:22:41.638984Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:41.644049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:41.644123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:41.646597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24449, node 2 2025-04-17T18:22:41.779615Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:41.779635Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:41.779643Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:41.779754Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27858 TClient is connected to server localhost:27858 WaitRootIsUp 'Root'... 
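The two KQP_SLOW_LOG entries in this output (durations 33.735331s above and 30.669539s further below) carry the schema-setup statements as one escaped string. For readability, here is that query text with the \n escapes expanded into line breaks — a verbatim transcription of what the log already contains, not new code:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);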
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:42.428393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:42.435264Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:42.448426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:42.600942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:42.767548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:42.846643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:45.303246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349275403719990:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.303326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.343065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.387916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.417113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.470894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.546368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.610646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.656248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.720740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349275403720659:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.720814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.721050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349275403720664:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.724579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:45.734646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349275403720666:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:45.816560Z node 2 :TX_PROXY ERROR: Actor# [2:7494349275403720722:3540] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:46.462409Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494349258223849085:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:46.518080Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:46.867521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.983662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |95.3%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::CrossJoinCount [GOOD] Test command err: Trying to start YDB, gRPC: 12900, MsgBus: 3690 2025-04-17T18:22:41.281351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349257571301749:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:41.281389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e78/r3tmp/tmpO0S66G/pdisk_1.dat 2025-04-17T18:22:41.816758Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:41.819447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:41.819537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:41.823499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12900, node 1 2025-04-17T18:22:41.918666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:41.918679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:41.918683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:41.918751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3690 TClient is connected to server localhost:3690 WaitRootIsUp 'Root'... 
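The log reports KqpJoin::CrossJoinCount as passing but never shows its query text. As a rough illustration only — the table and column names below are assumptions, not taken from the test — a cross join with a row count in YQL has this shape:

-- Hypothetical sketch: t1 and t2 stand in for whatever tables the test creates.
-- A cross join pairs every row of t1 with every row of t2, so COUNT(*) returns
-- the product of the two table sizes.
SELECT COUNT(*) AS row_count
FROM t1
CROSS JOIN t2;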
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:42.767916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:42.782893Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:42.797566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.003513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.191003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.258816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:45.231848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349274751172700:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.231940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.562988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.607012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.652517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.732267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.786390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.865980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.912114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.013898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349279046140666:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.013968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.014307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349279046140671:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.018157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:46.041102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349279046140673:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:46.130013Z node 1 :TX_PROXY ERROR: Actor# [1:7494349279046140730:3552] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:46.294388Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349257571301749:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:46.294467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:47.323344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.416517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.468988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 12337, MsgBus: 9101 2025-04-17T18:22:42.652924Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349264310187005:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:42.653301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e71/r3tmp/tmp0Jqycj/pdisk_1.dat 2025-04-17T18:22:43.420893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:43.420970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:43.433941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:43.436499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12337, node 1 2025-04-17T18:22:43.635767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:43.635792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:43.635799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:43.635908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9101 TClient is connected to server localhost:9101 WaitRootIsUp 'Root'... 
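Similarly, KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin passes without its query appearing in the log. A left semi join emits each left row at most once even when the right side holds duplicate join keys, which is presumably the property under test; a minimal YQL sketch with assumed names:

-- Hypothetical sketch: column names are illustrative assumptions.
-- Each matching row of l appears once, no matter how many rows of r share the key.
SELECT l.id1
FROM t1 AS l
LEFT SEMI JOIN t2 AS r
ON l.id1 = r.t1_id1;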
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:44.506946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.530325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:44.547688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.704714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.890241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.978238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:46.698989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349281490057829:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.699083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:47.023301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.075793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.112439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.176566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.240195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.317478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.375775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.466554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349285785025793:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:47.466639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:47.466935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349285785025798:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:47.471154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:47.486303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349285785025800:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:47.571069Z node 1 :TX_PROXY ERROR: Actor# [1:7494349285785025856:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:47.610629Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349264310187005:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:47.610754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:48.724298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.776233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |95.4%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> KqpJoinOrder::TPCDS92+ColumnStore [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpFlipJoin::Right_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7330, MsgBus: 24083 2025-04-17T18:21:15.102549Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348890065416728:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:15.109741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f17/r3tmp/tmpWeb767/pdisk_1.dat 2025-04-17T18:21:15.705235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:15.728154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:15.728253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:15.732744Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7330, node 1 2025-04-17T18:21:15.930947Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:15.930971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:15.930977Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:15.931096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24083 TClient is connected to server localhost:24083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:16.812453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:16.843318Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:18.879951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348902950319277:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:18.880120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:18.880570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348902950319289:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:18.884217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:18.897147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348902950319291:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:18.962697Z node 1 :TX_PROXY ERROR: Actor# [1:7494348902950319342:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:19.355012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:19.662500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:19.662676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:19.662943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:19.663059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:19.663179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:19.663288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:19.663391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:19.663486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:19.663602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:19.663725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:19.663827Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:19.663928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348907245286884:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:19.680182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:19.680268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:19.680468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:19.680570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:19.680668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:19.680767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:19.680866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:19.680952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:19.681037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:19.681126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:19.681218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:19.681305Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494348907245286888:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:19.723574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348907245286906:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:19.723626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348907245286906:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstr ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.102649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.109109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.109231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.115821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.115822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.122280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.122281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.128011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.128063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.134037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.134041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.140278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.140360Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.146431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.146564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.152256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.152263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.157088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.157874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.162166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.163790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.165883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.169889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.170025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.174839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.176105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.179220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.182952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.183153Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.190111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.190317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.196371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.196380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.203124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.203183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.209083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.209148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.215802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.215805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.222378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.224294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.228756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.230201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.234818Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.278903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:30.358457Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2frz07dny8a7ax7n70mzdj", SessionId: ydb://session/3?node_id=1&id=M2YwYjk4NTktZjFhOWNhOWYtN2UzMTM2ZGEtMTExNGZlNTI=, Slow query, duration: 30.669539s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:30.856227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:30.858732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349134878599955:9322];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:22:30.859127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:30.859985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Right_2 [GOOD] Test command err: Trying to start YDB, gRPC: 9786, MsgBus: 19689 2025-04-17T18:22:42.107683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349261970622835:2235];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:42.107786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e76/r3tmp/tmpmTcRHq/pdisk_1.dat 2025-04-17T18:22:42.745336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:42.745427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:42.764090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:42.845559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9786, node 1 2025-04-17T18:22:43.022637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-04-17T18:22:43.022656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:43.022662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:43.022767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19689 TClient is connected to server localhost:19689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:43.819138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.849685Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:43.868525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.075232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.271428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.376999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:46.082991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349279150493624:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.083116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.404964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.476952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.532556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.604220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.641751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.681420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.733435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.837894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349279150494293:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.837996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.838383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349279150494298:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.842677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:46.867491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349279150494300:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:46.947365Z node 1 :TX_PROXY ERROR: Actor# [1:7494349279150494356:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:47.075499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349261970622835:2235];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:47.075815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:48.167674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.246061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.300126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.338917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1258, MsgBus: 10332 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e76/r3tmp/tmpbjMECg/pdisk_1.dat 2025-04-17T18:22:50.478433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:22:50.519568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:50.519640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:50.520088Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:50.527571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1258, node 2 2025-04-17T18:22:50.666730Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:50.666749Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:50.666755Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:50.666855Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10332 TClient is connected to server localhost:10332 WaitRootIsUp 'Root'... 
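The DDL quoted in the KQP_SLOW_LOG entry above (the 30.669539s slow query), with the logged \n escapes expanded for readability; the statements are exactly as captured in the log, nothing added:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 on three column-store tables is consistent with the long runs of column-shard tablets above (tablet_id=72075186224039xxx), each reporting event=finished_tx for the same tx_id.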
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:51.297923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:51.368361Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:22:51.401328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:51.514026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:51.718486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:51.809922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:54.072950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349315160428882:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.073058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.157237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.236629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.299167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.353083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.408657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.467459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.549345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.623652Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349315160429550:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.623736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.623974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349315160429555:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.627450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:54.651119Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-04-17T18:22:54.654330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349315160429557:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:22:54.722396Z node 2 :TX_PROXY ERROR: Actor# [2:7494349315160429614:3546] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:55.776788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:55.809704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:55.846719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:55.896744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 4380, MsgBus: 61787 2025-04-17T18:22:41.158934Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349255863624195:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:41.167115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e79/r3tmp/tmp2RAJ4S/pdisk_1.dat 2025-04-17T18:22:41.891097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:41.891198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:41.895033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:41.900729Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4380, node 1 2025-04-17T18:22:42.286629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:42.286663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:42.286675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:42.286813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61787 TClient is connected to server localhost:61787 WaitRootIsUp 'Root'... 
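The KqpFlipJoin::LeftSemi_3 case that starts above exercises left-semi join plans. Purely as an illustration (the test's own queries are not captured in this log), a left semi join written against tables shaped like the logged t1/t2 DDL would look roughly like:

    -- Illustrative sketch only; the table and column names come from the
    -- logged DDL, but the query itself is not from this log.
    SELECT a.id1
    FROM t1 AS a
    LEFT SEMI JOIN t2 AS b ON a.id1 = b.t1_id1
    ORDER BY a.id1;
    -- Note: in the logged DDL id1 is Int32 and t1_id1 is Int64, so a real
    -- query may need an explicit CAST on one side of the join key.

A left semi join returns each left-side row at most once when at least one match exists on the right, which is why such plans are attractive for EXISTS-style filters.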
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:43.297450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.320306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:22:43.504397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.728967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.814195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:45.508414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349273043495130:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.508514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:45.780516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.825679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.911992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.968258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.006575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.056926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.116587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.162388Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349255863624195:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:46.162575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:46.258503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349277338463098:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.258595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.259053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349277338463103:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.264345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:46.281092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349277338463105:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:46.361899Z node 1 :TX_PROXY ERROR: Actor# [1:7494349277338463163:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:47.666891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.722741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.759404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.795302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15083, MsgBus: 13445 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e79/r3tmp/tmpipEQeW/pdisk_1.dat 2025-04-17T18:22:49.850386Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:22:49.859729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:49.859801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:49.863130Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:49.886746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15083, node 2 2025-04-17T18:22:50.034705Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:50.034729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:50.034736Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:50.034838Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13445 TClient is connected to server localhost:13445 WaitRootIsUp 'Root'... 
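The recurring sequence above — "Resource pool default not found or you don't have access permissions" warnings, an ESchemeOpCreateResourcePool suboperation, a "doublechecking" retry, then a TX_PROXY "path exist, request accepts it" message — appears to be the workload service lazily creating the default pool at /Root/.metadata/workload_manager/pools/default, with the final "path exist" error being the benign outcome of the doublecheck rather than a real failure. As a hedged sketch of what that pool corresponds to in DDL (the CREATE RESOURCE POOL statement is YDB's workload-manager syntax, but the settings shown are illustrative assumptions, not values taken from this log):

    -- Sketch only: the pool settings below are assumed for illustration.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1, -- assumption: no concurrency cap
        QUEUE_SIZE = -1              -- assumption: unbounded queue
    );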
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:50.735779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:50.748572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:50.822520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:51.025153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:51.109648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:53.797880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349307559150439:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:53.797976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:53.912932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:53.978926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.064646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.113989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.156989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.214830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.282645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:54.390649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349311854118401:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.390766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.390971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349311854118407:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:54.394855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:54.407611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349311854118409:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:54.469498Z node 2 :TX_PROXY ERROR: Actor# [2:7494349311854118463:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:55.488362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:55.536130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:55.592241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:55.689941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] Test command err: Trying to start YDB, gRPC: 15325, MsgBus: 5146 2025-04-17T18:21:19.590345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348906699799827:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:19.591155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f11/r3tmp/tmptsRubN/pdisk_1.dat 2025-04-17T18:21:20.207550Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:20.231243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:20.231347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:20.233179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15325, node 1 2025-04-17T18:21:20.402932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:20.402958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:20.402981Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:20.403083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5146 TClient is 
connected to server localhost:5146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:21.141247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:23.200591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348923879669670:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:23.200683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348923879669682:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:23.200766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:23.204749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:23.217016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348923879669684:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:23.311004Z node 1 :TX_PROXY ERROR: Actor# [1:7494348923879669735:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:23.593142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:23.896645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:23.899502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:23.899664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:23.899898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:23.900000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:23.900115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:23.900237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:23.900338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:23.900444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:23.900536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:23.900634Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:23.900725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:23.900816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348923879670017:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:23.901149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:23.901384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:23.901531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:23.901649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:23.901736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:23.901821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:23.901905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:23.901990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:23.902106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:23.902246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:23.902332Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494348923879670015:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:23.947420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348923879670019:2359];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:23.947485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348923879670019:2359];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:23.947764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_i ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.671463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.677851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.677863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.683270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.685839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.688650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.693276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.693909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.699161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.700243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.704682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.707729Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.709900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.715046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.715051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.720259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.721910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.725457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.730374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.730709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.740181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.740186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.745641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.747193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.751360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.759418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.766642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.767271Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.773206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.773987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.779858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.787797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.791455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.797797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.799454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.807816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.810847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.818518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.825335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.825421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.832759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.832845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.837903Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.839469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.910685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:35.977635Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fs3vb88y893bhbqtg1ynd", SessionId: ydb://session/3?node_id=1&id=ZmVmNjMzNDQtZjdiOTRjZDgtYzE3YzFmYTEtYTQ3MDg5OGU=, Slow query, duration: 31.326004s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:36.292819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:36.293528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494349194462664532:10652];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:22:36.293603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:36.294613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] Test command err: Trying to start YDB, gRPC: 31520, MsgBus: 9497 2025-04-17T18:21:32.940551Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348960786778835:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:32.941081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef3/r3tmp/tmpcFifQs/pdisk_1.dat 2025-04-17T18:21:33.710673Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31520, node 1 2025-04-17T18:21:33.854369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:33.882321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:33.943187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:34.110692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:34.110711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:34.110739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:34.110842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9497 TClient is connected to server localhost:9497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:34.999855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:35.054325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:37.043853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348982261615797:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:37.043990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:37.044279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348982261615809:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:37.048542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:37.063064Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:21:37.063366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348982261615811:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:37.142231Z node 1 :TX_PROXY ERROR: Actor# [1:7494348982261615862:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:37.462339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:37.783949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:37.784210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:37.784438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:37.784553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:37.784680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:37.784813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:37.784916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:37.785048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:37.785172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:37.785273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:37.785362Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:37.785481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494348982261616209:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:37.788332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:37.788385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:37.788561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:37.788666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:37.788756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:37.788838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:37.788919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:37.789026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:37.789126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:37.789245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:37.789361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:37.789463Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494348982261616191:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:37.858047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348982261616110:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:37.858104Z node ... tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.259736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.267400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.267434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.281316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.281464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.288439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.293132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.298698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.302425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.310674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.314709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.320835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.324726Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.334981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.338570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.345268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.348189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.351536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.355486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.359031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.367058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.373771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.375705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.382991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.386122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.391918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.406203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.420209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.428802Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.430091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.435459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.442446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.446440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.454000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.454730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.462007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.464991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.469266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.472722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.476281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.481331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.483862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.490026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.492074Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.497605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.501264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:50.721248Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fsh47599vv713kbrgs9nf", SessionId: ydb://session/3?node_id=1&id=ZThiMzVmMDktZTViYzdjYmQtNTYyZGNhZmQtYzE1MGQwMWY=, Slow query, duration: 32.473860s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:51.007690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:51.007818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:51.008381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] Test command err: Trying to start YDB, gRPC: 6546, MsgBus: 2368 2025-04-17T18:21:30.642696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348954008831251:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000efa/r3tmp/tmp05lY3O/pdisk_1.dat 2025-04-17T18:21:30.956049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:21:31.249396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:31.249500Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:31.264439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:31.304042Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6546, node 1 2025-04-17T18:21:31.495415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:31.495432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:31.495438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:31.495542Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2368 TClient is connected to server localhost:2368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:32.430950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.443558Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:34.847613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348971188700935:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.847714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348971188700924:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.847805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.860150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:34.873448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348971188700938:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:34.959064Z node 1 :TX_PROXY ERROR: Actor# [1:7494348971188700989:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:35.299124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.538785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.538951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:35.539174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:35.539286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:35.539399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:35.539525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:35.539634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:35.539766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:35.539873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:35.539959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:35.540057Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:35.540150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494348975483668555:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:35.567499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.567558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:35.567764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:35.567861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:35.567978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:35.568089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:35.568184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:35.568270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:35.568374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:35.568461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:35.568549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:35.568652Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494348975483668543:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:35.572714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348975483668561:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.572772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494348975483668561:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.604367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.604367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.613847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.615140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.624707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.627580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.633203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.637321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.647118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.648294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.653248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.657588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.668223Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.670528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.680722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.683700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.692794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.696134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.700004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.701078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.705544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.713642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.716045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.725655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.727601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.738246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.742812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.749666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.754453Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.755405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.759734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.760913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.766509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.767713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.773141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.773441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.781721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.783246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.788271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.790989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.795016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.802644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.807141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.809332Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:48.813969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.009370Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fsf1j2jm6ntvrb1zz6sef", SessionId: ydb://session/3?node_id=1&id=MWM4MTY2ZTItNDU3ZjIxZTQtNDcxMjRiYTYtZTIzZjZlOWQ=, Slow query, duration: 32.894507s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:49.301446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:49.301844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:49.302577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349203116980979:9190];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:22:49.302963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] Test command err: Trying to start YDB, gRPC: 25811, MsgBus: 19934 2025-04-17T18:21:31.376139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348957087629055:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:31.376192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ef9/r3tmp/tmpqrSmR1/pdisk_1.dat 2025-04-17T18:21:32.147398Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:32.170536Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:32.170643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:32.181195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25811, node 1 2025-04-17T18:21:32.409727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:32.409752Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:32.409758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:32.409846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19934 TClient is connected to server localhost:19934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:32.990504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:35.125768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348974267498771:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.125871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.126248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348974267498783:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:35.129905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:35.147417Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:21:35.148159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348974267498785:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:35.218991Z node 1 :TX_PROXY ERROR: Actor# [1:7494348974267498836:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:35.543492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:35.796263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.796430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:35.796655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:35.796758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:35.796846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:35.796941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:35.797052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:35.797158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:35.797261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:35.797355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:35.797447Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:35.797531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494348974267499111:2357];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:35.798805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.798848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:35.799020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:35.799113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:35.799199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:35.799285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:35.799367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:35.799450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:35.799539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:35.799621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:35.799723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:35.799817Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494348974267499115:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:35.829288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348974267499113:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.829363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494348974267499113:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.816746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.822521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.828271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.833616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.839506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.845401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.851036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.856962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.862480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.869214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.874391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.880450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.886461Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.886528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.892891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.892907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.901355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.901772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.907402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.907701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.913122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.919745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.919745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.925957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.934207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.939200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.943889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.951109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.955555Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.961597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.961817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.968621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.968624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.974826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.976582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.982561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.983671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.988735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.989508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.994643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:45.995537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:46.002436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:46.009203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:46.011101Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:46.015120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:46.314404Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fsewc1ekkwt1cbjk0z80c", SessionId: ydb://session/3?node_id=1&id=ZWYxYjA2YWMtYWNmM2MxMWUtYTE1Y2YyMjctM2Y2ZTJjOGQ=, Slow query, duration: 30.365355s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:46.638782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:46.639197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:46.640866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349201900813406:9429];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:22:46.641220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8826, MsgBus: 9104 2025-04-17T18:20:35.729307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348717973175487:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:20:35.729384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f5b/r3tmp/tmpvpWuMc/pdisk_1.dat 2025-04-17T18:20:36.194449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:20:36.213057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:20:36.213205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:20:36.218264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8826, node 1 2025-04-17T18:20:36.308125Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:20:36.308154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:20:36.308172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:20:36.308722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9104 TClient is connected to server localhost:9104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:20:36.837035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:20:36.852706Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:20:38.901904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348730858078039:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:38.901918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348730858078031:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:38.902014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:20:38.907090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:20:38.923210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:20:38.923638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348730858078045:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:20:39.000201Z node 1 :TX_PROXY ERROR: Actor# [1:7494348730858078096:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:20:39.312619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:20:39.515926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:39.515955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:39.516083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:39.516347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:39.516664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:20:39.516701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:39.516822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:39.516876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:20:39.516920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:39.516985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:20:39.517006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:20:39.517290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:20:39.517392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:20:39.517471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:20:39.517561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:39.517670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:39.518009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:20:39.518130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:20:39.518265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:39.518356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:39.518462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494348735153045657:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:39.521270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:20:39.521501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:20:39.521643Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348735153045667:2356];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:20:39.550854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494348735153045651:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:20:39.550934Z node 1 ... state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.147185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.150644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.153013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.155938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.158337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.161354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.163885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.167777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.169549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.172659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.177162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.178035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.187504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:21:46.191075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.197669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.197787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.205587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.211819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:21:46.443844Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fqm9ka9shqa2z4dt2vzj0", SessionId: ydb://session/3?node_id=1&id=MjM0ODc2OWQtM2UwNjQ3ZjItZjgzODU4ZDgtY2Q0ZTE3Yzg=, Slow query, duration: 30.487887s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:21:46.926868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:46.927323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:21:46.927628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494348992851141461:11074];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:21:46.927949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:57.755946Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fsjqacnfxfjvdnmxe0twy", SessionId: ydb://session/3?node_id=1&id=MjM0ODc2OWQtM2UwNjQ3ZjItZjgzODU4ZDgtY2Q0ZTE3Yzg=, Slow query, duration: 37.872800s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = 
time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and 
household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] Test command err: Trying to start YDB, gRPC: 9041, MsgBus: 18290 2025-04-17T18:21:29.946764Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348950740326877:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:29.947185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f01/r3tmp/tmpM2z2c4/pdisk_1.dat 2025-04-17T18:21:30.877892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:30.885837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:30.885925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:30.899499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9041, node 1 2025-04-17T18:21:31.180673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:31.180698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:31.180709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:31.180823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18290 TClient is connected to server localhost:18290 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:32.156873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:32.218463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:34.356166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348972215163885:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.356237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348972215163893:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.356314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:34.364393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:34.376580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348972215163899:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:34.443031Z node 1 :TX_PROXY ERROR: Actor# [1:7494348972215163950:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:34.791354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:34.935824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494348950740326877:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:34.935885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:21:35.075727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.075916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:35.076107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:35.076295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:35.076406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:35.076770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:35.076875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:35.076997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:35.077114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:35.077236Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:35.077359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:35.077465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494348972215164181:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:35.100242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:35.100356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:35.100564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:35.100701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:35.100820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:35.100928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:35.101019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:35.101138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:35.101240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:35.101346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:35.101483Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:35.101599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494348972215164187:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:35.111520Z node 1 :TX_ ... -17T18:22:49.140332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.146959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.148781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.152878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.154313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.158715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.159861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.164317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.165333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.169825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.175303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.181016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.181787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.188482Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.192529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.195611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.200827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.202426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.206048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.207702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.212060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.214094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.217649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.219566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.223144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.225704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.228743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.231304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.234508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.236784Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.239413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.241858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.244179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.247719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.248757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.252715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.253374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.258479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.258665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.263650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.412700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:49.445473Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fsd871peabq4rnhv5fw2m", SessionId: ydb://session/3?node_id=1&id=ZTFhNGRhZjktYmU0YmFhMDYtN2E0N2JjOC1hZmU0MmY3Mw==, Slow query, duration: 35.165569s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:49.791818Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:49.792250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:49.792749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494349152603826046:7607];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-17T18:22:49.793074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:50.714538Z node 1 :KQP_YQL WARN: TraceId: 01js2ftg7j171tdcqy8kts4jnq, SessionId: CompileActor 2025-04-17 18:22:50.669 WARN ydb-core-kqp-ut-join(pid=634958, tid=0x00007F58F91A6640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed 2025-04-17T18:22:53.144534Z node 1 :KQP_YQL WARN: TraceId: 01js2ftjmr7m2gfgrmbfs3dkwr, SessionId: CompileActor 2025-04-17 18:22:53.144 WARN ydb-core-kqp-ut-join(pid=634958, tid=0x00007F58F8997640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 6686, MsgBus: 27098 2025-04-17T18:22:21.802372Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349170894482422:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:21.802506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e9f/r3tmp/tmplGcvN9/pdisk_1.dat 2025-04-17T18:22:22.491903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:22.504087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:22.504172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:22.508507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6686, node 1 2025-04-17T18:22:22.794654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:22.794673Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-04-17T18:22:22.794679Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:22.794776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27098 TClient is connected to server localhost:27098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:23.385574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:23.399140Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:25.236844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349188074352056:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:25.236960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:25.237386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349188074352068:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:25.240907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:25.252588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349188074352070:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:25.357921Z node 1 :TX_PROXY ERROR: Actor# [1:7494349188074352121:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:25.725995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:25.915535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:25.960587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:25.992335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.026625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.201032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.287506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.322630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.360536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.398034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.475400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.536132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.604563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:26.775758Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349170894482422:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:26.775833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:27.282069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:27.317953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.355477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.395827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.432130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.469834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.513208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.584269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.618883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.656782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.696376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.731109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.783672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.834617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.885549Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.929463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:22:27.979482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.547806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.549082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.553149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.555860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.558172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.561444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.563031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.565967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.568280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.571824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.573248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.577009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.577955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.582923Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.585964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.588468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.593662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.598492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.598652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.603709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.604456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.609660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.611387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.615161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.617125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.620543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.623397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.626386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.630352Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.635000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.636099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.641354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.641660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.646929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.648405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.651981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.652731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.657341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.657804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.662192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.664919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.670340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.678401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.683467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.718199Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:22:57.806576Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fsvxja270pfkar0fcycny", SessionId: ydb://session/3?node_id=1&id=OThmNzc4NjAtOWZlNGQ3ZjgtNTNmYmY5MjQtOGRkMmU1ODk=, Slow query, duration: 28.507594s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:22:58.354903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:58.355347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:22:58.355976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494349312628435465:7001];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-17T18:22:58.356312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] Test command err: Trying to start YDB, gRPC: 4290, MsgBus: 10312 2025-04-17T18:22:29.191343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349205830800593:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:29.191385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e98/r3tmp/tmp2uYJjx/pdisk_1.dat 2025-04-17T18:22:29.737437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:29.737542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:29.747134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-17T18:22:29.790961Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4290, node 1 2025-04-17T18:22:29.893550Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:29.893572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:29.893583Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:29.893706Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10312 TClient is connected to server localhost:10312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:30.613443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:30.629411Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:32.741438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349218715703149:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:32.741562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:32.741783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349218715703161:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:32.745528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:32.770386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349218715703163:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:32.846695Z node 1 :TX_PROXY ERROR: Actor# [1:7494349218715703214:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:33.125919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.268282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.314612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.401753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.472229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.696858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.741989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.780146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.814301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.846145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.883214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.927945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:22:33.972228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:34.213392Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349205830800593:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:34.213564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:34.946301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:34.986669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.059723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.099917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.172153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.204956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.264475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.297567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.366168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.399004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.431365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.460575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.503315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.542664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.584097Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.618134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:22:35.644893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.197092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.202682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.203301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.208997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.209513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.216146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.219658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.225017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.230999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.231444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.236938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.240554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.242609Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.248193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.253602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.258886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.259187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.270134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.277389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.282877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.287882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.293210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.296319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.305840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.306967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.316063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.319616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.325500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.329244Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.339110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.342847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.347992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.349935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.358553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.370289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.375439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.380571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.385527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.390425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.395334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.400086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.400544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.406459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.407814Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.412083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.413060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:06.525450Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ft3b68p40dnav8jhve2f7", SessionId: ydb://session/3?node_id=1&id=NjcwNmQ1MDAtNTJlMWRhMzMtYzgwZjg1NDYtZThhZTFlNDQ=, Slow query, duration: 29.622533s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:06.870627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:06.870653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:06.871457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> 
KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12814, MsgBus: 5148 2025-04-17T18:22:35.842545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349233196335395:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:35.843341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e8a/r3tmp/tmpx9TFPh/pdisk_1.dat 2025-04-17T18:22:36.409101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:36.409187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:36.411527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:36.412134Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12814, node 1 2025-04-17T18:22:36.536465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:36.536485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:36.536497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:36.536604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5148 TClient is connected to server localhost:5148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:37.237654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:22:37.263443Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:39.471721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349250376205244:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:39.471846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:39.478310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349250376205256:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:39.486415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:39.506393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349250376205258:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:39.573392Z node 1 :TX_PROXY ERROR: Actor# [1:7494349250376205309:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:39.903338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.015243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.052781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.087946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.116681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.332093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.378214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.427069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.520692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.599082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.650491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.728411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.774323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.844002Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349233196335395:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:40.844075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:41.520854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:41.578162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.617736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.651887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.733772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.781338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.825511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.862645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.906722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.943780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.992369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.072063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.112242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.145277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.197825Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.233221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.307146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.365560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.367879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.373160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.376055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.389095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.391077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.394898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.402985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.408705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.409481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.418436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.423018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.428374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.433623Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.436154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.441285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.450230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.455006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.460518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.463528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.471890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.476862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.482147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.484981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.492205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.496286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.505463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.507889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.513076Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.517728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.526891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.528412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.533709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.540117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.549699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.563192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.575188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.592463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.603205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.605906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.610431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.616201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.616471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.646348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.646711Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:13.710442Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fta3p57qtj90n31w1ktef", SessionId: ydb://session/3?node_id=1&id=NWRmNmI1ZmMtN2Y2NDBmZmUtMWFlZjQ5OGUtYTYzOGM5ODk=, Slow query, duration: 29.879387s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:13.984593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:13.984988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:13.985222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7494349323390666888:4601];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-04-17T18:23:13.985583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10389, MsgBus: 21582 2025-04-17T18:21:51.791796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349041788214453:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:51.795377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ecf/r3tmp/tmpdD745A/pdisk_1.dat 2025-04-17T18:21:52.513660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:52.513899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:52.513975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:52.522948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10389, node 1 2025-04-17T18:21:52.834708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:52.834728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:52.834736Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:52.834844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21582 TClient is connected to server localhost:21582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:53.849204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:55.957025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349058968084233:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:55.957126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:55.957542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349058968084245:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:55.961688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:55.973996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349058968084247:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:56.047396Z node 1 :TX_PROXY ERROR: Actor# [1:7494349063263051594:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:56.429962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.730550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:56.730712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:56.730931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:56.730998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:56.731056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:56.731120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:56.731186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:56.731256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:56.731314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:56.731403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:56.731481Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:56.731541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494349063263051852:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:56.754703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:56.754763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:56.755021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:56.755124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:56.755241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:56.755344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:56.755429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:56.755527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:56.755631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:56.755721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:56.755809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:56.755898Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494349063263051872:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:56.776847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349063263051876:2361];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:56.776899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349063263051876:2361];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:56.777099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;sel ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.124194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.130044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.133964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.143919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.148499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.154018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.158865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.168371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.170676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.180335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.184195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.189498Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.193754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.202832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.206959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.212344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.216844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.226358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.230846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.235932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.240819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.245873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.255268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.259920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.265194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.270600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.276101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.281077Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.287558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.294480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.301290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.303864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.311190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.325409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.339564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.349727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.363955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.374676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.390768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.400683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.405884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.415077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.422618Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.424974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.431134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.654156Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ft32ebq4adqb9x0q7w2kx", SessionId: ydb://session/3?node_id=1&id=ZTdhNDBiZjItNWQxMGE5OWQtMmM5ODFiYzItNjJlZDczZTI=, Slow query, duration: 34.030955s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:10.945468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:10.945853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349290896364864:9340];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:23:10.946225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:10.947394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD] Test command err: Trying to start YDB, gRPC: 1651, MsgBus: 15577 2025-04-17T18:21:43.575708Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349010765547018:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:43.576113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ee5/r3tmp/tmpDWu97Z/pdisk_1.dat 2025-04-17T18:21:44.297989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:44.298103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:44.299837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-04-17T18:21:44.359911Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1651, node 1 2025-04-17T18:21:44.482711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:44.482733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:44.482739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:44.482837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15577 TClient is connected to server localhost:15577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:45.367195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:45.405278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:47.723300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349027945416731:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:47.723427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:47.723910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349027945416743:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:47.728158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:47.745802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349027945416745:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:47.818821Z node 1 :TX_PROXY ERROR: Actor# [1:7494349027945416796:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:48.104359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:48.430355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:48.432809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:48.432981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:48.433248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:48.433352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:48.433437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:48.433540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:48.433633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:48.433724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:48.433818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:48.433933Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:48.434033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:48.434125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494349032240384364:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:48.442533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:48.442745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:48.442858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:48.442975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:48.443082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:48.443199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:48.443290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:48.443394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:48.443485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:48.443572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:48.443670Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7494349032240384356:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:48.486679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349032240384358:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:48.486730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349032240384358:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstr ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.388440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.393926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.401821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.403956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.415880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.419339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.425222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.429239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.431127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.435082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.436256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.439662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.440933Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.444267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.445644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.448888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.450624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.453742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.456346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.458264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.461679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.466639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.472054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.476692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.480363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.481237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.484923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.485892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.490602Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.495645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.500729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.505793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.509189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.510597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.515363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.519709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.524677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.529234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.533914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.539022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.547842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.552670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.556960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.562162Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.669482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:03.750421Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fswzcdaffye951ct78xp6", SessionId: ydb://session/3?node_id=1&id=NDFjZGI1OTEtNGU2YTViZDUtYmI4N2MzYjQtM2YxODEwNTE=, Slow query, duration: 33.369635s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:04.083037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:04.083515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:04.084000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494349221218981018:7696];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:23:04.084374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22344, MsgBus: 23719 2025-04-17T18:22:37.651001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349241607256722:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:37.651524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e85/r3tmp/tmpw6XKhE/pdisk_1.dat 2025-04-17T18:22:38.223314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:38.223428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:38.224629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:38.265013Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22344, node 1 2025-04-17T18:22:38.402589Z node 1 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:38.402621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:38.402629Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:38.402728Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23719 TClient is connected to server localhost:23719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:39.178322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:39.223062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:41.139189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349258787126568:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.139307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349258787126559:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.139464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.146257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:41.162701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349258787126573:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:41.241353Z node 1 :TX_PROXY ERROR: Actor# [1:7494349258787126624:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:41.736810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.886111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.920713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.966591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.050979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.196561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.240931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.287853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.322322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.357809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.432408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.483861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.543835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.655362Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349241607256722:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:42.655456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:43.315266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:43.360542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.395598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.435550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.469425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.504712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.557891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.629085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.680114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.711455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.744829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.774861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.804403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.851214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.922406Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.978439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:22:44.040818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.376631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.380102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.381889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.385208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.387026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.390437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.392139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.395697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.397255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.400680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.402105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.405774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.406971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.412922Z node 
1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.415202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.418301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.421226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.423435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.426757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.428716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.432649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.434630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.439058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.439764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.444770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.445893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.450818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.452430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.456428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:23:15.458603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.462326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.464755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.467687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.472714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.473118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.478666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.478666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.484558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.484562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.490418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.490881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.496683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.496691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.500950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.502173Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.579324Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftbmbf5hbxczkdcb2w82a", SessionId: ydb://session/3?node_id=1&id=NDdiOGM3NTItMmZhOGU2MDMtN2Q1ODU4MmYtYTBjNzUxOTY=, Slow query, duration: 30.191564s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:15.800384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:15.801257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494349370456302370:5880];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-17T18:23:15.801599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:15.802163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20190, MsgBus: 12856 2025-04-17T18:22:40.543289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349252100885960:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:40.543330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e7c/r3tmp/tmpmAm835/pdisk_1.dat 2025-04-17T18:22:41.188287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-17T18:22:41.188452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:41.190285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:41.215279Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20190, node 1 2025-04-17T18:22:41.426650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:41.426670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:41.426678Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:41.426773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12856 TClient is connected to server localhost:12856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:42.280791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:42.314075Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:44.510608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349269280755603:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:44.510692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:44.514247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349269280755615:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:44.519333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:44.542114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349269280755617:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:44.634021Z node 1 :TX_PROXY ERROR: Actor# [1:7494349269280755670:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:44.885833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.002690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.116113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.150391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.188152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.380361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.419883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.462063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.514444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.550259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349252100885960:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:45.550450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:45.561994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.619182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:45.705997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-17T18:22:45.756943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.638744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:46.673594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.703321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.770073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.837635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.925089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.037286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.081714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.132724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.169883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.212629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.244765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.322564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.366049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.396901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.425625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.457018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.165291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.167919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.171152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.171719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.176693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.182007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.190926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.196017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.201627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.201768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.207557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.212232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.213009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:23:18.216905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.217422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.221829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.222433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.227074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.227179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.233017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.233017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.239206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.240119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.244844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.245779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.250722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.251280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.255112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.256612Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.261781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.262633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.268048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.268611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.274019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.274525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.280320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.280377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.286571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.286595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.292987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.293021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.299168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.376808Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftf9gca7w504bxzzjmq66", SessionId: ydb://session/3?node_id=1&id=ZmFhNzZlODktYzVlNWI2NWUtZjc0OTk5Y2YtYjY2YTg5MGU=, Slow query, duration: 29.240206s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 
Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:18.672162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:18.672562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:18.672842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494349389539866384:6014];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-17T18:23:18.673464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
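For readability, here is the statement text from the KQP_SLOW_LOG records above with its \n escapes expanded; it is the same three column-store DDL statements the test issues, nothing added:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 setting is presumably what produces both the long runs of per-tablet finished_tx records above and the roughly 30 s CREATE TABLE durations that KQP_SLOW_LOG flags: each column-store table starts with 240 column shards, and each shard acknowledges the schema transaction separately.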
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
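The code 4534 warnings above report optimizer hints whose table labels do not occur in the optimized plan; judging by the Unused* names, the test passes deliberately non-matching hints to check that they surface as warnings rather than errors. Below is a minimal sketch of how such hints could be written, assuming they are supplied through PRAGMA ydb.OptimizerHints; the pragma name and the tables R and T are assumptions for illustration, not taken from this log:

    -- Assumed mechanism: PRAGMA ydb.OptimizerHints. R and T are hypothetical tables.
    -- JoinOrder(...) pins a join-tree shape for the named labels;
    -- Rows(<labels> # <n>) appears to set an absolute row estimate for a relation
    -- or for the join of the listed relations.
    PRAGMA ydb.OptimizerHints =
        'JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ) Rows(Unused # 10e8) Rows(R T # 1)';
    SELECT * FROM R INNER JOIN T ON R.id = T.id;

A hint whose labels match nothing in the plan is skipped and reported as "Unapplied" with code 4534, so a stale or mistyped hint degrades to a warning instead of failing the query; that is consistent with the warnings repeating here while the tests still pass.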
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22778, MsgBus: 10234 2025-04-17T18:21:55.721353Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349062285768753:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:55.721694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ec8/r3tmp/tmpQXquKc/pdisk_1.dat 2025-04-17T18:21:56.223587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:56.223696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:56.225159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22778, node 1 2025-04-17T18:21:56.328647Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:21:56.328666Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:21:56.386778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:56.437447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:56.437464Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:56.437475Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:56.437562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10234 TClient is connected to server localhost:10234 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:57.269164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:57.299363Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:59.720627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349079465638479:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:59.720953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349079465638468:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:59.720999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:59.724468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:59.741600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349079465638482:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:59.825696Z node 1 :TX_PROXY ERROR: Actor# [1:7494349079465638535:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:00.442345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:00.697956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:00.698123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:00.699741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:00.699786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:00.700138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:00.700240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:00.700318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:00.700401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:00.700479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:00.700574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:00.700688Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:00.700779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:00.700867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:00.700960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349083760606108:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:00.703485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:00.703605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:00.703679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:00.703756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:00.703827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:00.703925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:00.704017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:00.704094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:00.704170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:00.704240Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494349083760606075:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:00.769556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349083760606145:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22 ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.214441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.220307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.224623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.234078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.235070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.244568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.248007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.253985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.257287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.259320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.264653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.269436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.277880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.278629Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.287577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.288194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.293284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.299024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.305045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.306908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.312630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.314328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.322968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.328412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.330664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.336403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.342421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.352235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.355475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.360678Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.364550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.371123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.376233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.381227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.383084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.388393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.392468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.401941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.407556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.410728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.415636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.420648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.425147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.430244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.438879Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:10.717681Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ft6hq2rt1cnbp4mh1179f", SessionId: ydb://session/3?node_id=1&id=MjcxYjY3ZmEtZDFmMTM4YjItYjUxNTljMGUtMmRhY2RkMmI=, Slow query, duration: 30.533950s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:10.958566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:10.958941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:10.959224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494349264149268912:7794];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-17T18:23:10.959889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/00101c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit.txt 2025-04-17T18:23:13.478219Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> BasicUsage::WriteSessionCloseWaitsForWrites |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] Test command err: Trying to start YDB, gRPC: 21533, MsgBus: 32368 2025-04-17T18:22:42.233821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349261649319355:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:42.241058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e73/r3tmp/tmpeT9UvV/pdisk_1.dat 2025-04-17T18:22:42.741954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:42.742064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:42.756006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:42.796831Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21533, node 1 2025-04-17T18:22:43.043857Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:43.043884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:43.043890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:43.043980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32368 TClient is connected to server localhost:32368 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:43.956416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:43.971743Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:46.185013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349278829189191:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.185133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.185297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349278829189203:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:46.188572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:46.198500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349278829189205:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:46.283722Z node 1 :TX_PROXY ERROR: Actor# [1:7494349278829189256:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:46.568941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.708924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.772925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.833231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:46.928222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.105852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.152411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.219649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.229934Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349261649319355:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:47.230043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:47.273149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.342018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.379220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.417247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-17T18:22:47.459356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.180169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:48.246418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.311003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.385575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.416630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.445277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.515850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.579901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.666038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.738445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.772132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.819828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.858963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.894020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.929686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:48.973807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:22:49.008368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.438732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.438732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.444070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.444070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.449784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.449826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.456285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.459152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.461812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.464576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.466713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.469103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.471386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.473686Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.475811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.478514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.480301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.483307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.484857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.488162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.489644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.492863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.494116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.497982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.499452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.502832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.504285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.530603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.530603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:23:23.535859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.535859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.541130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.541129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.546335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.546392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.551335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.551335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.556402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.556402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.560951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.562337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.565907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.567313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.571067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.571838Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.576877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.617246Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftgq27kf2z51s6prwm42b", SessionId: ydb://session/3?node_id=1&id=ZWVjMjJiMjQtZmVmM2I2YjgtYjg2OWYxZGItYTY2YjZhZDA=, Slow query, duration: 33.022394s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:23.888646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:23.888710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:23.889281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: 2025-04-17T18:16:43.500660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:16:43.500806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:16:43.500951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001524/r3tmp/tmpPTtYwP/pdisk_1.dat 2025-04-17T18:16:43.941389Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16622, node 1 2025-04-17T18:16:44.282308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:16:44.282368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:16:44.282399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:16:44.283118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:16:44.285458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:16:44.387652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:44.387774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:44.405039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16175 2025-04-17T18:16:45.118663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:16:48.601622Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:16:48.645263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:48.645373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:48.692968Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:16:48.695474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:48.962564Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.963250Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.963780Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.963956Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.964088Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.964319Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.964399Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.964492Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:48.964589Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:16:49.147508Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:16:49.147627Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:16:49.167950Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:16:49.322574Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:16:49.357289Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:16:49.357385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:16:49.407150Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:16:49.408388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:16:49.408588Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:16:49.408651Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:16:49.408700Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:16:49.408747Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:16:49.408803Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:16:49.408854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:16:49.409285Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:16:49.437554Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:49.437685Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1867:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:16:49.444845Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-04-17T18:16:49.448126Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2616] 2025-04-17T18:16:49.448354Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1903:2616], schemeshard id = 72075186224037897 2025-04-17T18:16:49.462060Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:16:49.484407Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:16:49.484515Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:16:49.484586Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:16:49.494151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:16:49.511314Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:16:49.511460Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:16:49.712598Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:16:49.884621Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:16:49.957366Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:16:50.922691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.922823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:50.938838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:16:51.504114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2540:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:51.504265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:51.505485Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2545:3126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:16:51.505682Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:16:51.505763Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2547:3128] 2025-04-17T18:16:51.505828Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2547:3128] 2025-04-17T18:16:51.506433Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2548:2993] 2025-04-17T18:16:51.506681Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2547:3128], server id = [2:2548:2993], tablet id = 72075186224037894, status = OK 2025-04-17T18:16:51.506882Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2548:2993], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:16:51.506937Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-17T18:16:51.507141Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:16:51.507218Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2545:3126], StatRequests.size() = 1 2025-04-17T18:16:51.525210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2552:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:51.525306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:51.525711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2557:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:16:51.531725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-17T18:16:51.685358Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-17T18:16:51.685436Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:16:51.757419Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2547:3128], schemeshard count = 1 2025-04-17T18:16:52.110280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... ICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:17.842425Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:22:19.382799Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:19.382868Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:22.174980Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:22.186774Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:22.186843Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:24.862563Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:22:24.862771Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:24.863104Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:22:24.874027Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:24.874097Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:27.555694Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:27.555767Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:28.942822Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:30.511554Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:30.511636Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:31.918420Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:22:31.918781Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:22:31.919059Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:33.345116Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:33.345188Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:35.954849Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:35.966878Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:35.966955Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-17T18:22:38.835045Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:22:38.835213Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:38.835566Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:22:38.846711Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:38.846772Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:41.522854Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:41.522937Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:42.829215Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:44.258819Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:44.258899Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:45.315416Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:22:45.315771Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:22:45.316020Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:46.546793Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:46.546863Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:48.898775Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:48.923287Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:48.923352Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:51.650773Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:22:51.651163Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:22:51.651459Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:51.662779Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:51.662846Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:54.327025Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:54.327099Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:22:55.574906Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:22:56.909053Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-17T18:22:56.909126Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:22:56.909159Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-17T18:22:56.909191Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-17T18:22:57.116524Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:57.116596Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-17T18:22:58.429062Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:22:58.429442Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:22:58.429706Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:22:59.916208Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:22:59.916278Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:02.650694Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:23:02.663393Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:02.663455Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:05.426461Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:23:05.426812Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:23:05.427135Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:23:05.437877Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:05.437927Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:08.002927Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:08.002997Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:09.350004Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:23:10.928020Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:10.928089Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:12.366353Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:23:12.366688Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:23:12.367008Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:23:13.778988Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:13.779060Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:16.450293Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:23:16.463390Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:16.463460Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:19.366752Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:23:19.366945Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:23:19.367271Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:23:19.378642Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:19.378715Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:21.848189Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:21.848259Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-17T18:23:23.098308Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:23:24.522939Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:24.523011Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:25.823678Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:23:25.824063Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:23:25.824365Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:23:27.173600Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:27.173673Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:27.184635Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-17T18:23:27.184721Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 235.000000s, at schemeshard: 72075186224037897 2025-04-17T18:23:27.184939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-04-17T18:23:27.206515Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:23:29.686868Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:23:29.697674Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:23:29.697765Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:23:32.170761Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-17T18:23:32.171216Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 ... 
waiting for TEvPropagateStatistics (done) 2025-04-17T18:23:32.171645Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:13295:7431]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:23:32.172139Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:23:32.176518Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-17T18:23:32.176606Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:13295:7431], StatRequests.size() = 1 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> KikimrIcGateway::TestListPath |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] Test command err: Trying to start YDB, gRPC: 10285, MsgBus: 61665 2025-04-17T18:21:59.458674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349079633795963:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:59.542055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ebe/r3tmp/tmpaNNynd/pdisk_1.dat 2025-04-17T18:21:59.999042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:59.999142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:00.001252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10285, node 1 2025-04-17T18:22:00.100405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:00.276107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:00.276134Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:00.276142Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:00.276264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61665 TClient is connected 
to server localhost:61665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:01.116400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:01.134326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:03.305317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349096813665661:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:03.305410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:03.305649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349096813665673:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:03.309130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:03.323932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:22:03.325226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349096813665675:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:03.414029Z node 1 :TX_PROXY ERROR: Actor# [1:7494349096813665726:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:03.813707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.092145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:04.092374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:04.092649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:04.092766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:04.092902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:04.093012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:04.093138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:04.093241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:04.093358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:04.093466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:04.093571Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:04.093691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349101108633303:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:04.097706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:04.097755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:04.097898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:04.098013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:04.098110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:04.098412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:04.098519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:04.098613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:04.098721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:04.098820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:04.098917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:04.099023Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7494349096813665993:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:04.138123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349096813665999:2356];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:04.138123Z no ... tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.533098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.535856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.545176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.546089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.551916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.551949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.558013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.565476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.569693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.579556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.583323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.588801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.593023Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.598006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.598441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.602674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.603628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.608107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.608979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.613535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.614300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.617905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.619705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.622872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.625007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.627750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.630479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.633327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.635819Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.637187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.640826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.642438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.646173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.647769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.651787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.652924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.657208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.658722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.663033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.664195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.671370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.677531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.677781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.683296Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.684281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.688754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:18.840077Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftba64qpxz06f0m51cq7b", SessionId: ydb://session/3?node_id=1&id=MjY2NGQ0ZTEtZGJiNzhhN2EtZDQzZjc3MTItZjQ1NzcxMjk=, Slow query, duration: 33.777207s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:19.089014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:19.089030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:19.089611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoinOrder::TPCDS95-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25330, MsgBus: 12363 2025-04-17T18:22:36.810075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349235658920420:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:36.821605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e88/r3tmp/tmpJ5stVU/pdisk_1.dat 2025-04-17T18:22:37.359020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:37.359139Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:37.362528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:37.364517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25330, node 1 2025-04-17T18:22:37.632415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:37.632435Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:37.632441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:37.632543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12363 TClient is connected to server localhost:12363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:38.393895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:40.484596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349252838790190:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:40.484724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:40.485071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349252838790202:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:40.488159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:40.508945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349252838790204:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:40.586600Z node 1 :TX_PROXY ERROR: Actor# [1:7494349252838790255:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:40.874355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:40.984397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.039275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.074380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.118064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.363798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.412363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.462517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.506571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.581169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.645871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.717575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.761634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.814470Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349235658920420:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:41.814530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:42.406636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:42.456154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.500023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.544785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.599138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.636600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.702066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.742222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.787062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.824657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.893302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.945779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.973400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.003585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.074291Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.118082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.160266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.189069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.274878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.276896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.283177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.284381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.295556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.297395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.303174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.309457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.312964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.321282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.329273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.334915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.339363Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.343482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.351531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.355856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.361237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.370086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.372652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.377949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.383684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.393934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.394049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.399706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.400547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.405386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.405854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.411701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.412172Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.418391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.419677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.423807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.429304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.434451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.439277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.444023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.448663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.453286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.457944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.462687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.462775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.467873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.468226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.474283Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.476451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.590454Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftavz5ccpp6yzmn7029m5", SessionId: ydb://session/3?node_id=1&id=OWRmOWQ5YTgtNzliNzljNjUtYjNlNzFlOTktMTU2ODZlNDU=, Slow query, duration: 30.982097s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:16.093718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:16.094138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:16.094608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7494349325853250747:4473];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-17T18:23:16.094971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/001017/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit.txt 2025-04-17T18:23:27.290106Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18731, MsgBus: 64254 2025-04-17T18:22:37.016471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349238169618942:2170];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:37.016861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e87/r3tmp/tmpgmVkdN/pdisk_1.dat 2025-04-17T18:22:37.523056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:37.526471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:37.526564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:37.529349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18731, node 1 2025-04-17T18:22:37.655153Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:37.655179Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:37.655185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:37.655301Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64254 TClient is connected to server localhost:64254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:38.485562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
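(For readability: the three KQP_SLOW_LOG entries in this section each embed the same DDL batch as a single escaped string in their "text:" field. It expands to the statements below; the statement text is verbatim from the log, with only the \n escapes converted to line breaks and indentation reconstructed.)

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

(Each table is a column store with a 240-partition minimum, which is consistent with the long runs of per-tablet finished_tx events for tx_id=281474976710714 across the 72075186224038xxx column-shard tablets recorded above and below.)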
2025-04-17T18:22:38.534510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:40.627127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349255349488680:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:40.627249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:40.627354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349255349488692:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:40.633991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:40.646962Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:22:40.650360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349255349488694:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:40.723581Z node 1 :TX_PROXY ERROR: Actor# [1:7494349255349488745:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:41.035392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.189908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.235770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.278895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.315025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.471274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.504356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.553150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.621229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.660005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.694698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.733836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:22:41.782317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.007155Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349238169618942:2170];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:42.007263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:42.445925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-17T18:22:42.488555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.524932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.565187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.649198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.686516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.716144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.765062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.833472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.884526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.929431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.980887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.015510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.059557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.091865Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:22:43.128998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2 ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.699372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.703022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.704236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.707749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038434;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.709328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038430;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.713545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.715361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038436;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.719341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.720618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038426;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.724437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.726485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038440;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.730243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.730909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.735547Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.735616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038472;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.741753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.742014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.746955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.747078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.751849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.751968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.757047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.757480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.762239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.766699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.771237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.775741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.780822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038456;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.785932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.790975Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.797113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.797113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.802980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.803591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.809853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.815828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.815828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.821646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.821796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.827365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.829533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.832754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.834786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.837969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.840817Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.908079Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftardckd24mtcw4f7qx9v", SessionId: ydb://session/3?node_id=1&id=ZGM2ZDZmYTktZjE5OTVlNmQtYzI4NjY5NjgtZGMwZjJkMGI=, Slow query, duration: 31.414536s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:16.487276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:16.487410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7494349367018664187:5809];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-17T18:23:16.487769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:16.488061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/001018/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt >> BasicStatistics::TwoServerlessTwoSharedDbs >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> BasicStatistics::TwoDatabases >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD] Test command err: Trying to start YDB, gRPC: 4686, MsgBus: 64512 2025-04-17T18:22:09.088273Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349120525820443:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:09.088699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ead/r3tmp/tmpjHIFso/pdisk_1.dat 2025-04-17T18:22:09.771237Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:09.772309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:09.772370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:09.792332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4686, node 1 2025-04-17T18:22:09.970652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:09.970670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:09.970676Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:09.970768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64512 TClient is connected to server localhost:64512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:10.863707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:10.886939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:13.389638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349137705690155:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:13.389757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:13.390006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349137705690167:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:13.393827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:13.407027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:22:13.407513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349137705690169:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:13.475905Z node 1 :TX_PROXY ERROR: Actor# [1:7494349137705690220:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:13.800184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:14.100337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:14.100611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:14.100683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:14.100727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:14.101016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:14.101170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:14.101284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:14.101398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:14.101519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:14.101673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:14.101775Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:14.101905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:14.102010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:14.102104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349137705690487:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:14.110583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:14.110836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:14.110977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:14.111116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:14.111227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:14.111329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:14.111462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:14.111580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:14.111690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:14.111796Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7494349137705690485:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:14.162278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349142000657811:2361];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:14.162339Z node ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.134543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.134601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.141270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.146895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.152164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.157552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.162822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.167982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.173642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.179096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.184378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.187814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.189860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.199647Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.202979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.205554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.209187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.210091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.216194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.218223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.223088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.224809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.233256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.233381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.239289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.239698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.245954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.246073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.251607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.251824Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.257175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.258390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.262738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.268157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.268191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.274073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.275225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.279787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.280200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.285399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.285409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.291148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.291695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.297077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.297618Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:28.425000Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftnvs9kxy0shzv9w1qdrz", SessionId: ydb://session/3?node_id=1&id=NzNmOTFmMmItN2Y1ZDAyMjAtMWYwYWM5NTMtOGZjYmQ3ZDY=, Slow query, duration: 32.559361s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:28.708822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:28.708901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:28.709199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494349425468554938:10834];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-17T18:23:28.709552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23804, MsgBus: 8417 2025-04-17T18:22:01.532892Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349086918208074:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:01.533278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eb8/r3tmp/tmpy0xFa3/pdisk_1.dat 2025-04-17T18:22:02.375362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:02.375475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:02.400582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:02.464610Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23804, node 1 2025-04-17T18:22:02.704306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:02.704324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:02.704330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:02.704430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8417 TClient is connected to server localhost:8417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:03.770685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:03.783023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:06.445965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349108393045089:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:06.446064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:06.450308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349108393045101:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:06.454340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:06.473333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349108393045103:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:06.522322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349086918208074:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:06.522570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:06.546732Z node 1 :TX_PROXY ERROR: Actor# [1:7494349108393045154:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:06.937192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:07.208791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:07.208816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:07.208969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:07.208986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:07.209215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:07.209349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:07.209377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:07.209467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:07.209501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:07.209554Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:07.209598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:07.209651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:07.209708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:07.209755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:07.209853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:07.209865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:07.210009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:07.210208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:07.210350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:07.210371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:07.210484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349112688012713:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:07.210485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:07.210694Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:07.210806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349112688012735:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:07.274197Z node 1 :TX_C ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.223459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.227003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.230633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.236460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.237104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.242562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.246688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.248492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.254101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.254310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.260830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.263735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.265636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.271077Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.271778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.277118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.277348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.281508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.284414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.285692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.290220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.290448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.295074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.296237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.299721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.302366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.304521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.307955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.309203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:23:19.313945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.315250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.319973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.321779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.325838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.328003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.330518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.333903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.337318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.339521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.343819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.347085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.350546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.353204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.355925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.360492Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:19.464512Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftcf2en90n8xchgmr0t8c", SessionId: ydb://session/3?node_id=1&id=NzRjYzU5NGYtYjY3MjZjOTUtNjk1ZThlOWYtZjdmY2E5OWM=, Slow query, duration: 33.221999s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:19.977621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:19.978082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:19.978478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349340321327441:9491];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-17T18:23:19.978860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD] Test command err: Trying to start YDB, gRPC: 61193, MsgBus: 1363 2025-04-17T18:22:14.922597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349143029161070:2180];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ea6/r3tmp/tmpkT9Rxo/pdisk_1.dat 2025-04-17T18:22:15.254354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:22:15.525853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:15.530912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:15.530996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:15.535973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61193, node 1 2025-04-17T18:22:15.654589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-17T18:22:15.654606Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:15.654613Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:15.654717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1363 TClient is connected to server localhost:1363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:16.455723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:16.474025Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:18.588956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349160209030787:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:18.589057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:18.589411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349160209030799:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:18.593177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:18.604910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349160209030801:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:18.706693Z node 1 :TX_PROXY ERROR: Actor# [1:7494349160209030852:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:19.203738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:19.505200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:19.505367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:19.505572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:19.505655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:19.505738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:19.505818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:19.505901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:19.506000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:19.506114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:19.508229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:19.508293Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:19.508445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:19.508528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:19.508607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:19.508685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:19.508762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:19.508838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:19.508938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:19.509029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:19.509119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:19.509223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349164503998427:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:19.522311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:19.522497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:19.522602Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7494349164503998419:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:19.560218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349164503998431:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:19.560273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349164503998431:2359];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstra ... tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.465437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.470602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.470602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.475779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.475779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.480777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.480777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.485586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.485585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.490453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.490453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.495211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.495211Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.500220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.500220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.505462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.505507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.510380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.510388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.515578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.515587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.520659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.520660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.525761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.525761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.531174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.532345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.536383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:23:30.537099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.540806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.541628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.545364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.546711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.549853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.550673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.554715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.555395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.559591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.560412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.564166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.564775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.569920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.577219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.586916Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.586916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.635724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:30.729192Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ftsb66de0szp0a0qmpa86", SessionId: ydb://session/3?node_id=1&id=ZTU1YjA4MzYtYWY2OGZjYWMtNzIyNzZmNDktZTNkOTM5Zg==, Slow query, duration: 31.298376s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:30.982187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:30.982203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:30.982867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestDropResourcePool |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28940, MsgBus: 23194 2025-04-17T18:21:55.466687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349059803996916:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:55.466821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ec9/r3tmp/tmpEC3Zj4/pdisk_1.dat 2025-04-17T18:21:56.223606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:56.223712Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:56.231268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:21:56.235902Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28940, node 1 2025-04-17T18:21:56.494753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:56.494791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:56.494801Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:56.494951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23194 TClient is connected to server localhost:23194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:57.314752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:57.327906Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:59.302860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349076983866762:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:59.302969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:59.303168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349076983866774:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:59.307269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:59.318167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349076983866776:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:59.417989Z node 1 :TX_PROXY ERROR: Actor# [1:7494349076983866827:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:59.732807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:59.965864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:59.966093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:59.966452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:59.966563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:59.966657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:59.966753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:59.966842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:59.966947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:59.967066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:59.967179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:59.967280Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:59.967387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349076983867041:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:59.974261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:59.974308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:59.974484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:59.974580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:59.974676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:59.974786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:59.974889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:59.974996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:59.975106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:59.975199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:59.975320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:59.975414Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7494349076983867037:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:00.012178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349076983867064:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:00.012232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349076983867064:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abs ... 76710714; 2025-04-17T18:23:15.541655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.561181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.561184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.566711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.566711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.570114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.573091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.578486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.578572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.584152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.591185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.595609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.600752Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.605669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.610148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.614814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.619060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.623575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.628497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.628700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.633633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.633633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.638882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.638882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.644085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.644085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.649485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.649913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.656128Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.656663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.664098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.664371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.670363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.670588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.676120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.676395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.689195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.689556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.695180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:15.799899Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ft7cza2srbqyw7f3p6tv5", SessionId: ydb://session/3?node_id=1&id=ZGM0OGIxZjEtMjFjNjdjZGEtMjc1NGI5MjctZjYzYWQ4YWI=, Slow query, duration: 34.743796s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:16.381121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:16.382022Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038933;self_id=[1:7494349265962464478:7836];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:23:16.386416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:16.387214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:37.621551Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fvkvp303kwrah0w3wj9vr", SessionId: ydb://session/3?node_id=1&id=ZGM0OGIxZjEtMjFjNjdjZGEtMjc1NGI5MjctZjYzYWQ4YWI=, Slow query, duration: 11.038141s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n$bla1 = (select ws_order_number\n from web_sales\n group by ws_order_number\n having COUNT(DISTINCT ws_warehouse_sk) > 1);\n\n-- start query 1 in stream 0 using template query94.tpl and seed 2031708268\nselect\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\n left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)\n left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'NE'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\norder by `order count`\nlimit 100;\n", parameters: 0b >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_dispatch.py::TestMapping::test_mapping >> test_retry.py::TestRetry::test_fail_first[kikimr0] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 11613, MsgBus: 21362 2025-04-17T18:23:35.623350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349487878289817:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:23:35.623399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b98/r3tmp/tmpjZaJMc/pdisk_1.dat 2025-04-17T18:23:36.062426Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:36.109810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:36.110566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:36.122087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11613, node 1 2025-04-17T18:23:36.409460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:23:36.409485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:23:36.409492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:23:36.409613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21362 TClient is connected to server localhost:21362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:23:37.290056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:23:37.390915Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:23:38.696402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349500763192410:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:38.696501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:39.431362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:23:39.587242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:23:39.630397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:23:39.667633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:23:39.747696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349505058160026:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:39.747774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:39.747938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349505058160031:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:39.753610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-17T18:23:39.765126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349505058160033:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-17T18:23:39.871292Z node 1 :TX_PROXY ERROR: Actor# [1:7494349505058160086:2573] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:23:40.623432Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349487878289817:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:23:40.623803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16930, MsgBus: 23112 2025-04-17T18:23:41.368379Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349514407648109:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b98/r3tmp/tmpB38b5X/pdisk_1.dat 2025-04-17T18:23:41.534555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:23:41.602959Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:41.616630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:41.616708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:41.617964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16930, node 2 2025-04-17T18:23:41.758592Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:23:41.758615Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:23:41.758623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:23:41.758741Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23112 TClient is connected to server localhost:23112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-17T18:23:42.315856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:23:42.334625Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:23:44.755546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349527292550537:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:44.755651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:44.804978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T18:23:44.883939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-17T18:23:44.912275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:23:44.935948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:23:44.978127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349527292550848:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:44.978244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:44.978462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349527292550853:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:44.981841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-17T18:23:44.991886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349527292550855:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-17T18:23:45.080965Z node 2 :TX_PROXY ERROR: Actor# [2:7494349531587518204:2566] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
<main>: Info: Success, code: 4 2025-04-17T18:23:45.288189Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found Trying to start YDB, gRPC: 1650, MsgBus: 12441 2025-04-17T18:23:46.207163Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494349536820515119:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:23:46.207224Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b98/r3tmp/tmpZQuU2g/pdisk_1.dat 2025-04-17T18:23:46.463200Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1650, node 3 2025-04-17T18:23:46.558782Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:46.558886Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:46.561817Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:46.569098Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:23:46.569121Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:23:46.569128Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:23:46.569234Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12441 TClient is connected to server localhost:12441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:23:47.074068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T18:23:47.081758Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:23:47.104217Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> KqpJoinOrder::Chain65Nodes [GOOD] >> test_cp_ic.py::TestCpIc::test_discovery >> KqpJoinOrder::TPCDS95+ColumnStore [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] 2025-04-17T18:23:03.482409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:23:03.482532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:23:03.482635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:23:03.482687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:23:03.490269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:23:03.490358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:23:03.490543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-17T18:23:03.490630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:23:03.494649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:23:03.687981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:23:03.688080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:03.748630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:23:03.748775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:23:03.748938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:23:03.806584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:23:03.807152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:23:03.826295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:23:03.830615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:23:03.875027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:23:03.914691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:23:03.914792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:23:03.914961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:23:03.915014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:23:03.915106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:23:03.915345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:23:03.939050Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-17T18:23:04.269892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:23:04.278427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:23:04.282313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:23:04.286338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:23:04.286459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:23:04.293669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:23:04.293829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:23:04.294126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:23:04.294299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:23:04.294338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:23:04.294373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:23:04.303060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:23:04.303144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:23:04.303184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:23:04.311020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:23:04.311096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:23:04.311142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:23:04.311192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:23:04.321768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:23:04.338946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:23:04.346282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:23:04.347595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:23:04.347746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:23:04.347804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:23:04.353461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-17T18:23:04.353558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:23:04.353808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:23:04.353903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:23:04.367359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:23:04.367431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:23:04.367662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:23:04.367703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:23:04.368013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:23:04.368060Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:23:04.368156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:23:04.368193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:23:04.368233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:23:04.368267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:23:04.368303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:23:04.368341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:23:04.368392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:23:04.368429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:23:04.368498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:23:04.368548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:23:04.368579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:23:04.377672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:23:04.377843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:23:04.377892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-04-17T18:23:52.025523Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 2025-04-17T18:23:52.026238Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T18:23:52.026287Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T18:23:52.026355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-04-17T18:23:52.026502Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:23:52.027054Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.027135Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.027166Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T18:23:52.027200Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-17T18:23:52.027255Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:23:52.028136Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.028230Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.028273Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T18:23:52.028304Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-17T18:23:52.028334Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:23:52.028396Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-17T18:23:52.031047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T18:23:52.031087Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-17T18:23:52.031124Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-17T18:23:52.031659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-17T18:23:52.031787Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:23:52.031974Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-04-17T18:23:52.032942Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:23:52.033046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:23:52.033102Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944 2025-04-17T18:23:52.033215Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-17T18:23:52.033288Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T18:23:52.033330Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T18:23:52.033379Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-17T18:23:52.033422Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T18:23:52.033496Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:23:52.033565Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:23:52.033609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-04-17T18:23:52.033660Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-17T18:23:52.033702Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-17T18:23:52.033737Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-17T18:23:52.033805Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-17T18:23:52.033846Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-04-17T18:23:52.033906Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-17T18:23:52.033946Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-17T18:23:52.034711Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.036110Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:23:52.036149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:23:52.036297Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-17T18:23:52.036410Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:23:52.036446Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-04-17T18:23:52.036483Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-17T18:23:52.037194Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.037274Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.037305Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T18:23:52.037358Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-17T18:23:52.037410Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-17T18:23:52.038068Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.038140Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.038171Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-17T18:23:52.038220Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:23:52.038249Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-17T18:23:52.038323Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-04-17T18:23:52.038372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-17T18:23:52.040569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.041002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-17T18:23:52.041084Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-17T18:23:52.041138Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-17T18:23:52.042842Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:23:52.042907Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:592:2547] TestWaitNotification: OK eventTxId 102 |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Chain65Nodes [GOOD] Test command err: Trying to start YDB, gRPC: 19524, MsgBus: 25966 2025-04-17T18:19:29.722151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494348431428588398:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:19:29.722226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f9c/r3tmp/tmpIlMJvi/pdisk_1.dat 2025-04-17T18:19:30.070785Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19524, node 1 2025-04-17T18:19:30.077646Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:19:30.114977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:19:30.115153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:19:30.117420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:19:30.125054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:19:30.125085Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:19:30.125098Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:19:30.125277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25966 TClient is connected to server localhost:25966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:19:30.625101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:19:30.654624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:19:32.930514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348444313490947:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:32.930653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.184728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.325562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458346:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.325636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.334343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.366732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458421:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.366839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.380262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.414875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458497:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.414953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.427832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.504992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458575:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.505063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.518144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.560884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458651:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.560949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.574152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.641715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458728:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.641782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.653786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.689798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458805:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.689882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.702001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.738455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458881:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.738570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.750248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.783663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608458960:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.783738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.796108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.829800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348448608459037:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.829857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:33.841998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:19:33.876396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadSe ... ult, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.296292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.342410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364393:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.342482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.351650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.429911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364479:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.430025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.440233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.484491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364560:2833], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.484564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.496908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.542203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364639:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.542269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.555065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.598036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364719:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.598202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.608657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.668925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364800:2860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.669021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.680338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.771938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364892:2871], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.772033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.783771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.824904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493364970:2880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.825017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.838833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.887146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493365051:2890], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.887222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.900146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.947825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493365132:2899], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.947893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.959629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-04-17T18:19:36.988856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348461493365210:2908], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.988915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:36.999217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.036471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348465788332585:2917], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:37.036537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:37.047603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-04-17T18:19:37.086624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348465788332663:2926], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:37.086700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:37.086862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494348465788332668:2929], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:19:37.090968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710723:3, at schemeshard: 72057594046644480 2025-04-17T18:19:37.105588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494348465788332670:2930], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710723 completed, doublechecking } 2025-04-17T18:19:37.199140Z node 1 :TX_PROXY ERROR: Actor# [1:7494348465788332730:5779] txid# 281474976710724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 70], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:19:45.067205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:19:45.067245Z node 1 :IMPORT WARN: Table profiles were not loaded
: Warning: Execution, code: 1060
: Warning: Cost Based Optimizer could not be applied to this query: Enumeration is too large, use PRAGMA MaxDPHypDPTableSize='4294967295' to disable the limitation, code: 8000 |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] |95.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::TPCDS23+ColumnStore [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 16867, MsgBus: 19928 2025-04-17T18:22:05.721564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349103390394717:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:05.722081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eaf/r3tmp/tmpr0cMI3/pdisk_1.dat 2025-04-17T18:22:06.575324Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:06.604497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:06.604598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:06.611190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16867, node 1 2025-04-17T18:22:06.886647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:06.886669Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:06.886680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:06.886786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19928 TClient is connected to server localhost:19928 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:07.814475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:07.832490Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:10.281628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349124865231731:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:10.281729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:10.282096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349124865231743:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:10.286008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:10.307382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:22:10.308094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349124865231745:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:10.370306Z node 1 :TX_PROXY ERROR: Actor# [1:7494349124865231796:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:10.712718Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349103390394717:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:10.712771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:10.761353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:11.026391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:11.026590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:11.026827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:11.026959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:11.027056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:11.027161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:11.027341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:11.027502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:11.027603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:11.027758Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:11.027900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:11.028051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494349124865232047:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:11.051255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:11.051445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:11.051627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:11.051760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:11.051857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:11.051963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:11.052066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:11.052176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:11.052290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:11.052380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:11.052482Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:11.052583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349124865232043:2360];tablet_id=72075186224037 ... 81474976710714; 2025-04-17T18:23:22.861313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.862050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.867440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.873083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.878718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.881494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.886275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.888043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.891089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.893375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.899652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.900904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.905211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.906918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.911542Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.913321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.917962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.919610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.923353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.927725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.931460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.933659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.936961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.939806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.944877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.945920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.951295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.952697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.963697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.964635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:23:22.975679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.978769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.986223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.986732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.992452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.992579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:22.998125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.003662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:23.287968Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fth4g5z1parymvsttwzsp", SessionId: ydb://session/3?node_id=1&id=YzhiYzc5ODktNDg4ZDMzYzgtZmFjNzc3MTQtN2FiYjg1OWI=, Slow query, duration: 32.261369s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:23.892298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:23.892678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:23.893256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349352498544591:9197];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:23:23.893571Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:45.788522Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fvv0perd7bdjssepftc8n", SessionId: ydb://session/3?node_id=1&id=YzhiYzc5ODktNDg4ZDMzYzgtZmFjNzc3MTQtN2FiYjg1OWI=, Slow query, duration: 11.877245s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> Normalizers::ColumnChunkNormalizer >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-04-17T18:23:47.505109Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:23:47.609435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:23:47.634428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:23:47.634718Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:23:47.642324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-17T18:23:47.642568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-17T18:23:47.642744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:23:47.642926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:23:47.643055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:23:47.643191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:23:47.643290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:23:47.643393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:23:47.643511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:23:47.643638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:23:47.643762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:23:47.643873Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:23:47.643986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:23:47.676923Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:23:47.677163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-17T18:23:47.677211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-17T18:23:47.677486Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-04-17T18:23:47.677618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-17T18:23:47.677697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-17T18:23:47.677740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-17T18:23:47.677908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:23:47.677991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:23:47.678034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:23:47.678063Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-17T18:23:47.678170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:23:47.678257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:23:47.678295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:23:47.678323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-17T18:23:47.678484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 
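For readability: the KQP_SLOW_LOG entry near the start of this chunk (TraceId "01js2fvv0perd7bdjssepftc8n", duration 11.877245s) carries its query text as a single escaped string. Unescaped and lightly re-indented, with no other changes, the logged YQL is:

    pragma TablePathPrefix = "/Root/test/ds/";
    -- NB: Subquerys
    $ws_wh =
    (select ws1.ws_order_number ws_order_number, ws1.ws_warehouse_sk wh1, ws2.ws_warehouse_sk wh2
     from web_sales ws1 cross join web_sales ws2
     where ws1.ws_order_number = ws2.ws_order_number
       and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);
    -- start query 1 in stream 0 using template query95.tpl and seed 2031708268
    select
        count(distinct ws1.ws_order_number) as `order count`
       ,sum(ws_ext_ship_cost)               as `total shipping cost`
       ,sum(ws_net_profit)                  as `total net profit`
    from
        web_sales ws1
        cross join date_dim
        cross join customer_address
        cross join web_site
    where
        cast(d_date as date) between cast('2002-4-01' as date) and
            (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))
        and ws1.ws_ship_date_sk = d_date_sk
        and ws1.ws_ship_addr_sk = ca_address_sk
        and ca_state = 'AL'
        and ws1.ws_web_site_sk = web_site_sk
        and web_company_name = 'pri'
        and ws1.ws_order_number in (select ws_order_number from $ws_wh)
        and ws1.ws_order_number in (select wr_order_number
                                    from web_returns cross join $ws_wh ws_wh
                                    where wr_order_number = ws_wh.ws_order_number)
    order by `order count`
    limit 100;

This is TPC-DS query 95 run against column-store tables; the 11.9s duration under the address-sanitized build is what lands it in KQP_SLOW_LOG.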
2025-04-17T18:23:47.678544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:23:47.678595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:23:47.678643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-17T18:23:47.678752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:23:47.678808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:23:47.678866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:23:47.678900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:23:47.678982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:23:47.679026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:23:47.679059Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:23:47.679103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:23:47.679164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:23:47.679197Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:23:47.679581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2025-04-17T18:23:47.679665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:23:47.679754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-17T18:23:47.679830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-17T18:23:47.680008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:23:47.680056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:23:47.680089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:23:47.680316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:23:47.680369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:23:47.680398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:23:47.680536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:23:47.680575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:23:47.680612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;eve ... 
UG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:23:56.587958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:23:56.588016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:23:56.588077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-17T18:23:56.588137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-17T18:23:56.588185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:23:56.588286Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.588323Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-17T18:23:56.588361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:23:56.588581Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:23:56.588754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.588798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:23:56.588923Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-17T18:23:56.588995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-17T18:23:56.589158Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:494:2497];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-17T18:23:56.589303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.589420Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.589532Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.590539Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:23:56.590678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.590820Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.590864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:496:2498] finished for tablet 9437184 2025-04-17T18:23:56.591361Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:494:2497];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.23},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.233}],"full":{"a":1744914236357789,"name":"_full_task","f":1744914236357789,"d_finished":0,"c":0,"l":1744914236590923,"d":233134},"events":[{"name":"bootstrap","f":1744914236357982,"d_finished":2758,"c":1,"l":1744914236360740,"d":2758},{"a":1744914236590511,"name":"ack","f":1744914236588547,"d_finished":1008,"c":1,"l":1744914236589555,"d":1420},{"a":1744914236590489,"name":"processing","f":1744914236365157,"d_finished":145274,"c":9,"l":1744914236589558,"d":145708},{"name":"ProduceResults","f":1744914236359641,"d_finished":2410,"c":12,"l":1744914236590849,"d":2410},{"a":1744914236590852,"name":"Finish","f":1744914236590852,"d_finished":0,"c":0,"l":1744914236590923,"d":71},{"name":"task_result","f":1744914236365179,"d_finished":144121,"c":8,"l":1744914236588408,"d":144121}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.591469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:494:2497];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:23:56.591911Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:494:2497];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.23},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.233}],"full":{"a":1744914236357789,"name":"_full_task","f":1744914236357789,"d_finished":0,"c":0,"l":1744914236591523,"d":233734},"events":[{"name":"bootstrap","f":1744914236357982,"d_finished":2758,"c":1,"l":1744914236360740,"d":2758},{"a":1744914236590511,"name":"ack","f":1744914236588547,"d_finished":1008,"c":1,"l":1744914236589555,"d":2020},{"a":1744914236590489,"name":"processing","f":1744914236365157,"d_finished":145274,"c":9,"l":1744914236589558,"d":146308},{"name":"ProduceResults","f":1744914236359641,"d_finished":2410,"c":12,"l":1744914236590849,"d":2410},{"a":1744914236590852,"name":"Finish","f":1744914236590852,"d_finished":0,"c":0,"l":1744914236591523,"d":671},{"name":"task_result","f":1744914236365179,"d_finished":144121,"c":8,"l":1744914236588408,"d":144121}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:23:56.592013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:23:56.357238Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589264;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589264;selected_rows=0; 2025-04-17T18:23:56.592081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:23:56.592365Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:496:2498];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23+ColumnStore [GOOD] Test command err: Trying to 
start YDB, gRPC: 18905, MsgBus: 12301 2025-04-17T18:21:51.826463Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349043905434015:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:51.831928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ed0/r3tmp/tmpiiwp2j/pdisk_1.dat 2025-04-17T18:21:52.371842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:21:52.371924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:21:52.386651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18905, node 1 2025-04-17T18:21:52.442791Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:21:52.442809Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:21:52.594477Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:21:52.602746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:21:52.602767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:21:52.602776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:21:52.602883Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12301 TClient is connected to server localhost:12301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:21:53.549988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:21:53.576935Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:21:55.956013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349061085303726:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:55.956117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:55.956495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349061085303739:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:21:55.960831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:21:55.974139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349061085303741:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:21:56.064860Z node 1 :TX_PROXY ERROR: Actor# [1:7494349065380271088:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:21:56.571253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:21:56.831881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:56.842578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:56.842926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:56.843054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:56.843166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:56.843278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:56.843426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:56.843531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:56.843669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:56.843776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:56.843880Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:56.844109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349065380271333:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:56.877976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21:56.878021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:21:56.878266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:21:56.878359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:21:56.878447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:21:56.878533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:21:56.878615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:21:56.878708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:21:56.878813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:21:56.878933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:21:56.879036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:21:56.879116Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494349065380271335:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:21:56.974239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494349065380271331:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:21 ... tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.083036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.084764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.089045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.091561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.093825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.095820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.098940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.099225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.104846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.105363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.109301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.113908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.114218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
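The normalizer_register / normalizer_init / normalizer_finished / normalizer_switched entries above (both in this test's columnshard startup and in the CleanEmptyPortionsNormalizer output earlier) trace a simple sequential pipeline: every normalizer is registered during TTxInitSchema, then TTxUpdateSchema runs them one at a time, each reporting how many broken chunks it found before control switches to the next. The sketch below is a hypothetical illustration of that control flow only — INormalizer, Repair(), and RunChain() are made-up names, not YDB's actual interfaces; only the class name TGranulesNormalizer and the event strings are taken from the log:

    #include <cstddef>
    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // Illustrative stand-in for one link of the chain seen in the log.
    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual std::string ClassName() const = 0;
        virtual std::size_t Repair() = 0;  // number of broken chunks found
    };

    struct TGranulesNormalizer final : INormalizer {
        std::string ClassName() const override { return "Granules"; }
        std::size_t Repair() override { return 0; }  // "0 chunks found" above
    };

    // Runs the chain strictly in order, printing the same event sequence
    // that TTxUpdateSchema::Execute emits in the output above.
    void RunChain(const std::vector<std::unique_ptr<INormalizer>>& chain) {
        for (std::size_t i = 0; i < chain.size(); ++i) {
            auto& n = *chain[i];
            std::cout << "event=normalizer_init;type=" << n.ClassName() << '\n';
            std::cout << "message=" << n.Repair() << " chunks found\n";
            std::cout << "event=normalizer_finished;description=CLASS_NAME="
                      << n.ClassName() << '\n';
            if (i + 1 < chain.size()) {
                std::cout << "event=normalizer_switched;description=CLASS_NAME="
                          << chain[i + 1]->ClassName() << '\n';
            }
        }
    }

    int main() {
        std::vector<std::unique_ptr<INormalizer>> chain;
        chain.push_back(std::make_unique<TGranulesNormalizer>());
        RunChain(chain);
        return 0;
    }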
2025-04-17T18:23:11.119562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.119578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.124822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.124822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.130835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.132438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.136935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.137804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.143677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.144426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.149887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.155003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.160050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.165665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.170680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.175652Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.181456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.188120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.193611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.202781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.210372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.216734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.223303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.228464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.235470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.241741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.248497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.250890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.254445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.260955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.267037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.267810Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039224;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.273561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:11.425792Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2ft39pfzhmmnyezvy2y84a", SessionId: ydb://session/3?node_id=1&id=ZWYzZTJlZmQtOGVkZGE3OTQtODBkZTgyYTgtZDgwOGQ1N2U=, Slow query, duration: 34.570223s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:12.022946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:12.023568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:12.023698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> Normalizers::ColumnChunkNormalizer [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ColumnChunkNormalizer [GOOD] Test command err: 2025-04-17T18:23:58.159714Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:23:58.261231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:23:58.285572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:23:58.285850Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:23:58.293447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-17T18:23:58.293649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:23:58.293857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:23:58.293967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:23:58.294085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:23:58.294229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:23:58.294347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:23:58.294441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:23:58.294551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:23:58.294659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:23:58.294778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:23:58.294917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:23:58.338601Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2025-04-17T18:23:58.338894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:23:58.338947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-17T18:23:58.339172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:23:58.339350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:23:58.339428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:23:58.339497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-17T18:23:58.339578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:23:58.339665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:23:58.339719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:23:58.339748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-17T18:23:58.339925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:23:58.339989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:23:58.340027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:23:58.340055Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-17T18:23:58.340146Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:23:58.340205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:23:58.340276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:23:58.340312Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:23:58.340383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:23:58.340417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:23:58.340466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:23:58.340526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:23:58.340561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:23:58.340585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:23:58.341039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=110; 2025-04-17T18:23:58.341136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-17T18:23:58.341206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-17T18:23:58.341287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-17T18:23:58.341472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:23:58.341540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:23:58.341584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:23:58.341835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:23:58.341892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:23:58.341923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:23:58.342087Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:23:58.342131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:23:58.342168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:23:58.342377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:23:58.342437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:23:58.342466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:23:58.342583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:23:58.342722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normaliz ... abletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:24:02.200357Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-17T18:24:02.200423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-17T18:24:02.200484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-17T18:24:02.200545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-17T18:24:02.200618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-17T18:24:02.200746Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.200783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 
2025-04-17T18:24:02.200822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-17T18:24:02.201057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:24:02.201252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.201320Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-17T18:24:02.201438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-17T18:24:02.201534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-17T18:24:02.201721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:477:2483];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-17T18:24:02.201865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.201971Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.202094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.203031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-17T18:24:02.203209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.203364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.203416Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:479:2484] finished for tablet 9437184 2025-04-17T18:24:02.203899Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:477:2483];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.299},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.301}],"full":{"a":1744914241901567,"name":"_full_task","f":1744914241901567,"d_finished":0,"c":0,"l":1744914242203476,"d":301909},"events":[{"name":"bootstrap","f":1744914241901777,"d_finished":2949,"c":1,"l":1744914241904726,"d":2949},{"a":1744914242202997,"name":"ack","f":1744914242201027,"d_finished":1095,"c":1,"l":1744914242202122,"d":1574},{"a":1744914242202973,"name":"processing","f":1744914241908134,"d_finished":147544,"c":9,"l":1744914242202125,"d":148047},{"name":"ProduceResults","f":1744914241903622,"d_finished":2923,"c":12,"l":1744914242203399,"d":2923},{"a":1744914242203404,"name":"Finish","f":1744914242203404,"d_finished":0,"c":0,"l":1744914242203476,"d":72},{"name":"task_result","f":1744914241908161,"d_finished":146260,"c":8,"l":1744914242200872,"d":146260}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.203999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:477:2483];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-17T18:24:02.204483Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:477:2483];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.299},{"events":["l_ProduceResults","f_Finish"],"t":0.301},{"events":["l_ack","l_processing","l_Finish"],"t":0.302}],"full":{"a":1744914241901567,"name":"_full_task","f":1744914241901567,"d_finished":0,"c":0,"l":1744914242204048,"d":302481},"events":[{"name":"bootstrap","f":1744914241901777,"d_finished":2949,"c":1,"l":1744914241904726,"d":2949},{"a":1744914242202997,"name":"ack","f":1744914242201027,"d_finished":1095,"c":1,"l":1744914242202122,"d":2146},{"a":1744914242202973,"name":"processing","f":1744914241908134,"d_finished":147544,"c":9,"l":1744914242202125,"d":148619},{"name":"ProduceResults","f":1744914241903622,"d_finished":2923,"c":12,"l":1744914242203399,"d":2923},{"a":1744914242203404,"name":"Finish","f":1744914242203404,"d_finished":0,"c":0,"l":1744914242204048,"d":644},{"name":"task_result","f":1744914241908161,"d_finished":146260,"c":8,"l":1744914242200872,"d":146260}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-17T18:24:02.204608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-17T18:24:01.900997Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589264;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589264;selected_rows=0; 2025-04-17T18:24:02.204678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:189;event=scan_aborted;reason=unexpected on destructor; 2025-04-17T18:24:02.204989Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-04-17T18:23:31.047907Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1744914211047877 2025-04-17T18:23:31.403668Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349472956142963:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:23:31.403711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:23:31.480109Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349474859829163:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:23:31.480439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:23:31.663645Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:23:31.663672Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001853/r3tmp/tmp8T8U3D/pdisk_1.dat 2025-04-17T18:23:31.857767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:31.857900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:31.862008Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:23:31.863170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:31.887460Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:31.896285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:31.896357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13839, node 1 2025-04-17T18:23:31.906157Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:23:31.906265Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:23:31.924054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:31.944761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001853/r3tmp/yandexGPv8hT.tmp 2025-04-17T18:23:31.944787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001853/r3tmp/yandexGPv8hT.tmp 2025-04-17T18:23:31.944954Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001853/r3tmp/yandexGPv8hT.tmp 2025-04-17T18:23:31.945085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:23:31.996924Z INFO: TTestServer started on Port 2379 GrpcPort 13839 TClient is connected to server localhost:2379 PQClient connected to localhost:13839 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:23:32.265631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:23:34.621954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349487744731279:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:34.622041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494349487744731272:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:34.622414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:34.633079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T18:23:34.645831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349485841045874:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:34.647239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:23:34.688726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494349487744731301:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T18:23:35.002396Z node 2 :TX_PROXY ERROR: Actor# [2:7494349487744731329:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:23:35.033362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:23:35.052163Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349492039698632:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:23:35.053542Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzQzMmM0ZWYtN2ZhMTQ1NGYtN2Y4ZTY3ZTEtZTMyYzhmM2M=, ActorId: [2:7494349487744731270:2308], ActorState: ExecuteState, TraceId: 01js2fvvpq4jpw2dpscqempff8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:23:35.055848Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:23:35.051724Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494349485841045924:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:23:35.057656Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE4YWRkMjgtNTM1OWZjOWUtYjYzNGQ1MTktYzYwNTIwYWY=, ActorId: [1:7494349485841045856:2336], ActorState: ExecuteState, TraceId: 01js2fvvpvc8ns10n9wt5493z1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:23:35.058152Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:23:35.212286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:23:35.394016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13839", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-17T18:23:35.702856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01js2fvwm26jkjcw3ssj7pt5hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhmY2E2YmItYmFjYTc2NjktMzUxOWI5ZC1jZDU4YjM3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494349490136013725:3031] 2025-04-17T18:23:36.404135Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349472956142963:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:23:36.404225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:23:36.475784Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494349474859829163:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:23:36.475852Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok waiting... 2025-04-17T18:23:41.698773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, b ... 
ocalhost:14433 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:23:55.777145Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:14433 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:23:56.286298Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:14433 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:23:56.811894Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-17T18:23:56.836450Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-17T18:23:56.836908Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-17T18:23:56.836945Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:14433 2025-04-17T18:23:56.853903Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-17T18:23:56.855288Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-17T18:23:56.855320Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-17T18:23:56.855631Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-17T18:23:56.855736Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:51982 2025-04-17T18:23:56.855750Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51982 proto=v1 topic=test-topic durationSec=0 2025-04-17T18:23:56.855758Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:23:56.857015Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-17T18:23:56.857104Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-17T18:23:56.857112Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:23:56.857120Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T18:23:56.857134Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T18:23:56.859494Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-17T18:23:57.020945Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-17T18:23:57.021610Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494349582718690210:2506] connected; active server actors: 1 2025-04-17T18:23:57.021714Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-17T18:23:57.021731Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-17T18:23:57.026481Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494349582718690210:2506] disconnected; active server actors: 1 
2025-04-17T18:23:57.026508Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494349582718690210:2506] disconnected no session 2025-04-17T18:23:57.124189Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T18:23:57.124224Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T18:23:57.124239Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494349578423722870:2506] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T18:23:57.124264Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:23:57.125282Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7494349582718690237:2506], now have 1 active actors on pipe 2025-04-17T18:23:57.125632Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-17T18:23:57.128517Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744914237128 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:23:57.128648Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|98bd2fa-57387b18-7b110210-10eba27b_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T18:23:57.127774Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|98bd2fa-57387b18-7b110210-10eba27b_0 2025-04-17T18:23:57.129160Z :INFO: [] MessageGroupId [src] SessionId [src|98bd2fa-57387b18-7b110210-10eba27b_0] Write session: close. Timeout = 0 ms 2025-04-17T18:23:57.129200Z :INFO: [] MessageGroupId [src] SessionId [src|98bd2fa-57387b18-7b110210-10eba27b_0] Write session will now close 2025-04-17T18:23:57.129234Z :DEBUG: [] MessageGroupId [src] SessionId [src|98bd2fa-57387b18-7b110210-10eba27b_0] Write session: aborting 2025-04-17T18:23:57.126208Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:23:57.126243Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:23:57.126336Z node 4 :PERSQUEUE INFO: new Cookie src|98bd2fa-57387b18-7b110210-10eba27b_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T18:23:57.130338Z :INFO: [] MessageGroupId [src] SessionId [src|98bd2fa-57387b18-7b110210-10eba27b_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:23:57.126446Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-17T18:23:57.126493Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:23:57.127335Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:23:57.127356Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:23:57.127420Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:23:57.130710Z :DEBUG: [] MessageGroupId [src] SessionId [src|98bd2fa-57387b18-7b110210-10eba27b_0] Write session is aborting and will not restart 2025-04-17T18:23:57.131051Z :DEBUG: [] MessageGroupId [src] SessionId [src|98bd2fa-57387b18-7b110210-10eba27b_0] Write session: destroy 2025-04-17T18:23:57.130418Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|98bd2fa-57387b18-7b110210-10eba27b_0 grpc read done: success: 0 data: 2025-04-17T18:23:57.130436Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|98bd2fa-57387b18-7b110210-10eba27b_0 grpc read failed 2025-04-17T18:23:57.130458Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|98bd2fa-57387b18-7b110210-10eba27b_0 grpc closed 2025-04-17T18:23:57.130482Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|98bd2fa-57387b18-7b110210-10eba27b_0 is DEAD 2025-04-17T18:23:57.131177Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:23:57.131627Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494349582718690237:2506] destroyed 2025-04-17T18:23:57.131684Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-04-17T18:23:57.203873Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. Description: 2025-04-17T18:24:00.589118Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715691, task: 1, CA Id [3:7494349595603592346:2558]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T18:24:00.625186Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715691, task: 1, CA Id [3:7494349595603592346:2558]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:24:00.676647Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715691, task: 1, CA Id [3:7494349595603592346:2558]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:24:00.744639Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715691, task: 1, CA Id [3:7494349595603592346:2558]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:24:00.850050Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715691, task: 1, CA Id [3:7494349595603592346:2558]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:24:00.987083Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715691, task: 1, CA Id [3:7494349595603592346:2558]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 15290, MsgBus: 23351 2025-04-17T18:22:43.260626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349266137576829:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:43.267243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e18/r3tmp/tmpahrczz/pdisk_1.dat 2025-04-17T18:22:43.999344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:43.999445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:44.001624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:44.059136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15290, node 1 2025-04-17T18:22:44.274850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:44.275111Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T18:22:44.275128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:44.275238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23351 TClient is connected to server localhost:23351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:44.968382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:44.998629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:47.047967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349283317446677:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:47.048095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:47.048471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349283317446689:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:47.052204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:47.066847Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:22:47.067108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349283317446691:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:47.165650Z node 1 :TX_PROXY ERROR: Actor# [1:7494349283317446742:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:47.551557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:47.813505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:47.814471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:47.814630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:47.814842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:47.814949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:47.815039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:47.815135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:47.815239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:47.815340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:47.815432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:47.815518Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:47.815615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:47.815721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494349283317446978:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:47.822302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:47.822475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:47.822563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:47.822651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:47.822737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:47.823080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:47.823176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:47.823287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:47.823387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:47.823472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:47.823556Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494349283317446976:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:47.862870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349283317446961:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:47.862948Z no ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.403196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.409195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.413815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.419134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.428681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.434038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.447343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.458556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.472767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.487377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.494163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.508228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.522304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.535967Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.544156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.546907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.556486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.557880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.568815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.574932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.580626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.583176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.588476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.589142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.594868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.595131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.599921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.608229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.613462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.617625Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.622559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.629219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.629401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.642276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.642490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.649197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.653720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.661244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.668045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.672897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.677181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.683343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.692340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.712494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.716404Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:53.955991Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fvkfz9dsfpy0vnkxbr0h3", SessionId: ydb://session/3?node_id=1&id=NWEyOTM2Yy05NDE0NDZmNi0xYmI3YzA4Yy1kMmQ5MDA1ZQ==, Slow query, duration: 27.748600s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:54.311868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:54.312307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:54.312581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349506655796162:9808];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:23:54.312877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 64418, MsgBus: 63833 2025-04-17T18:22:40.077325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349252440974736:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:40.077409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e7f/r3tmp/tmpZZz3bz/pdisk_1.dat 2025-04-17T18:22:40.733024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:40.761025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-17T18:22:40.770328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:40.776718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64418, node 1 2025-04-17T18:22:40.986629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:40.986648Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:40.986655Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:40.986755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63833 TClient is connected to server localhost:63833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:41.817320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:41.836168Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:44.039720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349269620844595:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:44.039802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:44.040094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349269620844607:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:44.043616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:44.052725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349269620844609:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:44.118093Z node 1 :TX_PROXY ERROR: Actor# [1:7494349269620844660:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:44.435649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:44.765940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:44.770987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:44.771148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:44.771412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:44.771507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:44.771589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:44.771684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:44.771785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:44.771878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:44.771965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:44.772077Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:44.772171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:44.772253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494349269620844967:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:44.780123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:44.780423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:44.780614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:44.780781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:44.780943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:44.781107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:44.781235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:44.781386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:44.781517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:44.781666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:44.781809Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7494349269620844926:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:44.819028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349269620844946:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:44.819081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349269620844946:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.177238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.182451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.186080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.188640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.194688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.201530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.203571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.212487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.213711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.223424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.228782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.229777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.239377Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.240184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.245467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.245518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.250700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.254433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.255737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.259740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.262260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.268565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.275072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.278824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.280659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.286145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.286315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.291919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.294600Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.297188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.302707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.310501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.313053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.320117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.326626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.333847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.335049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.339501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.339903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.344973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.345133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.351899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.352048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.357406Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.372111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.537408Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fvhxhbj2fhtq0c2hqxksy", SessionId: ydb://session/3?node_id=1&id=YjZjY2M2NDMtYzIyODQwZmQtMWUxNzFmZjYtN2QxMTg0NzU=, Slow query, duration: 27.943688s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:52.838613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:52.838782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:52.839465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7494349497254160296:9735];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-17T18:23:52.839871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] Test command err: Trying to start YDB, gRPC: 24982, MsgBus: 24116 2025-04-17T18:22:38.129244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349243803250851:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:38.129372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e83/r3tmp/tmpAD5nfd/pdisk_1.dat 2025-04-17T18:22:38.738723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:38.770825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:38.770940Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:38.779195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24982, node 1 2025-04-17T18:22:38.918651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:38.918682Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:38.918690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:38.918828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24116 TClient is connected to server localhost:24116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:39.877131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:39.930689Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:22:41.834865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349256688153323:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.835017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.839460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349256688153335:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:41.846265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:22:41.860496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349256688153337:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:22:41.925647Z node 1 :TX_PROXY ERROR: Actor# [1:7494349256688153388:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:42.425765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:22:42.742943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:42.742943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:42.743188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:42.743482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:42.743610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:42.743710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:42.743820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:42.743912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:42.744053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:42.744197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:42.744318Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:42.744413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:42.744481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:22:42.744525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494349260983121039:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:42.744601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:22:42.744722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:22:42.744814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:22:42.744915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:22:42.745029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:22:42.745115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:22:42.745197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:22:42.745294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:22:42.745419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:22:42.745512Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7494349260983120964:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:22:42.794921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349260983120956:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:22:42.794969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494349260983120956:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abs ... tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.913674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.917331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.924272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.927391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.928829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.935518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.941853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.945178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.947171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.952792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.955093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.958481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.963643Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.966251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.970437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.972215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.976423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.977506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.982598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.983165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.987507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.988145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.993730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.994122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.998277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:51.999365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.002563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.005682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T18:23:52.008716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.012057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.014126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.017973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.019684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.023698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.025314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.028476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.030420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.033323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.035798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.039180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.041911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.045527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.047886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.052791Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.053016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.059351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:23:52.232145Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2fvhdz7txkqn1dhpkxk5yw", SessionId: ydb://session/3?node_id=1&id=NGE0YzU2Y2QtNGFiZmYyNjctNjQxNDFiYzAtYjc2ZTQyMQ==, Slow query, duration: 28.136253s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:23:52.806607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:52.806671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:23:52.807280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful 
[GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |95.9%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |95.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] |95.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |95.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |95.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |95.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] |95.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] |96.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] |96.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] |96.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:40.282880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:40.282963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:40.283004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:40.283043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:40.283086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:40.283132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:40.283193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:40.283279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:40.283626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
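Note: the SQL text in the 28.136253s KQP_SLOW_LOG record earlier in this section is stored with escaped newlines. Expanded for readability, it is the following three column-store DDL statements (taken verbatim from that record, including the commented-out columns; only indentation is normalized):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);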
2025-04-17T18:18:40.367502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:40.367569Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:40.380471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:40.381180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:40.381363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:40.391162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:40.391780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:40.392452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:40.392658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:40.395788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:40.397054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:40.397112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:40.397316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:40.397376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:40.397421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:40.397555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:40.403962Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:40.530573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:40.530823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
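Note: the "Got new config: QueryServiceConfig" record above enumerates the external data source types enabled in this build (ObjectStorage, ClickHouse, PostgreSQL, and so on). For context, a minimal hedged YQL sketch of declaring one such source; the object path, endpoint, and bucket names here are hypothetical illustrations, not values from the log:

CREATE EXTERNAL DATA SOURCE `/Root/sample_object_storage` WITH (
    SOURCE_TYPE = "ObjectStorage",                            -- one of the AvailableExternalDataSources values above
    LOCATION = "https://storage.example.net/sample-bucket/",  -- hypothetical endpoint
    AUTH_METHOD = "NONE"                                      -- anonymous access; other auth methods exist
);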
2025-04-17T18:18:40.531013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:40.531256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:40.531318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:40.533523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:40.533654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:40.533863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:40.533930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:40.533987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:40.534049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:40.535936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:40.535992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:40.536027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:40.537788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:40.537863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:40.537907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:40.537962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:40.541747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:40.543616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:40.543792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:40.544774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:40.544921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:40.544971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:40.545302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:40.545384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:40.545567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:40.545648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:40.547809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:40.547855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:40.548018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:40.548059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:40.548481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:40.548549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:40.548646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:40.548683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:40.548727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:40.548772Z no ... 
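Note: the describe results that follow show the schema this test builds: a table `Table` carrying an async secondary index `UserDefinedIndex` on column `indexed`, plus a keys-only CDC stream `Stream` on the same table. The test drives the schemeshard through internal APIs rather than YQL; a hedged YQL approximation of the same shape is shown below. The FORMAT value is an assumption, since the trace's ECdcStreamFormatProto has no public YQL spelling I can confirm:

ALTER TABLE `/MyRoot/Table` ADD INDEX UserDefinedIndex GLOBAL ASYNC ON (indexed);
ALTER TABLE `/MyRoot/Table` ADD CHANGEFEED `Stream` WITH (
    MODE = 'KEYS_ONLY',  -- matches ECdcStreamModeKeysOnly in the trace
    FORMAT = 'JSON'      -- assumption; see the note above
);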
1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:24:21.721256Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:24:21.721531Z node 118 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 268us result status StatusSuccess 2025-04-17T18:24:21.725345Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:24:21.738751Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1085:2883] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:24:21.738874Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1054:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:24:21.739036Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1085:2883] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914261681182 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744914261681182 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744914261681182 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:24:21.741319Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1085:2883] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-17T18:24:21.741412Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1054:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |96.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |96.0%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2025-04-17T18:24:04.747065Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349613776052920:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:24:04.747111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:24:04.886375Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349613220431390:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:24:04.886448Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001ca4/r3tmp/tmp9h79SA/pdisk_1.dat 2025-04-17T18:24:05.717948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:24:05.746553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:24:05.746652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:24:05.749086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:24:05.749140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:24:05.751317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:24:05.758839Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:24:05.758958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:24:05.760616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7734 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:24:06.372406Z node 1 :TX_PROXY DEBUG: actor# [1:7494349613776053146:2142] Handle TEvNavigate describe path dc-1 2025-04-17T18:24:06.372494Z node 1 :TX_PROXY DEBUG: Actor# [1:7494349622365988153:2419] HANDLE EvNavigateScheme dc-1 2025-04-17T18:24:06.372627Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494349618071020465:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:06.372723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494349618071020815:2383][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494349618071020465:2155], cookie# 1 2025-04-17T18:24:06.374242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494349618071020820:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494349618071020817:2383], cookie# 1 2025-04-17T18:24:06.374273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494349618071020821:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494349618071020818:2383], cookie# 1 2025-04-17T18:24:06.374288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494349618071020822:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494349618071020819:2383], cookie# 1 2025-04-17T18:24:06.374329Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494349613776052786:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494349618071020821:2383], 
cookie# 1 2025-04-17T18:24:06.374373Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494349613776052789:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494349618071020822:2383], cookie# 1 2025-04-17T18:24:06.374372Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494349613776052783:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494349618071020820:2383], cookie# 1 2025-04-17T18:24:06.374410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494349618071020821:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494349613776052786:2055], cookie# 1 2025-04-17T18:24:06.374424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494349618071020822:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494349613776052789:2058], cookie# 1 2025-04-17T18:24:06.374437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494349618071020820:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494349613776052783:2052], cookie# 1 2025-04-17T18:24:06.374467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494349618071020815:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494349618071020818:2383], cookie# 1 2025-04-17T18:24:06.374488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494349618071020815:2383][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T18:24:06.374502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494349618071020815:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494349618071020819:2383], cookie# 1 2025-04-17T18:24:06.374521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494349618071020815:2383][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T18:24:06.374549Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494349618071020815:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494349618071020817:2383], cookie# 1 2025-04-17T18:24:06.374604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494349618071020815:2383][/dc-1] Unexpected sync response: sender# [1:7494349618071020817:2383], cookie# 1 2025-04-17T18:24:06.374659Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494349618071020465:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T18:24:06.388219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494349618071020465:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494349618071020815:2383] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T18:24:06.388379Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494349618071020465:2155], cacheItem# { Subscriber: { Subscriber: [1:7494349618071020815:2383] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-17T18:24:06.396078Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494349622365988154:2420], recipient# [1:7494349622365988153:2419], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:24:06.396157Z node 1 :TX_PROXY DEBUG: Actor# [1:7494349622365988153:2419] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:24:06.473667Z node 1 :TX_PROXY DEBUG: Actor# [1:7494349622365988153:2419] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-17T18:24:06.477137Z node 1 :TX_PROXY DEBUG: Actor# [1:7494349622365988153:2419] Handle TEvDescribeSchemeResult Forward to# [1:7494349622365988152:2418] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:24:06.547483Z node 1 :TX_PROXY DEBUG: actor# [1:7494349613776053146:2142] Handle TEvProposeTransaction 2025-04-17T18:24:06.547512Z node 1 :TX_PROXY DEBUG: actor# [1:7494349613776053146:2142] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:24:06.547619Z node 1 :TX_PROXY DEBUG: actor# [1:7494349613776053146:2142] Coo ... omainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:24:24.494947Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494349698379601878:2259], recipient# [3:7494349694084634560:2588], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.495188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7494349694084634560:2588], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:24:24.570885Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494349672609797944:2194], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.571032Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494349672609797944:2194], cacheItem# { Subscriber: { Subscriber: [3:7494349672609798005:2222] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:24:24.571112Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494349698379601880:2260], recipient# [3:7494349698379601879:2591], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.674599Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494349672609797944:2194], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.674730Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494349672609797944:2194], cacheItem# { Subscriber: { Subscriber: [3:7494349672609798019:2229] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:24:24.674840Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494349698379601883:2261], recipient# [3:7494349698379601882:2592], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.858744Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7494349672609797944:2194], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.858889Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7494349672609797944:2194], cacheItem# { Subscriber: { Subscriber: [3:7494349672609798019:2229] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:24:24.858960Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7494349698379601885:2262], recipient# [3:7494349698379601884:2593], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.950312Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494349617515398919:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.950443Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494349617515398919:2107], cacheItem# { Subscriber: { Subscriber: [2:7494349617515398946:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:24:24.950525Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494349699119777711:2163], recipient# [2:7494349699119777710:2350], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.990803Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494349617515398919:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:24.990935Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494349617515398919:2107], cacheItem# { Subscriber: { Subscriber: [2:7494349617515398946:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:24:24.991029Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494349699119777713:2164], recipient# [2:7494349699119777712:2351], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:25.110705Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7494349617515398919:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T18:24:25.110848Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7494349617515398919:2107], cacheItem# { Subscriber: { Subscriber: [2:7494349647580170052:2122] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-17T18:24:25.110928Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7494349703414745011:2165], recipient# [2:7494349703414745010:2352], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo]
|96.0%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log}
|96.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0]
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1]
>> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD]
>> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD]
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD]
>> test_retry.py::TestRetry::test_low_rate[kikimr0]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1]
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD]
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1]
>> test_ttl.py::TestTTLAlterSettings::test_case
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD]
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD]
>> test_dispatch.py::TestMapping::test_mapping [GOOD]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD]
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD]
>>
test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] Test command err: Trying to start YDB, gRPC: 14874, MsgBus: 61476 2025-04-17T18:21:59.657792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349077317359232:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:21:59.657830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ebc/r3tmp/tmppOVNzy/pdisk_1.dat 2025-04-17T18:22:00.222422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:00.222531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:00.225630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:22:00.280711Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14874, node 1 2025-04-17T18:22:00.489116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:00.489132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:00.489138Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:00.489224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61476 TClient is connected to server localhost:61476 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:22:01.520645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:01.542925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:01.559654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:22:01.803129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:02.055057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:02.165393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:22:03.987040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349094497230105:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:03.987148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:04.323354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.373656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.467211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.541405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.574586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.659227Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494349077317359232:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:22:04.659273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:22:04.663734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.727236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:22:04.831218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349098792198073:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:04.831288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:04.831568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349098792198078:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:22:04.835354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:22:04.852122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-04-17T18:22:04.852386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349098792198080:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:22:04.942512Z node 1 :TX_PROXY ERROR: Actor# [1:7494349098792198137:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:22:06.228987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:22:06.269290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:22:06.304839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:22:06.348645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
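The Cost Based Optimizer warning and the long run of `Cannot compare key columns` compile errors that follow are emitted while KqpIndexLookupJoin::CheckAllKeyTypesCast iterates over join-key type pairs: EquiJoin rejects any pair whose key types cannot be compared, and since the test finishes [GOOD], these read as expected negative cases rather than regressions. A minimal sketch of the failing query shape — the table names, column names, and concrete type pair below are invented for illustration, not taken from this run:

    -- Hypothetical repro of "Cannot compare key columns" (code 1030):
    -- an equi-join on keys with incomparable types (e.g. Utf8 vs Interval)
    -- fails during EquiJoin type annotation, before execution.
    SELECT l.Value, r.Value
    FROM left_table AS l
    JOIN right_table AS r
        ON l.Key = r.Key   -- l.Key: Optional<Utf8>, r.Key: Optional<Interval>
    ORDER BY l.Value;      -- matches the Sort/SqlProject frames in the issue stacks below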
: Warning: Execution, code: 1060
:8:40: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 4961, MsgBus: 15754 2025-04-17T18:22:08.644240Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494349114800818096:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ebc/r3tmp/tmpJL1KWc/pdisk_1.dat 2025-04-17T18:22:08.757891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:22:08.897916Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:22:08.907376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:22:08.907448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:22:08.911219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4961, node 2 2025-04-17T18:22:09.051911Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:22:09.051936Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:22:09.051943Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:22:09.052044Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15754 TClient is connected to server localhost:15754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardI ... in>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:00.112812Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fwmj4dadkgnt4fqwywn6b, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:04.363525Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349613017039674:6022], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:04.366105Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fwrpy50xsqneqxvxqrn1m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:04.442984Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349613017039687:6028], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:04.445411Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fwrs1df7md8g35134acgc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:13.270098Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349651671745955:6188], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:13.272474Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fx1dq4abykrg53w8b5s0h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:17.346039Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349668851615445:6273], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:17.346734Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fx5bzbvch2sxkk21bnhnx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:17.395086Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349668851615460:6280], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:17.395475Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fx5eddn0sncdrwq7n7xeb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:17.436542Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349668851615474:6286], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:17.438964Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fx5fw21nrs9gacbc7c9ry, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:26.399629Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349707506321729:6447], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:26.402561Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fxe7aeh0bbbxwm64q06v6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:26.447272Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349707506321742:6453], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:26.450240Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fxe9cd6w7vtj4f1xc122b, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:31.799105Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349728981158556:6539], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:31.800267Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fxkh0cym7jw2vrqbxgpg0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:31.832829Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349728981158569:6545], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:31.835159Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fxkj0c05p4na8nfcp1m1g, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:43.966593Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349780520766773:6717], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:43.969067Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fxzbm44s2eg2j4g0rcjcz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:49.560885Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349806290570862:6802], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:49.564611Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fy4w0azqan8mb4h3fdgdd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:49.610497Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349806290570875:6808], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:49.612834Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fy4x7909g72vbzz2kvb55, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-17T18:24:49.652217Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494349806290570895:6817], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-17T18:24:49.652891Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZkYzk1Ny05ZjlkMzlmMC1jYjc4MGQ4My0yMjc3NmVmNw==, ActorId: [2:7494349136275657256:2513], ActorState: ExecuteState, TraceId: 01js2fy4ypa8f5vgvt3zfp1chf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] |96.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> test_dispatch.py::TestMapping::test_idle >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[delete] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] |96.1%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |96.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> KqpJoinOrder::FiveWayJoin+ColumnStore |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |96.3%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... 
results_accumulator.log} |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_auditlog.py::test_dynconfig >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::JsonAutocompleteColumns |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/00115d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2025-04-17T18:25:30.040596Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/001152/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2025-04-17T18:25:30.924572Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:25:30.924515Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-17T18:25:30.757029Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoin::TwoJoinsWithQueryService |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_single_dml_query_logged[select] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> Viewer::JsonAutocompleteColumns [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/001131/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2025-04-17T18:25:36.355189Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> KqpIndexLookupJoin::RightSemi >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] Test command err: 2025-04-17T18:25:44.370690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:114:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:25:44.371361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:25:44.371432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 25442, node 1 TClient is connected to server localhost:23211 |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoin::TwoJoinsWithQueryService [GOOD] |96.5%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> OlapEstimationRowsCorrectness::TPCDS78 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::TwoJoinsWithQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 22705, MsgBus: 14252 2025-04-17T18:25:43.507015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350041151427884:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:43.507133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f0b/r3tmp/tmp03U4iO/pdisk_1.dat 2025-04-17T18:25:43.878574Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22705, node 1 2025-04-17T18:25:43.908890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:43.909004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:43.921203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:43.957583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:25:43.957621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:25:43.957632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:25:43.957764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14252 TClient is connected to server localhost:14252 WaitRootIsUp 'Root'... 
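The `Retry LookupError for table .metadata/workload_manager/delayed_requests` message above, like the many `Resource pool default not found or you don't have access permissions` warnings throughout these runs, is emitted while the workload manager's metadata — including the `default` resource pool under `/Root/.metadata/workload_manager/pools` — is still being auto-provisioned; the `ESchemeOpCreateResourcePool` operations and `Transaction ... completed, doublechecking` retries elsewhere in this log show that pool being created on first use, after which the warnings stop. For reference, a sketch of declaring such a pool explicitly, assuming the documented CREATE RESOURCE POOL syntax; the pool name and limits are illustrative, not taken from this run:

    -- Illustrative only: an explicitly declared workload-manager pool.
    -- The tests above rely on the auto-created `default` pool instead.
    CREATE RESOURCE POOL illustrative_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to run concurrently
        QUEUE_SIZE = 100              -- queries allowed to wait for admission
    );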
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:44.471909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:46.348850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350054036330404:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.348987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.599264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:25:46.748790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350054036330513:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.748866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.755861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:25:46.811031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350054036330592:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.811119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.824564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:25:46.906676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350054036330673:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.906746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.907014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350054036330678:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:46.910359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:25:46.919380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350054036330680:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:25:47.016586Z node 1 :TX_PROXY ERROR: Actor# [1:7494350058331298029:2494] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-04-17T18:17:47.526501Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:47.608352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:47.630108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:47.630439Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:47.638145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:47.638379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:47.638605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:47.638735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:47.638836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:47.638930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:47.639030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:47.639136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 
2025-04-17T18:17:47.639251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:47.639417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:47.639510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:47.639600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:47.664323Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:47.664483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:47.664552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:47.664718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:47.664866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:47.664944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:47.664988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:47.665069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:47.665138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:47.665175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:47.665201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:47.665372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:47.665429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:47.665461Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:47.665488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:47.665562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:47.665605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:47.665644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:47.665668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:47.665761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:47.665801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:47.665827Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:47.665866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:47.665903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:47.665925Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:47.666315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-17T18:17:47.666388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-17T18:17:47.666450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-04-17T18:17:47.666562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-17T18:17:47.666717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:47.666765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:47.666800Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:47.666989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:47.667029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:47.667060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:47.667208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:47.667249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:47.667274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:47.667433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:47.667470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:47.667496Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:47.667615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:47.667648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:47.667689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
FO: tablet_id=9437184;self_id=[1:10487:12443];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T18:25:47.238508Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:47.238639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=27; 2025-04-17T18:25:47.239325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=593; 2025-04-17T18:25:47.239445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=705; 2025-04-17T18:25:47.245606Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:47.245835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-04-17T18:25:47.264254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=18295; 2025-04-17T18:25:47.286845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=21050; 2025-04-17T18:25:47.286983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=22594; 2025-04-17T18:25:47.287210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=142; 2025-04-17T18:25:47.287410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=121; 2025-04-17T18:25:47.287679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=186; 2025-04-17T18:25:47.287887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=144; 2025-04-17T18:25:47.288125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=184; 2025-04-17T18:25:47.288179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=42283; 2025-04-17T18:25:47.292734Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:47.292832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-04-17T18:25:47.297404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4478; 2025-04-17T18:25:47.350021Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=52497; 2025-04-17T18:25:47.350264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=110; 2025-04-17T18:25:47.350376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=44; 2025-04-17T18:25:47.350457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2025-04-17T18:25:47.350512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-17T18:25:47.350563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-17T18:25:47.350692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=61; 2025-04-17T18:25:47.350752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-17T18:25:47.350894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=88; 2025-04-17T18:25:47.350964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-17T18:25:47.351056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=49; 2025-04-17T18:25:47.351177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=75; 2025-04-17T18:25:47.351288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=69; 2025-04-17T18:25:47.351352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=58457; 2025-04-17T18:25:47.351614Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961132;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:25:47.352877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10487:12443];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:25:47.352976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10487:12443];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:25:47.353082Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10487:12443];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:25:47.353180Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T18:25:47.353477Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:25:47.353571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:47.353830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-17T18:25:47.353926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744913573044;tx_id=18446744073709551615;;current_snapshot_ts=1744913868672; 2025-04-17T18:25:47.353984Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:25:47.354065Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:47.354120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:47.354285Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:25:47.359821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:47.363132Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:25:47.381253Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:25:47.381341Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
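The periodic wakeup above enqueues background activities and then decides there is nothing to do: StartCleanup inspects 20 portions against the snapshot barrier (plan_step=1744913573044, with current_snapshot_ts=1744913868672), prepares none of them, and both cleanup stages and TTL report skip_reason=no_changes. A plausible reading of that barrier check, sketched as self-contained C++ with assumed types standing in for the real column-engine structures, is shown here.

#include <cstdint>
#include <iostream>
#include <vector>

struct TPortion {
    uint64_t RemovedAtPlanStep = 0;  // 0: the portion is still alive
};

struct TSnapshotBarrier {
    uint64_t PlanStep;  // e.g. plan_step=1744913573044 in the log
};

// Portions are droppable only if they were removed strictly before the oldest
// snapshot still readable; an empty result matches "skip_reason=no_changes".
std::vector<const TPortion*> CollectCleanupCandidates(
        const std::vector<TPortion>& portions, TSnapshotBarrier barrier) {
    std::vector<const TPortion*> out;
    for (const auto& p : portions) {
        if (p.RemovedAtPlanStep != 0 && p.RemovedAtPlanStep < barrier.PlanStep) {
            out.push_back(&p);
        }
    }
    return out;
}

int main() {
    std::vector<TPortion> portions(20);  // 20 live portions, as in the log
    auto candidates = CollectCleanupCandidates(portions, {1744913573044ULL});
    std::cout << "portions_prepared=" << candidates.size() << "\n";  // prints 0
}

With all 20 portions still alive (RemovedAtPlanStep == 0), the candidate list is empty, which matches the portions_prepared=0 outcome logged above.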
2025-04-17T18:25:47.381383Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:25:47.381488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:25:47.381640Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:47.381961Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-17T18:25:47.382079Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744913573044;tx_id=18446744073709551615;;current_snapshot_ts=1744913868672; 2025-04-17T18:25:47.382144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:25:47.382286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:47.382379Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:47.382509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-17T18:25:47.382585Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10487:12443];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] >> test_auditlog.py::test_dynconfig [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-04-17T18:17:24.745607Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:24.835124Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:24.858044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:24.858329Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:24.866258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:24.866479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:24.866690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:24.866830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:24.866946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:24.867049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:24.867153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:24.867280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:24.867412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:24.867570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:24.867661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:24.867726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:24.891015Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:24.891225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:24.891325Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:24.891506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:24.891664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:24.891744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:24.891792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:24.891917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:24.892004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:24.892054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:24.892089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:24.892301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:24.892381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:24.892427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:24.892464Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:24.892567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:24.892646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:24.892705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:24.892757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:24.892861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:24.892914Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:24.892949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:24.893002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:24.893047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:24.893079Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:24.893509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=73; 2025-04-17T18:17:24.893632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-17T18:17:24.893711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-17T18:17:24.893809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-17T18:17:24.894012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:24.894095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:24.894138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:24.894420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:24.894490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:24.894524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:24.894694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:24.894750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:24.894780Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:24.894977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:24.895031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:24.895062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:24.895219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:24.895274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:24.895326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 4;self_id=[1:15676:17634];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T18:25:48.444616Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:48.444730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-04-17T18:25:48.445185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=371; 2025-04-17T18:25:48.445237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=451; 2025-04-17T18:25:48.450100Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:48.450347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=161; 2025-04-17T18:25:48.464755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=14284; 2025-04-17T18:25:48.479468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12617; 2025-04-17T18:25:48.479610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=14715; 2025-04-17T18:25:48.479792Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=110; 2025-04-17T18:25:48.479941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=100; 2025-04-17T18:25:48.480147Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=152; 2025-04-17T18:25:48.480325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=127; 2025-04-17T18:25:48.480614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=211; 2025-04-17T18:25:48.480663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=30255; 2025-04-17T18:25:48.485981Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:48.486082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-04-17T18:25:48.498386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12191; 2025-04-17T18:25:48.543104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=44585; 2025-04-17T18:25:48.543259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=48; 2025-04-17T18:25:48.543365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=31; 2025-04-17T18:25:48.543423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-04-17T18:25:48.543502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=21; 2025-04-17T18:25:48.543562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-04-17T18:25:48.543695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=85; 2025-04-17T18:25:48.543774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=12; 2025-04-17T18:25:48.543888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=68; 2025-04-17T18:25:48.543959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=11; 2025-04-17T18:25:48.544056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2025-04-17T18:25:48.544172Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=68; 2025-04-17T18:25:48.544297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=68; 2025-04-17T18:25:48.544345Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=58197; 2025-04-17T18:25:48.544578Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111592968;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990344;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:25:48.545457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15676:17634];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:25:48.545560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15676:17634];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:25:48.545674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:25:48.545753Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T18:25:48.546006Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:25:48.546106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:48.546376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-17T18:25:48.546474Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744913550396;tx_id=18446744073709551615;;current_snapshot_ts=1744913845900; 2025-04-17T18:25:48.546536Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:25:48.546630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:48.546686Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:48.546808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:25:48.552194Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:48.556631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:25:48.560723Z node 
1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:25:48.560788Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-17T18:25:48.560823Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:25:48.560901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:25:48.560991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:48.561281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-17T18:25:48.561373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744913550396;tx_id=18446744073709551615;;current_snapshot_ts=1744913845900; 2025-04-17T18:25:48.561434Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:25:48.561498Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:48.561553Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:48.561656Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-17T18:25:48.561789Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15676:17634];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoinOrder::TPCDS94-ColumnStore |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> BasicUsage::RetryDiscoveryWithCancel >> KqpIndexLookupJoin::RightSemi [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::RightSemi [GOOD] Test command err: Trying to start YDB, gRPC: 28787, MsgBus: 15850 2025-04-17T18:25:48.223534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350059319543877:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:48.224102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000eae/r3tmp/tmpVSuiSE/pdisk_1.dat 2025-04-17T18:25:48.561350Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28787, node 1 2025-04-17T18:25:48.620721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:48.621196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:48.625781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:48.639547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:25:48.639573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:25:48.639589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:25:48.639710Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15850 TClient is connected to server localhost:15850 WaitRootIsUp 'Root'... 
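The default resource pool bootstrap seen at the start of this section, and again in the records that follow, is an idempotent create-or-accept race: the first fetch fails with NOT_FOUND, TPoolCreatorActor schedules a retry that logs "Transaction ... completed, doublechecking", and a creator that loses the race gets "path exist, request accepts it" from TX_PROXY. A self-contained C++ sketch of that pattern follows, using assumed status codes and an in-memory path store rather than the real KQP workload-service and SchemeShard types.

#include <iostream>
#include <mutex>
#include <set>
#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// Toy scheme board; the real path store is SchemeShard, not an in-memory set.
class TSchemeBoard {
    std::mutex M;
    std::set<std::string> Paths;
public:
    EStatus Fetch(const std::string& p) {
        std::lock_guard<std::mutex> g(M);
        return Paths.count(p) ? EStatus::Ok : EStatus::NotFound;
    }
    EStatus Create(const std::string& p) {
        std::lock_guard<std::mutex> g(M);
        return Paths.insert(p).second ? EStatus::Ok : EStatus::AlreadyExists;
    }
};

// Mirrors the observed behaviour: fetch, create on NOT_FOUND, and on losing
// the race doublecheck that the pool really exists before reporting success.
EStatus EnsurePool(TSchemeBoard& board, const std::string& path) {
    if (board.Fetch(path) == EStatus::Ok) {
        return EStatus::Ok;
    }
    EStatus st = board.Create(path);
    if (st == EStatus::AlreadyExists) {
        return board.Fetch(path);  // "path exist, request accepts it"
    }
    return st;
}

int main() {
    TSchemeBoard board;
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    std::cout << static_cast<int>(EnsurePool(board, pool)) << "\n";  // 0: created
    std::cout << static_cast<int>(EnsurePool(board, pool)) << "\n";  // 0: accepted
}

Whichever actor wins the race, every caller ends up with Ok, which is why the test proceeds to create its tables immediately after the TX_PROXY ERROR line.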
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:49.116112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:49.139565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:49.283491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:25:49.458149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.531397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:51.058724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350072204447432:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:51.058858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:51.362371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:25:51.387205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:25:51.413369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:25:51.440831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:25:51.471236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:25:51.504669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:25:51.535800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:25:51.609105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350072204448093:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:51.609188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:51.609392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350072204448098:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:51.612979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:25:51.622567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350072204448100:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:25:51.688616Z node 1 :TX_PROXY ERROR: Actor# [1:7494350072204448154:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:25:52.605500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:25:52.633089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T18:25:52.690423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:25:52.715645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:25:52.754225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:25:52.821518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:25:53.227582Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350059319543877:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:53.227637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] >> overlapping_portions.py::TestOverlappingPortions::test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] |96.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-04-17T18:17:30.329645Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:30.423443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:30.450134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:30.450425Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:30.458400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:30.458627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:30.458861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:30.458989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:30.459124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:30.459237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:30.459358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:30.459482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:30.459661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:30.459780Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:30.459893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:30.459996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:30.489245Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:30.489350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:30.489393Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:30.489515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:30.489645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:30.489714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:30.489750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:30.489830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:30.489896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:30.489929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:30.489948Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:30.490080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:30.490120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:30.490142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:30.490162Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:30.490255Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:30.490289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:30.490331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:30.490352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:30.490412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:30.490438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:30.490458Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:30.490498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:30.490524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:30.490543Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:30.490830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-17T18:17:30.490909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-17T18:17:30.490964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-04-17T18:17:30.491016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=23; 2025-04-17T18:17:30.491173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:30.491210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:30.491249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:30.491358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-17T18:17:30.491386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:30.491402Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:30.491519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:30.491548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:30.491566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:30.491688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:30.491723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:30.491743Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:30.491847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:30.491884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:30.491921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
7184;self_id=[1:15675:17633];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T18:25:55.999117Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:55.999229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-04-17T18:25:55.999691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=382; 2025-04-17T18:25:55.999739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=456; 2025-04-17T18:25:56.005598Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:56.005703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-04-17T18:25:56.017253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11445; 2025-04-17T18:25:56.029902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11172; 2025-04-17T18:25:56.030027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12667; 2025-04-17T18:25:56.030246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=145; 2025-04-17T18:25:56.030394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=91; 2025-04-17T18:25:56.030604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=158; 2025-04-17T18:25:56.030771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=116; 2025-04-17T18:25:56.031018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=193; 2025-04-17T18:25:56.031065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25311; 2025-04-17T18:25:56.038826Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T18:25:56.038932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-04-17T18:25:56.050553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11506; 2025-04-17T18:25:56.099271Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=48568; 2025-04-17T18:25:56.099431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-04-17T18:25:56.099520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=34; 2025-04-17T18:25:56.099584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-17T18:25:56.099644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-17T18:25:56.099702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2025-04-17T18:25:56.099798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=52; 2025-04-17T18:25:56.099861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-04-17T18:25:56.099981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=75; 2025-04-17T18:25:56.100038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-17T18:25:56.100115Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-04-17T18:25:56.100228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=67; 2025-04-17T18:25:56.100330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=56; 2025-04-17T18:25:56.100388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=61382; 2025-04-17T18:25:56.100617Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111592952;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990344;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T18:25:56.101487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T18:25:56.101574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T18:25:56.101667Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T18:25:56.101726Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T18:25:56.101968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:25:56.102052Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:56.102304Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-17T18:25:56.102397Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744913555982;tx_id=18446744073709551615;;current_snapshot_ts=1744913851487; 2025-04-17T18:25:56.102453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:25:56.102516Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:56.102568Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:56.102708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T18:25:56.107462Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:56.112976Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T18:25:56.114289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T18:25:56.114347Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T18:25:56.114381Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T18:25:56.114442Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T18:25:56.114535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T18:25:56.114864Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-17T18:25:56.114959Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744913555982;tx_id=18446744073709551615;;current_snapshot_ts=1744913851487; 2025-04-17T18:25:56.115016Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T18:25:56.115111Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:56.115178Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T18:25:56.115290Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-17T18:25:56.115377Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_dispatch.py::TestMapping::test_idle [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/6873/001107/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit.txt 2025-04-17T18:25:44.824016Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/0010d5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt 2025-04-17T18:25:51.458076Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> BasicUsage::WaitEventBlocksBeforeDiscovery >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-04-17 18:25:52,013 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 18:25:52,398 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
pid    rss  ref  pdirt
544573 715M 680M 632M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/6873/001977/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
546689 2.6G 2.5G 2.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/6873/001977/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_m
549192 453M 376M 422M └─ moto_server s3 --port 62082
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test
    if not self.wait_for(
  File "ydb/tests/olap/ttl_tiering/base.py", line 86, in wait_for
    if condition_func():
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 172, in
    lambda: get_rows_in_portion(bucket1_path) == self.row_count
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 160, in get_rows_in_portion
    logger.info(f"portions: {portions_stat}, blobs: {table.get_blob_stat_by_tier()}")
  File "ydb/tests/olap/common/column_table_helper.py", line 41, in get_blob_stat_by_tier
    results = self.ydb_client.query(stmt)
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/001977/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root',
'/home/runner/.ya/build/build_root/6873/001977', '--source-root', '/home/runner/.ya/build/build_root/6873/001977/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/001977/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/001977/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/001977', '--source-root', '/home/runner/.ya/build/build_root/6873/001977/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/001977/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {}) >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable |96.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/0010a1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit.txt 2025-04-17T18:25:59.374328Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/0010a5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit.txt 2025-04-17T18:25:56.580283Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:25:56.580217Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-17T18:25:56.451520Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/001087/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2025-04-17T18:26:06.538870Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:06.538823Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-17T18:26:06.466628Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/001080/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt 2025-04-17T18:26:07.177853Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:07.177726Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-17T18:26:07.005870Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> 
test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 12259, MsgBus: 3779 2025-04-17T18:26:10.874258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350156259944629:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:10.875156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b87/r3tmp/tmpJ7Xm2n/pdisk_1.dat 2025-04-17T18:26:11.372550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:26:11.372664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:26:11.374889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:26:11.412875Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12259, node 1 2025-04-17T18:26:11.521636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:26:11.521663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:26:11.521671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:26:11.522620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3779 TClient is connected to server localhost:3779 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:26:12.129331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:26:12.144875Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:26:12.170658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T18:26:12.195538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T18:26:12.213655Z node 1 :TX_PROXY ERROR: Actor# [1:7494350164849879916:2343] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:26:12.213916Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 64800, MsgBus: 9111 2025-04-17T18:26:14.749580Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350173016298820:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b87/r3tmp/tmpcJVESm/pdisk_1.dat 2025-04-17T18:26:14.838565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:26:14.903211Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:26:14.924115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:26:14.924204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:26:14.927230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64800, node 2 2025-04-17T18:26:15.040064Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:26:15.040082Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:26:15.040088Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:26:15.040213Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9111 TClient is connected to server localhost:9111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T18:26:15.491197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:26:15.503057Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:26:15.525246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T18:26:15.548333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1147, MsgBus: 18861 2025-04-17T18:26:18.517987Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494350188441226766:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:18.518035Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001b87/r3tmp/tmpaIvvki/pdisk_1.dat 2025-04-17T18:26:18.667458Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:26:18.667526Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:26:18.670506Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:26:18.681645Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1147, node 3 2025-04-17T18:26:18.755414Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:26:18.755437Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:26:18.755445Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:26:18.755562Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18861 TClient is connected to server localhost:18861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
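Before each test run above, the HIVE warnings trace a node's volatile state through Unknown -> Disconnected -> Connecting -> Connected. An illustrative consistency check over such transitions (the helper names are ours, not YDB's) could look like:

import re

TRANSITION_RE = re.compile(r"VolatileState: (\w+) -> (\w+)")

def transitions(lines):
    """Extract (from_state, to_state) pairs from HIVE-style log lines."""
    for line in lines:
        m = TRANSITION_RE.search(line)
        if m:
            yield m.group(1), m.group(2)

def is_consistent(pairs):
    """Each transition must start where the previous one ended."""
    pairs = list(pairs)
    return all(prev_to == cur_from
               for (_, prev_to), (cur_from, _) in zip(pairs, pairs[1:]))

log = [
    "HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected",
    "HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting",
    "HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected",
]
assert is_consistent(transitions(log))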
2025-04-17T18:26:19.219420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:26:19.230986Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:26:19.247936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 >> test_auditlog.py::test_dml_begin_commit_logged >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 65047, MsgBus: 18453 2025-04-17T18:25:44.969261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350042834542853:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:44.975410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000ee6/r3tmp/tmpnFvokk/pdisk_1.dat 2025-04-17T18:25:45.399414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:45.399488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:45.401039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:45.436539Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65047, node 1 2025-04-17T18:25:45.524994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:25:45.525022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:25:45.525037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:25:45.525152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:18453 TClient is connected to server localhost:18453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:46.065001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:46.079861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:25:47.895419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350055719445394:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:47.895428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350055719445404:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:47.895559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:47.900011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:25:47.909473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350055719445408:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:25:47.987332Z node 1 :TX_PROXY ERROR: Actor# [1:7494350055719445459:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:25:48.245209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.345337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.371700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.440038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.475048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.640846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.735103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.803315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.828752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.855385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.918570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.944600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:25:48.967802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.544912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:25:49.578197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.611302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.643635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.671488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.699922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.725629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.753176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.781790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.811488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.853268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.882406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.907638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.931729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.959709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:25:49.969220Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350042834542853:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:49.969321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:25:49.994583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:25:50.024183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:15.987552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:15.987557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:15.993301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:15.993517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:15.999832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.000042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.005532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.005836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.012641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.014109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.017165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.018954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.024479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.025685Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.031678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.037255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.037872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.043073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.043579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.049141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.049148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.055257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.055258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.062536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.067832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.067832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.073606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.073605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.079472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.079473Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.085417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.085417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.091173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.091180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.097378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.102875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.104954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.109128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.111432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.115910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.117423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.121755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.122883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.127633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.128323Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:16.288039Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2g00wq79r6a6tvqbhycrpb", SessionId: ydb://session/3?node_id=1&id=MzgwZDdjYmYtOTk2ZDkzZWEtZTg4MzNkZDUtODdhNWQyZmQ=, Slow query, duration: 25.288122s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:26:16.498562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:16.498983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:16.499267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7494350120143974388:4993];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-04-17T18:26:16.499567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] 
Test command err: AAA /home/runner/.ya/build/build_root/6873/001000/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit.txt 2025-04-17T18:26:11.921078Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:11.921032Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-17T18:26:11.742328Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |96.9%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000ff9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit.txt 2025-04-17T18:26:18.515864Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> BasicUsage::WriteSessionWriteInHandlers ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000ffa/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt 2025-04-17T18:26:18.714333Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-04-17T18:26:18.714280Z","sanitized_token":"**** 
(B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-17T18:26:18.471487Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000ff5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit.txt 2025-04-17T18:26:22.649091Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:22.649040Z","sanitized_token":"**** 
(B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-17T18:26:22.589930Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test_auditlog.py::test_single_dml_query_logged[upsert] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] |97.0%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000ff1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt 2025-04-17T18:26:24.659749Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-04-17T18:26:30.483928Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1744914390483896 2025-04-17T18:26:30.795861Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350240480656191:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:30.796409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:26:30.852602Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350243368708450:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:30.852652Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001837/r3tmp/tmpFACPmV/pdisk_1.dat 2025-04-17T18:26:31.059132Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:26:31.054359Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:26:31.358015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:26:31.367476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:26:31.367589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:26:31.367704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:26:31.367738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:26:31.371434Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:26:31.371853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:26:31.372643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29011, node 1 2025-04-17T18:26:31.593907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001837/r3tmp/yandexH0RhJp.tmp 2025-04-17T18:26:31.593932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001837/r3tmp/yandexH0RhJp.tmp 2025-04-17T18:26:31.594136Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001837/r3tmp/yandexH0RhJp.tmp 2025-04-17T18:26:31.594293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:26:31.651467Z INFO: TTestServer started on Port 30071 GrpcPort 29011 TClient is connected to server localhost:30071 PQClient connected to localhost:29011 WaitRootIsUp 'Root'... 
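The NET_CLASSIFIER lines in this and the earlier test setups first report the distributable config as empty, broken, or outdated and then initialize from a temporary file. A hedged sketch of that prefer-then-fall-back pattern (our illustration only, not YDB's actual loader):

import tempfile
from pathlib import Path
from typing import Optional

def load_classifier_config(distributable: Optional[str], file_path: Path) -> str:
    """Prefer the distributed config; otherwise fall back to the local file copy."""
    if distributable:
        return distributable
    return file_path.read_text()

with tempfile.NamedTemporaryFile("w", suffix=".tmp", delete=False) as f:
    f.write("classifier-rules-v1")
assert load_classifier_config(None, Path(f.name)) == "classifier-rules-v1"
assert load_classifier_config("fresh-rules", Path(f.name)) == "fresh-rules"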
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:26:31.968248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:26:34.418735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350257660526403:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:26:34.418840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:26:34.419024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350257660526415:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:26:34.423372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:26:34.488696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350257660526417:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:26:34.706375Z node 1 :TX_PROXY ERROR: Actor# [1:7494350257660526504:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:26:34.742172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:26:34.746067Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494350257660526514:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:26:34.747153Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTQ3NmNmNDYtOTJkMjMyYzYtYWM0YTk0NWMtZjk0ZThlMDI=, ActorId: [1:7494350257660526400:2336], ActorState: ExecuteState, TraceId: 01js2g1b9g93sf9f7c2mj2zgxr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:26:34.746756Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494350260548577974:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:26:34.747321Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2JlM2YzNS02YzM3ZGRjMi1mZjQxMWVhZi02ZmU2ZjBhYw==, ActorId: [2:7494350260548577925:2307], ActorState: ExecuteState, TraceId: 01js2g1bac5e7eqbrcr9fp0nvq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:26:34.762543Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:26:34.762431Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:26:34.930959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:26:35.110734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:29011", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-17T18:26:35.427267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2g1c2scjf0arkjwfvsy3x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFjOTc1MjAtNjhiNGQ3OTMtZTMwMDBlZjYtNWNhZWU0NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7494350261955494273:3024] 2025-04-17T18:26:35.798295Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350240480656191:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:35.798374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:26:35.853908Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350243368708450:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:35.853966Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-17T18:26:41.271359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:29011 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: 2025-04-17T18:26:41.352842Z node 1 :PERSQUEUE INFO: proxy answer Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:29011 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 864 ... 892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:26:42.899680Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-04-17T18:26:42.899853Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-04-17T18:26:42.900883Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-04-17T18:26:42.901304Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1744914402901 2025-04-17T18:26:42.901415Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:26:42.901445Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:26:42.901461Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T18:26:42.901473Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:26:42.901485Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-04-17T18:26:42.901506Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, 
State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-04-17T18:26:42.901520Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:26:42.901534Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:26:42.901547Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:26:42.901614Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:26:42.901671Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-04-17T18:26:42.904281Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [1:7494350287725298791:2499] 2025-04-17T18:26:42.904381Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 size 169 2025-04-17T18:26:42.904389Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:26:42.904441Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-17T18:26:42.904499Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-17T18:26:42.904639Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-17T18:26:42.904696Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:26:42.905341Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-17T18:26:42.905525Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 3000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-17T18:26:42.905550Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 
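These DEBUG records trace one message end to end: blob formation, the passthrough KV write, L1/L2 cache insertion, and the "is stored on disk" reply that the SDK surfaces as `acks { seq_no: 2 written { offset: 1 } }`. Purely as an illustration (a hypothetical stdlib-only helper, not test code), the SeqNo-to-Offset pairs can be pulled out of such log text like so:

```python
# Hypothetical helper: extract SeqNo -> Offset acknowledgement pairs from
# PERSQUEUE "Answering for message sourceid" records like the ones above.
import re

ACK_RE = re.compile(
    r"Answering for message sourceid: '(?P<source>[^']*)', "
    r"Topic: '(?P<topic>[^']*)', Partition: (?P<partition>\d+), "
    r"SeqNo: (?P<seqno>\d+), partNo: (?P<partno>\d+), "
    r"Offset: (?P<offset>\d+) is stored on disk"
)

def acked_offsets(log_text: str) -> dict[int, int]:
    """Map each acknowledged SeqNo to the partition offset it was stored at."""
    return {
        int(m.group("seqno")): int(m.group("offset"))
        for m in ACK_RE.finditer(log_text)
    }
```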
2025-04-17T18:26:42.905597Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-04-17T18:26:42.905896Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-04-17T18:26:42.906307Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session: try to update token 2025-04-17T18:26:42.906356Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-04-17T18:26:42.907098Z :INFO: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-04-17T18:26:42.907134Z :INFO: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session will now close 2025-04-17T18:26:42.907196Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session: aborting 2025-04-17T18:26:42.906952Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|bb07b0f6-d19992dc-86b82059-4be76237_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-17T18:26:42.907226Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-17T18:26:42.907652Z :WARNING: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-17T18:26:42.907698Z :DEBUG: [/Root] TraceId [] SessionId [src_id|bb07b0f6-d19992dc-86b82059-4be76237_0] MessageGroupId [src_id] Write session: destroy 2025-04-17T18:26:42.907610Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:26:42.907639Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:26:42.907707Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 4 requestId: cookie: 3 2025-04-17T18:26:42.907755Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-17T18:26:42.908021Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:26:42.908044Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:26:42.908085Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 5 size 116 offset: -1 2025-04-17T18:26:42.908279Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-04-17T18:26:42.909863Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|bb07b0f6-d19992dc-86b82059-4be76237_0 grpc read done: success: 0 data: 2025-04-17T18:26:42.910389Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 
sessionId: src_id|bb07b0f6-d19992dc-86b82059-4be76237_0 grpc read failed 2025-04-17T18:26:42.911647Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|bb07b0f6-d19992dc-86b82059-4be76237_0 grpc closed 2025-04-17T18:26:42.911961Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|bb07b0f6-d19992dc-86b82059-4be76237_0 is DEAD 2025-04-17T18:26:42.912898Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:26:42.913038Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7494350292020266479:2553] destroyed 2025-04-17T18:26:42.913074Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 190 count 1 nextOffset 3 batches 1 2025-04-17T18:26:42.913531Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 178 WTime 1744914402913 2025-04-17T18:26:42.913638Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:26:42.913658Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:26:42.913673Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-17T18:26:42.913686Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:26:42.913699Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-04-17T18:26:42.913733Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-17T18:26:42.913744Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:26:42.913758Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:26:42.913771Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:26:42.913833Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T18:26:42.913852Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:26:42.913934Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 178 2025-04-17T18:26:42.916201Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 178 actorID [1:7494350287725298791:2499] 2025-04-17T18:26:42.916335Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 size 178 2025-04-17T18:26:42.916348Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 123 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:26:42.916383Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-17T18:26:42.916421Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-17T18:26:42.916434Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 5 requestId: cookie: 3 >> KqpJoinOrder::TPCDS94-ColumnStore [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000fea/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit.txt 2025-04-17T18:26:38.702294Z: 
{"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:38.702239Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-17T18:26:38.602927Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:38.960938Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:38.960898Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-17T18:26:38.810491Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:39.180105Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:39.180062Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-17T18:26:39.068732Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:39.443535Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:39.443505Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-17T18:26:39.288556Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:39.639779Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:39.639737Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-17T18:26:39.553313Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:39.853377Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:39.853343Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-17T18:26:39.749736Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000feb/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt 
2025-04-17T18:26:37.931169Z: {"tx_id":"01js2g1eq97355th8c3hbmcvm5","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:37.931107Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-17T18:26:37.928662Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-04-17T18:26:38.274773Z: {"tx_id":"01js2g1eq97355th8c3hbmcvm5","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:38.274725Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-17T18:26:37.946832Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:38.308542Z: {"tx_id":"01js2g1eq97355th8c3hbmcvm5","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:38.308496Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-17T18:26:38.283805Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 4486, MsgBus: 26877 2025-04-17T18:25:52.978075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350079936194836:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:52.978234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e6f/r3tmp/tmpM1LLDr/pdisk_1.dat 2025-04-17T18:25:53.310808Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:25:53.355246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:53.355340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4486, node 1 2025-04-17T18:25:53.360125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:53.416581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:25:53.416599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:25:53.416609Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:25:53.416695Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26877 TClient is connected to server localhost:26877 WaitRootIsUp 'Root'... 
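Each record in the test_auditlog blocks above is a single-line JSON object behind a capture timestamp and ": ". A small stdlib sketch (a hypothetical helper, not part of the suite) for loading such entries:

```python
# Hypothetical helper: split '2025-04-17T18:26:38.702294Z: {...}' audit lines
# into (capture timestamp, parsed record). The first ": " in a line always
# follows the timestamp's trailing "Z", so a single partition() suffices.
import json

def parse_audit_line(line: str) -> tuple[str, dict]:
    stamp, _, payload = line.partition(": ")
    return stamp, json.loads(payload)

# For the entries above, rec["operation"] == "ExecuteDataQueryRequest"
# and rec["subject"] is e.g. "other-user@builtin" or "root@builtin".
```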
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:53.876067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:53.899035Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:25:55.882636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350092821097395:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:55.882652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350092821097384:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:55.882788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:55.886055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:25:55.897667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350092821097398:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:25:55.981021Z node 1 :TX_PROXY ERROR: Actor# [1:7494350092821097449:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:25:56.279983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.395858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.422140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.448466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.524550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.648975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.679317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.709607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.740083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.776584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.814638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.841567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:25:56.865360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.419418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T18:25:57.491602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.567215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.636655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.667079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.695070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.728684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.757081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.824394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.852783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.882733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.912522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.968234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T18:25:57.977520Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350079936194836:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:57.977581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:25:58.032770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T18:25:58.098426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at 
schemeshard: 72057594046644480 2025-04-17T18:25:58.125144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T18:25:58.191583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... ARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.763556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.764581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.768928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.769754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038452;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.774298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.774426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.778358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.779642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.783710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.784692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.789021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.790080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.793331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038480;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.795242Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.800089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.805439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.811065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.816658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.822442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.827745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.832732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.838414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.843303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.846909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.848011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.853066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.853066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.858667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.858839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
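The long runs of TX_COLUMNSHARD_TX WARN records above are one "finished_tx" notification per column-shard tablet for the same distributed transaction. A stdlib-only sketch (illustrative, hypothetical name) that condenses such runs into a per-transaction tablet count:

```python
# Hypothetical helper: count distinct column-shard tablets reporting
# event=finished_tx for each tx_id in a chunk of log text. Fields within one
# record are ';'-separated with no spaces, so \S*? keeps the match inside a
# single record instead of spilling into the next one.
import re
from collections import defaultdict

FINISHED_TX_RE = re.compile(
    r"tablet_id=(?P<tablet>\d+);\S*?event=finished_tx;tx_id=(?P<tx>\d+);"
)

def finished_tx_summary(log_text: str) -> dict[str, int]:
    tablets: dict[str, set[str]] = defaultdict(set)
    for m in FINISHED_TX_RE.finditer(log_text):
        tablets[m.group("tx")].add(m.group("tablet"))
    return {tx: len(ids) for tx, ids in tablets.items()}
```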
2025-04-17T18:26:27.865110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.865110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.871553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.871572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.877351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038486;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.877351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.886455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.890791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.896143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.897848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.907969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.911539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.914254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.918364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.919884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.924307Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038506;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.956940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:27.994535Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2g094f4rgp5rshd3gteq5y", SessionId: ydb://session/3?node_id=1&id=ZDI0YmRkMmQtOTc1ZTk1YjMtMmViYTZkMi1iYjkxOGZm, Slow query, duration: 28.554620s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:26:28.452761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:28.452761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:28.453559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1328, MsgBus: 16804 2025-04-17T18:25:31.393178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494349986900097821:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:31.393236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f91/r3tmp/tmpBNNaLl/pdisk_1.dat 2025-04-17T18:25:31.805508Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1328, node 1 2025-04-17T18:25:31.814658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:31.814756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:31.823123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:31.893296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:25:31.893324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:25:31.893333Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:25:31.893450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
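The KQP_SLOW_LOG record above reports a 28.554620 s DDL batch whose query text is logged with escaped newlines. Unescaped for readability (copied verbatim from that record, held in a Python constant only to keep one language across these sketches):

```python
# The slow query's text, verbatim from the KQP_SLOW_LOG record above,
# with the logged "\n" escapes expanded.
COLUMN_STORE_DDL = """
CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
"""
```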
configuration TClient is connected to server localhost:16804 TClient is connected to server localhost:16804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:32.424754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:32.437454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:25:34.350323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349999785000358:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:34.350331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494349999785000370:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:34.350441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:34.354195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:25:34.364926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494349999785000372:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:25:34.440410Z node 1 :TX_PROXY ERROR: Actor# [1:7494349999785000425:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:25:34.725094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:25:34.916263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:34.916484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:25:34.916802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:25:34.916954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:25:34.917071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:25:34.917199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:25:34.917333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:25:34.917525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:25:34.917659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:25:34.917758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:34.917798Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:25:34.917829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:25:34.917906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:25:34.917941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:25:34.917989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494349999785000680:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:25:34.918068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:25:34.918505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:25:34.918663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:25:34.918767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:25:34.918869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:25:34.919014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:25:34.919149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:25:34.919283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:25:34.919422Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7494349999785000712:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:25:34.964843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494349999785000697:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:34.964906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494349999785000697:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstr ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.455357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.459029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.460554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.465848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.465976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.471189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.471474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.476217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.479563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.481947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.485289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.487610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.491822Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.493578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.496103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.503726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.505197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.510416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.510971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.515098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.520041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.521001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.525962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.526233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.532143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.536573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.538262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.541315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.543583Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.545289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.548975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.550710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.554514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.555662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.561260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.566839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.572830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.579141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.593075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.594208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.598895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.600575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.604809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.605986Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.609212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:33.749903Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2g0haz9htr84xhgfp9bed2", SessionId: ydb://session/3?node_id=1&id=YzUwNTNjNDUtM2M1MDU2N2UtY2UyNjJiZmMtMmE3YmJhZg==, Slow query, duration: 25.910150s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:26:33.992754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:33.993050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494350154403860136:7857];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T18:26:33.993223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:33.993594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |97.2%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:29.175497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:29.175579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:29.175616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:29.175649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:29.175694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:29.175742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:29.175805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:29.175893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:29.176207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:29.255969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:29.256026Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is 
[1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:29.264206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:29.264701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:29.264818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:29.271576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:29.272027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:29.272452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.272591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:29.274560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:29.275555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:29.275593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:29.275730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:29.275771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:29.275799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:29.275900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:29.280583Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:29.371738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:29.371920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.372073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:29.372261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:29.372306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.373925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.374039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:29.374234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.374308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:29.374351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:29.374376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:29.375804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.375840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:29.375867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:29.377007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.377057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.377092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:29.377135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:29.379514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:29.380629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:29.380749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:29.381472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:29.381568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:29.381605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:29.381767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:29.381820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:29.381939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:29.381989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:29.383488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:29.383529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:29.383657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:29.383695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:29.383988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:29.384029Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:29.384100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:29.384124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:29.384148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:29.384185Z no ... " } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:26:50.664500Z node 164 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:26:50.664794Z node 164 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 279us result status StatusSuccess 2025-04-17T18:26:50.665635Z node 164 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 
InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:26:50.676805Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1181:2964] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:26:50.676933Z node 164 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1150:2964] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-17T18:26:50.677114Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1181:2964] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914410630565 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744914410630565 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744914410630565 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:26:50.679963Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1181:2964] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-17T18:26:50.680070Z node 164 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1150:2964] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |97.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000fe5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit.txt >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 2240, MsgBus: 18021 2025-04-17T18:25:38.116634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350016792465570:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:38.118764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000f4f/r3tmp/tmpaiM6qP/pdisk_1.dat 2025-04-17T18:25:38.452469Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2240, node 1 2025-04-17T18:25:38.549748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:38.555597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:38.557284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:38.575513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:25:38.575538Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2025-04-17T18:25:38.575548Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:25:38.575670Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18021 TClient is connected to server localhost:18021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:39.092478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:41.200526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350029677368125:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:41.200564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350029677368113:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:41.200669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:41.205078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:25:41.215050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350029677368127:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:25:41.312320Z node 1 :TX_PROXY ERROR: Actor# [1:7494350029677368178:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:25:41.576579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:25:41.811183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:41.811419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:25:41.811705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:25:41.811831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:25:41.811940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:25:41.812058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:25:41.812164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:25:41.812280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:25:41.812409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:25:41.812544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:25:41.812661Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:25:41.812779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494350029677368433:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:25:41.831506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:41.831566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:25:41.831769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:25:41.831879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:25:41.831981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:25:41.832082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:25:41.832188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:25:41.832290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:25:41.832382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:25:41.832477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:25:41.832572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:25:41.832660Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494350029677368442:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:25:41.844622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494350029677368492:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:41.844688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494350029677368492:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:25:41.844877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_ ... ller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.752677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.757217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.757287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.762057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.762219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.766812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.767122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.771214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.771839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.775599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.776513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:42.779695Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.781258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.783806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.786061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.787376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.790779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.791266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.795087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.795333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.798521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.800604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.802832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.806014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.807179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.811041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.811448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.815737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.815810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.820139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.820472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.825758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.825903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.831003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.831413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.837103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.837147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.842144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.842339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.846972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.847313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.852476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.852480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.858108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.858286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714;
2025-04-17T18:26:42.946453Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2g0s9k5vt15z79sbsr19cb", SessionId: ydb://session/3?node_id=1&id=ZWU3ZDc2OTctMWQxNTQyYzEtYTMzNjkxYWItZGM1NzU1Yzk=, Slow query, duration: 26.958975s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-17T18:26:43.156848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:26:43.157266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7494350261605658220:10669];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094;
2025-04-17T18:26:43.157372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
2025-04-17T18:26:43.157644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716;
|97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0]
|97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >>
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000fdd/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit.txt 2025-04-17T18:26:49.419845Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:49.419797Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-17T18:26:49.346057Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> BasicUsage::FallbackToSingleDb |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> 
TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:50.232432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:50.232493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:50.232523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:50.232550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:50.232585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:50.232617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:50.232670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:50.232744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:50.233006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:50.292530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:50.292584Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:50.304579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:50.305192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:50.305378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-04-17T18:18:50.316564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:50.317149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:50.317805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:50.318032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:50.321124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:50.322538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:50.322602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:50.322832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:50.322884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:50.322927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:50.323075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:50.330481Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:50.414605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:50.414823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.414988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:50.415166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:50.415207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.417033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:50.417146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:50.417308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.417357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:50.417402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:50.417428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:50.418746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.418784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:50.418811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:50.420027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.420069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.420101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:50.420151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:50.422775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:50.423984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:50.424128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:50.424813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:50.424931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:50.424964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:50.425142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:50.425189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:50.425333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:50.425383Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:50.426835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:50.426869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:50.426978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:50.427008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:50.427261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:50.427303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:50.427375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:50.427401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:50.427438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:50.427481Z no ... CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 
72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:27:05.000711Z node 179 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:27:05.001014Z node 179 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 340us result status StatusSuccess 2025-04-17T18:27:05.001943Z node 179 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:27:05.013844Z node 179 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][179:1037:2805] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:27:05.013945Z node 179 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][179:1038:2805] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK 
LastRecordOrder: 0 2025-04-17T18:27:05.014025Z node 179 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][179:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-17T18:27:05.014098Z node 179 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][179:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-17T18:27:05.014260Z node 179 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][179:1037:2805] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914424970018 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744914424970018 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:27:05.014447Z node 179 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][179:1038:2805] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1744914424970018 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:27:05.018627Z node 179 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][179:1037:2805] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-04-17T18:27:05.018771Z node 179 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][179:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-17T18:27:05.019807Z node 179 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][179:1038:2805] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:27:05.019904Z node 179 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][179:960:2805] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/6873/000fcb/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2025-04-17T18:26:51.385550Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:51.385506Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 
101)","start_time":"2025-04-17T18:26:51.374617Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:51.507046Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:51.507016Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-17T18:26:51.494055Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:51.626915Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:51.626886Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-17T18:26:51.616202Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:51.752091Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:51.752060Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-17T18:26:51.736758Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:51.873932Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:51.873902Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-17T18:26:51.861512Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-17T18:26:51.997325Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-17T18:26:51.997281Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-17T18:26:51.982968Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] |97.4%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> BasicUsage::BasicWriteSession >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> KikimrIcGateway::TestLoadTableMetadata >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> BasicUsage::PropagateSessionClosed >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> BasicUsage::SimpleHandlers [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-04-17T18:26:11.860919Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1744914371860895 2025-04-17T18:26:12.194318Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350163967401575:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:12.194368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:26:12.306363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350165746130569:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:12.521244Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001842/r3tmp/tmpYQsIEC/pdisk_1.dat 2025-04-17T18:26:12.554759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:26:12.567817Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:26:12.984728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:26:12.984823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:26:12.987213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:26:12.987358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:26:12.993374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:26:12.994002Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:26:12.994730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:26:13.045594Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26879, node 1 2025-04-17T18:26:13.285948Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:26:13.286979Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root 
Strong=0 2025-04-17T18:26:13.330908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001842/r3tmp/yandexKPp6qo.tmp 2025-04-17T18:26:13.330945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001842/r3tmp/yandexKPp6qo.tmp 2025-04-17T18:26:13.331075Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001842/r3tmp/yandexKPp6qo.tmp 2025-04-17T18:26:13.331191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:26:13.387981Z INFO: TTestServer started on Port 29553 GrpcPort 26879 TClient is connected to server localhost:29553 PQClient connected to localhost:26879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:26:13.718471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:26:16.193110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350181147271783:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:26:16.193231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350181147271759:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:26:16.193329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:26:16.197176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:26:16.217796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350181147271788:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:26:16.309630Z node 1 :TX_PROXY ERROR: Actor# [1:7494350181147271884:2692] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:26:16.575695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:26:16.599312Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494350181147271896:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:26:16.599559Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2M3NjQyYzgtMWNmNjc1MC04OTI3Y2Y1NS0xMTgzYzczNg==, ActorId: [1:7494350181147271756:2336], ActorState: ExecuteState, TraceId: 01js2g0sfnbv14cty90j2f12sn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:26:16.607665Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:26:16.616596Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494350182925999963:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:26:16.618230Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTdjNGI4OTEtYjlkNzRjMWEtNWFjY2E1MzUtYjg1MWNhN2U=, ActorId: [2:7494350182925999915:2308], ActorState: ExecuteState, TraceId: 01js2g0shz2zy150zrr5d7each, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:26:16.618589Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:26:16.835640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:26:17.002095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:26879", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-17T18:26:17.193244Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350163967401575:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:17.193325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:26:17.266418Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350165746130569:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:26:17.266476Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:26:17.293817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2g0tc83f9hm5estb8cyfh5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNlYjU3YjQtMzJlZDQwMTYtYmY5MGM3N2QtODMxMmNlZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494350185442239600:2994] === CheckClustersList. Ok 2025-04-17T18:26:23.292620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:26879 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:26:23.414365Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC re ... d: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.427204Z :INFO: [/Root] [/Root] [12ce1097-5157649c-f99d0e5-fc733477] Closing read session. Close timeout: 18446744073709.551615s 2025-04-17T18:27:16.427238Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:16.427266Z :INFO: [/Root] [/Root] [12ce1097-5157649c-f99d0e5-fc733477] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1727 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.427519Z :INFO: [/Root] [/Root] [5e96a7d0-f482ed50-ed3617d5-bb258e26] Closing read session. Close timeout: 18446744073709.551615s 2025-04-17T18:27:16.427551Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-17T18:27:16.427575Z :INFO: [/Root] [/Root] [5e96a7d0-f482ed50-ed3617d5-bb258e26] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1726 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.427833Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0] Write session: close. Timeout = 0 ms 2025-04-17T18:27:16.427861Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0] Write session will now close 2025-04-17T18:27:16.427924Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0] Write session: aborting 2025-04-17T18:27:16.429131Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:27:16.429203Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0] Write session is aborting and will not restart 2025-04-17T18:27:16.429275Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0] Write session: destroy 2025-04-17T18:27:16.431066Z :INFO: [/Root] [/Root] [3d0812c3-3c63f567-70f352ff-f1fdabac] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:16.431123Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:16.431174Z :INFO: [/Root] [/Root] [3d0812c3-3c63f567-70f352ff-f1fdabac] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1752 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.431207Z :INFO: [/Root] [/Root] [12ce1097-5157649c-f99d0e5-fc733477] Closing read session. 
Close timeout: 0.000000s 2025-04-17T18:27:16.431232Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:16.431261Z :INFO: [/Root] [/Root] [12ce1097-5157649c-f99d0e5-fc733477] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1731 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.429085Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_5083225547344580360_v1 grpc read done: success# 0, data# { } 2025-04-17T18:27:16.431282Z :INFO: [/Root] [/Root] [5e96a7d0-f482ed50-ed3617d5-bb258e26] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:16.429122Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_5083225547344580360_v1 grpc read failed 2025-04-17T18:27:16.429154Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_5083225547344580360_v1 grpc closed 2025-04-17T18:27:16.429180Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_5083225547344580360_v1 is DEAD 2025-04-17T18:27:16.431338Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-17T18:27:16.429974Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_18048021427812145360_v1 grpc read done: success# 0, data# { } 2025-04-17T18:27:16.431366Z :INFO: [/Root] [/Root] [5e96a7d0-f482ed50-ed3617d5-bb258e26] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1730 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.429985Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18048021427812145360_v1 grpc read failed 2025-04-17T18:27:16.430002Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18048021427812145360_v1 grpc closed 2025-04-17T18:27:16.430018Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18048021427812145360_v1 is DEAD 2025-04-17T18:27:16.431397Z :INFO: [/Root] [/Root] [5e96a7d0-f482ed50-ed3617d5-bb258e26] Closing read session. 
Close timeout: 0.000000s 2025-04-17T18:27:16.430536Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_5856269192080823893_v1 grpc read done: success# 0, data# { } 2025-04-17T18:27:16.430550Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_5856269192080823893_v1 grpc read failed 2025-04-17T18:27:16.430566Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_5856269192080823893_v1 grpc closed 2025-04-17T18:27:16.431457Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-17T18:27:16.430598Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_5856269192080823893_v1 is DEAD 2025-04-17T18:27:16.431494Z :INFO: [/Root] [/Root] [5e96a7d0-f482ed50-ed3617d5-bb258e26] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1730 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.431582Z :NOTICE: [/Root] [/Root] [5e96a7d0-f482ed50-ed3617d5-bb258e26] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:16.432326Z :INFO: [/Root] [/Root] [12ce1097-5157649c-f99d0e5-fc733477] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:16.432382Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:16.432417Z :INFO: [/Root] [/Root] [12ce1097-5157649c-f99d0e5-fc733477] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.432472Z :NOTICE: [/Root] [/Root] [12ce1097-5157649c-f99d0e5-fc733477] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:16.433008Z :INFO: [/Root] [/Root] [3d0812c3-3c63f567-70f352ff-f1fdabac] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:16.433073Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:16.433104Z :INFO: [/Root] [/Root] [3d0812c3-3c63f567-70f352ff-f1fdabac] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1754 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:16.433151Z :NOTICE: [/Root] [/Root] [3d0812c3-3c63f567-70f352ff-f1fdabac] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:16.435187Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_5856269192080823893_v1 2025-04-17T18:27:16.435232Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494350429382302923:2554] destroyed 2025-04-17T18:27:16.435283Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_3_5856269192080823893_v1 2025-04-17T18:27:16.438842Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0 grpc read done: success: 0 data: 2025-04-17T18:27:16.438868Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0 grpc read failed 2025-04-17T18:27:16.438899Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0 grpc closed 2025-04-17T18:27:16.438917Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|20f187b-c7b03bee-5ed8788f-897c0f6c_0 is DEAD 2025-04-17T18:27:16.439608Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350429382302913:2545] disconnected; active server actors: 1 2025-04-17T18:27:16.439618Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:27:16.439631Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350429382302913:2545] client user disconnected session shared/user_3_2_5083225547344580360_v1 2025-04-17T18:27:16.439680Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-17T18:27:16.439713Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350429382302912:2544] disconnected; active server actors: 1 2025-04-17T18:27:16.439722Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350429382302912:2544] client user disconnected session shared/user_3_1_18048021427812145360_v1 2025-04-17T18:27:16.439737Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350429382302919:2546] disconnected; active server actors: 1 2025-04-17T18:27:16.439746Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350429382302919:2546] client user disconnected session shared/user_3_3_5856269192080823893_v1 2025-04-17T18:27:16.440457Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494350429382302916:2540] destroyed 2025-04-17T18:27:16.440515Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T18:27:17.073658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:27:17.073687Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:17.240439Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710693, task: 1, CA Id [3:7494350442267205028:2581]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T18:27:17.274400Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710693, task: 1, CA Id [3:7494350442267205028:2581]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:27:17.331682Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710693, task: 1, CA Id [3:7494350442267205028:2581]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:27:17.393381Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710693, task: 1, CA Id [3:7494350442267205028:2581]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> HttpRouter::Basic [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD] |97.4%| [TS] {RESULT} ydb/core/public_http/ut/unittest >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> BlobDepotWithTestShard::PlainGroup [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_testshard/unittest >> BlobDepotWithTestShard::PlainGroup [GOOD] |97.4%| [TM] {RESULT} ydb/core/blobstorage/ut_testshard/unittest >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] Test command err: 2025-04-17T18:27:27.064936Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-17T18:27:27.065147Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for 
TEvRegisterCoordinatorRequest (storage) 2025-04-17T18:27:27.066059Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-17T18:27:27.066114Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-17T18:27:27.066153Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-17T18:27:27.067037Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-17T18:27:27.074237Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-17T18:27:27.074301Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-17T18:27:27.074343Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-17T18:27:27.079137Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-17T18:27:27.079202Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-17T18:27:27.079262Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-17T18:27:27.079408Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.079452Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-17T18:27:27.079483Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.079508Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-17T18:27:27.079536Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.079563Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-17T18:27:27.079592Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-17T18:27:27.079656Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:27:27.079677Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-17T18:27:27.079885Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-04-17T18:27:27.079922Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2025-04-17T18:27:27.079956Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-04-17T18:27:27.079986Z node 1 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2025-04-17T18:27:27.080011Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-17T18:27:27.080064Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-04-17T18:27:27.080094Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-04-17T18:27:27.164292Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-17T18:27:27.164477Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-17T18:27:27.164592Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-17T18:27:27.164618Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-17T18:27:27.164659Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-17T18:27:27.164727Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-17T18:27:27.164892Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-17T18:27:27.164919Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-17T18:27:27.164946Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-17T18:27:27.165091Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-17T18:27:27.165122Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-17T18:27:27.165158Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-17T18:27:27.165246Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.165277Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-17T18:27:27.165317Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.165381Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-17T18:27:27.165424Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.165462Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-17T18:27:27.165490Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for 
TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-17T18:27:27.165554Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:27:27.165581Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-17T18:27:27.165702Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-04-17T18:27:27.165739Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2025-04-17T18:27:27.165770Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-04-17T18:27:27.165805Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2025-04-17T18:27:27.165848Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-17T18:27:27.165899Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-04-17T18:27:27.165928Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-04-17T18:27:27.242350Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-17T18:27:27.242517Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-17T18:27:27.242615Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-17T18:27:27.242641Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-17T18:27:27.242671Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-17T18:27:27.242741Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-17T18:27:27.242918Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-17T18:27:27.242950Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-17T18:27:27.242979Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-17T18:27:27.243098Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-17T18:27:27.243146Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-17T18:27:27.243183Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-17T18:27:27.243294Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.243345Z node 3 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-17T18:27:27.243386Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.243414Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-17T18:27:27.243449Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.243481Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-17T18:27:27.243513Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-17T18:27:27.243593Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:27:27.243620Z node 3 :STREAMS_C ... :2] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.328213Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Task state saved, need 0 more acks 2025-04-17T18:27:27.328240Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-17T18:27:27.328333Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:27:27.328361Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-17T18:27:27.328454Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 1 2025-04-17T18:27:27.328488Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:6:2053], need 1 more acks 2025-04-17T18:27:27.328523Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 3 2025-04-17T18:27:27.328554Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:8:2055], need 0 more acks 2025-04-17T18:27:27.328581Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-17T18:27:27.328624Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCompleteCheckpointResponse 2025-04-17T18:27:27.328651Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint completed 2025-04-17T18:27:27.328686Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-17T18:27:27.328730Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-17T18:27:27.328803Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCreateCheckpointResponse 2025-04-17T18:27:27.328832Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 
2025-04-17T18:27:27.328915Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.328947Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 2 more acks 2025-04-17T18:27:27.328981Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.329021Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2025-04-17T18:27:27.329059Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.329106Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2025-04-17T18:27:27.329132Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-17T18:27:27.329197Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:27:27.329227Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-17T18:27:27.329328Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2025-04-17T18:27:27.329371Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2025-04-17T18:27:27.329409Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2025-04-17T18:27:27.329440Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2025-04-17T18:27:27.329467Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-17T18:27:27.329510Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2025-04-17T18:27:27.329537Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint completed 2025-04-17T18:27:27.329576Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-17T18:27:27.329613Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-17T18:27:27.329659Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2025-04-17T18:27:27.329716Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-17T18:27:27.329792Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.329819Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 2 more acks 2025-04-17T18:27:27.329864Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: 
OK, size: 100 2025-04-17T18:27:27.329890Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2025-04-17T18:27:27.329931Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.329970Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2025-04-17T18:27:27.329994Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-17T18:27:27.330041Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-17T18:27:27.330066Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-17T18:27:27.330149Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2025-04-17T18:27:27.330222Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2025-04-17T18:27:27.330258Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2025-04-17T18:27:27.330287Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2025-04-17T18:27:27.330312Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-17T18:27:27.330389Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2025-04-17T18:27:27.330432Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint completed 2025-04-17T18:27:27.411338Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-17T18:27:27.411546Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-17T18:27:27.411652Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-17T18:27:27.411673Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-17T18:27:27.411695Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-17T18:27:27.411729Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-17T18:27:27.411881Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-17T18:27:27.411917Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-17T18:27:27.411943Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-17T18:27:27.412059Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-17T18:27:27.412083Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-17T18:27:27.412112Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-17T18:27:27.412179Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-17T18:27:27.412202Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-17T18:27:27.412233Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-04-17T18:27:27.412255Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-04-17T18:27:27.412303Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-04-17T18:27:27.412321Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-04-17T18:27:27.412361Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2025-04-17T18:27:27.412411Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2025-04-17T18:27:27.412434Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: [my-graph-id.42] [42:1] Checkpoint aborted 2025-04-17T18:27:27.412459Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-17T18:27:27.412485Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-17T18:27:27.412536Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2025-04-17T18:27:27.412568Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) |97.4%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString 2025-04-17 18:27:24,888 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 18:27:25,095 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 571839 46.8M 46.2M 23.8M test_tool run_ut @/home/runner/.ya/build/build_root/6873/001208/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk26/testing_out_stuff/test_tool.args 572020 1.9G 1.9G 1.7G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/6873/001208/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu Test command err: 2025-04-17T18:17:26.200901Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:17:26.300790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:17:26.324174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:17:26.324467Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:17:26.332353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:17:26.332589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:17:26.332808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:17:26.332912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:17:26.333032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:17:26.333131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:17:26.333250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:17:26.333423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:17:26.333545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:17:26.333637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:17:26.333730Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:17:26.333814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:17:26.360444Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:17:26.360597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:17:26.360655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:17:26.360828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:26.360993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:17:26.361073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:17:26.361114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:17:26.361199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:17:26.361267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:17:26.361305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:17:26.361335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:17:26.361518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:17:26.361582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:17:26.361618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:17:26.361649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:17:26.361745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:17:26.361810Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:17:26.361858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:17:26.361885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:17:26.361972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:17:26.362009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:17:26.362048Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:17:26.362093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:17:26.362131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:17:26.362160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:17:26.362598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-17T18:17:26.362678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-17T18:17:26.362746Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T18:17:26.362815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-17T18:17:26.362984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:17:26.363041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:17:26.363078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:17:26.363267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:17:26.363319Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:17:26.363349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:17:26.363506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:17:26.363567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:17:26.363602Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:17:26.363759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:17:26.363792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:17:26.363820Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:17:26.363942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:17:26.363980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:17:26.364024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:172:2776:0]; 2025-04-17T18:27:21.926135Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:173:2776:0]; 2025-04-17T18:27:21.926211Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:174:2864:0]; 2025-04-17T18:27:21.926272Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:175:2776:0]; 2025-04-17T18:27:21.926327Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:176:2784:0]; 2025-04-17T18:27:21.926399Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:177:2808:0]; 2025-04-17T18:27:21.926455Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:178:2776:0]; 2025-04-17T18:27:21.926512Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:179:2776:0]; 2025-04-17T18:27:21.926568Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:180:2776:0]; 2025-04-17T18:27:21.926623Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:181:2848:0]; 2025-04-17T18:27:21.926677Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:182:2776:0]; 2025-04-17T18:27:21.926735Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:183:2784:0]; 2025-04-17T18:27:21.926797Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:184:2832:0]; 2025-04-17T18:27:21.926859Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:185:2776:0]; 2025-04-17T18:27:21.926919Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:186:2776:0]; 2025-04-17T18:27:21.926982Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:187:2776:0]; 2025-04-17T18:27:21.927046Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:188:2824:0]; 2025-04-17T18:27:21.927112Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:189:2768:0]; 2025-04-17T18:27:21.927175Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:190:2768:0]; 2025-04-17T18:27:21.927259Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:191:2848:0]; 2025-04-17T18:27:21.927324Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:192:2776:0]; 2025-04-17T18:27:21.927387Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:193:2784:0]; 2025-04-17T18:27:21.927509Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:194:2776:0]; 2025-04-17T18:27:21.927572Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:195:2816:0]; 2025-04-17T18:27:21.927633Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:196:2776:0]; 2025-04-17T18:27:21.927692Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:197:2776:0]; 2025-04-17T18:27:21.927749Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:198:2856:0]; 2025-04-17T18:27:21.927811Z node 1 :S3_WRAPPER 
DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:199:2784:0]; 2025-04-17T18:27:21.927869Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:200:2776:0]; 2025-04-17T18:27:21.927933Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:201:2776:0]; 2025-04-17T18:27:21.927992Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:202:2800:0]; 2025-04-17T18:27:21.928052Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:203:2776:0]; 2025-04-17T18:27:21.928112Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:204:2760:0]; 2025-04-17T18:27:21.928173Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:205:10032:0]; 2025-04-17T18:27:21.928232Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:206:2832:0]; 2025-04-17T18:27:21.928297Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:207:2776:0]; 2025-04-17T18:27:21.928359Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:208:2768:0]; 2025-04-17T18:27:21.928420Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:209:2856:0]; 2025-04-17T18:27:21.928481Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:210:2776:0]; 2025-04-17T18:27:21.928542Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:211:2784:0]; 2025-04-17T18:27:21.928603Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:212:2784:0]; 2025-04-17T18:27:21.928663Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:213:2816:0]; 2025-04-17T18:27:21.928724Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:214:2776:0]; 2025-04-17T18:27:21.928780Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:215:2776:0]; 2025-04-17T18:27:21.928840Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:216:2872:0]; 2025-04-17T18:27:21.928901Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:217:2776:0]; 2025-04-17T18:27:21.928961Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:218:2784:0]; 2025-04-17T18:27:21.929021Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:219:2784:0]; 2025-04-17T18:27:21.929079Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:220:2800:0]; 2025-04-17T18:27:21.929133Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:221:2776:0]; 2025-04-17T18:27:21.929191Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:222:2776:0]; 2025-04-17T18:27:21.929248Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:223:2864:0]; 2025-04-17T18:27:21.929301Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:224:2776:0]; 2025-04-17T18:27:21.929356Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:225:2776:0]; 2025-04-17T18:27:21.929414Z node 1 
:S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:226:2776:0]; 2025-04-17T18:27:21.929475Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:227:2776:0]; 2025-04-17T18:27:21.929540Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:228:2776:0]; 2025-04-17T18:27:21.929601Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:229:2784:0]; 2025-04-17T18:27:21.929666Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:230:2856:0]; 2025-04-17T18:27:21.929729Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:231:2784:0]; 2025-04-17T18:27:21.929789Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:232:2776:0]; 2025-04-17T18:27:21.929849Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:233:2784:0]; 2025-04-17T18:27:21.929914Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:234:2768:0]; 2025-04-17T18:27:21.929983Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:235:2776:0]; 2025-04-17T18:27:21.930052Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:236:2776:0]; 2025-04-17T18:27:21.930114Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:237:2856:0]; 2025-04-17T18:27:21.930192Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:238:2768:0]; 2025-04-17T18:27:21.930257Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:239:2768:0]; 2025-04-17T18:27:21.930316Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:240:2792:0]; 2025-04-17T18:27:21.930384Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:241:2760:0]; 2025-04-17T18:27:21.930442Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:242:2760:0]; 2025-04-17T18:27:21.930505Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:243:2776:0]; 2025-04-17T18:27:21.930567Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:244:2848:0]; 2025-04-17T18:27:21.930625Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:245:2760:0]; 2025-04-17T18:27:21.930682Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:92:255:246:10096:0]; 2025-04-17T18:27:24.099240Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-17T18:27:24.100244Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[272] (CS::GENERAL) apply at tablet 9437184 2025-04-17T18:27:24.179613Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:114 Blob count: 739 2025-04-17T18:27:24.187491Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=117513284;raw_bytes=174740630;count=59;records=1685000} inactive {blob_bytes=221681792;raw_bytes=324411109;count=116;records=3159327} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001208/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk26/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/6873/001208/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk26/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> ServerRestartTest::RestartOnGetSession >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] |97.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> Graph::CreateGraphShard >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> BasicStatistics::TwoDatabases [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable >> TestFilterSet::FilterGroup >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 24866, MsgBus: 22692 2025-04-17T18:25:49.870504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350065542734535:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:49.870981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000e91/r3tmp/tmprw6FLN/pdisk_1.dat 2025-04-17T18:25:50.170655Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-17T18:25:50.186947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:50.187034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:50.189880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24866, node 1 2025-04-17T18:25:50.243694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:25:50.243725Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:25:50.243731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:25:50.243860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22692 TClient is connected to server localhost:22692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:50.692846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:25:52.600545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350078427637090:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:52.600556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350078427637082:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:52.600675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:52.604277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:25:52.621231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350078427637096:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:25:52.702914Z node 1 :TX_PROXY ERROR: Actor# [1:7494350078427637147:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:25:52.971357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:25:53.192893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:53.192892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:53.193114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:25:53.193391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:25:53.193522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:25:53.193570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:25:53.193634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:25:53.193751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:25:53.193793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:25:53.193875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:25:53.193914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-17T18:25:53.194016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:25:53.194024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:25:53.194141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:25:53.194151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:25:53.194275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:25:53.194383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:25:53.194480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:25:53.194621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:25:53.194736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:25:53.194754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:25:53.194846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:25:53.194861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7494350082722604716:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:25:53.194960Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494350082722604773:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:25:53.231694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494350082722604768:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:53.231705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494350082722604730:2357];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:25:53.231752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:74943500 ... tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.161887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.161912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.166576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.166598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.171658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.171657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.175784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.179239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.180609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.184266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.185325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.188854Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.190006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.193700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.196662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.197739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.200858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.201336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.205479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.207008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.208967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.211703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.212502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.217466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.218584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.222883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.223496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.227601Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.228018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.234089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.235713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.241402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.241557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.247104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.249084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.252324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.258043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.259539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.263496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.264575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.270407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.270827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.276567Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.276677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.282708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.284941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T18:26:53.430713Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2g159tfsjfvws3eqdbc375", SessionId: ydb://session/3?node_id=1&id=N2EyZGMzMDktYTc5NDY4YmQtNTJmZGQ2ZjEtZTgyM2IxMmY=, Slow query, duration: 25.147729s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T18:26:53.896676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:53.896681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T18:26:53.897782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2025-04-17T18:23:45.987689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2416], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:23:45.988115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:23:45.988248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014f9/r3tmp/tmpqnmRAl/pdisk_1.dat 2025-04-17T18:23:46.410657Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23947, node 1 2025-04-17T18:23:46.665616Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:23:46.665669Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:23:46.665698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:23:46.665910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:23:46.668622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:23:46.784540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:46.784659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:46.805226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21234 2025-04-17T18:23:47.415045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:23:50.799629Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-17T18:23:50.834435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:50.834542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:50.872675Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:23:50.875036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:51.152046Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.152605Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.153273Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.153413Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.153527Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.153788Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.153941Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.154038Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.154136Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:51.351528Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:51.351636Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:51.367011Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:51.516499Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:51.595256Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:23:51.595372Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:23:51.639151Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:23:51.639509Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:23:51.639690Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:23:51.639751Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:23:51.639794Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:23:51.639839Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:23:51.639888Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:23:51.639937Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:23:51.641337Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:23:51.664218Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1934:2586] 2025-04-17T18:23:51.665075Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2025-04-17T18:23:51.673745Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:23:51.673799Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:23:51.673875Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2025-04-17T18:23:51.678099Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:23:51.678558Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1980:2610], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:23:51.734645Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:2025:2633] 2025-04-17T18:23:51.734857Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2025:2633], schemeshard id = 72075186224037897 2025-04-17T18:23:51.769658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:23:51.776227Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:23:51.776337Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:23:51.961374Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:23:52.204193Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:23:52.305551Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:23:53.453127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:23:57.748203Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:23:57.790167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:57.790484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:57.844100Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:23:57.850677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:58.108241Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.108797Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.109331Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.109501Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.109761Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.109859Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.109951Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.110039Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.110127Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:58.266745Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:58.266871Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:58.280660Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:58.439983Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:58.491090Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-04-17T18:23:58.491200Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-04-17T18:23:58.524048Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-04-17T18:23:58.525824Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-04-17T18:23:58.526011Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-04-17T18:23:58.526072Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-04-17T18:23:58.526130Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:23:58.526201Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:23:58.526255Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:23:58.526323Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-04-17T18:23:58.527487Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-04-17T18:23:58.567084Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-04-17T18:23:58.567192Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3233:2602], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-04-17T18:23:58.579873Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3257:2617] 2025-04-17T18:23:58.584824Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3280:262 ... path count: 2, at schemeshard: 72075186224038898 2025-04-17T18:27:25.675935Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 199.000000s, at schemeshard: 72075186224038898 2025-04-17T18:27:25.676304Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038898, stats size# 49 2025-04-17T18:27:25.696463Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-04-17T18:27:26.753290Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10333:4786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:27:26.753638Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:27:26.753679Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10333:4786], StatRequests.size() = 1 2025-04-17T18:27:27.604831Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-17T18:27:27.604891Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-17T18:27:27.604928Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 3] is data table. 2025-04-17T18:27:27.604971Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-17T18:27:27.605416Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2025-04-17T18:27:27.608162Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:27:27.613468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10360:4461], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:27.613569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10370:4466], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:27.613709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:27.632070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-04-17T18:27:27.766639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10374:4469], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-17T18:27:27.941999Z node 2 :TX_PROXY ERROR: Actor# [2:10464:4518] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Database2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:27.965086Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10493:4533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:27:27.965396Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:27:27.965490Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10495:4535] 2025-04-17T18:27:27.965553Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10495:4535] 2025-04-17T18:27:27.966130Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:10496:4536] 2025-04-17T18:27:27.966399Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10495:4535], server id = [2:10496:4536], tablet id = 72075186224038895, status = OK 2025-04-17T18:27:27.966489Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:10496:4536], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:27:27.966552Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:27:27.966765Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:27:27.966853Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10493:4533], StatRequests.size() = 1 2025-04-17T18:27:28.076634Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzliNWE3NTItMTQ5YzgzODgtZmRjNDkyNDEtMTE2YTA4NmM=, TxId: 2025-04-17T18:27:28.076699Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzliNWE3NTItMTQ5YzgzODgtZmRjNDkyNDEtMTE2YTA4NmM=, TxId: 2025-04-17T18:27:28.077629Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-17T18:27:28.095995Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-17T18:27:28.096053Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:27:28.151858Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-04-17T18:27:28.151918Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:27:28.255493Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10495:4535], schemeshard count = 1 2025-04-17T18:27:29.026929Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:27:29.038575Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:27:29.038641Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:27:29.038683Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-17T18:27:29.038719Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:27:29.039196Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database1 2025-04-17T18:27:29.042044Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:27:29.058669Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZDAyYTlmNzktYzc5Mjk2ZTMtNTQwYTJkYTctMTg3MjkyZDM=, TxId: 2025-04-17T18:27:29.058759Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZDAyYTlmNzktYzc5Mjk2ZTMtNTQwYTJkYTctMTg3MjkyZDM=, TxId: 2025-04-17T18:27:29.059445Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:27:29.075441Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-17T18:27:29.075501Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:27:29.135681Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10591:4825]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:27:29.136049Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-17T18:27:29.136098Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10591:4825], StatRequests.size() = 1 2025-04-17T18:27:31.167701Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10662:4851]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:27:31.168101Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-17T18:27:31.168152Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10662:4851], StatRequests.size() = 1 2025-04-17T18:27:31.926232Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-17T18:27:31.926312Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-17T18:27:31.926382Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 4] is data table. 2025-04-17T18:27:31.926438Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 4] 2025-04-17T18:27:31.926992Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2025-04-17T18:27:31.929858Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:27:31.948571Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjU0MTM4OWYtYzMxOWE3ZTAtYTdkNzVkYi04MDQ1NjUwYw==, TxId: 2025-04-17T18:27:31.948639Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjU0MTM4OWYtYzMxOWE3ZTAtYTdkNzVkYi04MDQ1NjUwYw==, TxId: 2025-04-17T18:27:31.949257Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-17T18:27:31.971627Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 4] 2025-04-17T18:27:31.971697Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-17T18:27:32.895605Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-17T18:27:32.895889Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-17T18:27:32.896365Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:27:32.908236Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:27:32.908308Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:27:32.983499Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10746:4865]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:27:32.983856Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-17T18:27:32.983906Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10746:4865], StatRequests.size() = 1 2025-04-17T18:27:32.984753Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10748:4617]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:27:32.989630Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:27:32.989704Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10748:4617], StatRequests.size() = 1 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 16214, MsgBus: 14842 2025-04-17T18:27:11.444761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350419655211602:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:11.444860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb0/r3tmp/tmpYXdT4v/pdisk_1.dat 2025-04-17T18:27:11.898482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:11.899970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:11.900079Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:11.905679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16214, node 1 2025-04-17T18:27:12.007006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:27:12.007042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:27:12.007065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:27:12.007187Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14842 TClient is connected to server localhost:14842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:12.697079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:12.736256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:27:12.773723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:27:12.803081Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T18:27:14.940244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350432540114191:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:14.940352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.241316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:27:15.395822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T18:27:15.450870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:27:15.504076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:27:15.578614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350436835081803:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.578715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.578915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350436835081808:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.582861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-17T18:27:15.599123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350436835081810:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-17T18:27:15.669128Z node 1 :TX_PROXY ERROR: Actor# [1:7494350436835081862:2570] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8490, MsgBus: 6632 2025-04-17T18:27:16.824153Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350437390392021:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:16.824204Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb0/r3tmp/tmpFWi9xH/pdisk_1.dat 2025-04-17T18:27:17.014706Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:17.018101Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:17.018149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:17.021538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8490, node 2 2025-04-17T18:27:17.178706Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:27:17.178727Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:27:17.178733Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:27:17.178828Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6632 TClient is connected to server localhost:6632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:17.728888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:27:17.738713Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:27:17.762042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:17.883614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:18.066031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:18.157982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:20.160582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494350454570262967:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.160707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.239909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:27:20.291085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:27:20.371565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 ... /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.732423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494350454570263641:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.735908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:27:20.752861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494350454570263643:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:27:20.821134Z node 2 :TX_PROXY ERROR: Actor# [2:7494350454570263700:3549] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:21.825695Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350437390392021:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:21.825974Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:27:21.990990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-17T18:27:23.070613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:27:23.524135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:1, at schemeshard: 72057594046644480 2025-04-17T18:27:24.120128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T18:27:24.693858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T18:27:25.207043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-17T18:27:25.637973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:27:25.732853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T18:27:27.786655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710713:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18863, MsgBus: 20293 2025-04-17T18:27:28.567701Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494350490421332814:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:28.567797Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001bb0/r3tmp/tmpM5slN0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18863, node 3 2025-04-17T18:27:28.670463Z node 3 
:IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:28.712354Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:28.712452Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:28.714623Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:28.731229Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:27:28.731253Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:27:28.731260Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:27:28.731405Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20293 TClient is connected to server localhost:20293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:29.136688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:29.150632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:29.205264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:29.364890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:29.446300Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:32.156756Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494350507601203749:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:32.156876Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:32.197595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:27:32.229983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T18:27:32.300651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T18:27:32.334172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:27:32.364723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T18:27:32.396224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T18:27:32.464906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T18:27:32.546145Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494350507601204413:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:32.546256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:32.546336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494350507601204418:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:32.549827Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T18:27:32.560367Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494350507601204420:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T18:27:32.620208Z node 3 :TX_PROXY ERROR: Actor# [3:7494350507601204476:3541] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:33.568018Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494350490421332814:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:33.568106Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-04-17T18:27:04.105552Z :FallbackToSingleDb INFO: Random seed for debugging is 1744914424105523 2025-04-17T18:27:04.583575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350386069727112:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:04.583870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:27:04.660457Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350385655393039:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:04.660520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:27:04.889532Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001829/r3tmp/tmpLDfaeb/pdisk_1.dat 2025-04-17T18:27:04.906887Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:27:05.258078Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:05.270696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:05.270767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:05.273539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:05.273585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:05.281874Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:27:05.282049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:05.282785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8409, node 1 2025-04-17T18:27:05.676375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/6873/001829/r3tmp/yandex2fw37o.tmp 2025-04-17T18:27:05.676402Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001829/r3tmp/yandex2fw37o.tmp 2025-04-17T18:27:05.676562Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001829/r3tmp/yandex2fw37o.tmp 2025-04-17T18:27:05.676719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:27:05.755328Z INFO: TTestServer started on Port 31189 GrpcPort 8409 TClient is connected to server localhost:31189 PQClient connected to localhost:8409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:06.209660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:27:08.937915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350403249597302:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:08.938057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:08.938487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350403249597315:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:08.943613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:27:08.981859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350403249597317:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:27:09.278320Z node 1 :TX_PROXY ERROR: Actor# [1:7494350407544564702:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:09.310010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:27:09.318552Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494350407544564712:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:27:09.315065Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494350407130229847:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:27:09.317047Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzJjNGYyNzQtMjFlZjZlMjQtYWRiYTUwMmEtNDE4YTc3NDM=, ActorId: [2:7494350407130229805:2307], ActorState: ExecuteState, TraceId: 01js2g2d7sff87r0zvdxy12dj6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:27:09.320135Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:27:09.320006Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzM3NGIyNmUtNGQxMTI1M2YtODA1N2YxNmMtZmExYTJmNg==, ActorId: [1:7494350403249597300:2337], ActorState: ExecuteState, TraceId: 01js2g2d07ee3a7ndejn99hxt7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:27:09.320334Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:27:09.520891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:27:09.584066Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350386069727112:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:09.584210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:27:09.662088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:27:09.666369Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350385655393039:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:09.666467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:8409", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-17T18:27:09.963194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2g2dwj6s0kgf1wm2ahmb1c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg4NDM2ZTMtYjE0MDVjMjgtMWFkMWYwNTItYzJjYTYzOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494350411839532411:2981] === CheckClustersList. Ok 2025-04-17T18:27:16.276957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:8409 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:27:16.425872Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:8409 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowW ... 
S Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T18:27:32.074978Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T18:27:32.077128Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-17T18:27:32.220605Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-17T18:27:32.220853Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350508411007672:2513] connected; active server actors: 1 2025-04-17T18:27:32.220910Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-17T18:27:32.220925Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-17T18:27:32.221169Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350508411007672:2513] disconnected; active server actors: 1 2025-04-17T18:27:32.221199Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350508411007672:2513] disconnected no session 2025-04-17T18:27:32.315223Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T18:27:32.315263Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T18:27:32.315280Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350508411007619:2513] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T18:27:32.315309Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:27:32.316158Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7494350508411007695:2513], now have 1 active actors on pipe 2025-04-17T18:27:32.316324Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-17T18:27:32.316470Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:27:32.316502Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:27:32.316604Z node 4 :PERSQUEUE INFO: new Cookie src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T18:27:32.316723Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-17T18:27:32.316810Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:27:32.317356Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:27:32.317383Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:27:32.317498Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:27:32.317882Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0 2025-04-17T18:27:32.318800Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744914452318 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:32.318923Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T18:27:32.319295Z :INFO: [] MessageGroupId [src] SessionId [src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0] Write session: close. Timeout = 0 ms 2025-04-17T18:27:32.319347Z :INFO: [] MessageGroupId [src] SessionId [src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0] Write session will now close 2025-04-17T18:27:32.319390Z :DEBUG: [] MessageGroupId [src] SessionId [src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0] Write session: aborting 2025-04-17T18:27:32.320197Z :DEBUG: [] MessageGroupId [src] SessionId [src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0] Write session is aborting and will not restart 2025-04-17T18:27:32.320207Z :INFO: [] MessageGroupId [src] SessionId [src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:27:32.320301Z :DEBUG: [] MessageGroupId [src] SessionId [src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0] Write session: destroy 2025-04-17T18:27:32.320601Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0 grpc read done: success: 0 data: 2025-04-17T18:27:32.320632Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0 grpc read failed 2025-04-17T18:27:32.320658Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0 grpc closed 2025-04-17T18:27:32.320674Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ad0d7303-4a9f08e0-70f37bb5-a5b08d5_0 is DEAD 2025-04-17T18:27:32.321568Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:27:32.321920Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494350508411007695:2513] destroyed 2025-04-17T18:27:32.321980Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
PORTS 19441 14972 Session was created >>> Ready to answer: ok 2025-04-17T18:27:33.334065Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-04-17T18:27:33.334195Z :INFO: [/Root] [] [ef09b616-25363afe-64503fe9-8d72f989] Open read subsessions to databases: { name: , endpoint: localhost:14972, path: /Root } 2025-04-17T18:27:33.334514Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Starting read session 2025-04-17T18:27:33.334568Z :DEBUG: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Starting single session 2025-04-17T18:27:33.335477Z :DEBUG: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-17T18:27:33.335525Z :DEBUG: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-17T18:27:33.335568Z :DEBUG: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] Reconnecting session to cluster in 0.000000s 2025-04-17T18:27:33.336062Z :ERROR: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:14972
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14972. 2025-04-17T18:27:33.336135Z :DEBUG: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-17T18:27:33.336173Z :DEBUG: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-17T18:27:33.336351Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
<main>: Error: Failed to establish connection to server "localhost:14972" ( cluster ). Attempts done: 1
<main>: Error: GRpc error: (12):
<main>: Error: Grpc error response on endpoint localhost:14972
<main>: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14972. " } 2025-04-17T18:27:33.336687Z :NOTICE: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-04-17T18:27:33.336733Z :DEBUG: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
<main>: Error: Failed to establish connection to server "localhost:14972" ( cluster ). Attempts done: 1
<main>: Error: GRpc error: (12):
<main>: Error: Grpc error response on endpoint localhost:14972
<main>: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14972. " } 2025-04-17T18:27:33.336860Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Closing read session. Close timeout: 0.010000s 2025-04-17T18:27:33.336902Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:33.336946Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:33.336981Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:33.337014Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:33.337053Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:33.337086Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:33.337124Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:33.337162Z :INFO: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:33.337233Z :NOTICE: [/Root] [/Root] [54d25098-cbc5964d-478b8993-7251a03] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:33.802523Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [3:7494350512705975089:2539]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-17T18:27:33.836570Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [3:7494350512705975089:2539]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:27:33.885079Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [3:7494350512705975089:2539]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-17T18:27:33.946665Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [3:7494350512705975089:2539]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] >> TTxDataShardTestInit::TestGetShardStateAfterInitialization |97.5%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardStats::OneChannelStatsCorrect >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |97.5%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> TestFilterSet::FilterGroup [GOOD] >> TDataShardRSTest::TestCleanupInRS+UseSink >> SequenceShardTests::Basics >> TestFilterSet::DuplicationValidation >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect |97.5%| [TS] 
{RESULT} ydb/core/kqp/gateway/ut/gtest >> MediatorTimeCast::ReadStepSubscribe >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> SequenceShardTests::NegativeIncrement >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> TestFilterSet::DuplicationValidation [GOOD] >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> SequenceShardTests::NegativeIncrement [GOOD] >> TestFilterSet::CompilationValidation >> TTxDataShardTestInit::TestResolvePathAfterRestart |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |97.5%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2025-04-17T18:27:39.517786Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-17T18:27:39.517940Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-17T18:27:39.535390Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-17T18:27:39.540700Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-17T18:27:39.540778Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-17T18:27:39.547868Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-17T18:27:39.548819Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-04-17T18:27:39.571762Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-17T18:27:39.572210Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-17T18:27:39.572261Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:39.572316Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-17T18:27:39.572577Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2025-04-17T18:27:39.572772Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2025-04-17T18:27:39.585854Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-17T18:27:39.586335Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 
72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:39.586443Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2025-04-17T18:27:39.598627Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.599024Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-17T18:27:39.599156Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2025-04-17T18:27:39.611537Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.611920Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-17T18:27:39.612023Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2025-04-17T18:27:39.624561Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.625170Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2025-04-17T18:27:39.625266Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2025-04-17T18:27:39.637681Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.638140Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2025-04-17T18:27:39.638277Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2025-04-17T18:27:39.638374Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.638663Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-17T18:27:39.638760Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2025-04-17T18:27:39.651127Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.651566Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-17T18:27:39.651676Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:39.651745Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.651989Z node 1 :SEQUENCESHARD TRACE: 
[sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:39.652144Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:39.664454Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-17T18:27:39.664880Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:39.664931Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:39.664998Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-17T18:27:39.683605Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-17T18:27:39.683712Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-17T18:27:39.684331Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-17T18:27:39.684950Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-17T18:27:39.686516Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-17T18:27:39.691551Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:39.691614Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:39.691677Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.691980Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-17T18:27:39.692062Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2025-04-17T18:27:39.704488Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.705061Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2025-04-17T18:27:39.705172Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-17T18:27:39.717466Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-17T18:27:39.717953Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-17T18:27:39.718058Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2025-04-17T18:27:39.754281Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.754850Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 
51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2025-04-17T18:27:39.754953Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-17T18:27:39.777932Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-17T18:27:39.778444Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-17T18:27:39.778561Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2025-04-17T18:27:39.790940Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:39.791424Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-17T18:27:39.791481Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxGetSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-17T18:27:39.791546Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Complete 2025-04-17T18:27:40.193397Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-17T18:27:40.193493Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-17T18:27:40.202133Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-17T18:27:40.205031Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-17T18:27:40.205092Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-17T18:27:40.206736Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxMarkSchemeShardPipe.Execute SchemeShardId# 123 Generation# 1 Round# 1 2025-04-17T18:27:40.206981Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-17T18:27:40.207054Z node 2 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-04-17T18:27:40.229346Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 7205 ... 
ENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-17T18:27:40.691685Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:40.691764Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:40.704009Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-17T18:27:40.704353Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-17T18:27:40.704404Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_FROZEN PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:40.704454Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:40.704813Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-17T18:27:40.704890Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-17T18:27:40.717074Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-17T18:27:40.718663Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Cache# 0 2025-04-17T18:27:40.718780Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2025-04-17T18:27:40.730939Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:40.731376Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-17T18:27:40.731437Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-17T18:27:40.731484Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-17T18:27:40.731689Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-17T18:27:40.731755Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-17T18:27:40.743948Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-17T18:27:40.744324Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-17T18:27:40.744415Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-17T18:27:40.756504Z 
node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-17T18:27:40.756825Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-17T18:27:40.756908Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-17T18:27:40.768982Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-17T18:27:40.769367Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:40.769417Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2025-04-17T18:27:40.769484Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:40.769778Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-17T18:27:40.769872Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-17T18:27:40.782196Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-17T18:27:40.782772Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-17T18:27:40.782928Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-17T18:27:40.795476Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-17T18:27:40.795921Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-17T18:27:40.796033Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-17T18:27:40.808361Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-17T18:27:40.808804Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-17T18:27:40.808865Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2025-04-17T18:27:40.808929Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-17T18:27:40.809211Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:40.809302Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 
AllocationIncrement# 1 2025-04-17T18:27:40.821774Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:41.225259Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-17T18:27:41.225350Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-17T18:27:41.236592Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-17T18:27:41.238902Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-17T18:27:41.238962Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-17T18:27:41.241031Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2025-04-17T18:27:41.241146Z node 4 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2025-04-17T18:27:41.263697Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-17T18:27:41.264062Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:41.264176Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-17T18:27:41.276168Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:41.276531Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:41.276631Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-17T18:27:41.288449Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:41.288769Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-17T18:27:41.288862Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2025-04-17T18:27:41.301018Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:41.301419Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-17T18:27:41.301467Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:41.301530Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:41.301827Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { 
OwnerId: 123 LocalId: 42 } Cycle: true 2025-04-17T18:27:41.301923Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-17T18:27:41.313837Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-17T18:27:41.314165Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:41.314304Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-17T18:27:41.326277Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-17T18:27:41.326646Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-17T18:27:41.326739Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-17T18:27:41.338523Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |97.5%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2025-04-17T18:27:10.218275Z :BasicWriteSession INFO: Random seed for debugging is 1744914430218238 2025-04-17T18:27:10.767770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350415234156179:2089];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:10.767862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:27:10.850855Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350412729078743:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:10.850949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:27:11.125797Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:27:11.133410Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00187a/r3tmp/tmpmgapDT/pdisk_1.dat 2025-04-17T18:27:11.529492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:11.529588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:11.531181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:11.531234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:11.539807Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:27:11.539957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:11.541191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:11.572892Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20123, node 1 2025-04-17T18:27:11.769688Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:27:11.769863Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:27:11.814207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/00187a/r3tmp/yandexMiJWTX.tmp 2025-04-17T18:27:11.814229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/00187a/r3tmp/yandexMiJWTX.tmp 2025-04-17T18:27:11.824588Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/00187a/r3tmp/yandexMiJWTX.tmp 2025-04-17T18:27:11.824737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:27:11.832817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:11.869905Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:11.980236Z INFO: TTestServer started on Port 11996 GrpcPort 20123 TClient is connected to server localhost:11996 PQClient connected to localhost:20123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:12.369077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:27:15.768180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350436708993647:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.768449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.768578Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350415234156179:2089];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:15.768637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:27:15.769135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350436708993674:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.811895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:27:15.851001Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350412729078743:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:15.851049Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:27:15.884877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350436708993708:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.885351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:15.888183Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-17T18:27:15.888345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350436708993676:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:27:16.175238Z node 1 :TX_PROXY ERROR: Actor# [1:7494350436708993757:2682] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:16.207881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:27:16.205361Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494350438498882796:2316], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:27:16.206561Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmJiMzMxNDMtZDZjMDg1YzEtM2FiMjIyMmMtMmYyMTAzZTU=, ActorId: [2:7494350438498882756:2310], ActorState: ExecuteState, TraceId: 01js2g2kyr16zhzb946gwx6cq0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:27:16.208536Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:27:16.220770Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494350441003961072:2350], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:27:16.222141Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTZhM2M0YjItYzVhYTNlZDktNjAwNDQ2MWUtODI4YTgyYjI=, ActorId: [1:7494350436708993644:2337], ActorState: ExecuteState, TraceId: 01js2g2knm4gjrbk250scgwmdk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:27:16.223370Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:27:16.370811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:27:16.541190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:20123", true, true, 1000), ("dc2", "d ... 
LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:3929 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:27:36.587977Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3929 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:27:37.090107Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3929 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:27:37.594289Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-17T18:27:37.601268Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-17T18:27:37.601818Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-17T18:27:37.601857Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:3929 2025-04-17T18:27:37.607271Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-17T18:27:37.607891Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-17T18:27:37.607933Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-17T18:27:37.608485Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-17T18:27:37.608597Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:32794 2025-04-17T18:27:37.608629Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:32794 proto=v1 topic=test-topic durationSec=0 2025-04-17T18:27:37.608639Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-17T18:27:37.609877Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-17T18:27:37.609975Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-17T18:27:37.609983Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-17T18:27:37.609992Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-17T18:27:37.610005Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-17T18:27:37.612034Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-17T18:27:37.739248Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-17T18:27:37.739617Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350530552584121:2531] connected; active server actors: 1 2025-04-17T18:27:37.740275Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-17T18:27:37.740304Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-17T18:27:37.740715Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350530552584121:2531] disconnected; active server actors: 1 
2025-04-17T18:27:37.740744Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350530552584121:2531] disconnected no session 2025-04-17T18:27:37.827815Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-17T18:27:37.827856Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-17T18:27:37.827871Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7494350530552584057:2531] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-17T18:27:37.827900Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-17T18:27:37.828665Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-04-17T18:27:37.828709Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7494350530552584148:2531], now have 1 active actors on pipe 2025-04-17T18:27:37.828734Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:27:37.828755Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:27:37.828833Z node 3 :PERSQUEUE INFO: new Cookie src|a77daa16-3dba039c-542e3154-e4ba783c_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-17T18:27:37.828929Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-17T18:27:37.828979Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:27:37.829095Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-17T18:27:37.829116Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-17T18:27:37.829172Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T18:27:37.829341Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|a77daa16-3dba039c-542e3154-e4ba783c_0 2025-04-17T18:27:37.830199Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744914457830 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:37.830322Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|a77daa16-3dba039c-542e3154-e4ba783c_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T18:27:37.830498Z :INFO: [] MessageGroupId [src] SessionId [src|a77daa16-3dba039c-542e3154-e4ba783c_0] Write session: close. 
Timeout = 0 ms 2025-04-17T18:27:37.830551Z :INFO: [] MessageGroupId [src] SessionId [src|a77daa16-3dba039c-542e3154-e4ba783c_0] Write session will now close 2025-04-17T18:27:37.830587Z :DEBUG: [] MessageGroupId [src] SessionId [src|a77daa16-3dba039c-542e3154-e4ba783c_0] Write session: aborting 2025-04-17T18:27:37.830990Z :INFO: [] MessageGroupId [src] SessionId [src|a77daa16-3dba039c-542e3154-e4ba783c_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:27:37.831024Z :DEBUG: [] MessageGroupId [src] SessionId [src|a77daa16-3dba039c-542e3154-e4ba783c_0] Write session: destroy 2025-04-17T18:27:37.832393Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|a77daa16-3dba039c-542e3154-e4ba783c_0 grpc read done: success: 0 data: 2025-04-17T18:27:37.832416Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a77daa16-3dba039c-542e3154-e4ba783c_0 grpc read failed 2025-04-17T18:27:37.832440Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a77daa16-3dba039c-542e3154-e4ba783c_0 grpc closed 2025-04-17T18:27:37.832453Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a77daa16-3dba039c-542e3154-e4ba783c_0 is DEAD 2025-04-17T18:27:37.833638Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:27:37.833791Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494350530552584148:2531] destroyed 2025-04-17T18:27:37.833832Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> TTxDataShardBuildIndexScan::BadRequest >> TCreateAndDropViewTest::CheckCreatedView >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> ServerRestartTest::RestartOnGetSession [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> TestFilterSet::CompilationValidation [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> TestFormatHandler::ManyJsonClients 
>> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> Splitter::Simple |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] >> Splitter::Simple [GOOD] >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |97.6%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2025-04-17T18:27:37.049029Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:27:37.064874Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:27:37.067791Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:108:2140] 2025-04-17T18:27:37.068824Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:37.086330Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:27:37.125075Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:37.125202Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:37.128046Z node 1 :TX_DATASHARD DEBUG: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:27:37.128132Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:27:37.128187Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:27:37.130040Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:37.130866Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:37.130981Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:131:2140] in generation 2 2025-04-17T18:27:37.155705Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:37.184785Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:27:37.185852Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:37.186009Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:136:2160] 2025-04-17T18:27:37.186042Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:27:37.186073Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:27:37.186103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:27:37.186339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:108:2140], Recipient [1:108:2140]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:37.186397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:37.187434Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:27:37.187549Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:27:37.187599Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:27:37.187639Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:37.187674Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:27:37.187703Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:27:37.187735Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:27:37.187761Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:27:37.187796Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:27:37.189761Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [1:99:2134], Recipient [1:108:2140]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 4294969430 } 2025-04-17T18:27:37.189825Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-17T18:27:40.304827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:40.305172Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:40.305362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000500/r3tmp/tmplyohlI/pdisk_1.dat 2025-04-17T18:27:40.744225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:40.790315Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:40.835469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:40.840500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:40.853292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:40.945006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:40.981802Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-17T18:27:40.982309Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:41.031650Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:41.031846Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:41.033834Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:27:41.033934Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:27:41.033993Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:27:41.034460Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:41.034625Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:41.034740Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-17T18:27:41.045820Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:41.045943Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:27:41.046089Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:41.046237Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-17T18:27:41.046284Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:41.046339Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:27:41.046385Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:41.046813Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:27:41.046922Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:27:41.047049Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:41.047089Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:41.047137Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:41.047232Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:41.047354Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-04-17T18:27:41.048927Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:41.050465Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:27:41.050603Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:27:41.052701Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:27:41.063434Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:27:41.063597Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:27:41.214437Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-17T18:27:41.217632Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:27:41.217697Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:41.218075Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:41.218120Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:27:41.218199Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:27:41.218447Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:27:41.218583Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:27:41.218732Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:41.218789Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:27:41.221088Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:27:41.222221Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:27:41.226217Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:27:41.226277Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:41.226919Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:27:41.227020Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:41.227883Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:41.227929Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:41.227988Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037 ... :45.039961Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:45.039984Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:45.040029Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-17T18:27:45.040561Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:45.040689Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:27:45.040751Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:27:45.042100Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:27:45.052870Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:27:45.052974Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:27:45.205424Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-17T18:27:45.205960Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:27:45.206001Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:45.206159Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:45.206222Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:27:45.206272Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:27:45.206489Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:27:45.206593Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:27:45.207740Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:45.207812Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, 
LocalPathId: 2] schema version# 1 2025-04-17T18:27:45.208170Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:27:45.208498Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:45.209743Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:27:45.209781Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:45.210384Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:27:45.210438Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:45.211125Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:45.211160Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:45.211191Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:27:45.211240Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:27:45.211278Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:27:45.211349Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:45.212414Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:27:45.214558Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:27:45.214609Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:27:45.215128Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:27:45.219370Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-17T18:27:45.221938Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-17T18:27:45.264391Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:742:2622] 2025-04-17T18:27:45.264614Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:45.269244Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:45.270106Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:45.272092Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:27:45.272167Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:27:45.272236Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:27:45.272612Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:45.272844Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:45.272904Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id 
[3:757:2622] in generation 2 2025-04-17T18:27:45.294327Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:45.294448Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-17T18:27:45.294533Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:27:45.294710Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:27:45.294793Z node 3 :TX_DATASHARD DEBUG: Resolve path at 72075186224037888: reason# empty path 2025-04-17T18:27:45.294916Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:761:2632] 2025-04-17T18:27:45.294952Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:45.294994Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:27:45.295031Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:45.295293Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-17T18:27:45.295478Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-17T18:27:45.296237Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:27:45.296331Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:27:45.296552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 742 RawX2: 12884904510 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-17T18:27:45.296899Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2025-04-17T18:27:45.296939Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:45.297275Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:27:45.297365Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:45.297402Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:45.297435Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:45.297475Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:45.297683Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:27:45.334732Z node 3 :TX_DATASHARD DEBUG: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-17T18:27:45.335018Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:27:45.335085Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:27:45.335305Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Execute at 72075186224037888 2025-04-17T18:27:45.337243Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:765:2636], serverId# [3:767:2637], sessionId# [0:0:0] 2025-04-17T18:27:45.350773Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Complete at 72075186224037888 |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus >> test_query_cache.py::TestQueryCache::test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> TestFormatHandler::ManyJsonClients [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> TTxDataShardBuildIndexScan::BadRequest [GOOD] >> TTxDataShardBuildIndexScan::RunScan >> TestFormatHandler::ManyRawClients >> BulkUpsert::BulkUpsert >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> BasicUsage::RecreateObserver [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> DataCleanup::ForceDataCleanup >> TxKeys::ComparePointKeys >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-04-17T18:25:54.204347Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1744914354204316 2025-04-17T18:25:54.611169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350088439497275:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:54.612879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:25:54.658640Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350085823351294:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:54.658683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:25:54.856110Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:25:54.860375Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001851/r3tmp/tmpnin4Hi/pdisk_1.dat 2025-04-17T18:25:55.216373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:55.216458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:55.223961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:25:55.224027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:25:55.225253Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 
2025-04-17T18:25:55.227022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:55.230723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:25:55.239551Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7734, node 1 2025-04-17T18:25:55.286361Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:25:55.286384Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:25:55.403935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001851/r3tmp/yandexHg07Tl.tmp 2025-04-17T18:25:55.403961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001851/r3tmp/yandexHg07Tl.tmp 2025-04-17T18:25:55.404091Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001851/r3tmp/yandexHg07Tl.tmp 2025-04-17T18:25:55.404216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:25:55.463569Z INFO: TTestServer started on Port 27730 GrpcPort 7734 TClient is connected to server localhost:27730 PQClient connected to localhost:7734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:25:55.738505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:25:57.859175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350101324400039:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:57.859263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350101324400031:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:57.859641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:25:57.864264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:25:57.895939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350101324400046:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:25:58.138166Z node 1 :TX_PROXY ERROR: Actor# [1:7494350101324400134:2685] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:25:58.171169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:25:58.197640Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494350098708253521:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:25:58.198420Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGE5MWRmNWQtNDI0NzJlYjAtNjY0OWExMWMtNDI2NzYyNWI=, ActorId: [2:7494350098708253484:2308], ActorState: ExecuteState, TraceId: 01js2g07pyavk4encv44mq2aac, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:25:58.200859Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:25:58.207391Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494350105619367440:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:25:58.207951Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzRjMGY3ZDItNzMwODY3NmEtMTFiZDI5YjctOGMwMzU3YTE=, ActorId: [1:7494350101324400029:2336], ActorState: ExecuteState, TraceId: 01js2g07k05wqh03w3grtwaz57, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:25:58.208374Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:25:58.320010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:25:58.460864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:7734", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-17T18:25:58.659753Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2g088fcrv60abg91bgw886, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA4NTM5MmEtMzlkODRmODQtZWFiNzQzZmUtMmU1MTQ1Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494350105619367903:3032] 2025-04-17T18:25:59.595303Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350088439497275:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:59.595413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:25:59.662292Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350085823351294:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:25:59.662374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-17T18:26:05.024223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:7734 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-17T18:26:05.190983Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to lo ... ition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user session is set to 0 (startOffset 0) session shared/user_3_1_2810021216467056877_v1 2025-04-17T18:27:47.191838Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-17T18:27:47.191870Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-17T18:27:47.191892Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-17T18:27:47.191916Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-17T18:27:47.191928Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-17T18:27:47.191941Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-17T18:27:47.191964Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-17T18:27:47.191986Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-17T18:27:47.192039Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-17T18:27:47.194250Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-04-17T18:27:47.194266Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T18:27:47.194449Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2810021216467056877_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 SizeLag: 0 WriteTimestampEstimateMS: 1744914467099 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-17T18:27:47.194496Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2810021216467056877_v1 INIT DONE TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-17T18:27:47.194576Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2810021216467056877_v1 sending to client partition status >>> Got event: StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 CommittedOffset: 0 EndOffset: 0 } 2025-04-17T18:27:47.195820Z :INFO: [/Root] [/Root] [50afe62c-11d93cc9-e37dd48c-e8bba320] Closing read session. 
Close timeout: 0.000000s 2025-04-17T18:27:47.195871Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:47.195909Z :INFO: [/Root] [/Root] [50afe62c-11d93cc9-e37dd48c-e8bba320] Counters: { Errors: 0 CurrentSessionLifetimeMs: 31 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:47.195994Z :NOTICE: [/Root] [/Root] [50afe62c-11d93cc9-e37dd48c-e8bba320] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T18:27:47.196030Z :DEBUG: [/Root] [/Root] [50afe62c-11d93cc9-e37dd48c-e8bba320] [] Abort session to cluster 2025-04-17T18:27:47.196377Z :INFO: [/Root] [/Root] [1cfa3e5b-fb840fb7-58493c3d-d2b9674] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:47.196417Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:47.196446Z :INFO: [/Root] [/Root] [1cfa3e5b-fb840fb7-58493c3d-d2b9674] Counters: { Errors: 0 CurrentSessionLifetimeMs: 28 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:47.196482Z :NOTICE: [/Root] [/Root] [1cfa3e5b-fb840fb7-58493c3d-d2b9674] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T18:27:47.196499Z :DEBUG: [/Root] [/Root] [1cfa3e5b-fb840fb7-58493c3d-d2b9674] [] Abort session to cluster 2025-04-17T18:27:47.196685Z :INFO: [/Root] [/Root] [5aa4d3b6-29cea80a-a970ab4d-94a8e6f] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:47.196715Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-17T18:27:47.196735Z :INFO: [/Root] [/Root] [5aa4d3b6-29cea80a-a970ab4d-94a8e6f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 27 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:47.196766Z :NOTICE: [/Root] [/Root] [5aa4d3b6-29cea80a-a970ab4d-94a8e6f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T18:27:47.196780Z :DEBUG: [/Root] [/Root] [5aa4d3b6-29cea80a-a970ab4d-94a8e6f] [] Abort session to cluster 2025-04-17T18:27:47.196945Z :INFO: [/Root] [/Root] [5aa4d3b6-29cea80a-a970ab4d-94a8e6f] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:47.196982Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-17T18:27:47.197017Z :INFO: [/Root] [/Root] [5aa4d3b6-29cea80a-a970ab4d-94a8e6f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 27 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:47.196956Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_7031752373708829218_v1 grpc read done: success# 0, data# { } 2025-04-17T18:27:47.196990Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_7031752373708829218_v1 grpc read failed 2025-04-17T18:27:47.197109Z :NOTICE: [/Root] [/Root] [5aa4d3b6-29cea80a-a970ab4d-94a8e6f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:47.197027Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_7031752373708829218_v1 closed 2025-04-17T18:27:47.197248Z :INFO: [/Root] [/Root] [1cfa3e5b-fb840fb7-58493c3d-d2b9674] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:47.197267Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:47.197286Z :INFO: [/Root] [/Root] [1cfa3e5b-fb840fb7-58493c3d-d2b9674] Counters: { Errors: 0 CurrentSessionLifetimeMs: 29 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:47.197231Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_7031752373708829218_v1 is DEAD 2025-04-17T18:27:47.197315Z :NOTICE: [/Root] [/Root] [1cfa3e5b-fb840fb7-58493c3d-d2b9674] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:47.197345Z :INFO: [/Root] [/Root] [50afe62c-11d93cc9-e37dd48c-e8bba320] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:47.197324Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_9781995596895398873_v1 grpc read done: success# 0, data# { } 2025-04-17T18:27:47.197364Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-17T18:27:47.197343Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9781995596895398873_v1 grpc read failed 2025-04-17T18:27:47.197366Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9781995596895398873_v1 grpc closed 2025-04-17T18:27:47.197392Z :INFO: [/Root] [/Root] [50afe62c-11d93cc9-e37dd48c-e8bba320] Counters: { Errors: 0 CurrentSessionLifetimeMs: 33 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:47.197423Z :NOTICE: [/Root] [/Root] [50afe62c-11d93cc9-e37dd48c-e8bba320] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:47.197408Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9781995596895398873_v1 is DEAD 2025-04-17T18:27:47.197804Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350573474857346:2573] disconnected; active server actors: 1 2025-04-17T18:27:47.198265Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350573474857346:2573] client user disconnected session shared/user_3_3_7031752373708829218_v1 2025-04-17T18:27:47.198463Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-17T18:27:47.198533Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350573474857349:2572] disconnected; active server actors: 1 2025-04-17T18:27:47.198551Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350573474857349:2572] client user disconnected session shared/user_3_2_9781995596895398873_v1 2025-04-17T18:27:47.198589Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2025-04-17T18:27:47.198620Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-17T18:27:47.198644Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000031s 2025-04-17T18:27:47.198878Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2810021216467056877_v1 grpc read done: success# 0, data# { } 2025-04-17T18:27:47.198897Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2810021216467056877_v1 grpc read failed 2025-04-17T18:27:47.198918Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2810021216467056877_v1 closed 2025-04-17T18:27:47.198980Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2810021216467056877_v1 is DEAD 2025-04-17T18:27:47.199166Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_2810021216467056877_v1 2025-04-17T18:27:47.199202Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494350573474857351:2580] destroyed 2025-04-17T18:27:47.199255Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_2810021216467056877_v1 2025-04-17T18:27:47.199432Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350573474857347:2571] disconnected; active server actors: 1 2025-04-17T18:27:47.199471Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350573474857347:2571] client user disconnected session shared/user_3_1_2810021216467056877_v1 >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> test_example.py::TestExample::test_example >> SdkCredProvider::PingFromProviderSyncDiscovery >> MediatorTimeCast::GranularTimecast [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> TestFormatHandler::ManyRawClients [GOOD] >> TestFormatHandler::ClientValidation >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> MediatorTest::BasicTimecastUpdates >> TRUCalculatorTests::TestReadTable [GOOD] >> TRUCalculatorTests::TestBulkUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: 2025-04-17T18:27:43.729105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:43.729717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:43.729776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000628/r3tmp/tmp2vp8Fo/pdisk_1.dat 2025-04-17T18:27:44.254490Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-04-17T18:27:44.255379Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-17T18:27:44.259696Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 0 2025-04-17T18:27:44.294149Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:44.338848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:44.339714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:44.352488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:44.437499Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 500 2025-04-17T18:27:44.547728Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1000 2025-04-17T18:27:44.730467Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2025-04-17T18:27:44.857810Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-04-17T18:27:44.985213Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2025-04-17T18:27:45.123635Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2025-04-17T18:27:45.171890Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 7000 } 2025-04-17T18:27:45.308333Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 2025-04-17T18:27:45.434750Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2025-04-17T18:27:45.438407Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientDestroyed 2025-04-17T18:27:45.458655Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-17T18:27:45.459335Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE 
TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 7000 2025-04-17T18:27:45.471434Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 12000 } 2025-04-17T18:27:45.576147Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 7500 2025-04-17T18:27:45.678608Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8000 2025-04-17T18:27:45.826694Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2025-04-17T18:27:45.952722Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2025-04-17T18:27:46.101223Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2025-04-17T18:27:46.249568Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2025-04-17T18:27:49.391546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:49.391887Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:49.392108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000628/r3tmp/tmpsZ0ELU/pdisk_1.dat 2025-04-17T18:27:49.687386Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-17T18:27:49.688216Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2025-04-17T18:27:49.688292Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-17T18:27:49.688351Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:650:2548] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2025-04-17T18:27:49.688725Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-17T18:27:49.688926Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 0 2025-04-17T18:27:49.689066Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} 2025-04-17T18:27:49.689328Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-17T18:27:49.689418Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2025-04-17T18:27:49.689474Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:653:2550] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 0} 2025-04-17T18:27:49.689704Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 0 2025-04-17T18:27:49.689955Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-17T18:27:49.690044Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2025-04-17T18:27:49.690097Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:654:2551] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 0} 2025-04-17T18:27:49.690342Z node 2 
:TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 0 2025-04-17T18:27:49.713515Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:49.750873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:49.751004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:49.762606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:49.837462Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 500 2025-04-17T18:27:49.837567Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 500} 2025-04-17T18:27:49.947287Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 1000 2025-04-17T18:27:49.947377Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2025-04-17T18:27:50.135427Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2000 2025-04-17T18:27:50.135515Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... have step 0 and 2000 after sleep 2025-04-17T18:27:50.223841Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx1 planned at step 2500 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 2500 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet 2025-04-17T18:27:50.327792Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 3000 ... tx2 planned at step 3000 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 3000 ... unblocking tx1 at tablet2 ... unblocking NKikimr::TEvTxProcessing: ... R_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.516459Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.527300Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.538049Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.548795Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.570340Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.591709Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.613193Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... 
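The FrozenTablets / FrozenSteps pairs in the updates above show what "granular" timecast means: while the bucket's LatestStep keeps advancing (3000, 3500), a tablet listed as frozen stays pinned at its FrozenSteps value until a later update names it in UnfrozenTablets. A minimal sketch of that bookkeeping follows — illustrative only, with invented type and field names standing in for the real NKikimr protobuf messages:

#include <cstdint>
#include <unordered_map>
#include <utility>
#include <vector>

// Toy model of per-bucket granular timecast state: tablets that appear in
// FrozenTablets are pinned at their FrozenSteps value; everyone else follows
// the bucket's LatestStep. Names are invented for this sketch.
struct TGranularBucketModel {
    uint64_t LatestStep = 0;
    std::unordered_map<uint64_t, uint64_t> FrozenSteps; // tabletId -> pinned step

    void ApplyUpdate(uint64_t latestStep,
                     const std::vector<std::pair<uint64_t, uint64_t>>& frozen,
                     const std::vector<uint64_t>& unfrozen) {
        LatestStep = latestStep;
        for (auto [tabletId, step] : frozen)
            FrozenSteps[tabletId] = step;  // e.g. tablet 72057594047365121 pinned at 2999
        for (uint64_t tabletId : unfrozen)
            FrozenSteps.erase(tabletId);   // an UnfrozenTablets entry releases the pin
    }

    uint64_t ObservedStep(uint64_t tabletId) const {
        auto it = FrozenSteps.find(tabletId);
        return it != FrozenSteps.end() ? it->second : LatestStep;
    }
};

Read against the trace, tablet 72057594047365121 is pinned at 2999, then 3499, and finally released while the bucket sits at LatestStep 3500 — matching the "... tablet2 at ..." checkpoints the test prints.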
restarting mediator 2025-04-17T18:27:50.625073Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientDestroyed 2025-04-17T18:27:50.625286Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 7 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-17T18:27:50.625355Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-17T18:27:50.626215Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-17T18:27:50.626403Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 8 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-17T18:27:50.626459Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-17T18:27:50.627054Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-17T18:27:50.627152Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 9 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-17T18:27:50.627185Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-17T18:27:50.633159Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-17T18:27:50.633668Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-17T18:27:50.657036Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2500} ... tablet1 at 2500 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-17T18:27:50.668215Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... tablet1 at 3000 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-17T18:27:50.679365Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 3500 ... tablet3 at 3500 |97.6%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |97.6%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> TRangeTreap::Simple [GOOD] >> TRangeTreap::Sequential |97.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |97.7%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> BasicUsage::ReadMirrored [GOOD] >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> Splitter::Crit [GOOD] >> Splitter::CritSimple >> test_query_cache.py::TestQueryCache::test [GOOD] >> StatisticsScan::RunScanOnShard >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction >> DataShardReplication::SimpleApplyChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-04-17T18:27:15.870322Z :PropagateSessionClosed INFO: Random seed for debugging is 1744914435870286 2025-04-17T18:27:16.450910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350437465543478:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:16.453309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:27:16.542485Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350440820265340:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:16.855717Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:27:16.863119Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:27:16.863373Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/001876/r3tmp/tmpRW03Fd/pdisk_1.dat 2025-04-17T18:27:17.160581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:17.160664Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:17.162440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:17.162547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:17.168569Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:27:17.168729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:17.169271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18121, node 1 2025-04-17T18:27:17.255225Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:27:17.255825Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:27:17.446275Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:17.481470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:17.570757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/001876/r3tmp/yandexecd1s1.tmp 2025-04-17T18:27:17.570781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/001876/r3tmp/yandexecd1s1.tmp 2025-04-17T18:27:17.570905Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/001876/r3tmp/yandexecd1s1.tmp 2025-04-17T18:27:17.571033Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:27:17.650140Z INFO: TTestServer started on Port 28112 GrpcPort 18121 TClient is connected to server localhost:28112 PQClient connected to localhost:18121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:18.093177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-17T18:27:20.897928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350454645413648:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.899165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350454645413629:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.899271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.903042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T18:27:20.910687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350454645413684:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:20.910805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:21.000541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350454645413652:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T18:27:21.289022Z node 1 :TX_PROXY ERROR: Actor# [1:7494350458940381027:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:21.336652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:27:21.339339Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494350458000134677:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:27:21.339673Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494350458940381038:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:27:21.341452Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThhYTJkZjctZGEyZmQyZDktOTdkYmFiMDAtNzNkNjY2Mjc=, ActorId: [1:7494350454645413625:2335], ActorState: ExecuteState, TraceId: 01js2g2rnq8p0z990rbceagpr1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:27:21.343703Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTM1NzU5YzItYWY3NzI3M2QtZDRkMzlhYWUtM2FlNmIyZGQ=, ActorId: [2:7494350458000134629:2309], ActorState: ExecuteState, TraceId: 01js2g2rqt2zwba13c3z6vxswm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:27:21.345408Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:27:21.345425Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:27:21.441961Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350437465543478:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:21.442025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:27:21.522291Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350440820265340:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:21.522389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:27:21.609893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:27:21.770003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:18121", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-17T18:27:22.070384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01js2g2snsbh48356t8bew4ev2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQ4MTJiMjItM2JmNDQ2NGUtYWQ0OTMzNWItNTJmNDRlMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Ro ... :[::1]:39804" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2025-04-17T18:27:51.665000Z WriteTime: 2025-04-17T18:27:51.696000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "_ip": "ipv6:[::1]:39804", "logtype": "unknown", "server": "ipv6:[::1]:39804" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2025-04-17T18:27:51.665000Z WriteTime: 2025-04-17T18:27:51.696000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "_ip": "ipv6:[::1]:39804", "logtype": "unknown", "server": "ipv6:[::1]:39804" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. 
Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2025-04-17T18:27:51.666000Z WriteTime: 2025-04-17T18:27:51.696000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "_ip": "ipv6:[::1]:39804", "logtype": "unknown", "server": "ipv6:[::1]:39804" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2025-04-17T18:27:51.709526Z :DEBUG: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] [] The application data is transferred to the client. Number of messages 4, size 1180 bytes 2025-04-17T18:27:51.709511Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_420172196722996387_v1 got read request: guid# a5e493c8-f332cab3-5b24595a-e806ced5 2025-04-17T18:27:51.709561Z :DEBUG: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] [] Returning serverBytesSize = 0 to budget 2025-04-17T18:27:51.766493Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|2f5d6069-544e515d-77048fcd-2573ce56_0] Write session will now close 2025-04-17T18:27:51.766570Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|2f5d6069-544e515d-77048fcd-2573ce56_0] Write session: aborting 2025-04-17T18:27:51.767437Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|2f5d6069-544e515d-77048fcd-2573ce56_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-04-17T18:27:51.767448Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|2f5d6069-544e515d-77048fcd-2573ce56_0] Write session is aborting and will not restart 2025-04-17T18:27:51.767524Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|2f5d6069-544e515d-77048fcd-2573ce56_0] Write session: destroy 2025-04-17T18:27:51.768197Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|2f5d6069-544e515d-77048fcd-2573ce56_0 grpc read done: success: 0 data: 2025-04-17T18:27:51.768227Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|2f5d6069-544e515d-77048fcd-2573ce56_0 grpc read failed 2025-04-17T18:27:51.768257Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|2f5d6069-544e515d-77048fcd-2573ce56_0 grpc closed 2025-04-17T18:27:51.768274Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|2f5d6069-544e515d-77048fcd-2573ce56_0 is DEAD 2025-04-17T18:27:51.768413Z :INFO: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] Closing read session. Close timeout: 18446744073709.551615s 2025-04-17T18:27:51.768474Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 2025-04-17T18:27:51.768520Z :INFO: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] Counters: { Errors: 0 CurrentSessionLifetimeMs: 499 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:51.769102Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:27:51.769261Z :INFO: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] Closing read session. 
Close timeout: 0.000000s 2025-04-17T18:27:51.769307Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 2025-04-17T18:27:51.769349Z :INFO: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] Counters: { Errors: 0 CurrentSessionLifetimeMs: 500 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:51.769401Z :INFO: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] Closing read session. Close timeout: 0.000000s 2025-04-17T18:27:51.769356Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_420172196722996387_v1 grpc read done: success# 0, data# { } 2025-04-17T18:27:51.769376Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_420172196722996387_v1 grpc read failed 2025-04-17T18:27:51.769442Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 2025-04-17T18:27:51.769407Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_420172196722996387_v1 closed 2025-04-17T18:27:51.769472Z :INFO: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] Counters: { Errors: 0 CurrentSessionLifetimeMs: 500 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:27:51.769554Z :NOTICE: [/Root] [/Root] [3f5d2b5-a2cfe06a-8528d136-112bbd11] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:27:51.769463Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7494350588148424191:2631] destroyed 2025-04-17T18:27:51.769525Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-17T18:27:51.769827Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_420172196722996387_v1 is DEAD 2025-04-17T18:27:51.770406Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350588148424031:2610] disconnected; active server actors: 1 2025-04-17T18:27:51.770436Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7494350588148424031:2610] client user disconnected session shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.770510Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7494350588148424032:2610] disconnected; active server actors: 1 2025-04-17T18:27:51.770537Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7494350588148424032:2610] client user disconnected session shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.770715Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7494350588148424030:2610] disconnected; active server actors: 1 2025-04-17T18:27:51.770679Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.770736Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7494350588148424030:2610] client user disconnected session shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.770713Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494350588148424046:2617] destroyed 2025-04-17T18:27:51.770753Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.770917Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.770956Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7494350588148424045:2616] destroyed 2025-04-17T18:27:51.770982Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.771001Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7494350588148424044:2615] destroyed 2025-04-17T18:27:51.771032Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.771047Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_420172196722996387_v1 2025-04-17T18:27:51.902018Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7494350588148424206:2625] TxId: 281474976710706. Ctx: { TraceId: 01js2g3pm827631fpvjsbjj3ab, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjBmNWU4ZTUtYmQyYWZjMDUtZTJkYzE0M2YtYTUwZGZkMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-17T18:27:51.902710Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494350588148424215:2638], TxId: 281474976710706, task: 2. 
Ctx: { TraceId : 01js2g3pm827631fpvjsbjj3ab. SessionId : ydb://session/3?node_id=3&id=NjBmNWU4ZTUtYmQyYWZjMDUtZTJkYzE0M2YtYTUwZGZkMDU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7494350588148424206:2625], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T18:27:51.903003Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7494350588148424217:2639], TxId: 281474976710706, task: 4. Ctx: { TraceId : 01js2g3pm827631fpvjsbjj3ab. SessionId : ydb://session/3?node_id=3&id=NjBmNWU4ZTUtYmQyYWZjMDUtZTJkYzE0M2YtYTUwZGZkMDU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7494350588148424206:2625], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T18:27:52.189189Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710707. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T18:27:52.189326Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7494350592443391524:2640] TxId: 281474976710707. Ctx: { TraceId: 01js2g3q2h46pex88ywxt5ews3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTg5NjY5ZjYtZWNhMzFiYTMtNDk5NjJiMGQtODg5MjEyN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-17T18:27:52.189559Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTg5NjY5ZjYtZWNhMzFiYTMtNDk5NjJiMGQtODg5MjEyN2Q=, ActorId: [3:7494350592443391521:2640], ActorState: ExecuteState, TraceId: 01js2g3q2h46pex88ywxt5ews3, Create QueryResponse for error on request, msg: 2025-04-17T18:27:52.190576Z node 3 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01js2g3q2h46pex88yx1a10z6r" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> XmlBuilderTest::WritesProperly [GOOD] >> XmlBuilderTest::MacroBuilder >> XmlBuilderTest::MacroBuilder [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> TestFormatHandler::ClientValidation [GOOD] |97.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/http/ut/unittest >> XmlBuilderTest::MacroBuilder [GOOD] |97.7%| [TS] {RESULT} ydb/core/ymq/http/ut/unittest |97.7%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Graph::LocalBordersOnGet [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> TestFormatHandler::ClientError >> MediatorTest::BasicTimecastUpdates [GOOD] |97.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2025-04-17T18:27:50.223255Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:27:50.247723Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:27:50.249146Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:126:2151] 2025-04-17T18:27:50.250191Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:50.296997Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:126:2151]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:27:50.303673Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:50.394106Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:50.398509Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:27:50.398611Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:27:50.398722Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:27:50.400269Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:50.400797Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:50.400873Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2151] in generation 2 2025-04-17T18:27:50.402758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:27:50.402805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:50.509017Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:50.539880Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-17T18:27:50.541665Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:50.541816Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-17T18:27:50.541871Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:27:50.541917Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-17T18:27:50.541958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:27:50.542136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:50.542210Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:50.543358Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:27:50.543485Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:27:50.543657Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:27:50.543710Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:50.543853Z node 1 
:TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:27:50.543913Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:27:50.543960Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:27:50.543999Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:27:50.544049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:27:50.544184Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:50.544241Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:50.544318Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-17T18:27:50.547989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:126:2151]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-17T18:27:50.548068Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:27:50.548156Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-17T18:27:50.548420Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-17T18:27:50.548503Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-17T18:27:50.548562Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-17T18:27:50.548670Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:27:50.548711Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-17T18:27:50.548763Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-17T18:27:50.548804Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:27:50.549127Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-17T18:27:50.549184Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-17T18:27:50.549222Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-17T18:27:50.549256Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:27:50.549317Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-17T18:27:50.549352Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-17T18:27:50.549386Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-17T18:27:50.549427Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-17T18:27:50.549456Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to 
execute on unit WaitForPlan 2025-04-17T18:27:50.562230Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-17T18:27:50.562326Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-17T18:27:50.562369Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-17T18:27:50.562436Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-17T18:27:50.563220Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-17T18:27:50.565358Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:50.565418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:50.565471Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-17T18:27:50.565539Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:126:2151]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-17T18:27:50.565572Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:27:50.565757Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-17T18:27:50.565811Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:27:50.565872Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-17T18:27:50.565916Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-17T18:27:50.578471Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-17T18:27:50.578766Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:27:50.579624Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:126:2151], Recipient [1:126:2151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:50.579820Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:50.580099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:27:50.580260Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:27:50.580394Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:27:50.580569Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-17T18:27:50.580701Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-17T18:27:50.580895Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:27:50.581054Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-17T18:27:50.581249Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-17T18:27:50.581365Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-17T18:27:50.581879Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-17T18:27:50.581958Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:27:50.581986Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-17T18:27:50.582012Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-17T18:27:50.582037Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-17T18:27:50.582155Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-17T18:27:50.582235Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-17T18:27:50.582374Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-17T18:27:50.582452Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-17T18:27:50.582540Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-17T18:27:50.582679Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-17T18:27:50.582788Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 25-04-17T18:27:54.764696Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CreateIncrementalRestoreSrc 2025-04-17T18:27:54.764721Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:27:54.764791Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CreateIncrementalRestoreSrc 2025-04-17T18:27:54.764822Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompleteOperation 2025-04-17T18:27:54.764849Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompleteOperation 2025-04-17T18:27:54.765088Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is DelayComplete 2025-04-17T18:27:54.765126Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompleteOperation 2025-04-17T18:27:54.765166Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompletedOperations 2025-04-17T18:27:54.765206Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompletedOperations 2025-04-17T18:27:54.765241Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-17T18:27:54.765282Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompletedOperations 2025-04-17T18:27:54.765312Z node 5 :TX_DATASHARD TRACE: Execution plan for [1000001:1] at 9437184 has finished 2025-04-17T18:27:54.765363Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:54.765402Z node 5 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-17T18:27:54.765443Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:27:54.765479Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 
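The TX_DATASHARD trace above advances each transaction through a fixed chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue, LoadTxDetails and so on through CompleteOperation once the plan step arrives), reporting a per-unit status: Executed, DelayComplete(NoMoreRestarts), or "not ready". A rough, self-contained model of that pipeline pattern — illustrative only, with invented types in place of the NKikimr::NDataShard ones:

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Simplified execution-unit pipeline: an operation is advanced unit by unit;
// a unit can finish immediately ("Executed"), defer side effects to the
// commit callback ("DelayComplete"), or park the operation until an external
// event arrives ("not ready", as WaitForPlan does above).
enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;
};

void AdvancePlan(std::vector<TUnit>& plan, size_t& pos) {
    while (pos < plan.size()) {
        EStatus st = plan[pos].Execute();
        if (st == EStatus::NotReady) {
            std::cout << "Operation is not ready to execute on unit "
                      << plan[pos].Name << "\n";
            return; // re-entered later, e.g. when TEvPlanStep is delivered
        }
        std::cout << "executing on unit " << plan[pos].Name
                  << (st == EStatus::DelayComplete ? " (completion deferred)" : "")
                  << "\n";
        ++pos;
    }
}

int main() {
    size_t pos = 0;
    std::vector<TUnit> plan{
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }}, // waits for a plan step
    };
    AdvancePlan(plan, pos);
}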
2025-04-17T18:27:54.767071Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:132:2155]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-17T18:27:54.767125Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T18:27:54.767164Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-17T18:27:54.767207Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:27:54.782535Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2025-04-17T18:27:54.782630Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2025-04-17T18:27:54.782712Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:27:54.783481Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:27:54.783537Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2025-04-17T18:27:54.783579Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:27:54.783633Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-17T18:27:54.783673Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2025-04-17T18:27:54.783737Z node 5 :TX_DATASHARD DEBUG: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client [5:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-17T18:27:54.783800Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-17T18:27:54.783929Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:27:54.784677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 132 RawX2: 21474838635 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2025-04-17T18:27:54.784795Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:226:2224], Recipient [5:132:2155]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:228:2225] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:27:54.784838Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:27:54.784928Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:122:2148], Recipient [5:132:2155]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-17T18:27:54.784962Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T18:27:54.785005Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-17T18:27:54.785082Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-17T18:27:54.785516Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [5:99:2134], Recipient [5:132:2155]: NActors::TEvents::TEvPoison 2025-04-17T18:27:54.785905Z node 5 :TX_DATASHARD INFO: OnDetach: 9437184 2025-04-17T18:27:54.786021Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 9437184 2025-04-17T18:27:54.797509Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [5:231:2226], Recipient [5:234:2227]: NKikimr::TEvTablet::TEvBoot 
2025-04-17T18:27:54.801826Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [5:231:2226], Recipient [5:234:2227]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:27:54.801973Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [5:231:2226], Recipient [5:234:2227]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:27:54.809788Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:234:2227] 2025-04-17T18:27:54.810077Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:54.818493Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-04-17T18:27:54.861505Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:54.861668Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:54.863778Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-17T18:27:54.863862Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-17T18:27:54.863927Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-17T18:27:54.864376Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:54.864557Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:54.864619Z node 5 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [5:277:2227] in generation 3 2025-04-17T18:27:54.881201Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:54.881347Z node 5 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-04-17T18:27:54.881459Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-17T18:27:54.881632Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-04-17T18:27:54.881894Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [5:282:2266] 2025-04-17T18:27:54.881937Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-17T18:27:54.881995Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-17T18:27:54.882036Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:27:54.882420Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-17T18:27:54.882541Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-17T18:27:54.882659Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [5:234:2227], Recipient [5:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:54.882715Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:54.883001Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-17T18:27:54.883117Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-17T18:27:54.883275Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:234:2227]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-17T18:27:54.883313Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-17T18:27:54.883373Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 
2025-04-17T18:27:54.883436Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-17T18:27:54.883577Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 234 RawX2: 21474838707 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-04-17T18:27:54.883696Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [5:24:2071], Recipient [5:234:2227]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-04-17T18:27:54.883733Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-17T18:27:54.883780Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-04-17T18:27:54.883859Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-17T18:27:54.883905Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:54.883947Z node 5 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-17T18:27:54.883989Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-17T18:27:54.884028Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-17T18:27:54.884067Z node 5 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-17T18:27:54.884114Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-17T18:27:54.884222Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:280:2264], Recipient [5:234:2227]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:284:2268] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:27:54.884262Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:27:54.884372Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:122:2148], Recipient [5:234:2227]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-17T18:27:54.884431Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T18:27:54.884480Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-17T18:27:54.884541Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-17T18:27:54.903352Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [5:280:2264], Recipient [5:234:2227]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:280:2264] ServerId: [5:284:2268] } 2025-04-17T18:27:54.903419Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |97.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> MediatorTest::MultipleTablets |97.7%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:27:34.596432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:27:34.596570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:27:34.596663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:27:34.596725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:27:34.597547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:27:34.597610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:27:34.597792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:27:34.597897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:27:34.599263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:34.693749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:27:34.693821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:34.715035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:34.715532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:27:34.715758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:27:34.730518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:27:34.730809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:27:34.734382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:27:34.735101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:27:34.749145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:27:34.772390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:27:34.772498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:27:34.773237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:27:34.773309Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:27:34.773417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:27:34.773591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.781865Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:27:34.944626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:27:34.945636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.947441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:27:34.948654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:27:34.948739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.957505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:27:34.957656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:27:34.957911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.958059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:27:34.958129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:27:34.958169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:27:34.960704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.960787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:27:34.960835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:27:34.963420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.963510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.963578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:27:34.963654Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:27:34.967927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:27:34.970564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:27:34.971542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:27:34.972682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:27:34.972833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:27:34.972895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:27:34.974396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:27:34.974502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:27:34.974746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:27:34.975086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:27:34.978688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:27:34.978765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:27:34.978959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:27:34.979016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:27:34.979440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:27:34.979533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:27:34.979654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:27:34.979698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:27:34.979740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:27:34.979772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:27:34.979823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:27:34.979895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:27:34.979936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:27:34.979971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:27:34.980045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:27:34.980085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:27:34.980122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:27:34.982218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:27:34.982377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:27:34.982422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... cs 2025-04-17T18:27:55.477091Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 101 } Time: 101 2025-04-17T18:27:55.477121Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.477150Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.477185Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.477285Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 102 } Time: 102 2025-04-17T18:27:55.477313Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.477342Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.477374Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.477447Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 103 } Time: 103 2025-04-17T18:27:55.477472Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.477500Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.477535Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.477622Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 104 } Time: 104 2025-04-17T18:27:55.477646Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.477672Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.477704Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.477802Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 105 } Time: 105 2025-04-17T18:27:55.477832Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.477865Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.477899Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.477985Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 106 } Time: 106 2025-04-17T18:27:55.478010Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.478038Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.478070Z node 6 :GRAPH 
TRACE: DB Stored metrics 2025-04-17T18:27:55.478152Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 107 } Time: 107 2025-04-17T18:27:55.478190Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.478220Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.478267Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.478362Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 108 } Time: 108 2025-04-17T18:27:55.478392Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.478421Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.478457Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.478551Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2025-04-17T18:27:55.478593Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.478622Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.478654Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.478748Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2025-04-17T18:27:55.478772Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.478797Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.478828Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.478894Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 2025-04-17T18:27:55.478918Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.478948Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.478980Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.479076Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2025-04-17T18:27:55.479101Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.479131Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.479166Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.479286Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2025-04-17T18:27:55.479316Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.479344Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.479377Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.479472Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2025-04-17T18:27:55.479499Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.479527Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.479561Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.479647Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2025-04-17T18:27:55.479674Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.479698Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.479730Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.479806Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2025-04-17T18:27:55.479849Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.479877Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.479908Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.480002Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 117 } 
Time: 117 2025-04-17T18:27:55.480028Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.480054Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.480087Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.480166Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2025-04-17T18:27:55.480190Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.480228Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.480261Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.480331Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2025-04-17T18:27:55.480356Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-17T18:27:55.480385Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-17T18:27:55.480424Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-17T18:27:55.480506Z node 6 :GRAPH TRACE: SHARD Handle TEvGraph::TEvGetMetrics from [6:574:2503] 2025-04-17T18:27:55.480573Z node 6 :GRAPH DEBUG: SHARD TTxGetMetrics::Execute 2025-04-17T18:27:55.480630Z node 6 :GRAPH DEBUG: DB Querying from 0 to 119 2025-04-17T18:27:55.493721Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.493817Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.493845Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.493873Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.493900Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.493926Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.493949Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.493975Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494003Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494027Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494050Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494077Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494103Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494128Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494155Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494210Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494232Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494275Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494300Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494325Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494349Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494375Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494400Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494424Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494449Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494473Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494515Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494542Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494568Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 
2025-04-17T18:27:55.494593Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494617Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494640Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494665Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494689Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494716Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494741Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494765Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494787Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494815Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494847Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494872Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494896Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494923Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494946Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494972Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.494997Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495021Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495045Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495069Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495093Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495119Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495146Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495169Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495195Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495218Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495242Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495266Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495288Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495314Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495338Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-17T18:27:55.495373Z node 6 :GRAPH DEBUG: SHARD TTxGetMetric::Complete 2025-04-17T18:27:55.495433Z node 6 :GRAPH TRACE: SHARD TxGetMetrics returned 60 points for request 3 2025-04-17T18:27:55.495627Z node 6 :GRAPH TRACE: SVC TEvMetricsResult 3 2025-04-17T18:27:55.495692Z node 6 :GRAPH TRACE: SVC TEvMetricsResult found request 3 resending to [6:575:2504] |97.7%| [TS] {RESULT} ydb/core/graph/ut/unittest >> DataCleanup::ForceDataCleanup [GOOD] >> DataCleanup::ForceDataCleanupWithoutCompaction >> BlobDepot::BasicPutAndGet >> test.py::test_local [GOOD] >> Splitter::CritSimple [GOOD] >> Coordinator::ReadStepSubscribe >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1; >> test_example.py::TestExample::test_example [GOOD] >> test_example.py::TestExample::test_example2 [GOOD] >> QueryActorTest::SimpleQuery >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] |97.7%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> BlobDepot::BasicPutAndGet [GOOD] >> BlobDepot::TestBlockedEvGetRequest >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> TabletService_ChangeSchema::Basics >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> StatisticsScan::RunScanOnShard [GOOD] >> TestFormatHandler::ClientError [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> MediatorTest::MultipleTablets [GOOD] >> BlobDepot::TestBlockedEvGetRequest [GOOD] >> DataShardReplication::SimpleApplyChanges [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> GraphShard::NormalizeAndDownsample1 [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction >> TestFormatHandler::ClientErrorWithEmptyFilter >> MediatorTest::TabletAckBeforePlanComplete >> BlobDepot::BasicRange >> GraphShard::NormalizeAndDownsample2 [GOOD] >> GraphShard::NormalizeAndDownsample3 [GOOD] >> GraphShard::NormalizeAndDownsample4 [GOOD] >> GraphShard::NormalizeAndDownsample5 [GOOD] >> GraphShard::NormalizeAndDownsample6 [GOOD] >> GraphShard::CheckHistogramToPercentileConversions [GOOD] >> GraphShard::CreateGraphShard >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> DataShardReplication::SplitMergeChanges >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> BlobDepot::BasicRange [GOOD] >> BlobDepot::BasicDiscover >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> DataCleanup::ForceDataCleanupWithoutCompaction [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> GraphShard::CreateGraphShard [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> BlobDepot::BasicDiscover [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> DataCleanup::MultipleDataCleanups >> DataShardStats::HistogramStatsCorrect >> QueryActorTest::SimpleQuery [GOOD] >> BlobDepot::BasicBlock >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> QueryActorTest::Rollback >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: 2025-04-17T18:27:56.869918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:56.870504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:56.870556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00068f/r3tmp/tmpbGb7fy/pdisk_1.dat 2025-04-17T18:27:57.432116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:57.486901Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:57.534729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:57.538383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:57.552842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:57.654729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:58.007652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:58.007831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:58.007935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:58.017497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:27:58.177026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:27:58.240321Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:59.053602Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g3wxk9xx79ry5dr1zvq72, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjVhNzEyNjMtNjVjOWVlZDEtOTBiMzU2YTAtNGUxNzYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] Test command err: 2 2 |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-17T18:28:00.651957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:28:00.652065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:28:00.652117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:28:00.652156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:28:00.652877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:28:00.652931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:28:00.653024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:28:00.653123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:28:00.654300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:28:00.742348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:28:00.742407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:00.758589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-17T18:28:00.759026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:28:00.759199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:28:00.769987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:28:00.770327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:28:00.773690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:28:00.774059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:28:00.780478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:28:00.792398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:28:00.792502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:28:00.793344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:28:00.793412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:28:00.793462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:28:00.793604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.800802Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-17T18:28:00.924952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:28:00.926021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.927608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:28:00.928821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:28:00.928892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.932077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:28:00.932230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:28:00.932414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.932529Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:28:00.932563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:28:00.932631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:28:00.934642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.934694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:28:00.934733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:28:00.936601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.936637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.936677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:28:00.936708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:28:00.941037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:28:00.943110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:28:00.943925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:28:00.945000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:28:00.945148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:28:00.945203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:28:00.946447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:28:00.946523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:28:00.946750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:28:00.946834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:28:00.949494Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:28:00.949539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:28:00.949709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:28:00.949749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:28:00.950108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:28:00.950158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:28:00.950311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:28:00.950347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:28:00.950385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:28:00.950411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:28:00.950463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-17T18:28:00.950539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:28:00.950582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-17T18:28:00.950616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-17T18:28:00.950680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-17T18:28:00.950714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-17T18:28:00.950744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-17T18:28:00.952575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:28:00.952690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-17T18:28:00.952733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8944 2025-04-17T18:28:01.164996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvInitTenantSchemeShardResult from schemeshard tablet: 72075186234409546 shardIdx: 72057594046678944:2 at schemeshard: 72057594046678944 2025-04-17T18:28:01.169163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186234409548, partId: 1 2025-04-17T18:28:01.169340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:1, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2025-04-17T18:28:01.169383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:1 HandleReply TEvConfigureStatus operationId:102:1 at schemeshard:72057594046678944 2025-04-17T18:28:01.169417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-17T18:28:01.169448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 3 -> 128 2025-04-17T18:28:01.172413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.173057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.173965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.174091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.174127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.174193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:1, at tablet# 72057594046678944 2025-04-17T18:28:01.174251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 2/2 2025-04-17T18:28:01.174378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:28:01.176138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-17T18:28:01.176284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-17T18:28:01.176611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.176737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-17T18:28:01.176780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2025-04-17T18:28:01.176832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-04-17T18:28:01.177109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 128 -> 240 2025-04-17T18:28:01.177174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-04-17T18:28:01.177302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-17T18:28:01.177391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:409:2374], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-17T18:28:01.179388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:28:01.179433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-17T18:28:01.179609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:28:01.179639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-17T18:28:01.179953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.180001Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2025-04-17T18:28:01.180074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 240 -> 240 2025-04-17T18:28:01.180559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:28:01.180646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-17T18:28:01.180678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-17T18:28:01.180721Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-17T18:28:01.180755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-17T18:28:01.180818Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2025-04-17T18:28:01.182832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-17T18:28:01.182882Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:1 ProgressState 2025-04-17T18:28:01.182949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-17T18:28:01.182974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-17T18:28:01.183026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-17T18:28:01.183056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-17T18:28:01.183081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-04-17T18:28:01.183116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-17T18:28:01.183166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-17T18:28:01.183199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-17T18:28:01.183313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-17T18:28:01.183349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-17T18:28:01.183371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-17T18:28:01.183434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-17T18:28:01.184615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-17T18:28:01.186322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-17T18:28:01.186386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-17T18:28:01.186860Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-17T18:28:01.186963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-17T18:28:01.187000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:568:2497] TestWaitNotification: OK eventTxId 102 2025-04-17T18:28:01.188437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-17T18:28:01.188649Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/db1" took 242us result status StatusSuccess 2025-04-17T18:28:01.190025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true 
CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> BlobDepot::BasicBlock [GOOD] >> BlobDepot::BasicCollectGarbage >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] |97.8%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |97.8%| [TM] {RESULT} ydb/tests/functional/serializable/py3test |97.8%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> TRangeTreap::Sequential [GOOD] >> TRangeTreap::Random >> MediatorTest::TabletAckWhenDead >> TestJsonParser::Simple1 >> TRangeTreap::Random [GOOD] >> TestJsonParser::Simple1 [GOOD] >> TestJsonParser::Simple2 >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> TestJsonParser::Simple2 [GOOD] >> CoordinatorTests::Route >> TestJsonParser::Simple3 >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName >> BlobDepot::BasicCollectGarbage [GOOD] >> BlobDepot::VerifiedRandom >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 >> TestJsonParser::Simple3 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> TestJsonParser::Simple4 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> LeaderElectionTests::Test1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/locks/ut_range_treap/unittest >> TRangeTreap::Random [GOOD] Test command err: NOTE: building treap of size 1000000 got height 46 and needed 1000000 ops (1000000 inserts 0 updates 0 deletes) and 29658751 comparisons (29.658751 per op) NOTE: building treap of size 8382 got height 34 and needed 11392 ops (9774 inserts 226 updates 1392 deletes) and 235273 comparisons (20.65247542 per op) Checking point 
9400 ... found 2146 ranges, needed 7637 comparisons (3.558713886 per range) Checking point 5912 ... found 2183 ranges, needed 8019 comparisons (3.67338525 per range) Checking point 960 ... found 753 ranges, needed 1841 comparisons (2.444887118 per range) Checking point 7226 ... found 2155 ranges, needed 8043 comparisons (3.73225058 per range) Checking point 2980 ... found 1820 ranges, needed 5466 comparisons (3.003296703 per range) Checking point 999 ... found 778 ranges, needed 1906 comparisons (2.449871465 per range) Checking point 7543 ... found 2165 ranges, needed 8061 comparisons (3.723325635 per range) Checking point 8898 ... found 2117 ranges, needed 7694 comparisons (3.634388285 per range) Checking point 2782 ... found 1739 ranges, needed 5145 comparisons (2.958596895 per range) Checking point 754 ... found 609 ranges, needed 1452 comparisons (2.384236453 per range) >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> TestJsonParser::Simple4 [GOOD] >> TestJsonParser::LargeStrings >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch |97.8%| [TM] {RESULT} ydb/core/tx/locks/ut_range_treap/unittest >> TestJsonParser::LargeStrings [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> TestJsonParser::ManyValues >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots >> TestJsonParser::ManyValues [GOOD] >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit >> test.py::test[solomon-BadDownsamplingAggregation-] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> TestJsonParser::MissingFields >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> DataCleanup::MultipleDataCleanups [GOOD] >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> BlobDepot::VerifiedRandom [GOOD] >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> QueryActorTest::Commit [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> MediatorTest::TabletAckWhenDead [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> DataCleanup::MultipleDataCleanupsWithOldGenerations >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead >> BlobDepot::LoadPutAndRead >> TestJsonParser::MissingFields [GOOD] >> QueryActorTest::StreamQuery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> MediatorTest::PlanStepAckToReconnectedMediator >> TTxDataShardLocalKMeansScan::BadRequest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> TestJsonParser::NestedTypes >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> TestJsonParser::NestedTypes [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> TestJsonParser::SimpleBooleans >> TestJsonParser::SimpleBooleans [GOOD] >> TestJsonParser::ManyBatches >> TestJsonParser::ManyBatches [GOOD] >> TestJsonParser::LittleBatches >> TestJsonParser::LittleBatches [GOOD] >> TestJsonParser::MissingFieldsValidation >> TestJsonParser::MissingFieldsValidation [GOOD] >> TestJsonParser::TypeKindsValidation >> TestJsonParser::TypeKindsValidation [GOOD] >> TestJsonParser::NumbersValidation >> TestJsonParser::NumbersValidation [GOOD] >> TestJsonParser::StringsValidation >> TestJsonParser::StringsValidation [GOOD] >> TestJsonParser::NestedJsonValidation >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> TestJsonParser::BoolsValidation [GOOD] >> TestJsonParser::JsonStructureValidation >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] Test command err: 2025-04-17T18:27:48.133531Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-17T18:27:48.133597Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-17T18:27:48.133668Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-17T18:27:48.133697Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-17T18:27:48.133738Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-04-17T18:27:48.133812Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-17T18:27:48.136793Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-17T18:27:48.150610Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 
GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... 
PDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820025 2025-04-17T18:28:05.578668Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820025 2025-04-17T18:28:05.578739Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820025 2025-04-17T18:28:05.578783Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-17T18:28:05.589422Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-17T18:28:05.589492Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-17T18:28:05.589593Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-04-17T18:28:05.589633Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-04-17T18:28:05.589662Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-04-17T18:28:05.589694Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-04-17T18:28:05.589715Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-04-17T18:28:05.589730Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-04-17T18:28:05.589758Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# 
[7:8388350642965737326:1634689637] 2025-04-17T18:28:05.589788Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-04-17T18:28:05.590361Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.591011Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.591240Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.591391Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.591569Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: 
"/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.591688Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.591852Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.591976Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-17T18:28:05.592058Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-17T18:28:05.592467Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 3:15, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-17T18:28:05.592523Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 4:19, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-17T18:28:05.592560Z 
node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 7:31, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-17T18:28:05.592594Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-17T18:28:05.592854Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 140 2025-04-17T18:28:05.592886Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-17T18:28:05.603201Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-04-17T18:28:05.603298Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-17T18:28:05.603527Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 141 2025-04-17T18:28:05.603602Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 3:15 2025-04-17T18:28:05.603675Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 4:19 2025-04-17T18:28:05.603701Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 7:31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2025-04-17T18:23:44.313614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:528:2414], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:23:44.314116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:23:44.314283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0014fe/r3tmp/tmpDowtwm/pdisk_1.dat 2025-04-17T18:23:44.841830Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27822, node 1 2025-04-17T18:23:45.164467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:23:45.164524Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:23:45.164554Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:23:45.164759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:23:45.168957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:23:45.278462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:45.278605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:45.294526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23226 2025-04-17T18:23:45.895960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:23:49.272981Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-17T18:23:49.312765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:49.312887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:49.356307Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-17T18:23:49.358530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:49.615858Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.616469Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.617046Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.617542Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.617635Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.617711Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.617793Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.617871Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.617980Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:49.823756Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:49.823877Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:49.843687Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:50.026826Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:50.077342Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:23:50.077447Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:23:50.132856Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:23:50.134449Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:23:50.134704Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:23:50.134776Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:23:50.134865Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:23:50.134924Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:23:50.134982Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:23:50.135040Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:23:50.135494Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:23:50.182625Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:23:50.182770Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1953:2602], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:23:50.195639Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1978:2618] 2025-04-17T18:23:50.201561Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:2002:2628] 2025-04-17T18:23:50.202004Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2002:2628], schemeshard id = 72075186224037897 2025-04-17T18:23:50.271037Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2025-04-17T18:23:50.300336Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:23:50.300394Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:23:50.300462Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2025-04-17T18:23:50.311546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:23:50.318538Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:23:50.318693Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:23:50.531564Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:23:50.697404Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:23:50.851038Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:23:51.910943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T18:23:55.995767Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:23:56.055167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:56.055253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:56.113090Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:23:56.117471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:56.348391Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.349019Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.349553Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.349714Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.349991Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.350082Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.350169Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.350332Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.350453Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:23:56.567434Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:23:56.567550Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:23:56.583401Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:23:56.755459Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:23:56.849409Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-04-17T18:23:56.849557Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-04-17T18:23:56.918427Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-04-17T18:23:56.919802Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-04-17T18:23:56.920021Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-04-17T18:23:56.920105Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-04-17T18:23:56.920175Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:23:56.920248Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:23:56.920303Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:23:56.920363Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-04-17T18:23:56.926390Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-04-17T18:23:57.002887Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-04-17T18:23:57.003053Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3227:2599], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-04-17T18:23:57.010331Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3238:2608] 2025-04-17T18:23:57.013896Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3258:2619] 2 ... Query: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:28:00.974671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12588:5256], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.974778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12597:5261], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.974890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.993417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-04-17T18:28:01.068199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12602:5264], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-17T18:28:01.243835Z node 2 :TX_PROXY ERROR: Actor# [2:12693:5312] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Shared2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:01.267754Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12722:5327]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:28:01.268249Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-17T18:28:01.268352Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12724:5329] 2025-04-17T18:28:01.268431Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12724:5329] 2025-04-17T18:28:01.269083Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:12725:5330] 2025-04-17T18:28:01.269339Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:12725:5330], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-17T18:28:01.269416Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:28:01.269646Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12724:5329], server id = [2:12725:5330], tablet id = 72075186224038895, status = OK 2025-04-17T18:28:01.269809Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:28:01.269883Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12722:5327], StatRequests.size() = 1 2025-04-17T18:28:01.386531Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWIzN2ExNmUtMmIyNDczZGYtY2Q2ODUzMGEtOTY3YzM1OWE=, TxId: 2025-04-17T18:28:01.386599Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWIzN2ExNmUtMmIyNDczZGYtY2Q2ODUzMGEtOTY3YzM1OWE=, TxId: 2025-04-17T18:28:01.387307Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-17T18:28:01.414799Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-17T18:28:01.414859Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:28:01.460507Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-04-17T18:28:01.460562Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-17T18:28:01.562868Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12724:5329], schemeshard count = 1 2025-04-17T18:28:02.135457Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-17T18:28:02.135528Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 231.000000s, at schemeshard: 72075186224037899 2025-04-17T18:28:02.135870Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-17T18:28:02.151543Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-17T18:28:02.473720Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T18:28:02.485896Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:28:02.485957Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:28:02.486016Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-17T18:28:02.486053Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:28:02.486380Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared1 2025-04-17T18:28:02.488685Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:28:02.505808Z node 3 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 3, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-17T18:28:00.000000Z 2025-04-17T18:28:02.506873Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YTliZmFhMTMtNjdmNTA5Mi03ZGM1MWE0NS1iYzhjNGQ4OA==, TxId: 2025-04-17T18:28:02.506924Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YTliZmFhMTMtNjdmNTA5Mi03ZGM1MWE0NS1iYzhjNGQ4OA==, TxId: 2025-04-17T18:28:02.507824Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-17T18:28:02.523316Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-17T18:28:02.523372Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:28:02.620811Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12827:5669]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:28:02.621193Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-17T18:28:02.621229Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12827:5669], StatRequests.size() = 1 2025-04-17T18:28:04.811824Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12910:5703]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:28:04.812340Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-17T18:28:04.812387Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12910:5703], StatRequests.size() = 1 2025-04-17T18:28:05.442204Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038900 2025-04-17T18:28:05.442285Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 222.000000s, at schemeshard: 72075186224038900 2025-04-17T18:28:05.442545Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038900, stats size# 26 2025-04-17T18:28:05.458351Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-04-17T18:28:05.720886Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-17T18:28:05.720949Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-17T18:28:05.720996Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038900, LocalPathId: 2] is data table. 2025-04-17T18:28:05.721034Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038900, LocalPathId: 2] 2025-04-17T18:28:05.721492Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-04-17T18:28:05.724244Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-17T18:28:05.741819Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjQwNWMxYzUtNzgzZDE4M2EtOWEyZmQ4ZGItMWUyOWUyMWM=, TxId: 2025-04-17T18:28:05.741885Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjQwNWMxYzUtNzgzZDE4M2EtOWEyZmQ4ZGItMWUyOWUyMWM=, TxId: 2025-04-17T18:28:05.742572Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-17T18:28:05.759053Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038900, LocalPathId: 2] 2025-04-17T18:28:05.759107Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-17T18:28:06.822545Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-17T18:28:06.822922Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-17T18:28:06.823599Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-17T18:28:06.835706Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T18:28:06.835765Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-17T18:28:06.923291Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:13012:5723]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:28:06.923706Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-17T18:28:06.923754Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:13012:5723], StatRequests.size() = 1 2025-04-17T18:28:06.924785Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:13014:5428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-17T18:28:06.929975Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-17T18:28:06.930636Z node 2 :STATISTICS DEBUG: [72075186224038895] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-17T18:28:06.930691Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-17T18:28:06.931009Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T18:28:06.931105Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:13014:5428], StatRequests.size() = 1 >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> TestJsonParser::JsonStructureValidation [GOOD] >> TestPurecalcFilter::Simple1 |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_example2 [GOOD] |97.8%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |97.8%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test |97.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.8%| [TM] {RESULT} ydb/tests/example/py3test >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> test_result_limits.py::TestResultLimits::test_many_rows >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] >> DataCleanup::MultipleDataCleanupsWithOldGenerations [GOOD] >> DataCleanup::ForceDataCleanupWithRestart >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe >> MediatorTest::WatcherReconnect >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-04-17T18:18:26.802850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T18:18:26.802912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:26.802944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-17T18:18:26.802969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T18:18:26.803004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T18:18:26.803034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T18:18:26.803078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T18:18:26.803133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T18:18:26.803372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:18:26.862753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-17T18:18:26.862801Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-17T18:18:26.871829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:18:26.872605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T18:18:26.872779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-17T18:18:26.881166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T18:18:26.881730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T18:18:26.882250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:26.882419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-17T18:18:26.885073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:26.886511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:26.886570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:26.886796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T18:18:26.886864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:26.886912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T18:18:26.887053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-17T18:18:26.893370Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-17T18:18:26.993318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-17T18:18:26.993517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:26.993702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-17T18:18:26.993912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-17T18:18:26.993961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:26.996212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:26.996324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-17T18:18:26.996518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:26.996606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-17T18:18:26.996653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T18:18:26.996683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T18:18:26.998448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:26.998497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-17T18:18:26.998526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T18:18:26.999995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:27.000051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:27.000095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:27.000163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T18:18:27.003796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:18:27.005286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-17T18:18:27.005434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-17T18:18:27.006388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-17T18:18:27.006509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:18:27.006553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:27.006827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-17T18:18:27.006900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-17T18:18:27.007052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-17T18:18:27.007114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-17T18:18:27.008983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-17T18:18:27.009021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-17T18:18:27.009162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-17T18:18:27.009199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-17T18:18:27.009541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-17T18:18:27.009591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-17T18:18:27.009677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:27.009711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-17T18:18:27.009742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-17T18:18:27.009782Z no ... 
} Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false 
IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:28:11.976497Z node 201 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-17T18:28:11.976758Z node 201 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 273us result status StatusSuccess 2025-04-17T18:28:11.977552Z node 201 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-17T18:28:11.988569Z node 201 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][201:1123:2889] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-17T18:28:11.988685Z node 201 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][201:1070:2889] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-17T18:28:11.988842Z node 201 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][201:1123:2889] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744914491965381 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744914491965381 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744914491965381 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-17T18:28:11.991836Z node 201 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][201:1123:2889] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-17T18:28:11.991937Z node 201 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][201:1070:2889] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> BlobDepot::LoadPutAndRead [GOOD] >> BlobDepot::DecommitPutAndRead >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] |97.8%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> TestPurecalcFilter::Simple1 [GOOD] |97.8%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestPurecalcFilter::Simple2 |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> TDqSolomonWriteActorTest::TestWriteFormat >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> MediatorTest::WatcherReconnect [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> MediatorTest::MultipleSteps >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> test_timeout.py::TestTimeout::test_timeout |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> DataShardReplication::ReplicatedTable-UseSink >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate >> DataCleanup::ForceDataCleanupWithRestart [GOOD] >> DataCleanup::OutReadSetsCleanedAfterCopyTable >> QueryActorTest::StreamQuery [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram |97.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |97.9%| [TS] {RESULT} ydb/tests/library/ut/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> BasicExample::BasicExample >> BlobDepot::DecommitPutAndRead [GOOD] >> BlobDepot::DecommitVerifiedRandom >> MediatorTest::MultipleSteps [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2025-04-17T18:27:58.324720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350619813349072:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:58.325319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004b5/r3tmp/tmp6u1Ocu/pdisk_1.dat 2025-04-17T18:27:58.673189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:58.737651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:58.738284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-17T18:27:58.741171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17866 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:27:58.991577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:59.017236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:59.131266Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-17T18:28:00.935444Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T18:28:00.938014Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:00.943126Z node 1 :KQP_PROXY DEBUG: Request has 18444999159228.608520s seconds to be completed 2025-04-17T18:28:00.955319Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZTY4NjYwMmMtOTRjZjJhYzQtY2IyMzc3YmMtODQ3YWFmNDc=, workerId: [1:7494350628403284217:2314], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T18:28:00.955391Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:00.955563Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T18:28:00.955647Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T18:28:00.955686Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-17T18:28:00.955708Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T18:28:00.955751Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:00.955831Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:00.955889Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:00.956010Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:00.956069Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:00.956099Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:00.958305Z node 1 :KQP_PROXY DEBUG: [TQueryBase] RunDataQuery: SELECT 42 2025-04-17T18:28:00.962432Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZTY4NjYwMmMtOTRjZjJhYzQtY2IyMzc3YmMtODQ3YWFmNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7494350628403284217:2314] 2025-04-17T18:28:00.962486Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7494350628403284219:2358] 2025-04-17T18:28:00.965182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350628403284232:2319], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.965214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350628403284220:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.965303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.972536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:28:00.982749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350628403284234:2320], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:28:01.061092Z node 1 :TX_PROXY ERROR: Actor# [1:7494350632698251581:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:01.730813Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7494350628403284218:2315], selfId: [1:7494350619813349169:2278], source: [1:7494350628403284217:2314] 2025-04-17T18:28:01.731030Z node 1 :KQP_PROXY DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZTY4NjYwMmMtOTRjZjJhYzQtY2IyMzc3YmMtODQ3YWFmNDc=, TxId: 2025-04-17T18:28:01.732628Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZTY4NjYwMmMtOTRjZjJhYzQtY2IyMzc3YmMtODQ3YWFmNDc=, TxId: 2025-04-17T18:28:01.733031Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=ZTY4NjYwMmMtOTRjZjJhYzQtY2IyMzc3YmMtODQ3YWFmNDc=, workerId: [1:7494350628403284217:2314], local sessions count: 0 2025-04-17T18:28:02.294106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350635669405223:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:02.294201Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004b5/r3tmp/tmpOOu5kx/pdisk_1.dat 2025-04-17T18:28:02.404890Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:02.424871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:02.424956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:02.429189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23321 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:28:02.596029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:28:02.601558Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:28:02.604518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:02.634887Z node 2 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-17T18:28:04.988060Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T18:28:04.988798Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:04.989169Z node 2 :KQP_PROXY DEBUG: Request has 18444999159224.562477s seconds to be completed 2025-04-17T18:28:04.991406Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MWQ4YzZjMDYtODFkMGZjODctMmNhNDFkZTYtNWFlMzI5ZDU=, workerId: [2:7494350644259340508:2312], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T18:28:04.991452Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:04.991590Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T18:28:04.991658Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T18:28:04.991709Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T18:28:04.991745Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T18:28:04.991821Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:04.991897Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller th ... etadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004b5/r3tmp/tmpAKH5zG/pdisk_1.dat 2025-04-17T18:28:09.437627Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:09.482539Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:09.482628Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:09.484191Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15771 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T18:28:09.637724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:09.651188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:09.714082Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-17T18:28:12.329834Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T18:28:12.330606Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:12.331655Z node 4 :KQP_PROXY DEBUG: Request has 18444999159217.219984s seconds to be completed 2025-04-17T18:28:12.334068Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=YzA2MjU4OWEtNjA2NTkyZmEtZGU1MDM5NmYtMmQ1NGUzNDE=, workerId: [4:7494350679344748729:2314], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-17T18:28:12.334111Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:12.334324Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T18:28:12.334428Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-17T18:28:12.334477Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-17T18:28:12.334495Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-17T18:28:12.334537Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-17T18:28:12.334632Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:12.334683Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:12.334713Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:12.334738Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:12.334767Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:12.335746Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-17T18:28:12.337094Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-17T18:28:12.339932Z node 4 :KQP_PROXY DEBUG: TraceId: "01js2g4axga60wka8v3bjtz1sk", Created new session, sessionId: ydb://session/3?node_id=4&id=OWI3ZWI1NGUtZDExNmUxNTUtMjhmYWYwOWItNGI0MzEyNA==, workerId: [4:7494350679344748755:2315], database: /dc-1, longSession: 0, local sessions count: 2 2025-04-17T18:28:12.340190Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2g4axga60wka8v3bjtz1sk, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=OWI3ZWI1NGUtZDExNmUxNTUtMjhmYWYwOWItNGI0MzEyNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [4:7494350679344748755:2315] 2025-04-17T18:28:12.340226Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7494350679344748756:2359] 2025-04-17T18:28:12.341076Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494350679344748757:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:12.341080Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494350679344748765:2319], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:12.341138Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:12.368711Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:28:12.379793Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494350679344748771:2320], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:28:12.467193Z node 4 :TX_PROXY ERROR: Actor# [4:7494350679344748824:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:13.395283Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:14.351696Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7494350666459846158:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:14.351825Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:28:16.476311Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-17T18:28:16.480014Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Cancel stream request 2025-04-17T18:28:16.480105Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YzA2MjU4OWEtNjA2NTkyZmEtZGU1MDM5NmYtMmQ1NGUzNDE=, TxId: 2025-04-17T18:28:16.481424Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-17T18:28:16.628370Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-17T18:28:16.798776Z node 4 :KQP_PROXY DEBUG: Request has 18444999159212.752881s seconds to be completed 2025-04-17T18:28:16.801024Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=Y2FlNDIwOTMtNzVmYjg3YjctMTAxNjAwNy1kYzljYTg1Zg==, workerId: [4:7494350696524618068:2339], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-17T18:28:16.801214Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-17T18:28:16.801273Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-17T18:28:16.801724Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-17T18:28:16.801766Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=YzA2MjU4OWEtNjA2NTkyZmEtZGU1MDM5NmYtMmQ1NGUzNDE=, workerId: [4:7494350679344748729:2314], local sessions count: 2 2025-04-17T18:28:16.801826Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-17T18:28:16.803379Z node 4 :KQP_PROXY DEBUG: TraceId: "01js2g4f91afe5scb2q9k2y4he", Created new session, sessionId: ydb://session/3?node_id=4&id=Y2JlYjEyNWEtNGZmMWUyODYtYjAwYTU5OTUtYmMzNTg3Y2Q=, workerId: [4:7494350696524618072:2340], database: /dc-1, longSession: 0, local sessions count: 3 2025-04-17T18:28:16.803570Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01js2g4f91afe5scb2q9k2y4he, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=Y2JlYjEyNWEtNGZmMWUyODYtYjAwYTU5OTUtYmMzNTg3Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7494350696524618072:2340] 2025-04-17T18:28:16.803601Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7494350696524618073:2427] 2025-04-17T18:28:16.873993Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744914496865, txId: 281474976715663] shutting down 2025-04-17T18:28:16.874413Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-17T18:28:16.876903Z node 4 :KQP_PROXY DEBUG: TraceId: "01js2g4f91afe5scb2q9k2y4he", Forwarded response to sender actor, requestId: 5, sender: [4:7494350696524618070:2425], selfId: [4:7494350666459846382:2270], source: [4:7494350696524618072:2340] 2025-04-17T18:28:16.877401Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=Y2JlYjEyNWEtNGZmMWUyODYtYjAwYTU5OTUtYmMzNTg3Y2Q=, workerId: [4:7494350696524618072:2340], local sessions count: 2 2025-04-17T18:28:16.878522Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-17T18:28:16.878700Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-17T18:28:16.878780Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=Y2FlNDIwOTMtNzVmYjg3YjctMTAxNjAwNy1kYzljYTg1Zg==, TxId: 2025-04-17T18:28:16.879277Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=Y2FlNDIwOTMtNzVmYjg3YjctMTAxNjAwNy1kYzljYTg1Zg==, workerId: [4:7494350696524618068:2339], local sessions count: 1 |97.9%| [TS] {RESULT} ydb/library/query_actor/ut/unittest >> TestPurecalcFilter::Simple2 [GOOD] >> MediatorTest::WatchesBeforeFirstStep |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> TestPurecalcFilter::ManyValues |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> test_commit.py::TestCommit::test_commit >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag >> test.py::test_wait_for_cluster_ready [GOOD] >> test.py::test_counter >> SequenceProxy::Basics >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> GenericProviderLookupActor::Lookup >> test_commit.py::TestCommit::test_commit [GOOD] >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupWithErrors [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> test.py::test_counter [GOOD] ------- [TS] {asan, default-linux-x86_64, release} 
ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2025-04-17 18:28:22.024 INFO ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E4F0BB40) [generic] yql_generic_lookup_actor.cpp:151: New generic proivider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2025-04-17 18:28:22.047 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E4F0BB40) [generic] yql_generic_lookup_actor.cpp:288: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { 
type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } ite ... left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: 
UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2025-04-17 18:28:22.159 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E1884640) [generic] yql_generic_lookup_actor.cpp:319: ActorId=[2:7494350723138521184:2051] Got TListSplitsStreamIterator 2025-04-17 18:28:22.160 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E1884640) [generic] yql_generic_lookup_actor.cpp:196: ActorId=[2:7494350723138521184:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2025-04-17 18:28:22.160 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E1884640) [generic] yql_generic_lookup_actor.cpp:229: ActorId=[2:7494350723138521184:2051] Got ReadSplitsStreamIterator from Connector 2025-04-17 18:28:22.160 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E1884640) [generic] yql_generic_lookup_actor.cpp:341: ActorId=[2:7494350723138521184:2051] Got DataChunk 2025-04-17 18:28:22.161 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E1884640) [generic] yql_generic_lookup_actor.cpp:352: ActorId=[2:7494350723138521184:2051] Got EOF 2025-04-17 18:28:22.161 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=766949, tid=0x00007F60E1884640) [generic] yql_generic_lookup_actor.cpp:402: Sending lookup results for 3 keys |97.9%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest >> test.py::test_viewer_nodes >> test.py::test_viewer_nodes [GOOD] >> test.py::test_storage_groups [GOOD] >> test.py::test_viewer_sysinfo [GOOD] >> test.py::test_viewer_vdiskinfo [GOOD] >> test.py::test_viewer_pdiskinfo [GOOD] >> test.py::test_viewer_bsgroupinfo [GOOD] >> test.py::test_viewer_tabletinfo |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test.py::test_viewer_tabletinfo [GOOD] >> test.py::test_viewer_describe >> BasicExample::BasicExample [GOOD] >> DataShardCompaction::CompactBorrowed >> test.py::test_viewer_describe [GOOD] >> test.py::test_viewer_cluster [GOOD] >> DataShardBackgroundCompaction::ShouldCompact >> test.py::test_viewer_tenantinfo [GOOD] >> test.py::test_viewer_tenantinfo_db >> test.py::test_viewer_tenantinfo_db [GOOD] >> test.py::test_viewer_healthcheck [GOOD] >> test.py::test_viewer_acl >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> test.py::test_viewer_acl [GOOD] >> test.py::test_viewer_autocomplete >> test.py::test_viewer_autocomplete [GOOD] >> test.py::test_viewer_check_access >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> test.py::test_viewer_check_access [GOOD] >> test.py::test_viewer_query >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> MediatorTest::RebootTargetTablets >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> BlobDepot::DecommitVerifiedRandom [GOOD] >> KeyValueGRPCService::SimpleAcquireLock >> NodeWardenDsProxyConfigRetrieval::Disconnect >> TGRpcRateLimiterTest::CreateResource >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery >> test.py::test_viewer_query [GOOD] >> test.py::test_viewer_query_issue_13757 >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration >> test.py::test_viewer_query_issue_13757 [GOOD] >> test.py::test_viewer_query_issue_13945 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> BlobDepot::DecommitVerifiedRandom [GOOD] Test command err: Mersenne random seed 4033667418 RandomSeed# 229323814099879945 Mersenne random seed 2356191655 Mersenne random seed 3726195599 
Mersenne random seed 4153412504 Mersenne random seed 260427438 2025-04-17T18:28:02.332724Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.332902Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.332973Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.333037Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.333098Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.333161Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.333240Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.333304Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.333646Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [51f66e1b0b9165b7] Result# TEvPutResult {Id# [15:1:1:0:1:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-17T18:28:02.334991Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.335187Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.335259Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.335319Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.335377Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.335453Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.335511Z 8 00h00m25.012048s 
:BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.335573Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.354962Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355201Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355275Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355352Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355412Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355471Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355529Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355587Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-17T18:28:02.355857Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [fda48b8b24a34408] Result# TEvPutResult {Id# [16:2:2:0:2:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Mersenne random seed 1304984032 Read over the barrier, blob id# [15:1:1:0:1:100:0] Read over the barrier, blob id# [15:1:2:0:1:100:0] 2025-04-17T18:28:03.527163Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-17T18:28:03.527477Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-17T18:28:03.527563Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-17T18:28:03.527651Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing 
key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-17T18:28:03.527723Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-17T18:28:03.527776Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-17T18:28:03.527830Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-17T18:28:03.527884Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 Put over the barrier, blob id# [15:1:1:0:99:100:0] Put over the barrier, blob id# [15:1:3:0:99:100:0] 2025-04-17T18:28:03.561064Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-17T18:28:03.561437Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-17T18:28:03.561537Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-17T18:28:03.561619Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-17T18:28:03.561701Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-17T18:28:03.561800Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-17T18:28:03.561877Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-17T18:28:03.561953Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 Read over the barrier, blob id# [15:1:5:0:1:100:0] Read over the barrier, blob id# [15:1:6:0:1:100:0] Read over the barrier, blob id# [15:1:19:0:1:100:0] Read over the barrier, blob id# [15:2:1:0:1:100:0] Read over the barrier, blob id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:1:17:0:1:100:0] TEvRange returned collected blob with id# [15:1:19:0:1:100:0] TEvRange returned collected blob 
with id# [15:2:1:0:1:100:0] TEvRange returned collected blob with id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:2:3:0:1:100:0] TEvRange returned collected blob with id# [15:2:4:0:1:100:0] TEvRange returned collected blob with id# [15:2:5:0:1:100:0] TEvRange returned collected blob with id# [15:2:6:0:1:100:0] Read over the barrier, blob id# [100:1:3:0:1:100:0] Read over the barrier, blob id# [100:1:5:0:1:100:0] Read over the barrier, blob id# [100:1:6:0:1:100:0] Read over the barrier, blob id# [100:2:1:0:1:100:0] Read over the barrier, blob id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:3:0:1:100:0] TEvRange returned collected blob with id# [100:2:4:0:1:100:0] TEvRange returned collected blob with id# [100:2:5:0:1:100:0] TEvRange returned collected blob with id# [100:2:6:0:1:100:0] Mersenne random seed 2627307960 Read over the barrier, blob id# [101:1:1:0:16445664:427:0] Read over the barrier, blob id# [101:1:1:0:16445664:427:0] Read over the barrier, blob id# [101:1:1:0:16445664:427:0] TEvRange returned collected blob with id# [100:1:1:2:10297652:547:0] TEvRange returned collected blob wit ... kId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 28 5 hard] barrier# 4:3 Read over the barrier, blob id# [17:1:3:1:5969941:307:0] Read over the barrier, blob id# [17:2:8:0:3010522:712:0] Read over the barrier, blob id# [17:1:8:2:1778696:412:0] Read over the barrier, blob id# [17:1:2:0:1410897:625:0] Read over the barrier, blob id# [17:1:8:1:368055:271:0] Read over the barrier, blob id# [17:1:3:1:5969941:307:0] Read over the barrier, blob id# [17:1:3:1:16053105:93:0] Read over the barrier, blob id# [17:2:8:1:4206249:798:0] Read over the barrier, blob id# [17:1:1:1:1476986:573:0] Read over the barrier, blob id# [17:1:8:1:6234543:786:0] Read over the barrier, blob id# [15:4:2:2:3716965:996:0] 2025-04-17T18:28:22.839475Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 2025-04-17T18:28:22.839833Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 2025-04-17T18:28:22.839969Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 2025-04-17T18:28:22.840128Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 2025-04-17T18:28:22.840283Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 2025-04-17T18:28:22.840420Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 2025-04-17T18:28:22.840567Z 7 
00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 2025-04-17T18:28:22.840724Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 3 hard] barrier# 4:1 new key# [16 2 33 0 hard] barrier# 4:0 TEvRange returned collected blob with id# [15:4:2:2:3716965:996:0] TEvRange returned collected blob with id# [15:4:2:2:3716965:996:0] TEvRange returned collected blob with id# [15:4:2:2:3716965:996:0] Read over the barrier, blob id# [17:2:10:0:2424445:798:0] Read over the barrier, blob id# [17:2:9:0:4446455:693:0] Read over the barrier, blob id# [17:1:3:1:5969941:307:0] Read over the barrier, blob id# [17:1:3:1:16053105:93:0] Read over the barrier, blob id# [17:1:8:2:1321453:107:0] Read over the barrier, blob id# [17:1:3:1:16053105:93:0] Read over the barrier, blob id# [17:1:3:1:5969941:307:0] Read over the barrier, blob id# [17:2:8:1:4206249:798:0] Read over the barrier, blob id# [17:2:8:0:3010522:712:0] Read over the barrier, blob id# [17:1:3:1:5969941:307:0] Read over the barrier, blob id# [17:1:8:1:368055:271:0] Read over the barrier, blob id# [17:1:8:2:798468:104:0] Read over the barrier, blob id# [17:1:8:2:798468:104:0] Read over the barrier, blob id# [16:2:3:0:1686895:596:0] Read over the barrier, blob id# [16:2:3:0:1686895:596:0] Read over the barrier, blob id# [15:2:2:0:4903095:892:0] 2025-04-17T18:28:23.339427Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 soft] barrier# 1:3 2025-04-17T18:28:23.340175Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 soft] barrier# 1:3 2025-04-17T18:28:23.340362Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 soft] barrier# 1:3 2025-04-17T18:28:23.340517Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 soft] barrier# 1:3 2025-04-17T18:28:23.340661Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 soft] barrier# 1:3 2025-04-17T18:28:23.340797Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 soft] barrier# 1:3 2025-04-17T18:28:23.340948Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 soft] barrier# 1:3 2025-04-17T18:28:23.341098Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 28 0 soft] barrier# 1:4 new key# [17 0 29 0 
soft] barrier# 1:3 Read over the barrier, blob id# [16:2:3:0:1686895:596:0] 2025-04-17T18:28:23.446544Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 2025-04-17T18:28:23.446933Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 2025-04-17T18:28:23.447095Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 2025-04-17T18:28:23.447253Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 2025-04-17T18:28:23.447418Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 2025-04-17T18:28:23.447578Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 2025-04-17T18:28:23.447751Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 2025-04-17T18:28:23.447915Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 25 7 hard] barrier# 3:0 new key# [17 0 29 4 hard] barrier# 2:0 Read over the barrier, blob id# [15:4:2:1:15054876:810:0] 2025-04-17T18:28:23.617194Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.618260Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.618461Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.618622Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.618780Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.618947Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 
1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.619100Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.619252Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 28 2 hard] barrier# 5:0 new key# [17 1 31 0 hard] barrier# 4:3 2025-04-17T18:28:23.625670Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 2025-04-17T18:28:23.626520Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 2025-04-17T18:28:23.626682Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 2025-04-17T18:28:23.626889Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 2025-04-17T18:28:23.627030Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 2025-04-17T18:28:23.627168Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 2025-04-17T18:28:23.627305Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 2025-04-17T18:28:23.627442Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 0 39 1 soft] existing barrier# 4:0 new barrier# 5:0 Read over the barrier, blob id# [16:1:1:0:8832542:974:0] |97.9%| [TS] {RESULT} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> DataCleanup::OutReadSetsCleanedAfterCopyTable [GOOD] >> DataCleanup::BorrowerDataCleanedAfterCopyTable |97.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestPurecalcFilter::ManyValues [GOOD] >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] >> test.py::test_viewer_query_issue_13945 [GOOD] >> test.py::test_pqrb_tablet >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianRackRatio [GOOD] >> TSentinelTests::Smoke >> test.py::test_pqrb_tablet [GOOD] >> test.py::test_viewer_nodes_issue_14992 >> test.py::test_viewer_nodes_issue_14992 [GOOD] >> test.py::test_operations_list [GOOD] >> test.py::test_operations_list_page [GOOD] >> TestPurecalcFilter::NullValues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> test.py::test_operations_list_page_bad [GOOD] Test command err: 2025-04-17T18:27:38.116149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:38.116794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:38.116867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004a9/r3tmp/tmp8TiAfC/pdisk_1.dat 2025-04-17T18:27:38.610903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:38.654861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:38.696154Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:27:38.697824Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:27:38.698069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:38.698672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:38.710975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:38.801047Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:27:38.801129Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:27:38.802665Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:27:38.907213Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:27:38.907310Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:27:38.907843Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:27:38.907928Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:27:38.908211Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:27:38.908422Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-04-17T18:27:38.908518Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:27:38.912024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:38.913016Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:27:38.914411Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:27:38.914473Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:27:38.955748Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:27:38.956712Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:27:38.957149Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:27:38.957386Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:38.999698Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:27:39.000742Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:39.000875Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:39.002514Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:27:39.002582Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:27:39.002656Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:27:39.004552Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:39.004697Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:39.004782Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:27:39.015533Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:39.062451Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:27:39.063986Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:39.064194Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:27:39.064236Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:39.064271Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:27:39.064305Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:39.064524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:39.065480Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:39.066930Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:27:39.067028Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:27:39.067123Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:39.067158Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:39.067215Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:27:39.067257Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:27:39.067284Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:27:39.067309Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:39.067364Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:39.067490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:39.067526Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:39.067565Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:27:39.069214Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:27:39.069261Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:27:39.069377Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:39.069780Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:27:39.069850Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:27:39.069979Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:27:39.070099Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:27:39.070137Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:27:39.070203Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:27:39.070247Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:27:39.070552Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:27:39.070592Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:27:39.070637Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:27:39.070668Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:27:39.070739Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:27:39.070766Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:27:39.070794Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:27:39.070822Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:27:39.070847Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:27:39.072556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:27:39.072633Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025- ... ACE: 72075186224037889 readContinue iterator# {[8:1096:2864], 0} sends rowCount# 0, bytes# 0, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:28:23.287880Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 read iterator# {[8:1096:2864], 0} finished in ReadContinue 2025-04-17T18:28:23.287986Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037889:1:7} Tx{3, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:23.288031Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037889:1:7} Tx{3, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:23.289113Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1096:2864], Recipient [8:1088:2861]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:28:23.289168Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-17T18:28:23.289253Z node 8 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2025-04-17T18:28:23.471280Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] Handle TEvExecuteKqpTransaction 2025-04-17T18:28:23.471368Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-17T18:28:23.472687Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877761, Sender [8:1115:2879], Recipient [8:1085:2859]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:23.472761Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:23.472840Z node 8 :TX_DATASHARD DEBUG: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1113:2878], serverId# [8:1115:2879], sessionId# [0:0:0] 2025-04-17T18:28:23.472982Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2g4nm14cc41xh5499sb9hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=NGZiNDdkZmMtODhhMGI2MmUtOGE5YjcxODEtY2E3YzE2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:28:23.474754Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553215, Sender [8:1119:2880], Recipient [8:1085:2859]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-17T18:28:23.474797Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-04-17T18:28:23.474924Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-17T18:28:23.474999Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:23.475121Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-17T18:28:23.475205Z node 8 :TX_DATASHARD DEBUG: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2025-04-17T18:28:23.475794Z node 8 :TX_DATASHARD DEBUG: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2025-04-17T18:28:23.476143Z node 8 :TX_DATASHARD DEBUG: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2025-04-17T18:28:23.476236Z node 8 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to repeatable v1500/18446744073709551615 2025-04-17T18:28:23.476311Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-04-17T18:28:23.476402Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T18:28:23.476450Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-04-17T18:28:23.476497Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-17T18:28:23.476537Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-17T18:28:23.476573Z node 8 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-04-17T18:28:23.476626Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T18:28:23.476648Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-17T18:28:23.476666Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-04-17T18:28:23.476683Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-17T18:28:23.476801Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T18:28:23.477054Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Restart 2025-04-17T18:28:23.477083Z node 8 
:TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2025-04-17T18:28:23.477170Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:23.477241Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} touch new 0b, 65b lo load (65b in total), 0b requested for data (4194304b in total) 2025-04-17T18:28:23.477312Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2025-04-17T18:28:23.477390Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} requests PageCollection [72075186224037888:1:28:1:12288:190:0] 65 bytes, 1 pages: [0 4] 2025-04-17T18:28:23.477466Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, 65b, pages {1 wait, 1 load}, freshly touched 1 pages 2025-04-17T18:28:23.477631Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:28:1:12288:190:0] ok OK}, category 1 2025-04-17T18:28:23.477791Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-17T18:28:23.477820Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-17T18:28:23.477888Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-17T18:28:23.478048Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[8:1119:2880], 0} after executionsCount# 2 2025-04-17T18:28:23.478107Z node 8 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[8:1119:2880], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-17T18:28:23.478243Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T18:28:23.478269Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-04-17T18:28:23.478289Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T18:28:23.478309Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-04-17T18:28:23.478347Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-17T18:28:23.478371Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T18:28:23.478398Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-04-17T18:28:23.478441Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-17T18:28:23.478523Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:23.478598Z node 8 
:TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2025-04-17T18:28:23.478646Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-17T18:28:23.478795Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553217, Sender [8:1085:2859], Recipient [8:1085:2859]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-17T18:28:23.478830Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2025-04-17T18:28:23.478916Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2025-04-17T18:28:23.478986Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:23.479050Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[8:1119:2880], 0}, firstUnprocessedQuery# 0 2025-04-17T18:28:23.479120Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[8:1119:2880], 0}, FirstUnprocessedQuery# 0 2025-04-17T18:28:23.479213Z node 8 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[8:1119:2880], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:28:23.479269Z node 8 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[8:1119:2880], 0} finished in ReadContinue 2025-04-17T18:28:23.479353Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:23.479428Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:23.480124Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1119:2880], Recipient [8:1085:2859]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:28:23.480162Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-17T18:28:23.480215Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } >> test.py::test_scheme_directory >> test.py::test_scheme_directory [GOOD] >> test.py::test_topic_data ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2025-04-17T18:28:24.622438Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-17T18:28:24.651976Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/6873/0007b8/r3tmp/tmpLZj29y/static.dat" PDiskGuid: 4441025489091481058 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4441025489091481058 } VDiskKind: Default } Groups { GroupID: 
0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4441025489091481058 } } } } AvailabilityDomains: 0 } 2025-04-17T18:28:24.654074Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/6873/0007b8/r3tmp/tmpLZj29y/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-17T18:28:24.661651Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T18:28:24.665676Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 4441025489091481058 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:28:24.667138Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 4441025489091481058 2025-04-17T18:28:24.667198Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-17T18:28:24.668874Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-04-17T18:28:24.668931Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:28:24.669039Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-17T18:28:24.669572Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-17T18:28:24.689557Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-17T18:28:24.694527Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-17T18:28:24.696791Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:28:24.696862Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:28:24.720615Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:28:24.720684Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-17T18:28:24.725585Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-17T18:28:24.739604Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-17T18:28:24.746070Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-17T18:28:24.749590Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/6873/0007b8/r3tmp/tmpLZj29y/static.dat" PDiskGuid: 4441025489091481058 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4441025489091481058 } VDiskKind: Default } Groups { 
GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4441025489091481058 } } } } AvailabilityDomains: 0 } 2025-04-17T18:28:24.750244Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-17T18:28:24.821134Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-17T18:28:24.821188Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-17T18:28:24.822233Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "n\301\357~\304\254\343\332\206^\370T\017\224N\231\207MP\210" } 2025-04-17T18:28:24.823215Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-17T18:28:24.823279Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 2146435075 Sender# [1:46:2089] SessionId# [0:0:0] Cookie# 0 2025-04-17T18:28:24.823332Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.102500s 2025-04-17T18:28:24.823411Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-17T18:28:24.823439Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-04-17T18:28:24.840353Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-17T18:28:24.847655Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-17T18:28:24.893212Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-17T18:28:24.919039Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-17T18:28:24.922075Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-17T18:28:24.922963Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:28:24.927344Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-17T18:28:24.931059Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2042} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-17T18:28:24.931950Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-04-17T18:28:24.932263Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-17T18:28:24.950112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-17T18:28:24.950460Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-17T18:28:24.951354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-17T18:28:24.952281Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 
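[Note on the BSCTXM03/BSCTXM04 lines above: at controller boot, schema-migration transactions are drained from a queue strictly in order, each one executed and completed before the next is taken. The following standalone C++ sketch is an editorial illustration of that execute/complete-per-step queue pattern only; the names (MigrationStep, RunMigrationQueue) are hypothetical and this is not the YDB implementation.]

// Simplified model of the "Execute tx from queue" / "Complete tx from queue"
// pairing seen in the log. Hypothetical names; illustration only.
#include <deque>
#include <functional>
#include <iostream>
#include <string>

struct MigrationStep {
    std::string Name;               // e.g. "TTxTrimUnusedSlots"
    std::function<void()> Execute;  // the migration work itself
};

void RunMigrationQueue(std::deque<MigrationStep> queue) {
    // Steps run strictly in order: each is executed, then completed,
    // before the next step is dequeued.
    while (!queue.empty()) {
        MigrationStep step = std::move(queue.front());
        queue.pop_front();
        std::cout << "Execute tx from queue Type# " << step.Name << "\n";
        step.Execute();
        std::cout << "Complete tx from queue Type# " << step.Name << "\n";
    }
}

int main() {
    RunMigrationQueue({
        {"TTxTrimUnusedSlots", [] {}},
        {"TTxUpdateSchemaVersion", [] {}},
        {"TTxGenerateInstanceId", [] {}},
    });
}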
2025-04-17T18:28:24.952426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-17T18:28:24.952624Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:28:24.982926Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-17T18:28:24.983065Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:28:24.999285Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-17T18:28:24.999420Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:28:24.999500Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-17T18:28:24.999579Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:28:24.999692Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-17T18:28:24.999743Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:28:24.999782Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-17T18:28:24.999839Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:28:25.015164Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-17T18:28:25.015298Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:28:25.028316Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-17T18:28:25.028475Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-17T18:28:25.031970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-17T18:28:25.032028Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2173} LoadFinished 2025-04-17T18:28:25.044391Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-17T18:28:25.044476Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed Pipe connected clientId# 
[1:28:2075] 2025-04-17T18:28:25.044869Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:28:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:28:2075] 2025-04-17T18:28:25.045600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 4441025489091481058 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-17T18:28:25.046897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/6873/0007b8/r3tmp/tmpLZj29y/static.dat" PDiskConfig { ExpectedSlotCount: 2 } } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-17T18:28:25.049100Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# SectorMap:/home/runner/.ya/build/build_root/6873/0007b8/r3tmp/tmpLZj29y/static.dat 2025-04-17T18:28:25.063050Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: Initial Replicated: false DiskSpace: Green } } 2025-04-17T18:28:25.063254Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-17T18:28:25.063365Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } 2025-04-17T18:28:25.063646Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 4441025489091481058 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-04-17T18:28:25.063738Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 4441025489091481058 Status: READY OnlyPhantomsRemain: false } } 2025-04-17T18:28:25.064590Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-17T18:28:25.066725Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-17T18:28:25.066886Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-17T18:28:25.067162Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-17T18:28:25.067248Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-17T18:28:25.082641Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] === Waiting 
for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:28:2075] 2025-04-17T18:28:25.083236Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:28:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:28:2075] 2025-04-17T18:28:25.083326Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2159] ControllerId# 72057594037932033 2025-04-17T18:28:25.083355Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-17T18:28:25.083765Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2025-04-17T18:28:25.083801Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2147483648 2025-04-17T18:28:25.083828Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:265} RequestGroupConfig GroupId# 2147483648 2025-04-17T18:28:25.084060Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 Pipe connected clientId# [1:139:2159] 2025-04-17T18:28:25.084280Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2159] ServerId# [1:142:2161] TabletId# 72057594037932033 PipeClientId# [1:139:2159] 2025-04-17T18:28:25.084523Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 4441025489091481058 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-17T18:28:25.084755Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-17T18:28:25.084893Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:142:2161] RecipientRewrite# [1:90:2122] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2025-04-17T18:28:25.084948Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2025-04-17T18:28:25.085049Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-17T18:28:25.085143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 2025-04-17T18:28:25.105431Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/6873/0007b8/r3tmp/tmpLZj29y/static.dat" PDiskGuid: 4441025489091481058 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 
4441025489091481058 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4441025489091481058 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "c370b168-b34c8ca4-e4be1bf9-a10079af" Comprehensive: true AvailDomain: 0 } 2025-04-17T18:28:25.105635Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/6873/0007b8/r3tmp/tmpLZj29y/static.dat" PDiskGuid: 4441025489091481058 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4441025489091481058 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4441025489091481058 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-04-17T18:28:25.105854Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 4441025489091481058 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-17T18:28:25.106791Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 4441025489091481058 2025-04-17T18:28:25.107038Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-04-17T18:28:25.111916Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4441025489091481058 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-04-17T18:28:25.112048Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4441025489091481058 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-04-17T18:28:25.119216Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 2025-04-17T18:28:25.120913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 
4441025489091481058 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-04-17T18:28:25.124237Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-04-17T18:28:25.129485Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2025-04-17T18:28:25.140621Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-17T18:28:25.141800Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 4441025489091481058 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-04-17T18:28:25.143211Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-17T18:28:25.143894Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 4441025489091481058 Status: READY OnlyPhantomsRemain: false } } |97.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_commit.py::TestCommit::test_commit [GOOD] |97.9%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> SequenceProxy::DropRecreate [GOOD] >> TMemoryController::Counters >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> test.py::test[solomon-BadDownsamplingInterval-] >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> TTxDataShardLocalKMeansScan::BuildToBuild >> MediatorTest::RebootTargetTablets [GOOD] >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] >> InflyTest::AddMessage [GOOD] >> InflyTest::DeleteMessage [GOOD] >> InflyTest::ChangeMesageVisibility [GOOD] >> TCreateAndDropViewTest::DropNonexistingView >> InflyTest::ReceiveMessages [GOOD] >> InflyTest::DeleteReceivedMessage [GOOD] >> MessageDelayStatsTest::All [GOOD] >> MessageDelayStatsTest::BigTimeDiff [GOOD] >> MessageDelayStatsTest::MaxMessageDelay [GOOD] >> Metering::BillingRecords >> TSentinelTests::Smoke [GOOD] >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> TSentinelTests::PDiskUnknownState >> MediatorTest::ResendSubset >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |97.9%| [TM] {RESULT} 
ydb/core/tx/datashard/ut_followers/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2025-04-17T18:28:22.638661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:28:22.638710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:22.740461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:28:23.590908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-17T18:28:23.948091Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:28:23.948559Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00060d/r3tmp/tmpL12r7o/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:28:23.949103Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00060d/r3tmp/tmpL12r7o/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00060d/r3tmp/tmpL12r7o/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8527452942097617412 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:28:24.819630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-17T18:28:24.819735Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:24.867406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-17T18:28:25.466009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-17T18:28:25.748628Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-17T18:28:25.749153Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6873/00060d/r3tmp/tmpBo3AFD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-17T18:28:25.749360Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6873/00060d/r3tmp/tmpBo3AFD/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6873/00060d/r3tmp/tmpBo3AFD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16971901148544549475 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-17T18:28:25.910311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944 2025-04-17T18:28:26.247836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944 >> KeyValueGRPCService::SimpleAcquireLock [GOOD] |98.0%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test >> KeyValueGRPCService::SimpleExecuteTransaction |98.0%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest |98.0%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> Coordinator::RestoreDomainConfiguration [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> TGRpcRateLimiterTest::CreateResource [GOOD] >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> 
test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> TSentinelTests::PDiskUnknownState [GOOD] >> TGRpcRateLimiterTest::UpdateResource >> test.py::test_topic_data [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> TSentinelTests::PDiskErrorState >> test.py::test_transfer_describe >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> test.py::test_transfer_describe [GOOD] |98.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> TestPurecalcFilter::NullValues [GOOD] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> KqpTpch::Query01 >> TIndexProcesorTests::TestCreateIndexProcessor >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> TestPurecalcFilter::PartialPush >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|98.0%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> DataCleanup::BorrowerDataCleanedAfterCopyTable [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::test_transfer_describe [GOOD] >> MediatorTest::ResendSubset [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] |98.0%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> KeyValueGRPCService::SimpleExecuteTransaction [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage >> MediatorTest::ResendNotSubset >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_data_cleanup/unittest >> DataCleanup::BorrowerDataCleanedAfterCopyTable [GOOD] Test command err: 2025-04-17T18:27:52.556389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:52.556940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:52.557013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000448/r3tmp/tmpnB9GUn/pdisk_1.dat 2025-04-17T18:27:53.118630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:53.174110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:53.227607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:53.228351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:53.243166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:53.340538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:53.824912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:53.825035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:53.825122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:53.835036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:27:54.026964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:27:54.099193Z node 1 :TX_PROXY ERROR: Actor# [1:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:27:55.609030Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g3rtxdeb68kq7ct111fg9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2IxYTkzZjktZjkzZmMwZDUtZmY5MmJlNmEtMjVhN2IyYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:27:59.720616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:325:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:59.720934Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:59.721128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000448/r3tmp/tmpfGoIJh/pdisk_1.dat 2025-04-17T18:28:00.005039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:00.033565Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:00.070092Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:00.070260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:00.081890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:00.162775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:00.498490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.498608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:772:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.498686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:00.502529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:28:00.626892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:777:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:28:00.661590Z node 2 :TX_PROXY ERROR: Actor# [2:850:2695] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:00.925809Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g3zbg0f0k83qzy44642fh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTQ3NDBjMTctYmU5NjhiZmEtYzY5ZjBmZTktMTJmYTZhZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:04.944883Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:04.945083Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:04.945226Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000448/r3tmp/tmpHahdCa/pdisk_1.dat 2025-04-17T18:28:05.236874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:05.268137Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:05.307556Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:05.307705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:05.319331Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:05.400377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:05.743909Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:05.744031Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:05.744148Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:05.749188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:28:05.913594Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:28:05.948700Z node 3 :TX_PROXY ERROR: Actor# [3:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:06.257689Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g44fefe4s5g4h5rpth6gc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjFlNjk2M2QtNjg4YWI0NTQtOGFhOWE4ZjctYzcwY2ViZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:10.176573Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:301:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:10.176913Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:10.177125Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya ... RVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:325:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:15.439024Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:15.439254Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000448/r3tmp/tmpv5wXtV/pdisk_1.dat 2025-04-17T18:28:15.716538Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:15.746022Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:15.783078Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:15.783219Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:15.794787Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:15.875686Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:16.249612Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:16.249722Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:16.249804Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:16.255134Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:28:16.420678Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:28:16.455607Z node 5 :TX_PROXY ERROR: Actor# [5:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:16.785218Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g4eqqafcb177r8wvr3vy7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OTAwODk0NWEtZTU1NDk2YzQtMzgxNjE1OGQtMTAwZmIzMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:21.515007Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:326:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:21.515204Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:21.515365Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000448/r3tmp/tmptA8N20/pdisk_1.dat 2025-04-17T18:28:21.796511Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:21.825719Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:21.864034Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:21.864201Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:21.875864Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:21.987606Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:22.661703Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:879:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:22.661842Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:889:2734], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:22.661934Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:22.667951Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:28:22.819423Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:893:2737], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:28:22.857401Z node 6 :TX_PROXY ERROR: Actor# [6:954:2779] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:23.214380Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2g4n03062gpx8kjq9kmxsy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZjdlNzdjOGMtZmRkZGQ3OTQtYTE2MGU3Mi1iY2EzMGMwYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:23.553677Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2g4njdd2ah5xspbpc5g4v1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZGI4NDY4MzctODk3ZDZlODktOWFiODEyNjItYTdjNWYwNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:28.675619Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:299:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:28.675868Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:28.675996Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000448/r3tmp/tmp59Q6PF/pdisk_1.dat 2025-04-17T18:28:29.000593Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:29.036793Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:29.080003Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:29.080133Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:29.095854Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:29.182562Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:29.919878Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:878:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:29.919994Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:889:2734], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:29.920089Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:29.926821Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-17T18:28:30.081426Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:892:2737], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:28:30.118566Z node 7 :TX_PROXY ERROR: Actor# [7:953:2779] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:30.450020Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2g4w2x4ykhq4ge9hkjxkh8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2EwOTAxOGYtZjc1MTE3Y2MtYTcyNDY4NGItOTEwNzUyYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:30.831000Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2g4wm830t0kbfpbnzdexpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjU5OWQ3MDQtNjE4YTBhNWEtYzEwOTlkYjEtNGRiMGEwYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:31.382576Z node 7 :TX_DATASHARD WARN: DataCleanup of tablet# 72075186224037888: has borrowed parts, requested from [7:595:2520] |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_data_cleanup/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> Metering::BillingRecords [GOOD] >> Metering::MockedNetClassifierOnly >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopic2 >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> TestPurecalcFilter::PartialPush [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> MediatorTest::ResendNotSubset [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKey |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> TestPurecalcFilter::CompilationValidation >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> MediatorTest::OneCoordinatorResendTxNotLost >> 
TDqPqRdReadActorTests::SessionError >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit >> TDqPqRdReadActorTests::SessionError [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> TDqPqRdReadActorTests::ReadWithFreeSpace >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] >> test_ctas.py::TestYtCtas::test_simple_ctast >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: 2025-04-17T18:27:58.029042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:58.029682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:58.029740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004ca/r3tmp/tmpT5Fgou/pdisk_1.dat 2025-04-17T18:27:58.554644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:58.605996Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:58.647166Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:27:58.648857Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:27:58.649157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:58.649936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:58.662772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:58.752250Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:27:58.752349Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:27:58.753370Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:27:58.865450Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:27:58.865563Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:27:58.866291Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:27:58.866418Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:27:58.866770Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:27:58.867004Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-04-17T18:27:58.867114Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:27:58.870386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:58.871463Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:27:58.872182Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:27:58.872264Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:27:58.929579Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:27:58.930800Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:27:58.932442Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:27:58.932712Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:58.982551Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:27:58.983345Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:58.983448Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:58.985216Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:27:58.985313Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:27:58.985372Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:27:58.987029Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:58.987194Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:58.987293Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:27:58.998218Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:59.039346Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:27:59.040522Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:59.040697Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:27:59.040742Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:59.040797Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:27:59.040843Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:59.041081Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient 
[1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:59.041137Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:59.042809Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:27:59.042919Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:27:59.043039Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:59.043087Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:59.043240Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:27:59.043313Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:27:59.043354Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:27:59.043388Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:59.043447Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:59.043606Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:59.043648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:59.043696Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:27:59.045244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:27:59.045297Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:27:59.045421Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:59.045775Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:27:59.045854Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:27:59.045946Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:27:59.046012Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:27:59.046054Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:27:59.046097Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:27:59.046146Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:27:59.046472Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:27:59.046514Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:27:59.046558Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:27:59.046603Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:27:59.046659Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:27:59.046687Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:27:59.046734Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:27:59.046767Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:27:59.046809Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:27:59.048322Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:27:59.048426Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 720751862 ... 8:28:37.111352Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:871:2702], Recipient [8:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-17T18:28:37.111594Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:28:37.111701Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-04-17T18:28:37.111826Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:28:37.111885Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:28:37.111937Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:28:37.111985Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:28:37.112046Z node 8 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-17T18:28:37.112104Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:28:37.112134Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:28:37.112164Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:28:37.112191Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:28:37.112325Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:28:37.112654Z node 8 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715660, 
counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:28:37.112729Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-04-17T18:28:37.112787Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:871:2702], 0} after executionsCount# 1 2025-04-17T18:28:37.112856Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:871:2702], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:28:37.112954Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:871:2702], 0} finished in read 2025-04-17T18:28:37.113058Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:28:37.113089Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:28:37.113122Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:28:37.113152Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:28:37.113206Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:28:37.113231Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:28:37.113265Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-17T18:28:37.113323Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:28:37.113456Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:28:37.113675Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:665:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_SUBSCRIBED 2025-04-17T18:28:37.115049Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:871:2702], Recipient [8:665:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:28:37.115120Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } 2025-04-17T18:28:37.118912Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [8:875:2706], Recipient [8:665:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:37.119017Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:37.119086Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:874:2705], serverId# [8:875:2706], sessionId# [0:0:0] 2025-04-17T18:28:37.119411Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549570, Sender [8:873:2704], Recipient [8:665:2570]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2025-04-17T18:28:37.119611Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# 
v1500/18446744073709551615 ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-17T18:28:37.119772Z node 8 :TX_DATASHARD TRACE: Lock 281474976715660 marked broken at v{min} 2025-04-17T18:28:37.131778Z node 8 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-17T18:28:37.132672Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [8:24:2071], Recipient [8:665:2570]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 1501} 2025-04-17T18:28:37.132737Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-04-17T18:28:37.132782Z node 8 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-17T18:28:37.132844Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:28:37.304197Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2g534dce7xgmks22eq26jw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ODU5OTI1YjYtNDNiNjYxOGMtZDdlMDA4MDQtODJhMTNiOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:37.306201Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:898:2723], Recipient [8:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-17T18:28:37.306433Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-17T18:28:37.306527Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-17T18:28:37.306634Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:28:37.306692Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-17T18:28:37.306740Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:28:37.306784Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:28:37.306837Z node 8 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-17T18:28:37.306886Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:28:37.306912Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:28:37.306937Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-17T18:28:37.306959Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-17T18:28:37.307097Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-17T18:28:37.307398Z 
node 8 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715660, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:28:37.307464Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-04-17T18:28:37.307525Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:898:2723], 0} after executionsCount# 1 2025-04-17T18:28:37.307586Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:898:2723], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-17T18:28:37.307680Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:898:2723], 0} finished in read 2025-04-17T18:28:37.307762Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:28:37.307791Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-17T18:28:37.307819Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:28:37.307844Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:28:37.307891Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-17T18:28:37.307913Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:28:37.307946Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-17T18:28:37.307991Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-17T18:28:37.308108Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-17T18:28:37.308712Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:898:2723], Recipient [8:665:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-17T18:28:37.308779Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-17T18:28:37.311601Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:665:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_NOT_FOUND { items { uint32_value: 1 } items { uint32_value: 11 } } >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest >> ReadUpdateWrite::Load >> RangeOps::Intersection [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable [GOOD] >> TCreateAndDropViewTest::DropSameViewTwice >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated >> 
test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] |98.0%| [TM] {RESULT} ydb/tests/fq/common/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second 
[(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest >> TGenerateQueueIdTests::MakeQueueIdBasic [GOOD] >> TParseParamsTests::CreateUser [GOOD] >> TParseParamsTests::ChangeMessageVisibilityBatchRequest [GOOD] >> TParseParamsTests::DeleteMessageBatchRequest [GOOD] >> TParseParamsTests::MessageBody [GOOD] >> TParseParamsTests::SendMessageBatchRequest [GOOD] >> TParseParamsTests::DeleteQueueBatchRequest [GOOD] >> TParseParamsTests::PurgeQueueBatchRequest [GOOD] >> TParseParamsTests::GetQueueAttributesBatchRequest [GOOD] >> TParseParamsTests::UnnumberedAttribute [GOOD] >> TParseParamsTests::UnnumberedAttributeName [GOOD] >> TParseParamsTests::FailsOnInvalidDeduplicationId [GOOD] >> TParseParamsTests::FailsOnInvalidGroupId [GOOD] >> TParseParamsTests::FailsOnInvalidReceiveRequestAttemptId [GOOD] >> TParseParamsTests::FailsOnInvalidMaxNumberOfMessages [GOOD] >> TParseParamsTests::FailsOnInvalidWaitTime [GOOD] >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged >> KeyValueGRPCService::SimpleRenameUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKey >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] |98.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/ut/unittest >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] |98.0%| [TS] {RESULT} ydb/core/ymq/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> DataShardCompaction::CompactBorrowed [GOOD] >> 
DataShardCompaction::CompactBorrowedTxStatus >> TestPurecalcFilter::CompilationValidation [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits >> TestRawParser::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: 2025-04-17T18:28:01.825068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:01.825606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:01.825673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmp3fhUA3/pdisk_1.dat 2025-04-17T18:28:02.335270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:02.386106Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:02.431303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:02.432160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:02.444671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema 2025-04-17T18:28:06.459641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:06.459856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:06.459979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmpy8w5zp/pdisk_1.dat 2025-04-17T18:28:06.706078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:06.731936Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:06.767771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:06.767889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:06.779308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) 2025-04-17T18:28:09.998861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:09.999041Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:09.999190Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmp7SMFDr/pdisk_1.dat 2025-04-17T18:28:10.276480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:10.349724Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:10.388332Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:10.388442Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:10.400156Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:13.688121Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:13.688500Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:13.688619Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmpzVdj61/pdisk_1.dat 2025-04-17T18:28:14.022432Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:14.051259Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:14.088854Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:14.089022Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:14.100642Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:17.525731Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:17.526062Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:17.526313Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmp6KC4yT/pdisk_1.dat 2025-04-17T18:28:17.812216Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:17.844125Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:17.880870Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:17.880995Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:17.892343Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:21.426540Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:325:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:21.426681Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:21.426792Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmpBtMuqV/pdisk_1.dat 2025-04-17T18:28:21.699785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:21.729763Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:21.767148Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:21.767301Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:21.778800Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:25.874470Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:169:2215], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:25.874658Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:25.874933Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmp5qWuwW/pdisk_1.dat 2025-04-17T18:28:26.217043Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:26.257914Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:26.296712Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:26.296844Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:26.308333Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:30.412701Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:30.412921Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:30.413060Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmpxsJQWo/pdisk_1.dat 2025-04-17T18:28:30.848912Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:30.887874Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:30.930522Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:30.930683Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:30.942263Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:35.013121Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:35.013411Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:35.013616Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmpqP76ua/pdisk_1.dat 2025-04-17T18:28:35.318892Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:35.352408Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:35.390702Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:35.390873Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:35.402490Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 2025-04-17T18:28:35.536512Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:39.820131Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:103:2149], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:39.820302Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:39.820393Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0006b9/r3tmp/tmpjBozR8/pdisk_1.dat 2025-04-17T18:28:40.115600Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:40.157107Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:40.201056Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:40.201270Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:40.213774Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... restarting tablet 72057594046644480 (admin token) 2025-04-17T18:28:40.632003Z node 10 :IMPORT WARN: Table profiles were not loaded >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected |98.1%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> ConfigGRPCService::ReplaceConfig >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: 2025-04-17T18:28:20.411193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:20.411723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:20.411833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000420/r3tmp/tmp3uMF0D/pdisk_1.dat 2025-04-17T18:28:20.921195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:20.976386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:21.028606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:21.029801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:21.042546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:21.148339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:21.636791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:771:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:21.636912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:21.637011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:21.643920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:28:21.807659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:785:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:28:21.888999Z node 1 :TX_PROXY ERROR: Actor# [1:859:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:22.808958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g4kzy41ygzqry5tbpjz3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRlYTYxZTItYjEyMjdjLWE0MmY4YzM0LWU4OTY3MThi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:23.043779Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2g4n8t6a38e8x66cvczhw4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZmMTc5MTAtNDhjN2E5MjEtMWRjYTQ1N2ItMzkxNTA0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:23.189615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2g4ndkevwk2x7cydv50xz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdmMzg5MjctNTNkN2M5ZTgtZTgzOWU5ZDMtZmYyYTI2N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:23.448503Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2g4nhz7x6mjrket8a94qgp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA4MWQwMjMtMjFiYWQ5OGUtYzU0NDJmOTgtNWUxYjZiY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-17T18:28:27.008307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:27.008657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:27.008869Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000420/r3tmp/tmpCfdjxx/pdisk_1.dat 2025-04-17T18:28:27.314638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:27.343509Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:27.382780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:27.382935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:27.394541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:27.484276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:27.853840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:822:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:27.853929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:832:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:27.854286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:27.859827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:28:28.052724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:836:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:28:28.087864Z node 2 :TX_PROXY ERROR: Actor# [2:914:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:28.749776Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g4t2b06xv4cxqdrmbwy6z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTU4YTc1ZjgtZmJjY2RiYTUtOWE1YWQ5MTktMmQ1NDA3ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:28.779516Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2g4t2b06xv4cxqdrmbwy6z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTU4YTc1ZjgtZmJjY2RiYTUtOWE1YWQ5MTktMmQ1NDA3ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:28.793729Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2g4t2b06xv4cxqdrmbwy6z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTU4YTc1ZjgtZmJjY2RiYTUtOWE1YWQ5MTktMmQ1NDA3ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:29.175169Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2g4v0wdab6ss01ay372v08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDY2YzBmYjYtZDc5MTE0ZWYtNDRmMDdjYzMtNTRhMTdiOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:29.222265Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2g4v0wdab6ss01ay372v08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDY2YzBmYjYtZDc5MTE0ZWYtNDRmMDdjYzMtNTRhMTdiOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:29.235124Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2g4v0wdab6ss01ay372v08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDY2YzBmYjYtZDc5MTE0ZWYtNDRmMDdjYzMtNTRhMTdiOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:29.239905Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2g4v0wdab6ss01ay372v08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDY2YzBmYjYtZDc5MTE0ZWYtNDRmMDdjYzMtNTRhMTdiOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:29.743974Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2g4veh0gpr8zn35g31tyj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEyZWVmODItZTYxNjZjM2MtZTczZTA0MDMtNjIwM2I0N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:29.786658Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2g4veh0gpr8zn35g31tyj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEyZWVmODItZTYxNjZjM2MtZTczZTA0MDMtNjIwM2I0N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:29.793112Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2g4veh0gpr8zn35g31tyj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEyZWVmODItZTYxNjZjM2MtZTczZTA0MDMtNjIwM2I0N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2 ... xvvfzv9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UwYWNjNzgtOTFlNzVhNmYtMWJjZTFlYS0zOWZjNGE0OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-17T18:28:30.100661Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2g4w4j6tb7gafxmd1yptbt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjQ2N2RjNzgtY2JhMTdlMzctZmRkMmM1YmYtODVjNTFmNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-17T18:28:33.761393Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:33.761573Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:33.761747Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000420/r3tmp/tmpbwbjJr/pdisk_1.dat 2025-04-17T18:28:34.043711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:34.072575Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:34.109237Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:34.109373Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:34.120855Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:34.218836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:34.538083Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:34.538250Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:780:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:34.538332Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:34.544133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:28:34.707280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:28:34.742384Z node 3 :TX_PROXY ERROR: Actor# [3:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:34.855819Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2g50k8bzf9sjevda3hmbrx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjE3MDQwMjYtOWY0NzU4NTctMjlkNDI4ZDItNzIwODUwNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:35.057077Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2g50z9anfva9pbz4zkngdq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjk1MTM2OWQtYTA5M2JlNzctYWM0NzRiMjEtYjY4NTI3Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } } 2025-04-17T18:28:35.242230Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2g513tah2z718r5chpwjph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmRhYjUyZWQtN2NiZGM4ODMtNzkxZGM0OC1hNWI3YmEzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:35.461019Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2g51ag0esnxpr233s09y0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmMwNmJiOGMtZjIxMzRiYzctZTIzYzM3NDAtZjY1YTEwZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } 2025-04-17T18:28:35.676973Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2g51gbeg7nhjedz217nrt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTczYzNmN2MtY2Y2OThlMDYtYWI2YzA5LWZmOWRlNTU5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:35.864612Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2g51r0a6y55qwjt49enjts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzY3M2E0ZGMtZTIwYzM5ZjYtNjEyNzg5NWItMWM0OGMwOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } 2025-04-17T18:28:39.700563Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:39.700945Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:39.701073Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000420/r3tmp/tmpPOIMYU/pdisk_1.dat 2025-04-17T18:28:40.012878Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:40.045838Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:40.083064Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:40.083206Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:40.094909Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:40.192482Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:40.528339Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:771:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:40.528447Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:781:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:40.528531Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:40.534956Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T18:28:40.708243Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:785:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T18:28:40.745665Z node 4 :TX_PROXY ERROR: Actor# [4:859:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:40.833197Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:869:2702], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2025-04-17T18:28:40.835541Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Yjc4ZGZiZjQtZjEwZWZlMTAtMWY4ZjdjMC0xZTljOTg1Mg==, ActorId: [4:768:2639], ActorState: ExecuteState, TraceId: 01js2g56ee2k24bhmyrdsjjmnv, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-17T18:28:40.874304Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:891:2718], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2025-04-17T18:28:40.876598Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTQ1MGUzYmUtZWFlMmY1NDYtNTRjNmVmNDUtZWI0OWQ5MjY=, ActorId: [4:883:2710], ActorState: ExecuteState, TraceId: 01js2g56r78wq4p67g2n6n0vw7, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |98.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource >> TestRawParser::Simple [GOOD] >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection >> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: 2025-04-17T18:27:55.184988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:55.185578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:55.185635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00043d/r3tmp/tmp1YXW8F/pdisk_1.dat 2025-04-17T18:27:55.812505Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-17T18:27:55.813156Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-17T18:27:55.813802Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:617:2532] connected 2025-04-17T18:27:55.813940Z node 1 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [1:600:2522] HANDLE TEvMediatorConfiguration Version# 1 2025-04-17T18:27:55.819044Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-17T18:27:55.819309Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-17T18:27:55.819999Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:623:2537] connected 2025-04-17T18:27:55.820100Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-04-17T18:27:55.820221Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:621:2536] bucket# 0 ... 
waiting for watcher to connect (done) 2025-04-17T18:27:55.822368Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-04-17T18:27:55.822438Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-17T18:27:55.822489Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:620:2535] bucket.ActiveActor 2025-04-17T18:27:55.822584Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:621:2536]} 2025-04-17T18:27:55.822678Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2025-04-17T18:27:55.835768Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:627:2541] connected 2025-04-17T18:27:55.835868Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-17T18:27:55.835922Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:625:2539] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 4 Coordinator# 72057594046316545 2025-04-17T18:27:55.836306Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2025-04-17T18:27:55.836365Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 1000 2025-04-17T18:27:55.836435Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2025-04-17T18:27:55.836684Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... waiting for blocked plan step 2025-04-17T18:27:55.855858Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-17T18:27:55.855939Z node 1 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-04-17T18:27:55.856041Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [1:619:2534] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2025-04-17T18:27:55.856139Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2025-04-17T18:27:55.856180Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 marker# M2 2025-04-17T18:27:55.856234Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND Ev to# [1:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2025-04-17T18:27:55.856293Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 1010 2025-04-17T18:27:55.856368Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}} marker# M4 2025-04-17T18:27:55.856546Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-17T18:27:55.857654Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:649:2553] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:27:55.857755Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-17T18:27:55.857819Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for blocked plan step (done) ... waiting for no pending commands 2025-04-17T18:27:55.858368Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-04-17T18:27:55.858426Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:621:2536] bucket# 0 ... waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2025-04-17T18:27:55.858701Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-04-17T18:27:55.858752Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:625:2539] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2025-04-17T18:27:55.858862Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) 2025-04-17T18:27:59.235096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:59.235321Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:59.235448Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00043d/r3tmp/tmpxjrBMn/pdisk_1.dat 2025-04-17T18:27:59.524865Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-17T18:27:59.525349Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-17T18:27:59.525864Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:617:2532] connected 2025-04-17T18:27:59.525963Z node 2 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [2:600:2522] HANDLE TEvMediatorConfiguration Version# 1 2025-04-17T18:27:59.526340Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-17T18:27:59.526470Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-17T18:27:59.526903Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:623:2537] connected 2025-04-17T18:27:59.526975Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-17T18:27:59.527013Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [2:621:2536] bucket# 0 ... waiting for watcher to connect (done) 2025-04-17T18:27:59.527318Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-17T18:27:59.527366Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-17T18:27:59.527408Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [2:620:2535] bucket.ActiveActor 2025-04-17T18:27:59.527468Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:620:2535] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [2:621:2536]} 2025-04-17T18:27:59.527542Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:620:2535] Mediator# 72057594047365120 SEND to# [2:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} ... 
waiting for no pending commands 2025-04-17T18:27:59.540898Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-17T18:27:59.540968Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 2025-04-17T18:27:59.541056Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-17T18:27:59.541083Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 2025-04-17T18:27:59.541128Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-17T18:27:59.541155Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 ... waiti ... ket# 0 ... waiting for no pending commands (done) 2025-04-17T18:28:40.794578Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:665:2561] connected 2025-04-17T18:28:40.794679Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-17T18:28:40.794727Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:663:2559] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316545 2025-04-17T18:28:40.795092Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:668:2564] connected 2025-04-17T18:28:40.795194Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-17T18:28:40.795238Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:666:2562] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316546 2025-04-17T18:28:40.795559Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-17T18:28:40.795607Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-04-17T18:28:40.795674Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-04-17T18:28:40.795705Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-04-17T18:28:40.795809Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [12:620:2535] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:663:2559]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:666:2562]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2025-04-17T18:28:40.795953Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:620:2535] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:663:2559]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 
0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:666:2562]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2025-04-17T18:28:40.796008Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 txid# 2 marker# M2 2025-04-17T18:28:40.796061Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:620:2535] MediatorId# 72057594047365120 SEND Ev to# [12:621:2536] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2025-04-17T18:28:40.796108Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2025-04-17T18:28:40.796142Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:620:2535] MediatorId# 72057594047365120 SEND Ev to# [12:621:2536] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2025-04-17T18:28:40.796181Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:620:2535] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:621:2536] bucket.ActiveActor step# 1010 2025-04-17T18:28:40.796275Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:663:2559]}{TTx Moderator# 0 txid# 2 AckTo# [12:666:2562]}}} marker# M4 2025-04-17T18:28:40.796486Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:663:2559]}{TTx Moderator# 0 txid# 2 AckTo# [12:666:2562]}}} marker# M4 2025-04-17T18:28:40.796603Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-17T18:28:40.797198Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:674:2568] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:28:40.797264Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-17T18:28:40.797306Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-04-17T18:28:40.797346Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-17T18:28:40.797646Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:675:2569] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:28:40.797687Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-04-17T18:28:40.797717Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-04-17T18:28:40.797749Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-17T18:28:40.808799Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:678:2572] connected 2025-04-17T18:28:40.808944Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-17T18:28:40.809003Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:676:2570] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 4 Coordinator# 72057594046316546 2025-04-17T18:28:40.809288Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-04-17T18:28:40.809341Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-04-17T18:28:40.809434Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:620:2535] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2025-04-17T18:28:40.809550Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:620:2535] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:676:2570] 2025-04-17T18:28:40.809603Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2025-04-17T18:28:40.809658Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:620:2535] MediatorId# 72057594047365120 SEND Ev to# [12:621:2536] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2025-04-17T18:28:40.809740Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2025-04-17T18:28:40.809790Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:620:2535] MediatorId# 72057594047365120 SEND Ev to# [12:621:2536] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2025-04-17T18:28:40.809878Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:676:2570]}}} 2025-04-17T18:28:40.809951Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:676:2570]}}} 2025-04-17T18:28:40.823676Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:670:2566] ServerId: [12:674:2568] } 2025-04-17T18:28:40.865373Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:40.889988Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:703:2585] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-17T18:28:40.890122Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-17T18:28:40.890167Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-04-17T18:28:40.890231Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-17T18:28:40.902157Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:672:2567] ServerId: [12:675:2569] } 2025-04-17T18:28:40.918028Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:724:2595] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-17T18:28:40.918142Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-04-17T18:28:40.918230Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-04-17T18:28:40.918289Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:621:2536] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-17T18:28:40.942880Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:40.943007Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:40.954643Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected
|98.1%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest
>> TestRawParser::ManyValues
>> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD]
>> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD]
>> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD]
>> KesusProxyTest::AllocatesResourceWithKesus [GOOD]
>> KesusProxyTest::DisconnectsDuringActiveSession
>> TDqPqRdReadActorTests::CoordinatorChanged [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD]
>> KesusProxyTest::DisconnectsDuringActiveSession [GOOD]
>> KesusProxyTest::AllocatesResourceOffline [GOOD]
>> KesusProxyTest::ConnectsDuringOfflineAllocation
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus
>> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD]
>> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD]
>> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD]
>> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD]
>> TQuoterServiceTest::StaticRateLimiter
>> TestRawParser::ManyValues [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard]
>> TDqPqRdReadActorTests::Backpressure
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery
>> TestRawParser::TypeKindsValidation
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD]
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False]
>> TTestYqlToMiniKQLCompile::CheckResolve
>> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD]
>> DataShardBackgroundCompaction::ShouldNotCompactSecondTime
>> TGRpcRateLimiterTest::DescribeResource [GOOD]
>> TGRpcRateLimiterTest::ListResources
>> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata
>> TTestYqlToMiniKQLCompile::CheckResolve [GOOD]
>> TTestYqlToMiniKQLCompile::OnlyResult
>> TTestYqlToMiniKQLCompile::OnlyResult [GOOD]
>> TTestYqlToMiniKQLCompile::EraseRow
>> TestRawParser::TypeKindsValidation [GOOD]
>> test.py::test[solomon-Downsampling-default.txt] [GOOD]
>> test.py::test[solomon-DownsamplingValidSettings-default.txt]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD]
>> TTestYqlToMiniKQLCompile::EraseRow [GOOD]
>> TTestYqlToMiniKQLCompile::UpdateRow [GOOD]
>> TTestYqlToMiniKQLCompile::SelectRow
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True]
>> TTestYqlToMiniKQLCompile::SelectRow [GOOD]
>> TTestYqlToMiniKQLCompile::SelectRange
>> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD]
>> TTestYqlToMiniKQLCompile::SelectRange [GOOD]
>> TTestYqlToMiniKQLCompile::SimpleCrossShardTx
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery
>> test_http_api.py::TestHttpApi::test_stop_idempotency
>> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery [GOOD]
>> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery
>> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD]
>> TTestYqlToMiniKQLCompile::AcquireLocks
>> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD]
>> TIndexProcesorTests::TestManyMessages
>> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD]
>> TTestYqlToMiniKQLCompile::StaticMapTypeOf
>> TopicSessionTests::TwoSessionsWithOffsets [GOOD]
>> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD]
>> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange
>> Metering::MockedNetClassifierOnly [GOOD]
>> Metering::MockedNetClassifierLabelTransformation
>> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD]
>> TTestYqlToMiniKQLCompile::Extract
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData
>> TTestYqlToMiniKQLCompile::Extract [GOOD]
>> TopicSessionTests::BadDataSessionError
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD]
Test command err:
2025-04-17T18:27:35.451433Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-17T18:27:35.451466Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_0 == "str1"' (filter id: [0:0:0]) 2025-04-17T18:27:35.451501Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_0 == "str1"; 2025-04-17T18:27:35.451538Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-17T18:27:35.463038Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7494350521912080923:2051] 2025-04-17T18:27:38.004705Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7494350521912080923:2051] [id 1]: Started compile request 2025-04-17T18:27:38.649999Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7494350521912080923:2051] [id 1]: Compilation completed for request 2025-04-17T18:27:38.650092Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [1:7494350521912080923:2051] 2025-04-17T18:27:38.650263Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-17T18:27:38.650310Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-17T18:27:38.650359Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters:
Create filter with id [1:0:0] 2025-04-17T18:27:38.650388Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_1 == "str2"' (filter id: [1:0:0]) 2025-04-17T18:27:38.650424Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_1 == "str2"; 2025-04-17T18:27:38.650448Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-04-17T18:27:38.650523Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7494350521912080923:2051] 2025-04-17T18:27:38.650569Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7494350521912080923:2051] [id 2]: Started compile request 2025-04-17T18:27:38.673747Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7494350521912080923:2051] [id 2]: Compilation completed for request 2025-04-17T18:27:38.673879Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 2 from [1:7494350521912080923:2051] 2025-04-17T18:27:38.673931Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-04-17T18:27:38.673949Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-04-17T18:27:38.673975Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [2:0:0] 2025-04-17T18:27:38.674038Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 3 clients, number rows: 3 2025-04-17T18:27:38.674052Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [1:0:0]) 2025-04-17T18:27:38.674061Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-17T18:27:38.676146Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Add 3 rows to client [2:0:0] without filtering 2025-04-17T18:27:38.676170Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [0:0:0]) 2025-04-17T18:27:38.676176Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-17T18:27:38.676224Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Remove filter with id [2:0:0] 2025-04-17T18:27:38.676283Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 1 2025-04-17T18:27:38.676297Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [1:0:0]) 2025-04-17T18:27:38.676302Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:27:38.676328Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [0:0:0]) 2025-04-17T18:27:38.676342Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:27:38.935220Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-17T18:27:38.935263Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a1 = "str1"' (filter id: [0:0:0]) 2025-04-17T18:27:38.935315Z node 2 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 = "str1"; 2025-04-17T18:27:38.935345Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-17T18:27:38.935533Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from 
[2:7494350532842655295:2051] 2025-04-17T18:27:41.573128Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7494350532842655295:2051] [id 1]: Started compile request 2025-04-17T18:27:41.597866Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7494350532842655295:2051] [id 1]: Compilation completed for request 2025-04-17T18:27:41.598019Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [2:7494350532842655295:2051] 2025-04-17T18:27:41.598106Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-17T18:27:41.598142Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-17T18:27:41.598168Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-17T18:27:41.917567Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-17T18:27:41.917597Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a2 ... 50' (filter id: [0:0:0]) 2025-04-17T18:27:41.917614Z node 3 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; 2025-04-17T18:27:41.917647Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-17T18:27:41.917738Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7494350545857132142:2051] 2025-04-17T18:27:44.450265Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [3:7494350545857132142:2051] [id 1]: Started compile request 2025-04-17T18:27:44.478116Z node 3 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [3:7494350545857132142:2051] [id 1]: Compilation failed for request 2025-04-17T18:27:44.478301Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [3:7494350545857132142:2051] 2025-04-17T18:27:44.478361Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-17T18:27:44.478516Z node 3 :FQ_ROW_DISPATCHER ERROR: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:2:36: Error: mismatched input '.' expecting {'$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED, DIGITS} } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; } } 2025-04-17T18:27:47.325958Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [0:0:0] 2025-04-17T18:27:47.326920Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 2 2025-04-17T18:27:47.403908Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:27:47.404061Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 2 columns 2025-04-17T18:27:47.405058Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-17T18:27:47.405085Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_first = "str_first__large__"' (filter id: [0:0:0]) 2025-04-17T18:27:47.405115Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_first = "str_first__large__"; 2025-04-17T18:27:47.405141Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-17T18:27:47.432767Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-17T18:27:47.432801Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-17T18:27:47.433012Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [1:0:0] 2025-04-17T18:27:47.479715Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 3 2025-04-17T18:27:47.558791Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:27:47.558983Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 3 columns 2025-04-17T18:27:47.559007Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-04-1 ... 
71Z node 20 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "hello1", "a2": null, "event": "event1"} 2025-04-17T18:28:08.476097Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:28:08.611595Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:28:08.979376Z node 22 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:28:08.979705Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:08.979743Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": 456, "a2": 42} 2025-04-17T18:28:08.980156Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:08.980206Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "456", "a2": -42} 2025-04-17T18:28:08.980489Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:08.980528Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "str", "a2": 99999} 2025-04-17T18:28:08.980821Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:08.980858Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: {"a1": "456", "a2": 42, "a3": 1.11.1} 2025-04-17T18:28:09.428044Z node 23 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:28:09.428283Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:09.428324Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": "-456"} 2025-04-17T18:28:09.885644Z node 24 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:28:09.885950Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:09.885993Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": {"key": "value"}, "a2": {"key2": "value2"}} 2025-04-17T18:28:09.886546Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:09.886589Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": {"key": "value", "nested": {"a": "b", "c":}}, "a2": "str"} 2025-04-17T18:28:09.886906Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:09.886949Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": {"key" "value"}, "a2": "str"} 2025-04-17T18:28:10.349404Z node 25 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:28:10.349706Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:10.349744Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": true, "a2": false} 2025-04-17T18:28:10.350139Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:10.350195Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "true", "a2": falce} 2025-04-17T18:28:10.815397Z node 26 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-17T18:28:10.815685Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:10.815735Z node 26 :FQ_ROW_DISPATCHER TRACE: 
TJsonParser: Do parsing, first offset: 42, values: {"a1": Yelse} 2025-04-17T18:28:10.816196Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:10.816251Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "st""r"} 2025-04-17T18:28:10.816432Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:10.816477Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "x"} {"a1": "y"} 2025-04-17T18:28:10.816674Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-17T18:28:10.816716Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: { 2025-04-17T18:28:11.201868Z node 27 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-17T18:28:11.202048Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [27:7494350676885453298:2051] 2025-04-17T18:28:15.279124Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7494350676885453298:2051] [id 0]: Started compile request 2025-04-17T18:28:15.334549Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7494350676885453298:2051] [id 0]: Compilation completed for request 2025-04-17T18:28:15.334714Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [27:7494350676885453298:2051] 2025-04-17T18:28:15.334934Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:28:15.340509Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:28:15.811614Z node 28 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-17T18:28:15.811748Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [28:7494350693941751254:2051] 2025-04-17T18:28:19.920685Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7494350693941751254:2051] [id 0]: Started compile request 2025-04-17T18:28:19.968525Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7494350693941751254:2051] [id 0]: Compilation completed for request 2025-04-17T18:28:19.968673Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [28:7494350693941751254:2051] 2025-04-17T18:28:19.968922Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:28:19.969031Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:28:20.519677Z node 29 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-17T18:28:20.519864Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [29:7494350712681856567:2051] 2025-04-17T18:28:25.252153Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7494350712681856567:2051] [id 0]: Started compile request 2025-04-17T18:28:25.326755Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7494350712681856567:2051] [id 0]: Compilation completed for request 2025-04-17T18:28:25.326935Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [29:7494350712681856567:2051] 
2025-04-17T18:28:25.327217Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-17T18:28:25.327397Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-17T18:28:25.327445Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-17T18:28:25.327485Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-17T18:28:25.327530Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-17T18:28:26.047553Z node 30 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 is null; 2025-04-17T18:28:26.047713Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [30:7494350740414054200:2051] 2025-04-17T18:28:30.649638Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7494350740414054200:2051] [id 0]: Started compile request 2025-04-17T18:28:30.704597Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7494350740414054200:2051] [id 0]: Compilation completed for request 2025-04-17T18:28:30.704745Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [30:7494350740414054200:2051] 2025-04-17T18:28:30.705024Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:28:31.382224Z node 31 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 50; 2025-04-17T18:28:31.382465Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [31:7494350759313291430:2051] 2025-04-17T18:28:36.194340Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7494350759313291430:2051] [id 0]: Started compile request 2025-04-17T18:28:36.254702Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7494350759313291430:2051] [id 0]: Compilation completed for request 2025-04-17T18:28:36.254864Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [31:7494350759313291430:2051] 2025-04-17T18:28:36.255127Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-17T18:28:36.838076Z node 32 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 
50; 2025-04-17T18:28:36.838640Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [32:7494350781749030242:2051] 2025-04-17T18:28:41.817035Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [32:7494350781749030242:2051] [id 0]: Started compile request 2025-04-17T18:28:41.832706Z node 32 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [32:7494350781749030242:2051] [id 0]: Compilation failed for request 2025-04-17T18:28:41.832872Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [32:7494350781749030242:2051] 2025-04-17T18:28:43.005399Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 1 messages to parse 2025-04-17T18:28:43.005455Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2025-04-17T18:28:43.721313Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 3 messages to parse 2025-04-17T18:28:43.721366Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1", "a2": "101", "event": "event1"} 2025-04-17T18:28:43.721460Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 43, value: {"a1": "hello1", "a2": "101", "event": "event2"} 2025-04-17T18:28:43.721485Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 44, value: {"a2": "101", "a1": "hello1", "event": "event3"}
|98.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD]
|98.1%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest
|98.1%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest
>> KeyValueGRPCService::SimpleConcatUnexistedKey [GOOD]
>> KeyValueGRPCService::SimpleCopyUnexistedKey
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat]
>> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD]
>> TDqSolomonWriteActorTest::TestCheckpoints
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection
>> TCreateAndDropViewTest::DropSameViewTwice [GOOD]
>> TCreateAndDropViewTest::DropViewIfExists
>> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD]
>> DataShardReassign::AutoReassignOnYellowFlag
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount
>> TQuoterServiceTest::StaticRateLimiter [GOOD]
>> TQuoterServiceTest::StaticMultipleAndResources
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
>> TMemoryController::Config_ConsumerLimits [GOOD]
>> TMemoryController::SharedCache
>> TDqSolomonWriteActorTest::TestCheckpoints [GOOD]
>> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD]
|98.1%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test
>> TIndexProcesorTests::TestManyMessages [GOOD]
>> TIndexProcesorTests::TestOver1000Queues
>> ConfigGRPCService::ReplaceConfig [GOOD]
>> ConfigGRPCService::FetchConfig
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator]
>> TSentinelTests::PDiskErrorState [GOOD]
>> TSentinelTests::PDiskFaultyState
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue]
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD]
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection
>> KqpTpch::Query02 [GOOD]
>> KqpTpch::Query03
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD]
>> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection
>> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges [GOOD]
>> TTxDataShardPrefixKMeansScan::BadRequest
>> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus]
>> Metering::MockedNetClassifierLabelTransformation [GOOD]
>> SHA256Test::SHA256Test [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection
>> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD]
>> test.py::test[solomon-HistResponse-default.txt]
>> TGRpcRateLimiterTest::ListResources [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD]
Test command err:
2025-04-17T18:28:26.549802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:26.550679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:26.550736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000462/r3tmp/tmpgRm0nT/pdisk_1.dat 2025-04-17T18:28:27.233263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:27.288656Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:27.329829Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:28:27.331664Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:28:27.331951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:27.332685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:27.345725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:27.439790Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:28:27.439871Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:28:27.440840Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:28:27.578522Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:28:27.578649Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:28:27.579400Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:28:27.579523Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:28:27.579888Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:28:27.580095Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:28:27.580223Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:28:27.587056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:27.588568Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:28:27.589369Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:28:27.589443Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:28:27.636490Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:28:27.637555Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:28:27.639342Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:28:27.639609Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:28:27.691633Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:28:27.692597Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:28:27.692731Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:28:27.696703Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:28:27.696796Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:28:27.696877Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:28:27.698125Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:28:27.698343Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:28:27.698478Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:28:27.709345Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:28:27.751560Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:28:27.752684Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:28:27.752895Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:28:27.752945Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:28:27.752988Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:28:27.753031Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:28:27.753275Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T18:28:27.753331Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:28:27.754605Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:28:27.754772Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:28:27.754919Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:28:27.755006Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:28:27.755124Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:28:27.755195Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:28:27.755235Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:28:27.755275Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:28:27.755328Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:28:27.755481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:27.755520Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:27.755556Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:28:27.757043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:28:27.757096Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:28:27.757204Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:28:27.757560Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:28:27.757626Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:28:27.757722Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:28:27.757764Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:28:27.757800Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:28:27.757829Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:28:27.757856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:28:27.758059Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:28:27.758099Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:28:27.758134Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:28:27.758238Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T18:28:27.758290Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:28:27.758323Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:28:27.758358Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:28:27.758397Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:28:27.758425Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:28:27.759739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:28:27.759784Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:28:27.770534Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... :50.529904Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:28:50.529943Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:28:50.529986Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:28:50.530056Z node 5 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-17T18:28:50.530119Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:28:50.530148Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:28:50.530173Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T18:28:50.530416Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-04-17T18:28:50.530463Z node 5 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-17T18:28:50.530518Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-17T18:28:50.530686Z node 5 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-04-17T18:28:50.530750Z node 5 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T18:28:50.530818Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:28:50.530851Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T18:28:50.530895Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T18:28:50.530933Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:28:50.531011Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 
2025-04-17T18:28:50.531040Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T18:28:50.531079Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:28:50.531119Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:28:50.531166Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-17T18:28:50.531190Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:28:50.531224Z node 5 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-17T18:28:50.543083Z node 5 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T18:28:50.543184Z node 5 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:28:50.543252Z node 5 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T18:28:50.543377Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:28:50.545642Z node 5 :TX_PROXY DEBUG: actor# [5:59:2106] Handle TEvNavigate describe path /Root/table-1 2025-04-17T18:28:50.545770Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] HANDLE EvNavigateScheme /Root/table-1 2025-04-17T18:28:50.546518Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:28:50.546692Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2025-04-17T18:28:50.547982Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] Handle TEvDescribeSchemeResult Forward to# [5:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-17T18:28:50.549165Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:864:2694], Recipient [5:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:50.549243Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:50.549304Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:863:2693], serverId# [5:864:2694], sessionId# [0:0:0] 2025-04-17T18:28:50.549470Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [5:862:2692], Recipient [5:666:2570]: NKikimrTxDataShard.TEvGetInfoRequest 
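The traces above show how a datashard drives each operation through a chain of named execution units (CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan for the scheme transaction; CheckWrite -> BuildAndWaitDependencies -> ExecuteWrite -> FinishProposeWrite -> CompletedOperations for the write), where each unit's return status (Executed, ExecutedNoMoreRestarts, DelayComplete, DelayCompleteNoMoreRestarts, or "not ready") decides whether the plan keeps advancing or parks the operation. A minimal sketch of such a unit pipeline, using hypothetical names rather than YDB's actual classes:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical status values mirroring the ones visible in the trace above.
enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;   // one execution attempt for this unit
};

// Advance an operation through its unit plan, stopping when a unit
// reports NotReady (e.g. WaitForPlan waiting for a coordinator plan step).
void RunPlan(const std::vector<TUnit>& plan) {
    for (const auto& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << '\n';
        EStatus st = unit.Execute();
        if (st == EStatus::NotReady) {
            std::cout << "  not ready, parking operation\n";
            return; // stays queued until an event wakes it up
        }
        // DelayComplete-style statuses still advance the plan; their side
        // effects are deferred to the transaction's Complete phase.
        std::cout << "  advancing past " << unit.Name << '\n';
    }
    std::cout << "Execution plan has finished\n";
}

int main() {
    RunPlan({
        {"CheckSchemeTx", [] { return EStatus::ExecutedNoMoreRestarts; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    });
}
```

Deferred statuses are why the log later prints "Complete execution for [0:2] ... on unit FinishProposeWrite" only inside "TTxWrite complete", after the transaction commits.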
2025-04-17T18:28:50.554733Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:867:2697], Recipient [5:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:50.554823Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:50.554887Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:866:2696], serverId# [5:867:2697], sessionId# [0:0:0] 2025-04-17T18:28:50.555128Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:865:2695], Recipient [5:666:2570]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-17T18:28:50.555315Z node 5 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [5:865:2695], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2025-04-17T18:28:50.558601Z node 5 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.532535Z 2025-04-17T18:28:50.558696Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-04-17T18:28:50.558766Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:865:2695]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:28:50.559562Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [5:657:2564], Recipient [5:666:2570]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:28:50.560144Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:874:2703], Recipient [5:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:50.560206Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:50.560263Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:873:2702], serverId# [5:874:2703], sessionId# [0:0:0] 2025-04-17T18:28:50.560461Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:872:2701], Recipient [5:666:2570]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-17T18:28:50.560570Z node 5 :TX_DATASHARD DEBUG: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 2], requested from# [5:872:2701] is not needed |98.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> ReadUpdateWrite::Load [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] |98.1%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/ut/unittest >> SHA256Test::SHA256Test [GOOD] Test command err: 2025-04-17T18:28:28.415869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350749708174231:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:28.415938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004f5/r3tmp/tmpgi8TXc/pdisk_1.dat 2025-04-17T18:28:28.963111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:28.973153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:28.973638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:28.986351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:29.033507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:29.033539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:29.033552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:29.033667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:28:33.416048Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350749708174231:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:33.416169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:28:34.623776Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350774178954011:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:34.623830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004f5/r3tmp/tmpJNkf17/pdisk_1.dat 2025-04-17T18:28:34.770251Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:34.804881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:34.804981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:34.806630Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:34.806646Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:34.806651Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:34.806681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:34.806744Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:28:39.624442Z node 
2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494350774178954011:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:39.624510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:28:45.755865Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494350822775040653:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:45.763133Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004f5/r3tmp/tmpQZIPKK/pdisk_1.dat 2025-04-17T18:28:45.998540Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:46.035203Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:46.035291Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:46.038996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:46.066033Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:46.066055Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:46.066061Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:46.066227Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:28:50.756456Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7494350822775040653:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:50.756570Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |98.1%| [TS] {RESULT} ydb/core/ymq/actor/ut/unittest >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> TopicSessionTests::BadDataSessionError [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding >> ConfigGRPCService::FetchConfig [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleWriteRead >> TopicSessionTests::WrongFieldType >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] |98.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::FetchConfig [GOOD] Test command err: 2025-04-17T18:28:44.216300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350816489691787:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:44.216452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000550/r3tmp/tmpcmKdPq/pdisk_1.dat 2025-04-17T18:28:45.318041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:45.379384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:45.388013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:45.389383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:45.402972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22318, node 1 2025-04-17T18:28:45.492487Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-17T18:28:45.492882Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-17T18:28:45.493176Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-17T18:28:45.493186Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-17T18:28:45.493286Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-17T18:28:45.493392Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-17T18:28:45.494826Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-17T18:28:45.494872Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-17T18:28:45.499142Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-17T18:28:45.499162Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-17T18:28:45.500899Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:28:45.501007Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:28:45.501017Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:28:45.501027Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:28:45.708032Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# BlobStorageConfig 2025-04-17T18:28:45.714258Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# HiveCreateTablet 2025-04-17T18:28:45.714666Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# TabletStateRequest 2025-04-17T18:28:45.714931Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# SchemeOperationStatus 2025-04-17T18:28:45.717475Z node 1 
:GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# ChooseProxy 2025-04-17T18:28:45.717887Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# ResolveNode 2025-04-17T18:28:45.722282Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# FillNode 2025-04-17T18:28:45.722705Z node 1 :GRPC_SERVER DEBUG: [0x51a0000cfc80] created request Name# DrainNode 2025-04-17T18:28:45.723006Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d0280] created request Name# InterconnectDebug 2025-04-17T18:28:45.723306Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d0880] created request Name# TestShardControl 2025-04-17T18:28:45.726687Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d0e80] created request Name# RegisterNode 2025-04-17T18:28:45.727107Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d1480] created request Name# CmsRequest 2025-04-17T18:28:45.730271Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d1a80] created request Name# ConsoleRequest 2025-04-17T18:28:45.730632Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d2080] created request Name# SchemeInitRoot 2025-04-17T18:28:45.738437Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d2680] created request Name# PersQueueRequest 2025-04-17T18:28:45.738844Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d2c80] created request Name# SchemeOperation 2025-04-17T18:28:45.739211Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d3280] created request Name# SchemeDescribe 2025-04-17T18:28:45.927580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:45.927603Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:45.927610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:45.927724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
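The FetchConfig test log shows the server booting its gRPC request handlers and the test client then polling the scheme root until it answers ("WaitRootIsUp 'Root'... success") before issuing any requests. A minimal sketch of that kind of bounded readiness poll, assuming a generic probe callback in place of the real TClient::Ls call:

```cpp
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Poll a readiness probe until it succeeds or the deadline passes,
// the way a test client waits for 'Root' before running the test body.
bool WaitRootIsUp(const std::function<bool()>& probe,
                  std::chrono::milliseconds timeout,
                  std::chrono::milliseconds interval) {
    auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (probe())
            return true;
        std::this_thread::sleep_for(interval);
    }
    return false;
}

int main() {
    int attempts = 0;
    bool ok = WaitRootIsUp(
        [&] { return ++attempts >= 3; },  // stand-in for an Ls("/Root") request
        std::chrono::seconds(5), std::chrono::milliseconds(100));
    std::cout << (ok ? "WaitRootIsUp 'Root' success." : "timed out") << '\n';
}
```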
2025-04-17T18:28:47.035272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "ssd" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:28:47.043677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:47.045571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:28:47.047522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:28:47.047621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:47.054615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:28:47.059958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:28:47.060144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:47.060177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:28:47.060257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:28:47.060268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-17T18:28:47.064569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:47.064610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:28:47.064624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T18:28:47.071452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:47.071492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:47.071524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:28:47.071548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:28:47.082781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:28:47.083154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:28:47.083169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:28:47.083197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:28:47.085296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:28:47.086400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:28:47.091713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744914527132, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:28:47.091884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744914527132 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:28:47.091928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:28:47.097444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:28:47.097501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:28:47.097705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:28:47.097751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:28:47.104506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:28:47 ... ionPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:28:51.039004Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:28:51.039175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:51.039205Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:28:51.039217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:28:51.039229Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-17T18:28:51.041618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:51.041652Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:28:51.041666Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T18:28:51.043048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:28:51.043062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:28:51.043079Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:28:51.043620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:51.043639Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:51.043656Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:28:51.043683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:28:51.043811Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:28:51.045866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:28:51.045976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:28:51.050608Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744914531094, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:28:51.050716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744914531094 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:28:51.050739Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:28:51.050985Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:28:51.051016Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:28:51.051131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:28:51.051167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:28:51.059032Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:28:51.059060Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:28:51.059231Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:28:51.059248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7494350843484648509:2373], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T18:28:51.059287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:51.059307Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T18:28:51.059373Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:28:51.059384Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:28:51.059403Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:28:51.059413Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:28:51.059427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T18:28:51.059446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:28:51.059458Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T18:28:51.059467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-17T18:28:51.059517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T18:28:51.059532Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-17T18:28:51.059541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-17T18:28:51.061038Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-17T18:28:51.061097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-17T18:28:51.061109Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-04-17T18:28:51.061123Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-17T18:28:51.061136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 
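The schemeshard trace above walks the AlterSubDomain operation through numeric states: 2 -> 3 (TCreateParts done, "no shards to create"), 3 -> 128 (parts configured, propose sent to coordinator 72057594046316545), 128 -> 240 (the plan step arrived, operation done), followed by scheme-board publication and the TEvNotifyTxCompletionResult to the waiting actor. A minimal sketch of that progression, with the state meanings inferred from the surrounding log rather than taken from YDB sources:

```cpp
#include <iostream>
#include <map>

// Inferred meaning of the numeric states in the trace:
// 2 = CreateParts, 3 = ConfigureParts, 128 = Propose, 240 = Done.
int NextState(int state) {
    static const std::map<int, int> transitions = {
        {2, 3},     // no shards to create -> configure parts
        {3, 128},   // parts configured    -> propose to coordinator
        {128, 240}, // plan step arrived   -> done, publish & notify
    };
    auto it = transitions.find(state);
    return it == transitions.end() ? state : it->second;
}

int main() {
    for (int s = 2; s != 240; s = NextState(s))
        std::cout << "Change state " << s << " -> " << NextState(s) << '\n';
    std::cout << "Operation and all the parts is done\n";
}
```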
2025-04-17T18:28:51.061194Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-04-17T18:28:51.061208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7494350847779616098:2316] 2025-04-17T18:28:51.063569Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:28:51.063647Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:28:51.063661Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:28:51.063685Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:28:51.066105Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710657 2025-04-17T18:28:51.109048Z node 3 :GRPC_SERVER DEBUG: Got grpc request# FetchConfigRequest, traceId# 01js2g5gs461sjgjwszghzy6en, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:32958, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:28:51.115333Z node 3 :GRPC_SERVER DEBUG: [0x51a00009de80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.115528Z node 3 :GRPC_SERVER DEBUG: [0x51a000114c80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.115666Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a0280] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.115799Z node 3 :GRPC_SERVER DEBUG: [0x51a000115280] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.115933Z node 3 :GRPC_SERVER DEBUG: [0x51a00009ea80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116070Z node 3 :GRPC_SERVER DEBUG: [0x51a0000c0080] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116204Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a6280] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116335Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a5680] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116459Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a4a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116586Z node 3 :GRPC_SERVER DEBUG: [0x51a000113a80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116722Z node 3 :GRPC_SERVER DEBUG: [0x51a000114080] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116841Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a3280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.116992Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a2080] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.117116Z node 3 :GRPC_SERVER DEBUG: [0x51a00009e480] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.117259Z node 3 :GRPC_SERVER DEBUG: [0x51a00009f080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.117405Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a3e80] 
received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:28:51.117552Z node 3 :GRPC_SERVER DEBUG: [0x51a000114680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 |98.2%| [TM] {RESULT} ydb/services/config/ut/unittest >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink >> TCreateAndDropViewTest::DropViewIfExists [GOOD] >> TCreateAndDropViewTest::DropViewInFolder >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding |98.2%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. 
only write
Was written: 0 MiB, Speed: 0 MiB/s
Write: 10% 0.687602s 30% 0.687602s 50% 0.687602s 90% 0.687602s 99% 0.687602s
Write: 10% 0.656471s 30% 0.656471s 50% 0.656471s 90% 0.656471s 99% 0.656471s
Write: 10% 0.732908s 30% 0.732908s 50% 0.732908s 90% 0.732908s 99% 0.732908s
Write: 10% 0.603007s 30% 0.603007s 50% 0.603007s 90% 0.603007s 99% 0.603007s
Write: 10% 0.849391s 30% 0.849391s 50% 0.849391s 90% 0.849391s 99% 0.849391s
Write: 10% 0.849679s 30% 0.849679s 50% 0.849679s 90% 0.849679s 99% 0.849679s
Write: 10% 0.854995s 30% 0.854995s 50% 0.854995s 90% 0.854995s 99% 0.854995s
Write: 10% 0.827317s 30% 0.827317s 50% 0.827317s 90% 0.827317s 99% 0.827317s
Write: 10% 0.880575s 30% 0.880575s 50% 0.880575s 90% 0.880575s 99% 0.880575s
Write: 10% 0.881722s 30% 0.881722s 50% 0.881722s 90% 0.881722s 99% 0.881722s
Write: 10% 0.861154s 30% 0.861154s 50% 0.861154s 90% 0.861154s 99% 0.861154s
Write: 10% 0.651197s 30% 0.651197s 50% 0.651197s 90% 0.651197s 99% 0.651197s
Write: 10% 0.689543s 30% 0.689543s 50% 0.689543s 90% 0.689543s 99% 0.689543s
Write: 10% 0.770005s 30% 0.770005s 50% 0.770005s 90% 0.770005s 99% 0.770005s
Write: 10% 0.789284s 30% 0.789284s 50% 0.789284s 90% 0.789284s 99% 0.789284s
Write: 10% 0.660704s 30% 0.660704s 50% 0.660704s 90% 0.660704s 99% 0.660704s
Write: 10% 0.668472s 30% 0.668472s 50% 0.668472s 90% 0.668472s 99% 0.668472s
Write: 10% 0.896468s 30% 0.896468s 50% 0.896468s 90% 0.896468s 99% 0.896468s
Write: 10% 0.999590s 30% 0.999590s 50% 0.999590s 90% 0.999590s 99% 0.999590s
Write: 10% 0.750803s 30% 0.750803s 50% 0.750803s 90% 0.750803s 99% 0.750803s
Write: 10% 0.755600s 30% 0.755600s 50% 0.755600s 90% 0.755600s 99% 0.755600s
Write: 10% 0.895221s 30% 0.895221s 50% 0.895221s 90% 0.895221s 99% 0.895221s
Write: 10% 1.402806s 30% 1.402806s 50% 1.402806s 90% 1.402806s 99% 1.402806s
Write: 10% 1.074230s 30% 1.074230s 50% 1.074230s 90% 1.074230s 99% 1.074230s
Write: 10% 1.359867s 30% 1.359867s 50% 1.359867s 90% 1.359867s 99% 1.359867s
Write: 10% 1.078357s 30% 1.078357s 50% 1.078357s 90% 1.078357s 99% 1.078357s
Write: 10% 1.055453s 30% 1.055453s 50% 1.055453s 90% 1.055453s 99% 1.055453s
Write: 10% 1.051678s 30% 1.051678s 50% 1.051678s 90% 1.051678s 99% 1.051678s
Write: 10% 1.418640s 30% 1.418640s 50% 1.418640s 90% 1.418640s 99% 1.418640s
Write: 10% 0.904909s 30% 0.904909s 50% 0.904909s 90% 0.904909s 99% 0.904909s
Write: 10% 1.426554s 30% 1.426554s 50% 1.426554s 90% 1.426554s 99% 1.426554s
Write: 10% 1.066768s 30% 1.066768s 50% 1.066768s 90% 1.066768s 99% 1.066768s
Write: 10% 0.944679s 30% 0.944679s 50% 0.944679s 90% 0.944679s 99% 0.944679s
Write: 10% 0.880119s 30% 0.880119s 50% 0.880119s 90% 0.880119s 99% 0.880119s
Write: 10% 1.425191s 30% 1.425191s 50% 1.425191s 90% 1.425191s 99% 1.425191s
Write: 10% 1.163689s 30% 1.163689s 50% 1.163689s 90% 1.163689s 99% 1.163689s
Write: 10% 1.372974s 30% 1.372974s 50% 1.372974s 90% 1.372974s 99% 1.372974s
Write: 10% 0.822177s 30% 0.822177s 50% 0.822177s 90% 0.822177s 99% 0.822177s
Write: 10% 0.673847s 30% 0.673847s 50% 0.673847s 90% 0.673847s 99% 0.673847s
Write: 10% 0.827118s 30% 0.827118s 50% 0.827118s 90% 0.827118s 99% 0.827118s
Write: 10% 1.421457s 30% 1.421457s 50% 1.421457s 90% 1.421457s 99% 1.421457s
Write: 10% 0.844038s 30% 0.844038s 50% 0.844038s 90% 0.844038s 99% 0.844038s
Write: 10% 0.984101s 30% 0.984101s 50% 0.984101s 90% 0.984101s 99% 0.984101s
Write: 10% 1.251511s 30% 1.251511s 50% 1.251511s 90% 1.251511s 99% 1.251511s
Write: 10% 0.907168s 30% 0.907168s 50% 0.907168s 90% 0.907168s 99% 0.907168s
Write: 10% 0.713194s 30% 0.713194s 50% 0.713194s 90% 0.713194s 99% 0.713194s
Write: 10% 0.715689s 30% 0.715689s 50% 0.715689s 90% 0.715689s 99% 0.715689s
Write: 10% 0.892913s 30% 0.892913s 50% 0.892913s 90% 0.892913s 99% 0.892913s
Write: 10% 0.790830s 30% 0.790830s 50% 0.790830s 90% 0.790830s 99% 0.790830s
Write: 10% 0.857653s 30% 0.857653s 50% 0.857653s 90% 0.857653s 99% 0.857653s
Write: 10% 0.782570s 30% 0.782570s 50% 0.782570s 90% 0.782570s 99% 0.782570s
Write: 10% 0.853353s 30% 0.853353s 50% 0.853353s 90% 0.853353s 99% 0.853353s
Write: 10% 0.882887s 30% 0.882887s 50% 0.882887s 90% 0.882887s 99% 0.882887s
Write: 10% 0.911381s 30% 0.911381s 50% 0.911381s 90% 0.911381s 99% 0.911381s
Write: 10% 0.834885s 30% 0.834885s 50% 0.834885s 90% 0.834885s 99% 0.834885s
Write: 10% 0.774394s 30% 0.774394s 50% 0.774394s 90% 0.774394s 99% 0.774394s
Write: 10% 0.886133s 30% 0.886133s 50% 0.886133s 90% 0.886133s 99% 0.886133s
Write: 10% 0.875710s 30% 0.875710s 50% 0.875710s 90% 0.875710s 99% 0.875710s
Write: 10% 0.785364s 30% 0.785364s 50% 0.785364s 90% 0.785364s 99% 0.785364s
Write: 10% 0.989322s 30% 0.989322s 50% 0.989322s 90% 0.989322s 99% 0.989322s
Write: 10% 0.835756s 30% 0.835756s 50% 0.835756s 90% 0.835756s 99% 0.835756s
Write: 10% 0.810448s 30% 0.810448s 50% 0.810448s 90% 0.810448s 99% 0.810448s
Write: 10% 0.820876s 30% 0.820876s 50% 0.820876s 90% 0.820876s 99% 0.820876s
Write: 10% 0.943538s 30% 0.943538s 50% 0.943538s 90% 0.943538s 99% 0.943538s
Step 2. read write
Write: 10% 0.225706s 30% 0.225706s 50% 0.225706s 90% 0.225706s 99% 0.225706s
Write: 10% 0.106732s 30% 0.106732s 50% 0.106732s 90% 0.106732s 99% 0.106732s
Write: 10% 0.082541s 30% 0.082541s 50% 0.082541s 90% 0.082541s 99% 0.082541s
Write: 10% 0.115171s 30% 0.115171s 50% 0.115171s 90% 0.115171s 99% 0.115171s
Write: 10% 0.069271s 30% 0.069271s 50% 0.069271s 90% 0.069271s 99% 0.069271s
Write: 10% 0.141045s 30% 0.141045s 50% 0.141045s 90% 0.141045s 99% 0.141045s
Write: 10% 0.118625s 30% 0.118625s 50% 0.118625s 90% 0.118625s 99% 0.118625s
Write: 10% 0.173466s 30% 0.173466s 50% 0.173466s 90% 0.173466s 99% 0.173466s
Write: 10% 0.163449s 30% 0.163449s 50% 0.163449s 90% 0.163449s 99% 0.163449s
Write: 10% 0.304003s 30% 0.304003s 50% 0.304003s 90% 0.304003s 99% 0.304003s
Write: 10% 0.338298s 30% 0.338298s 50% 0.338298s 90% 0.338298s 99% 0.338298s
Write: 10% 0.606242s 30% 0.606242s 50% 0.606242s 90% 0.606242s 99% 0.606242s
Write: 10% 0.555765s 30% 0.555765s 50% 0.555765s 90% 0.555765s 99% 0.555765s
Write: 10% 0.465219s 30% 0.465219s 50% 0.465219s 90% 0.465219s 99% 0.465219s
Write: 10% 0.500851s 30% 0.500851s 50% 0.500851s 90% 0.500851s 99% 0.500851s
Write: 10% 0.719684s 30% 0.719684s 50% 0.719684s 90% 0.719684s 99% 0.719684s
Write: 10% 0.599895s 30% 0.599895s 50% 0.599895s 90% 0.599895s 99% 0.599895s
Write: 10% 0.629372s 30% 0.629372s 50% 0.629372s 90% 0.629372s 99% 0.629372s
Write: 10% 0.469138s 30% 0.469138s 50% 0.469138s 90% 0.469138s 99% 0.469138s
Write: 10% 0.621007s 30% 0.621007s 50% 0.621007s 90% 0.621007s 99% 0.621007s
Write: 10% 0.532214s 30% 0.532214s 50% 0.532214s 90% 0.532214s 99% 0.532214s
Write: 10% 0.573401s 30% 0.573401s 50% 0.573401s 90% 0.573401s 99% 0.573401s
Write: 10% 0.734707s 30% 0.734707s 50% 0.734707s 90% 0.734707s 99% 0.734707s
Write: 10% 0.677035s 30% 0.677035s 50% 0.677035s 90% 0.677035s 99% 0.677035s
Write: 10% 0.552862s 30% 0.552862s 50% 0.552862s 90% 0.552862s 99% 0.552862s
Write: 10% 0.801892s 30% 0.801892s 50% 0.801892s 90% 0.801892s 99% 0.801892s
Write: 10% 0.653461s 30% 0.653461s 50% 0.653461s 90% 0.653461s 99% 0.653461s
Write: 10% 0.524532s 30% 0.524532s 50% 0.524532s 90% 0.524532s 99% 0.524532s
Write: 10% 0.652999s 30% 0.652999s 50% 0.652999s 90% 0.652999s 99% 0.652999s
Write: 10% 0.513785s 30% 0.513785s 50% 0.513785s 90% 0.513785s 99% 0.513785s
Write: 10% 0.490136s 30% 0.490136s 50% 0.490136s 90% 0.490136s 99% 0.490136s
Write: 10% 0.572161s 30% 0.572161s 50% 0.572161s 90% 0.572161s 99% 0.572161s
Write: 10% 0.513901s 30% 0.513901s 50% 0.513901s 90% 0.513901s 99% 0.513901s
Write: 10% 0.774646s 30% 0.774646s 50% 0.774646s 90% 0.774646s 99% 0.774646s
Write: 10% 0.511678s 30% 0.511678s 50% 0.511678s 90% 0.511678s 99% 0.511678s
Write: 10% 0.665710s 30% 0.665710s 50% 0.665710s 90% 0.665710s 99% 0.665710s
Write: 10% 0.530269s 30% 0.530269s 50% 0.530269s 90% 0.530269s 99% 0.530269s
Write: 10% 0.634646s 30% 0.634646s 50% 0.634646s 90% 0.634646s 99% 0.634646s
Write: 10% 0.749241s 30% 0.749241s 50% 0.749241s 90% 0.749241s 99% 0.749241s
Write: 10% 0.486780s 30% 0.486780s 50% 0.486780s 90% 0.486780s 99% 0.486780s
Write: 10% 0.618150s 30% 0.618150s 50% 0.618150s 90% 0.618150s 99% 0.618150s
Write: 10% 0.606659s 30% 0.606659s 50% 0.606659s 90% 0.606659s 99% 0.606659s
Write: 10% 0.597889s 30% 0.597889s 50% 0.597889s 90% 0.597889s 99% 0.597889s
Write: 10% 0.535521s 30% 0.535521s 50% 0.535521s 90% 0.535521s 99% 0.535521s
Write: 10% 0.594474s 30% 0.594474s 50% 0.594474s 90% 0.594474s 99% 0.594474s
Write: 10% 0.455010s 30% 0.455010s 50% 0.455010s 90% 0.455010s 99% 0.455010s
Write: 10% 0.452304s 30% 0.452304s 50% 0.452304s 90% 0.452304s 99% 0.452304s
Write: 10% 0.492274s 30% 0.492274s 50% 0.492274s 90% 0.492274s 99% 0.492274s
Write: 10% 0.648710s 30% 0.648710s 50% 0.648710s 90% 0.648710s 99% 0.648710s
Write: 10% 0.578743s 30% 0.578743s 50% 0.578743s 90% 0.578743s 99% 0.578743s
Write: 10% 0.574044s 30% 0.574044s 50% 0.574044s 90% 0.574044s 99% 0.574044s
Write: 10% 0.524001s 30% 0.524001s 50% 0.524001s 90% 0.524001s 99% 0.524001s
Write: 10% 0.500414s 30% 0.500414s 50% 0.500414s 90% 0.500414s 99% 0.500414s
Write: 10% 0.495117s 30% 0.495117s 50% 0.495117s 90% 0.495117s 99% 0.495117s
Write: 10% 0.675484s 30% 0.675484s 50% 0.675484s 90% 0.675484s 99% 0.675484s
Write: 10% 0.499789s 30% 0.499789s 50% 0.499789s 90% 0.499789s 99% 0.499789s
Write: 10% 0.558256s 30% 0.558256s 50% 0.558256s 90% 0.558256s 99% 0.558256s
Write: 10% 0.479305s 30% 0.479305s 50% 0.479305s 90% 0.479305s 99% 0.479305s
Write: 10% 0.515657s 30% 0.515657s 50% 0.515657s 90% 0.515657s 99% 0.515657s
Write: 10% 0.756267s 30% 0.756267s 50% 0.756267s 90% 0.756267s 99% 0.756267s
Write: 10% 0.499655s 30% 0.499655s 50% 0.499655s 90% 0.499655s 99% 0.499655s
Write: 10% 0.496972s 30% 0.496972s 50% 0.496972s 90% 0.496972s 99% 0.496972s
Write: 10% 0.546481s 30% 0.546481s 50% 0.546481s 90% 0.546481s 99% 0.546481s
Write: 10% 0.578200s 30% 0.578200s 50% 0.578200s 90% 0.578200s 99% 0.578200s
Read: 10% 1.941466s 30% 1.941466s 50% 1.941466s 90% 1.941466s 99% 1.941466s
Step 3. write modify
Write: 10% 0.814879s 30% 0.814879s 50% 0.814879s 90% 0.814879s 99% 0.814879s
Write: 10% 1.042600s 30% 1.042600s 50% 1.042600s 90% 1.042600s 99% 1.042600s
Write: 10% 0.533255s 30% 0.533255s 50% 0.533255s 90% 0.533255s 99% 0.533255s
Write: 10% 0.770274s 30% 0.770274s 50% 0.770274s 90% 0.770274s 99% 0.770274s
Write: 10% 1.182460s 30% 1.182460s 50% 1.182460s 90% 1.182460s 99% 1.182460s
Write: 10% 0.882619s 30% 0.882619s 50% 0.882619s 90% 0.882619s 99% 0.882619s
Write: 10% 0.801791s 30% 0.801791s 50% 0.801791s 90% 0.801791s 99% 0.801791s
Write: 10% 0.852737s 30% 0.852737s 50% 0.852737s 90% 0.852737s 99% 0.852737s
Write: 10% 1.069456s 30% 1.069456s 50% 1.069456s 90% 1.069456s 99% 1.069456s
Write: 10% 0.687882s 30% 0.687882s 50% 0.687882s 90% 0.687882s 99% 0.687882s
Write: 10% 0.870081s 30% 0.870081s 50% 0.870081s 90% 0.870081s 99% 0.870081s
Write: 10% 0.690764s 30% 0.690764s 50% 0.690764s 90% 0.690764s 99% 0.690764s
Write: 10% 0.813104s 30% 0.813104s 50% 0.813104s 90% 0.813104s 99% 0.813104s
Write: 10% 0.928046s 30% 0.928046s 50% 0.928046s 90% 0.928046s 99% 0.928046s
Write: 10% 0.951779s 30% 0.951779s 50% 0.951779s 90% 0.951779s 99% 0.951779s
Write: 10% 0.917118s 30% 0.917118s 50% 0.917118s 90% 0.917118s 99% 0.917118s
Write: 10% 0.882902s 30% 0.882902s 50% 0.882902s 90% 0.882902s 99% 0.882902s
Write: 10% 1.283133s 30% 1.283133s 50% 1.283133s 90% 1.283133s 99% 1.283133s
Write: 10% 0.992197s 30% 0.992197s 50% 0.992197s 90% 0.992197s 99% 0.992197s
Write: 10% 1.077286s 30% 1.077286s 50% 1.077286s 90% 1.077286s 99% 1.077286s
Write: 10% 0.900448s 30% 0.900448s 50% 0.900448s 90% 0.900448s 99% 0.900448s
Write: 10% 0.528772s 30% 0.528772s 50% 0.528772s 90% 0.528772s 99% 0.528772s
Write: 10% 0.920954s 30% 0.920954s 50% 0.920954s 90% 0.920954s 99% 0.920954s
Write: 10% 0.980775s 30% 0.980775s 50% 0.980775s 90% 0.980775s 99% 0.980775s
Write: 10% 0.489167s 30% 0.489167s 50% 0.489167s 90% 0.489167s 99% 0.489167s
Write: 10% 1.363591s 30% 1.363591s 50% 1.363591s 90% 1.363591s 99% 1.363591s
Write: 10% 1.167743s 30% 1.167743s 50% 1.167743s 90% 1.167743s 99% 1.167743s
Write: 10% 1.176612s 30% 1.176612s 50% 1.176612s 90% 1.176612s 99% 1.176612s
Write: 10% 1.425531s 30% 1.425531s 50% 1.425531s 90% 1.425531s 99% 1.425531s
Write: 10% 1.133512s 30% 1.133512s 50% 1.133512s 90% 1.133512s 99% 1.133512s
Write: 10% 0.666369s 30% 0.666369s 50% 0.666369s 90% 0.666369s 99% 0.666369s
Write: 10% 1.305230s 30% 1.305230s 50% 1.305230s 90% 1.305230s 99% 1.305230s
Write: 10% 1.195097s 30% 1.195097s 50% 1.195097s 90% 1.195097s 99% 1.195097s
Write: 10% 1.559694s 30% 1.559694s 50% 1.559694s 90% 1.559694s 99% 1.559694s
Write: 10% 1.430197s 30% 1.430197s 50% 1.430197s 90% 1.430197s 99% 1.430197s
Write: 10% 0.543313s 30% 0.543313s 50% 0.543313s 90% 0.543313s 99% 0.543313s
Write: 10% 1.262496s 30% 1.262496s 50% 1.262496s 90% 1.262496s 99% 1.262496s
Write: 10% 1.425275s 30% 1.425275s 50% 1.425275s 90% 1.425275s 99% 1.425275s
Write: 10% 1.234259s 30% 1.234259s 50% 1.234259s 90% 1.234259s 99% 1.234259s
Write: 10% 0.898241s 30% 0.898241s 50% 0.898241s 90% 0.898241s 99% 0.898241s
Write: 10% 1.665310s 30% 1.665310s 50% 1.665310s 90% 1.665310s 99% 1.665310s
Write: 10% 1.669570s 30% 1.669570s 50% 1.669570s 90% 1.669570s 99% 1.669570s
Write: 10% 1.257756s 30% 1.257756s 50% 1.257756s 90% 1.257756s 99% 1.257756s
Write: 10% 0.715059s 30% 0.715059s 50% 0.715059s 90% 0.715059s 99% 0.715059s
Write: 10% 1.148598s 30% 1.148598s 50% 1.148598s 90% 1.148598s 99% 1.148598s
Write: 10% 1.404981s 30% 1.404981s 50% 1.404981s 90% 1.404981s 99% 1.404981s
Write: 10% 1.228277s 30% 1.228277s 50% 1.228277s 90% 1.228277s 99% 1.228277s
Write: 10% 1.585796s 30% 1.585796s 50% 1.585796s 90% 1.585796s 99% 1.585796s
Write: 10% 0.753705s 30% 0.753705s 50% 0.753705s 90% 0.753705s 99% 0.753705s
Write: 10% 1.191050s 30% 1.191050s 50% 1.191050s 90% 1.191050s 99% 1.191050s
Write: 10% 0.819986s 30% 0.819986s 50% 0.819986s 90% 0.819986s 99% 0.819986s
Write: 10% 1.287433s 30% 1.287433s 50% 1.287433s 90% 1.287433s 99% 1.287433s
Write: 10% 1.376854s 30% 1.376854s 50% 1.376854s 90% 1.376854s 99% 1.376854s
Write: 10% 1.282360s 30% 1.282360s 50% 1.282360s 90% 1.282360s 99% 1.282360s
Write: 10% 1.354709s 30% 1.354709s 50% 1.354709s 90% 1.354709s 99% 1.354709s
Write: 10% 0.780582s 30% 0.780582s 50% 0.780582s 90% 0.780582s 99% 0.780582s
Write: 10% 1.411558s 30% 1.411558s 50% 1.411558s 90% 1.411558s 99% 1.411558s
Write: 10% 1.310408s 30% 1.310408s 50% 1.310408s 90% 1.310408s 99% 1.310408s
Write: 10% 1.205518s 30% 1.205518s 50% 1.205518s 90% 1.205518s 99% 1.205518s
Write: 10% 1.398976s 30% 1.398976s 50% 1.398976s 90% 1.398976s 99% 1.398976s
Write: 10% 0.877955s 30% 0.877955s 50% 0.877955s 90% 0.877955s 99% 0.877955s
Write: 10% 1.516054s 30% 1.516054s 50% 1.516054s 90% 1.516054s 99% 1.516054s
Write: 10% 0.726983s 30% 0.726983s 50% 0.726983s 90% 0.726983s 99% 0.726983s
Write: 10% 1.351388s 30% 1.351388s 50% 1.351388s 90% 1.351388s 99% 1.351388s
Update: 10% 0.283952s 30% 0.283952s 50% 1.060447s 90% 1.060447s 99% 1.060447s
Step 4. read modify write
Write: 10% 0.496501s 30% 0.496501s 50% 0.496501s 90% 0.496501s 99% 0.496501s
Write: 10% 0.547474s 30% 0.547474s 50% 0.547474s 90% 0.547474s 99% 0.547474s
Write: 10% 0.206240s 30% 0.206240s 50% 0.206240s 90% 0.206240s 99% 0.206240s
Write: 10% 0.606306s 30% 0.606306s 50% 0.606306s 90% 0.606306s 99% 0.606306s
Write: 10% 0.335647s 30% 0.335647s 50% 0.335647s 90% 0.335647s 99% 0.335647s
Write: 10% 0.751642s 30% 0.751642s 50% 0.751642s 90% 0.751642s 99% 0.751642s
Write: 10% 0.678712s 30% 0.678712s 50% 0.678712s 90% 0.678712s 99% 0.678712s
Write: 10% 1.088653s 30% 1.088653s 50% 1.088653s 90% 1.088653s 99% 1.088653s
Write: 10% 1.139624s 30% 1.139624s 50% 1.139624s 90% 1.139624s 99% 1.139624s
Write: 10% 0.881663s 30% 0.881663s 50% 0.881663s 90% 0.881663s 99% 0.881663s
Write: 10% 1.372525s 30% 1.372525s 50% 1.372525s 90% 1.372525s 99% 1.372525s
Write: 10% 0.988567s 30% 0.988567s 50% 0.988567s 90% 0.988567s 99% 0.988567s
Write: 10% 1.200217s 30% 1.200217s 50% 1.200217s 90% 1.200217s 99% 1.200217s
Write: 10% 1.460732s 30% 1.460732s 50% 1.460732s 90% 1.460732s 99% 1.460732s
Write: 10% 1.160590s 30% 1.160590s 50% 1.160590s 90% 1.160590s 99% 1.160590s
Write: 10% 1.160623s 30% 1.160623s 50% 1.160623s 90% 1.160623s 99% 1.160623s
Write: 10% 1.129021s 30% 1.129021s 50% 1.129021s 90% 1.129021s 99% 1.129021s
Write: 10% 1.127900s 30% 1.127900s 50% 1.127900s 90% 1.127900s 99% 1.127900s
Write: 10% 1.173337s 30% 1.173337s 50% 1.173337s 90% 1.173337s 99% 1.173337s
Write: 10% 1.184979s 30% 1.184979s 50% 1.184979s 90% 1.184979s 99% 1.184979s
Write: 10% 1.173175s 30% 1.173175s 50% 1.173175s 90% 1.173175s 99% 1.173175s
Write: 10% 1.709699s 30% 1.709699s 50% 1.709699s 90% 1.709699s 99% 1.709699s
Write: 10% 1.741551s 30% 1.741551s 50% 1.741551s 90% 1.741551s 99% 1.741551s
Write: 10% 1.377767s 30% 1.377767s 50% 1.377767s 90% 1.377767s 99% 1.377767s
Write: 10% 1.464028s 30% 1.464028s 50% 1.464028s 90% 1.464028s 99% 1.464028s
Write: 10% 1.441237s 30% 1.441237s 50% 1.441237s 90% 1.441237s 99% 1.441237s
Write: 10% 1.471987s 30% 1.471987s 50% 1.471987s 90% 1.471987s
99% 1.471987s Write: 10% 1.395078s 30% 1.395078s 50% 1.395078s 90% 1.395078s 99% 1.395078s Write: 10% 1.348940s 30% 1.348940s 50% 1.348940s 90% 1.348940s 99% 1.348940s Write: 10% 1.294980s 30% 1.294980s 50% 1.294980s 90% 1.294980s 99% 1.294980s Write: 10% 1.382989s 30% 1.382989s 50% 1.382989s 90% 1.382989s 99% 1.382989s Write: 10% 1.454040s 30% 1.454040s 50% 1.454040s 90% 1.454040s 99% 1.454040s Write: 10% 1.315946s 30% 1.315946s 50% 1.315946s 90% 1.315946s 99% 1.315946s Write: 10% 1.485281s 30% 1.485281s 50% 1.485281s 90% 1.485281s 99% 1.485281s Write: 10% 1.487888s 30% 1.487888s 50% 1.487888s 90% 1.487888s 99% 1.487888s Write: 10% 1.499414s 30% 1.499414s 50% 1.499414s 90% 1.499414s 99% 1.499414s Write: 10% 1.559891s 30% 1.559891s 50% 1.559891s 90% 1.559891s 99% 1.559891s Write: 10% 1.572474s 30% 1.572474s 50% 1.572474s 90% 1.572474s 99% 1.572474s Write: 10% 1.637076s 30% 1.637076s 50% 1.637076s 90% 1.637076s 99% 1.637076s Write: 10% 1.554573s 30% 1.554573s 50% 1.554573s 90% 1.554573s 99% 1.554573s Write: 10% 1.803019s 30% 1.803019s 50% 1.803019s 90% 1.803019s 99% 1.803019s Write: 10% 1.885543s 30% 1.885543s 50% 1.885543s 90% 1.885543s 99% 1.885543s Write: 10% 1.777652s 30% 1.777652s 50% 1.777652s 90% 1.777652s 99% 1.777652s Write: 10% 1.830279s 30% 1.830279s 50% 1.830279s 90% 1.830279s 99% 1.830279s Write: 10% 1.812055s 30% 1.812055s 50% 1.812055s 90% 1.812055s 99% 1.812055s Write: 10% 1.976555s 30% 1.976555s 50% 1.976555s 90% 1.976555s 99% 1.976555s Write: 10% 1.802248s 30% 1.802248s 50% 1.802248s 90% 1.802248s 99% 1.802248s Write: 10% 1.833211s 30% 1.833211s 50% 1.833211s 90% 1.833211s 99% 1.833211s Write: 10% 1.781171s 30% 1.781171s 50% 1.781171s 90% 1.781171s 99% 1.781171s Write: 10% 1.782263s 30% 1.782263s 50% Write: 10% 1.867495s 30% 1.867495s 50% Write: 10% 1.761282s 30% 1.761282s 50% 1.761282s 90% 1.761282s 99% 1.761282s 1.867495s 90% 1.867495s 99% 1.867495s1.782263s 90% 1.782263s 99% 1.782263s Write: 10% 1.866975s 30% 1.866975s 50% Write: 10% 1.889982s 30% 1.889982s 50% 1.889982s 90% 1.889982s 99% 1.889982s 1.866975s 90% 1.866975s 99% 1.866975sWrite: 10% 1.829853s 30% 1.829853s 50% 1.829853s 90% 1.829853s 99% 1.829853s Write: 10% 1.792102s 30% 1.792102s 50% 1.792102s 90% 1.792102s 99% 1.792102s Write: 10% 1.852709s 30% 1.852709s 50% 1.852709s 90% 1.852709s 99% 1.852709s Write: 10% 1.773548s 30% 1.773548s 50% 1.773548s 90% 1.773548s 99% 1.773548s Write: 10% 1.882414s 30% 1.882414s 50% 1.882414s 90% 1.882414s 99% 1.882414s Write: 10% 1.874606s 30% 1.874606s 50% 1.874606s 90% 1.874606s 99% 1.874606s Write: 10% 1.800500s 30% 1.800500s 50% 1.800500s 90% 1.800500s 99% 1.800500s Write: 10% 1.799867s 30% 1.799867s 50% 1.799867s 90% 1.799867s 99% 1.799867s Write: 10% 1.886509s 30% 1.886509s 50% 1.886509s 90% 1.886509s 99% 1.886509s Write: 10% 1.804760s 30% 1.804760s 50% 1.804760s 90% 1.804760s 99% 1.804760s Update: 10% 0.621168s 30% 0.621168s 50% 1.771974s 90% 1.771974s 99% 1.771974s Read: 10% 3.493937s 30% 3.493937s 50% 3.493937s 90% 3.493937s 99% 3.493937s |98.2%| [TM] {RESULT} ydb/tests/olap/high_load/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> 
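Two things stand out in the percentile summaries above: each reporting thread has so few samples that every percentile collapses to the same value, and several threads printed their summaries concurrently without synchronization, which is why some records arrived interleaved in the raw log (untangled above). A minimal sketch of this reporting pattern — all names here are illustrative, not the actual high-load test harness:

```cpp
#include <algorithm>
#include <cstdio>
#include <mutex>
#include <vector>

std::mutex g_report_mutex;  // without this lock, concurrent reports interleave

// Nearest-rank percentile over a sample of durations (seconds).
double Percentile(std::vector<double> samples, double p) {
    std::sort(samples.begin(), samples.end());
    size_t idx = static_cast<size_t>(p / 100.0 * (samples.size() - 1) + 0.5);
    return samples[idx];
}

void ReportLatencies(const char* op, const std::vector<double>& samples) {
    std::lock_guard<std::mutex> lock(g_report_mutex);
    std::printf("%s:", op);
    for (double p : {10.0, 30.0, 50.0, 90.0, 99.0}) {
        // With a single sample, every percentile collapses to that value --
        // exactly what the Write records above show.
        std::printf(" %.0f%% %.6fs", p, Percentile(samples, p));
    }
    std::printf("\n");
}

int main() {
    ReportLatencies("Write", {0.814879});             // one sample per thread
    ReportLatencies("Update", {0.283952, 1.060447});  // two samples: 10/30% vs 50/90/99%
}
```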
test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery >> test_result_limits.py::TestResultLimits::test_many_rows [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: 2025-04-17T18:28:53.024881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:53.025628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:53.025687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005e7/r3tmp/tmpTxgapH/pdisk_1.dat 2025-04-17T18:28:53.733791Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-04-17T18:28:53.733923Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.758503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:53.760432Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-04-17T18:28:53.760560Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.761653Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:1:24576:515:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.761773Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.761890Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-04-17T18:28:53.772746Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-17T18:28:53.772844Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.778607Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-17T18:28:53.778730Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.779183Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:1:24576:129:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.779283Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.779409Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited 
cookie 1 for step 6 2025-04-17T18:28:53.779607Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-17T18:28:53.779653Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.779831Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-17T18:28:53.779908Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.780221Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:1:24576:130:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.780292Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.780363Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-04-17T18:28:53.780482Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-17T18:28:53.780517Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.787416Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-17T18:28:53.787532Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.788048Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.788157Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.788244Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-04-17T18:28:53.794945Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-04-17T18:28:53.795050Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.795140Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:53.795203Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 
dyn 0} 2025-04-17T18:28:53.806135Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-04-17T18:28:53.806250Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.806520Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T18:28:53.806592Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.838568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:53.839518Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} queued, type NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig 2025-04-17T18:28:53.839597Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.848538Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:53.848709Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.848885Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} queued, type NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig 2025-04-17T18:28:53.848956Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.850777Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:53.850871Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.866452Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037932033:2:8:0:0:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:53.866609Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} commited cookie 1 for step 8 2025-04-17T18:28:53.866728Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives 2025-04-17T18:28:53.866811Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.867170Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, 
NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:53.867230Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:53.896325Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:28:53.896521Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} queued, type NKikimr::NTxAllocator::TTxAllocator::TTxReserve 2025-04-17T18:28:53.896596Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:53.896819Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} hope 1 -> done Change{3, redo 76b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-17T18:28:53.896888Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTx ... er{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{20, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T18:28:57.589709Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:57.590170Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:57.590285Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:57.590370Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2025-04-17T18:28:57.613259Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-04-17T18:28:57.613365Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:57.613533Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{12, redo 82b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-17T18:28:57.613610Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:57.624353Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037968897:2:10:0:0:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:57.624502Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} commited cookie 1 for step 10 2025-04-17T18:28:57.748520Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-17T18:28:57.748630Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-04-17T18:28:57.748785Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{21, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T18:28:57.748886Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:57.749394Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:57.749466Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:57.749551Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2025-04-17T18:28:57.891801Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-17T18:28:57.891895Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:57.892060Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{22, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T18:28:57.892118Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:57.892515Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:57.892575Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:57.892675Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2025-04-17T18:28:58.035859Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-17T18:28:58.035947Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:58.036132Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{23, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T18:28:58.036192Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:58.036601Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:58.036686Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:58.036785Z node 1 
:TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2025-04-17T18:28:58.178009Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-17T18:28:58.178146Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:58.178338Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{24, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T18:28:58.178423Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:58.178947Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:58.179006Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:58.179099Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2025-04-17T18:28:58.193193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:28:58.193254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:58.219930Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2025-04-17T18:28:58.220033Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:58.220184Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:58.220260Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:58.317461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-17T18:28:58.317529Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-17T18:28:58.317747Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2025-04-17T18:28:58.317799Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:58.317891Z node 1 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 15000 last cleanup 0 2025-04-17T18:28:58.317962Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:28:58.317994Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-17T18:28:58.318053Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:28:58.318090Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:28:58.318168Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-17T18:28:58.318283Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:58.318461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:28:58.382291Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-17T18:28:58.382419Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-17T18:28:58.382599Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{25, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-17T18:28:58.382653Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-17T18:28:58.383131Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:58.383193Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-17T18:28:58.383260Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:32} commited cookie 1 for step 31 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 2025-04-17T18:28:58.515051Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} CheckYellow current light yellow move channels: 0 1 2 --- Captured TEvReassignTablet event |98.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KeyValueGRPCService::SimpleWriteRead [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2025-04-17T18:28:16.374148Z node 1 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-17T18:28:16.375596Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. 
Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-17T18:28:16.376798Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-17T18:28:16.377845Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-17T18:28:16.377872Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:16.386323Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Thu, 17 Apr 2025 18:28:16 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-17T18:28:16.389161Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:26.653601Z node 2 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-17T18:28:26.664451Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-17T18:28:26.682772Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-04-17T18:28:26.698008Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-17T18:28:26.713459Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-17T18:28:26.729118Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-17T18:28:26.744425Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-17T18:28:26.759512Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-17T18:28:26.774843Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-17T18:28:26.782377Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-04-17T18:28:26.782728Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-04-17T18:28:26.783080Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-17T18:28:26.783379Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-17T18:28:26.783414Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-17T18:28:26.899466Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:26 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:26.899869Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-17T18:28:26.899894Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-17T18:28:27.022223Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:27.022587Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-17T18:28:27.022611Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-17T18:28:27.065159Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:27.065566Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-17T18:28:27.065585Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-17T18:28:27.189836Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:27.190233Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-17T18:28:27.190255Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-17T18:28:27.255268Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:27.255543Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2025-04-17T18:28:27.255558Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-04-17T18:28:27.324480Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:27.324572Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:27.358551Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Thu, 17 Apr 2025 18:28:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 500} 2025-04-17T18:28:27.358662Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:27.413689Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:27.413826Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:37.934663Z node 3 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. 
Init 2025-04-17T18:28:37.942349Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-17T18:28:37.946425Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-04-17T18:28:37.946593Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2025-04-17T18:28:37.946612Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:37.962442Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Thu, 17 Apr 2025 18:28:37 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 10} 2025-04-17T18:28:37.962549Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:48.367098Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-17T18:28:48.384239Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-17T18:28:48.414754Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-04-17T18:28:48.432698Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-17T18:28:48.439090Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-04-17T18:28:48.439468Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-04-17T18:28:48.439991Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-17T18:28:48.440092Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2025-04-17T18:28:48.440104Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-17T18:28:48.506639Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Thu, 17 Apr 2025 18:28:48 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 400} 2025-04-17T18:28:48.506897Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-04-17T18:28:48.682658Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:48 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:48.682768Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-04-17T18:28:48.804275Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 17 Apr 2025 18:28:48 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-17T18:28:48.804409Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:49.700392Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. 
Init 2025-04-17T18:28:49.701892Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-17T18:28:49.702024Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-17T18:28:49.702162Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-17T18:28:49.702196Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2025-04-17T18:28:49.706972Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Thu, 17 Apr 2025 18:28:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-17T18:28:49.707094Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:49.707866Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-17T18:28:49.707992Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-17T18:28:49.708069Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-17T18:28:49.708084Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-17T18:28:49.712806Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Thu, 17 Apr 2025 18:28:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-17T18:28:49.712929Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer |98.2%| [TS] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs >> TopicSessionTests::WrongFieldType [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: 2025-04-17T18:28:26.706116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:26.706791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:26.706862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00044f/r3tmp/tmp5WAD1z/pdisk_1.dat 2025-04-17T18:28:27.366551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:28:27.412215Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:27.458130Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-17T18:28:27.462564Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-17T18:28:27.462847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:27.464198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:27.482976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:27.593581Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-17T18:28:27.593660Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-17T18:28:27.595530Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-17T18:28:27.716888Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-17T18:28:27.716982Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T18:28:27.717590Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T18:28:27.717696Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:28:27.717973Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:28:27.718246Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:28:27.718377Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-17T18:28:27.725718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:27.726888Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-17T18:28:27.727658Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-17T18:28:27.727723Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-17T18:28:27.770559Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:28:27.771549Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:28:27.772909Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:28:27.773205Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:28:27.823728Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:28:27.824587Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:28:27.824693Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:28:27.826458Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:28:27.826542Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:28:27.826590Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:28:27.832240Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:28:27.832424Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:28:27.832518Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:28:27.846691Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:28:27.876947Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:28:27.878953Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:28:27.879654Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:28:27.879718Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:28:27.879753Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:28:27.879786Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:28:27.880008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-17T18:28:27.880051Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:28:27.881292Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:28:27.881468Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:28:27.881610Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:28:27.881660Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:28:27.881759Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:28:27.881803Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:28:27.881873Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:28:27.881908Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:28:27.881961Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:28:27.882113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:27.882166Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:28:27.882253Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:28:27.883557Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:28:27.883605Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:28:27.883733Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:28:27.884216Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:28:27.884272Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:28:27.884372Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:28:27.884426Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:28:27.884464Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:28:27.884498Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:28:27.884531Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:28:27.884918Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:28:27.884967Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:28:27.885000Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:28:27.885063Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-17T18:28:27.885122Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:28:27.885156Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:28:27.885188Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:28:27.885219Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:28:27.885244Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:28:27.886969Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:28:27.887019Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:28:27.898329Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 4-17T18:29:00.924008Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-17T18:29:00.924068Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037892 2025-04-17T18:29:00.924266Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:932:2762], Recipient [2:932:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:29:00.924306Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:29:00.924375Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-17T18:29:00.924411Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:29:00.924448Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037892 for ReadTableScan 2025-04-17T18:29:00.924479Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit ReadTableScan 2025-04-17T18:29:00.924544Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037892 error: , IsFatalError: 0 2025-04-17T18:29:00.924585Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-17T18:29:00.924616Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit ReadTableScan 2025-04-17T18:29:00.924646Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompleteOperation 2025-04-17T18:29:00.924674Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-17T18:29:00.924881Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is DelayComplete 2025-04-17T18:29:00.924914Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2025-04-17T18:29:00.924942Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2025-04-17T18:29:00.924971Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[71500:281474976715661] at 72075186224037892 on unit CompletedOperations 2025-04-17T18:29:00.925005Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-17T18:29:00.925029Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2025-04-17T18:29:00.925123Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037892 has finished 2025-04-17T18:29:00.925153Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:29:00.925178Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2025-04-17T18:29:00.925207Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-17T18:29:00.925234Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-17T18:29:00.938974Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-17T18:29:00.939050Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-17T18:29:00.939088Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-17T18:29:00.939149Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1476:3277], exec latency: 0 ms, propose latency: 1 ms 2025-04-17T18:29:00.939197Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-17T18:29:00.939388Z node 2 :TX_PROXY DEBUG: Actor# [2:1476:3277] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-04-17T18:29:00.939438Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2025-04-17T18:29:00.939591Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1476:3277], Recipient [2:769:2643]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-17T18:29:00.939632Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-17T18:29:00.939718Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:769:2643], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:29:00.939750Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:29:00.939813Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:29:00.939851Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:29:00.939890Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2025-04-17T18:29:00.939920Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2025-04-17T18:29:00.939957Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [71500:281474976715661] at 72075186224037890 2025-04-17T18:29:00.939995Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 
2025-04-17T18:29:00.940026Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-17T18:29:00.940055Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2025-04-17T18:29:00.940085Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-17T18:29:00.940311Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Continue 2025-04-17T18:29:00.940341Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:29:00.940370Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-17T18:29:00.940398Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T18:29:00.940424Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T18:29:00.940973Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1508:3306], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-17T18:29:00.941015Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-17T18:29:00.941187Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:29:00.941298Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-17T18:29:00.943119Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-17T18:29:00.943162Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037890 2025-04-17T18:29:00.943346Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:769:2643], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:29:00.943383Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:29:00.943443Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-17T18:29:00.943483Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:29:00.943518Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for ReadTableScan 2025-04-17T18:29:00.943548Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-17T18:29:00.943581Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2025-04-17T18:29:00.943621Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-17T18:29:00.943667Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2025-04-17T18:29:00.943700Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2025-04-17T18:29:00.943727Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-17T18:29:00.943937Z node 2 :TX_DATASHARD TRACE: Execution 
status for [71500:281474976715661] at 72075186224037890 is DelayComplete 2025-04-17T18:29:00.943965Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2025-04-17T18:29:00.944022Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2025-04-17T18:29:00.944048Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompletedOperations 2025-04-17T18:29:00.944080Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-17T18:29:00.944102Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompletedOperations 2025-04-17T18:29:00.944147Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037890 has finished 2025-04-17T18:29:00.944178Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:29:00.944210Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-17T18:29:00.944236Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-17T18:29:00.944263Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-17T18:29:00.955176Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:29:00.955264Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-17T18:29:00.955299Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-17T18:29:00.955357Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1476:3277], exec latency: 1 ms, propose latency: 1 ms 2025-04-17T18:29:00.955402Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:29:00.955583Z node 2 :TX_PROXY DEBUG: Actor# [2:1476:3277] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2025-04-17T18:29:00.955664Z node 2 :TX_PROXY INFO: Actor# [2:1476:3277] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> TopicSessionTests::RestartSessionIfNewClientWithOffset |98.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest |98.3%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> KqpTpch::Query06 [GOOD] >> 
KqpTpch::Query07 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery [GOOD] >> Functions::CreateRequest [GOOD] >> Functions::CreateResponse [GOOD] >> KafkaProtocol::ProduceScenario |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutToken >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings >> 
test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding >> test_drain.py::TestHive::test_drain_on_stop >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit >> KafkaProtocol::ProduceScenario [GOOD] >> KafkaProtocol::FetchScenario >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TopicSessionTests::ReadNonExistentTopic >> KeyValueGRPCService::SimpleWriteReadWithoutToken [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> 
QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> KafkaProtocol::FetchScenario [GOOD] >> KafkaProtocol::BalanceScenario |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD] >> Coordinator::LastEmptyStepResent >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> 
test_http_api.py::TestHttpApi::test_pg_results >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount >> TopicSessionTests::SlowSession |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2025-04-17T18:28:30.493146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350758795535144:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:30.493434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0004fc/r3tmp/tmpng2i7o/pdisk_1.dat 2025-04-17T18:28:30.894280Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29577, node 1 2025-04-17T18:28:30.960656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:30.960930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:30.973101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:31.005609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:31.005639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:31.005648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:31.006105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:31.336178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:31.363597Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:31.383240Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 TClient is connected to server localhost:22705 waiting... 2025-04-17T18:28:33.601930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-04-17T18:28:33.603861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744914511403 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SQS" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744914511424 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 184467... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:34.080012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:34.104913Z node 1 :TX_PROXY ERROR: Actor# [1:7494350775975405197:2470] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:22705 waiting... 
2025-04-17T18:28:34.365461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480 2025-04-17T18:28:34.372322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1744914514128, "myFolder", "{\"k1\": \"v1\"}"); 2025-04-17T18:28:34.543346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350775975405371:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:34.543460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:34.543935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350775975405383:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:34.559878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480 2025-04-17T18:28:34.572149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350775975405385:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2025-04-17T18:28:34.675736Z node 1 :TX_PROXY ERROR: Actor# [1:7494350775975405440:2630] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:35.514973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350758795535144:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:35.515049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:28:36.974782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2g50k4cv0mhhj7xvjtv1m6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU0OWE3MzEtMTM1ZGY0ZDMtNzg3NTRhZS1mYzkxNjAwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:37.617176Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01js2g536793j9v144aeqdt4dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgzZmE5N2MtMjRkMTIwNWYtNzdiYmJjZTctOGUyNmEzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:37.733410Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2g53mb6114zk59c11mzqt8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYxYTFiMWItMTY3ZjM2MTgtZjgzNDMwZi1jYzk3NjVkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:37.916654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01js2g53qb0d23vasbjjbjge8c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQzMTU0NzMtNzAwZTY4ODMtM2MwZDUyYzUtMzI5NDI2ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:34.128000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:34.128000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-17T18:28:37.921896Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-04-17T18:28:38.025549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2g5407fm8mf18gcdqnr5f3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM3YWE0MmEtN2UzYjE5NzItNDgzZGE5Y2YtNzViNTE5ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:38.033780Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. 
Ctx: { TraceId: 01js2g540g0tjmr5h60tggt3te, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ4ZjA1M2ItNzYxN2VjNWYtMTE4ZmJmOWEtMWE0YTAzOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:38.140473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01js2g543tfkevjmfaw15padnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc5YTcwZTgtM2I4MTI3NDAtZDM5ODcwMzgtNWI0NWJkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:38.146871Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2g5441ffbf256vtnjke4av, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRhYzg2YzAtMmY2YTZmMGUtMjEwNWIwMGUtNTM2NjZmZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... e_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:45.767000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2025-04-17T18:28:45.767000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:45.767000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:45.767000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:44.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-17T18:28:49.140551Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:44.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-17T18:28:49.140626Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:45.767000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-17T18:28:49.140657Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2025-04-17T18:28:44.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-17T18:28:49.140686Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-17T18:28:45.767000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-17T18:28:49.140713Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-04-17T18:28:49.244894Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710751. Ctx: { TraceId: 01js2g5eyt4j6h2crs0gz03thv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhlYzM5ODctMzYyYTM1MzMtOGZiYmNiZDktMTI0YmFjMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.255330Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710752. Ctx: { TraceId: 01js2g5ez4b0ae5hthjptbbvqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhlYzM5ODctMzYyYTM1MzMtOGZiYmNiZDktMTI0YmFjMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.269662Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710753. Ctx: { TraceId: 01js2g5ezk39g3r2detb2e87pb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2MDM5NDItZDgyMDdhMS00ZmU1YWUzNC1kYzY0YTk2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.381833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710754. Ctx: { TraceId: 01js2g5f318pmyewpvxmkxfrep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIwYjg3YzAtMzNlZDc3NjUtYWUwY2M2NjYtNjg1ZTExMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.391214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710755. Ctx: { TraceId: 01js2g5f3d2s78ddh5agafm3sy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIwYjg3YzAtMzNlZDc3NjUtYWUwY2M2NjYtNjg1ZTExMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.400869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710756. Ctx: { TraceId: 01js2g5f3p85h4v9cdjsmrzkdj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIzNzJlNDMtYjM3MDRiMTMtNTU0ZTBjNmEtNDkzNTkzMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.514973Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710757. Ctx: { TraceId: 01js2g5f73d6wtst0zpct9nqdq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNhZTIyYTctNDIzZTY3OWYtYmY5NDgzZWItNTAyOTllNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.521650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710758. Ctx: { TraceId: 01js2g5f7eb32kx1mzk15hjt05, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNhZTIyYTctNDIzZTY3OWYtYmY5NDgzZWItNTAyOTllNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T18:28:49.530129Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710759. Ctx: { TraceId: 01js2g5f7rdeaktb4t9xskc5dj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE3NmNjOTQtOTJiODQ2ZTQtZTY4NDM3NDAtOTA5MzY4ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.640624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710760. Ctx: { TraceId: 01js2g5ew63jqne42j43xgjtex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU0OWE3MzEtMTM1ZGY0ZDMtNzg3NTRhZS1mYzkxNjAwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:28:49.646304Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710761. Ctx: { TraceId: 01js2g5fb9aytvshtkmcxadjxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzUwMDYyM2ItNjlkOGEzYy1lZTI1ZjM0MC05OGNhMWM2NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient is connected to server localhost:22705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744914511403 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 5 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1744914514609 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SQS" Pat... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:50.035491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710762:0, at schemeshard: 72057594046644480 waiting... Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-04-17T18:28:50.060246Z node 1 :TX_PROXY ERROR: Actor# [1:7494350844694884208:3640] txid# 281474976710763, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:22705 waiting... 2025-04-17T18:28:50.374012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710764, at schemeshard: 72057594046644480 2025-04-17T18:28:50.420074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710765:1, at schemeshard: 72057594046644480 2025-04-17T18:28:50.444765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710766:0, at schemeshard: 72057594046644480 ===Started add queue batch 2025-04-17T18:29:15.584608Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710767. 
Ctx: { TraceId: 01js2g681ce75nhvrtfencx3xz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU0OWE3MzEtMTM1ZGY0ZDMtNzg3NTRhZS1mYzkxNjAwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:29:20.319884Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710768. Ctx: { TraceId: 01js2g6cm8fmtk6gsvtknxt1ts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU0OWE3MzEtMTM1ZGY0ZDMtNzg3NTRhZS1mYzkxNjAwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:29:21.617788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710769. Ctx: { TraceId: 01js2g6e9x5es3d1q0vg6nxyb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYxMjU1YzItMzM5YmUyMzUtYzhiNGU5NjAtOTNjNTA3OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:29:21.700367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710770. Ctx: { TraceId: 01js2g6en144ttzgke4vhfwkhy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYxMjU1YzItMzM5YmUyMzUtYzhiNGU5NjAtOTNjNTA3OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:29:21.759728Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710771. Ctx: { TraceId: 01js2g6epw6ft2d08kbxckrfrx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYxMjU1YzItMzM5YmUyMzUtYzhiNGU5NjAtOTNjNTA3OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:29:21.766566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710772. Ctx: { TraceId: 01js2g6eq5bst2ee0zpswbwydw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYxMjU1YzItMzM5YmUyMzUtYzhiNGU5NjAtOTNjNTA3OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T18:29:21.943761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710773. Ctx: { TraceId: 01js2g6eqd3b0xk95zfwefvyvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWUyZTlmZi0zYjRiMjVkMS0zZDM5MTk3NC0xOTBkYTlkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |98.4%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection |98.4%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.4%| [TM] {RESULT} ydb/tests/fq/solomon/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit |98.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> TopicSessionTests::SlowSession [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> TopicSessionTests::TwoSessionsWithDifferentSchemes |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> TSentinelTests::BSControllerUnresponsive >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus [GOOD] >> 
KeyValueGRPCService::SimpleWriteReadOverrun >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> BulkUpsert::BulkUpsert [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BadRequest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |98.6%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> KeyValueGRPCService::SimpleWriteReadOverrun [GOOD] >> 
KeyValueGRPCService::SimpleWriteReadRange |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |98.6%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 |98.7%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KafkaProtocol::BalanceScenario [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |98.7%| [TM] {RESULT} ydb/tests/functional/wardens/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> KeyValueGRPCService::SimpleWriteReadRange [GOOD] >> KeyValueGRPCService::SimpleWriteListRange >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> TopicSessionTests::RestartSessionIfQueryStopped >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: 2025-04-17T18:28:03.763251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] 
ActorId: [1:693:2413], Scheduled retry for error: {<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:03.763680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:03.763833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:03.764549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:03.764887Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:03.764980Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00071e/r3tmp/tmpRVRtzp/pdisk_1.dat 2025-04-17T18:28:04.310896Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for the first mediator step 2025-04-17T18:28:04.585189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:04.585887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:04.590399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:04.590498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:04.604843Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:28:04.605264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:04.605674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... found first step to be 500 2025-04-17T18:28:04.874156Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 500 ... waiting for the next mediator step ... found second step to be 1000 ... read step subscribe result: [500, 1000] ... read step subscribe update: 2000 2025-04-17T18:28:05.996859Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 2000 ... read step subscribe result: [2000, 2000] ... read step subscribe update: 2500 ... read step subscribe update: 2500 ... read step subscribe update: 3000 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... 
read step subscribe result: [2000, 6000] 2025-04-17T18:28:08.499402Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-17T18:28:08.499469Z node 1 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 2 2025-04-17T18:28:08.499508Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-17T18:28:08.499531Z node 1 :PIPE_SERVER ERROR: [72057594037968897] NodeDisconnected NodeId# 2 2025-04-17T18:28:08.499557Z node 1 :PIPE_SERVER ERROR: [72057594037936129] NodeDisconnected NodeId# 2 2025-04-17T18:28:08.499731Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:115:2096] ServerId# [1:1071:2627] TabletId# 72057594037932033 PipeClientId# [2:115:2096] 2025-04-17T18:28:08.499984Z node 2 :TX_PROXY WARN: actor# [2:238:2129] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-17T18:28:08.500421Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2025-04-17T18:28:08.500489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2025-04-17T18:28:08.501181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2025-04-17T18:28:08.507022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:08.521289Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:28:08.522485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 7000 ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 2025-04-17T18:28:18.600793Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:698:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:18.601345Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:18.601528Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:18.603424Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:695:2355], Scheduled retry for error: {<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:18.603896Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:18.604016Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00071e/r3tmp/tmpE1PfUf/pdisk_1.dat 2025-04-17T18:28:18.895555Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:19.045152Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2025-04-17T18:28:19.131496Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:19.131630Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:19.134831Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:19.134913Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:19.149274Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-17T18:28:19.149950Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:19.150416Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:19.772322Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1143:2376] at coordinator 72057594046316545 with seqNo 234 and cookie 345 2025-04-17T18:28:20.508442Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 124 and cookie 245 2025-04-17T18:28:20.520315Z node 3 :TX_COORDINATOR DEBUG: Ignored TEvSubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 123 existing seqNo 124 2025-04-17T18:28:21.163349Z node 3 :TX_COORDINATOR DEBUG: Processing TEvUnsubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 124 2025-04-17T18:28:27.903773Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:27.904084Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:27.904304Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00071e/r3tmp/tmp859jNT/pdisk_1.dat 2025-04-17T18:28:28.269264Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:28.313490Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:28.313628Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:28.325176Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected Rebooting coordinator to restore config 2025-04-17T18:28:28.575464Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-17T18:28:28.576897Z node 5 :TX_COORDINATOR INFO: Coordinator# 72057594046316545 restoring static processing params 2025-04-17T18:28:28.577553Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-17T18:28:28.577791Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 version# 0 TTxConfigure Complete Rebooting coordinator a second time 2025-04-17T18:28:28.907247Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-17T18:28:28.908737Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-17T18:28:33.513064Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:486:2218], Scheduled retry for error: {<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:33.513340Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:33.513496Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00071e/r3tmp/tmprm55Ai/pdisk_1.dat 2025-04-17T18:28:33.844262Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:34.079179Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:34.079340Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:34.095331Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected Sending CreateDatabase request 2025-04-17T18:28:34.976134Z node 6 :CMS_TENANT ... d ... coordinator 72057594046316545 gen 2 is planning step 1000 2025-04-17T18:29:55.203753Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 50 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 50}}} marker# M1 2025-04-17T18:29:55.203862Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1000 2025-04-17T18:29:55.203935Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1000 2025-04-17T18:29:55.204017Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} 2025-04-17T18:29:55.204113Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} ... waiting for blocked put responses 2025-04-17T18:29:55.217343Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2025-04-17T18:29:55.217437Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 step# 1050 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 ... waiting for blocked put responses ... coordinator 72057594046316545 gen 2 is planning step 1050 2025-04-17T18:29:55.229902Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000000 has been planned 2025-04-17T18:29:55.230030Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:12:1:24576:168:0] response ... waiting for planning for the required step ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 1100 ... starting a new coordinator instance ... waiting for migrated state 2025-04-17T18:29:55.278713Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-17T18:29:55.279147Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-17T18:29:55.285238Z node 20 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete ... 
blocking state response from [20:549:2400] to [20:690:2563] LastSentStep: 1000 LastAcquiredStep: 0 LastConfirmedStep: 0 ... unblocking put responses and requests 2025-04-17T18:29:55.286090Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-17T18:29:55.286304Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 stepId# 1050 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy 2025-04-17T18:29:55.287703Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:562:2491] disconnnected 2025-04-17T18:29:55.287804Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:562:2491] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2025-04-17T18:29:55.313639Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:697:2573] connected 2025-04-17T18:29:55.313803Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2025-04-17T18:29:55.313896Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:693:2571] Cookie# 1 CompleteStep# 1000 LatestKnownStep# 1000 SubjectiveTime# 952 Coordinator# 72057594046316545 2025-04-17T18:29:55.314009Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2025-04-17T18:29:55.314078Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 step# 1100 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 2025-04-17T18:29:55.315196Z node 20 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-17T18:29:55.315285Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1050, txid# 10000000 marker# C2 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 1050 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-17T18:29:55.320467Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1050 2025-04-17T18:29:55.320553Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1050] transactions [1] 2025-04-17T18:29:55.320739Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:570:2496] ExecQueue {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2025-04-17T18:29:55.320923Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2025-04-17T18:29:55.321022Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND Ev to# [20:571:2497] step# 1050 forTablet# 72057594047365120 txid# 10000000 marker# M3 2025-04-17T18:29:55.321107Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1050 2025-04-17T18:29:55.321165Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1050 2025-04-17T18:29:55.321301Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1050 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}} marker# M4 2025-04-17T18:29:55.321505Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-17T18:29:55.321696Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-17T18:29:55.326710Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:701:2576] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:29:55.326839Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1050 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 1050 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... waiting for planned another persistent tx ... 
coordinator 72057594046316545 gen 3 is planning step 1100 2025-04-17T18:29:55.338925Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000011 has been planned 2025-04-17T18:29:55.339060Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2025-04-17T18:29:55.340034Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1100, txid# 10000011 marker# C2 2025-04-17T18:29:55.340137Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 stepId# 1100 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 1100 PrevStep: 1050 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-17T18:29:55.340484Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1100 2025-04-17T18:29:55.340555Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1100] transactions [1] 2025-04-17T18:29:55.340707Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:570:2496] ExecQueue {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2025-04-17T18:29:55.340892Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2025-04-17T18:29:55.340991Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND Ev to# [20:571:2497] step# 1100 forTablet# 72057594047365120 txid# 10000011 marker# M3 2025-04-17T18:29:55.341083Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1100 2025-04-17T18:29:55.341143Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1100 2025-04-17T18:29:55.341263Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1100 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}} marker# M4 2025-04-17T18:29:55.341379Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1100 MediatorId# 72057594046382081 TabletID 72057594047365120} 2025-04-17T18:29:55.341472Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} 2025-04-17T18:29:55.341666Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 1100 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 1150 ... 
observed step: Step: 1150 PrevStep: 1100 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-17T18:29:55.354114Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1100 To# 1150Steps: {{TCoordinatorStep step# 1150 PrevStep# 1100}}} marker# M1 2025-04-17T18:29:55.354207Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1150 2025-04-17T18:29:55.354274Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1150 2025-04-17T18:29:55.354333Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} 2025-04-17T18:29:55.354378Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} |98.7%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2025-04-17T18:28:24.951974Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350731037426711:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:24.963773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000419/r3tmp/tmpnFE57F/pdisk_1.dat 2025-04-17T18:28:25.452923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:25.485210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:25.485328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:25.506875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7716, node 1 2025-04-17T18:28:25.761621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:25.761649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:25.761661Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:25.761809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15627 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:26.302919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:26.490044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:28:29.626347Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494350754585382926:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:29.626976Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000419/r3tmp/tmp84OQ8B/pdisk_1.dat 2025-04-17T18:28:29.868735Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29508, node 4 2025-04-17T18:28:29.969482Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:29.969550Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:29.980481Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:30.081242Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:30.081264Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:30.081272Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:30.081427Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63003 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:30.408160Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:30.515990Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:28:34.546530Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7494350772780996386:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:34.546757Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000419/r3tmp/tmp7tliDH/pdisk_1.dat 2025-04-17T18:28:34.781449Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13516, node 7 2025-04-17T18:28:34.873994Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:34.874015Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:34.874023Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:34.874155Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:28:34.892706Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:34.892834Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:34.899464Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24528 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:35.129559Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:35.201736Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:28:39.205740Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7494350797038204573:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:39.210330Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000419/r3tmp/tmp2h7vG2/pdisk_1.dat 2025-04-17T18:28:39.388708Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:39.421904Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:39.422001Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:39.425119Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14585, node 10 2025-04-17T18:28:39.553311Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:39.553333Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:39.553341Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:39.553498Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62629 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28 ... r3tmp/tmpQMpDtw/pdisk_1.dat 2025-04-17T18:29:23.397425Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:29:23.530085Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:29:23.530944Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:29:23.548249Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21298, node 28 2025-04-17T18:29:23.877720Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:29:23.877746Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:29:23.877764Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:29:23.877940Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:29:24.568137Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:29:24.748006Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000419/r3tmp/tmpmo1dkq/pdisk_1.dat 2025-04-17T18:29:32.787745Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7494351021767094624:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:29:32.788134Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:29:33.078670Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:29:33.157865Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:29:33.157992Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:29:33.161568Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28727, node 31 2025-04-17T18:29:33.511028Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:29:33.511060Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:29:33.511072Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:29:33.511255Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:29:34.038113Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:29:34.186725Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:29:41.875673Z node 34 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7494351062814199831:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000419/r3tmp/tmpiRv5A6/pdisk_1.dat 2025-04-17T18:29:42.014696Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:29:42.139967Z node 34 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:29:42.190148Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:29:42.190347Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:29:42.193940Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18338, node 34 2025-04-17T18:29:42.354606Z node 34 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:29:42.354638Z node 34 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:29:42.354653Z node 34 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:29:42.354892Z node 34 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28450 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:29:42.696011Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:29:42.848491Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-17T18:29:49.827783Z node 37 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7494351096610897966:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:29:49.838687Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000419/r3tmp/tmpIvYxel/pdisk_1.dat 2025-04-17T18:29:50.066050Z node 37 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:29:50.108792Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:29:50.108902Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:29:50.117311Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62209, node 37 2025-04-17T18:29:50.252324Z node 37 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:29:50.252348Z node 37 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:29:50.252362Z node 37 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:29:50.252549Z node 37 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:29:50.568846Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T18:29:50.680718Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 |98.7%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> KafkaProtocol::OffsetCommitAndFetchScenario [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> TSentinelTests::BSControllerUnresponsive [GOOD] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> KeyValueGRPCService::SimpleWriteListRange [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatus >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2025-04-17T18:28:26.701932Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-17T18:28:26.702034Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-17T18:28:26.702157Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-17T18:28:26.702229Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-17T18:28:26.702297Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-04-17T18:28:26.702409Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-17T18:28:26.705228Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-17T18:28:26.713630Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 
FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... /pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-04-17T18:29:57.568942Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-288.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-289.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-290.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-291.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-04-17T18:29:57.569093Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-264.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-265.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-266.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-267.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-04-17T18:29:57.569241Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-268.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-269.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-270.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-271.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-04-17T18:29:57.569311Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-17T18:29:57.569735Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 71:286, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-17T18:29:57.569790Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 68:273, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 
2025-04-17T18:29:57.569825Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 70:283, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0
2025-04-17T18:29:57.569882Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3
2025-04-17T18:29:57.581033Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1
2025-04-17T18:29:57.581109Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3
2025-04-17T18:29:57.594397Z node 65 :CMS DEBUG: [Sentinel] [Main] UpdateState
2025-04-17T18:29:57.594461Z node 65 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater
2025-04-17T18:29:57.594559Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2
2025-04-17T18:29:57.594595Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3
2025-04-17T18:29:57.594774Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 65, wbId# [65:8388350642965737326:1634689637]
2025-04-17T18:29:57.594822Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 66, wbId# [66:8388350642965737326:1634689637]
2025-04-17T18:29:57.594856Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 67, wbId# [67:8388350642965737326:1634689637]
2025-04-17T18:29:57.594882Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 68, wbId# [68:8388350642965737326:1634689637]
2025-04-17T18:29:57.594921Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 69, wbId# [69:8388350642965737326:1634689637]
2025-04-17T18:29:57.594952Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 70, wbId# [70:8388350642965737326:1634689637]
2025-04-17T18:29:57.594978Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 71, wbId# [71:8388350642965737326:1634689637]
2025-04-17T18:29:57.595002Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 72, wbId# [72:8388350642965737326:1634689637]
2025-04-17T18:29:57.595299Z node 65 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { }, cookie# 123
2025-04-17T18:29:57.595327Z node 65 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error#
2025-04-17T18:29:57.595651Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 65, response# PDiskStateInfo { PDiskId: 260 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-260.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 261 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-261.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 262 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-262.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 263 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-263.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.596140Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 69, response# PDiskStateInfo { PDiskId: 276 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-276.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 277 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-277.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 278 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-278.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 279 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-279.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.596396Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 70, response# PDiskStateInfo { PDiskId: 280 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-280.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 281 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-281.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 282 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-282.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 283 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-283.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.596545Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-284.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-285.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-286.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.596715Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-288.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-289.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-290.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-291.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.596831Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 272 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-272.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-273.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-274.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-275.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.596984Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-268.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-269.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-270.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-271.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.597111Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-264.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-265.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-266.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-267.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027
2025-04-17T18:29:57.597185Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s
>> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD]
>> test_inserts.py::TestYdbInsertsOperations::test_transactional_update
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD]
Test command err:
ResourceBrokerSelfConfig: LimitBytes: 0 QueryExecutionLimitBytes: 0
2025-04-17T18:28:30.215212Z node 1 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463
2025-04-17T18:28:30.216598Z node 1 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB
2025-04-17T18:28:30.217139Z node 1 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB
2025-04-17T18:28:30.218815Z node 1 :MEMORY_CONTROLLER INFO: Bootstrapped with config HardLimitBytes: 209715200
2025-04-17T18:28:30.230486Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-17T18:28:30.233515Z node 1 :TABLET_SAUSAGECACHE NOTICE: Bootstrap with config MemoryLimit: 33554432
2025-04-17T18:28:30.245384Z node 1 :MEMORY_CONTROLLER INFO: Consumer SharedCache [1:20:2067] registered
2025-04-17T18:28:30.245726Z node 1 :TABLET_SAUSAGECACHE NOTICE: Register memory consumer
2025-04-17T18:28:30.286080Z node 1 :RESOURCE_BROKER INFO: New config diff: 
Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2025-04-17T18:28:30.286899Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" 
DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } ResourceLimit { Cpu: 20 Memory: 62914560 } 2025-04-17T18:28:30.288159Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true 2025-04-17T18:28:30.293577Z node 1 :MEMORY_CONTROLLER INFO: ResourceBroker configure result Success: true 2025-04-17T18:28:30.337386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:103:2149], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:28:30.337531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:28:30.337847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T18:28:30.465610Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:439:2400] 1 registered 2025-04-17T18:28:30.482015Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2402] 0 registered 2025-04-17T18:28:30.488776Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2402] 2 registered 2025-04-17T18:28:30.489109Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2402] 4 registered 2025-04-17T18:28:30.489297Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2402] 5 registered 2025-04-17T18:28:30.491801Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:462:2404] 1 registered 2025-04-17T18:28:30.492090Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:462:2404] 2 registered 2025-04-17T18:28:30.541012Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 1 registered 2025-04-17T18:28:30.541332Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 2 registered 2025-04-17T18:28:30.584604Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 3 registered 2025-04-17T18:28:30.584881Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 4 registered 2025-04-17T18:28:30.585768Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 5 registered 2025-04-17T18:28:30.585928Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 6 registered 2025-04-17T18:28:30.586068Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 7 registered 2025-04-17T18:28:30.586328Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 8 registered 2025-04-17T18:28:30.586539Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 9 registered 2025-04-17T18:28:30.586701Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 10 registered 2025-04-17T18:28:30.587738Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 11 registered 2025-04-17T18:28:30.587900Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 12 registered 2025-04-17T18:28:30.588042Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 13 registered 2025-04-17T18:28:30.589413Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 14 registered 2025-04-17T18:28:30.595659Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 15 registered 2025-04-17T18:28:30.613384Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 16 registered 2025-04-17T18:28:30.613753Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 17 registered 2025-04-17T18:28:30.614072Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 18 registered 2025-04-17T18:28:30.614258Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 19 registered 2025-04-17T18:28:30.616016Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 20 registered 2025-04-17T18:28:30.616209Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 22 registered 2025-04-17T18:28:30.638071Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 23 registered 2025-04-17T18:28:30.639551Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 24 registered 2025-04-17T18:28:30.642601Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 25 registered 
2025-04-17T18:28:30.642826Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 26 registered 2025-04-17T18:28:30.646840Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 27 registered 2025-04-17T18:28:30.647029Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 28 registered 2025-04-17T18:28:30.647152Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 29 registered 2025-04-17T18:28:30.647306Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 30 registered 2025-04-17T18:28:30.647711Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 31 registered 2025-04-17T18:28:30.650446Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 32 registered 2025-04-17T18:28:30.651576Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 33 registered 2025-04-17T18:28:30.651930Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 34 registered 2025-04-17T18:28:30.652146Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 35 registered 2025-04-17T18:28:30.652271Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 36 registered 2025-04-17T18:28:30.652465Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 37 registered 2025-04-17T18:28:30.657475Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 38 registered 2025-04-17T18:28:30.663013Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 39 registered 2025-04-17T18:28:30.667918Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 40 registered 2025-04-17T18:28:30.669205Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 41 registered 2025-04-17T18:28:30.674298Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 42 registered 2025-04-17T18:28:30.682593Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 43 registered 2025-04-17T18:28:30.682905Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 44 registered 2025-04-17T18:28:30.683110Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 45 registered 2025-04-17T18:28:30.683268Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 46 registered 2025-04-17T18:28:30.683393Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 47 registered 2025-04-17T18:28:30.683612Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 48 registered 2025-04-17T18:28:30.683985Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 49 registered 2025-04-17T18:28:30.684308Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2406] 50 registered 2025-04-17T18:28:30.684532Z node 1 :MEMORY_CONTROLLER TRACE: ... 
930128Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 105 registered 2025-04-17T18:29:58.930165Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 106 registered 2025-04-17T18:29:58.930241Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 107 registered 2025-04-17T18:29:58.930304Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 108 registered 2025-04-17T18:29:58.930346Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 109 registered 2025-04-17T18:29:58.930378Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 110 registered 2025-04-17T18:29:58.930423Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 111 registered 2025-04-17T18:29:58.930462Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 113 registered 2025-04-17T18:29:58.930505Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 114 registered 2025-04-17T18:29:58.930539Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 115 registered 2025-04-17T18:29:58.930577Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 116 registered 2025-04-17T18:29:58.930970Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 117 registered 2025-04-17T18:29:58.931146Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:470:2405] 118 registered 2025-04-17T18:29:58.931784Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 17 registered 2025-04-17T18:29:58.931892Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 0 registered 2025-04-17T18:29:58.931980Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 18 registered 2025-04-17T18:29:58.932109Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 1 registered 2025-04-17T18:29:58.932399Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 19 registered 2025-04-17T18:29:58.932516Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 2 registered 2025-04-17T18:29:58.932598Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 20 registered 2025-04-17T18:29:58.932719Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 3 registered 2025-04-17T18:29:58.932976Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 21 registered 2025-04-17T18:29:58.933148Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 4 registered 2025-04-17T18:29:58.933205Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 6 registered 2025-04-17T18:29:58.933275Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 9 registered 2025-04-17T18:29:58.933336Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 10 registered 2025-04-17T18:29:58.933380Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 13 registered 2025-04-17T18:29:58.933422Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 14 registered 2025-04-17T18:29:58.933494Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:488:2407] 16 registered 2025-04-17T18:29:58.933529Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:522:2411] 7 registered 2025-04-17T18:29:58.933648Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:522:2411] 1 registered 2025-04-17T18:29:58.933691Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:522:2411] 2 registered 2025-04-17T18:29:58.933759Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:522:2411] 3 registered 2025-04-17T18:29:58.933820Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:522:2411] 4 registered 2025-04-17T18:29:58.933873Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:522:2411] 5 registered 2025-04-17T18:29:58.933912Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:522:2411] 6 registered 2025-04-17T18:29:58.933958Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 
102 registered 2025-04-17T18:29:58.934014Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 103 registered 2025-04-17T18:29:58.934047Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 1 registered 2025-04-17T18:29:58.934086Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 104 registered 2025-04-17T18:29:58.934120Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 2 registered 2025-04-17T18:29:58.934164Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 3 registered 2025-04-17T18:29:58.934235Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 4 registered 2025-04-17T18:29:58.934276Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 5 registered 2025-04-17T18:29:58.934312Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 6 registered 2025-04-17T18:29:58.934345Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 7 registered 2025-04-17T18:29:58.934387Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 100 registered 2025-04-17T18:29:58.934418Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:523:2413] 101 registered 2025-04-17T18:29:58.934459Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2415] 1 registered 2025-04-17T18:29:58.934495Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2415] 2 registered 2025-04-17T18:29:58.934533Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2415] 3 registered test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005e8/r3tmp/tmpqpDifN/pdisk_1.dat 2025-04-17T18:29:58.969804Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 1 registered 2025-04-17T18:29:58.970155Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 2 registered 2025-04-17T18:29:58.970698Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 3 registered 2025-04-17T18:29:58.970749Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 4 registered 2025-04-17T18:29:58.970845Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 120 registered 2025-04-17T18:29:58.970980Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 5 registered 2025-04-17T18:29:58.971053Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 121 registered 2025-04-17T18:29:58.971240Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 6 registered 2025-04-17T18:29:58.971280Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 122 registered 2025-04-17T18:29:58.971326Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 7 registered 2025-04-17T18:29:58.971386Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 123 registered 2025-04-17T18:29:58.971536Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 8 registered 2025-04-17T18:29:58.971579Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 125 registered 2025-04-17T18:29:58.971628Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 126 registered 2025-04-17T18:29:58.971664Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 127 registered 2025-04-17T18:29:58.971698Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 128 registered 2025-04-17T18:29:58.971851Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 129 registered 2025-04-17T18:29:58.971893Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 100 registered 2025-04-17T18:29:58.971929Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 101 registered 2025-04-17T18:29:58.971963Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 131 registered 2025-04-17T18:29:58.971997Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 102 registered 2025-04-17T18:29:58.972034Z node 9 
:MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 103 registered 2025-04-17T18:29:58.972069Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:509:2410] 105 registered 2025-04-17T18:29:59.044960Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:29:59.090110Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:29:59.100363Z node 9 :MEMORY_CONTROLLER INFO: Config updated QueryExecutionLimitPercent: 15 2025-04-17T18:29:59.101013Z node 9 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-17T18:29:59.142710Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:29:59.142900Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:29:59.154441Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:29:59.413305Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.5KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-17T18:29:59.414088Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-17T18:29:59.414268Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.5KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-17T18:29:59.414328Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-17T18:29:59.414473Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-04-17T18:29:59.631204Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.6KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-17T18:29:59.631817Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-17T18:29:59.631877Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.6KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-17T18:29:59.631913Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-17T18:29:59.632010Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-04-17T18:29:59.822693Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-17T18:29:59.823511Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB 
Min: 40MiB Max: 100MiB 2025-04-17T18:29:59.823596Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-17T18:29:59.823669Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-17T18:29:59.823824Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB 2025-04-17T18:29:59.979604Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33.3KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-17T18:29:59.980232Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB Min: 40MiB Max: 100MiB 2025-04-17T18:29:59.980286Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33.3KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-17T18:29:59.980322Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-17T18:29:59.980421Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls |98.7%| [TM] {RESULT} ydb/core/cms/ut_sentinel/unittest |98.7%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection >> RowDispatcherTests::OneClientOneSession >> 
RowDispatcherTests::OneClientOneSession [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 >> RowDispatcherTests::TwoClientOneSession >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection >> RowDispatcherTests::TwoClientOneSession [GOOD] >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> KeyValueGRPCService::SimpleGetStorageChannelStatus [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> RowDispatcherTests::ProcessNoSession >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth [GOOD] >> KafkaProtocol::CreatePartitionsScenario >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings >> 
DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |98.8%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> KqpTpch::Query15 [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> KafkaProtocol::CreatePartitionsScenario [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding >> KeyValueGRPCService::SimpleListPartitions [GOOD] >> KafkaProtocol::DescribeConfigsScenario >> KqpTpch::Query16 >> TDataShardRSTest::TestDelayedRSAckForUnknownTx >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> TTxDataShardSampleKScan::BadRequest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding >> KqpTpch::Query16 [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> KqpTpch::Query17 >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TTxDataShardSampleKScan::BadRequest [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> QuoterWithKesusTest::KesusRecreation >> TTxDataShardSampleKScan::RunScan >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> KqpTpch::Query17 [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> KafkaProtocol::DescribeConfigsScenario [GOOD] >> 
test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> DataShardStats::HasSchemaChanges_ByKeyFilter >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx >> KqpTpch::Query18 >> KafkaProtocol::AlterConfigsScenario >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.9%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2025-04-17T18:28:24.740765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350732487027709:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:24.740888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0007c6/r3tmp/tmpfvxytD/pdisk_1.dat 2025-04-17T18:28:25.369476Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:25.389953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:25.390079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:25.397335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19830, node 1 2025-04-17T18:28:25.459327Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-17T18:28:25.459710Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-17T18:28:25.459831Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-17T18:28:25.459911Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-17T18:28:25.460923Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-17T18:28:25.460938Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-17T18:28:25.460977Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-17T18:28:25.460992Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-17T18:28:25.543457Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-17T18:28:25.543498Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-17T18:28:25.550937Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:28:25.551013Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:28:25.551107Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-17T18:28:25.551125Z node 1 
:GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-17T18:28:25.634727Z node 1 :GRPC_SERVER DEBUG: [0x51a000027c80] created request Name# BlobStorageConfig 2025-04-17T18:28:25.637365Z node 1 :GRPC_SERVER DEBUG: [0x51a000028280] created request Name# HiveCreateTablet 2025-04-17T18:28:25.637811Z node 1 :GRPC_SERVER DEBUG: [0x51a000028880] created request Name# TabletStateRequest 2025-04-17T18:28:25.638127Z node 1 :GRPC_SERVER DEBUG: [0x51a000028e80] created request Name# SchemeOperationStatus 2025-04-17T18:28:25.639171Z node 1 :GRPC_SERVER DEBUG: [0x51a000029480] created request Name# ChooseProxy 2025-04-17T18:28:25.639699Z node 1 :GRPC_SERVER DEBUG: [0x51a000029a80] created request Name# ResolveNode 2025-04-17T18:28:25.640663Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a080] created request Name# FillNode 2025-04-17T18:28:25.641015Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# DrainNode 2025-04-17T18:28:25.641335Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# InterconnectDebug 2025-04-17T18:28:25.641692Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# TestShardControl 2025-04-17T18:28:25.644285Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# RegisterNode 2025-04-17T18:28:25.644656Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# CmsRequest 2025-04-17T18:28:25.645864Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# ConsoleRequest 2025-04-17T18:28:25.646244Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# SchemeInitRoot 2025-04-17T18:28:25.649553Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# PersQueueRequest 2025-04-17T18:28:25.649958Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# SchemeOperation 2025-04-17T18:28:25.650365Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# SchemeDescribe 2025-04-17T18:28:25.743045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:28:25.743072Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:28:25.743082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:28:25.743280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T18:28:26.432461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2-pool" Kind: "hdd2" } StoragePools { Name: "hdd-pool" Kind: "hdd" } StoragePools { Name: "hdd1-pool" Kind: "hdd1" } StoragePools { Name: "ssd-pool" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:28:26.433971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:26.435926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:28:26.438056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:28:26.438114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:26.443144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:28:26.445216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:28:26.445415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:26.445453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:28:26.445573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-17T18:28:26.445595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-17T18:28:26.448703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:26.448737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:28:26.448750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-17T18:28:26.450234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:26.450258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:28:26.450286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-17T18:28:26.450311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-17T18:28:26.479779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:28:26.480248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:28:26.480273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-17T18:28:26.480292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T18:28:26.483033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-17T18:28:26.483223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:28:26.486702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744914506531, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:28:26.486868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744914506531 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:28:26.486903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-17T18:28:26.488695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-17T18:28:26.488735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-17T18:28:26.488928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:28:26.488975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:28:26.491551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:28:26.491586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17 ... 7T18:30:12.153770Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:7494351189709152843:2371], at schemeshard: 72057594046644480, txId: 281474976715662, path id: 3 2025-04-17T18:30:12.153840Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T18:30:12.153878Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976715662:0 ProgressState 2025-04-17T18:30:12.153941Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-17T18:30:12.153955Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-17T18:30:12.153985Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-17T18:30:12.153999Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-17T18:30:12.154023Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: false 2025-04-17T18:30:12.154044Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-17T18:30:12.154068Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-17T18:30:12.154081Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-17T18:30:12.154274Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-17T18:30:12.154299Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715662, publications: 2, subscribers: 1 2025-04-17T18:30:12.154315Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 2], 7 2025-04-17T18:30:12.154332Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 3], 18446744073709551615 2025-04-17T18:30:12.154712Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-17T18:30:12.154803Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-17T18:30:12.154822Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-17T18:30:12.154843Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-17T18:30:12.154867Z node 33 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-17T18:30:12.155147Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-17T18:30:12.155227Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-17T18:30:12.155244Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-17T18:30:12.155261Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-17T18:30:12.155279Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-17T18:30:12.155340Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 1 2025-04-17T18:30:12.155364Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [33:7494351194004120822:2345] 2025-04-17T18:30:12.157620Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:30:12.157645Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:30:12.157658Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-17T18:30:12.157758Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-04-17T18:30:12.157802Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-04-17T18:30:12.160636Z node 33 :GRPC_SERVER DEBUG: Got grpc request# ListDirectoryRequest, traceId# 01js2g7zy011c09yh7ht11sw59, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:34732, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-17T18:30:12.160884Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-17T18:30:12.161265Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-17T18:30:12.161521Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-17T18:30:12.161716Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-17T18:30:12.161872Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 
72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-17T18:30:12.162204Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-17T18:30:12.162410Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-17T18:30:12.162438Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-17T18:30:12.162508Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-17T18:30:12.164926Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037889 not found 2025-04-17T18:30:12.164959Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037890 not found 2025-04-17T18:30:12.164974Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037888 not found 2025-04-17T18:30:12.165440Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-17T18:30:12.165466Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-17T18:30:12.165520Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-17T18:30:12.165529Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-17T18:30:12.165552Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-17T18:30:12.165570Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-17T18:30:12.165615Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-17T18:30:12.166602Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-17T18:30:12.167360Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037890 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-17T18:30:12.167721Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-17T18:30:12.172649Z node 33 :GRPC_SERVER DEBUG: [0x51a000071a80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.172996Z node 33 :GRPC_SERVER DEBUG: [0x51a000131480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.173292Z node 33 :GRPC_SERVER DEBUG: [0x51a00011dc80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.173584Z node 33 :GRPC_SERVER DEBUG: [0x51a00007c280] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.173913Z node 33 :GRPC_SERVER DEBUG: [0x51a00007bc80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.174200Z node 33 :GRPC_SERVER DEBUG: [0x51a000035480] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.174492Z node 33 :GRPC_SERVER DEBUG: [0x51a000082e80] received request 
Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.174798Z node 33 :GRPC_SERVER DEBUG: [0x51a000073880] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.175090Z node 33 :GRPC_SERVER DEBUG: [0x51a000094880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.175345Z node 33 :GRPC_SERVER DEBUG: [0x51a000143480] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.175634Z node 33 :GRPC_SERVER DEBUG: [0x51a00007fe80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.175915Z node 33 :GRPC_SERVER DEBUG: [0x51a00014fa80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.175938Z node 33 :GRPC_SERVER DEBUG: [0x51a000112e80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.176180Z node 33 :GRPC_SERVER DEBUG: [0x51a00002e280] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.176245Z node 33 :GRPC_SERVER DEBUG: [0x51a00009f080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.176446Z node 33 :GRPC_SERVER DEBUG: [0x51a000073280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-17T18:30:12.176509Z node 33 :GRPC_SERVER DEBUG: [0x51a00002fa80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-04-17T18:28:04.338692Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:30:2057] 2025-04-17T18:28:04.339240Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [1:25:2054] 2025-04-17T18:28:04.339372Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:25:2054] 2025-04-17T18:28:04.339411Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:26:2054] 2025-04-17T18:28:04.339431Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:26:2054] 2025-04-17T18:28:04.339460Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:27:2054] 2025-04-17T18:28:04.339477Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:27:2054] 2025-04-17T18:28:04.339602Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-04-17T18:28:04.340731Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-04-17T18:28:04.340872Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-04-17T18:28:04.340934Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-17T18:28:04.348118Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-04-17T18:28:04.348234Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-17T18:28:04.348417Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:31:2055] 2025-04-17T18:28:04.348459Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:31:2055] 2025-04-17T18:28:04.348496Z node 1 :FQ_ROW_DISPATCHER TRACE: 
Coordinator: Send TEvPong to [2:31:2055] 2025-04-17T18:28:04.348549Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:32:2056] 2025-04-17T18:28:04.348579Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:31:2055] to new [2:32:2056] 2025-04-17T18:28:04.348609Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:32:2056] 2025-04-17T18:28:04.348685Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-04-17T18:28:04.348725Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-04-17T18:28:04.348835Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-04-17T18:28:04.348892Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-17T18:28:04.439239Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [5:30:2057] 2025-04-17T18:28:04.439629Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:25:2054] 2025-04-17T18:28:04.439669Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:25:2054] 2025-04-17T18:28:04.439705Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:26:2054] 2025-04-17T18:28:04.439736Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:26:2054] 2025-04-17T18:28:04.439806Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:27:2054] 2025-04-17T18:28:04.439826Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:27:2054] 2025-04-17T18:28:04.439922Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-04-17T18:28:04.440046Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2025-04-17T18:28:04.440176Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2025-04-17T18:28:04.440251Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-04-17T18:28:04.554679Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-04-17T18:28:04.554810Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-04-17T18:28:04.554851Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Successfully bootstrapped, local coordinator id [9:7:2054] 2025-04-17T18:28:04.554909Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-17T18:28:04.554962Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.554998Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.560631Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-17T18:28:04.560698Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.560727Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.560870Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-17T18:28:04.560901Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node 
"local/row_dispatcher//tenant" 2025-04-17T18:28:04.560941Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.613906Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.614029Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.614750Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.614814Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.622418Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.622617Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.622651Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.628239Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.628374Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.628404Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.634620Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.634764Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.634795Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.640380Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.640518Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.640545Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.646716Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.646828Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.646866Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.653123Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.653282Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.653311Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.659439Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.659576Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.659604Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.665756Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-17T18:28:04.665872Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.665935Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.671539Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.671660Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": 2025-04-17T18:28:04.671746Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Coordination node successfully created 2025-04-17T18:28:04.671796Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Start session 2025-04-17T18:28:04.672733Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-17T18:28:04.672812Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: ... ation node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-17T18:30:10.050829Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:18:2059] 2025-04-17T18:30:10.050959Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:14:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-17T18:30:10.051115Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-17T18:30:10.051249Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-04-17T18:30:10.051548Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:15:2057], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-17T18:30:10.051870Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:14:2056] query id QueryId 2025-04-17T18:30:10.051939Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 0 query id QueryId 2025-04-17T18:30:10.052021Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:14:2056] query id QueryId 2025-04-17T18:30:10.052092Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:14:2056] query id QueryId 2025-04-17T18:30:10.052153Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 1 query id QueryId 2025-04-17T18:30:10.052213Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:14:2056] query id QueryId 2025-04-17T18:30:10.052291Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:15:2057] query id QueryId 2025-04-17T18:30:10.052364Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 0 query id QueryId 2025-04-17T18:30:10.052419Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:15:2057] query id QueryId 2025-04-17T18:30:10.052479Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:15:2057] query id QueryId 2025-04-17T18:30:10.052545Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 1 query id QueryId 2025-04-17T18:30:10.052623Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:15:2057] query id QueryId 2025-04-17T18:30:10.052687Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:14:2056], reason ActorUnknown 2025-04-17T18:30:10.052732Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:14:2056] query id QueryId 2025-04-17T18:30:10.052914Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:15:2057], reason ActorUnknown 2025-04-17T18:30:10.052955Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:15:2057] query id QueryId 2025-04-17T18:30:10.053009Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:22:2063] 2025-04-17T18:30:10.053062Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is 
not used, sent TEvPoisonPill to [38:23:2064] 2025-04-17T18:30:10.295450Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [40:17:2058], tenant Tenant 2025-04-17T18:30:10.321182Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:18:2059] 2025-04-17T18:30:10.321276Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:19:2060] Successfully bootstrapped, local coordinator id [40:18:2059] 2025-04-17T18:30:10.321351Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-17T18:30:10.321390Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-17T18:30:10.321417Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-17T18:30:10.321769Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:18:2059] 2025-04-17T18:30:10.321969Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-17T18:30:10.322159Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-17T18:30:10.322604Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:15:2057], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-04-17T18:30:10.322755Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-04-17T18:30:10.323072Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:22:2063] to [40:14:2056] query id QueryId 2025-04-17T18:30:10.323169Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:14:2056] part id 0 query id QueryId 2025-04-17T18:30:10.323263Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:22:2063] to [40:14:2056] query id QueryId 2025-04-17T18:30:10.323349Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:23:2064] to [40:15:2057] query id QueryId 2025-04-17T18:30:10.323432Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:15:2057] part id 0 query id QueryId 2025-04-17T18:30:10.323520Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:23:2064] to [40:15:2057] query id QueryId 2025-04-17T18:30:10.323627Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:14:2056] topic topic query id QueryId 2025-04-17T18:30:10.323685Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:14:2056] query id QueryId 2025-04-17T18:30:10.323796Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:22:2063] 2025-04-17T18:30:10.323980Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:15:2057] topic topic query id QueryId 2025-04-17T18:30:10.324108Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:15:2057] query id QueryId 2025-04-17T18:30:10.324216Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:23:2064] 2025-04-17T18:30:10.440327Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id 
[42:17:2058], tenant Tenant 2025-04-17T18:30:10.504318Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [42:18:2059] 2025-04-17T18:30:10.504409Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:19:2060] Successfully bootstrapped, local coordinator id [42:18:2059] 2025-04-17T18:30:10.504483Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-17T18:30:10.504514Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-17T18:30:10.504541Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-17T18:30:10.504872Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:18:2059] 2025-04-17T18:30:10.505069Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:16:2053], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-04-17T18:30:10.505281Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-17T18:30:10.517425Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-04-17T18:30:10.517879Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-04-17T18:30:10.518535Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-17T18:30:10.518964Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-04-17T18:30:10.519099Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-17T18:30:10.519407Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 41 2025-04-17T18:30:10.526454Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-17T18:30:10.527779Z node 42 :FQ_ROW_DISPATCHER ERROR: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: UNKNOWN: Temporary failure in name resolution } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-04-17T18:30:10.528128Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-04-17T18:30:10.528232Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-17T18:30:10.528530Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 42 2025-04-17T18:30:10.528590Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:16:2053] query id QueryId 2025-04-17T18:30:10.528699Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:22:2063] 2025-04-17T18:30:10.681282Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:17:2058], tenant Tenant 2025-04-17T18:30:10.706897Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:18:2059] 2025-04-17T18:30:10.707004Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:19:2060] Successfully bootstrapped, local coordinator id [44:18:2059] 2025-04-17T18:30:10.707092Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-17T18:30:10.707129Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-17T18:30:10.707157Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-17T18:30:10.707277Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:18:2059] 2025-04-17T18:30:10.707451Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-17T18:30:10.707653Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-17T18:30:10.708187Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:22:2063] to [44:14:2056] query id QueryId 2025-04-17T18:30:10.708298Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:14:2056] topic topic query id QueryId 2025-04-17T18:30:10.708353Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:14:2056] query id QueryId 2025-04-17T18:30:10.708452Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [44:22:2063] |98.9%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest |98.9%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest >> TTxDataShardSampleKScan::RunScan [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/build_index/ut/unittest >> TTxDataShardSampleKScan::RunScan [GOOD] Test command err: 2025-04-17T18:27:45.538530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:45.539142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:45.539213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00053e/r3tmp/tmpjH3aLk/pdisk_1.dat 2025-04-17T18:27:46.028519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:46.077114Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:46.134813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:46.135452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:46.147958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:46.241596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:46.296008Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:27:46.296258Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:46.351398Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:46.351516Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:46.354781Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:27:46.354865Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:27:46.354925Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:27:46.356927Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:46.357099Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:46.357195Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:27:46.367954Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:46.400143Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:27:46.401553Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:46.401758Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:27:46.401841Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:46.401881Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:27:46.401912Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:46.403320Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:27:46.403531Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:27:46.403659Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:46.403695Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:46.403820Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:46.403872Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:46.403983Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:27:46.405789Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:46.406252Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:27:46.406414Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:27:46.408214Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:27:46.418903Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:27:46.419913Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:27:46.568648Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:27:46.572528Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:27:46.572601Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:46.573241Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:46.573283Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:27:46.573339Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-17T18:27:46.573523Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-17T18:27:46.573628Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-17T18:27:46.574285Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:46.574363Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-17T18:27:46.576543Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-17T18:27:46.578003Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T18:27:46.579531Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-17T18:27:46.579583Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:46.580105Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-17T18:27:46.580182Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:46.581322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:46.581361Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:46.581414Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-17T18:27:46.581468Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:27:46.581577Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-17T18:27:46.581716Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:46.585009Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:27:46.586932Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-17T18:27:46.587525Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-17T18:27:46.587578Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-17T18:27:46.598549Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:743:2624], serverId# [1:744:2625], sessionId# [0:0:0] 2025-04-17T18:27:46.599591Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:46.599733Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715658 at tablet 72075186224037888 2025-04-17T18:27:46.621020Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:27:46.745190Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:27:46.745261Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:46.745415Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:46.745456Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-17T18:27:46.745497Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-17T18:27:46.745653Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-17T18:27:46.745902Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 
0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:46.756923Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-17T18:27:46.757031Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:46.757097Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:738:2620], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:27:46.757152Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:46.761982Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:776:2645], serverId# [1:777:2646], sessionId# [0:0:0] 2025-04-17T18:27:46.762284Z node 1 :TX_DATASHARD NOTICE: Starting TBuildIndexScan TabletId: 72075186224037888 BuildIndexId: 1 TabletId: 0 OwnerId: 72057594046644480 PathId: 2 TargetName: "/Root/table-2" IndexColumns: "value" IndexColumns: "key" SnapshotTxId: 281474976715658 SnapshotStep: 1500 row version v1500/281474976715658 2025-04-17T18:27:46.764687Z node 1 :TX_DATASHARD ERROR: Rejecting TBuildI ... leKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 1 2025-04-17T18:30:31.048985Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 3) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 2 2025-04-17T18:30:31.049013Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 4) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 3 2025-04-17T18:30:31.049043Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 5) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 4 2025-04-17T18:30:31.049196Z node 19 :BUILD_INDEX NOTICE: Done TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 5 Id: 1 TabletId: 72075186224037888 Status: DONE ReadRows: 5 ReadBytes: 60 RequestSeqNoGeneration: 12 RequestSeqNoRound: 1 Probabilities: 2033423113665717216 Probabilities: 6650325416439211286 Probabilities: 7100737942502591086 Probabilities: 7451216775397157654 Probabilities: 16483534760593850368 Rows: "\002\000\004\000\000\000\036\000\000\000\004\000\000\000\003\000\000\000" Rows: "\002\000\004\000\000\000\024\000\000\000\004\000\000\000\002\000\000\000" Rows: "\002\000\004\000\000\0002\000\000\000\004\000\000\000\005\000\000\000" Rows: "\002\000\004\000\000\000(\000\000\000\004\000\000\000\004\000\000\000" Rows: "\002\000\004\000\000\000\n\000\000\000\004\000\000\000\001\000\000\000" 2025-04-17T18:30:31.049835Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:30:31.049877Z node 19 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:30:31.049898Z node 19 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:30:31.049928Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:30:31.051611Z node 19 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [19:7494351278720813782:2452], serverId# [19:7494351278720813784:2454], sessionId# [0:0:0] 2025-04-17T18:30:31.051747Z node 19 :BUILD_INDEX NOTICE: Starting TSampleKScan TabletId: 72075186224037888 Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } K: 1 Seed: 111 MaxProbability: 18446744073709551615 Columns: "value" Columns: "key" SeqNoGeneration: 13 SeqNoRound: 1 row version v1744914631082/18446744073709551615 
2025-04-17T18:30:31.051803Z node 19 :BUILD_INDEX INFO: Create TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 0 2025-04-17T18:30:31.051929Z node 19 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [19:7494351278720813783:2453], serverId# [19:7494351278720813785:2455], sessionId# [0:0:0] 2025-04-17T18:30:31.052312Z node 19 :BUILD_INDEX NOTICE: Starting TSampleKScan TabletId: 72075186224037888 Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } K: 1 Seed: 111 MaxProbability: 18446744073709551615 Columns: "value" Columns: "key" SeqNoGeneration: 13 SeqNoRound: 1 row version v1744914631082/18446744073709551615 2025-04-17T18:30:31.052361Z node 19 :BUILD_INDEX INFO: Create TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 0 2025-04-17T18:30:31.052694Z node 19 :BUILD_INDEX INFO: Prepare TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 0 2025-04-17T18:30:31.052718Z node 19 :BUILD_INDEX DEBUG: Seek TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 0 2025-04-17T18:30:31.052823Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 1) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 0 2025-04-17T18:30:31.052878Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 2) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 1 2025-04-17T18:30:31.052906Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 3) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 1 2025-04-17T18:30:31.052929Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 4) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 1 2025-04-17T18:30:31.052954Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 5) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 1 2025-04-17T18:30:31.053080Z node 19 :BUILD_INDEX NOTICE: Done TSampleKScan TabletId: 72075186224037888 Id: 1 K: 1 Clusters: 1 Id: 1 TabletId: 72075186224037888 Status: DONE ReadRows: 5 ReadBytes: 60 RequestSeqNoGeneration: 13 RequestSeqNoRound: 1 Probabilities: 1182976742465768660 Rows: "\002\000\004\000\000\000\n\000\000\000\004\000\000\000\001\000\000\000" 2025-04-17T18:30:31.053603Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:30:31.053635Z node 19 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:30:31.053652Z node 19 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:30:31.053678Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:30:31.056064Z node 19 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [19:7494351278720813792:2460], serverId# [19:7494351278720813794:2462], sessionId# [0:0:0] 2025-04-17T18:30:31.056096Z node 19 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [19:7494351278720813793:2461], serverId# [19:7494351278720813795:2463], sessionId# [0:0:0] 2025-04-17T18:30:31.056229Z node 19 :BUILD_INDEX NOTICE: Starting TSampleKScan TabletId: 72075186224037888 Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } K: 3 Seed: 111 MaxProbability: 18446744073709551615 Columns: "value" Columns: "key" SeqNoGeneration: 14 SeqNoRound: 1 row version v1744914631082/18446744073709551615 2025-04-17T18:30:31.056273Z node 19 :BUILD_INDEX INFO: Create TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 0 2025-04-17T18:30:31.056656Z node 19 
:BUILD_INDEX NOTICE: Starting TSampleKScan TabletId: 72075186224037888 Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } K: 3 Seed: 111 MaxProbability: 18446744073709551615 Columns: "value" Columns: "key" SeqNoGeneration: 14 SeqNoRound: 1 row version v1744914631082/18446744073709551615 2025-04-17T18:30:31.056693Z node 19 :BUILD_INDEX INFO: Create TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 0 2025-04-17T18:30:31.056855Z node 19 :BUILD_INDEX INFO: Prepare TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 0 2025-04-17T18:30:31.056879Z node 19 :BUILD_INDEX DEBUG: Seek TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 0 2025-04-17T18:30:31.056982Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 1) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 0 2025-04-17T18:30:31.057025Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 2) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 1 2025-04-17T18:30:31.057057Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 3) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 2 2025-04-17T18:30:31.057100Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 4) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 3 2025-04-17T18:30:31.057130Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 5) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 3 2025-04-17T18:30:31.057242Z node 19 :BUILD_INDEX NOTICE: Done TSampleKScan TabletId: 72075186224037888 Id: 1 K: 3 Clusters: 3 Id: 1 TabletId: 72075186224037888 Status: DONE ReadRows: 5 ReadBytes: 60 RequestSeqNoGeneration: 14 RequestSeqNoRound: 1 Probabilities: 1182976742465768660 Probabilities: 5318693189955351404 Probabilities: 10350790534088709650 Rows: "\002\000\004\000\000\000\n\000\000\000\004\000\000\000\001\000\000\000" Rows: "\002\000\004\000\000\000\024\000\000\000\004\000\000\000\002\000\000\000" Rows: "\002\000\004\000\000\000\036\000\000\000\004\000\000\000\003\000\000\000" 2025-04-17T18:30:31.058829Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:30:31.058854Z node 19 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:30:31.058872Z node 19 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:30:31.058899Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:30:31.059964Z node 19 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [19:7494351278720813802:2468], serverId# [19:7494351278720813804:2470], sessionId# [0:0:0] 2025-04-17T18:30:31.059999Z node 19 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [19:7494351278720813803:2469], serverId# [19:7494351278720813805:2471], sessionId# [0:0:0] 2025-04-17T18:30:31.060143Z node 19 :BUILD_INDEX NOTICE: Starting TSampleKScan TabletId: 72075186224037888 Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } K: 9 Seed: 111 MaxProbability: 18446744073709551615 Columns: "value" Columns: "key" SeqNoGeneration: 15 SeqNoRound: 1 row version v1744914631082/18446744073709551615 2025-04-17T18:30:31.060206Z node 19 :BUILD_INDEX INFO: Create TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 0 2025-04-17T18:30:31.060418Z node 19 :BUILD_INDEX NOTICE: Starting TSampleKScan TabletId: 72075186224037888 Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 
LocalId: 2 } K: 9 Seed: 111 MaxProbability: 18446744073709551615 Columns: "value" Columns: "key" SeqNoGeneration: 15 SeqNoRound: 1 row version v1744914631082/18446744073709551615 2025-04-17T18:30:31.060455Z node 19 :BUILD_INDEX INFO: Create TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 0 2025-04-17T18:30:31.060589Z node 19 :BUILD_INDEX INFO: Prepare TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 0 2025-04-17T18:30:31.060630Z node 19 :BUILD_INDEX DEBUG: Seek TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 0 2025-04-17T18:30:31.060760Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 1) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 0 2025-04-17T18:30:31.060808Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 2) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 1 2025-04-17T18:30:31.060844Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 3) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 2 2025-04-17T18:30:31.060874Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 4) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 3 2025-04-17T18:30:31.060903Z node 19 :BUILD_INDEX TRACE: Feed key (Uint32 : 5) TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 4 2025-04-17T18:30:31.061058Z node 19 :BUILD_INDEX NOTICE: Done TSampleKScan TabletId: 72075186224037888 Id: 1 K: 9 Clusters: 5 Id: 1 TabletId: 72075186224037888 Status: DONE ReadRows: 5 ReadBytes: 60 RequestSeqNoGeneration: 15 RequestSeqNoRound: 1 Probabilities: 1182976742465768660 Probabilities: 5318693189955351404 Probabilities: 10350790534088709650 Probabilities: 15054120856910380438 Probabilities: 17710393315211645100 Rows: "\002\000\004\000\000\000\n\000\000\000\004\000\000\000\001\000\000\000" Rows: "\002\000\004\000\000\000\024\000\000\000\004\000\000\000\002\000\000\000" Rows: "\002\000\004\000\000\000\036\000\000\000\004\000\000\000\003\000\000\000" Rows: "\002\000\004\000\000\0002\000\000\000\004\000\000\000\005\000\000\000" Rows: "\002\000\004\000\000\000(\000\000\000\004\000\000\000\004\000\000\000" 2025-04-17T18:30:31.061201Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:30:31.061228Z node 19 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:30:31.061243Z node 19 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:30:31.061268Z node 19 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 |98.9%| [TM] {RESULT} ydb/core/tx/datashard/build_index/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> KafkaProtocol::AlterConfigsScenario [GOOD] >> KafkaProtocol::LoginWithApiKey >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit |98.9%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |98.9%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> KafkaProtocol::LoginWithApiKey [GOOD] >> KafkaProtocol::LoginWithApiKeyWithoutAt |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis 
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: 2025-04-17T18:27:42.112420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:42.112901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:42.112948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000486/r3tmp/tmpoQEFa2/pdisk_1.dat 2025-04-17T18:27:42.612574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:42.663502Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:42.709331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:42.709834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:42.722682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:42.823432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:42.879411Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2579] 2025-04-17T18:27:42.879758Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:42.930672Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-04-17T18:27:42.930885Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:42.940057Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:42.940204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:42.942623Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:27:42.942740Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:27:42.942811Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:27:42.944773Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:42.944974Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:42.945109Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:720:2579] in generation 1 2025-04-17T18:27:42.945931Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:42.946042Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:42.947275Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-17T18:27:42.947349Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-17T18:27:42.947407Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-17T18:27:42.947680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:42.947870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:42.947929Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:726:2581] in generation 1 2025-04-17T18:27:42.949620Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:690:2583] 2025-04-17T18:27:42.949827Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:42.959577Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:42.959767Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:42.960901Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-17T18:27:42.960941Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-17T18:27:42.960978Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-17T18:27:42.961231Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:42.961529Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:692:2585] 2025-04-17T18:27:42.961726Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:42.968874Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:42.968950Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:748:2583] in generation 1 2025-04-17T18:27:42.970006Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:42.970094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:42.971133Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-17T18:27:42.971196Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-17T18:27:42.971252Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-17T18:27:42.971509Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:42.971575Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:42.971619Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:753:2585] in generation 1 2025-04-17T18:27:42.982679Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:43.013949Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:27:43.017211Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:43.017391Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:758:2621] 2025-04-17T18:27:43.017432Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:43.017482Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:27:43.017518Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:43.018734Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:43.018781Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-17T18:27:43.018842Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:43.018900Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:759:2622] 
2025-04-17T18:27:43.018924Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-17T18:27:43.018946Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-17T18:27:43.018970Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:27:43.019401Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:27:43.019501Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:27:43.019583Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:43.019613Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-17T18:27:43.019666Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:43.019745Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:760:2623] 2025-04-17T18:27:43.019777Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-17T18:27:43.019799Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-17T18:27:43.019821Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-17T18:27:43.019874Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:43.019898Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-17T18:27:43.019942Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-17T18:27:43.019987Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:761:2624] 2025-04-17T18:27:43.020009Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-17T18:27:43.020030Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-17T18:27:43.020049Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-17T18:27:43.020659Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:43.020717Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:43.020788Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:43.020837Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:43.020886Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-17T18:27:43.020956Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-17T18:27:43.021127Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:724:2605], sessionId# [0:0:0] 2025-04-17T18:27:43.021167Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-17T18:27:43.021193Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:43.021231Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-17T18:27:43.021272Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:27:43.021495Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:43.022470Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:27:43.022578Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:27:43.022987Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:715:2598], sessionId# [0:0:0] 2025-04-17T18:27:43.023047Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-17T18:27:43.023197Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-17T18:27:43.023244Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-17T18:27:43.023293Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove ... lan for [2044:281474976715664] at 72075186224037889 has finished 2025-04-17T18:30:46.617954Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:30:46.617977Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-17T18:30:46.618003Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-17T18:30:46.618032Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-17T18:30:46.618736Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2044} 2025-04-17T18:30:46.619201Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1015:2805], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:30:46.619238Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:30:46.619274Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:1012:2802], serverId# [6:1015:2805], sessionId# [0:0:0] 2025-04-17T18:30:46.619346Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2044} 2025-04-17T18:30:46.619603Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:757:2635], Recipient [6:666:2570]: {TEvReadSet step# 2044 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T18:30:46.619644Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:30:46.619702Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715664 2025-04-17T18:30:46.619817Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2044 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-17T18:30:46.620148Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:30:46.620209Z node 
6 :TX_DATASHARD TRACE: Complete execution for [2044:281474976715664] at 72075186224037888 on unit CompleteWrite 2025-04-17T18:30:46.620280Z node 6 :TX_DATASHARD DEBUG: Complete write [2044 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [6:1000:2754] 2025-04-17T18:30:46.620340Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:30:46.620447Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-17T18:30:46.620518Z node 6 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037888 {TEvReadSet step# 2044 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} ... nodata readset 2025-04-17T18:30:46.620633Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:666:2570], Recipient [6:757:2635]: {TEvReadSet step# 2044 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-17T18:30:46.620665Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-17T18:30:46.620693Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2025-04-17T18:30:46.620742Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2044 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-17T18:30:46.620791Z node 6 :TX_DATASHARD TRACE: Processed readset without data from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2025-04-17T18:30:46.620975Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:1000:2754], SessionActorId: [6:942:2754], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[6:1000:2754].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-17T18:30:46.621130Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:1000:2754], SessionActorId: [6:942:2754], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[6:942:2754]. isRollback=0 2025-04-17T18:30:46.621560Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=OGY2NGI4YTEtNWIyMWFjNjUtN2U5MzYxMmMtYmVhMDczNTQ=, ActorId: [6:942:2754], ActorState: ExecuteState, TraceId: 01js2g91gec1wye416txny5s9c, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [6:1001:2754] from: [6:1000:2754] 2025-04-17T18:30:46.621738Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [6:1000:2754], Recipient [6:666:2570]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-17T18:30:46.621764Z node 6 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-17T18:30:46.622155Z node 6 :KQP_EXECUTER ERROR: ActorId: [6:1001:2754] TxId: 281474976715664. Ctx: { TraceId: 01js2g91gec1wye416txny5s9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=OGY2NGI4YTEtNWIyMWFjNjUtN2U5MzYxMmMtYmVhMDczNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-17T18:30:46.622416Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [6:666:2570], Recipient [6:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T18:30:46.622462Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-17T18:30:46.622538Z node 6 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-17T18:30:46.622685Z node 6 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-17T18:30:46.622795Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-17T18:30:46.622896Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-04-17T18:30:46.622966Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:30:46.623018Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-04-17T18:30:46.623067Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-17T18:30:46.623115Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-17T18:30:46.623163Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2044/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2001/0 ImmediateWriteEdgeReplied# v2001/0 2025-04-17T18:30:46.623232Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-17T18:30:46.623275Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:30:46.623307Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-17T18:30:46.623326Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-04-17T18:30:46.623349Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-04-17T18:30:46.623384Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-17T18:30:46.623477Z node 6 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 2025-04-17T18:30:46.623529Z node 6 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-04-17T18:30:46.623577Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-17T18:30:46.623637Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:30:46.623669Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-04-17T18:30:46.623713Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-17T18:30:46.623755Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 
2025-04-17T18:30:46.623786Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-04-17T18:30:46.623823Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-17T18:30:46.623869Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-17T18:30:46.623910Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-17T18:30:46.623957Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-17T18:30:46.623983Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-17T18:30:46.624017Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-17T18:30:46.624078Z node 6 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-17T18:30:46.624151Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-17T18:30:46.624206Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-17T18:30:46.624287Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:30:46.624489Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-17T18:30:46.624522Z node 6 :TX_DATASHARD TRACE: Complete execution for [2044:281474976715664] at 72075186224037889 on unit CompleteWrite 2025-04-17T18:30:46.624566Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-17T18:30:46.624635Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-17T18:30:46.624865Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=OGY2NGI4YTEtNWIyMWFjNjUtN2U5MzYxMmMtYmVhMDczNTQ=, ActorId: [6:942:2754], ActorState: ExecuteState, TraceId: 01js2g91gec1wye416txny5s9c, Create QueryResponse for error on request, msg: 2025-04-17T18:30:46.625991Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [6:61:2108], Recipient [6:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest >> KafkaProtocol::LoginWithApiKeyWithoutAt [GOOD] >> KafkaProtocol::MetadataScenario >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test 
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session |99.0%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> KafkaProtocol::MetadataScenario [GOOD] >> KafkaProtocol::MetadataInServerlessScenario |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqRdReadActorTests::Backpressure [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> KqpTpch::Query22 [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqRdReadActorTests::MetadataFields |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> TDqPqRdReadActorTests::MetadataFields [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> KafkaProtocol::MetadataInServerlessScenario [GOOD] >> KafkaProtocol::NativeKafkaBalanceScenario >> DataShardStats::HasSchemaChanges_Families [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> 
KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |99.1%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::HasSchemaChanges_Families [GOOD] Test command err: 2025-04-17T18:27:40.256000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:27:40.256646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:27:40.256723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/00042e/r3tmp/tmpZxXoui/pdisk_1.dat 2025-04-17T18:27:40.760163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:27:40.812672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:40.821118Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-17T18:27:40.863312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:40.865037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:40.878378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:40.973448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:27:41.036932Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-17T18:27:41.037954Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-17T18:27:41.038513Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-17T18:27:41.038775Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-17T18:27:41.087725Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T18:27:41.088483Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-17T18:27:41.088607Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-17T18:27:41.092696Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-17T18:27:41.092788Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-17T18:27:41.092843Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-17T18:27:41.094933Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-17T18:27:41.095098Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-17T18:27:41.095192Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-17T18:27:41.106038Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-17T18:27:41.132603Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-17T18:27:41.138340Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2025-04-17T18:27:41.138536Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-17T18:27:41.138570Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-17T18:27:41.138605Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-17T18:27:41.138667Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:41.138894Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:41.141316Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:41.143297Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-17T18:27:41.143435Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-17T18:27:41.143598Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-17T18:27:41.143654Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:27:41.143712Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:27:41.143752Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:27:41.143789Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:27:41.143817Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-17T18:27:41.143864Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:27:41.144001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:41.144041Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:41.144102Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-17T18:27:41.145612Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-17T18:27:41.145675Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-17T18:27:41.145781Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-17T18:27:41.146124Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-17T18:27:41.146231Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-17T18:27:41.147379Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-17T18:27:41.147529Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-17T18:27:41.147584Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-17T18:27:41.147635Z node 1 :TX_DATASHARD TRACE: Add 
[0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-17T18:27:41.147667Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:27:41.147979Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-17T18:27:41.148031Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-17T18:27:41.148069Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-17T18:27:41.148114Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:27:41.148193Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-17T18:27:41.148233Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-17T18:27:41.148272Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-17T18:27:41.148301Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-17T18:27:41.148324Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-17T18:27:41.149830Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-17T18:27:41.149880Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-17T18:27:41.160643Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-17T18:27:41.160719Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-17T18:27:41.160751Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-17T18:27:41.160798Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-17T18:27:41.160862Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-17T18:27:41.310000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:41.310065Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:27:41.310099Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-17T18:27:41.310490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-17T18:27:41.310528Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T18:27:41.310643Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 
2025-04-17T18:27:41.310696Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-17T18:27:41.310753Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-17T18:27:41.310787Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-17T18:27:41.314987Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-17T18:27:41.315120Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:27:41.317537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:41.317590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-17T18:27:41.317656Z ... lanQueue at 72075186224037888 2025-04-17T18:31:03.270783Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:31:03.270842Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:31:03.274603Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 25500} 2025-04-17T18:31:03.274715Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:31:03.275213Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-17T18:31:03.275257Z node 13 :TX_DATASHARD TRACE: Complete execution for [25500:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-17T18:31:03.275371Z node 13 :TX_DATASHARD DEBUG: Complete [25500 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [13:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-04-17T18:31:03.275450Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715664 state Ready TxInFly 0 2025-04-17T18:31:03.275593Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-17T18:31:03.278318Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1167:2976], Recipient [13:937:2763]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:1171:2980] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T18:31:03.278379Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T18:31:03.279432Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:410:2405], Recipient [13:937:2763]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715664 2025-04-17T18:31:03.279492Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-17T18:31:03.279558Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-04-17T18:31:03.279643Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 waiting for schema changes 2025-04-17T18:31:03.292255Z node 13 
:TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [13:1167:2976], Recipient [13:937:2763]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [13:1167:2976] ServerId: [13:1171:2980] } 2025-04-17T18:31:03.292373Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-17T18:31:04.118868Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:937:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:31:04.118964Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-17T18:31:04.119250Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1, with schema changes 2025-04-17T18:31:04.119492Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 9 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 65 IndexSize: 82 } Channels { Channel: 2 DataSize: 65 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: true LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2418 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T18:31:04.120737Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1204:3013], Recipient [13:937:2763]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:31:04.120835Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:31:04.120920Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1203:3012], serverId# [13:1204:3013], sessionId# [0:0:0] 2025-04-17T18:31:04.121204Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [13:1202:3011], Recipient [13:937:2763]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-17T18:31:04.121408Z node 13 :TX_DATASHARD INFO: Started background compaction# 3 of 72075186224037888 tableId# 2 localTid# 1001, requested from [13:1202:3011], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-17T18:31:04.123011Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 2, ts 1970-01-01T00:00:20.452024Z 2025-04-17T18:31:04.123073Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-17T18:31:04.123165Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 2, front# 3 2025-04-17T18:31:04.126036Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-04-17T18:31:04.128582Z node 13 :TX_DATASHARD TRACE: StateWork, 
received event# 268828683, Sender [13:934:2761], Recipient [13:937:2763]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:31:04.129182Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-17T18:31:04.129295Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, with schema changes, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-04-17T18:31:04.129723Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1209:3017], Recipient [13:937:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-17T18:31:04.129815Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-17T18:31:04.129937Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-04-17T18:31:04.133424Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 3, ts 1970-01-01T00:00:30.452024Z 2025-04-17T18:31:04.133507Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-17T18:31:04.133551Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 3, front# 3 2025-04-17T18:31:04.133623Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [13:1202:3011]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-17T18:31:04.135404Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-04-17T18:31:04.135705Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [13:934:2761], Recipient [13:937:2763]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-17T18:31:04.137119Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-17T18:31:04.137227Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-04-17T18:31:04.137460Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1216:3023], Recipient [13:937:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-17T18:31:04.137515Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-17T18:31:04.137596Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 waiting for no schema changes 2025-04-17T18:31:04.148951Z node 13 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186224037888, tableId# 2, last 
full compaction# 1970-01-01T00:00:30.452024Z 2025-04-17T18:31:04.965306Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [13:937:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-17T18:31:04.965437Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-17T18:31:04.965616Z node 13 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 35000 last cleanup 0 2025-04-17T18:31:04.965744Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T18:31:04.965812Z node 13 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-17T18:31:04.965883Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-17T18:31:04.965952Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-17T18:31:04.966214Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:937:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-17T18:31:04.966270Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-17T18:31:04.966481Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1 2025-04-17T18:31:04.966664Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 30 HasLoanedParts: false Channels { Channel: 1 DataSize: 80 IndexSize: 82 } Channels { Channel: 2 DataSize: 50 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1479 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 >> TDqPqReadActorTest::TestReadFromTopic |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |99.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_public_api.py::TestExplain::test_explain_data_query >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqReadActorTest::TestReadFromTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 63158, MsgBus: 11168 2025-04-17T18:27:42.537374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350550782818299:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:42.537429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005f7/r3tmp/tmppF4sXu/pdisk_1.dat 2025-04-17T18:27:42.910394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:42.937562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:42.937678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:42.939398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63158, node 1 2025-04-17T18:27:43.149255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:27:43.149288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:27:43.149296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:27:43.149403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11168 TClient is connected to server localhost:11168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:43.798011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:45.347387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350563667720851:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:45.347482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 11246, MsgBus: 21450 2025-04-17T18:27:46.563545Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350567968528658:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:46.563614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005f7/r3tmp/tmpIXObAO/pdisk_1.dat 2025-04-17T18:27:46.646937Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11246, node 2 2025-04-17T18:27:46.693715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:46.693828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:46.695265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:27:46.695813Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:27:46.695833Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:27:46.695842Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:27:46.695936Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21450 TClient is connected to server localhost:21450 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:47.080306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:49.551386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494350580853431204:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:49.551492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 10201, MsgBus: 17586 2025-04-17T18:27:50.324934Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494350586016374480:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:50.324998Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005f7/r3tmp/tmp1DDM3V/pdisk_1.dat 2025-04-17T18:27:50.458503Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:50.474900Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:50.474996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:50.476382Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10201, node 3 2025-04-17T18:27:50.517311Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:27:50.517335Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:27:50.517343Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:27:50.517454Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17586 TClient is connected to server localhost:17586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:27:50.922910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:27:53.296749Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494350598901277020:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:27:53.296814Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 11581, MsgBus: 12038 2025-04-17T18:27:54.009742Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494350600452403406:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:27:54.010461Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005f7/r3tmp/tmpG0PArB/pdisk_1.dat 2025-04-17T18:27:54.148955Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:27:54.189309Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:27:54.189399Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:27:54.195360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11581, node 4 2025-04-17T18:27:54.263935Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:27:54.263953Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:27:54.263961Z node 4 :NET_ ... "Select")) '())) $7 (Void))) (let $9 (DataSink 'result)) (let $10 (ResPull! (Left! $8) $9 (Key) (Nth (Right! $8) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $10 $9) $1 $7)) ) 2025-04-17 18:31:01.354 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpLogical-ApplyExtractMembersToReadTableRanges 2025-04-17 18:31:01.361 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpLogical-RewriteAggregate 2025-04-17 18:31:01.371 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-17 18:31:01.381 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-PushAggregateCombineToStage 2025-04-17 18:31:01.391 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-17 18:31:01.404 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-17 18:31:01.417 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-17 18:31:01.430 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-17 18:31:01.457 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildShuffleStage 2025-04-17 18:31:01.470 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-17 18:31:01.496 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-17 18:31:01.664 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] yql_optimize.cpp:135: KqpPeepholeFinal-SetCombinerMemoryLimit 2025-04-17 18:31:01.799 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] kqp_host.cpp:1382: 
Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-17 18:31:01.808 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) 2025-04-17 18:31:01.860 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Right! (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/view_series"))) (Void) '()))) (let $2 '('('"query_ast" (RemoveSystemMembers (PersistableRepr (SqlProject $1 '((SqlProjectStarItem (TypeOf $1) '"" (lambda '($3) $3) '())))))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'createObject) '('features $2)))) ) 2025-04-17 18:31:01.906 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D997640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('('"query_ast" (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '()))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (let $3 (KiCreateObject! world $1 '"/Root/read_from_one_view" '"VIEW" $2 '0 '0)) (return (Commit! $3 $1 '('('"mode" '"flush")))) ) 2025-04-17 18:31:01.976 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D997640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/read_from_one_view"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) (Bool 'true) (lambda '($6) (PersistableRepr (Member $6 '"series_id"))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) 2025-04-17 18:31:02.064 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D997640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (Sort (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '())) (Bool 'true) (lambda '($7) (Member $7 '"series_id")))) (let $3 '('('"mode" '"flush"))) (let $4 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($2 '() '0)) (KiEffects) '('('"db" '"/Root/series" '"Select")) '())) $3 (Void))) (let $5 (DataSink 'result)) (let $6 (ResPull! (Left! $4) $5 (Key) (Nth (Right! $4) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! 
$6 $5) $1 $3)) ) 2025-04-17 18:31:02.072 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D997640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-17 18:31:02.076 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D997640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-17 18:31:02.082 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D997640) [KQP] yql_optimize.cpp:135: KqpPhysical-RemoveRedundantSortByPk 2025-04-17 18:31:02.088 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D997640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-17 18:31:02.198 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-17 18:31:02.207 INFO ydb-core-kqp-ut-view(pid=739525, tid=0x00007F305D196640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/read_from_one_view" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 18417, MsgBus: 11576 2025-04-17T18:31:03.712230Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7494351412557781363:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:31:03.712368Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/0005f7/r3tmp/tmpYaJkAL/pdisk_1.dat 2025-04-17T18:31:03.917430Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:31:03.962460Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:31:03.962615Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:31:03.964777Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18417, node 23 2025-04-17T18:31:04.035096Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:31:04.035126Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:31:04.035143Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:31:04.035344Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11576 TClient is connected to server localhost:11576 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:31:05.101549Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:31:08.712618Z node 23 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7494351412557781363:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:31:08.712743Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:31:10.826708Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7494351442622553111:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:31:10.826972Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:31:10.853975Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7494351442622553139:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:31:10.854142Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7494351442622553144:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:31:10.854153Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:31:10.860357Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T18:31:10.874545Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7494351442622553146:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:31:10.948845Z node 23 :TX_PROXY ERROR: Actor# [23:7494351442622553197:2366] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.1%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest >> TDqPqReadActorTest::TestReadFromTopicFromNow >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[1] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> TDqPqReadActorTest::ReadWithFreeSpace |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp |99.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] |99.2%| [TM] {RESULT} ydb/tests/sql/py3test >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_clickbench.py::TestClickbench::test_clickbench[0] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> 
TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 |99.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> 
test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] >> TPqWriterTest::TestWriteToTopic >> TPqWriterTest::TestWriteToTopic [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] [GOOD] >> 
test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] >> TPqWriterTest::TestWriteToTopicMultiBatch >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2025-04-17T18:28:42.073829Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
<main>: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-17T18:28:42.722639Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:43.200516Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:43.776728Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:44.340103Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:44.890822Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:45.522376Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:46.407739Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:47.255434Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
<main>: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-17T18:28:48.001358Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:48.675697Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:49.516126Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
<main>: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:50.225239Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:50.907538Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:51.553983Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:52.233573Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
<main>: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:53.082344Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:53.696292Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:54.354601Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
<main>: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:54.998289Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:55.656952Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:56.222866Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:56.913027Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:28:57.429996Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:35.883125Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
<main>: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-17T18:29:36.775643Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
<main>: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:37.624464Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:38.491840Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:39.159567Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:39.959940Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:40.875760Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:41.740600Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:42.558069Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-17T18:29:43.399565Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:44.072480Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:45.030294Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:45.978825Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:46.648490Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:47.332430Z node 86 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:48.409455Z node 87 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:49.250632Z node 88 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:50.017211Z node 89 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:50.825904Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:51.579889Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:52.291052Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:53.178780Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:54.285696Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:29:55.246053Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:30:56.931128Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-17T18:31:00.342705Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:01.277344Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:02.211049Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:03.093534Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:04.029153Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:06.602646Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:09.326858Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:10.182487Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:11.010244Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:11.836021Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:14.540583Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:15.436775Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:17.160872Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:18.013890Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:19.889795Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:20.762515Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:21.600203Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:23.364438Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-17T18:31:24.270093Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:25.160647Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:26.028665Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:26.985381Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:27.946517Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:28.900207Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:29.921972Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:30.939445Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:31.942404Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:32.922779Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:33.993940Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-17T18:31:35.051564Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |99.3%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> TPqWriterTest::TestDeferredWriteToTopic >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> TPqWriterTest::WriteNonExistentTopic >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TPqWriterTest::TestCheckpoints >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_result_limits.py::TestResultLimits::test_large_row >> TPqWriterTest::TestCheckpoints [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> TPqWriterTest::TestCheckpointWithEmptyBatch >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-04-17T18:28:35.853323Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7494350776545331557:2053], metadatafields: , partitions: 666 2025-04-17T18:28:36.062217Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-17T18:28:36.062334Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-04-17T18:28:36.062350Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Send TEvCoordinatorChangesSubscribe to local RD ([1:7494350776545331557:2053]) 2025-04-17T18:28:36.062406Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7494350776545331563:2048] 2025-04-17T18:28:36.071290Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7494350776545331558:2054] 2025-04-17T18:28:36.071348Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [1:7494350776545331558:2054], partIds: 666 cookie 1 2025-04-17T18:28:36.071728Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7494350776545331558:2054], cookie 1 2025-04-17T18:28:36.071749Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-04-17T18:28:36.071757Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-04-17T18:28:36.071775Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7494350776545331560:2056], generation 1 2025-04-17T18:28:36.071805Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [1:7494350776545331560:2056], connection id 1 partitions offsets (666 / ), 2025-04-17T18:28:36.072095Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7494350776545331560:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-17T18:28:36.072158Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7494350776545331560:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-17T18:28:36.091707Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [1:7494350776545331560:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-17T18:28:36.091745Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-04-17T18:28:36.091750Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-04-17T18:28:36.091773Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-04-17T18:28:36.091931Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-04-17T18:28:36.091942Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-17T18:28:36.102334Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
PassAway 2025-04-17T18:28:36.102407Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 2 NotifyCA 1 [1:7494350776545331560:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2025-04-17T18:28:36.102417Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7494350776545331563:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7494350776545331560:2056] generation 1 2025-04-17T18:28:36.580652Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7494350780650268413:2053], metadatafields: , partitions: 666 2025-04-17T18:28:36.790883Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-17T18:28:36.790938Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-04-17T18:28:36.790964Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7494350780650268413:2053]) 2025-04-17T18:28:36.791011Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [3:7494350780650268419:2048] 2025-04-17T18:28:36.791214Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [3:7494350780650268414:2054] 2025-04-17T18:28:36.791249Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [3:7494350780650268414:2054], partIds: 666 cookie 1 2025-04-17T18:28:36.791485Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [3:7494350780650268414:2054], cookie 1 2025-04-17T18:28:36.791522Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-04-17T18:28:36.791531Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-04-17T18:28:36.791553Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [3:7494350780650268416:2056], generation 1 2025-04-17T18:28:36.791584Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [3:7494350780650268416:2056], connection id 1 partitions offsets (666 / ), 2025-04-17T18:28:36.791899Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Received TEvStartSessionAck from [3:7494350780650268416:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-17T18:28:36.791985Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [3:7494350780650268416:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-17T18:28:36.792801Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7494350780650268416:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-17T18:28:36.792845Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-04-17T18:28:36.792851Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-04-17T18:28:36.792877Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-04-17T18:28:36.792993Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-04-17T18:28:36.793013Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-17T18:28:36.793158Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [3:7494350780650268416:2056], reason Disconnected, cookie 999 2025-04-17T18:28:36.793227Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [3:7494350780650268416:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-17T18:28:36.793639Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvRetry, EventQueueId 1 2025-04-17T18:28:36.793697Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7494350780650268416:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-17T18:28:36.793711Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 3 2025-04-17T18:28:36.793727Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-04-17T18:28:36.793758Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 3 2025-04-17T18:28:36.793767Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-04-17T18:28:36.801418Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-04-17T18:28:36.801485Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 1 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 3 NotifyCA 2 [3:7494350780650268416:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=3 has pending data 2025-04-17T18:28:36.801503Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7494350780650268419:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [3:7494350780650268416:2056] generation 1 2025-04-17T18:28:37.238875Z node 5 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [5:7494350788093153441:2053], metadat ... ocal] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [b3cb685c-8f2f1852-58eb9f77-1f1e220a] Write session: send init request: init_request { path: "Checkpoints" producer_id: "b3cb685c-8f2f1852-58eb9f77-1f1e220a" message_group_id: "b3cb685c-8f2f1852-58eb9f77-1f1e220a" } 2025-04-17T18:32:03.004666Z :TRACE: [local] TRACE_EVENT InitRequest 2025-04-17T18:32:03.005077Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [b3cb685c-8f2f1852-58eb9f77-1f1e220a] Write session: OnWriteDone gRpcStatusCode: 0 2025-04-17T18:32:03.005240Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] Successfully connected. Initializing session 2025-04-17T18:32:03.007751Z :INFO: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] Server session id: test_client_1_22_14493186897777260232_v1 2025-04-17T18:32:03.007834Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-17T18:32:03.008130Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-17T18:32:03.012549Z :INFO: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-04-17T18:32:03.015730Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] Got ReadResponse, serverBytesSize = 1095, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427705 2025-04-17T18:32:03.015935Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427705 2025-04-17T18:32:03.018321Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2025-04-17T18:32:03.018392Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] Returning serverBytesSize = 1095 to budget 2025-04-17T18:32:03.018472Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] In ContinueReadingDataImpl, ReadSizeBudget = 1095, ReadSizeServerDelta = 52427705 2025-04-17T18:32:03.018759Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-17T18:32:03.018855Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-17T18:32:03.018905Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-17T18:32:03.018959Z :DEBUG: [local] Take Data. Partition 0. 
Read: {2, 0} (2-2) 2025-04-17T18:32:03.018990Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (3-3) 2025-04-17T18:32:03.019034Z :DEBUG: [local] Take Data. Partition 0. Read: {4, 0} (4-4) 2025-04-17T18:32:03.019414Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2025-04-17T18:32:03.019482Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] Returning serverBytesSize = 0 to budget 2025-04-17T18:32:03.019718Z :INFO: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] Closing read session. Close timeout: 0.000000s 2025-04-17T18:32:03.019787Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-17T18:32:03.019859Z :INFO: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 51 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:32:03.020034Z :NOTICE: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-17T18:32:03.020096Z :DEBUG: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] [] Abort session to cluster 2025-04-17T18:32:03.021149Z :INFO: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] Closing read session. Close timeout: 0.000000s 2025-04-17T18:32:03.021216Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-17T18:32:03.021285Z :INFO: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 52 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:32:03.021419Z :NOTICE: [local] [local] [6a62e24-150adf36-aa16116c-6751a20d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-17T18:32:03.024961Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [b3cb685c-8f2f1852-58eb9f77-1f1e220a] Write session: OnReadDone gRpcStatusCode: 0 2025-04-17T18:32:03.025157Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [b3cb685c-8f2f1852-58eb9f77-1f1e220a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744914723025 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:32:03.025423Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [b3cb685c-8f2f1852-58eb9f77-1f1e220a] Write session established. Init response: last_seq_no: 5 session_id: "b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0" 2025-04-17T18:32:03.025518Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0 2025-04-17T18:32:03.025635Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] MessageGroupId [b3cb685c-8f2f1852-58eb9f77-1f1e220a] Write session: set DirectWriteToPartitionId 0 2025-04-17T18:32:03.025879Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [0] Get partition location async, partition 0, delay 0.000000s 2025-04-17T18:32:03.025956Z :TRACE: [local] TRACE_EVENT DescribePartitionRequest path=local/Checkpoints partition_id=0 2025-04-17T18:32:03.026202Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [0] Getting partition location, partition 0 2025-04-17T18:32:03.034404Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [0] Got PartitionLocation response. Status SUCCESS, proto: partition { active: true partition_location { node_id: 1 generation: 1 } } 2025-04-17T18:32:03.034460Z :TRACE: [local] TRACE_EVENT DescribePartitionResponse partition_id=0 active=1 pl_node_id=1 pl_generation=1 2025-04-17T18:32:03.034512Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [0] GetPreferredEndpoint: partitionId 0, partitionNodeId 1 exists in the endpoint pool. 2025-04-17T18:32:03.034552Z :TRACE: [local] TRACE_EVENT PreferredPartitionLocation Endpoint= NodeId=1 Generation=1 2025-04-17T18:32:03.034596Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Start write session. Will connect to nodeId: 1 2025-04-17T18:32:03.035724Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-17T18:32:03.038957Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: direct write to partition: 0, generation 1 2025-04-17T18:32:03.039139Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: send init request: init_request { path: "Checkpoints" producer_id: "b3cb685c-8f2f1852-58eb9f77-1f1e220a" partition_with_generation { generation: 1 } } 2025-04-17T18:32:03.039186Z :TRACE: [local] TRACE_EVENT InitRequest pwg_partition_id=0 pwg_generation=1 2025-04-17T18:32:03.039572Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: OnWriteDone gRpcStatusCode: 0 2025-04-17T18:32:03.040419Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-17T18:32:03.040481Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session will now close 2025-04-17T18:32:03.040602Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-17T18:32:03.041144Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-17T18:32:03.041808Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-17T18:32:03.041870Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-04-17T18:32:03.041932Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2025-04-17T18:32:03.042019Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [b3cb685c-8f2f1852-58eb9f77-1f1e220a|e009254f-bf5bd7a-2c07bb3d-ea165b67_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-17T18:32:03.602404Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-04-17T18:32:03.621076Z node 54 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
[Checkpoint 0.0] Send checkpoint state immediately 2025-04-17T18:32:03.621405Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "88624f02-19493393-a51357ba-ca5add60" } |99.3%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] |99.3%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> QuoterWithKesusTest::KesusRecreation [FAIL] >> QuoterWithKesusTest::AllocationStatistics |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test_public_api.py::TestSessionNotFound::test_session_not_found >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> KafkaProtocol::NativeKafkaBalanceScenario [GOOD] >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt >> 
QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt [GOOD] >> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnIncrementingInt >> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_result_limits.py::TestResultLimits::test_large_row [GOOD] >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[15] >> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.4%| [TM] {RESULT} ydb/tests/fq/restarts/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2025-04-17T18:28:42.925940Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-17T18:28:42.926114Z node 1 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:42.928879Z node 1 :QUOTER_PROXY WARN: [/Path/KesusName]: Failed to connect to tablet. 
Status: ERROR 2025-04-17T18:28:42.928933Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-17T18:28:42.997426Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-17T18:28:42.997540Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:42.997842Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:42.997941Z node 2 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-17T18:28:42.997975Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-17T18:28:43.012191Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.072457Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-17T18:28:43.072591Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:43.072881Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "/resource" 2025-04-17T18:28:43.072957Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2025-04-17T18:28:43.073018Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2025-04-17T18:28:43.073107Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.073177Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "resource//resource" 2025-04-17T18:28:43.073227Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2025-04-17T18:28:43.073253Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2025-04-17T18:28:43.141620Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-17T18:28:43.141776Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:43.141986Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-17T18:28:43.149585Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.197763Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-17T18:28:43.197865Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-17T18:28:43.197941Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-17T18:28:43.198046Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-17T18:28:43.216592Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-17T18:28:43.216727Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:43.217058Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res0" 2025-04-17T18:28:43.217236Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.217571Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-17T18:28:43.217622Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-17T18:28:43.217666Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res0", 42) 2025-04-17T18:28:43.217739Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2025-04-17T18:28:43.217871Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res1" 2025-04-17T18:28:43.217962Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res1" 2025-04-17T18:28:43.218261Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-17T18:28:43.218307Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-17T18:28:43.218347Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res1", 43) 2025-04-17T18:28:43.218402Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-17T18:28:43.218621Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res2" 2025-04-17T18:28:43.218716Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res2" 2025-04-17T18:28:43.218952Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-17T18:28:43.218984Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-17T18:28:43.219026Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res2", 44) 2025-04-17T18:28:43.219075Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2025-04-17T18:28:43.223752Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2025-04-17T18:28:43.223827Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2025-04-17T18:28:43.223888Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res1". Connected: 1 2025-04-17T18:28:43.224715Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 21474838532 } }) 2025-04-17T18:28:43.224802Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-17T18:28:43.225128Z node 5 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-17T18:28:43.225162Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-17T18:28:43.225299Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Mark "res1" for offline allocation. 
Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2025-04-17T18:28:43.225353Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2025-04-17T18:28:43.231646Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.232104Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-17T18:28:43.232191Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-17T18:28:43.232238Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-17T18:28:43.232295Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-17T18:28:43.232406Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }, { "res1", Normal, {0: Front(1, 2)} }, { "res2", Normal, {0: Front(1, 2)} }]) 2025-04-17T18:28:43.280931Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-17T18:28:43.281061Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:43.281311Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-17T18:28:43.281493Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.281801Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-17T18:28:43.281843Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-17T18:28:43.281893Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-17T18:28:43.281978Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-17T18:28:43.298463Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-17T18:28:43.298608Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:43.298943Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-17T18:28:43.299115Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.299430Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-17T18:28:43.299467Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-17T18:28:43.299516Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-17T18:28:43.299605Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-17T18:28:43.300029Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res", Consumed: 0, Queue: 25}]) 2025-04-17T18:28:43.300088Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res": { Available: 20, QueueWeight: 25 } 2025-04-17T18:28:43.300123Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res". Connected: 1 2025-04-17T18:28:43.300228Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-04-17T18:28:43.300305Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-17T18:28:43.300491Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyCloseSession("res", 42) 2025-04-17T18:28:43.300562Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Deactivate session to "res". Connected: 1 2025-04-17T18:28:43.300665Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-04-17T18:28:43.325318Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-17T18:28:43.325449Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-17T18:28:43.325598Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-17T18:28:43.325779Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-17T18:28:43.327635Z node 8 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 H ... { Available: -4.000100589, QueueWeight: 5 } 2025-04-17T18:32:59.174388Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.174465Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.200657Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:32:59.201024Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.201067Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.201137Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.201175Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.271597Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-17T18:32:59.271638Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-17T18:32:59.370000Z 2025-04-17T18:32:59.271668Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-17T18:32:59.271982Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-17T18:32:59.272012Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -3.000100589, QueueWeight: 5 } 2025-04-17T18:32:59.272054Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.272150Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.302487Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:32:59.306495Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.306551Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.306594Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.306653Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.371468Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-17T18:32:59.371510Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-17T18:32:59.470000Z 2025-04-17T18:32:59.371540Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-17T18:32:59.371688Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-17T18:32:59.371722Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -2.000100589, QueueWeight: 5 } 2025-04-17T18:32:59.371781Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.371874Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.402528Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:32:59.407253Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.407318Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.407368Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.410264Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.470757Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-17T18:32:59.470819Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-17T18:32:59.570000Z 2025-04-17T18:32:59.470840Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-17T18:32:59.474296Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-17T18:32:59.474338Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000100589, QueueWeight: 5 } 2025-04-17T18:32:59.474395Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.474470Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.500691Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:32:59.501073Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.501144Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.501187Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.501269Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.570693Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-17T18:32:59.570738Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-17T18:32:59.670000Z 2025-04-17T18:32:59.570759Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-17T18:32:59.570895Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-17T18:32:59.570926Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001005892461, QueueWeight: 5 } 2025-04-17T18:32:59.570965Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.571013Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.600543Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:32:59.601081Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.601133Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.601178Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998994108, 2)} }]) 2025-04-17T18:32:59.601227Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.670510Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0.9998994108. FreeBalance: 0.9998994108 2025-04-17T18:32:59.670554Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-17T18:32:59.770000Z 2025-04-17T18:32:59.670575Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-17T18:32:59.670634Z node 49 :QUOTER_SERVICE TRACE: Charge "Resource" for 5. Balance: 0.9998994108. FreeBalance: 0.9998994108. TicksToFullfill: 5.000502997. DurationToFullfillInUs: 500050.2997. TimeToFullfill: 2025-04-17T18:32:59.267776Z. Now: 2025-04-17T18:32:59.670450Z. LastAllocated: 2025-04-17T18:32:58.767726Z 2025-04-17T18:32:59.670840Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2025-04-17T18:32:59.670871Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000100589, QueueWeight: 0 } 2025-04-17T18:32:59.670913Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.670944Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.700907Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:32:59.701363Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.701431Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.701473Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.701526Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.770259Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-17T18:32:59.800758Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:32:59.802508Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.802562Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.802616Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.802701Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.802718Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-17T18:32:59.900983Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-17T18:32:59.901043Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-17T18:32:59.901083Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-17T18:32:59.901185Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T18:32:59.901203Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-17T18:32:59.900619Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7494351909484007968:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-17T18:33:02.998321Z node 49 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[49:7494351905189039849:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:33:02.998405Z node 49 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |99.4%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnIncrementingInt [GOOD] >> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] [GOOD] >> 
test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[21] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] >> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes >> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch >> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[23] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] >> test_public_api.py::TestBadSession::test_simple >> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] >> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_scheduling.py::TestSchedule::test_skip_busy[kikimr0] [SKIPPED] >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] >> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD] >> 
test_clickbench.py::TestClickbench::test_clickbench[27] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown >> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasNotFound >> test_public_api.py::TestBadSession::test_simple [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasNotFound [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasFoundButEpochIsOlder >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasFoundButEpochIsOlder [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfNewEpochIsLessButProducerIdIsNew |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfNewEpochIsLessButProducerIdIsNew [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondWithProducerFencedErrorIfNewEpochIsLessAndProducerIdIsTheSame >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondWithProducerFencedErrorIfNewEpochIsLessAndProducerIdIsTheSame [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfThereIsNoTransactionalIdInState >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfThereIsNoTransactionalIdInState [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerEpochExpired >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerEpochExpired [GOOD] >> 
KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToTheRelevantTransactionalIdActorIfProducerIsValid >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToTheRelevantTransactionalIdActorIfProducerIsValid [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToExistingTransactionActorIfProducerIsValid >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToExistingTransactionActorIfProducerIsValid [GOOD] >> KafkaTransactionCoordinatorActor::OnAddPartitions_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized >> KafkaTransactionCoordinatorActor::OnAddPartitions_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized [GOOD] >> KafkaTransactionCoordinatorActor::OnTxnOffsetCommit_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized >> KafkaTransactionCoordinatorActor::OnTxnOffsetCommit_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized [GOOD] >> KafkaTransactionCoordinatorActor::AfterSecondInitializationOldTxnRequestsShouldBeFenced >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> KafkaTransactionCoordinatorActor::AfterSecondInitializationOldTxnRequestsShouldBeFenced [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] >> TMetadataActorTests::TopicMetadataGoodAndBad [GOOD] >> PublishKafkaEndpoints::HaveEndpointInLookup >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] >> PublishKafkaEndpoints::HaveEndpointInLookup [GOOD] >> 
PublishKafkaEndpoints::MetadataActorGetsEndpoint >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[38] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> PublishKafkaEndpoints::MetadataActorGetsEndpoint [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode >> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithError |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> 
test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes >> test_public_api.py::TestJsonExample::test_json_unexpected_failure |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> PublishKafkaEndpoints::DiscoveryResponsesWithError [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] [GOOD] >> 
PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort [GOOD] >> PublishKafkaEndpoints::MetadataActorDoubleTopic |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> test_public_api.py::TestRecursiveCreation::test_mkdir >> PublishKafkaEndpoints::MetadataActorDoubleTopic [GOOD] >> RequestUtilityActors::DescribeConfigs |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> RequestUtilityActors::DescribeConfigs [GOOD] >> Serialization::RequestHeader [GOOD] >> Serialization::ResponseHeader [GOOD] >> Serialization::ApiVersionsRequest [GOOD] >> Serialization::ApiVersionsResponse [GOOD] >> Serialization::ApiVersion_WithoutSupportedFeatures [GOOD] >> Serialization::ProduceRequest [GOOD] >> Serialization::UnsignedVarint32 [GOOD] >> Serialization::UnsignedVarint64 [GOOD] >> Serialization::Varint32 [GOOD] >> Serialization::Varint64 [GOOD] >> Serialization::UnsignedVarint32_Wrong [GOOD] >> Serialization::UnsignedVarint64_Wrong [GOOD] >> Serialization::UnsignedVarint32_Deserialize [GOOD] >> Serialization::TKafkaInt8_NotPresentVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::Struct_IsDefault [GOOD] >> Serialization::TKafkaString_IsDefault [GOOD] >> 
Serialization::TKafkaString_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaArray_IsDefault [GOOD] >> Serialization::TKafkaArray_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaBytes_IsDefault [GOOD] >> Serialization::TKafkaBytes_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TRequestHeaderData_reference [GOOD] >> Serialization::TKafkaFloat64_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::RequestHeader_reference [GOOD] >> Serialization::ProduceRequestData [GOOD] >> Serialization::ProduceRequestData_Record_v0 [GOOD] >> test_public_api.py::TestAttributes::test_create_table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kafka_proxy/ut/unittest >> Serialization::ProduceRequestData_Record_v0 [GOOD] Test command err: 2025-04-17T18:28:24.751479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494350732409014473:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:24.751567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000417/r3tmp/tmpnwZ3iY/pdisk_1.dat 2025-04-17T18:28:25.276576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:28:25.309511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:25.310437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:25.315280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29961, node 1 2025-04-17T18:28:25.610191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/000417/r3tmp/yandexazsFuK.tmp 2025-04-17T18:28:25.610225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/000417/r3tmp/yandexazsFuK.tmp 2025-04-17T18:28:25.611848Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/000417/r3tmp/yandexazsFuK.tmp 2025-04-17T18:28:25.612004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:28:25.905839Z INFO: TTestServer started on Port 9111 GrpcPort 29961 TClient is connected to server localhost:9111 PQClient connected to localhost:29961 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:26.193569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:26.218042Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:28:26.224669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-17T18:28:26.236783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-17T18:28:27.755881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350745293917181:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:27.756007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494350745293917194:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:27.756111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:28:27.771858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-17T18:28:27.782728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494350745293917196:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-17T18:28:27.848419Z node 1 :TX_PROXY ERROR: Actor# [1:7494350745293917262:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:28:28.335434Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494350745293917277:2354], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:28:28.354982Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjcwNTQ0YmMtYjE5MDkzZmMtZDU0MWNhNWMtYWQ5ZjMyN2Q=, ActorId: [1:7494350745293917179:2341], ActorState: ExecuteState, TraceId: 01js2g4syr04g87sra4h2jyewa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:28:28.372907Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:28:28.422411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:28:28.452841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:28:28.567903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:28:28.986296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2g4tv37wsehpxga6p3harm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZlYWYwMjMtYjU0YWM5YjAtMzlmZThlZmUtOGZjNzAz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7494350753883852170:2639] 2025-04-17T18:28:29.751609Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494350732409014473:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:29.751678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Run with port = 29961, kafka port = 15463 2025-04-17T18:28:36.001236Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494350777322237900:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:28:36.001314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6873/000417/r3tmp/tmpJ1qtBb/pdisk_1.dat 2025-04-17T18:28:36.080023Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10009, node 2 2025-04-17T18:28:36.108856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:28:36.108949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:28:36.110225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:28:36.127070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6873/000417/r3tmp/yandex3jME9D.tmp 2025-04-17T18:28:36.127092Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6873/000417/r3tmp/yandex3jME9D.tmp 2025-04-17T18:28:36.127242Z node 2 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6873/000417/r3tmp/yandex3jME9D.tmp 2025-04-17T18:28:36.127362Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:28:36.175831Z INFO: TTestServer started on Port 22741 GrpcPort 10009 TClient is connected to server localhost:22741 PQClient connected to localhost:10009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:28:36.325816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:28:36.346688Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 28147497671565 ... 
tatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:36:34.289869Z node 42 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:36:34.904962Z node 42 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2gkn956czag4rdk0qtyy0q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=42&id=NTFmYWI0NTAtYTRhMDU0M2MtOTg4MDkzYTAtMjY4MDJhNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [42:7494352834890632430:2666] 2025-04-17T18:36:39.359652Z node 42 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T18:36:39.359701Z node 42 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 1 partitions CallPersQueueGRPC request to localhost:6163 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:6163 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710675 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic1, dc = dc1 2025-04-17T18:36:40.425936Z node 42 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01js2gktt4cz2www2ewqmdrw32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=42&id=OTMyNGNjMTMtODI3NmUzMWMtM2M0MjE4Yi02ZjdlMjE1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710675 CreateStep: 1744915000171 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20... (TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710675 CreateStep: 1744915000171 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic1" name rt3.dc1--topic1 version1 CallPersQueueGRPC request to localhost:6163 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:6163 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 
SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic1" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-17T18:36:40.985935Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-17T18:36:40.986800Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-17T18:36:40.986893Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:6163 2025-04-17T18:36:40.998166Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "topic1" message_group_id: "src" } 2025-04-17T18:36:41.614929Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744915001614 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-17T18:36:41.615117Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|63861cea-1c08dc41-8423b14d-7b903ec1_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-17T18:36:41.615438Z :INFO: [] MessageGroupId [src] SessionId [src|63861cea-1c08dc41-8423b14d-7b903ec1_0] Write session: close. Timeout = 0 ms 2025-04-17T18:36:41.615553Z :INFO: [] MessageGroupId [src] SessionId [src|63861cea-1c08dc41-8423b14d-7b903ec1_0] Write session will now close 2025-04-17T18:36:41.615658Z :DEBUG: [] MessageGroupId [src] SessionId [src|63861cea-1c08dc41-8423b14d-7b903ec1_0] Write session: aborting 2025-04-17T18:36:41.616890Z :DEBUG: [] MessageGroupId [src] SessionId [src|63861cea-1c08dc41-8423b14d-7b903ec1_0] Write session is aborting and will not restart 2025-04-17T18:36:41.616872Z :INFO: [] MessageGroupId [src] SessionId [src|63861cea-1c08dc41-8423b14d-7b903ec1_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:36:41.617022Z :DEBUG: [] MessageGroupId [src] SessionId [src|63861cea-1c08dc41-8423b14d-7b903ec1_0] Write session: destroy Response: self { name: "rt3.dc1--topic1" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744915000171 tx_id: 281474976710675 } } partitioning_settings { min_active_partitions: 1 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } retention_period { seconds: 86400 } supported_codecs { codecs: 1 codecs: 2 codecs: 3 } partition_write_speed_bytes_per_second: 20000000 partition_write_burst_bytes: 20000000 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_allow_unauthenticated_read" value: "true" } attributes { key: "_allow_unauthenticated_write" value: "true" } attributes { key: "_message_group_seqno_retention_period_ms" value: "86400000" } attributes { key: "_partitions_per_tablet" value: "5" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 
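[annotation] The ">>>>> Check value=..." sweep above and below comes from the kafka_proxy unittest (its [TM] {RESULT} marker follows shortly) and probes integer range edges: 127/128, 32191, 2^31-1/2^31, 2^32-1, 2^63-1, 2^64-1, and their negative counterparts, the classic off-by-one points for serializers. A common reason to sweep exactly these values is varint-style wire encoding, where 127/128 is also the one-byte/two-byte edge. As a hedged illustration only (the real test is C++ inside ydb/core/kafka_proxy/ut, and none of these helper names come from it), a minimal Python sketch of the encoding whose edges such a sweep exercises:

    # Hedged sketch, not YDB code: base-128 varint with zigzag for signed values.
    def zigzag(n: int) -> int:
        # Map 0,-1,1,-2,... -> 0,1,2,3,... so small magnitudes encode short.
        return (n << 1) ^ (n >> 63)

    def encode_uvarint(u: int) -> bytes:
        # Little-endian base-128: low 7 bits per byte, MSB = continuation flag.
        out = bytearray()
        while u > 0x7F:
            out.append((u & 0x7F) | 0x80)
            u >>= 7
        out.append(u)
        return bytes(out)

    for v in (0, 1, 127, 128, 32191, 2147483647, 4294967295):
        print(f">>>>> Check value={v} -> {len(encode_uvarint(v))} byte(s)")
    for v in (-1, -167966, -2147483648):
        print(f">>>>> Check value={v} -> {len(encode_uvarint(zigzag(v)))} byte(s)")

Checking both sides of each edge (127 encodes in one byte, 128 needs two, and so on) covers every branch of an encoder like this, which is why the logged values cluster around those exact boundaries.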
>>>>> Check value=4294967295 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Check value=18446744073709551615 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=-9223372036854775808 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Buffer size: 251 >>>>> Buffer size: 104 |99.6%| [TM] {RESULT} ydb/core/kafka_proxy/ut/unittest |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_public_api.py::TestDocApiTables::test_create_table |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> 
test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.6%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] |99.7%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown 
[GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_workload.py::TestYdbWorkload::test[row] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] 2025-04-17 18:38:12,204 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 18:38:12,481 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 759237 1002M 999M 909M ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/6873/0006e1/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 901525 1.9G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/6873/0006e1/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_inse Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/6873/0006e1/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...home/runner/.ya/build/build_root/6873/0006e1/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/6873/0006e1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/0006e1/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/0006e1', '--source-root', '/home/runner/.ya/build/build_root/6873/0006e1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/0006e1/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', 
'--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...home/runner/.ya/build/build_root/6873/0006e1/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/6873/0006e1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/0006e1/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/0006e1', '--source-root', '/home/runner/.ya/build/build_root/6873/0006e1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/0006e1/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.8%| [TM] {RESULT} ydb/tests/olap/scenario/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression 2025-04-17 18:38:12,144 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 18:38:12,756 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
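[annotation] The wrapper timeouts in this log (the ydb/tests/olap/scenario report above and the column_family/compression report that follows) share one shape: the 600 s test_tool deadline fires while pytest is blocked in a thread join, visible in the stacks as ydb/tests/olap/common/thread_helper.py line 45 in join_all, then thread.join(timeout=timeout). A minimal hedged sketch of that join-all pattern, simplified from what the tracebacks show and not the actual helper source:

    # Hedged sketch of the join-all pattern in the tracebacks
    # (modeled on ydb/tests/olap/common/thread_helper.py; simplified).
    import threading

    class TestThreads:
        def __init__(self) -> None:
            self.threads: list[threading.Thread] = []

        def start_thread(self, target, *args) -> None:
            t = threading.Thread(target=target, args=args)
            t.start()
            self.threads.append(t)

        def join_all(self, timeout: float | None = None) -> None:
            # With timeout=None each join() blocks until its worker exits;
            # one hung bulk-upsert or compression thread is enough to stall
            # the whole session until the external 600 s wrapper deadline
            # kills the process tree.
            for t in self.threads:
                t.join(timeout=timeout)

A bounded join plus a t.is_alive() check afterwards would let such a test fail with its own diagnostics instead of surfacing as a wrapper ExecutionTimeoutError.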
Process tree before termination: pid rss ref pdirt 759200 823M 822M 746M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/6873/000707/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 763906 5.7G 5.7G 5.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/6873/000707/ydb/tests/olap/column_family/compression/test-results/py3test/testing_ou Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 102, in test_all_supported_compression tasks.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007fb39e222640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 193 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 208 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 462 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 476 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fb3db6caa00 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 487 in stop File "ydb/tests/olap/column_family/compression/base.py", line 22 in teardown_class File "contrib/python/pytest/py3/_pytest/python.py", line 777 in _call_with_optional_argument File "contrib/python/pytest/py3/_pytest/python.py", line 860 in xunit_setup_class_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024 in finish File "contrib/python/pytest/py3/_pytest/fixtures.py", line 701 in File "contrib/python/pytest/py3/_pytest/runner.py", line 526 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 108 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 306 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in 
_hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/6873/000707/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/000707/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/000707', '--source-root', '/home/runner/.ya/build/build_root/6873/000707/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/000707/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/6873/000707/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/000707/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/000707', '--source-root', '/home/runner/.ya/build/build_root/6873/000707/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/000707/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', 
'--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2025-04-17 18:38:43,393 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-17 18:38:43,393 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.8%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpQueryService::ReplyPartLimitProxyNode >> test_workload.py::TestYdbWorkload::test >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> Backup::UuidValue >> ConsistentIndexRead::InteractiveTx ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-04-17 18:38:21,668 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 18:38:23,472 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 766866 596M 595M 515M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/6873/00099c/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 772491 11.8G 11.6G 11.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/6873/00099c/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpc 777685 493M 492M 459M └─ moto_server s3 --port 24174 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 97, in test_import_and_export self.ydb_client.query("INSERT INTO s3_table SELECT * FROM lineitem") File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007fd194299640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 193 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 208 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 462 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 476 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fd1c464ca00 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 487 in stop File "ydb/tests/olap/s3_import/base.py", line 24 in teardown_class File "contrib/python/pytest/py3/_pytest/python.py", line 777 in _call_with_optional_argument File "contrib/python/pytest/py3/_pytest/python.py", line 860 in 
xunit_setup_class_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024 in finish File "contrib/python/pytest/py3/_pytest/fixtures.py", line 701 in File "contrib/python/pytest/py3/_pytest/runner.py", line 526 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 108 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 306 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...unner/.ya/build/build_root/6873/00099c/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', '/home/runner/.ya/build/build_root/6873/00099c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/00099c/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/00099c', '--source-root', '/home/runner/.ya/build/build_root/6873/00099c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/00099c/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...unner/.ya/build/build_root/6873/00099c/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', 
'/home/runner/.ya/build/build_root/6873/00099c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/00099c/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/00099c', '--source-root', '/home/runner/.ya/build/build_root/6873/00099c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/00099c/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2025-04-17 18:38:54,088 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-17 18:38:54,088 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] |99.8%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_encryption.py::TestEncryption::test_simple_encryption >> Backup::UuidValue [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/scheme.pb" |99.8%| [TM] {RESULT} ydb/tests/functional/backup/unittest >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> KqpQuerySession::NoLocalAttach >> test_workload.py::TestYdbWorkload::test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_workload.py::TestYdbLogWorkload::test[row] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> S3PathStyleBackup::DisableVirtualAddressing >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] >> S3PathStyleBackup::DisableVirtualAddressing [FAIL] |99.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test
|99.8%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [FAIL]
Test command err:
assertion failed at ydb/tests/functional/backup/s3_path_style/s3_path_style_backup_ut.cpp:85, virtual void NTestSuiteS3PathStyleBackup::TTestCaseDisableVirtualAddressing::Execute_(NUnitTest::TTestContext &): (op->Status().IsSuccess())
TBackTrace::Capture()+28 (0x1C7E19C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1D82590)
NTestSuiteS3PathStyleBackup::TTestCaseDisableVirtualAddressing::Execute_(NUnitTest::TTestContext&)+13944 (0x194B228)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1954D68)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1DB0C66)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1D89119)
NTestSuiteS3PathStyleBackup::TCurrentTest::Execute()+1204 (0x1953BA4)
NUnitTest::TTestFactory::Execute()+2438 (0x1D8A9E6)
NUnitTest::RunMain(int, char**)+5213 (0x1DAB14D)
??+0 (0x7F5B633F5D90)
__libc_start_main+128 (0x7F5B633F5E40)
_start+41 (0x192D029)
|99.9%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD]
>> NodeIdDescribe::HasDistribution [GOOD]
>> KqpQuerySession::NoLocalAttach [GOOD]
>> Transfer::CreateTransfer
>> test_workload.py::TestYdbKvWorkload::test[row]
>> Transfer::CreateTransfer [GOOD]
>> Replication::Types
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest
>> Replication::Types [GOOD]
>> Replication::PauseAndResumeReplication
>> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> Replication::PauseAndResumeReplication [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::PauseAndResumeReplication [GOOD]
Test command err:
DDL: $l = ($x) -> {
    return [
        <| Key:CAST($x._offset AS Uint64) |>
    ];
};
CREATE TRANSFER `Transfer_7151053992898562159`
FROM `SourceTopic` TO `TargetTable` USING $l
WITH (
    CONNECTION_STRING = 'grpc://localhost'
);
>>>>> ACTUAL: {
: Error: Scheme operation failed, status: ExecError, reason: The transfer is only available in the Enterprise version }
>>>>> EXPECTED: The transfer is only available in the Enterprise version
DDL: CREATE TABLE `SourceTable_9374027483563006726` (
    Key Uint32,
    Key2 Uuid,
    v01 Uuid,
    v02 Uuid NOT NULL,
    v03 Double,
    PRIMARY KEY (Key, Key2)
);
>>>>> Query: UPSERT INTO `SourceTable_9374027483563006726` (Key,Key2,v01,v02,v03) VALUES (
    1,
    CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid),
    CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid),
    UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)),
    CAST("311111111113.222222223" as Double)
);
DDL: CREATE ASYNC REPLICATION `Replication_9374027483563006726`
FOR `SourceTable_9374027483563006726` AS `Table_9374027483563006726`
WITH (
    CONNECTION_STRING = 'grpc://localhost:13131/?database=local'
);
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_9374027483563006726` ORDER BY `Key2`, `v01`, `v02`, `v03`
>>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_9374027483563006726]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
Attempt=19 count=-1
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_9374027483563006726` ORDER BY `Key2`, `v01`, `v02`, `v03`
>>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_9374027483563006726]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
Attempt=18 count=-1
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_9374027483563006726` ORDER BY `Key2`, `v01`, `v02`, `v03`
Attempt=17 count=1
DDL: DROP ASYNC REPLICATION `Replication_9374027483563006726`;
DDL: DROP TABLE `SourceTable_9374027483563006726`
DDL: CREATE TABLE `SourceTable_12444304568151759653` (
    Key Uint64 NOT NULL,
    Message Utf8,
    PRIMARY KEY (Key)
);
DDL: CREATE ASYNC REPLICATION `Replication_12444304568151759653`
FOR `SourceTable_12444304568151759653` AS `Table_12444304568151759653`
WITH (
    CONNECTION_STRING = 'grpc://localhost:13131/?database=local'
);
>>>>> Query: INSERT INTO `SourceTable_12444304568151759653` (`Key`, `Message`) VALUES (1, 'Message-1');
>>>>> Query: INSERT INTO `SourceTable_12444304568151759653` (`Key`, `Message`) VALUES (1, 'Message-1');
>>>>> Query: SELECT `Message` FROM `Table_12444304568151759653` ORDER BY `Message`
Attempt=19 count=1
State: Paused
DDL: ALTER ASYNC REPLICATION `Replication_12444304568151759653` SET ( STATE = "Paused" );
>>>>> Query: INSERT INTO `SourceTable_12444304568151759653` (`Key`, `Message`) VALUES (2, 'Message-2');
>>>>> Query: SELECT `Message` FROM `Table_12444304568151759653` ORDER BY `Message`
Attempt=19 count=1
State: StandBy
DDL: ALTER ASYNC REPLICATION `Replication_12444304568151759653` SET ( STATE = "StandBy" );
>>>>> Query: SELECT `Message` FROM `Table_12444304568151759653` ORDER BY `Message`
Attempt=19 count=1
>>>>> Query: SELECT `Message` FROM `Table_12444304568151759653` ORDER BY `Message`
Attempt=18 count=2
DDL: ALTER ASYNC REPLICATION `Replication_12444304568151759653` SET ( STATE = "Paused" );
DDL: ALTER ASYNC REPLICATION `Replication_12444304568151759653` SET ( STATE = "StandBy" );
DDL: DROP ASYNC REPLICATION `Replication_12444304568151759653`;
DDL: DROP TABLE `SourceTable_12444304568151759653`
|99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD]
|99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[4]
>> test_encryption.py::TestEncryption::test_simple_encryption [GOOD]
>> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD]
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD]
|99.9%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbWorkload::test[column]
>> test_tpch.py::TestTpchS1::test_tpch[4] [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[4] [FAIL]
2025-04-17 18:41:09,567 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-17 18:41:10,816 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
841167 602M 599M 522M ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/6873/000ebf/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest
842206 9.0G 8.8G 8.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/6873/000ebf/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t
843891 4.2G 4.2G 3.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/6873/000ebf/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t
963800 133M 137M 71.2M └─ ydb -e grpc://localhost:1953 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/6873/000ebf/ydb/tests/functional/tp
Test command err:
File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/load/lib/tpch.py", line 49, in test_tpch
    self.run_workload_test(self._get_path(), query_num)
File "ydb/tests/olap/load/lib/conftest.py", line 266, in run_workload_test
    result = YdbCliHelper.workload_run(
File "ydb/tests/olap/lib/ydb_cli.py", line 320, in workload_run
    ).process()
File "ydb/tests/olap/lib/ydb_cli.py", line 293, in process
    process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False)
File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute
    res.wait(check_exit_code, timeout, on_timeout)
File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait
    _wait()
File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait
    pid, sts, rusage = os.wait4(self._process.pid, 0)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: .../build/build_root/6873/000ebf/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/000ebf/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/000ebf', '--source-root', '/home/runner/.ya/build/build_root/6873/000ebf/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/000ebf/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: ((".../build/build_root/6873/000ebf/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/6873/000ebf/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/6873/000ebf', '--source-root', '/home/runner/.ya/build/build_root/6873/000ebf/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/6873/000ebf/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {})
>> test_workload.py::TestYdbKvWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbKvWorkload::test[column]
|99.9%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbWorkload::test [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[column]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test
>> ConsistentIndexRead::InteractiveTx [GOOD]
>> KqpExtTest::SecondaryIndexSelectUsingScripting
>> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest
>> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test
>> test_workload.py::TestYdbWorkload::test[column] [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test
|99.9%| CLEANING BUILD ROOT
Number of suites skipped by size: 96
ydb/core/viewer/tests [size:medium]
------ sole chunk ran 28 tests (total:47.87s - test:47.20s canon:0.34s)
[fail] test.py::test_topic_data [default-linux-x86_64-release-asan] (4.17s)
Test results differ from canonical:
test_result['response_metadata']['Messages'][0]['TimestampDiff']: canonical results type differs from test results: expected ('not-zero-number'), got (0)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.test_topic_data.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
------ FAIL: 27 - GOOD, 1 - FAIL ydb/core/viewer/tests
------ sole chunk ran 12 tests (total:610.39s - recipes:7.37s test:599.44s recipes:3.43s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.6G (9028944K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
737829 44.9M 44.8M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
737880 34.1M 22.5M 9.8M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
739502 716M 716M 413M │ └─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest
859609 628M 13.4M 389M │ ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
859610 5.4G 5.5G 5.1G │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test
738581 2.0G 2.0G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/ydb_data_t
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/stderr
ydb/tests/fq/streaming_optimize [size:medium] nchunks:4
------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:118.96s - recipes:0.57s test:117.70s recipes:0.58s)
[fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (30.43s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/log.txt
--udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v0ogggc4/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f04687df9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f04687d9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0468062d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0468160464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0468160464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0468160464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0468061b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f046810e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0468006e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0468006e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0468006c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f046815de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f046815de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423687 byte(s) leaked in 8242 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (14.66s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) 
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ej2bbth4/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f167111f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f1671119500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f16709a2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f1670aa0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f1670aa0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f1670aa0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f16709a1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f1670a4e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f1670946e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f1670946e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f1670946c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f1670a9de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f1670a9de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (13.73s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yysfnorl/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb1636cf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb1636c9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb162f52d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb163050464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb163050464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb163050464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb162f51b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb162ffe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb162ef6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb162ef6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb162ef6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb16304de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb16304de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423457 byte(s) leaked in 8237 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (12.92s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ejthv1v/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f8db8cbf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f8db8cb9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f8db8542d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f8db8640464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f8db8640464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f8db8640464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f8db8541b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f8db85ee90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f8db84e6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f8db84e6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f8db84e6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f8db863de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f8db863de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (13.57s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inw07u59/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f35decff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f35decf9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f35de582d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f35de680464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f35de680464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f35de680464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f35de581b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f35de62e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f35de526e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f35de526e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f35de526c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f35de67de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f35de67de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (13.60s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia1s7nnm/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f02ea95f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f02ea959500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f02ea1e2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f02ea2e0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f02ea2e0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f02ea2e0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f02ea1e1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f02ea28e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f02ea186e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f02ea186e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f02ea186c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f02ea2dde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f02ea2dde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423603 byte(s) leaked in 8240 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (14.64s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvynfs94/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f954580f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9545809500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9545092d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9545190464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9545190464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9545190464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9545091b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f954513e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9545036e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9545036e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9545036c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f954518de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f954518de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:132.39s - recipes:2.50s test:129.16s recipes:0.60s) [fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (42.45s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkovcqk/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fab2dfdf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fab2dfd9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fab2d862d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fab2d960464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fab2d960464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fab2d960464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fab2d861b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fab2d90e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fab2d806e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fab2d806e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fab2d806c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fab2d95de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fab2d95de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (13.61s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ckjiosi6/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe7b7c3f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe7b7c39500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe7b74c2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe7b75c0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe7b75c0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe7b75c0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe7b74c1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe7b756e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe7b7466e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe7b7466e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe7b7466c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe7b75bde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe7b75bde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423457 byte(s) leaked in 8237 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (13.11s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_senelch8/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fba6808f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fba68089500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fba67912d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fba67a10464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fba67a10464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fba67a10464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fba67911b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fba679be90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fba678b6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fba678b6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fba678b6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fba67a0de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fba67a0de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (12.88s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_drtedx6y/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f237dd7f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f237dd79500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f237d602d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f237d700464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f237d700464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f237d700464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f237d601b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f237d6ae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f237d5a6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f237d5a6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f237d5a6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f237d6fde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f237d6fde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (13.37s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tyhzrtmb/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f41d0d4f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f41d0d49500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f41d05d2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f41d06d0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f41d06d0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f41d06d0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f41d05d1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f41d067e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f41d0576e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f41d0576e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f41d0576c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f41d06cde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f41d06cde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (13.38s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y53t783c/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2aa6d8f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2aa6d89500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2aa6612d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2aa6710464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2aa6710464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2aa6710464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2aa6611b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2aa66be90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2aa65b6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2aa65b6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2aa65b6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2aa670de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2aa670de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (13.89s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p91it9_m/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f8324f5f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f8324f59500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f83247e2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f83248e0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f83248e0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f83248e0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f83247e1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f832488e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f8324786e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f8324786e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f8324786c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f83248dde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f83248dde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:130.28s - recipes:1.93s test:127.56s recipes:0.66s) [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (42.46s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6106sqc8/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fdfd291f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fdfd2919500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fdfd21a2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fdfd22a0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fdfd22a0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fdfd22a0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fdfd21a1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fdfd224e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fdfd2146e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fdfd2146e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fdfd2146c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fdfd229de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fdfd229de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423601 byte(s) leaked in 8240 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (13.86s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") 
ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mem2t2ue/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f272569f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2725699500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2724f22d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2725020464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2725020464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2725020464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2724f21b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2724fce90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2724ec6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2724ec6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2724ec6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f272501de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f272501de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (12.99s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fr42ks2t/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6655b9f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6655b99500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6655422d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6655520464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6655520464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6655520464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6655421b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f66554ce90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f66553c6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f66553c6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f66553c6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f665551de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f665551de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (12.93s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sqwf671v/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f034071f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0340719500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f033ffa2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f03400a0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f03400a0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f03400a0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f033ffa1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f034004e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f033ff46e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f033ff46e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f033ff46c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f034009de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f034009de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (12.63s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e580u9v2/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2835fff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2835ff9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2835882d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2835980464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2835980464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2835980464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2835881b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f283592e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2835826e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2835826e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2835826c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f283597de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f283597de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (12.80s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahev4h92/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f62b90cf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f62b90c9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f62b8952d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f62b8a50464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f62b8a50464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f62b8a50464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f62b8951b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f62b89fe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f62b88f6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f62b88f6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f62b88f6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f62b8a4de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f62b8a4de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (13.51s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e5pqg011/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd18a47f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd18a479500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd189d02d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd189e00464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd189e00464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd189e00464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd189d01b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd189dae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd189ca6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd189ca6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd189ca6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd189dfde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd189dfde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:127.63s - recipes:0.52s test:126.27s recipes:0.72s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (42.45s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7e_2xb78/topic_3.txt' has failed with code 100. 
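The leak reports in these blocks point at one-time allocations during Python interpreter start-up (__pyx_pymod_exec_sitecustomize and the import machinery). One way to check locally whether the report is confined to start-up is to rerun the binary with a LeakSanitizer suppressions file; this is standard LSAN machinery, not a project-specific recipe, and the suppression pattern is an assumption based on the frames shown:

```python
import os
import subprocess
import tempfile

# Suppression pattern is an assumption derived from the frames in the report
# (interpreter start-up imports in sitecustomize), not a vetted allow-list.
SUPPRESSIONS = "leak:__pyx_pymod_exec_sitecustomize\n"

def run_with_lsan_suppressions(cmd):
    with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as supp:
        supp.write(SUPPRESSIONS)
    env = dict(os.environ,
               LSAN_OPTIONS=f"suppressions={supp.name}:print_suppressions=1")
    return subprocess.run(cmd, env=env)
```

If the run comes back clean with the suppression active, the leak is attributable to start-up rather than to the query under test.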
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6d4b16f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6d4b169500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6d4a9f2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6d4aaf0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6d4aaf0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6d4aaf0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6d4a9f1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6d4aa9e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6d4a996e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6d4a996e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6d4a996c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6d4aaede0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6d4aaede0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (13.63s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_67r25j_v/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd6cc57f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd6cc579500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd6cbe02d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd6cbf00464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd6cbf00464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd6cbf00464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd6cbe01b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd6cbeae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd6cbda6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd6cbda6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd6cbda6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd6cbefde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd6cbefde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (13.21s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q60r62sm/topic_3.txt' has failed with code 100. 
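All of these streaming_optimize failures carry the identical summary, 423744 byte(s) leaked in 8243 allocation(s), differing only in ASLR addresses and per-test temp directories, which suggests a single root cause rather than 28 distinct leaks. When triaging such a run it can help to bucket the [fail] blocks by a normalized leak signature; a rough sketch (the helper and its regexes are illustrative, tuned to the log format seen here):

```python
import re
from collections import defaultdict

ADDR = re.compile(r"0x[0-9a-f]+")
TMPDIR = re.compile(r"test_fqrun_[0-9a-z_]+")
SUMMARY = re.compile(r"SUMMARY: AddressSanitizer: \d+ byte\(s\) leaked in \d+ allocation\(s\)")
FRAME = re.compile(r"in ([A-Za-z_][\w:<>~]*)")

def leak_signature(block: str) -> str:
    # Bucket key: the SUMMARY line plus the first few symbolized frames.
    m = SUMMARY.search(block)
    frames = FRAME.findall(block)
    return (m.group(0) if m else "<no summary>") + " | " + " > ".join(frames[:5])

def group_failures(log_text: str) -> dict:
    buckets = defaultdict(list)
    for block in re.split(r"\[fail\] ", log_text)[1:]:
        test_name = block.split(" ", 1)[0]
        normalized = TMPDIR.sub("test_fqrun_*", ADDR.sub("0xADDR", block))
        buckets[leak_signature(normalized)].append(test_name)
    return buckets
```

Run over this section, such a grouping would collapse the 28 failures into one bucket keyed by the shared leak signature.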
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7d9a97f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7d9a979500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7d9a202d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7d9a300464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7d9a300464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7d9a300464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7d9a201b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7d9a2ae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7d9a1a6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7d9a1a6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7d9a1a6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7d9a2fde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7d9a2fde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (12.68s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rwjzq8xc/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f41535cf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f41535c9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f4152e52d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f4152f50464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f4152f50464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f4152f50464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f4152e51b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f4152efe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f4152df6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f4152df6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f4152df6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f4152f4de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f4152f4de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (12.79s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wrfaad3o/topic_3.txt' has failed with code 100. 
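Each failing command line in these records can be replayed outside the harness; the flags (--cfg, --query, --emulate-pq=name@file and so on) are taken verbatim from the log, while the wrapper below is only a hypothetical convenience for local reproduction:

```python
import subprocess

def replay_fqrun(fqrun_bin, work_dir, topics):
    # Rebuild the fqrun invocation seen in the failure records above.
    cmd = [
        fqrun_bin,
        "--exclude-linked-udfs",
        "--action=explain",
        f"--cfg={work_dir}/fq_config.conf",
        f"--result-file={work_dir}/results.txt",
        f"--ast-file={work_dir}/ast.txt",
        f"--plan-file={work_dir}/plan.json",
        f"--log-file={work_dir}/log.txt",
        "--result-format=full-proto",
        "--canonical-output",
        f"--query={work_dir}/query_0.sql",
    ]
    # e.g. {"test_topic_input": f"{work_dir}/topic_0.txt", ...}
    cmd += [f"--emulate-pq={name}@{path}" for name, path in topics.items()]
    return subprocess.run(cmd, check=False)
```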
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f54c1cff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f54c1cf9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f54c1582d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f54c1680464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f54c1680464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f54c1680464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f54c1581b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f54c162e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f54c1526e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f54c1526e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f54c1526c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f54c167de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f54c167de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (12.38s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__jwsbz4c/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f22a339f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f22a3399500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f22a2c22d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f22a2d20464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f22a2d20464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f22a2d20464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f22a2c21b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f22a2cce90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f22a2bc6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f22a2bc6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f22a2bc6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f22a2d1de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f22a2d1de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (13.80s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
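As the frame above shows, fqrun.py builds a single command string and tokenizes it with cmd.strip().split(). That is fine for the space-free paths in this run; shlex.split() would be the drop-in equivalent that also survives quoted paths containing spaces. An illustrative comparison, not a required fix:

```python
import shlex

cmd = ("/path/to/fqrun --exclude-linked-udfs --action=explain "
       "--cfg='/tmp/dir with spaces/fq_config.conf'")
naive = cmd.strip().split()        # splits the quoted path into several tokens
robust = shlex.split(cmd.strip())  # keeps it as a single --cfg=... argument
assert len(robust) < len(naive)
```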
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u2jbmuei/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9cd491f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9cd4919500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9cd41a2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9cd42a0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9cd42a0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9cd42a0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9cd41a1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9cd424e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9cd4146e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9cd4146e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9cd4146c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9cd429de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9cd429de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize ------ [test_discovery.py] chunk ran 3 tests (total:159.28s - test:159.17s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 13.8G (14474612K) used. 
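When a run exceeds its memory limit the harness prints the suggestion and process tree shown just below; a rough way to pick the X in REQUIREMENTS(ram:X) is to sum the rss column of that tree and add headroom. A best-effort sketch, assuming the "pid rss ref pdirt" layout printed here:

```python
import math
import re

SIZE = re.compile(r"([\d.]+)([KMG])$")
GIB = {"K": 1.0 / (1024 * 1024), "M": 1.0 / 1024, "G": 1.0}

def peak_rss_gib(tree_text: str) -> float:
    total = 0.0
    for line in tree_text.splitlines():
        cols = line.split()
        if len(cols) >= 2 and cols[0].isdigit():
            m = SIZE.match(cols[1])  # rss is the column right after the pid
            if m:
                total += float(m.group(1)) * GIB[m.group(2)]
    return total

def suggested_ram_gib(tree_text: str, headroom_gib: int = 2) -> int:
    # e.g. ~13.8G observed peak -> REQUIREMENTS(ram:16) in ya.make
    return math.ceil(peak_rss_gib(tree_text)) + headroom_gib
```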
This may lead to test failure on the Autocheck/CI. You can increase the test's ram requirement using REQUIREMENTS(ram:X) in the ya.make.
    pid    rss    ref  pdirt
 832241  44.9M  44.3M   6.3M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 832328  33.9M  21.7M   9.5M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 832330   816M   806M   743M     └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
 862822   1.5G   1.5G   1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 862848   1.3G   1.3G   946M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 874210   1.3G     0b     0b        │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
 862868   1.4G   1.4G   965M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 874181   1.4G     0b     0b        │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
 862869   1.4G   1.4G   1.0G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 862871   1.4G   1.4G   1.0G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 862873   1.4G     0b     0b        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 874164   1.4G     0b     0b        │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
 872753   1.4G     0b     0b        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 874195   1.4G     0b     0b        │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
 872766   1.4G     0b     0b        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 874160   1.4G     0b     0b        │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
 872799   1.4G   1.4G   980M        └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
Stderr:
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr
ydb/tests/functional/hive [size:medium] nchunks:80
------ [test_drain.py 0/20] chunk ran 1 test (total:68.31s - setup:0.03s test:68.04s)
[fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-release-asan] (61.17s)
ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop
    wait_tablets_are_active(
ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active
    predicate(raise_error=True)
ydb/tests/library/common/delayed.py:141: in predicate
    raise AssertionError(
E   AssertionError:
E   ##############################
E   0 seconds passed, 56 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224038015: 4) (72075186224038399: None) (72075186224038628: 4) (72075186224038632: 4) (72075186224038636: 4) (72075186224038652: 4) (72075186224038664: 4) (72075186224038680: 4) (72075186224038740: 4) (72075186224038752: 4). Additional info is empty
E   ##############################
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff
------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive
ydb/tests/functional/postgresql [size:medium]
------ sole chunk ran 14 tests (total:98.10s - test:97.56s)
[fail] test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [default-linux-x86_64-release-asan] (48.12s)
teardown failed: ydb/tests/functional/postgresql/test_postgres.py:77: in teardown_class
    cls.cluster.stop()
ydb/tests/library/harness/kikimr_runner.py:494: in stop
    raise daemon.SeveralDaemonErrors(saved_exceptions)
E   ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code..
E   Process exit_code = 100.
E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==740672==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 26880 byte(s) in 7 object(s) allocated from: E #0 0x1d95a39d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x442412ef in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x442412ef in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x44232b89 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x44232b89 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x4401a956 in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x4401a956 in insert /-S/util/generic/hash.h:153:20 E #9 0x4401a956 in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:345:23 E #10 0x43ffb800 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:472:5 E #11 0x44241300 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:373:11 E #12 0x44241300 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x44241300 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x44232b89 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x44232b89 in NYql::CreateYtNativeState(TIntrusivePtr>, TBas... 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.float8.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff ------ FAIL: 13 - GOOD, 1 - FAIL ydb/tests/functional/postgresql ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 4/10] chunk ran 2 tests (total:230.44s - setup:0.01s test:229.42s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (54.79s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. 
Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (165.96s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. 
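The two disk-quota failures here are expectation mismatches rather than crashes: the server now returns "Disk space exhausted ... issue_code: 2033", while the test still greps for the enum name DISK_SPACE_EXHAUSTED (the mismatched pattern and input are shown below). A minimal adjustment would match on the observed message or the issue code; whether that is the intended fix is an assumption:

```python
import pytest
import ydb

def write_and_expect_out_of_space(do_write):
    # Match the message actually returned in this run; matching on text
    # versus on issue_code 2033 is an assumption, not the confirmed fix.
    with pytest.raises(ydb.Unavailable,
                       match=r"Disk space exhausted|out of disk space") as exc_info:
        do_write()
    # issue_code 2033 accompanies the out-of-space condition in this log
    assert any(getattr(issue, "issue_code", None) == 2033
               for issue in exc_info.value.issues)
```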
E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 20 - GOOD, 2 - FAIL ydb/tests/functional/serverless ydb/tests/functional/sqs/cloud [size:medium] nchunks:40 ------ [34/40] chunk ran 2 tests (total:114.66s - setup:0.02s test:114.58s) [fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (84.42s) ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines) E AssertionError: Got only 0 event lines after all attempts E assert 0 >= 2 E + where 0 = len([]) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff ------ FAIL: 75 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud ------ sole chunk ran 1 test (total:102.29s - test:101.66s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.4G (15056756K) used. 
This may lead to test failure on the Autocheck/CI. You can increase the test's ram requirement using REQUIREMENTS(ram:X) in the ya.make.
    pid    rss    ref  pdirt
 740225  44.9M  44.9M   6.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 740339  34.4M  22.6M   9.8M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 740390   782M   782M   703M     └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -
 743881   1.7G   1.7G   1.2G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 743882   1.6G   1.6G   1.2G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 743883   1.6G   1.6G   1.2G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 743884   1.7G   1.7G   1.2G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 743887   1.6G   1.6G   1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 743898   1.7G   1.5G   1.0G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 743899   1.6G   1.5G  1011M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 743900   1.7G   1.4G   953M        └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr
ydb/tests/functional/tpc/medium [size:medium] nchunks:3
------ [test_tpch.py] chunk ran 22 tests (total:628.91s - test:600.09s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
  test_tpch.py::TestTpchS1::test_tpch[1] (good) duration: 461.76s
  test_tpch.py::TestTpchS1::test_tpch[3] (good) duration: 77.26s
  test_tpch.py::TestTpchS1::test_tpch[4] (fail) duration: 45.61s
  test_tpch.py::TestTpchS1::test_tpch[2] (good) duration: 36.28s
18 tests were not launched inside chunk.
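The chunk summary above shows why the timeout fires: tpch[1] alone consumes 461.76s of the 600s budget, so the remaining 18 tests never start. Splitting tests across chunks by historical duration instead of by count avoids that; a greedy longest-first packing sketch (durations taken from this log, the helper itself is hypothetical):

```python
import heapq

def pack_chunks(durations: dict, nchunks: int):
    # Greedy longest-first bin packing: each test goes into the currently
    # lightest chunk, so no single chunk blows the 600s budget.
    heap = [(0.0, i, []) for i in range(nchunks)]
    heapq.heapify(heap)
    for test, dur in sorted(durations.items(), key=lambda kv: -kv[1]):
        load, i, tests = heapq.heappop(heap)
        tests.append(test)
        heapq.heappush(heap, (load + dur, i, tests))
    return sorted(heap)

# Splitting the four observed tests over two chunks keeps both well
# under the timeout: ~461.8s and ~159.2s.
print(pack_chunks({"tpch[1]": 461.76, "tpch[3]": 77.26,
                   "tpch[4]": 45.61, "tpch[2]": 36.28}, nchunks=2))
```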
ydb/tests/functional/tpc/medium [size:medium] nchunks:3
------ [test_tpch.py] chunk ran 22 tests (total:628.91s - test:600.09s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch.py::TestTpchS1::test_tpch[1] (good) duration: 461.76s
test_tpch.py::TestTpchS1::test_tpch[3] (good) duration: 77.26s
test_tpch.py::TestTpchS1::test_tpch[4] (fail) duration: 45.61s
test_tpch.py::TestTpchS1::test_tpch[2] (good) duration: 36.28s
18 tests were not launched inside chunk.
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr
[fail] test_tpch.py::TestTpchS1::test_tpch[4] [default-linux-x86_64-release-asan] (45.61s)
ydb/tests/olap/load/lib/tpch.py:49: in test_tpch
    self.run_workload_test(self._get_path(), query_num)
ydb/tests/olap/load/lib/conftest.py:278: in run_workload_test
    self.process_query_result(result, query_num, qparams.iterations, True)
ydb/tests/olap/load/lib/conftest.py:233: in process_query_result
    raise exc
ydb/tests/olap/lib/ydb_cli.py:293: in process
    process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:400: in wait
    _wait()
library/python/testing/yatest_common/yatest/common/process.py:335: in _wait
    pid, sts, rusage = os.wait4(self._process.pid, 0)
library/python/pytest/plugins/ya.py:347: in _graceful_shutdown
    _graceful_shutdown_on_log(not capman.is_globally_capturing())
library/python/pytest/plugins/ya.py:321: in _graceful_shutdown_on_log
    pytest.exit("Graceful shutdown requested")
E Failed: Graceful shutdown requested
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.4.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
------ TIMEOUT: 55 - GOOD, 1 - FAIL, 18 - NOT_LAUNCHED ydb/tests/functional/tpc/medium
ydb/tests/olap/column_family/compression [size:medium]
------ sole chunk ran 2 tests (total:632.09s - setup:0.02s test:600.11s)
Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 626.07s
alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr
[timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (626.07s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression
ydb/tests/olap/s3_import [size:medium]
------ sole chunk ran 1 test (total:633.46s - setup:0.07s test:600.11s)
Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 626.89s
Info: Test run has exceeded 8.0G (8388608K) memory limit with 12.7G (13359520K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
766390 44.9M 44.9M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
766702 42.6M 31.0M 18.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
766866 775M 774M 693M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
772491 11.8G 11.6G 11.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/te
777685 493M 0b 0b └─ moto_server s3 --port 24174
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (626.89s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 9 tests (total:624.70s - setup:0.02s test:600.13s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 422.48s
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 195.07s
7 tests were not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
[timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (195.07s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - GOOD, 7 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario
ydb/tests/olap/ttl_tiering [size:medium] nchunks:7
------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:622.30s - setup:0.03s test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 616.90s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (616.90s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ [ttl_delete_s3.py] chunk ran 3 tests (total:626.91s - setup:0.01s test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (good) duration: 526.09s
ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 84.26s
ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (84.26s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ TIMEOUT: 5 - GOOD, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering
ydb/tests/stress/olap_workload/tests [size:medium]
------ sole chunk ran 1 test (total:32.26s - test:32.19s)
Test failed with 1 exit code. See logs for more info
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr
[crashed] test_workload.py::TestYdbWorkload::test [default-linux-x86_64-release-asan] (0.00s)
Test crashed
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbWorkload.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff
------ FAIL: 1 - CRASHED ydb/tests/stress/olap_workload/tests
------ [test_disk.py 0/10] chunk ran 1 test (total:51.71s - test:51.63s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.1G (13715484K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
663259 44.9M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
663280 33.8M 22.0M 9.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
663293 792M 790M 712M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
664214 1.3G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
686443 1.3G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
664220 1.2G 1.2G 840M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
686770 1.2G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
664227 1.3G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
686727 1.3G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
664237 1.3G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
686455 1.3G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
664238 1.3G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
664240 1.4G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
686543 1.4G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr
------ [test_tablet.py 0/10] chunk ran 1 test (total:108.09s - test:107.91s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.8G (14439892K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
658732 44.9M 44.3M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
658979 33.4M 21.6M 8.9M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
659020 755M 739M 676M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
661092 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
661093 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
695088 1.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
661094 1.5G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
694978 1.5G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
661095 1.6G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
695036 1.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
661096 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
695089 1.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
661097 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
661098 1.5G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
695079 1.5G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
661100 1.4G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
695101 1.4G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr
------ [1/10] chunk ran 1 test (total:252.25s - test:252.14s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.7G (12284740K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
187087 44.9M 44.9M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
187171 34.1M 22.1M 9.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
187173 46.0M 43.9M 23.0M └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testin
190244 5.8G 5.8G 5.7G └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing
249599 5.8G 5.8G 5.7G └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balanc
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff/stderr
ydb/core/keyvalue/ut_trace [size:medium] nchunks:5
------ [0/5] chunk ran 1 test (total:9.21s - test:9.16s)
[fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.83s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out
------ [1/5] chunk ran 1 test (total:7.67s - test:7.63s)
[fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.24s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out
------ [2/5] chunk ran 1 test (total:82.98s - test:82.93s)
[fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (5.67s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out
------ [3/5] chunk ran 1 test (total:7.20s - test:7.14s)
[fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.41s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out
------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace
ydb/core/kqp/ut/cost [size:medium] nchunks:50
------ [12/50] chunk ran 1 test (total:21.95s - test:21.92s)
[crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: 100)
==274491==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018e0e20d bp 0x7fffaa94c420 sp 0x7fffaa94c280 T0)
==274491==The signal is caused by a READ memory access.
==274491==Hint: address points to the zero page.
2025-04-17T18:03:45.801222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T18:03:45.801265Z node 1 :IMPORT WARN: Table profiles were not loaded
#0 0x18e0e20d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x18e0e20d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x18e0e20d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x18e0e20d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x18e0e20d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x18e33327 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x18e33327 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x18e33327 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#8 0x18e33327 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#9 0x18e33327 in std::__y1::__function::__func< ..[snippet truncated]..
#10 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#11 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#12 0x197855d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x19754f68 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#14 0x18e321d3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x19756835 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#16 0x1977fb4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7efc2f6bed8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7efc2f6bee3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x16571028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16571028) (BuildId: 1bdc4290ca22bc21b0df7e2b25fc7bf1adbeab0b)
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==274491==ABORTING
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out
------ FAIL: 21 - GOOD, 1 - CRASHED ydb/core/kqp/ut/cost
ydb/core/kqp/ut/federated_query/generic_ut nchunks:10
------ [0/10] chunk ran 4 tests (total:91.23s - test:60.08s)
Chunk exceeded 60s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
GenericFederatedQuery::ClickHouseFilterPushdown (timeout) duration: 41.96s
GenericFederatedQuery::ClickHouseManagedSelectAll (good) duration: 16.94s
GenericFederatedQuery::ClickHouseSelectCount (good) duration: 16.66s
GenericFederatedQuery::ClickHouseManagedSelectConstant (good) duration: 13.78s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr
[timeout] GenericFederatedQuery::ClickHouseFilterPushdown [default-linux-x86_64-release-asan] (41.96s)
Killed by timeout (60 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.ClickHouseFilterPushdown.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.ClickHouseFilterPushdown.out
------ [2/10] chunk ran 4 tests (total:91.28s - test:60.13s)
Chunk exceeded 60s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
GenericFederatedQuery::IcebergHadoopSaFilterPushdown (timeout) duration: 45.40s
GenericFederatedQuery::IcebergHadoopSaSelectAll (good) duration: 16.47s
GenericFederatedQuery::IcebergHadoopSaSelectCount (good) duration: 14.62s
GenericFederatedQuery::IcebergHadoopSaSelectConstant (good) duration: 12.35s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr
[timeout] GenericFederatedQuery::IcebergHadoopSaFilterPushdown [default-linux-x86_64-release-asan] (45.40s)
Killed by timeout (60 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.IcebergHadoopSaFilterPushdown.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.IcebergHadoopSaFilterPushdown.out
------ [4/10] chunk ran 4 tests (total:91.64s - setup:0.04s test:60.11s)
Chunk exceeded 60s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
GenericFederatedQuery::IcebergHiveBasicFilterPushdown (timeout) duration: 46.53s
GenericFederatedQuery::IcebergHiveBasicSelectAll (good) duration: 16.30s
GenericFederatedQuery::IcebergHiveBasicSelectCount (good) duration: 14.11s
GenericFederatedQuery::IcebergHiveBasicSelectConstant (good) duration: 12.48s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr
[timeout] GenericFederatedQuery::IcebergHiveBasicFilterPushdown [default-linux-x86_64-release-asan] (46.53s)
Killed by timeout (60 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.IcebergHiveBasicFilterPushdown.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.IcebergHiveBasicFilterPushdown.out
------ [5/10] chunk ran 4 tests (total:91.22s - test:60.05s)
Chunk exceeded 60s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
GenericFederatedQuery::IcebergHiveSaFilterPushdown (timeout) duration: 47.10s
GenericFederatedQuery::IcebergHiveSaSelectAll (good) duration: 15.33s
GenericFederatedQuery::IcebergHiveSaSelectCount (good) duration: 14.43s
GenericFederatedQuery::IcebergHiveSaSelectConstant (good) duration: 12.58s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr
[timeout] GenericFederatedQuery::IcebergHiveSaFilterPushdown [default-linux-x86_64-release-asan] (47.10s)
Killed by timeout (60 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.IcebergHiveSaFilterPushdown.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.IcebergHiveSaFilterPushdown.out
------ [6/10] chunk ran 4 tests (total:61.59s - test:60.05s)
Chunk exceeded 60s timeout and was killed
List of the tests involved in the launch:
GenericFederatedQuery::IcebergHiveTokenFilterPushdown (good) duration: 16.56s
GenericFederatedQuery::IcebergHiveTokenSelectAll (good) duration: 15.70s
GenericFederatedQuery::IcebergHiveTokenSelectCount (good) duration: 13.22s
GenericFederatedQuery::IcebergHiveTokenSelectConstant (good) duration: 11.99s
Killed by timeout (60 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr
------ [7/10] chunk ran 4 tests (total:91.10s - test:60.07s)
Chunk exceeded 60s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
GenericFederatedQuery::PostgreSQLFilterPushdown (timeout) duration: 47.27s
GenericFederatedQuery::PostgreSQLOnPremSelectAll (good) duration: 15.56s
GenericFederatedQuery::PostgreSQLSelectCount (good) duration: 14.97s
GenericFederatedQuery::PostgreSQLOnPremSelectConstant (good) duration: 11.31s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr
[timeout] GenericFederatedQuery::PostgreSQLFilterPushdown [default-linux-x86_64-release-asan] (47.27s)
Killed by timeout (60 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.PostgreSQLFilterPushdown.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.PostgreSQLFilterPushdown.out
------ TIMEOUT: 33 - GOOD, 5 - TIMEOUT ydb/core/kqp/ut/federated_query/generic_ut
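Note: the 60 s budget that killed these chunks and the 600 s budgets seen earlier match the usual defaults for the small and medium test classes. A minimal sketch of how a suite's ya.make could raise its budget, assuming the standard ya.make test macros (all values are illustrative, not taken from this run):

    UNITTEST()
    # ... existing SRCS and PEERDIRs ...
    # Promote the suite to the medium class (larger default budget) ...
    SIZE(MEDIUM)
    # ... or set an explicit per-chunk timeout in seconds.
    TIMEOUT(600)
    # Spreading tests over more chunks also keeps each chunk under budget.
    FORK_SUBTESTS()
    SPLIT_FACTOR(10)
    END()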
@ 0x166C7028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.WriteIntoRenamingSyncIndex.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.WriteIntoRenamingSyncIndex.out ------ FAIL: 119 - GOOD, 1 - FAIL ydb/core/kqp/ut/indexes ydb/core/kqp/ut/join [size:medium] nchunks:200 ------ [123/200] chunk ran 1 test (total:28.13s - test:28.05s) [fail] KqpJoinOrder::DatetimeConstantFold+ColumnStore [default-linux-x86_64-release-asan] (16.66s) (NYdb::Dev::NStatusHelpers::TYdbErrorException) Status: CLIENT_DEADLINE_EXCEEDED Issues:
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4322 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.DatetimeConstantFold.ColumnStore.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.DatetimeConstantFold.ColumnStore.out ------ [151/200] chunk ran 1 test (total:26.27s - test:26.19s) [fail] KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [default-linux-x86_64-release-asan] (17.44s) (NYdb::Dev::NStatusHelpers::TYdbErrorException) Status: CLIENT_DEADLINE_EXCEEDED Issues:
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14675 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2.out ------ [98/200] chunk ran 1 test (total:34.32s - test:33.86s) [fail] KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [default-linux-x86_64-release-asan] (18.86s) (NYdb::Dev::NStatusHelpers::TYdbErrorException) Status: CLIENT_DEADLINE_EXCEEDED Issues:
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18506 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.CanonizedJoinOrderTPCDS64_small.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.CanonizedJoinOrderTPCDS64_small.out ------ FAIL: 228 - GOOD, 3 - FAIL ydb/core/kqp/ut/join ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [1/50] chunk ran 4 tests (total:42.92s - test:42.78s) [crashed] KqpExplain::CreateTableAs+Stats [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpExplain.CreateTableAs.Stats.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpExplain.CreateTableAs.Stats.out ------ [15/50] chunk ran 4 tests (total:104.25s - setup:0.01s test:104.16s) [crashed] KqpLimits::ReadsetCountLimit [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.ReadsetCountLimit.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.ReadsetCountLimit.out ------ [47/50] chunk ran 3 tests (total:123.71s - setup:0.04s test:123.53s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (108.01s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198368DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x19CFB43F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x193D3478 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x193E6487 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x193E6487 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x193E6487 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19D32635 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19D32635 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19D32635 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x19D01FC8 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x193E560B 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19D03895 14. 
/tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19D2CBAC 15. ??:0: ?? @ 0x7F8B54D59D8F 16. ??:0: ?? @ 0x7F8B54D59E3F 17. ??:0: ?? @ 0x166DE028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 180 - GOOD, 1 - FAIL, 2 - CRASHED ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [10/50] chunk ran 2 tests (total:41.97s - test:41.91s) [fail] KqpSinkMvcc::OlapMultiSinks [default-linux-x86_64-release-asan] (20.38s) assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:588, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]" 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F 2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:588: CompareYson @ 0x4900DB47 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x1900EF4A 4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18FDD558 6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18FE36B7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FE36B7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FE36B7 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955 12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955 13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8 15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18FE2883 16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5 17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC 18. ??:0: ?? @ 0x7F15F4F5FD8F 19. ??:0: ?? @ 0x7F15F4F5FE3F 20. ??:0: ?? 
@ 0x16619028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.out ------ [21/50] chunk ran 2 tests (total:32.13s - test:32.07s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (10.11s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x1903269E 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x190112E8 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x19018767 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x19018767 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x19018767 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x19017933 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC 17. ??:0: ?? @ 0x7F288C673D8F 18. ??:0: ?? @ 0x7F288C673E3F 19. ??:0: ?? @ 0x16619028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ FAIL: 102 - GOOD, 2 - FAIL ydb/core/kqp/ut/tx ydb/core/kqp/ut/yql [size:medium] nchunks:50 ------ [19/50] chunk ran 2 tests (total:98.41s - test:98.36s) [fail] KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [default-linux-x86_64-release-asan] (85.70s) assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:1474, void NKikimr::NKqp::WaitForZeroSessions(const NKqp::TKqpCounters &): (count) Unable to wait for proper active session count, it looks like cancelation doesn`t work 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B8BA93B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BD82A4F 2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:1474: WaitForZeroSessions @ 0x4B4C2153 3. /tmp//-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:734: DoStreamExecuteYqlScriptScanTimeoutBruteForce @ 0x1B43A206 4. /tmp//-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18: operator() @ 0x1B475C27 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18:1) &> @ 0x1B475C27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18:1) &> @ 0x1B475C27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1B475C27 8. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1B475C27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x1BDB9C45 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x1BDB9C45 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x1BDB9C45 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BD895D8 13. /tmp//-S/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp:18: Execute @ 0x1B474DF3 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BD8AEA5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1BDB41BC 16. ??:0: ?? @ 0x7F3B196FDD8F 17. ??:0: ?? @ 0x7F3B196FDE3F 18. ??:0: ?? @ 0x18B3F028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/yql/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/yql/test-results/unittest/testing_out_stuff/KqpScripting.StreamExecuteYqlScriptScanOperationTmeoutBruteForce.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/yql/test-results/unittest/testing_out_stuff/KqpScripting.StreamExecuteYqlScriptScanOperationTmeoutBruteForce.out ------ FAIL: 90 - GOOD, 1 - FAIL ydb/core/kqp/ut/yql ydb/core/quoter/ut [size:medium] ------ sole chunk ran 37 tests (total:282.51s - setup:0.04s test:282.35s) [fail] QuoterWithKesusTest::KesusRecreation [default-linux-x86_64-release-asan] (129.93s) (TWithBackTrace) Event queue is still empty.ydb/library/actors/testlib/test_runtime.cpp:1375: TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5 NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:24 NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:0 __root at /-S/contrib/libs/cxxsupp/libcxx/include/__tree:966:54 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 TIntrusivePtr at /-S/util/generic/ptr.h:510:11 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.KesusRecreation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.KesusRecreation.out ------ FAIL: 36 - GOOD, 1 - FAIL ydb/core/quoter/ut ydb/core/statistics/aggregator/ut [size:medium] nchunks:60 ------ [4/60] chunk ran 1 test (total:477.84s - test:477.80s) [fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (465.82s) (TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5 NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24 DoDestroy at /-S/util/generic/ptr.h:237:13 operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out ------ FAIL: 35 - GOOD, 1 - FAIL ydb/core/statistics/aggregator/ut ydb/core/sys_view/ut [size:medium] nchunks:10 ------ [0/10] chunk ran 9 tests (total:619.24s - test:600.05s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: LabeledDbCounters::OneTabletRestart (good) duration: 185.41s LabeledDbCounters::TwoTabletsKillOneTablet (good) duration: 127.45s LabeledDbCounters::TwoTablets (good) duration: 67.43s LabeledDbCounters::OneTabletRemoveCounters (good) duration: 65.00s LabeledDbCounters::OneTablet (good) duration: 64.62s ShowCreateView::Basic (good) duration: 41.88s ShowCreateView::FromTable (timeout) duration: 33.02s DbCounters::TabletsSimple (good) duration: 32.61s ShowCreateView::WithPairedTablePathPrefix test was not launched inside chunk. 
ydb/core/sys_view/ut [size:medium] nchunks:10
------ [0/10] chunk ran 9 tests (total:619.24s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
LabeledDbCounters::OneTabletRestart (good) duration: 185.41s
LabeledDbCounters::TwoTabletsKillOneTablet (good) duration: 127.45s
LabeledDbCounters::TwoTablets (good) duration: 67.43s
LabeledDbCounters::OneTabletRemoveCounters (good) duration: 65.00s
LabeledDbCounters::OneTablet (good) duration: 64.62s
ShowCreateView::Basic (good) duration: 41.88s
ShowCreateView::FromTable (timeout) duration: 33.02s
DbCounters::TabletsSimple (good) duration: 32.61s
ShowCreateView::WithPairedTablePathPrefix test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] ShowCreateView::FromTable [default-linux-x86_64-release-asan] (33.02s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/ShowCreateView.FromTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/ShowCreateView.FromTable.out
------ [5/10] chunk ran 8 tests (total:266.76s - test:266.69s)
[fail] SystemView::PartitionStatsFields [default-linux-x86_64-release-asan] (11.26s)
greater-or-equal assertion failed at ydb/core/sys_view/ut_kqp.cpp:586, void NKikimr::NSysView::(anonymous namespace)::TYsonFieldChecker::Uint64GreaterOrEquals(ui64): value.AsUint64() >= expected
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NSysView::NTestSuiteSystemView::TTestCasePartitionStatsFields::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/sys_view/ut_kqp.cpp:2006:15
operator() at /-S/ydb/core/sys_view/ut_kqp.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.out
------ [6/10] chunk ran 8 tests (total:290.85s - test:284.75s)
[fail] SystemView::ShowCreateTableKeyBloomFilter [default-linux-x86_64-release-asan] (12.41s)
(NYdb::Dev::TContractViolation) Attempt to use result with not successfull status.
TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableKeyBloomFilter.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableKeyBloomFilter.out
------ TIMEOUT: 79 - GOOD, 2 - FAIL, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/sys_view/ut
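The TContractViolation above (it recurs later in ydb/services/ydb/backup_ut) means the test called TCreateSessionResult::GetSession() on a result whose status was not successful. A hedged sketch of the guarded pattern with the YDB C++ SDK; the client variable and the error handling are illustrative, not the test's actual code:

    // Check the status before unwrapping the session; calling GetSession() on a
    // failed result is exactly what raises NYdb::TContractViolation in this log.
    auto result = tableClient.CreateSession().GetValueSync();
    if (!result.IsSuccess()) {
        Cerr << result.GetIssues().ToString() << Endl;  // report instead of throwing
        return;
    }
    auto session = result.GetSession();
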
ydb/core/tx/columnshard/ut_rw [size:medium] nchunks:60
------ [26/60] chunk ran 1 test (total:603.49s - test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString (timeout) duration: 602.17s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [default-linux-x86_64-release-asan] (602.17s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.out
------ [27/60] chunk ran 1 test (total:605.08s - test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 (timeout) duration: 602.37s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [default-linux-x86_64-release-asan] (602.37s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.out
------ [30/60] chunk ran 1 test (total:609.71s - setup:0.01s test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 (timeout) duration: 600.04s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [default-linux-x86_64-release-asan] (600.04s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.out
------ TIMEOUT: 59 - GOOD, 3 - TIMEOUT ydb/core/tx/columnshard/ut_rw

ydb/core/tx/columnshard/ut_schema [size:medium] nchunks:60
------ [36/60] chunk ran 1 test (total:175.86s - test:174.92s)
[crashed] TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [default-linux-x86_64-release-asan] (144.65s)
Test crashed (return code: 100)
==327132==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 56224 byte(s) in 2 object(s) allocated from:
#0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0x1cb33f22 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10
#2 0x1cb33f22 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
#3 0x1cb33f22 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
#4 0x1cb33f22 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19
#5 0x1cb33f22 in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:789:25
#6 0x1cb33f22 in void std::__y1::vector>::__assign_with_size[abi:fe190000](NKikimr::NOlap::TUnifiedBlobId*, NKikimr::NOlap::TUnifiedBlobId*, long) /-S/contrib/libs/cxxsupp/libcxx/include/vector:1378:5
#7 0x1cb2c3b5 in assign /-S/contrib/libs/cxxsupp/libcxx/include/vector:1359:3
#8 0x1cb2c3b5 in operator= /-S/contrib/libs/cxxsupp/libcxx/include/vector:1330:5
#9 0x1cb2c3b5 in NKikimr::NOlap::TPortionMetaConstructor::Build() /-S/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp:53:20
#10 0x1caedcc1 in ..[snippet truncated]..
() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#33 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#34 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#36 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#37 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#38 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#39 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#40 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#41 0x7fcec09ced8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 3076960 byte(s) leaked in 54995 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.out
------ [38/60] chunk ran 1 test (total:171.44s - test:170.60s)
[crashed] TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [default-linux-x86_64-release-asan] (145.59s)
Test crashed (return code: 100)
==327026==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 56224 byte(s) in 2 object(s) allocated from:
#0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0x1cb33f22 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10
#2 0x1cb33f22 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
#3 0x1cb33f22 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
#4 0x1cb33f22 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19
#5 0x1cb33f22 in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:789:25
#6 0x1cb33f22 in void std::__y1::vector>::__assign_with_size[abi:fe190000](NKikimr::NOlap::TUnifiedBlobId*, NKikimr::NOlap::TUnifiedBlobId*, long) /-S/contrib/libs/cxxsupp/libcxx/include/vector:1378:5
#7 0x1cb2c3b5 in assign /-S/contrib/libs/cxxsupp/libcxx/include/vector:1359:3
#8 0x1cb2c3b5 in operator= /-S/contrib/libs/cxxsupp/libcxx/include/vector:1330:5
#9 0x1cb2c3b5 in NKikimr::NOlap::TPortionMetaConstructor::Build() /-S/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp:53:20
#10 0x1caedcc1 in ..[snippet truncated]..
() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#34 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#37 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#38 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#39 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#40 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#41 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#42 0x7f8767e87d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 2310254 byte(s) leaked in 41290 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.out
------ FAIL: 41 - GOOD, 2 - CRASHED ydb/core/tx/columnshard/ut_schema
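Both ut_schema crashes are the same LeakSanitizer report: vectors of TUnifiedBlobId allocated in TPortionMetaConstructor::Build() are only indirectly reachable at exit. While the leak itself is investigated, a standard sanitizer-level workaround is an LSan suppressions file; this is generic AddressSanitizer machinery, not project policy, and the file and binary names below are illustrative:

    # lsan.supp - suppress the known leak while it is under investigation
    leak:NKikimr::NOlap::TPortionMetaConstructor::Build

    # run the test binary with the suppression applied
    LSAN_OPTIONS=suppressions=lsan.supp ./ydb-core-tx-columnshard-ut_schema
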
ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 3 tests (total:220.42s - test:219.62s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (143.32s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out
------ FAIL: 10 - GOOD, 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup

ydb/core/tx/datashard/ut_read_iterator [size:medium] nchunks:10
------ [9/10] chunk ran 14 tests (total:611.43s - setup:0.01s test:600.09s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
ReadIteratorExternalBlobs::ExtBlobsMultipleColumns (good) duration: 108.45s
ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys (good) duration: 93.34s
ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded (good) duration: 66.10s
ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle (good) duration: 65.07s
ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd (good) duration: 61.94s
ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot (good) duration: 61.52s
ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning (good) duration: 54.10s
ReadIteratorExternalBlobs::ExtBlobs (good) duration: 53.75s
ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows (timeout) duration: 26.42s
DataShardReadIteratorSysTables::ShouldRead (good) duration: 6.65s
2 more tests with 11.54s total duration are not listed.
2 tests were not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/stderr
[timeout] ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [default-linux-x86_64-release-asan] (26.42s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/ReadIteratorExternalBlobs.ExtBlobsWithCompactingMiddleRows.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/ReadIteratorExternalBlobs.ExtBlobsWithCompactingMiddleRows.out
------ TIMEOUT: 145 - GOOD, 2 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/tx/datashard/ut_read_iterator
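Several suites above fail not on assertions but on the 600 s chunk budget: one slow test (the CompactionSplitGranule* cases, ExtBlobsWithCompactingMiddleRows) consumes the whole chunk and the remaining tests are reported NOT_LAUNCHED. A sketch of the usual ya.make knobs for this situation; TIMEOUT, SIZE and SPLIT_FACTOR are real macros of this build system, but the values below are illustrative, not a recommendation for these particular suites:

    UNITTEST_FOR(ydb/core/tx/datashard)

    SIZE(LARGE)       # large tests get a bigger timeout budget than medium
    TIMEOUT(1800)     # or raise the per-chunk timeout explicitly
    FORK_SUBTESTS()
    SPLIT_FACTOR(20)  # more chunks, so one slow test cannot starve the rest

    END()
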
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [3/60] chunk ran 1 test (total:30.93s - test:30.90s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 9 - GOOD, 1 - CRASHED ydb/core/tx/tiering/ut

ydb/core/tx/tx_proxy/ut_base_tenant [size:medium] nchunks:10
------ [0/10] chunk ran 3 tests (total:107.33s - setup:0.01s test:107.02s)
[fail] TModifyUserTest::ModifyUser [default-linux-x86_64-release-asan] (6.60s)
assertion failed at ydb/core/tx/tx_proxy/proxy_ut.cpp:1044, virtual void NTestSuiteTModifyUserTest::TTestCaseModifyUser::Execute_(NUnitTest::TTestContext &): (client.ModifyUser("/dc-1", { .User = "user2", .Password = "pas2user"}, user1Token) == NMsgBusProxy::MSTATUS_OK) failed: (MSTATUS_ERROR != MSTATUS_OK) , with diff: MSTATUS_(ERR|)O(R|K)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
NTestSuiteTModifyUserTest::TTestCaseModifyUser::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/tx/tx_proxy/proxy_ut.cpp:1044:13
operator() at /-S/ydb/core/tx/tx_proxy/proxy_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/testing_out_stuff/TModifyUserTest.ModifyUser.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/testing_out_stuff/TModifyUserTest.ModifyUser.out
------ FAIL: 24 - GOOD, 1 - FAIL ydb/core/tx/tx_proxy/ut_base_tenant

ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10
------ [2/10] chunk ran 30 tests (total:289.39s - test:288.77s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (8.93s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27434
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [default-linux-x86_64-release-asan] (10.38s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30699
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [default-linux-x86_64-release-asan] (9.63s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9132
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20.out
------ [9/10] chunk ran 30 tests (total:220.64s - test:220.57s)
[fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (10.67s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2437
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.out
[fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [default-linux-x86_64-release-asan] (10.12s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:31044
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.out
------ FAIL: 295 - GOOD, 5 - FAIL ydb/core/tx/tx_proxy/ut_schemereq
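All five ut_schemereq failures share one root cause: the CreateLocalUser helper at schemereq_ut.cpp:256 could not obtain a session because the gRPC call hit its deadline (status 4, Deadline Exceeded), so every test that passes through that helper fails identically. Judging by the log text, the assertion attaches the SDK's issue string, which is how the ": Error: ..." lines end up in this report; a sketch of that pattern (the exact macro used in the helper is an assumption):

    // sessionResult comes from the SDK's session factory (acquisition elided).
    // Asserting with the issue text attached makes the gRPC error visible in
    // the log instead of a bare boolean failure.
    UNIT_ASSERT_C(sessionResult.IsSuccess(), sessionResult.GetIssues().ToString());
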
ydb/core/viewer/ut [size:medium] nchunks:10
------ [0/10] chunk ran 6 tests (total:153.71s - setup:0.03s test:153.58s)
[fail] Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [default-linux-x86_64-release-asan] (16.92s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation.out
------ [4/10] chunk ran 5 tests (total:613.67s - test:600.09s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
Viewer::JsonStorageListingV1PDiskIdFilter (timeout) duration: 162.61s
Viewer::JsonStorageListingV1NodeIdFilter (good) duration: 142.62s
Viewer::JsonStorageListingV1GroupIdFilter (good) duration: 113.12s
Viewer::JsonStorageListingV1 (good) duration: 109.04s
Viewer::JsonAutocompleteStartOfDatabaseName (good) duration: 55.30s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] Viewer::JsonStorageListingV1PDiskIdFilter [default-linux-x86_64-release-asan] (162.61s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.out
------ [6/10] chunk ran 5 tests (total:128.82s - setup:0.07s test:128.30s)
[fail] Viewer::Plan2SvgOK [default-linux-x86_64-release-asan] (13.35s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgOK.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgOK.out
[fail] Viewer::Plan2SvgBad [default-linux-x86_64-release-asan] (10.25s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgBad.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgBad.out
[fail] Viewer::QueryExecuteScript [default-linux-x86_64-release-asan] (17.32s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.out
[fail] Viewer::SelectStringWithBase64Encoding [default-linux-x86_64-release-asan] (50.23s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.SelectStringWithBase64Encoding.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.SelectStringWithBase64Encoding.out
------ [7/10] chunk ran 5 tests (total:166.02s - setup:0.02s test:165.29s)
[fail] Viewer::SelectStringWithNoBase64Encoding [default-linux-x86_64-release-asan] (22.78s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.SelectStringWithNoBase64Encoding.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.SelectStringWithNoBase64Encoding.out
------ TIMEOUT: 44 - GOOD, 6 - FAIL, 1 - TIMEOUT ydb/core/viewer/ut
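The recurring Viewer failure "(Error 11: Resource temporarily unavailable) ... can not read from socket input stream" is errno 11 (EAGAIN) surfacing from a socket read: the HTTP endpoint produced no data in time, and the reader treated that as fatal. A generic sketch of what EAGAIN means at the syscall level (this is background on the errno, not the project's socket wrapper):

    // EAGAIN from read(2) on a non-blocking socket means "no data yet"; a robust
    // reader retries or polls, while treating it as fatal produces exactly the
    // TSystemError seen in these Viewer tests.
    ssize_t n;
    do {
        n = read(fd, buf, sizeof(buf));
    } while (n < 0 && (errno == EAGAIN || errno == EINTR));
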
ydb/services/persqueue_v1/ut [size:medium] nchunks:10
------ [3/10] chunk ran 14 tests (total:358.61s - test:358.42s)
[crashed] TPersQueueTest::InflightLimit [default-linux-x86_64-release-asan] (43.14s)
Test crashed (return code: 100)
==180033==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 3868984 byte(s) in 59 object(s) allocated from:
#0 0x1928efff in malloc /-S/contrib/libs/clang18-rt/lib/asan/asan_malloc_linux.cpp:68:3
#1 0x1a545913 in grpc_event_engine::experimental::MemoryAllocator::MakeSlice(grpc_event_engine::experimental::MemoryRequest) /-S/contrib/libs/grpc/src/core/lib/event_engine/memory_allocator.cc:63:13
#2 0x1a52023d in maybe_make_read_slices /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1070:57
#3 0x1a52023d in tcp_handle_read(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1094:5
#4 0x1a524697 in Run /-S/contrib/libs/grpc/src/core/lib/iomgr/closure.h:303:5
#5 0x1a524697 in tcp_read(grpc_endpoint*, grpc_slice_buffer*, grpc_closure*, bool, int) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1156:5
#6 0x1a881d18 in continue_read_action_locked /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2594:3
#7 0x1a881d18 in read_action_locked(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2583:7
#8 0x1a40f2b6 in grpc_combiner_continue_exec_ctx() /-S/contrib/libs/grpc/src/core/lib/iomgr/combiner.cc:231:5
#9 0x1a3e7874 in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:75:17
#10 0x1a533ac4 in end_worker /-S/contrib/l ..[snippet truncated].. llset.cc:48:10
#14 0x1a50e157 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29
#15 0x1b0ca11e in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15
#16 0x2029beff in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13
#17 0x2029beff in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:190:18
#18 0x1b0e022e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#19 0x1b0e022e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#20 0x1b0e022e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13
#21 0x1b0e077c in Execute /-S/util/thread/factory.h:15:13
#22 0x1b0e077c in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41
#23 0x195ddeb4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20
#24 0x1928cb48 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28
SUMMARY: AddressSanitizer: 7544213 byte(s) leaked in 1622 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.InflightLimit.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.InflightLimit.out
------ [6/10] chunk ran 13 tests (total:352.70s - test:352.39s)
[fail] TPersQueueTest::StreamReadManyUpdateTokenAndRead [default-linux-x86_64-release-asan] (26.51s)
assertion failed at ydb/services/persqueue_v1/persqueue_ut.cpp:1610, virtual void NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseStreamReadManyUpdateTokenAndRead::Execute_(NUnitTest::TTestContext &): (resp.server_message_case() == Ydb::Topic::StreamReadMessage::FromServer::kReadResponse) { status: UNAUTHORIZED issues { message: "No ReadTopic permissions for \'/Root/PQ/rt3.dc1--acc--topic1\' for subject \'user_without_rights@builtin\'" issue_code: 500018 severity: 1 } }
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.StreamReadManyUpdateTokenAndRead.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.StreamReadManyUpdateTokenAndRead.out
------ FAIL: 132 - GOOD, 1 - FAIL, 1 - CRASHED ydb/services/persqueue_v1/ut
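The InflightLimit crash is again LeakSanitizer, but here the leaked memory was allocated by gRPC's event engine on the client side, with NYdbGrpc::Dev::PullEvents still draining a completion queue at exit. A hedged sketch of the shutdown order that avoids this class of report with the YDB C++ SDK; whether it addresses this specific leak is an assumption, not a verified fix:

    // Stop the driver (and its gRPC completion-queue threads) before the process
    // exits, so LSan does not scan still-live gRPC read buffers as leaks.
    NYdb::TDriver driver(config);
    // ... run the test workload ...
    driver.Stop(true);  // true = wait for in-flight gRPC work to finish
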
ydb/services/ydb/backup_ut [size:medium] nchunks:10
------ [0/10] chunk ran 14 tests (total:215.78s - test:215.70s)
[fail] BackupRestore::RestoreViewDependentOnAnotherView [default-linux-x86_64-release-asan] (13.02s)
(NYdb::Dev::TContractViolation) Attempt to use result with not successfull status.
TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestore.RestoreViewDependentOnAnotherView.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestore.RestoreViewDependentOnAnotherView.out
------ [2/10] chunk ran 14 tests (total:112.53s - test:112.39s)
[crashed] BackupRestore::TestAllPrimitiveTypes-TIMESTAMP64 [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestore.TestAllPrimitiveTypes-TIMESTAMP64.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestore.TestAllPrimitiveTypes-TIMESTAMP64.out
------ [5/10] chunk ran 14 tests (total:156.34s - test:156.15s)
[fail] BackupRestoreS3::RestoreViewQueryText [default-linux-x86_64-release-asan] (11.37s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestViewQueryTextIsPreserved(char const*, NYdb::Dev::NView::TViewClient&, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewQueryText.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewQueryText.out
[fail] BackupRestoreS3::RestoreViewDependentOnAnotherView [default-linux-x86_64-release-asan] (15.06s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestViewDependentOnAnotherViewIsRestored(char const*, char const*, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewDependentOnAnotherView.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewDependentOnAnotherView.out
------ [6/10] chunk ran 14 tests (total:123.14s - test:123.07s)
[fail] BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [default-linux-x86_64-release-asan] (5.93s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestPrimitiveType(Ydb::Type_PrimitiveTypeId, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:1392:27
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-FLOAT.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-FLOAT.out
[fail] BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [default-linux-x86_64-release-asan] (12.30s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestRestoreTableWithIndex(char const*, char const*, NKikimrSchemeOp::EIndexType, bool, NYdb::Dev::NTable::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
NTestSuiteBackupRestoreS3::TTestCaseTestAllIndexTypes::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2659:5
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree.out
------ [7/10] chunk ran 14 tests (total:130.38s - test:130.21s)
[fail] BackupRestoreS3::TestAllPrimitiveTypes-JSON [default-linux-x86_64-release-asan] (9.67s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestPrimitiveType(Ydb::Type_PrimitiveTypeId, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:1392:27
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-JSON.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-JSON.out
------ [9/10] chunk ran 14 tests (total:63.03s - test:62.98s)
[fail] BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [default-linux-x86_64-release-asan] (5.34s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestViewOutputIsPreserved(char const*, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
NTestSuiteBackupRestoreS3::TTestCaseTestAllSchemeObjectTypes::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:17
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllSchemeObjectTypes-EPathTypeView.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllSchemeObjectTypes-EPathTypeView.out
------ FAIL: 132 - GOOD, 7 - FAIL, 1 - CRASHED ydb/services/ydb/backup_ut
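Seven of the backup_ut failures have one shape: WaitForOperation at ydb_backup_ut.cpp:2227 sees the S3 export finish as UNDETERMINED instead of SUCCESS. From the names in the backtraces, the helper is a poll loop over NOperation::TOperationClient; a sketch of that shape, with the loop body inferred rather than copied from the test:

    // Inferred shape of the failing helper: poll the export operation until it
    // is ready, then require SUCCESS. An UNDETERMINED terminal status (the
    // server cannot say whether the export committed) fails the comparison.
    bool WaitForOperation(NOperation::TOperationClient& client, const TOperationId& id,
                          int retries, TDuration delay) {
        for (int i = 0; i < retries; ++i) {
            auto op = client.Get<NExport::TExportToS3Response>(id).GetValueSync();
            if (op.Ready()) {
                return op.Status().GetStatus() == EStatus::SUCCESS;
            }
            Sleep(delay);
        }
        return false;
    }
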
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13 operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out ------ FAIL: 286 - GOOD, 1 - FAIL ydb/services/ydb/ut ydb/tests/functional/backup/s3_path_style [size:medium] ------ sole chunk ran 1 test (total:36.18s - recipes:22.89s test:5.39s recipes:7.81s) [fail] S3PathStyleBackup::DisableVirtualAddressing [default-linux-x86_64-release-asan] (2.78s) assertion failed at ydb/tests/functional/backup/s3_path_style/s3_path_style_backup_ut.cpp:85, virtual void NTestSuiteS3PathStyleBackup::TTestCaseDisableVirtualAddressing::Execute_(NUnitTest::TTestContext &): (op->Status().IsSuccess()) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NTestSuiteS3PathStyleBackup::TTestCaseDisableVirtualAddressing::Execute_(NUnitTest::TTestContext&) at /-S/ydb/tests/functional/backup/s3_path_style/s3_path_style_backup_ut.cpp:0:17 operator() at /-S/ydb/tests/functional/backup/s3_path_style/s3_path_style_backup_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/backup/s3_path_style/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/backup/s3_path_style/test-results/unittest/testing_out_stuff/S3PathStyleBackup.DisableVirtualAddressing.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/backup/s3_path_style/test-results/unittest/testing_out_stuff/S3PathStyleBackup.DisableVirtualAddressing.out ------ FAIL: 1 - FAIL ydb/tests/functional/backup/s3_path_style ------ sole chunk ran 2 tests (total:227.02s - setup:0.05s recipes:11.93s test:211.88s recipes:2.84s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.8G (17568352K) used. 
This may lead to test failure on the Autocheck/CI. You can increase the test's RAM requirement using REQUIREMENTS(ram:X) in the ya.make.
     pid    rss    ref  pdirt
  943207  44.9M  44.9M   6.4M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
  943245  34.4M  22.6M   9.9M  ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
  946407  46.0M  45.8M  23.0M  │  └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args
  946622   2.7G   2.7G   2.7G  │     └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/y
  944562   1.8G   1.8G   1.3G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
  944563   1.8G   1.8G   1.3G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
  944564   1.8G   1.8G   1.3G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
  944565   1.7G   1.7G   1.2G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
  944566   1.8G   1.8G   1.3G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
  944567   1.7G   1.7G   1.2G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
  944568   1.7G   1.7G   1.2G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
  944569   1.7G   1.7G   1.2G  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr
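Since the hint above names REQUIREMENTS(ram:X) without showing it in context, here is a minimal sketch of where such a line sits in a test module's ya.make. The module macro, the source file name, and the value 32 are illustrative assumptions, not taken from this run; only the REQUIREMENTS(ram:X) form itself is grounded in the hint above.

    UNITTEST()

    # hypothetical sources for the sketch
    SRCS(
        example_ut.cpp
    )

    # raise the per-test RAM budget above the default 16G limit reported in this log
    REQUIREMENTS(ram:32)

    END()

After adding such a requirement, the test scheduler reserves the larger budget instead of killing or flagging the run at 16G.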
------ sole chunk ran 2 tests (total:72.56s - setup:0.04s recipes:13.41s test:54.18s recipes:4.85s)
Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.3G (17124988K) used.
This may lead to test failure on the Autocheck/CI. You can increase the test's RAM requirement using REQUIREMENTS(ram:X) in the ya.make, as sketched above.
     pid    rss    ref  pdirt
  942768  44.9M  44.9M   6.4M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
  942915  31.7M  19.9M   7.4M  ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
  957910   161M   389M   320M  │  └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/850
  943252   1.9G   2.0G   1.6G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
  943254   1.9G   1.9G   1.5G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
  943255   2.0G   2.0G   1.6G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
  943257   2.0G   2.0G   1.6G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
  943258   1.9G   1.9G   1.6G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
  943261   1.9G   2.0G   1.6G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
  943262   2.1G   2.1G   1.7G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
  943263   2.0G   1.9G   1.5G  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff/stderr
Total 410 suites:
  375 - GOOD
  25 - FAIL
  10 - TIMEOUT
Total 9485 tests:
  9354 - GOOD
  74 - FAIL
  30 - NOT_LAUNCHED
  16 - TIMEOUT
  2 - SKIPPED
  9 - CRASHED
Cache efficiency ratio is 89.52% (42527 of 47506).
Local: 0 (0.00%), dist: 4981 (10.48%), by dynamic uids: 0 (0.00%), avoided: 37546 (79.03%)
Dist cache download: count=4597, size=14.92 GiB, speed=173.21 MiB/s
Disk usage for tools/sdk at least 31.5 MiB
Additional disk space consumed for build cache 905.43 GiB
Critical path:
[ 41166 ms] [CC] [7rMCfedXeFZUko-G3hCVLA default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp [started: 0 (1744911871764), finished: 41166 (1744911912930)]
[   234 ms] [AR] [ppZEvq_JvP264U4XUz7NWw default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a [started: 41233 (1744911912997), finished: 41467 (1744911913231)]
[ 17174 ms] [LD] [lZOWHjUtqpEnJkXmMB_V7w default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/sys_view/ut/ydb-core-sys_view-ut [started: 330884 (1744912202648), finished: 348058 (1744912219822)]
[620167 ms] [TM] [rnd-7709366278592016901 asan default-linux-x86_64 release]: ydb/core/sys_view/ut/unittest [started: 373140 (1744912244904), finished: 993307 (1744912865071)]
[ 67577 ms] [TA] [rnd-ylqqw0lns3szqakp]: $(BUILD_ROOT)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} [started: 994725 (1744912866489), finished: 1062302 (1744912934066)]
Time from start: 3688414.8200683594 ms, time elapsed by graph 746318 ms, time diff 2942096.8200683594 ms.
The longest 10 tasks:
[633917 ms] [TM] [rnd-za1simcsflowum48 asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1744914501111, finished: 1744915135028]
[632529 ms] [TM] [rnd-bic1hucgsp19afzx asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1744914491641, finished: 1744915124170]
[629315 ms] [TM] [rnd-1246294893338412347 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1744914669124, finished: 1744915298439]
[627560 ms] [TM] [rnd-10506992871402756706 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744911967265, finished: 1744912594825]
[625192 ms] [TM] [rnd-3g70wwu3t4eho5qf asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1744914491699, finished: 1744915116891]
[622771 ms] [TM] [rnd-4938772738873923168 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744913751506, finished: 1744914374277]
[620167 ms] [TM] [rnd-7709366278592016901 asan default-linux-x86_64 release]: ydb/core/sys_view/ut/unittest [started: 1744912244904, finished: 1744912865071]
[615392 ms] [TM] [rnd-13543308317098347839 asan default-linux-x86_64 release]: ydb/core/viewer/ut/unittest [started: 1744911999637, finished: 1744912615029]
[612079 ms] [TM] [rnd-10435460726859335586 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_read_iterator/unittest [started: 1744912367817, finished: 1744912979896]
[610837 ms] [TM] [rnd-uehdvgcxkhpecdlt asan default-linux-x86_64 release]: ydb/tests/fq/mem_alloc/py3test [started: 1744914453820, finished: 1744915064657]
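A quick sanity check on the two headline figures above (my arithmetic, not part of the log): the cache ratio appears to count dist-cache hits plus avoided (already up-to-date) nodes out of all graph nodes, and the time diff is wall-clock time from start minus time attributed to the build graph:

\[ 0 + 4981 + 0 + 37546 = 42527, \qquad \frac{42527}{47506} \approx 89.52\% \]
\[ 3688414.82\ \text{ms} - 746318\ \text{ms} = 2942096.82\ \text{ms} \approx 49\ \text{min} \]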
Total time by type:
[149767422 ms] [TM] [count: 4130, ave time 36263.30 msec]
[ 11622892 ms] [LD] [count: 375, ave time 30994.38 msec]
[ 10346951 ms] [prepare:get from dist cache] [count: 4981, ave time 2077.28 msec]
[  1825778 ms] [TS] [count: 234, ave time 7802.47 msec]
[   574373 ms] [TA] [count: 214, ave time 2683.99 msec]
[   421880 ms] [prepare:tools] [count: 19, ave time 22204.21 msec]
[   188187 ms] [prepare:bazel-store] [count: 3, ave time 62729.00 msec]
[   178796 ms] [prepare:put into local cache, clean build dir] [count: 4996, ave time 35.79 msec]
[    83873 ms] [prepare:AC] [count: 4, ave time 20968.25 msec]
[    83086 ms] [CC] [count: 7, ave time 11869.43 msec]
[     6353 ms] [UN] [count: 2, ave time 3176.50 msec]
[     4808 ms] [AR] [count: 9, ave time 534.22 msec]
[     2365 ms] [prepare:resources] [count: 1, ave time 2365.00 msec]
[     1852 ms] [prepare:put to dist cache] [count: 300, ave time 6.17 msec]
[      704 ms] [ld] [count: 2, ave time 352.00 msec]
[      594 ms] [CF] [count: 2, ave time 297.00 msec]
[      443 ms] [BI] [count: 1, ave time 443.00 msec]
[      436 ms] [SB] [count: 1, ave time 436.00 msec]
[      435 ms] [PK] [count: 1, ave time 435.00 msec]
[      182 ms] [CP] [count: 1, ave time 182.00 msec]
[       11 ms] [prepare:clean] [count: 3, ave time 3.67 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 152167573 ms (92.85%)
Total run tasks time - 163887506 ms
Configure time - 30.5 s
Statistics overhead 1976 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.91yrPoDpBb --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [8061/8061 modules configured] [4105/4464 modules rendered]
[2 ymakes processing] [8061/8061 modules configured] [4369/4464 modules rendered]
[2 ymakes processing] [8061/8061 modules configured] [4464/4464 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [8067/8067 modules configured] [4464/4464 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done.
Preparing for execution | 3.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests | 3.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud | 3.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut | 3.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut | 4.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace | 2.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader | 3.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart | 3.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator | 3.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit | 3.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk | 3.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel | 4.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup | 3.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli | 5.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots | 5.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool | 6.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests | 6.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk | 6.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut | 6.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging | 7.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml | 9.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test | 9.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp | 9.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests | 9.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common | 9.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay | 9.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |10.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |10.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |10.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |10.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |10.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |10.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |10.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |10.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |11.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |11.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |11.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |11.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |11.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |11.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |11.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |11.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |12.1%| CLEANING SYMRES |12.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |12.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |12.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |13.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |13.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |13.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |13.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |13.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql 
|13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |13.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |14.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |14.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |14.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |14.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |15.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |15.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |16.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |15.3%| PREPARE $(CLANG_FORMAT-1286082657) |15.6%| PREPARE $(TEST_TOOL_HOST-sbr:8444524403) |15.7%| PREPARE $(FLAKE8_PY3-715603131) |15.8%| PREPARE $(PYTHON) |15.9%| PREPARE $(YMAKE_PYTHON3-4256832079) |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |16.1%| PREPARE $(CLANG16-1380963495) |16.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |16.3%| PREPARE $(WITH_JDK-sbr:7832760150) |16.4%| PREPARE $(CLANG18-3363451693) |16.5%| PREPARE $(CLANG-2518231432) |16.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |16.7%| PREPARE $(LLD_ROOT-3808007503) |16.8%| PREPARE $(JDK17-472926544) |16.9%| PREPARE $(CLANG-1922233694) |17.0%| PREPARE $(WITH_JDK17-sbr:7832760150) |17.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |17.3%| PREPARE $(GDB) |17.4%| PREPARE $(JDK_DEFAULT-472926544) |17.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |19.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |19.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |19.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |19.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |20.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |20.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |20.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |20.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |20.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |20.7%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |20.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |20.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |21.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |21.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |21.2%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |21.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |21.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |21.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |21.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |20.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |20.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |20.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |20.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |20.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |20.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |21.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |21.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |21.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |21.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |21.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |21.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |21.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |21.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |22.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |22.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |22.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |22.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |22.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |22.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |22.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |22.9%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |23.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |23.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |24.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |24.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |23.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |24.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |24.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |24.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |25.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |25.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |24.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |24.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |24.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |25.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |25.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |25.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |25.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |25.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |25.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |25.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |26.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |26.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |26.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |26.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |26.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |26.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |26.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |27.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |28.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |29.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |29.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |29.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |29.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |29.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |30.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |30.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |31.0%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |31.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |31.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |32.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |31.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |31.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |31.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |31.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |32.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |32.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |32.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |32.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |32.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |32.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |32.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |32.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |32.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |34.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |34.8%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |36.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |36.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |35.8%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |35.9%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |36.0%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |36.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |36.3%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |36.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |36.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |36.8%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |37.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |37.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |37.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |37.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |37.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |38.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |38.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |38.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |39.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |39.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |39.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |39.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |39.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |39.3%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |39.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |39.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |39.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |40.5%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |40.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |40.3%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |40.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |40.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |40.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |40.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |40.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |40.9%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |41.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |41.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |41.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |41.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |42.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |42.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |42.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |42.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |43.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |42.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |42.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |43.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |43.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |43.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |43.5%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |43.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |43.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |44.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |44.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |44.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |44.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |44.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |45.2%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |45.3%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |37.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/apps/ydb/ydb |37.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |37.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |37.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |37.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |37.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |37.9%| COMPACTING CACHE 905.5GiB |37.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |37.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |38.0%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |38.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |38.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |38.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |38.2%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |38.2%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |38.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |38.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |38.4%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |38.4%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |38.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |38.5%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |38.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |38.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |38.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |38.7%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |38.7%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |38.8%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |38.8%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |38.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |38.9%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |39.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |39.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |39.1%| [LD] {RESULT} 
$(B)/ydb/core/client/ut/ydb-core-client-ut |39.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |39.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |39.2%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |39.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |39.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |39.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |39.4%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |39.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |39.5%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |39.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |39.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |39.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |39.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |39.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |39.8%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |39.8%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |39.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |39.9%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |39.9%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |40.0%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |40.0%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |40.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |40.1%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |40.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |40.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |40.3%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |40.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |40.4%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |40.4%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |40.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |40.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |40.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |40.6%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |40.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |40.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |40.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |40.8%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |40.8%| [LD] {RESULT} 
$(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |40.9%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |40.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |41.0%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |41.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |41.0%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |41.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |41.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |41.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |41.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |41.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |41.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |41.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |41.4%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |41.5%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |41.5%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |41.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |41.6%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |41.6%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |41.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |41.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |41.8%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |41.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |41.9%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |41.9%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |42.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |42.0%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |42.1%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |42.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |42.2%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |42.2%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |42.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |42.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |42.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |42.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |42.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |42.5%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |42.5%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |42.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |42.6%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |42.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |42.7%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |42.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |42.8%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |42.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |42.9%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |42.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |43.0%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |43.0%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |43.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |43.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |43.2%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |43.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |43.3%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |43.3%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |43.4%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |43.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |43.5%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |43.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |43.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |43.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |43.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |43.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |43.7%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |43.8%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |43.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |43.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |43.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |44.0%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |44.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |44.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |44.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |44.1%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |44.2%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |44.2%| [LD] {RESULT} 
$(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |44.3%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |44.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |44.4%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |44.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |44.5%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |44.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |44.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |44.6%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |44.7%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |44.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |44.7%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |44.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |44.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |44.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |44.9%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |45.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |45.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |45.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |45.1%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |45.2%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |45.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |45.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |45.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |45.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |45.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |45.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |45.5%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |45.5%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |45.6%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |45.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |45.7%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |45.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |45.8%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |45.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |45.9%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |45.9%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |46.0%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |46.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |46.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |46.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |46.1%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |46.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |46.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |46.3%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |46.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |46.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |46.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |46.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |46.5%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |46.6%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |46.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |46.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |46.7%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |46.7%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |46.8%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |46.8%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |46.9%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |46.9%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |47.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |47.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |47.1%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |47.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |47.2%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |47.2%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |47.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |47.3%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |47.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |47.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |47.4%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |47.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |47.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |47.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |47.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |47.7%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |47.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move 
|47.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |47.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |47.8%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |47.9%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |47.9%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |48.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |48.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |48.1%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |48.1%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |48.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |48.2%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |48.3%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |48.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |48.4%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |48.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |48.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |48.5%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |48.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |48.6%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |48.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |48.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |48.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |48.8%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |48.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |48.9%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |48.9%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |49.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |49.0%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |49.1%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |49.1%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |49.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |49.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |49.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |49.3%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |49.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |49.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |49.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |49.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |49.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |49.6%| [LD] 
{RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |49.6%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |49.7%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |49.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |49.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |49.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |49.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |49.9%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |49.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |50.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |50.0%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |50.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |50.1%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |50.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |50.2%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |50.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |50.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |50.3%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |50.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |50.4%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |50.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |50.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |50.6%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |50.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |50.7%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |50.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |50.8%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |50.8%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |50.9%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |50.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |50.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |51.0%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |51.0%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |51.1%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |51.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |51.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |51.2%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |51.3%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |51.3%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |51.4%| [LD] {RESULT} 
$(B)/ydb/tools/stress_tool/ydb_stress_tool |51.4%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |51.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |51.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |51.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |51.6%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |51.6%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |51.7%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |51.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |51.8%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |51.8%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |51.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |51.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |52.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |52.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |52.1%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |52.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |52.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |52.2%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |52.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |52.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |52.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |52.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |52.4%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |52.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |52.5%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |52.6%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |52.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |52.7%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |52.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |52.8%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |52.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |52.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |52.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |52.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |53.0%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |53.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |53.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |53.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |53.2%| [LD] {RESULT} 
$(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |53.2%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |53.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |53.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |53.4%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |53.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |53.4%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |53.5%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |53.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |53.6%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |53.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |53.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |53.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |53.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |53.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |53.9%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |53.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |54.0%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |54.0%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |54.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |54.1%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |54.1%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |54.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |54.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |54.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |54.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |54.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |54.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |54.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |54.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |54.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |54.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |54.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |54.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |54.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |54.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |54.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |54.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |55.0%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |55.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |55.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |55.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |55.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |55.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |55.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |55.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |55.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |55.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |56.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |56.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |56.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> GenericFederatedQuery::ClickHouseFilterPushdown |56.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |56.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |56.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |56.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest |56.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 |56.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |56.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |56.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |56.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/services/persqueue_v1/ut/unittest |56.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |56.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |56.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::StreamReadManyUpdateTokenAndRead |57.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest |58.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard |58.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] |59.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] Test command err: Starting YDB, grpc: 6332, msgbus: 28591 2025-04-17T18:59:39.689789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358784297066612:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:39.689900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0001b1/r3tmp/tmpFpaZDO/pdisk_1.dat 2025-04-17T18:59:40.068923Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6332, node 1 2025-04-17T18:59:40.095263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:40.095357Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:40.098221Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:59:40.098378Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T18:59:40.101236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:40.200259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:40.200274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:40.200295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:40.200382Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28591 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:59:40.504531Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Handle TEvNavigate describe path dc-1 2025-04-17T18:59:40.504591Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034666:2442] HANDLE EvNavigateScheme dc-1 2025-04-17T18:59:40.507703Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034666:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:40.534632Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034666:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:59:40.544687Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034666:2442] Handle TEvDescribeSchemeResult Forward to# [1:7494358788592034665:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } 
ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T18:59:40.555981Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Handle TEvProposeTransaction 2025-04-17T18:59:40.556002Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:59:40.556742Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494358788592034681:2450] 2025-04-17T18:59:40.617175Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:59:40.617235Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:59:40.617254Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:40.617307Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:40.617587Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:40.617682Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:59:40.617733Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:59:40.617833Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:59:40.618407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:40.619841Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:59:40.619895Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034681:2450] txid# 281474976710657 SEND to# [1:7494358788592034680:2449] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-17T18:59:40.631376Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Handle TEvProposeTransaction 2025-04-17T18:59:40.631396Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:59:40.631419Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358788592034721:2486] 2025-04-17T18:59:40.632928Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:59:40.632962Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:59:40.632987Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:40.633120Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:40.633329Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:40.633409Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:40.633440Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:59:40.633520Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:59:40.633781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:59:40.635082Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:59:40.635115Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358788592034721:2486] txid# 281474976710658 SEND to# [1:7494358788592034720:2485] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:59:41.548689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358792887002110:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.548694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358792887002101:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.548791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.548990Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Han ... marker# P6 2025-04-17T18:59:41.662939Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002189:2570] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:41.663040Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002189:2570] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:41.663117Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002189:2570] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:41.663148Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002189:2570] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-17T18:59:41.663333Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002189:2570] txid# 281474976710660 HANDLE EvClientConnected 2025-04-17T18:59:41.665713Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002189:2570] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T18:59:41.667548Z node 1 :TX_PROXY ERROR: Actor# [1:7494358792887002189:2570] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:59:41.667581Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002189:2570] txid# 281474976710660 SEND to# [1:7494358792887002115:2338] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-17T18:59:41.946954Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Handle TEvProposeTransaction 2025-04-17T18:59:41.946980Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] TxId# 281474976710661 ProcessProposeTransaction 2025-04-17T18:59:41.947017Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7494358792887002218:2583] 2025-04-17T18:59:41.948974Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47922" 2025-04-17T18:59:41.949013Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:59:41.949024Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7494358792887002218:2583] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:41.949053Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:41.949381Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:41.949507Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:41.949553Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T18:59:41.949683Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T18:59:41.971061Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-17T18:59:41.971103Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358792887002218:2583] txid# 281474976710661 SEND to# [1:7494358792887002217:2331] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T18:59:42.115818Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Handle TEvProposeTransaction 2025-04-17T18:59:42.115882Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T18:59:42.115936Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7494358797181969540:2600] 2025-04-17T18:59:42.118497Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47926" 2025-04-17T18:59:42.118564Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:59:42.118582Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:42.118650Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:42.119051Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:42.119161Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] HANDLE EvNavigateKeySetResult, txid# 281474976710662 
shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:42.119205Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T18:59:42.119435Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T18:59:42.119891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.122245Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-17T18:59:42.122296Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969540:2600] txid# 281474976710662 SEND to# [1:7494358797181969539:2345] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T18:59:42.150486Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Handle TEvProposeTransaction 2025-04-17T18:59:42.150523Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T18:59:42.150587Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7494358797181969580:2622] 2025-04-17T18:59:42.152521Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTU4MiwiaWF0IjoxNzQ0OTE2MzgyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.fyCFU8u7907LEkyXwWdVXm9Zciuo3sNQlLl20Sslmvyh4Boeg7Z_n2jvbXTRi6JVsjwJJyR_4GIrOoIlQFDW1UXx9symxCg6Qs7W3KdfmsNWM95V7aKx034LrTsWGx4NSEBlXq65HmmHKLei55qFq674TUH6RTKxuxJftE-7roLd5ZVLg3Sa2WMjTUOEMqyyaEUb7KC5SHr2hlPJHHLjAeQ0BY_402fyIkGSYBF06pZ8foVZOHEwB4sJMN5Uww5CPsYd-auLsX-Qvr7pTIfeWxE-A4p-CTU2NOlCYI25hr1RzQXj-pf8rgbbMcbolZmqozWdmmamB87yFcGFCsshsA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTU4MiwiaWF0IjoxNzQ0OTE2MzgyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47956" 2025-04-17T18:59:42.152609Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-17T18:59:42.152644Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-17T18:59:42.152822Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-17T18:59:42.152855Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 
CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-17T18:59:42.152887Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:42.153144Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:42.153176Z node 1 :TX_PROXY ERROR: Actor# [1:7494358797181969580:2622] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-04-17T18:59:42.153266Z node 1 :TX_PROXY ERROR: Actor# [1:7494358797181969580:2622] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-17T18:59:42.153297Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358797181969580:2622] txid# 281474976710663 SEND to# [1:7494358797181969579:2351] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:59:42.158977Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2E0OThiYjEtNzNlODY1ODktOWQ4MWIzNjUtM2U3MDhmMDI=, ActorId: [1:7494358797181969565:2351], ActorState: ExecuteState, TraceId: 01js2hy0dy6pz969h54fpkkn8g, Create QueryResponse for error on request, msg: 2025-04-17T18:59:42.159331Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] Handle TEvExecuteKqpTransaction 2025-04-17T18:59:42.159379Z node 1 :TX_PROXY DEBUG: actor# [1:7494358784297066864:2115] TxId# 281474976710664 ProcessProposeKqpTransaction |59.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |59.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TPersQueueTest::InflightLimit |59.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] Test command err: Starting YDB, grpc: 22994, msgbus: 64723 2025-04-17T18:59:39.689837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358785946321731:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:39.689931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0001a7/r3tmp/tmp315CI3/pdisk_1.dat 2025-04-17T18:59:40.059571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:59:40.104284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:40.104356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22994, node 1 2025-04-17T18:59:40.107851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:40.200367Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:40.200384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:40.200394Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-17T18:59:40.200517Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64723 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T18:59:40.504497Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvNavigate describe path dc-1 2025-04-17T18:59:40.504538Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289783:2446] HANDLE EvNavigateScheme dc-1 2025-04-17T18:59:40.507667Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289783:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:40.527836Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289783:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T18:59:40.536565Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289783:2446] Handle TEvDescribeSchemeResult Forward to# [1:7494358790241289782:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
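The repeated `Resource pool default not found or you don't have access permissions` warnings in this log come from the KQP workload service probing `/dc-1/.metadata/workload_manager/pools/default` before the pool's metadata path exists; the later `StatusAlreadyExists ... path exist, request accepts it` entry shows a racing creation attempt being accepted as success. As a rough YQL sketch of what such a pool declaration looks like (the pool name and limit values below are illustrative assumptions, not taken from this log):

-- Hedged sketch: pool name and limits are illustrative assumptions.
CREATE RESOURCE POOL demo_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10, -- queries allowed to run at once
    QUEUE_SIZE = 50              -- queries allowed to wait in the queue
);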
2025-04-17T18:59:40.555752Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvProposeTransaction 2025-04-17T18:59:40.555785Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T18:59:40.556761Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494358790241289798:2454] 2025-04-17T18:59:40.608615Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:59:40.609675Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:59:40.609709Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:40.609790Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:40.610104Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:40.610336Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T18:59:40.610426Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T18:59:40.610597Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T18:59:40.615604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:40.619067Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T18:59:40.619124Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289798:2454] txid# 281474976710657 SEND to# [1:7494358790241289797:2453] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
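The `ESchemeOpAlterLogin { CreateUser { ... } }` transactions throughout this log are the schemeshard-level form of ordinary YQL user management. A minimal sketch of the equivalent statements, using only the user names and password already visible in the records above:

-- Sketch of the DDL behind the AlterLogin/CreateUser scheme operations.
CREATE USER ordinaryuser PASSWORD 'passwd';
CREATE USER targetuser PASSWORD 'passwd';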
2025-04-17T18:59:40.631352Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvProposeTransaction 2025-04-17T18:59:40.631387Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T18:59:40.631415Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358790241289838:2490] 2025-04-17T18:59:40.632983Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T18:59:40.633024Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:59:40.633033Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:40.633074Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:40.633273Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:40.633330Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:40.633353Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T18:59:40.633456Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T18:59:40.633713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T18:59:40.634944Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T18:59:40.634974Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358790241289838:2490] txid# 281474976710658 SEND to# [1:7494358790241289837:2489] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T18:59:41.514440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358794536257218:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.514440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358794536257230:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.514544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.514775Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvProposeTransaction 2025-04-17T18:59:41.514793Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17 ... ReqId# "" txid# 281474976710661 SEND to# [1:7494358794536257341:2593] 2025-04-17T18:59:41.949375Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "dbadmin" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54214" 2025-04-17T18:59:41.949419Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:59:41.949431Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:41.949484Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:41.949725Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:41.949830Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:41.950020Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T18:59:41.950130Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T18:59:41.963242Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-17T18:59:41.963283Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358794536257341:2593] txid# 281474976710661 SEND to# [1:7494358794536257340:2331] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T18:59:42.053340Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvProposeTransaction 2025-04-17T18:59:42.053364Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T18:59:42.053406Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7494358798831224660:2610] 2025-04-17T18:59:42.056024Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: 
"\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000\n\024\010\000\022\020\010\001\020\200\010\032\007dbadmin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54230" 2025-04-17T18:59:42.056083Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:59:42.056133Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:42.056194Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:42.056513Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:42.056579Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:42.056608Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T18:59:42.056724Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T18:59:42.057170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.059102Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-17T18:59:42.059165Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224660:2610] txid# 281474976710662 SEND to# [1:7494358798831224659:2345] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T18:59:42.064379Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvProposeTransaction 2025-04-17T18:59:42.064402Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T18:59:42.064433Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7494358798831224678:2625] 2025-04-17T18:59:42.065954Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54234" 2025-04-17T18:59:42.065994Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 
CheckDatabaseAdministrator: 0 2025-04-17T18:59:42.066004Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T18:59:42.066033Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:42.066289Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:42.066384Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T18:59:42.066453Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-17T18:59:42.066577Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 HANDLE EvClientConnected 2025-04-17T18:59:42.066843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.068543Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-17T18:59:42.068576Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224678:2625] txid# 281474976710663 SEND to# [1:7494358798831224677:2346] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-04-17T18:59:42.098951Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvProposeTransaction 2025-04-17T18:59:42.098987Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] TxId# 281474976710664 ProcessProposeTransaction 2025-04-17T18:59:42.099032Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7494358798831224710:2639] 2025-04-17T18:59:42.100995Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224710:2639] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTU4MiwiaWF0IjoxNzQ0OTE2MzgyLCJzdWIiOiJkYmFkbWluIn0.KaV7PDP4gXRdgkylubCpYJ_wEVhSt5t2rnOYnjYhDj1DOF3ZrmN9C82DeoiIhVWRRnWHkXmNlenm8cN3-pThOcwcfEUAulGEuZwzArnkukJ67ydf8FH-xdKG6N6V_8cm0aNXeQguAfFsGqhpfJbF0fR1PXzDx85kHNjCu2YpEk7o0DAlCR-B1ISP9bkgCh790hATrNH4fzkeAa9pgay6FSKosuGl58TJdcZICRPOipDM9G3xS7grtGe01vB-HSNGd18V-ufQcxjQ1QkVHStLOdvIFGFXXxkIKIm80OQWRD44SgWrIAhhp3YdfTunsPXPyxINVFZGHqDohH9YZgZmiQ\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTU4MiwiaWF0IjoxNzQ0OTE2MzgyLCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54250" 2025-04-17T18:59:42.101060Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7494358798831224710:2639] txid# 281474976710664 Bootstrap, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T18:59:42.101077Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224710:2639] txid# 281474976710664 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-04-17T18:59:42.101123Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224710:2639] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-04-17T18:59:42.101454Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224710:2639] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T18:59:42.101490Z node 1 :TX_PROXY ERROR: Actor# [1:7494358798831224710:2639] txid# 281474976710664, Access denied for dbadmin, attempt to manage user 2025-04-17T18:59:42.101608Z node 1 :TX_PROXY ERROR: Actor# [1:7494358798831224710:2639] txid# 281474976710664, issues: { message: "Access denied for dbadmin" issue_code: 200000 severity: 1 } 2025-04-17T18:59:42.101639Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358798831224710:2639] txid# 281474976710664 SEND to# [1:7494358798831224709:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T18:59:42.107906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODc3ZmU0YTYtMmQyNWVlODUtMzQ1ZmIwMWUtNjlkNzkyMjM=, ActorId: [1:7494358798831224695:2352], ActorState: ExecuteState, TraceId: 01js2hy0c9563qvjebdr2zabna, Create QueryResponse for error on request, msg: 2025-04-17T18:59:42.109071Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] Handle TEvExecuteKqpTransaction 2025-04-17T18:59:42.109105Z node 1 :TX_PROXY DEBUG: actor# [1:7494358785946321978:2115] TxId# 281474976710665 ProcessProposeKqpTransaction >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] |59.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest 
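Both denials above (`Access denied for ordinaryuser` and `Access denied for dbadmin`, each on an `attempt to manage user`) demonstrate the rule this suite verifies: with `CheckAdministrator: 1`, login-management operations require a cluster administrator (`IsClusterAdministrator: 1`), and neither ACL grants nor the ownership transfer (`NewOwner: "dbadmin"`) are sufficient. The binary `DiffACL` blobs in the `ESchemeOpModifyACL` records are serialized grants; in YQL they would read roughly as follows (the permission name is an illustrative assumption, not decoded from the blobs):

-- Hedged sketch: the permission name is an assumption, not decoded from DiffACL.
GRANT 'ydb.generic.use' ON `/dc-1` TO dbadmin;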
|60.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks >> KqpSinkTx::OlapInvalidateOnError |61.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] |61.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 15046, MsgBus: 24067 2025-04-17T18:59:39.458904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358785159469448:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:39.459112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0003c2/r3tmp/tmpsU4EtI/pdisk_1.dat 2025-04-17T18:59:39.753618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:59:39.801229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:39.802312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-17T18:59:39.805007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15046, node 1 2025-04-17T18:59:39.922989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:39.923021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:39.923030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:39.923178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24067 TClient is connected to server localhost:24067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:40.436824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:41.354341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358793749404490:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.354432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.686625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-17T18:59:41.797737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358793749404614:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.797835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.797950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358793749404619:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.800337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-17T18:59:41.805886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358793749404621:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-17T18:59:41.871847Z node 1 :TX_PROXY ERROR: Actor# [1:7494358793749404661:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:59:42.741820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T18:59:43.142659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-17T18:59:43.520667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-17T18:59:43.883991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-17T18:59:44.300932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-17T18:59:44.458104Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358785159469448:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:44.458201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:59:44.620899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-17T18:59:44.648752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-17T18:59:45.604934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715698:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 11386, MsgBus: 2723 2025-04-17T18:59:39.458362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358782245326784:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:39.458587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0003d4/r3tmp/tmpOITxnC/pdisk_1.dat 2025-04-17T18:59:39.751627Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11386, node 1 2025-04-17T18:59:39.809234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:39.809303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:39.810926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:39.922653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:39.922669Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:39.922677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:39.922776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2723 TClient is connected to server localhost:2723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:40.442243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:41.318278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358790835261832:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.318364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.686329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T18:59:41.789177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358790835261956:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.789232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358790835261961:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.789234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:41.792943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T18:59:41.798704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358790835261963:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T18:59:41.898102Z node 1 :TX_PROXY ERROR: Actor# [1:7494358790835262003:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:59:42.742240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:59:43.162648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T18:59:43.457796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T18:59:43.775918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T18:59:44.174991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T18:59:44.432514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T18:59:44.457853Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358782245326784:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:44.457904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:59:44.462581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T18:59:46.381012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710708:0, at schemeshard: 72057594046644480 Call DescribeTable. 
Call DescribeTable.
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT }
CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT }
CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT }
DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS }
Call ListSplits.
selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } }
CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } }
CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } }
ListSplits result. GRpcStatusCode: 0
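The DescribeTable and ListSplits exchanges above follow the pattern the test harness checks for each connector call: the outbound request is printed next to the copy the mock connector actually received (CRAB Expected vs CRAB Actual), and the test fails on any mismatch. A minimal sketch of that comparison idea, with plain Python dicts standing in for the protobuf messages (check_call and the dict layout are illustrative, not the harness's real API):

```python
# Plain dicts stand in for the protobuf request messages; check_call and the
# dict layout are illustrative, not the harness's actual API.
def check_call(name, expected, actual):
    assert expected == actual, f"{name}: connector received an unexpected request"
    print(f"CRAB {name}: expected == actual")

describe_table_request = {
    "data_source_instance": {"kind": "CLICKHOUSE", "database": "pgdb", "protocol": "HTTP"},
    "table": "example_1",
}
received = dict(describe_table_request)  # what the mock recorded; a copy here
check_call("DescribeTable", describe_table_request, received)
```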
Call ReadSplits.
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
ReadSplits result. GRpcStatusCode: 0
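The ReadSplits request above pushes the test predicate filtered_column = 42 down to the connector as a typed filter tree (where { filter_typed { comparison { operation: EQ ... } } }). A hedged sketch of how such a tree could be rendered back to SQL text; the dict shape only mimics the logged message, and the field names are simplified, not the actual protobuf schema:

```python
# The dict layout mimics the logged where { filter_typed { comparison ... } }
# message; field names are simplified and not the real protobuf schema.
OPS = {"EQ": "=", "NE": "<>", "LT": "<", "LE": "<=", "GT": ">", "GE": ">="}

def render_comparison(cmp):
    left = cmp["left_value"]["column"]
    op = OPS[cmp["operation"]]
    right = cmp["right_value"]["typed_value"]["value"]
    return f"{left} {op} {right}"

where = {"comparison": {
    "operation": "EQ",
    "left_value": {"column": "filtered_column"},
    "right_value": {"typed_value": {"type": "INT32", "value": 42}},
}}
print("WHERE " + render_comparison(where["comparison"]))  # WHERE filtered_column = 42
```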
GRpcStatusCode: 0
|62.2%| … |64.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest (repeated progress ticks)
|64.3%| … |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest (interleaved with ydb/core/kqp/ut/cost/unittest ticks)
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString
|65.0%| … |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> KqpCost::OlapWriteRow
|65.4%| … |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8
|66.0%| … |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64
|66.9%| … |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD]
|67.1%| … |67.8%| [TM] {asan, default-linux-x86_64, release}
ydb/core/kqp/ut/join/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] Test command err: === Server->StartServer(false); 2025-04-17T18:59:41.416583Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358792490048459:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:41.416780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:59:41.461910Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494358794302386191:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:41.462468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:59:41.565496Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000319/r3tmp/tmpW1vAxo/pdisk_1.dat 2025-04-17T18:59:41.575515Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:59:41.762463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:59:41.765865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:41.766599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:41.780132Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:59:41.780602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:41.792711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:41.792770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:41.795026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16035, node 1 2025-04-17T18:59:41.954298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/26k2/000319/r3tmp/yandexH9Ccaq.tmp 2025-04-17T18:59:41.954329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/26k2/000319/r3tmp/yandexH9Ccaq.tmp 2025-04-17T18:59:41.955855Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/26k2/000319/r3tmp/yandexH9Ccaq.tmp 2025-04-17T18:59:41.956005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:59:42.154991Z INFO: TTestServer started on Port 25567 GrpcPort 16035 TClient is connected to server localhost:25567 PQClient connected to localhost:16035 === TenantModeEnabled() = 0 === Init PQ - start server on port 16035 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:42.502696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T18:59:42.503423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.505373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T18:59:42.506661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T18:59:42.506771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.510404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T18:59:42.510604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T18:59:42.510865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.511037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T18:59:42.511070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-17T18:59:42.511087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-17T18:59:42.513100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:59:42.513133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-17T18:59:42.513167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T18:59:42.513444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.513490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T18:59:42.513504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-17T18:59:42.515285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.515311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.515369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:59:42.515402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:59:42.521219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T18:59:42.523291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-17T18:59:42.523509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-17T18:59:42.526285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744916382573, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T18:59:42.526822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744916382573 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T18:59:42.526875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:59:42.527293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-17T18:59:42.527334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-17T18:59:42.527556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-17T18:59:42.527633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T18:59:42.529748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T18:59:42.529779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T18:59:42.529970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T18:59:42.530001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7494358792490049109:2390], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-17T18:59:42.530076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:42.530129Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-17T18:59:42.530259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:59:42.530283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:59:42.530310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-17T18:59:42.530326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:59:42.530339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-17T18:59:42.530358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-17T18:59:42.530372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-17T18:59:42.530382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-17T18:59:42.530456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... nWriter 72075186224037899 (partition=0) Received event: NActors::IEventHandle 2025-04-17T18:59:52.017273Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::IEventHandle 2025-04-17T18:59:52.017678Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait for data done: readOffset 11 EndOffset 11 newEndOffset 12 commitOffset 0 clientCommitOffset 0 cookie 12 readingFinished 0 firstRead 0 2025-04-17T18:59:52.017702Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 11 endOffset 12 2025-04-17T18:59:52.017741Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 11, endOffset# 12, WTime# 1744916391965, sizeLag# 2177 2025-04-17T18:59:52.017756Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1TEvPartitionReady. 
Aval parts: 1 2025-04-17T18:59:52.017799Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 performing read request: guid# 3199e998-7ec6d801-3333531a-118cd195, from# TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 1, size# 2612, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T18:59:52.017850Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 READ FROM TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 1 maxSize 2612 maxTimeLagMs 0 readTimestampMs 0 readOffset 11 EndOffset 12 ClientCommitOffset 0 committedOffset 0 Guid 3199e998-7ec6d801-3333531a-118cd195 2025-04-17T18:59:52.018052Z :DEBUG: [] MessageGroupId [source] SessionId [source|e2f24092-d9f06b55-26f53368-332dea41_0] Write session got write response: sequence_numbers: 12 offsets: 11 already_written: false write_statistics { persist_duration_ms: 5 } 2025-04-17T18:59:52.018112Z :DEBUG: [] MessageGroupId [source] SessionId [source|e2f24092-d9f06b55-26f53368-332dea41_0] Write session: acknoledged message 12 2025-04-17T18:59:52.019044Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 12 Result { Offset: 11 Data: "... 2115 bytes ..." SourceId: "\000source" SeqNo: 12 WriteTimestampMS: 1744916392010 CreateTimestampMS: 1744916392009 UncompressedSize: 2048 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 20 RealReadOffset: 11 WaitQuotaTimeMs: 0 EndOffset: 12 StartOffset: 0 } Cookie: 11 } 2025-04-17T18:59:52.019147Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 13 from offset12 2025-04-17T18:59:52.019181Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 after read state TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 12 ReadOffset 12 ReadGuid 3199e998-7ec6d801-3333531a-118cd195 has messages 1 2025-04-17T18:59:52.019270Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 read done: guid# 3199e998-7ec6d801-3333531a-118cd195, partition# TopicId: Topic rt3.dc1--acc--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 2206 2025-04-17T18:59:52.019292Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 response to read: guid# 3199e998-7ec6d801-3333531a-118cd195 2025-04-17T18:59:52.019460Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 Process answer. 
Aval parts: 0 ===Expect ReadResponse, got status: SUCCESS read_response { partition_data { partition_session_id: 1 batches { message_data { offset: 11 seq_no: 12 created_at { seconds: 1744916392 nanos: 9000000 } data: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" uncompressed_size: 2048 message_group_id: "source" } producer_id: "source" write_session_meta { key: "_ip" value: "ipv6:[::1]:55492" } write_session_meta { key: "ident" value: "acc" } write_session_meta { key: "logtype" value: "topic1" } write_session_meta { key: "server" value: "ipv6:[::1]:55492" } codec: 1 written_at { seconds: 1744916392 nanos: 10000000 } } } bytes_size: 2206 } 2025-04-17T18:59:52.701251Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 checking auth because of timeout 2025-04-17T18:59:52.701317Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 auth for : user 2025-04-17T18:59:52.701332Z node 1 :PQ_METACACHE DEBUG: Handle describe topics 2025-04-17T18:59:52.701343Z node 1 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2025-04-17T18:59:52.701373Z node 1 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 requests infly, db = "" 2025-04-17T18:59:52.701840Z node 1 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 6 ResultSet [{ Path: Root/PQ/rt3.dc1--acc--topic1 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true 
ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T18:59:52.701897Z node 1 :PQ_METACACHE DEBUG: Got describe topics SC response 2025-04-17T18:59:52.701919Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 Handle describe topics response 2025-04-17T18:59:52.701998Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 auth is DEAD 2025-04-17T18:59:52.702063Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 closed with error: reason# No ReadTopic permissions for '/Root/PQ/rt3.dc1--acc--topic1' for subject 'user_without_rights@builtin' 2025-04-17T18:59:52.702557Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_5218454141221926968_v1 is DEAD ===Expect UNAUTHORIZED, got response: status: UNAUTHORIZED issues { message: "No ReadTopic permissions for \'/Root/PQ/rt3.dc1--acc--topic1\' for subject \'user_without_rights@builtin\'" issue_code: 500018 severity: 1 } 2025-04-17T18:59:52.702779Z :INFO: [] MessageGroupId [source] SessionId [source|e2f24092-d9f06b55-26f53368-332dea41_0] Write session: close. Timeout = 10000 ms 2025-04-17T18:59:52.702830Z :INFO: [] MessageGroupId [source] SessionId [source|e2f24092-d9f06b55-26f53368-332dea41_0] Write session will now close 2025-04-17T18:59:52.702862Z :DEBUG: [] MessageGroupId [source] SessionId [source|e2f24092-d9f06b55-26f53368-332dea41_0] Write session: aborting 2025-04-17T18:59:52.702905Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_5218454141221926968_v1 2025-04-17T18:59:52.703038Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--acc--topic1] pipe [1:7494358835439724586:2595] disconnected; active server actors: 1 2025-04-17T18:59:52.703069Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037900][rt3.dc1--acc--topic1] pipe [1:7494358835439724586:2595] client user disconnected session shared/user_1_1_5218454141221926968_v1 2025-04-17T18:59:52.703189Z :INFO: [] MessageGroupId [source] SessionId [source|e2f24092-d9f06b55-26f53368-332dea41_0] Write session: gracefully shut down, all writes complete 2025-04-17T18:59:52.703228Z :DEBUG: [] MessageGroupId [source] SessionId [source|e2f24092-d9f06b55-26f53368-332dea41_0] Write session: destroy 2025-04-17T18:59:52.703816Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: source|e2f24092-d9f06b55-26f53368-332dea41_0 grpc read done: success: 0 data: 2025-04-17T18:59:52.703832Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: source|e2f24092-d9f06b55-26f53368-332dea41_0 grpc read failed 2025-04-17T18:59:52.703884Z node 1 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 3 sessionId: source|e2f24092-d9f06b55-26f53368-332dea41_0 2025-04-17T18:59:52.703898Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: source|e2f24092-d9f06b55-26f53368-332dea41_0 is DEAD 2025-04-17T18:59:52.704157Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-17T18:59:52.938793Z 
node 1 :PQ_METACACHE DEBUG: Check version rescan
2025-04-17T18:59:52.941573Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710703, task: 1, CA Id [1:7494358839734692094:2644]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0
2025-04-17T18:59:52.975551Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710703, task: 1, CA Id [1:7494358839734692094:2644]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
|68.0%| … |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest (repeated progress ticks)
>> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
|68.8%| … |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest (interleaved with ydb/core/kqp/ut/join/unittest ticks)
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt]
>> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt]
|69.0%| … |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest
>> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt]
|69.2%| … |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest (repeated progress ticks)
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small
|70.9%| … |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest and ydb/core/kqp/ut/yql/unittest
|71.7%| … |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest
|71.8%| … |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2
>> KqpJoinOrder::DatetimeConstantFold+ColumnStore
|72.3%| … |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree
|72.7%| … |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest (repeated progress ticks)
>> Viewer::Plan2SvgBad
|75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> Viewer::SelectStringWithBase64Encoding
>> Viewer::SelectStringWithNoBase64Encoding
>> Viewer::JsonStorageListingV1PDiskIdFilter
>> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> Viewer::Plan2SvgOK
|75.2%| … |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> Viewer::QueryExecuteScript
|75.3%| … |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest (repeated progress ticks)
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [FAIL]
|76.5%| … |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest and ydb/core/tx/tx_proxy/ut_base_tenant/unittest
>> TModifyUserTest::ModifyUser
|76.9%| … |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest (repeated progress ticks)
>> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt]
>> KqpSinkTx::OlapInvalidateOnError [FAIL]
>> KqpSinkMvcc::OlapMultiSinks [FAIL]
|77.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
>> Viewer::Plan2SvgBad [GOOD]
|77.6%| … |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
>> Viewer::Plan2SvgOK [GOOD]
|77.7%| … |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest and ydb/core/tx/datashard/ut_read_iterator/unittest
>> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt]
>> ReadIteratorExternalBlobs::ExtBlobsEmptyTable
>> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows
>> ReadIteratorExternalBlobs::NotExtBlobs
>> YdbLogStore::AlterLogTable
>> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD]
|78.3%| … |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >>
TModifyUserTest::ModifyUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: 2025-04-17T19:00:04.300331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358891147337211:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:04.300399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:04.788890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:04.791541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:04.791705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 31011, node 1 2025-04-17T19:00:04.799071Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:04.799097Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:04.799527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:04.933752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:04.933774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:04.933780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:04.933898Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61806 2025-04-17T19:00:07.221348Z node 1 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgOK [GOOD] Test command err: 2025-04-17T19:00:04.548545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358892407756076:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:04.548629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:05.034830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:05.056528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:05.056630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:05.058411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9757, node 1 2025-04-17T19:00:05.202946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:05.202978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-17T19:00:05.202991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:05.203114Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3573 2025-04-17T19:00:07.527428Z node 1 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUser [GOOD] Test command err: 2025-04-17T19:00:06.946595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358899227483423:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:06.946646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000517/r3tmp/tmpnCIIeP/pdisk_1.dat 2025-04-17T19:00:07.336130Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:07.376321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:07.376843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:07.379611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12354 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T19:00:07.570797Z node 1 :TX_PROXY DEBUG: actor# [1:7494358899227483667:2103] Handle TEvNavigate describe path dc-1 2025-04-17T19:00:07.570863Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358903522451237:2257] HANDLE EvNavigateScheme dc-1 2025-04-17T19:00:07.571781Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494358903522451048:2142], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T19:00:07.571851Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7494358903522451048:2142], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-17T19:00:07.572102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-17T19:00:07.574309Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483364:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494358903522451242:2258] 2025-04-17T19:00:07.574319Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483367:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494358903522451243:2258] 2025-04-17T19:00:07.574373Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494358899227483364:2049] Subscribe: subscriber# [1:7494358903522451242:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T19:00:07.574374Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494358899227483367:2052] Subscribe: subscriber# [1:7494358903522451243:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T19:00:07.574434Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483370:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7494358903522451244:2258] 2025-04-17T19:00:07.574450Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7494358899227483370:2055] Subscribe: subscriber# [1:7494358903522451244:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-17T19:00:07.574459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451243:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494358899227483367:2052] 2025-04-17T19:00:07.574483Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483367:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494358903522451243:2258] 2025-04-17T19:00:07.574485Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451242:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494358899227483364:2049] 2025-04-17T19:00:07.574498Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483364:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494358903522451242:2258] 2025-04-17T19:00:07.574504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451244:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7494358899227483370:2055] 2025-04-17T19:00:07.574522Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483370:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7494358903522451244:2258] 2025-04-17T19:00:07.574583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494358903522451240:2258] 2025-04-17T19:00:07.574625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494358903522451239:2258] 2025-04-17T19:00:07.574682Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7494358903522451238:2258][/dc-1] Set up state: owner# [1:7494358903522451048:2142], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T19:00:07.575894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7494358903522451241:2258] 2025-04-17T19:00:07.575971Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7494358903522451238:2258][/dc-1] Path was already updated: owner# [1:7494358903522451048:2142], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-17T19:00:07.576039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451242:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451239:2258], cookie# 1 2025-04-17T19:00:07.576067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451243:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451240:2258], cookie# 1 2025-04-17T19:00:07.576089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451244:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451241:2258], cookie# 1 2025-04-17T19:00:07.576120Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483367:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451243:2258], cookie# 1 2025-04-17T19:00:07.576149Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483370:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451244:2258], cookie# 1 2025-04-17T19:00:07.576188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451243:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494358899227483367:2052], cookie# 1 2025-04-17T19:00:07.576203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451244:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494358899227483370:2055], cookie# 1 2025-04-17T19:00:07.576238Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494358903522451240:2258], cookie# 1 2025-04-17T19:00:07.576282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T19:00:07.576338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494358903522451241:2258], cookie# 1 2025-04-17T19:00:07.576358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T19:00:07.576388Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483364:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451242:2258], cookie# 1 2025-04-17T19:00:07.576405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451242:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494358899227483364:2049], cookie# 1 2025-04-17T19:00:07.576426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7494358903522451239:2258], cookie# 1 2025-04-17T19:00:07.576464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Unexpected sync response: sender# [1:7494358903522451239:2258], cookie# 1 2025-04-17T19:00:07.631942Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494358903522451048:2142], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-17T19:00:07.632362Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494358903522451048:2142], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046644480, cookie: 281474976710666 2025-04-17T19:00:08.043206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710666 2025-04-17T19:00:08.043221Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710666, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 13 2025-04-17T19:00:08.043242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-17T19:00:08.043311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710666, subscribers: 0 2025-04-17T19:00:08.043505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTYwOCwiaWF0IjoxNzQ0OTE2NDA4LCJzdWIiOiJ1c2VyMiJ9.jHfZv_7qYu7q9UacdmHh1RmGNYyWE-g4yEXtbeSkxviJFfSx7KZbjNESExOlgFCBTC01a4WCL1JiS93M61xoYr2EIgwwk8DlFRG0yJG9k6uL8_oZDXE-irrtubUdI-7Dg8rQBw3CxGCe8AbZ7LOF4NKJZxS3thtAV7pJeqYZeRVSMfeIGDy7lVfqn6dzb878S60K14LTrMpL6efQt4lCYv_uSoMIaXuJnsBXmxb4UjnxH52LxYVcS4Qt57il7IZSjdufom_jYZPDhdPkUDPiphf3TkBCX8ndaJ3suQZR2YQVqa_AlrFo45Amp1DWpobc7abSKQr3HEFP4wYnpCnlsg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTYwOCwiaWF0IjoxNzQ0OTE2NDA4LCJzdWIiOiJ1c2VyMiJ9.**" IsAdmin: true, at schemeshard: 72057594046644480 2025-04-17T19:00:08.043581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710666 TClient::Ls request: /dc-1 2025-04-17T19:00:08.044183Z node 1 :TX_PROXY DEBUG: actor# [1:7494358899227483667:2103] Handle TEvNavigate describe path /dc-1 2025-04-17T19:00:08.044254Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358907817418679:2376] HANDLE EvNavigateScheme /dc-1 2025-04-17T19:00:08.044353Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7494358903522451048:2142], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-17T19:00:08.044455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7494358903522451048:2142], cookie# 16 2025-04-17T19:00:08.044521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451242:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451239:2258], cookie# 16 2025-04-17T19:00:08.044554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451243:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451240:2258], cookie# 16 2025-04-17T19:00:08.044568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7494358903522451244:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451241:2258], cookie# 16 2025-04-17T19:00:08.044589Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483367:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451243:2258], cookie# 16 2025-04-17T19:00:08.044604Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483364:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451242:2258], cookie# 16 2025-04-17T19:00:08.044645Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7494358899227483370:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7494358903522451244:2258], cookie# 16 2025-04-17T19:00:08.044651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451243:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7494358899227483367:2052], cookie# 16 2025-04-17T19:00:08.044691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451242:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7494358899227483364:2049], cookie# 16 2025-04-17T19:00:08.044709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7494358903522451244:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7494358899227483370:2055], cookie# 16 2025-04-17T19:00:08.044754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7494358903522451240:2258], cookie# 16 2025-04-17T19:00:08.044775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Sync is in progress: cookie# 16, size# 3, half# 1, successes# 1, faulires# 0 2025-04-17T19:00:08.044808Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7494358903522451239:2258], cookie# 16 2025-04-17T19:00:08.044830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Sync is done: cookie# 16, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-17T19:00:08.044857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7494358903522451241:2258], cookie# 16 2025-04-17T19:00:08.044890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7494358903522451238:2258][/dc-1] Unexpected sync response: sender# [1:7494358903522451241:2258], cookie# 16 2025-04-17T19:00:08.044921Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7494358903522451048:2142], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-17T19:00:08.045011Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7494358903522451048:2142], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7494358903522451238:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744916407843 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 
SchemaVersion: 0 }, by pathId# nullptr 2025-04-17T19:00:08.045132Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7494358903522451048:2142], cacheItem# { Subscriber: { Subscriber: [1:7494358903522451238:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744916407843 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 16 IsSync: true Partial: 0 } 2025-04-17T19:00:08.045283Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7494358907817418680:2377], recipient# [1:7494358907817418679:2376], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-04-17T19:00:08.045328Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358907817418679:2376] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:08.045406Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358907817418679:2376] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-17T19:00:08.045996Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358907817418679:2376] Handle TEvDescribeSchemeResult Forward to# [1:7494358907817418678:2375] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744916407843 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\010\032\005user1 \003" EffectiveACL: "\n\016\010\001\020\200\010\032\005user1 \003" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 6 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1cn4YN/kKtL98S+wg8Dy\n3z8+oxJrYcdNixfaQJWtwf0Kb5B+21T0B7BsfDh5KGmzmF/ySVM/bg/xpDMYysGm\nBPgjfPZEWwVWKJApFg8WOjTEbdAKERN2Gdv0gZkBZ7Y/tr4f5A7+phA9nlZ8lCgB\nyVs5AGGJ3SJX6Zg8XW2bZSI8aPXzlVj5sxizZGg64xgGfKHJiRj/OVX2UQSwQ83K\nKd0AOpwQCtKEAdNoCtRtvRjGwwaJcfes6JsBlGHZJTbh7pj/jT0CUfhybbNZcjjo\n1lruuJRZMSqlE2LAZIByKVQfXZr3nCmWjMEtI4TRXatyygNm+fHycu+NxzV+Uzgn\nBQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1745002807875 } Sids { Name: "user2" Type: USER } Sids { Name: "user1" Type: USER } Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744916407843 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\010\032\005user1 \003" EffectiveACL: "\n\016\010\001\020\200\010\032\005user1 \003" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744916407857 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "roo... (TRUNCATED) |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] Test command err: 2025-04-17T19:00:04.454911Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358889919454239:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:04.454987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:04.807258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:04.854576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:04.854678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:04.856180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64362, node 1 2025-04-17T19:00:04.933888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:04.933909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:04.933922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:04.934065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64002 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:05.409501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:05.431544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T19:00:05.447978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.457793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:05.464487Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T19:00:07.239043Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:07.239095Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:07.629525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358902804356824:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.629615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358902804356819:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.629715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.633371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:07.642196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358902804356833:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T19:00:07.710006Z node 1 :TX_PROXY ERROR: Actor# [1:7494358902804356884:2358] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:08.035815Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmE2MDUwOTUtZjJhMTQ4ZmItYTMxYmQzZjEtMTVkNmYyZWM=, ActorId: [1:7494358902804356817:2340], ActorState: ExecuteState, TraceId: 01js2hysab16evwsawsmtfa8tr, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [FAIL] Test command err: 2025-04-17T19:00:01.444866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358878851706515:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:01.445023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00033f/r3tmp/tmp8KeIfu/pdisk_1.dat 2025-04-17T19:00:01.803652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8013, node 1 2025-04-17T19:00:01.826662Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:01.826687Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:01.855045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:01.855068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:01.855077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:01.855247Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T19:00:01.857238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:01.857331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:01.861582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20955 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:02.098422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:04.048823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358891736609426:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:04.048924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:04.288455Z node 1 :TX_PROXY DEBUG: actor# [1:7494358878851706739:2141] Handle TEvProposeTransaction 2025-04-17T19:00:04.288501Z node 1 :TX_PROXY DEBUG: actor# [1:7494358878851706739:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:00:04.288562Z node 1 :TX_PROXY DEBUG: actor# [1:7494358878851706739:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358891736609447:2635] 2025-04-17T19:00:04.360601Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Group" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } IndexDescription { Name: "value_idx" KeyColumnNames: "Value" Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } } } } UserToken: "" DatabaseName: "" 2025-04-17T19:00:04.360658Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:04.361020Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:04.361083Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:04.361270Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:04.361416Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:04.361518Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:00:04.361748Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:00:04.363644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.367280Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:00:04.367358Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609447:2635] txid# 281474976710658 SEND to# [1:7494358891736609446:2342] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:00:04.580094Z node 1 :TX_PROXY DEBUG: actor# [1:7494358878851706739:2141] 
Handle TEvNavigate describe path /Root 2025-04-17T19:00:04.580137Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609756:2883] HANDLE EvNavigateScheme /Root 2025-04-17T19:00:04.580357Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609756:2883] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:04.580451Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609756:2883] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root" Options { ShowPrivateTable: false } 2025-04-17T19:00:04.581043Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358891736609756:2883] Handle TEvDescribeSchemeResult Forward to# [1:7494358891736609754:2358] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 65 Record# Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744916402159 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/Root:test" Kind: "test" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "ssd" Kind: "ssd" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-04-17T19:00:04.590319Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358891736609758:2359] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/table TableId: [72057594046644480:2:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:04.590346Z node 1 :TX_PROXY DEBUG: [CreateExport] 
[1:7494358891736609758:2359] [0] Allocate txId 2025-04-17T19:00:04.590662Z node 1 :TX_PROXY DEBUG: actor# [1:7494358878851706739:2141] Handle TEvAllocateTxId 2025-04-17T19:00:04.590736Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358891736609758:2359] [281474976710659] TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T19:00:04.590746Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358891736609758:2359] [281474976710659] Resolve database: name# /Root 2025-04-17T19:00:04.591117Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358891736609758:2359] [281474976710659] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:04.591132Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358891736609758:2359] [281474976710659] Send request: schemeS ... 281474976715759 2025-04-17T19:00:04.672815Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete REQUEST: PUT /test_bucket/table/metadata.json HTTP/1.1 HEADERS: Host: localhost:18242 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 152C4EA1-07ED-4BB3-A22F-CB3DC6B1D711 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=7d341eb8bc8a3b55686729df8ca8eef4955de8ba4a2ddbe53c952427adf3f384 content-length: 79 content-md5: l5IBtTZMXo9aMBCyypZoMQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: ccc517dd89dfd6995101337a918ff58552d9cb53ddc50cecb7a00b5ae601bd7e x-amz-date: 20250417T190004Z S3_MOCK::HttpServeWrite: /test_bucket/table/metadata.json / / 79 REQUEST: PUT /test_bucket/table/scheme.pb HTTP/1.1 HEADERS: Host: localhost:18242 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B314450A-49E7-4869-82A0-1B17C5973B00 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=d3e8af671f2cbca8d8956eb59796a2a4b9f5d0a6a6177db245bf23a3c0515193 content-length: 1192 content-md5: zVIm2WDU6upM/0yq6GRCkw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 3a1e3a967f9c3f3ab1544da8b73f32f1f2a4e401ccdca3c54786a72125d1f966 x-amz-date: 20250417T190004Z S3_MOCK::HttpServeWrite: /test_bucket/table/scheme.pb / / 1192 REQUEST: PUT /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:18242 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 783D4880-0B63-47FE-B925-ABBD3088F4E9 amz-sdk-request: 
attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=470f2fa7175e9f33c2c25626356732aaba035c52c87be03fb50d6a9de1572a7f content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T190004Z S3_MOCK::HttpServeWrite: /test_bucket/table/data_00.csv / / 0 2025-04-17T19:00:04.774989Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:04.775013Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715759 2025-04-17T19:00:04.775024Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715759, id# 281474976710659, itemIdx# 0 2025-04-17T19:00:04.775253Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36598' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 1 } 2025-04-17T19:00:04.776953Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:04.777025Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:04.777041Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715760, id# 281474976710659 2025-04-17T19:00:04.777109Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36598' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/table' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 2] SourcePathType: EPathTypeTable State: Dropping SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715760 2025-04-17T19:00:04.777184Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:04.780730Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:04.780756Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715760, status# StatusAccepted 2025-04-17T19:00:04.780821Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36598' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 0, txId# 281474976715760 2025-04-17T19:00:04.784327Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:04.811046Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:04.811070Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715760 2025-04-17T19:00:04.811081Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715760, id# 281474976710659, itemIdx# 0 2025-04-17T19:00:04.811158Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710659 Uid: '' 
Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36598' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T19:00:04.812473Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:04.812539Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:04.812549Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715761, id# 281474976710659 2025-04-17T19:00:04.812591Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36598' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976715761 2025-04-17T19:00:04.812656Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:04.813345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715761:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.815263Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:04.815285Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715761, status# StatusAccepted 2025-04-17T19:00:04.815361Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36598' State: AutoDropping WaitTxId: 281474976715761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715761 2025-04-17T19:00:04.817757Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:04.818135Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358891736610092:2368] [0] Resolve database: name# /Root 2025-04-17T19:00:04.818693Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358891736610092:2368] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:04.818713Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358891736610092:2368] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:04.819255Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358891736610092:2368] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710659 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:18242" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/table" destination_prefix: "/table" } } StartTime { seconds: 1744916404 } } 2025-04-17T19:00:04.823118Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-17T19:00:04.826560Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:04.826600Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761 2025-04-17T19:00:04.826614Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761, id# 281474976710659, itemIdx# 4294967295 2025-04-17T19:00:04.828443Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B7703D) NTestSuiteBackupRestoreS3::TestTableWithIndexBackupRestore(NKikimrSchemeOp::EIndexType, bool)+1398 (0x18BC6D86) NTestSuiteBackupRestoreS3::TTestCaseTestAllIndexTypes::Execute_(NUnitTest::TTestContext&)+217 (0x18BCAD59) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7F7E7C5D8D90) __libc_start_main+128 (0x7F7E7C5D8E40) _start+41 (0x164AC029) |79.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 16961, MsgBus: 29568 2025-04-17T18:59:51.964185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358835905380928:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:51.964257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000454/r3tmp/tmprHTuP6/pdisk_1.dat 2025-04-17T18:59:52.330379Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:59:52.364166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:52.364664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:52.366082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16961, node 1 2025-04-17T18:59:52.534776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:52.534804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:52.534820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:52.534934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29568 TClient is connected to server localhost:29568 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:53.211559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:53.246892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:53.386619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:53.509905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:53.584946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:54.481623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358848790284461:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:54.481771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:55.022748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:59:55.049174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:59:55.072760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T18:59:55.096320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T18:59:55.120752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T18:59:55.147816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T18:59:55.174239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T18:59:55.235860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358853085252423:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:55.235937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:55.235983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358853085252429:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:55.241277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T18:59:55.267702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358853085252431:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T18:59:55.363152Z node 1 :TX_PROXY ERROR: Actor# [1:7494358853085252488:3550] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:59:56.511592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T18:59:56.678586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494358857380220284:2528];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:56.678585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:56.678737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:56.678935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:56.679009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:56.679083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:56.679152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:56.679207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494358857380220284:2528];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:56.679210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:56.679270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:56.679336Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:56.679405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494358857380220284:2528];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:56.679419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:56.679480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358857380220270:2522];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:56.679515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494358857380220284:2528];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_regi ... xUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:59:56.828328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:59:56.828360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:59:56.828530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:59:56.828562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:59:56.828634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:59:56.828687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:59:56.828758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:59:56.828781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:59:56.828842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:59:56.828876Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:59:56.829371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:59:56.829444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:59:56.829619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:59:56.829676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:59:56.829801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:59:56.829841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:59:56.830026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:59:56.830065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:59:56.830200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:59:56.830221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:59:56.867842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.868065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.874124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.880307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.886092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.891688Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.897145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.902487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.907874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.907886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T18:59:56.963705Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358835905380928:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:56.973450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:59:57.069775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:59:57.069775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:59:57.070368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T18:59:57.140325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710676;tx_id=281474976710676;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710676; query_phases { duration_us: 7055 cpu_time_us: 2100 affected_shards: 1 } query_phases { duration_us: 5210 cpu_time_us: 308 affected_shards: 1 } compilation { duration_us: 50408 cpu_time_us: 47745 } process_cpu_time_us: 630 total_duration_us: 64633 total_cpu_time_us: 50783 AddressSanitizer:DEADLYSIGNAL ================================================================= ==972531==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018e0e20d bp 0x7ffe5bf7e220 sp 0x7ffe5bf7e080 T0) ==972531==The signal is caused by a READ memory access. ==972531==Hint: address points to the zero page. 
2025-04-17T19:00:07.311738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:00:07.311785Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18e0e20d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18e0e20d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18e0e20d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18e0e20d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18e0e20d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18e33327 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18e33327 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18e33327 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18e33327 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18e33327 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #10 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x197855d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19754f68 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #14 0x18e321d3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19756835 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #16 0x1977fb4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f11a26d2d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f11a26d2e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x16571028 in _start (/home/runner/.ya/build/build_root/26k2/000454/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16571028) (BuildId: 1bdc4290ca22bc21b0df7e2b25fc7bf1adbeab0b) AddressSanitizer can not provide additional info. 
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==972531==ABORTING ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 14540, MsgBus: 22690 2025-04-17T18:59:46.868145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358816100340287:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:46.870485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000225/r3tmp/tmpTQ1n2Y/pdisk_1.dat 2025-04-17T18:59:47.213163Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:59:47.250503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:47.250619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:47.252844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14540, node 1 2025-04-17T18:59:47.380289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:47.380328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:47.380333Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:47.380419Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22690 TClient is connected to server localhost:22690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:47.881507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:48.948711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358824690275397:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:48.948717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358824690275409:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:48.948824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:48.953096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:59:48.959824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358824690275411:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:59:49.058679Z node 1 :TX_PROXY ERROR: Actor# [1:7494358828985242759:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:59:49.401553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:59:49.508226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:49.508228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:49.508376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:49.508571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:49.508651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:49.508729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:49.508820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:49.508879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:49.509005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:49.509039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:49.509124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-17T18:59:49.509126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:49.509216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:49.509220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:49.509296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:49.509316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:49.509389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:49.509404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:49.509506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358828985242955:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:49.509506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:49.509623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:49.509708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:49.509812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:49.509889Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7494358828985242971:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:49.535913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494358828985242951:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:49.535972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7494358828985242951:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:49.536115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;sel ... tablet_id=72075186224037984;self_id=[1:7494358833280211454:2483];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037984;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.405897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[1:7494358833280211459:2484];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037982;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.405968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[1:7494358833280211842:2547];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037941;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7494358833280211928:2562];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037936;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[1:7494358833280211842:2547];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037941;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[1:7494358833280211395:2481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037995;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7494358833280211928:2562];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037936;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[1:7494358833280211395:2481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037995;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[1:7494358833280211605:2499];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037974;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[1:7494358833280211450:2482];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037991;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406811Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037974;self_id=[1:7494358833280211605:2499];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037974;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.406930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[1:7494358833280211450:2482];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037991;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.407710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[1:7494358833280211480:2488];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037986;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.407779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[1:7494358833280211597:2497];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037976;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.407883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[1:7494358833280211480:2488];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037986;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.407945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[1:7494358833280211597:2497];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037976;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494358833280211351:2471];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[1:7494358833280211661:2506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494358833280211351:2471];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[1:7494358833280211661:2506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037968;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[1:7494358833280211712:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037966;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[1:7494358833280211712:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037966;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[1:7494358833280211830:2541];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037931;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408434Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037994;self_id=[1:7494358833280211391:2479];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037994;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[1:7494358833280211830:2541];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037931;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[1:7494358833280211391:2479];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037994;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[1:7494358833280211491:2492];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037979;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[1:7494358833280211491:2492];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037979;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7494358833280211780:2526];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037950;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[1:7494358833280211484:2489];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037981;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7494358833280211780:2526];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037950;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.408926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[1:7494358833280211484:2489];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037981;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.409781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[1:7494358833280211735:2512];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037971;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.409991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[1:7494358833280211735:2512];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.419095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[1:7494358833280211913:2558];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037911;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.419356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[1:7494358833280211913:2558];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037911;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
<main>: Error: Bad request. Table: `/Root/KV`., code: 2017
<main>: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x1903269E 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x190112E8 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x19018767 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x19018767 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x19018767 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x19017933 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC 17. ??:0: ?? @ 0x7F0BBBD7AD8F 18. ??:0: ?? @ 0x7F0BBBD7AE3F 19. ??:0: ?? @ 0x16619028 |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [FAIL] Test command err: Trying to start YDB, gRPC: 24826, MsgBus: 8249 2025-04-17T18:59:46.842669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358815007224317:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:46.842772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0002de/r3tmp/tmpkt3umG/pdisk_1.dat 2025-04-17T18:59:47.213036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:59:47.250465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:47.250575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:47.252209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24826, node 1 2025-04-17T18:59:47.380303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:47.380336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:47.380343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:47.380457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:8249 TClient is connected to server localhost:8249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:47.930741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:48.959809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358823597159580:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:48.959857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358823597159569:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:48.959952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:48.962789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T18:59:48.968335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358823597159583:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T18:59:49.030931Z node 1 :TX_PROXY ERROR: Actor# [1:7494358827892126930:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:59:49.401541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T18:59:49.505891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:49.505895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:49.506075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:49.506383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:49.506471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:49.506497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:49.506584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:49.506630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:49.506671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:49.506705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:49.506755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-17T18:59:49.506774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:49.506830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:49.506835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:49.506926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:49.506929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:49.507033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:49.507041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:49.507118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:49.507125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:49.507210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494358827892127126:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:49.507244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:49.507312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:49.507390Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494358827892127112:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:49.532699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494358827892127108:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:49.532748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494358827892127108:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:49.532896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_i ... e::TEvPeriodicWakeup;tablet_id=72075186224037901;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.253675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[1:7494358832187095853:2524];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037956;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.253762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494358832187096103:2550];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037901;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.253814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[1:7494358832187095812:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037963;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.253926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358832187096178:2557];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037903;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.253948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[1:7494358832187095812:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037963;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358832187096178:2557];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037903;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7494358832187096001:2541];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7494358832187096001:2541];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7494358832187095999:2540];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254367Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037924;self_id=[1:7494358832187095979:2537];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037924;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7494358832187095999:2540];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7494358832187095979:2537];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037924;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;self_id=[1:7494358832187096213:2561];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037912;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;self_id=[1:7494358832187095987:2539];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037955;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;self_id=[1:7494358832187096213:2561];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037912;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;self_id=[1:7494358832187095987:2539];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037955;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[1:7494358832187096171:2556];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037918;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7494358832187095971:2535];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037933;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.254969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[1:7494358832187096171:2556];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7494358832187095971:2535];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037933;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[1:7494358832187096029:2546];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037939;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494358832187096027:2545];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037927;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255302Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037939;self_id=[1:7494358832187096029:2546];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037939;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7494358832187096027:2545];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037927;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[1:7494358832187096237:2564];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7494358832187096148:2555];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037919;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7494358832187096148:2555];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[1:7494358832187096237:2564];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[1:7494358832187095836:2520];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7494358832187096359:2565];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037910;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7494358832187096359:2565];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.255955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[1:7494358832187095836:2520];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.256067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[1:7494358832187096210:2560];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037913;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.256195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[1:7494358832187096143:2553];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.256204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[1:7494358832187096210:2560];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037913;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:00:07.256349Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037916;self_id=[1:7494358832187096143:2553];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:588, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]"
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F
2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:588: CompareYson @ 0x4900DB47
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x1900EF4A
4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18FDD558
6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18FE36B7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FE36B7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FE36B7
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955
12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955
13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8
15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18FE2883
16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5
17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC
18. ??:0: ?? @ 0x7F446FB99D8F
19. ??:0: ?? @ 0x7F446FB99E3F
20. ??:0: ?? @ 0x16619028
|80.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.4%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |80.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ...
results_accumulator.log} >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.7%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |80.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> Viewer::QueryExecuteScript [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> YdbLogStore::AlterLogTable [FAIL] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.0%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/columnshard/ut_schema/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::QueryExecuteScript [GOOD]
Test command err: 2025-04-17T19:00:04.717225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358891839548139:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:04.717274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:05.118483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:05.122756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:05.122827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:05.125607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16621, node 1 2025-04-17T19:00:05.230911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:05.230941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:05.230948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:05.231072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:05.509205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-17T19:00:05.526940Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T19:00:05.542547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:05.546885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.550570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T19:00:07.530778Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:07.530830Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:07.812226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358904724450725:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.812320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358904724450733:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.812368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.816510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:07.825597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358904724450739:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T19:00:07.911842Z node 1 :TX_PROXY ERROR: Actor# [1:7494358904724450791:2358] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:08.145329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:08.303430Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.303459Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.456517Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.456571Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.515186Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.515221Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.557564Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.557595Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.610061Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.610103Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.657763Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.657785Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.709755Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.709791Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.756152Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.756181Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.805907Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.805937Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.855589Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.855618Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.905333Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.905370Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:08.963973Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:08.964007Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:09.013426Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:09.013457Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:09.064844Z node 1 :TICKET_PARSER INFO: Ticket 
parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:09.064877Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:09.146790Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:09.146835Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:09.215433Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:09.215467Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:09.222285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T19:00:09.223718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T19:00:09.225418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T19:00:09.717498Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358891839548139:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:09.717575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:10.554750Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:10.554794Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:11.104867Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:11.104906Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:11.446608Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-17T19:00:11.446644Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-17T19:00:11.939718Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-17T19:00:11.940676Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916411966, txId: 281474976710691] shutting down |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.7%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/federated_query/generic_ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown |82.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |82.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD]
Test command err: 2025-04-17T19:00:12.567159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:12.567848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:12.567912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0004ac/r3tmp/tmpU7urI0/pdisk_1.dat 2025-04-17T19:00:13.025411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.079210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:13.119244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:13.119379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:13.131131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:13.222423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.622916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.623047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.623138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.631056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:13.799576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:758:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T19:00:13.867456Z node 1 :TX_PROXY ERROR: Actor# [1:832:2676] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:14.813230Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2hyz5hdn1xz5wad767mbay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M2Y2ZlZTEtYTJlN2VlYWQtNzQ5NjRmZGQtOTIzMmYyMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> TPersQueueTest::InflightLimit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD]
Test command err: Starting YDB, grpc: 8732, msgbus: 7738 2025-04-17T19:00:12.928714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358924636675613:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:12.928790Z node 1 :METADATA_PROVIDER ERROR:
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000175/r3tmp/tmp5TxI67/pdisk_1.dat 2025-04-17T19:00:13.303337Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:13.310929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:13.311032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8732, node 1 2025-04-17T19:00:13.323042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:13.347548Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T19:00:13.347779Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-17T19:00:13.407113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:13.407133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:13.407138Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:13.407229Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7738 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-17T19:00:13.635008Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handle TEvNavigate describe path dc-1 2025-04-17T19:00:13.635104Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643635:2445] HANDLE EvNavigateScheme dc-1 2025-04-17T19:00:13.636125Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643635:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:13.682610Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643635:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T19:00:13.703055Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643635:2445] Handle TEvDescribeSchemeResult Forward to# [1:7494358928931643634:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 
PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T19:00:13.755226Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handle TEvProposeTransaction 2025-04-17T19:00:13.755255Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T19:00:13.755384Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494358928931643648:2451] 2025-04-17T19:00:13.815120Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T19:00:13.815183Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:13.815195Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T19:00:13.815248Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:13.815442Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:13.815542Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T19:00:13.815588Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T19:00:13.815694Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T19:00:13.816277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.818993Z 
node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-17T19:00:13.819044Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643648:2451] txid# 281474976710657 SEND to# [1:7494358928931643647:2450] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-04-17T19:00:13.837896Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handle TEvProposeTransaction 2025-04-17T19:00:13.837938Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:00:13.837972Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358928931643691:2490] 2025-04-17T19:00:13.839914Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T19:00:13.839968Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:13.839982Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T19:00:13.840028Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:13.840275Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:13.840350Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:13.840386Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:00:13.840511Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:00:13.840953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.847036Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-17T19:00:13.847084Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928931643691:2490] txid# 281474976710658 SEND to# [1:7494358928931643690:2489] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-17T19:00:15.894399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7494358937521578382:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:15.894485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358937521578374:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:15.894617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:15.894911Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handl ... 866Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:16.008899Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-17T19:00:16.009445Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:16.009524Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:16.009660Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:16.009757Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:16.009807Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-17T19:00:16.009991Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 HANDLE EvClientConnected 2025-04-17T19:00:16.012727Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:00:16.014233Z node 1 :TX_PROXY ERROR: Actor# [1:7494358941816545762:2574] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:16.014268Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545762:2574] txid# 281474976710660 SEND to# [1:7494358937521578388:2339] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-17T19:00:16.293027Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handle TEvProposeTransaction 2025-04-17T19:00:16.293081Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] TxId# 281474976710661 ProcessProposeTransaction 2025-04-17T19:00:16.293129Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7494358941816545785:2585] 2025-04-17T19:00:16.295490Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true 
IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56042" 2025-04-17T19:00:16.295548Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:16.295564Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T19:00:16.295609Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:16.295896Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:16.295982Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:16.296022Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-17T19:00:16.296173Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 HANDLE EvClientConnected 2025-04-17T19:00:16.303164Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-17T19:00:16.303225Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545785:2585] txid# 281474976710661 SEND to# [1:7494358941816545784:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-17T19:00:16.419393Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handle TEvProposeTransaction 2025-04-17T19:00:16.419427Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] TxId# 281474976710662 ProcessProposeTransaction 2025-04-17T19:00:16.419489Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7494358941816545808:2599] 2025-04-17T19:00:16.421907Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56058" 2025-04-17T19:00:16.421968Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:16.421992Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T19:00:16.422050Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7494358941816545808:2599] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:16.422418Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:16.422512Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:16.422560Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-17T19:00:16.422689Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 HANDLE EvClientConnected 2025-04-17T19:00:16.423122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T19:00:16.427945Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-17T19:00:16.427990Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545808:2599] txid# 281474976710662 SEND to# [1:7494358941816545807:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-17T19:00:16.482353Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handle TEvProposeTransaction 2025-04-17T19:00:16.482387Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] TxId# 281474976710663 ProcessProposeTransaction 2025-04-17T19:00:16.482440Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7494358941816545846:2619] 2025-04-17T19:00:16.484804Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545846:2619] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTYxNiwiaWF0IjoxNzQ0OTE2NDE2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.XS31Eh-Mxm4maV3dnaDngxkBaLAB4Co1NIQ1_Alg-M9d7MJbYbJPk-KXjpbiiK9qpgkfS-SqRhrBT2Xyio63Mr1RZidjfSoZvp1TVv7J0mbT_RqXFtLhzRXtsBv7md-1UQlgCEdOV6jJaAWMTXpLtC_MBICeR1xDB-A8_qr4fsnpD1pF1D5l-aWim37vJkrZK5oaK5YVZPLZ7aaZsSUSj_E4o0qMc6vtQaynM7btPd0T2j6SNB1_3N3Ym_Qf5mwfNuHO2W2mr8NSh5LCVNGnVj7zve0ixTJTgq_BtrnL-zK08-AmSgNn2DGRtiXUmDv6M49qPzF1Sg2Di0E9dcJ8fw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDk1OTYxNiwiaWF0IjoxNzQ0OTE2NDE2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56086" 2025-04-17T19:00:16.484864Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545846:2619] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:16.484880Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545846:2619] txid# 281474976710663 
Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-17T19:00:16.484923Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545846:2619] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:16.485133Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545846:2619] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:16.485189Z node 1 :TX_PROXY ERROR: Actor# [1:7494358941816545846:2619] txid# 281474976710663, Access denied for ordinaryuser on path /dc-1, with access AlterSchema 2025-04-17T19:00:16.485283Z node 1 :TX_PROXY ERROR: Actor# [1:7494358941816545846:2619] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-17T19:00:16.485318Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358941816545846:2619] txid# 281474976710663 SEND to# [1:7494358941816545845:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-17T19:00:16.495844Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWNmYzk1NzktYzU1MzBlODktYzYxN2EzMmMtOWE5MjQ3MGE=, ActorId: [1:7494358941816545831:2352], ActorState: ExecuteState, TraceId: 01js2hz1yn708x5zq5rcyp06xw, Create QueryResponse for error on request, msg: 2025-04-17T19:00:16.496191Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] Handle TEvExecuteKqpTransaction 2025-04-17T19:00:16.496221Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924636675866:2139] TxId# 281474976710664 ProcessProposeKqpTransaction |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2025-04-17T18:59:44.028669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358805451915816:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:44.028794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:59:44.045038Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494358807323146815:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:44.045098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T18:59:44.142658Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-17T18:59:44.146555Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00030e/r3tmp/tmpZ1xZTC/pdisk_1.dat 2025-04-17T18:59:44.280653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-17T18:59:44.280851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:44.284022Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:59:44.284652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:44.309992Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5823, node 1 2025-04-17T18:59:44.332843Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:59:44.332883Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:59:44.350628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/26k2/00030e/r3tmp/yandexmbAlDh.tmp 2025-04-17T18:59:44.350655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/26k2/00030e/r3tmp/yandexmbAlDh.tmp 2025-04-17T18:59:44.350810Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/26k2/00030e/r3tmp/yandexmbAlDh.tmp 2025-04-17T18:59:44.350946Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:59:44.374919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:44.374987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:44.377548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:44.388512Z INFO: TTestServer started on Port 11175 GrpcPort 5823 TClient is connected to server localhost:11175 PQClient connected to localhost:5823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:44.584585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T18:59:44.618339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
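The access-control flow recorded above — root@builtin passing the IsClusterAdministrator check while ordinaryuser is denied AlterSchema on /dc-1 — can be reproduced from client code. A minimal sketch using the public ydb Python SDK, assuming a locally reachable cluster; the endpoint, database and token values are placeholders, not values taken from this log:

import ydb

def try_create_user(endpoint, database, token):
    # Placeholders only: endpoint e.g. "grpc://localhost:2135",
    # database "/dc-1" as in the log above.
    driver = ydb.Driver(
        endpoint=endpoint,
        database=database,
        credentials=ydb.AccessTokenCredentials(token),
    )
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    try:
        # On the schemeshard side this statement maps to the
        # ESchemeOpAlterLogin/CreateUser operation proposed above.
        pool.retry_operation_sync(
            lambda session: session.execute_scheme(
                "CREATE USER targetuser PASSWORD 'passwd';"
            )
        )
    finally:
        pool.stop()
        driver.stop()

Run once with an administrator token and once with an ordinary user's token: the first proposal returns StatusSuccess, while the second fails with the same "Access denied for ordinaryuser on path /dc-1" issue seen above (TEvProposeTransactionStatus Status# 5).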
2025-04-17T18:59:46.138293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494358815913081719:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:46.138355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494358815913081730:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:46.147113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:46.178709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-17T18:59:46.192708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494358815913081733:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-17T18:59:46.333131Z node 2 :TX_PROXY ERROR: Actor# [2:7494358815913081761:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T18:59:46.352361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T18:59:46.352792Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7494358814041851610:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:59:46.353017Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTg3NGQzZjQtNTc4M2JjMjktNDk3ZTliODEtMzYyZWMyMWY=, ActorId: [1:7494358814041851558:2340], ActorState: ExecuteState, TraceId: 01js2hy4caa11fbjhhsaqhp726, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:59:46.353967Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7494358815913081776:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-17T18:59:46.354119Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTM2YzQzZWUtYjRhY2ViMC00YTY1YjBmMi1kOTZhN2QwNQ==, ActorId: [2:7494358815913081717:2307], ActorState: ExecuteState, TraceId: 01js2hy4a6c4760qc6ja38xvtm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-17T18:59:46.354565Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:59:46.354570Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-17T18:59:46.431050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T18:59:46.514467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-17T18:59:46.672484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2hy4rh40pstn70dpqx6p8q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA5MGIxOTUtZTExNjM4M2EtOGY4ZWU0NDEtOThkMGRkNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7494358814041852013:3071] 2025-04-17T18:59:49.028599Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358805451915816:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:49.028649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T18:59:49.044982Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7494358807323146815:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:49.045024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 1 partitions CallPersQueueGRPC request to localhost:5823 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-17T18:59:52.585499Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:5823 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { Partition ... session cookie 3 consumer session _3_3_3961094466515507400_v1TEvPartitionReady. Aval parts: 1 2025-04-17T19:00:06.723038Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2025-04-17T19:00:06.723129Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 got read request: guid# 2c516ca8-2be1ccb6-a608e1e8-cfbc85fd 2025-04-17T19:00:06.723172Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 performing read request: guid# a81e2a2d-ebdfd92d-776707e7-6e9d833, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-17T19:00:06.723241Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid a81e2a2d-ebdfd92d-776707e7-6e9d833 2025-04-17T19:00:06.721520Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-04-17T19:00:06.721535Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-17T19:00:06.723567Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-17T19:00:06.723587Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-17T19:00:08.937683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:00:08.937723Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:10.549065Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 
size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-17T19:00:10.549108Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-17T19:00:10.549195Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-17T19:00:10.549217Z node 4 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T19:00:10.549368Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T19:00:10.550401Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_15303386695019815033_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1744916403418 CreateTimestampMS: 1744916403416 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1744916403429 CreateTimestampMS: 1744916403426 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1744916403439 CreateTimestampMS: 1744916403437 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1744916403492 CreateTimestampMS: 1744916403483 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3827 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-04-17T19:00:10.550704Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_15303386695019815033_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-17T19:00:10.550745Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_15303386695019815033_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 9a33c728-b74d5df3-f5e9a43a-fec490d5 has messages 1 2025-04-17T19:00:10.550918Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_15303386695019815033_v1 read done: guid# 9a33c728-b74d5df3-f5e9a43a-fec490d5, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2025-04-17T19:00:10.550955Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_15303386695019815033_v1 response to read: guid# 9a33c728-b74d5df3-f5e9a43a-fec490d5 2025-04-17T19:00:10.551330Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_15303386695019815033_v1 Process answer. 
Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-17T19:00:10.557309Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_15303386695019815033_v1 grpc read done: success# 0, data# { } 2025-04-17T19:00:10.557338Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer session _3_2_15303386695019815033_v1 grpc read failed 2025-04-17T19:00:10.557366Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer session _3_2_15303386695019815033_v1 grpc closed 2025-04-17T19:00:10.557396Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer session _3_2_15303386695019815033_v1 is DEAD 2025-04-17T19:00:10.558446Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _3_2_15303386695019815033_v1 2025-04-17T19:00:10.558501Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494358900765210962:2561] destroyed 2025-04-17T19:00:10.558543Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _3_2_15303386695019815033_v1 2025-04-17T19:00:14.554366Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-17T19:00:14.554413Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-17T19:00:14.554511Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2025-04-17T19:00:14.554536Z node 4 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-17T19:00:14.554714Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-17T19:00:14.558714Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1744916403418 CreateTimestampMS: 1744916403416 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1744916403429 CreateTimestampMS: 1744916403426 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1744916403439 CreateTimestampMS: 1744916403437 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1744916403492 CreateTimestampMS: 1744916403483 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7830 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-04-17T19:00:14.559020Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-17T19:00:14.559064Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid a81e2a2d-ebdfd92d-776707e7-6e9d833 has messages 1 2025-04-17T19:00:14.559221Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 read done: guid# a81e2a2d-ebdfd92d-776707e7-6e9d833, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2025-04-17T19:00:14.559246Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 response to read: guid# a81e2a2d-ebdfd92d-776707e7-6e9d833 2025-04-17T19:00:14.559577Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 Process answer. Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-17T19:00:14.567634Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_3961094466515507400_v1 grpc read done: success# 0, data# { } 2025-04-17T19:00:14.567663Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_3961094466515507400_v1 grpc read failed 2025-04-17T19:00:14.567689Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_3961094466515507400_v1 grpc closed 2025-04-17T19:00:14.567713Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_3961094466515507400_v1 is DEAD 2025-04-17T19:00:14.569366Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _3_3_3961094466515507400_v1 2025-04-17T19:00:14.569420Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7494358900765210965:2563] destroyed 2025-04-17T19:00:14.569466Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _3_3_3961094466515507400_v1 2025-04-17T19:00:16.549307Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494358942890873122:2759] TxId: 281474976710726. Ctx: { TraceId: 01js2hz1k4e1p59hh1dy5dsx03, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzN2YyNzMtYjEyYjFlNC1mN2ZhN2Y1OC1jMWNhNTljYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-17T19:00:16.550061Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494358942890873133:2767], TxId: 281474976710726, task: 2. Ctx: { TraceId : 01js2hz1k4e1p59hh1dy5dsx03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZGUzN2YyNzMtYjEyYjFlNC1mN2ZhN2Y1OC1jMWNhNTljYg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494358942890873122:2759], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-17T19:00:16.550064Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494358942890873134:2768], TxId: 281474976710726, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZGUzN2YyNzMtYjEyYjFlNC1mN2ZhN2Y1OC1jMWNhNTljYg==. TraceId : 01js2hz1k4e1p59hh1dy5dsx03. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494358942890873122:2759], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |84.9%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |85.0%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> BackupRestoreS3::RestoreViewDependentOnAnotherView |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] Test command err: Starting YDB, grpc: 24445, msgbus: 5263 2025-04-17T19:00:12.970808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358924398042880:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:12.970851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00016f/r3tmp/tmpzNmMi6/pdisk_1.dat 2025-04-17T19:00:13.435892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T19:00:13.435927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T19:00:13.435939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T19:00:13.435956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T19:00:13.435989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T19:00:13.435998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T19:00:13.436035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T19:00:13.436072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
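The TClient::Ls request/response pairs below are the test harness describing the root path; the same describe-path round trip is available from client code. A sketch with the ydb Python SDK, assuming anonymous access to a local single-node cluster (endpoint and timeout are placeholders):

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/dc-1")
driver.wait(timeout=5)

# describe_path returns the entry corresponding to the PathDescription
# protobuf dumped below (owner, type, effective permissions); exactly
# which fields are surfaced depends on the SDK version.
entry = ydb.SchemeClient(driver).describe_path("/dc-1")
print(entry.name, entry.owner, entry.type)
driver.stop()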
2025-04-17T19:00:13.436379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T19:00:13.486299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-17T19:00:13.495183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-17T19:00:13.495208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:13.507188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T19:00:13.507247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T19:00:13.507332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-17T19:00:13.527131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T19:00:13.527303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T19:00:13.528094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-17T19:00:13.528253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T19:00:13.530409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:13.530491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:13.535880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-17T19:00:13.536472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T19:00:13.536498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T19:00:13.536532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T19:00:13.536549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T19:00:13.536569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T19:00:13.536632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.536956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24445, node 1 2025-04-17T19:00:13.612406Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:13.612445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:13.612460Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:13.612577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5263 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T19:00:13.809910Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924398043109:2115] Handle TEvNavigate describe path dc-1 2025-04-17T19:00:13.809978Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010907:2438] HANDLE EvNavigateScheme dc-1 2025-04-17T19:00:13.811158Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010907:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:13.847175Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010907:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T19:00:13.857215Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010907:2438] Handle TEvDescribeSchemeResult Forward to# [1:7494358928693010906:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
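Further down, the ESchemeOpModifyACL proposals carry a serialized DiffACL, which the schemeshard audit line renders as "add access: +(DS):clusteradmin". From client code this corresponds to a modify-permissions call; a hedged sketch, assuming the ydb Python SDK's scheme client — the subject and permission name are illustrative placeholders, not decoded from the DiffACL bytes in the log:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/dc-1")
driver.wait(timeout=5)

scheme = ydb.SchemeClient(driver)
# A grant maps to the schemeshard's ESchemeOpModifyACL, which the
# FLAT_TX_SCHEMESHARD audit records below log against /dc-1/tenant-db.
scheme.modify_permissions(
    "/dc-1/tenant-db",
    ydb.ModifyPermissionsSettings().grant_permissions(
        "clusteradmin", ("ydb.generic.full",)
    ),
)
driver.stop()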
2025-04-17T19:00:13.874707Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924398043109:2115] Handle TEvProposeTransaction 2025-04-17T19:00:13.874734Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924398043109:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T19:00:13.874837Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924398043109:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494358928693010922:2446] 2025-04-17T19:00:13.944754Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T19:00:13.944834Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T19:00:13.944850Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T19:00:13.944928Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:13.945243Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:13.945411Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T19:00:13.945476Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T19:00:13.945666Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358928693010922:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T19:00:13.947398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T19:00:13.947586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.947735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T19:00:13.947920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T19:00:13.947955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480 2025-04-17T19:00:13.951848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Comple ... 880729:2858] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:17.889221Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358945872880729:2858] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:17.889310Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358945872880729:2858] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-04-17T19:00:17.889352Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358945872880729:2858] txid# 281474976710665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72075186224037891} 2025-04-17T19:00:17.889859Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358945872880729:2858] txid# 281474976710665 HANDLE EvClientConnected 2025-04-17T19:00:17.892021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976710665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:52910" , at schemeshard: 72075186224037891 2025-04-17T19:00:17.892166Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-17T19:00:17.892259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.892275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.892391Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-04-17T19:00:17.892420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-17T19:00:17.892465Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-17T19:00:17.892481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T19:00:17.892498Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-17T19:00:17.892505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T19:00:17.892533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-04-17T19:00:17.892565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-04-17T19:00:17.892582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-04-17T19:00:17.892591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T19:00:17.892599Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-17T19:00:17.892608Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-04-17T19:00:17.892617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-04-17T19:00:17.895402Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358945872880729:2858] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-04-17T19:00:17.895447Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358945872880729:2858] txid# 281474976710665 SEND to# [1:7494358945872880728:2354] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} 2025-04-17T19:00:17.895013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-04-17T19:00:17.895213Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-04-17T19:00:17.895410Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-04-17T19:00:17.895425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.895672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.895778Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-04-17T19:00:17.895794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7494358933451281334:2308], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-17T19:00:17.895820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7494358933451281334:2308], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-17T19:00:17.896434Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-17T19:00:17.896519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-17T19:00:17.896530Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710665 2025-04-17T19:00:17.896543Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, 
LocalPathId: 1], version: 9 TEST clusteradmin triggers auth on tenant 2025-04-17T19:00:17.896562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-04-17T19:00:17.896651Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710665, subscribers: 0 2025-04-17T19:00:17.900482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2025-04-17T19:00:17.971377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358924398042880:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:17.971456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:5263 TClient::Ls request: /dc-1/tenant-db 2025-04-17T19:00:18.051051Z node 1 :TX_PROXY DEBUG: actor# [1:7494358924398043109:2115] Handle TEvNavigate describe path /dc-1/tenant-db 2025-04-17T19:00:18.051148Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950167848039:2870] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-04-17T19:00:18.051456Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950167848039:2870] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:18.051561Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950167848039:2870] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-04-17T19:00:18.053232Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950167848039:2870] Handle TEvDescribeSchemeResult Forward to# [1:7494358950167848038:2869] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: 
"/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-04-17T19:00:18.083314Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-04-17T19:00:18.084137Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T19:00:18.084012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected >> BackupRestoreS3::RestoreViewQueryText >> BackupRestoreS3::TestAllPrimitiveTypes-JSON >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 3060, msgbus: 13733 2025-04-17T19:00:13.034730Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358929185776790:2229];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:13.054187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00016c/r3tmp/tmp4Hylj4/pdisk_1.dat 2025-04-17T19:00:13.429178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-17T19:00:13.429233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T19:00:13.429251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-17T19:00:13.429274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-17T19:00:13.429301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-17T19:00:13.429310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-17T19:00:13.429354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-17T19:00:13.429389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-17T19:00:13.429700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-17T19:00:13.466278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:13.466400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:13.471863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:13.483704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-17T19:00:13.483779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-17T19:00:13.483792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:13.488127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-17T19:00:13.488237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-17T19:00:13.488339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-17T19:00:13.490578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-17T19:00:13.490750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-17T19:00:13.491488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-17T19:00:13.491694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-17T19:00:13.512213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-17T19:00:13.512797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-17T19:00:13.512911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-17T19:00:13.512957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-17T19:00:13.512988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-17T19:00:13.513009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-17T19:00:13.513074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TServer::EnableGrpc on GrpcPort 3060, node 1 2025-04-17T19:00:13.609271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:13.609298Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:13.609305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:13.609395Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13733 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-17T19:00:13.818945Z node 1 :TX_PROXY DEBUG: actor# [1:7494358929185776845:2113] Handle TEvNavigate describe path dc-1 2025-04-17T19:00:13.819007Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777358:2442] HANDLE EvNavigateScheme dc-1 2025-04-17T19:00:13.820106Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777358:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:13.851777Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777358:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-17T19:00:13.867666Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777358:2442] Handle TEvDescribeSchemeResult Forward to# [1:7494358929185777357:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T19:00:13.890770Z node 1 :TX_PROXY DEBUG: actor# [1:7494358929185776845:2113] Handle TEvProposeTransaction 2025-04-17T19:00:13.890797Z node 1 :TX_PROXY DEBUG: actor# [1:7494358929185776845:2113] TxId# 281474976710657 ProcessProposeTransaction 2025-04-17T19:00:13.890914Z node 1 :TX_PROXY DEBUG: actor# [1:7494358929185776845:2113] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7494358929185777371:2448] 2025-04-17T19:00:13.991246Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-17T19:00:13.991333Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-17T19:00:13.991356Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-17T19:00:13.991416Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:13.991726Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:13.991866Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-17T19:00:13.991915Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-17T19:00:13.992043Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358929185777371:2448] txid# 281474976710657 HANDLE EvClientConnected 2025-04-17T19:00:13.994351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-17T19:00:13.994604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.994750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T19:00:13.994952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T19:00:13.994991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480 2025-04-17T19:00:13.997448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Comple ... 2025-04-17T19:00:17.820824Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358946365647170:2853] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-04-17T19:00:17.820866Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358946365647170:2853] txid# 281474976710665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72075186224037891} 2025-04-17T19:00:17.821464Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358946365647170:2853] txid# 281474976710665 HANDLE EvClientConnected 2025-04-17T19:00:17.823460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976710665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:57196" , at schemeshard: 72075186224037891 2025-04-17T19:00:17.823640Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-17T19:00:17.823776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.823792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.823961Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-04-17T19:00:17.823995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-17T19:00:17.824065Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-17T19:00:17.824076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T19:00:17.824099Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-17T19:00:17.824106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-17T19:00:17.824134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-04-17T19:00:17.824183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-04-17T19:00:17.824201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-04-17T19:00:17.824209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 
2025-04-17T19:00:17.824217Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-17T19:00:17.824226Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-04-17T19:00:17.824234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-04-17T19:00:17.826875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-04-17T19:00:17.827021Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-04-17T19:00:17.827193Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-04-17T19:00:17.827209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.827404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-17T19:00:17.827478Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-04-17T19:00:17.827494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7494358935926927618:2307], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-17T19:00:17.827514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7494358935926927618:2307], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-17T19:00:17.827710Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358946365647170:2853] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-04-17T19:00:17.827770Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358946365647170:2853] txid# 281474976710665 SEND to# [1:7494358946365647169:2354] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} 2025-04-17T19:00:17.828311Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-17T19:00:17.828409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-17T19:00:17.828423Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710665 2025-04-17T19:00:17.828438Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-04-17T19:00:17.828455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-04-17T19:00:17.828525Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710665, subscribers: 0 2025-04-17T19:00:17.835084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 TEST clusteradmin triggers auth on tenant TClient is connected to server localhost:13733 TClient::Ls request: /dc-1/tenant-db 2025-04-17T19:00:18.011258Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358929185776790:2229];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:18.011347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:18.021059Z node 1 :TX_PROXY DEBUG: actor# [1:7494358929185776845:2113] Handle TEvNavigate describe path /dc-1/tenant-db 2025-04-17T19:00:18.021136Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950660614476:2862] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-04-17T19:00:18.021486Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950660614476:2862] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:18.021611Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950660614476:2862] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-04-17T19:00:18.023292Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358950660614476:2862] Handle TEvDescribeSchemeResult Forward to# [1:7494358950660614475:2861] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true 
CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-04-17T19:00:18.050490Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-04-17T19:00:18.051096Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-04-17T19:00:18.051167Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T19:00:18.051586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-17T19:00:18.053142Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView >> SystemView::PartitionStatsFields >> ShowCreateView::FromTable |85.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BackupRestore::RestoreViewDependentOnAnotherView >> SystemView::ShowCreateTableKeyBloomFilter |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ShowCreateView::WithPairedTablePathPrefix |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BackupRestoreS3::RestoreViewDependentOnAnotherView [FAIL] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 15654, MsgBus: 10735 2025-04-17T19:00:16.310339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358941005751563:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:16.310432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0003ad/r3tmp/tmpinMMxa/pdisk_1.dat 2025-04-17T19:00:16.720421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:16.724662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:16.724749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:16.727528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15654, node 1 2025-04-17T19:00:16.865208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:16.865239Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:16.865247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:16.865361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10735 TClient is connected to server localhost:10735 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:17.385731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:19.236953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358953890654072:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.237093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.442408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T19:00:19.559721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358953890654196:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.559821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.566358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358953890654201:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.569796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T19:00:19.579686Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-17T19:00:19.579925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358953890654203:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T19:00:19.661593Z node 1 :TX_PROXY ERROR: Actor# [1:7494358953890654243:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:20.197380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:20.609497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T19:00:21.048358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T19:00:21.311643Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358941005751563:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.311697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:21.451448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T19:00:21.888483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.383877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T19:00:22.432741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T19:00:24.113137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 22352, MsgBus: 27477 2025-04-17T19:00:16.526023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358943814408600:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:16.526140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00039b/r3tmp/tmpHom3yv/pdisk_1.dat 2025-04-17T19:00:16.948291Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22352, node 1 2025-04-17T19:00:16.983936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:16.984056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:16.994374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:17.033776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:17.033801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:17.033808Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:17.033934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27477 TClient is connected to server localhost:27477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:17.525482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:19.208953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358956699311142:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.209065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.498111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T19:00:19.669279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358956699311266:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.669483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.669585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358956699311271:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.673480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T19:00:19.683248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358956699311273:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T19:00:19.741982Z node 1 :TX_PROXY ERROR: Actor# [1:7494358956699311314:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:20.310824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:20.750211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T19:00:21.212583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T19:00:21.526285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358943814408600:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.526367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:21.754730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.151401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.607362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T19:00:22.668729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T19:00:24.347550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> BackupRestoreS3::RestoreViewQueryText [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [FAIL] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [FAIL] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 11089, MsgBus: 21408 2025-04-17T19:00:16.538792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358942779771723:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:16.538874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000389/r3tmp/tmpGuJOIf/pdisk_1.dat 2025-04-17T19:00:16.924906Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11089, node 1 2025-04-17T19:00:16.966344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:16.966483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:16.978059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:17.033369Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:17.033391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:17.033396Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:17.033523Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21408 TClient is connected to server localhost:21408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:17.544348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:19.499562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358955664674276:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.499682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.733977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-17T19:00:19.889624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358955664674399:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.889673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.889961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358955664674404:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:19.892930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-17T19:00:19.900511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358955664674406:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T19:00:20.004274Z node 1 :TX_PROXY ERROR: Actor# [1:7494358955664674447:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:20.564192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:20.967832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T19:00:21.462882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T19:00:21.538674Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358942779771723:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.538754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:21.936244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.402438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.892191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T19:00:22.927721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T19:00:24.556602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> BackupRestoreS3::TestAllPrimitiveTypes-JSON [FAIL] >> Viewer::SelectStringWithBase64Encoding [GOOD] >> BackupRestore::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP64 [GOOD] >> Viewer::SelectStringWithNoBase64Encoding [GOOD] |87.9%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::RestoreViewDependentOnAnotherView [GOOD] Test command err: 2025-04-17T19:00:21.350249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358965737613530:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.350311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmp2oKLwz/pdisk_1.dat 2025-04-17T19:00:21.905682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.905778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.911270Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:21.915081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61388, node 1 2025-04-17T19:00:22.067423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:22.067445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:22.067458Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:22.067567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:22.398083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T19:00:22.427116Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T19:00:24.526847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358978622516445:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.526930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.530270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358978622516453:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.534649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.560167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358978622516459:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:24.631212Z node 1 :TX_PROXY ERROR: Actor# [1:7494358978622516540:2690] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:25.138415Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2hzaad99dzhcjq2hmpymsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzlkZmUzYzktOGQyYzhkYzktOTZjZjhjY2UtYWQ3ODZiNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/"Create temporary directory "/Root/~backup_20250417T190025" in databaseProcess "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView"Backup view "/Root/dependentView" to "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView"Write view into "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView/permissions.pb"Process "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView"Backup view "/Root/baseView" to "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView"Write view into "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView/permissions.pb"Remove temporary directory "/Root/~backup_20250417T190025" in database2025-04-17T19:00:25.330663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/" to "/Root"Process "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView"Restore view "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView" to "/Root/baseView"Read view from "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView/create_view.sql"Created "/Root/baseView"Restore ACL "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView" to "/Root/baseView"Read ACL from "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/baseView/permissions.pb"2025-04-17T19:00:25.508888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView"Restore view "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView" to "/Root/dependentView"Read view from "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView/create_view.sql"Created "/Root/dependentView"Restore ACL "/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView" to "/Root/dependentView"Read ACL from
"/home/runner/.ya/build/build_root/26k2/00030b/r3tmp/tmpkM3t9C/dependentView/permissions.pb"2025-04-17T19:00:25.600575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710670:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T19:00:25.703068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2hzawabzrcr6y46yz34vpf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzlkZmUzYzktOGQyYzhkYzktOTZjZjhjY2UtYWQ3ODZiNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-04-17T19:00:10.161408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358918020109039:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:10.161457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00048d/r3tmp/tmpp7IW5g/pdisk_1.dat 2025-04-17T19:00:10.659439Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:10.669150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:10.669233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:10.688057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3153, node 1 2025-04-17T19:00:11.033211Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:11.033245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:11.033264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:11.033474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:11.555513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T19:00:11.882126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:55428" , at schemeshard: 72057594046644480 2025-04-17T19:00:11.882545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T19:00:11.882573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-04-17T19:00:11.888680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T19:00:11.889814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-17T19:00:11.896015Z node 1 :TX_PROXY ERROR: Actor# [1:7494358922315077328:2613] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
<main>: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C9935BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CE50670) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8673 (0x1C4C5C71) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C4EEAF8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CE87866) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CE571F9) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C4EDCC4) NUnitTest::TTestFactory::Execute()+2438 (0x1CE58AC6) NUnitTest::RunMain(int, char**)+5213 (0x1CE81DDD) ??+0 (0x7FD3AD5A3D90) __libc_start_main+128 (0x7FD3AD5A3E40) _start+41 (0x19302029) ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP64 [GOOD] Test command err: 2025-04-17T19:00:20.686649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358960728645968:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.686711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpBrGSze/pdisk_1.dat 2025-04-17T19:00:21.292370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.292447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.301978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:21.327027Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15027, node 1 2025-04-17T19:00:21.434550Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:21.434777Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:21.659119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:21.659147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:21.659154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:21.659263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8177 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:22.013833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.176949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358977908516211:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.177067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.177365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358977908516223:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.181505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.208296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358977908516225:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:24.281176Z node 1 :TX_PROXY ERROR: Actor# [1:7494358977908516298:2687] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:24.518865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:00:24.801672Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2hz9za2rqf5yvr6vkez7hb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzlkYWQzZTMtNTY1MDM4NTYtZGVhMWYzODQtZmM2ZjJlYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:24.945219Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01js2hza3qdeevt0pj3c6esthj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzlkYWQzZTMtNTY1MDM4NTYtZGVhMWYzODQtZmM2ZjJlYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/"Create temporary directory "/Root/~backup_20250417T190024" in databaseProcess "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table"Copy tables: { src: "/Root/Timestamp64Table", dst: "/Root/~backup_20250417T190024/Timestamp64Table" }Backup table "/Root/~backup_20250417T190024/Timestamp64Table" to "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table"Describe table "/Root/~backup_20250417T190024/Timestamp64Table"Write scheme into "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table/scheme.pb"Describe table "/Root/Timestamp64Table"Write ACL into "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table/permissions.pb"Read table "/Root/~backup_20250417T190024/Timestamp64Table"Write data into "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table/data_00.csv"Drop table "/Root/~backup_20250417T190024/Timestamp64Table"2025-04-17T19:00:25.282825Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250417T190024" in database2025-04-17T19:00:25.306610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/" to "/Root"2025-04-17T19:00:25.382262Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/" to "/Root"Process "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table"Read scheme from "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table" to "/Root/Timestamp64Table"2025-04-17T19:00:25.417170Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Created "/Root/Timestamp64Table"Read data from "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table/data_00.csv"2025-04-17T19:00:25.575366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2hzarnar1vb9dc8n72xzfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBkOWU2NzAtNzZkN2ZmZTYtNTVkMjc3YS0yMDFkMzg3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table" to "/Root/Timestamp64Table"Read ACL from "/home/runner/.ya/build/build_root/26k2/000320/r3tmp/tmpj0LXw4/Timestamp64Table/permissions.pb"2025-04-17T19:00:25.612394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-17T19:00:25.687232Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358960728645968:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:25.687306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:25.732202Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01js2hzax40k0t1zqpwge9sczj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzlkYWQzZTMtNTY1MDM4NTYtZGVhMWYzODQtZmM2ZjJlYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SelectStringWithBase64Encoding [GOOD] Test command err: 2025-04-17T19:00:04.329393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358891711891019:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:04.329566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:04.747190Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:04.757567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:04.757747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:04.766440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19011, node 1 2025-04-17T19:00:04.930832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:04.930855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:04.930865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:04.931011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5064 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:05.458737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:05.499282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.503849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:07.550957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358904596793620:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.551068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358904596793612:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.551368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.555093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:07.563712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358904596793626:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T19:00:07.660460Z node 1 :TX_PROXY ERROR: Actor# [1:7494358904596793677:2358] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:10.028863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494358916684488300:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:10.028970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:10.171671Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2543, node 2 2025-04-17T19:00:10.203867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:10.203982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:10.236732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:10.258701Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:10.258725Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:10.258732Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:10.258845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:10.533851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:10.548732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:10.552721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T19:00:13.487020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494358929569390872:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.487115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.487601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494358929569390888:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.491381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:13.507348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494358929569390890:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T19:00:13.605046Z node 2 :TX_PROXY ERROR: Actor# [2:7494358929569390942:2357] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:15.282597Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494358938198062649:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:15.282696Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:15.413834Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:15.447648Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:15.447749Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:15.449934Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12496, node 3 2025-04-17T19:00:15.506351Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:15.506375Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:15.506385Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:15.506534Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-17T19:00:15.831924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T19:00:15.895293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:15.901777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:15.909278Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-17T19:00:18.818208Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494358951082965234:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:18.818274Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494358951082965211:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:18.818873Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:18.822402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:18.831972Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494358951082965248:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T19:00:18.917235Z node 3 :TX_PROXY ERROR: Actor# [3:7494358951082965299:2355] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:20.670892Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494358959665998349:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.671046Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:20.816683Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:20.832259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:20.832353Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:20.834061Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29558, node 4 2025-04-17T19:00:20.950953Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:20.950980Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:20.950990Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:20.951160Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:21.371704Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T19:00:21.383585Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T19:00:21.400679Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:21.404413Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:21.409997Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-17T19:00:24.746615Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494358976845868206:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.746757Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.747135Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494358976845868241:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.752671Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.767886Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494358976845868243:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T19:00:24.830843Z node 4 :TX_PROXY ERROR: Actor# [4:7494358976845868295:2359] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 18942, MsgBus: 24704 2025-04-17T19:00:18.267120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358950117135149:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:18.267348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000282/r3tmp/tmp2M2oY1/pdisk_1.dat 2025-04-17T19:00:18.679744Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18942, node 1 2025-04-17T19:00:18.709446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:18.709550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:18.710367Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:18.710420Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:18.712245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:18.817548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:18.817581Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:18.817595Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:18.817767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24704 TClient is connected to server localhost:24704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T19:00:19.501055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:19.538432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:19.681444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:19.826705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:19.892663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:21.122873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358963002038808:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:21.123003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:22.039643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.071599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.113585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.137027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.209608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.303713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.344197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T19:00:22.449315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358967297006785:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:22.449384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:22.449733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358967297006790:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:22.455914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T19:00:22.473846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358967297006793:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T19:00:22.573625Z node 1 :TX_PROXY ERROR: Actor# [1:7494358967297006848:3561] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:23.268270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358950117135149:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:23.268377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:24.028898Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Q1NzQxYmQtYWY1NjYzYWQtNjQ0YzFkMGUtZWI4MzY5NTI=, ActorId: [1:7494358971591974486:2517], ActorState: ExecuteState, TraceId: 01js2hz9905svmz0q1mkce7pk6, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.082291Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I2NmIwNzUtN2NjNWVmYzItZWEzMTJmMzEtZGUwNzA0ZGE=, ActorId: [1:7494358975886941831:2528], ActorState: ExecuteState, TraceId: 01js2hz9c72dcvgdnpb4nahyck, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.181390Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWU0ZTljMjAtZTZhZWI4N2MtZDFjM2I5ZDEtYTJhZDcyNDI=, ActorId: [1:7494358975886941850:2536], ActorState: ExecuteState, TraceId: 01js2hz9dkbvsayhht2h8dsz5g, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.184022Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Q5YjU3MGUtOTMxYWE3OTctYzUxNjkzMDMtNjllZDY2MGY=, ActorId: [1:7494358975886941841:2532], ActorState: ExecuteState, TraceId: 01js2hz9cycd8qe079sq8agag5, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.186783Z node 1 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 9 2025-04-17T19:00:24.283939Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjFmMGNlZTAtMzczYWI4YWEtOWUyNzJmMjQtZTMxMjUxZjU=, ActorId: [1:7494358975886941900:2548], ActorState: ExecuteState, TraceId: 01js2hz9h38n1r8rxz225s1a5d, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.352862Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmY5N2M3ODYtZTEyMmNmN2UtMTVkNjU3Y2MtYjEwNWFjOQ==, ActorId: [1:7494358975886941932:2562], ActorState: ExecuteState, TraceId: 01js2hz9ke9r808c0ykt8aq2kt, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.418478Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJkMzQ3MDctZDk1NDMwODQtZGIxNDRiMTUtOTM0NzJhMDU=, ActorId: [1:7494358975886941946:2568], ActorState: ExecuteState, TraceId: 01js2hz9mpdxffjy8xt9j93c6g, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.429169Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTdiNzM4OTgtODJjZTAxNTQtMWE3NjFmZWUtYTAyM2JjNjk=, ActorId: [1:7494358975886941982:2574], ActorState: ExecuteState, TraceId: 01js2hz9p24ezwr65r2qmad8pt, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.625081Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWMwYjliNmMtMmVmNzVlNDAtNTNhMGNjMzktMzliN2E4MTE=, ActorId: [1:7494358975886942065:2600], ActorState: ExecuteState, TraceId: 01js2hz9w3fdb92m8e6awedyjp, Create QueryResponse for 
error on request, msg: 2025-04-17T19:00:24.793537Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjkyNThkNzEtZGY0MDYyNTEtMjUzYzFiNmMtYjJlMGNhNGY=, ActorId: [1:7494358975886942124:2615], ActorState: ExecuteState, TraceId: 01js2hza043533hhnv58qg9e6n, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.871705Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmI3YWVhYTYtZDRhNzJhZi1jYzkwNjhiZC0xYzM0NjE0OQ==, ActorId: [1:7494358975886942146:2621], ActorState: ExecuteState, TraceId: 01js2hza3e2k85x1qq054x887y, Create QueryResponse for error on request, msg: 2025-04-17T19:00:24.941308Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmE0ZTM0N2UtYzlmZGEzN2MtZWU2OTExNTctNDRmYmVjZDc=, ActorId: [1:7494358975886942192:2630], ActorState: ExecuteState, TraceId: 01js2hza5h5aanpsfchgw43an2, Create QueryResponse for error on request, msg: 2025-04-17T19:00:25.014754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWU1YTc3NGUtMjA3ZTNjNDctY2Q3ODkzOTMtYzVmNDRlNTg=, ActorId: [1:7494358975886942206:2636], ActorState: ExecuteState, TraceId: 01js2hza7nb237g0e39tkn7nvv, Create QueryResponse for error on request, msg: 2025-04-17T19:00:25.170128Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTcyMDhmYmEtZDI1YmNkZDctMmYwNDQ5MDItZWUwNmU4NTc=, ActorId: [1:7494358980181909550:2652], ActorState: ExecuteState, TraceId: 01js2hzaccfg750b2wb32wafq3, Create QueryResponse for error on request, msg: 2025-04-17T19:00:25.425314Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDM0YWI3ZTAtYTRjZmQwNmQtYjNiN2FkZjYtZWM1OWEzNGI=, ActorId: [1:7494358980181909643:2675], ActorState: ExecuteState, TraceId: 01js2hzam2949pmjz55xcczkrm, Create QueryResponse for error on request, msg: 2025-04-17T19:00:25.600774Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjM2MmExMWUtM2ZlMTc5M2YtNTRiOWU4Yi03MjRhMDQ2Zg==, ActorId: [1:7494358980181909718:2690], ActorState: ExecuteState, TraceId: 01js2hzasc32wknv2t0xx23fpd, Create QueryResponse for error on request, msg: 2025-04-17T19:00:25.700756Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494358980181909773:2703] TxId: 281474976710675. Ctx: { TraceId: 01js2hzawd1kvm1qp0kbnvpsyp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFkMDEzZWQtMzU5MDE3ZjQtOTY3NGVlZTgtMjhiYjczZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-17T19:00:25.701139Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWFkMDEzZWQtMzU5MDE3ZjQtOTY3NGVlZTgtMjhiYjczZDg=, ActorId: [1:7494358980181909747:2703], ActorState: ExecuteState, TraceId: 01js2hzawd1kvm1qp0kbnvpsyp, Create QueryResponse for error on request, msg: 2025-04-17T19:00:25.749716Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494358980181909778:2704], TxId: 281474976710675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWFkMDEzZWQtMzU5MDE3ZjQtOTY3NGVlZTgtMjhiYjczZDg=. CustomerSuppliedId : . TraceId : 01js2hzawd1kvm1qp0kbnvpsyp. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494358980181909773:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T19:00:25.750271Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494358980181909780:2705], TxId: 281474976710675, task: 2. Ctx: { TraceId : 01js2hzawd1kvm1qp0kbnvpsyp. SessionId : ydb://session/3?node_id=1&id=MWFkMDEzZWQtMzU5MDE3ZjQtOTY3NGVlZTgtMjhiYjczZDg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494358980181909773:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T19:00:25.751507Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494358980181909781:2706], TxId: 281474976710675, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWFkMDEzZWQtMzU5MDE3ZjQtOTY3NGVlZTgtMjhiYjczZDg=. TraceId : 01js2hzawd1kvm1qp0kbnvpsyp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494358980181909773:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T19:00:25.751819Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494358980181909782:2707], TxId: 281474976710675, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWFkMDEzZWQtMzU5MDE3ZjQtOTY3NGVlZTgtMjhiYjczZDg=. TraceId : 01js2hzawd1kvm1qp0kbnvpsyp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7494358980181909773:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T19:00:25.752028Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494358980181909783:2708], TxId: 281474976710675, task: 5. Ctx: { TraceId : 01js2hzawd1kvm1qp0kbnvpsyp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWFkMDEzZWQtMzU5MDE3ZjQtOTY3NGVlZTgtMjhiYjczZDg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7494358980181909773:2703], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-17T19:00:25.752616Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916425721, txId: 281474976710674] shutting down 2025-04-17T19:00:25.842457Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjk0NWJhYzItM2QwOWI5NTUtZTNjNjFlYmEtYzc0OGQ4MWE=, ActorId: [1:7494358980181909819:2714], ActorState: ExecuteState, TraceId: 01js2hzazs8emf1nn4d311p5m5, Create QueryResponse for error on request, msg: 2025-04-17T19:00:25.942364Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916425966, txId: 281474976710677] shutting down 2025-04-17T19:00:26.060426Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODljN2NmZjUtZWNiOTMxYWMtZDAzZWM3YTAtN2I1MDdkNDM=, ActorId: [1:7494358980181909951:2738], ActorState: ExecuteState, TraceId: 01js2hzb7bcrndwmz9189h98mz, Create QueryResponse for error on request, msg: 2025-04-17T19:00:26.145759Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916426176, txId: 281474976710679] shutting down 2025-04-17T19:00:26.262502Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTVjNDFlYmYtN2RjMWM3NTMtY2IyMWQ1NmEtNWM3ZjllMA==, ActorId: [1:7494358984476877372:2763], ActorState: ExecuteState, TraceId: 01js2hzbdg9j2yxp6g0nawpt1n, Create QueryResponse for error on request, msg: 2025-04-17T19:00:26.378866Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ3NTA5MmUtMTg5ZjJlY2QtZTFiM2FkZDQtMzk0ZGUzM2M=, ActorId: [1:7494358984476877393:2772], ActorState: ExecuteState, TraceId: 01js2hzbh03bsbzwe99aevz3p2, Create QueryResponse for error on request, msg: 2025-04-17T19:00:26.575268Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916426610, txId: 281474976710681] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SelectStringWithNoBase64Encoding [GOOD] Test command err: 2025-04-17T19:00:04.389908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358893357087853:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:04.390133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:04.731606Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30198, node 1 2025-04-17T19:00:04.805641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:04.805834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:04.807953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:04.930166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:04.930207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:04.930214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:04.930334Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26625 
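
The abort fan-out above — the executer for TxId 281474976710675 reports ABORTED with "Client lost", each of its five compute tasks then logs "Terminate execution", and the snapshot is discarded — is the server-side trace of a caller abandoning an in-flight query. A minimal sketch of a client that provokes it, assuming the in-tree C++ SDK; the include path varies by SDK version, and the endpoint and table name are placeholders:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        // Placeholder endpoint/database; a real test takes these from its fixture.
        TDriver driver(TDriverConfig()
            .SetEndpoint("localhost:2136")
            .SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.GetSession().GetValueSync().GetSession();

        // Start a long-running query and deliberately never wait on it:
        // dropping the connection mid-flight is what KQP logs as
        // ABORTED "Client lost" and fans out to the remaining compute
        // tasks as "Terminate execution".
        auto pending = session.ExecuteDataQuery(
            "SELECT COUNT(*) FROM `/Root/huge_table`;",  // hypothetical table
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx());

        driver.Stop(true);  // abandon `pending` instead of GetValueSync()
        return 0;
    }
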
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:05.406489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:05.450271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.453657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:05.457875Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-17T19:00:07.500263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358906241990452:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.500263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358906241990444:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.500374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:07.505433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:07.513484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358906241990458:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T19:00:07.577830Z node 1 :TX_PROXY ERROR: Actor# [1:7494358906241990509:2358] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:09.884787Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494358911672656052:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:09.884834Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:10.014970Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:10.040678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:10.040762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:10.042438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19782, node 2 2025-04-17T19:00:10.088853Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:10.088877Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:10.088884Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:10.089017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:10.376010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
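
Each node's warm-up repeats the benign race visible just above: the workload service fails to fetch the `default` resource pool (NOT_FOUND), schedules a create, doublechecks once the transaction completes, and the proxy then answers StatusAlreadyExists because a concurrent creator won — which the request explicitly accepts ("path exist, request accepts it"). The pattern is create-if-absent with "already exists" treated as success. A hedged sketch of that idiom through the public SDK (the real pool creation goes through an ESchemeOpCreateResourcePool suboperation, not necessarily this call):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Create-if-absent: losing the creation race is not an error.
    // Returns true if the object exists after the call, whether this
    // call created it or a concurrent creator did.
    bool CreateIfAbsent(TSession& session, const TString& ddl) {
        TStatus status = session.ExecuteSchemeQuery(ddl).GetValueSync();
        return status.IsSuccess()
            || status.GetStatus() == EStatus::ALREADY_EXISTS;
    }
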
2025-04-17T19:00:10.383906Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T19:00:10.398570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:10.401485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:13.148031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494358928852525908:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.148224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.148982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7494358928852525943:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.153297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:13.167636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7494358928852525945:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T19:00:13.228622Z node 2 :TX_PROXY ERROR: Actor# [2:7494358928852525996:2356] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:14.838666Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7494358932805946261:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:14.838719Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:14.966686Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:14.997574Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:14.997668Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:14.999077Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63703, node 3 2025-04-17T19:00:15.051978Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:15.052007Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:15.052015Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:15.052149Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:15.361481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
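
Before any of this, every client loops "WaitRootIsUp 'Root'..." until TClient::Ls answers SUCCESS — the readiness probe that opens each per-node dump here. A rough equivalent over the public scheme client, with an illustrative poll interval and timeout:

    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

    #include <util/datetime/base.h>

    using namespace NYdb;
    using namespace NYdb::NScheme;

    // Poll the root path until the scheme shard responds, mirroring the
    // harness's "WaitRootIsUp 'Root'... success." loop.
    bool WaitRootIsUp(TSchemeClient& scheme, TDuration timeout) {
        const TInstant deadline = TInstant::Now() + timeout;
        while (TInstant::Now() < deadline) {
            auto result = scheme.DescribePath("/Root").GetValueSync();
            if (result.IsSuccess()) {
                return true;
            }
            Sleep(TDuration::MilliSeconds(100));  // illustrative interval
        }
        return false;
    }
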
2025-04-17T19:00:15.369069Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-17T19:00:15.380611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:15.384422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:18.559957Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494358949985816141:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:18.560051Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7494358949985816149:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:18.560129Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:18.564987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:18.575912Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7494358949985816155:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-17T19:00:18.643926Z node 3 :TX_PROXY ERROR: Actor# [3:7494358949985816206:2356] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:20.746394Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7494358960334499782:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.746563Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:20.973213Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:20.990539Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:20.990636Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:20.992413Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23741, node 4 2025-04-17T19:00:21.098241Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:21.098264Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:21.098274Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:21.098427Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21974 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:21.489009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:21.495082Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T19:00:21.506439Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
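
The "Scheduled retry for error" lines show the same recovery shape in actor form: an attempt fails with a transient status (NOT_FOUND while the pool is still being created, timeouts while `.metadata/initialization/migrations` is not yet reachable) and is re-driven after a delay. A generic sketch of that bounded-retry shape; the attempt budget and backoff are assumptions, not values taken from the log:

    #include <util/datetime/base.h>

    #include <functional>

    // Bounded retry with linear backoff: transient failures are retried,
    // anything else is handed straight back to the caller.
    template <typename TResult>
    TResult RetryTransient(const std::function<TResult()>& attempt,
                           const std::function<bool(const TResult&)>& isTransient,
                           int maxAttempts = 5) {
        TResult last = attempt();
        for (int i = 1; i < maxAttempts && isTransient(last); ++i) {
            Sleep(TDuration::MilliSeconds(100 * i));  // linear backoff
            last = attempt();
        }
        return last;
    }
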
2025-04-17T19:00:21.509865Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:00:24.940080Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494358977514369582:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.940184Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.940528Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7494358977514369609:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.945404Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.966033Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7494358977514369611:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-17T19:00:25.062836Z node 4 :TX_PROXY ERROR: Actor# [4:7494358981809336959:2359] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.4%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |88.7%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> SystemView::PartitionStatsFields [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreViewDependentOnAnotherView [FAIL] Test command err: 2025-04-17T19:00:20.443249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358958391300867:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.443451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000332/r3tmp/tmpUWmyJU/pdisk_1.dat 2025-04-17T19:00:20.940408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:20.948262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:20.948316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:20.955523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20366, node 1 2025-04-17T19:00:21.110577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:21.110608Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:21.110614Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:21.110730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14685 WaitRootIsUp 'Root'... 
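
The failure dump that follows ends with `WaitForOperation` at ydb_backup_ut.cpp:2227 asserting SUCCESS and getting UNDETERMINED — the S3 export operation was still settling when the poll budget ran out. A hedged reconstruction of such a helper, following the signature visible in that backtrace; the SDK calls, the operation-id type, and the treatment of in-flight operations are approximations:

    #include <ydb/public/sdk/cpp/client/ydb_operation/operation.h>
    #include <ydb/public/lib/operation_id/operation_id.h>

    #include <util/datetime/base.h>

    using namespace NYdb;

    // Poll an async operation (e.g. NExport::TExportToS3Response) until
    // it leaves the in-flight states. The assertion in the backtrace
    // fires precisely because UNDETERMINED can outlive the whole budget.
    template <typename TOperation>
    bool WaitForOperation(NOperation::TOperationClient& client,
                          const NKikimr::NOperationId::TOperationId& id,
                          int maxRetries, TDuration delay) {
        for (int i = 0; i < maxRetries; ++i) {
            TOperation op = client.Get<TOperation>(id).GetValueSync();
            if (op.Ready()) {
                return op.Status().GetStatus() == EStatus::SUCCESS;
            }
            Sleep(delay);
        }
        return false;  // still UNDETERMINED / in progress after maxRetries
    }
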
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:21.481651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:23.760314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358971276203782:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:23.760464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:23.760769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358971276203794:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:23.766383Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958391301086:2141] Handle TEvProposeTransaction 2025-04-17T19:00:23.766422Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958391301086:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:00:23.766471Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958391301086:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358971276203797:2638] 2025-04-17T19:00:23.847915Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:23.847991Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:23.848008Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:23.850006Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:23.850077Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:23.850525Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:23.850648Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:23.850687Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:00:23.850828Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:00:23.852219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:23.856881Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:00:23.856938Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203797:2638] txid# 281474976710658 SEND to# [1:7494358971276203796:2344] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:00:23.882224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358971276203796:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:23.950275Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958391301086:2141] Handle TEvProposeTransaction 2025-04-17T19:00:23.950320Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958391301086:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:00:23.950381Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958391301086:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494358971276203877:2698] 2025-04-17T19:00:23.952945Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:23.953004Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:23.953021Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:23.953859Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:23.953926Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:23.954060Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:23.958245Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:23.958331Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:00:23.958499Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:00:23.961211Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:00:23.961412Z node 1 :TX_PROXY ERROR: Actor# [1:7494358971276203877:2698] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:23.961440Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358971276203877:2698] txid# 281474976710659 SEND to# [1:7494358971276203796:2344] Source {TEvProposeTransactionSt ... Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:24.560827Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358975571171349:2365] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:24.563840Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358975571171349:2365] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710663 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:63938" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/baseView" destination_prefix: "/baseView" } items { source_path: "/Root/dependentView" destination_prefix: "/dependentView" } } StartTime { seconds: 1744916424 } } 2025-04-17T19:00:24.566926Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.566953Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757 2025-04-17T19:00:24.566973Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757, id# 281474976710663, itemIdx# 4294967295 2025-04-17T19:00:24.567086Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710663 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 8] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:56852' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 2 PendingItems: 2 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/baseView' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:00:24.588995Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710663 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 8] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:56852' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 2 PendingItems: 2 PendingDropItems: 0 }, item# { Idx: 1 SourcePathName: '/Root/dependentView' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 7] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:00:24.591430Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.591495Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494358975571171366:2824], status: StatusSuccess 2025-04-17T19:00:24.592130Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494358975571171367:2825], status: StatusSuccess REQUEST: 
PUT /test_bucket/dependentView/create_view.sql HTTP/1.1 HEADERS: Host: localhost:63938 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 886229A0-C778-4D3E-A53B-38E045D45EE3 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=d36d97dff2ebe40e9043e184878259f0cb4061b6e9b2b2b302cbb8cced5700af content-length: 141 content-md5: l9g5vf/dWNoOWE5C9y3Unw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 6ad3a25be46dca1d1cdd95d4b66bd920e2b333607cd84bef890cf4363d1836bc x-amz-date: 20250417T190024Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/dependentView/create_view.sql / / 141 2025-04-17T19:00:24.632407Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494358975571171367:2825], result: PutObjectResult { ETag: 97d839bdffdd58da0e584e42f72dd49f } REQUEST: PUT /test_bucket/baseView/create_view.sql HTTP/1.1 HEADERS: Host: localhost:63938 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 91D3EFBE-17E4-4DDC-9EE6-26AF764CD95E amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=1e61accd2250f509140b6fa306151596acf5578ceb822f7c0f5fc098e550bd4d content-length: 117 content-md5: lBFaxqtmzEFiACQ23xQnRQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 4db1b6162f0221c05acd67f55eff5f059abb6c30338af5b3d0487274adf295e5 x-amz-date: 20250417T190024Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/baseView/create_view.sql / / 117 REQUEST: PUT /test_bucket/dependentView/metadata.json HTTP/1.1 HEADERS: Host: localhost:63938 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7C5D8B6E-2554-40A0-8622-375FCD3DC1F9 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=28ba6ee631735731c9317c8b377ac70d384220fc8dd5e468f76ede777e16dc41 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T190024Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/dependentView/metadata.json / / 31 REQUEST: PUT /test_bucket/baseView/metadata.json HTTP/1.1 HEADERS: Host: localhost:63938 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C2505892-DF0C-4A8A-B80A-62740182FB2F amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, 
SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=91f34650a58a29b82618ea67cbf0ceb26c1b941b9979193c81c97b1cb88bd375 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T190024Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/baseView/metadata.json / / 31 2025-04-17T19:00:24.643868Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494358975571171366:2824], result: PutObjectResult { ETag: 94115ac6ab66cc4162002436df142745 } 2025-04-17T19:00:24.644026Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494358975571171367:2825], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:00:24.644044Z node 1 :EXPORT INFO: Finish, self: [1:7494358975571171367:2825], success: 1, error: 2025-04-17T19:00:24.644485Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.644534Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710663, itemIdx# 1, success# 1, error# 2025-04-17T19:00:24.646391Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.650954Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494358975571171366:2824], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:00:24.650978Z node 1 :EXPORT INFO: Finish, self: [1:7494358975571171366:2824], success: 1, error: 2025-04-17T19:00:24.652732Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.652756Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710663, itemIdx# 0, success# 1, error# 2025-04-17T19:00:24.654701Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.775101Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358975571171381:2367] [0] Resolve database: name# /Root 2025-04-17T19:00:24.775699Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358975571171381:2367] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:24.775716Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358975571171381:2367] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:24.776348Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358975571171381:2367] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710663 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:63938" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/baseView" destination_prefix: "/baseView" } items { source_path: "/Root/dependentView" destination_prefix: "/dependentView" } } StartTime { seconds: 1744916424 } } assertion failed at 
ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B6D782) NTestSuiteBackupRestoreS3::TTestCaseRestoreViewDependentOnAnotherView::Execute_(NUnitTest::TTestContext&)+1604 (0x18BC5264) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7F56A85A6D90) __libc_start_main+128 (0x7F56A85A6E40) _start+41 (0x164AC029) |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreViewQueryText [FAIL] Test command err: 2025-04-17T19:00:20.684300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358959078461075:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.684345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000329/r3tmp/tmpTMNkqB/pdisk_1.dat 2025-04-17T19:00:21.254210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.254306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.280795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:21.335936Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11431, node 1 2025-04-17T19:00:21.339596Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:21.339618Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:21.454736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:21.454763Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:21.454769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:21.454882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:21.830101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.194204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976258331297:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.194352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976258331291:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.194472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.194660Z node 1 :TX_PROXY DEBUG: actor# [1:7494358959078461310:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.194679Z node 1 :TX_PROXY DEBUG: actor# [1:7494358959078461310:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:00:24.194721Z node 1 :TX_PROXY DEBUG: actor# [1:7494358959078461310:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358976258331306:2631] 2025-04-17T19:00:24.304020Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.304103Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.304126Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.306533Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.306626Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.306798Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.306941Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.306999Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:00:24.307279Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:00:24.308711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.311481Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:00:24.311536Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331306:2631] txid# 281474976710658 SEND to# [1:7494358976258331305:2344] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:00:24.329599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358976258331305:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:24.406275Z node 1 :TX_PROXY DEBUG: actor# [1:7494358959078461310:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.406314Z node 1 :TX_PROXY DEBUG: actor# [1:7494358959078461310:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:00:24.406374Z node 1 :TX_PROXY DEBUG: actor# [1:7494358959078461310:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494358976258331380:2685] 2025-04-17T19:00:24.409182Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.409222Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.409238Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.409843Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.409922Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.410066Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.410216Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.410258Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:00:24.410408Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:00:24.416399Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358976258331380:2685] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:00:24.416585Z node 1 :TX_PROXY ERROR: Actor# [1:7494358976258331380:2685] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025- ... [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:24.839822Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358976258331470:2355] [281474976710661] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:24.840352Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoExecute 2025-04-17T19:00:24.840647Z node 1 :EXPORT DEBUG: TExport::TTxCreate: Reply: status# SUCCESS, error# 2025-04-17T19:00:24.842291Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoComplete 2025-04-17T19:00:24.842401Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.842435Z node 1 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 281474976710661 2025-04-17T19:00:24.842490Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36150' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 } 2025-04-17T19:00:24.842530Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.842596Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.842609Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715757, id# 281474976710661 2025-04-17T19:00:24.842642Z node 1 :EXPORT INFO: TExport::TTxProgress: MkDir propose: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36150' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, txId# 281474976715757 2025-04-17T19:00:24.842703Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.844421Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358976258331470:2355] [281474976710661] Handle TEvExport::TEvCreateExportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:12708" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744916424 } } 2025-04-17T19:00:24.845233Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.845251Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715757, status# StatusAccepted 2025-04-17T19:00:24.845354Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36150' State: CreateExportDir WaitTxId: 281474976715757 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715757 
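The mock-S3 uploads that follow are signed by aws-sdk-cpp with AWS Signature Version 4, as the authorization headers show (Credential=test_key/20250417/us-east-1/s3/aws4_request). For reference, here is a minimal sketch of the standard SigV4 signing-key derivation behind such signatures; this is the generic AWS algorithm, not YDB or mock source, and every name in it is illustrative:

    #include <openssl/evp.h>
    #include <openssl/hmac.h>
    #include <string>
    #include <vector>

    // Sketch of the standard AWS SigV4 signing-key derivation (generic AWS
    // algorithm shown for reference; illustrative, not YDB source code).
    static std::vector<unsigned char> HmacSha256(const std::vector<unsigned char>& key,
                                                 const std::string& data) {
        unsigned char out[EVP_MAX_MD_SIZE];
        unsigned int len = 0;
        HMAC(EVP_sha256(), key.data(), static_cast<int>(key.size()),
             reinterpret_cast<const unsigned char*>(data.data()), data.size(), out, &len);
        return {out, out + len};
    }

    std::vector<unsigned char> SigV4SigningKey(const std::string& secret, const std::string& date,
                                               const std::string& region, const std::string& service) {
        const std::string seed = "AWS4" + secret;
        std::vector<unsigned char> k(seed.begin(), seed.end());
        k = HmacSha256(k, date);     // "20250417" in the requests below
        k = HmacSha256(k, region);   // "us-east-1"
        k = HmacSha256(k, service);  // "s3"
        return HmacSha256(k, "aws4_request");
        // The Signature field in the authorization header is then
        // HMAC-SHA256(this key, canonical string-to-sign), hex-encoded.
    }

Note also that the content-md5 request header and the mock's PutObjectResult ETag carry the same MD5 digest in two encodings: NIbLWVScnysfZNPAOZgBoA== is the base64 form of the hex ETag 3486cb59549c9f2b1f64d3c0399801a0 returned for metadata.json below.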
2025-04-17T19:00:24.848443Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.854953Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.854982Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757 2025-04-17T19:00:24.855005Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757, id# 281474976710661, itemIdx# 4294967295 2025-04-17T19:00:24.855110Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36150' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/view' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:00:24.870420Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358976258331509:2356] [0] Resolve database: name# /Root 2025-04-17T19:00:24.875267Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358976258331509:2356] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:24.875303Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358976258331509:2356] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:24.880787Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.880872Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494358976258331513:2772], status: StatusSuccess 2025-04-17T19:00:24.885015Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358976258331509:2356] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_TRANSFER_DATA ExportToS3Settings { endpoint: "localhost:12708" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } ItemsProgress { } StartTime { seconds: 1744916424 } } REQUEST: PUT /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:12708 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AA04339F-D0B9-46C5-AB1E-6F650306A3D7 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=8ff5f3c9d666f3398a2c88f189ee4c805eb35d6663f61cfd5fe13f6939ad485a content-length: 107 content-md5: s9kG75rwDu9EaSQBrBROTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: acc84b32739165de8cf1d5c0620212d5b7e1e1a551feadcb58d905c2d8db2e42 x-amz-date: 20250417T190024Z 
x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/create_view.sql / / 107 2025-04-17T19:00:24.925103Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494358976258331513:2772], result: PutObjectResult { ETag: b3d906ef9af00eef44692401ac144e4e } REQUEST: PUT /test_bucket/view/metadata.json HTTP/1.1 HEADERS: Host: localhost:12708 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0B1919F6-D978-4E59-AB3A-814B0B978C9B amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=a048856c98b96a55f5d59ede81615feb9c6160871f4f5e4680b5e20d2777f2f7 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T190024Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/metadata.json / / 31 2025-04-17T19:00:24.928281Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494358976258331513:2772], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:00:24.928309Z node 1 :EXPORT INFO: Finish, self: [1:7494358976258331513:2772], success: 1, error: 2025-04-17T19:00:24.930340Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.930374Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710661, itemIdx# 0, success# 1, error# 2025-04-17T19:00:24.937266Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.102777Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980553298817:2357] [0] Resolve database: name# /Root 2025-04-17T19:00:25.103367Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980553298817:2357] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:25.103402Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980553298817:2357] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:25.104516Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980553298817:2357] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710661 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:12708" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744916424 } } assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == 
EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B62443) NTestSuiteBackupRestoreS3::TTestCaseRestoreViewQueryText::Execute_(NUnitTest::TTestContext&)+1610 (0x18BC345A) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7FC57DF53D90) __libc_start_main+128 (0x7FC57DF53E40) _start+41 (0x164AC029) |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [FAIL] Test command err: 2025-04-17T19:00:20.888630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358958426342650:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.910257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00031d/r3tmp/tmpvCFdZ6/pdisk_1.dat 2025-04-17T19:00:21.443353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:21.476660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.476733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.491753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5316, node 1 2025-04-17T19:00:21.754380Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:21.754410Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:21.754417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:21.754566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20895 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:22.122259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.188534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358975606212777:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.188604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.188689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358975606212785:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.188935Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958426342797:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.188949Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958426342797:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:00:24.188993Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958426342797:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358975606212792:2627] 2025-04-17T19:00:24.265565Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.265647Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.265666Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.267194Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.267308Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.267538Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.267682Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.267741Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:00:24.267888Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:00:24.269215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.273479Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:00:24.273544Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212792:2627] txid# 281474976710658 SEND to# [1:7494358975606212791:2344] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:00:24.293737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358975606212791:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:24.357678Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958426342797:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.357707Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958426342797:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:00:24.357823Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958426342797:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494358975606212864:2679] 2025-04-17T19:00:24.360171Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.360228Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.360244Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.360919Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.360972Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.361196Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.361273Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.361302Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:00:24.361435Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:00:24.364383Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:00:24.364552Z node 1 :TX_PROXY ERROR: Actor# [1:7494358975606212864:2679] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:24.364586Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975606212864:2679] txid# 281474976710659 SEND to# [1:7494358975606212791:2344] Source {TEvProposeTransactionSta ... Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:24.988224Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358975606212973:2360] [281474976710662] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:24.988710Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoExecute 2025-04-17T19:00:24.989043Z node 1 :EXPORT DEBUG: TExport::TTxCreate: Reply: status# SUCCESS, error# 2025-04-17T19:00:24.994884Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoComplete 2025-04-17T19:00:24.995000Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.995035Z node 1 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 281474976710662 2025-04-17T19:00:24.995098Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:35594' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 } 2025-04-17T19:00:24.995173Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.995254Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:24.995278Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715757, id# 281474976710662 2025-04-17T19:00:24.995313Z node 1 :EXPORT INFO: TExport::TTxProgress: MkDir propose: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:35594' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, txId# 281474976715757 2025-04-17T19:00:24.995384Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:24.997176Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494358975606212973:2360] [281474976710662] Handle TEvExport::TEvCreateExportResponse: record# Entry { Id: 281474976710662 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:5010" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744916424 } } 2025-04-17T19:00:25.003744Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.003784Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715757, status# StatusAccepted 2025-04-17T19:00:25.003900Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:35594' State: CreateExportDir 
WaitTxId: 281474976715757 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715757 2025-04-17T19:00:25.007177Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.007539Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180290:2361] [0] Resolve database: name# /Root 2025-04-17T19:00:25.008066Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180290:2361] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:25.008092Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180290:2361] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:25.008578Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180290:2361] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710662 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:5010" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744916424 } } 2025-04-17T19:00:25.015224Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.015251Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757 2025-04-17T19:00:25.015269Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757, id# 281474976710662, itemIdx# 4294967295 2025-04-17T19:00:25.015387Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:35594' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/view' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:00:25.025613Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.025712Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494358979901180308:2772], status: StatusSuccess REQUEST: PUT /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:5010 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B18A00AC-43F8-430B-A262-49200A73A01D amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=0e6fd66030546cf89cadf58a4f7dd4db296c916a43e8bc3ede018b666786a421 content-length: 165 content-md5: VGI/U9aBQRGDg7M5DEll1Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
x-amz-content-sha256: 24d8c773c0010e0420ccc01cdf6767c51f2496ddc97edc8d89de1748e6cf32e9 x-amz-date: 20250417T190025Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/create_view.sql / / 165 2025-04-17T19:00:25.062676Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494358979901180308:2772], result: PutObjectResult { ETag: 54623f53d68141118383b3390c4965d5 } REQUEST: PUT /test_bucket/view/metadata.json HTTP/1.1 HEADERS: Host: localhost:5010 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B7CBF4F9-9685-4506-8E23-8E5320D84393 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=650f123d3a426e7cd5f0d75d31543c30b9a852efd24151b4398f2727fddae967 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T190025Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/metadata.json / / 31 2025-04-17T19:00:25.066159Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494358979901180308:2772], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:00:25.066199Z node 1 :EXPORT INFO: Finish, self: [1:7494358979901180308:2772], success: 1, error: 2025-04-17T19:00:25.066346Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.066377Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710662, itemIdx# 0, success# 1, error# 2025-04-17T19:00:25.074861Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.227986Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180318:2363] [0] Resolve database: name# /Root 2025-04-17T19:00:25.228450Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180318:2363] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:25.228475Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180318:2363] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:25.229076Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358979901180318:2363] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710662 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:5010" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744916424 } } assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, 
NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B85038) NTestSuiteBackupRestoreS3::TestViewBackupRestore()+1418 (0x18BC876A) NTestSuiteBackupRestoreS3::TTestCaseTestAllSchemeObjectTypes::Execute_(NUnitTest::TTestContext&)+393 (0x18BCA919) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7F0D40E13D90) __libc_start_main+128 (0x7F0D40E13E40) _start+41 (0x164AC029) |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-JSON [FAIL] Test command err: 2025-04-17T19:00:20.700670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358961767535496:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.700716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000323/r3tmp/tmpSLGnmy/pdisk_1.dat 2025-04-17T19:00:21.219821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:21.247035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.247114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.260344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27080, node 1 2025-04-17T19:00:21.433059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:21.433074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:21.433081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:21.433159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24454 WaitRootIsUp 'Root'... 
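For orientation: both view exports traced above walk the same TExport::TTxProgress sequence before the tests give up on them, and the JsonTable export further below adds the data-upload and item-drop steps. A comment-only reconstruction from these traces follows; the state names are as printed in the log, but the enums themselves are illustrative, not the schemeshard's real declarations:

    // Reconstructed from the TExport::TTxProgress traces in this log
    // (illustrative sketch, not the schemeshard's actual declarations).
    enum class EExportState {    // export-level "State:" field in info# { ... }
        CreateExportDir,         // Allocate txId -> MkDir propose -> Wait for completion
        AutoDropping,            // Drop propose (ESchemeOpRmDir) of the temporary export dir
    };
    enum class EItemState {      // per-item "State:" field inside item# { ... }
        Transferring,            // UploadScheme: PUT create_view.sql / scheme.pb, then metadata.json
        Dropping,                // item dropped after the data upload (seen in the JsonTable trace)
    };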
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:21.828814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.070095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358978947405715:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.070208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.070657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358978947405727:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.071058Z node 1 :TX_PROXY DEBUG: actor# [1:7494358961767535734:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.071083Z node 1 :TX_PROXY DEBUG: actor# [1:7494358961767535734:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:00:24.071133Z node 1 :TX_PROXY DEBUG: actor# [1:7494358961767535734:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358978947405730:2629] 2025-04-17T19:00:24.133818Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.133894Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.133911Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.136488Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.136596Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.136779Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.136909Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.136954Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:00:24.137079Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:00:24.138494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.142813Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:00:24.142863Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405730:2629] txid# 281474976710658 SEND to# [1:7494358978947405729:2344] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:00:24.159594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358978947405729:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:24.210522Z node 1 :TX_PROXY DEBUG: actor# [1:7494358961767535734:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.210550Z node 1 :TX_PROXY DEBUG: actor# [1:7494358961767535734:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:00:24.210602Z node 1 :TX_PROXY DEBUG: actor# [1:7494358961767535734:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494358978947405798:2677] 2025-04-17T19:00:24.212959Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.213004Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.213018Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.213625Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.213693Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.213839Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.213934Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.213975Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:00:24.214093Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:00:24.217186Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:00:24.217350Z node 1 :TX_PROXY ERROR: Actor# [1:7494358978947405798:2677] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:24.217383Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358978947405798:2677] txid# 281474976710659 SEND to# [1:7494358978947405729:2344] Source {TEvProposeTransactionSt ... e: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 0, txId# 281474976715759 2025-04-17T19:00:25.192270Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete REQUEST: PUT /test_bucket/JsonTable/metadata.json HTTP/1.1 HEADERS: Host: localhost:18290 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C0D81228-638B-40CB-A54E-7788049CC318 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=b7c128949a36970ded0c6f7b84fb1bb7812fea28a7ba152c1bcffa5d75bef73a content-length: 79 content-md5: oO2yIqnzj0ASOCqegq+4qw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 80fcdddb86c3327e25801f73f65e4b70be84edbd42c1a061b116dd2a6c08f57c x-amz-date: 20250417T190025Z S3_MOCK::HttpServeWrite: /test_bucket/JsonTable/metadata.json / / 79 REQUEST: PUT /test_bucket/JsonTable/scheme.pb HTTP/1.1 HEADERS: Host: localhost:18290 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E851385D-8878-4B47-8DF2-71824080496F amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=886733042004409faf910e3f7cc4764873e3bf02ca8876c0f3499b5e20c1b604 content-length: 356 content-md5: CCEbK9i040rtugNQKN9q3Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 35718b0869eaf179ce83570504d8ef52de36df7fc26757ca7a54eae419006f20 x-amz-date: 20250417T190025Z S3_MOCK::HttpServeWrite: /test_bucket/JsonTable/scheme.pb / / 356 REQUEST: PUT /test_bucket/JsonTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:18290 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D023E4B4-D9FB-4D27-8080-8C545CFA56D9 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=ac61bad4bdce5f8c290d524453bfe47371846d58e1feca3b750ee2c3c0ccab9b content-length: 35 content-md5: HinJLl33eYmWSWTCUz2U5A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: da25f8ed8fb38104b959bad6f09b1b43c031d06be3b2a13c9f3b92a850089738 x-amz-date: 20250417T190025Z S3_MOCK::HttpServeWrite: 
/test_bucket/JsonTable/data_00.csv / / 35 2025-04-17T19:00:25.266072Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.266091Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715759 2025-04-17T19:00:25.266099Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715759, id# 281474976710665, itemIdx# 0 2025-04-17T19:00:25.266306Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:58506' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 1 } 2025-04-17T19:00:25.267477Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.267545Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.267557Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715760, id# 281474976710665 2025-04-17T19:00:25.267620Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:58506' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/JsonTable' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeTable State: Dropping SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715760 2025-04-17T19:00:25.267685Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.269944Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.269966Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715760, status# StatusAccepted 2025-04-17T19:00:25.270039Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:58506' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 0, txId# 281474976715760 2025-04-17T19:00:25.272413Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.275396Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358983242373636:2386] [0] Resolve database: name# /Root 2025-04-17T19:00:25.275734Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358983242373636:2386] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:25.275757Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358983242373636:2386] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:25.280414Z node 1 
:TX_PROXY DEBUG: [GetExport] [1:7494358983242373636:2386] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710665 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:18290" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/JsonTable" destination_prefix: "/JsonTable" } } StartTime { seconds: 1744916425 } } 2025-04-17T19:00:25.295197Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.295239Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715760 2025-04-17T19:00:25.295256Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715760, id# 281474976710665, itemIdx# 0 2025-04-17T19:00:25.295344Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:58506' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T19:00:25.298690Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.298758Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.298809Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715761, id# 281474976710665 2025-04-17T19:00:25.298850Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:58506' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976715761 2025-04-17T19:00:25.298925Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.299807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715761:0, at schemeshard: 72057594046644480 2025-04-17T19:00:25.303100Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.303128Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715761, status# StatusAccepted 2025-04-17T19:00:25.303208Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:58506' State: AutoDropping WaitTxId: 281474976715761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715761 2025-04-17T19:00:25.305522Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.314674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T19:00:25.319291Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.319312Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761 2025-04-17T19:00:25.319329Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761, id# 281474976710665, itemIdx# 4294967295 2025-04-17T19:00:25.321417Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool 
NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18BA7B44) NTestSuiteBackupRestoreS3::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+1753 (0x18BCB849) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7FBC84E1BD90) __libc_start_main+128 (0x7FBC84E1BE40) _start+41 (0x164AC029) |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingSyncIndex |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: 2025-04-17T19:00:21.180917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358963144177062:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.180964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00019d/r3tmp/tmpBUjpnl/pdisk_1.dat 2025-04-17T19:00:21.889060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:21.895065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.895159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.906747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17131, node 1 2025-04-17T19:00:22.256726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:22.256750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:22.256757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:22.256867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4048 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:22.907625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:22.982988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.827756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976029079820:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.827863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.828329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976029079832:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.839222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.860208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358976029079834:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-17T19:00:24.941708Z node 1 :TX_PROXY ERROR: Actor# [1:7494358976029079901:2702] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:25.643645Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01js2hza3ja7ax81r4em9nchth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZiNTg5NjItYWU0MDEzOTctZTgzMGIwYmQtMzZjMDAwMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:25.915311Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01js2hzazf1jv9dyrpeyxpv80x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGU2YzI4NmYtNjI0MjVjOTMtYTBkNTgyYWUtMTc3YzNiODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:25.921183Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494358980324047278:2360], owner: [1:7494358980324047274:2358], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:25.923105Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494358980324047278:2360], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T19:00:25.923545Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494358980324047278:2360], row count: 1, finished: 1 2025-04-17T19:00:25.923585Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494358980324047278:2360], owner: [1:7494358980324047274:2358], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:25.933744Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916425891, txId: 281474976710662] shutting down 2025-04-17T19:00:26.183728Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358963144177062:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:26.183823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:27.046964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01js2hzc5z1g0paa89pz5ay9s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRiY2JjODktZjJiZjA5ODgtYWM5MWMzOTUtYTU0OGNjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T19:00:27.049376Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494358988913981921:2371], owner: [1:7494358988913981918:2369], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:27.050860Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494358988913981921:2371], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T19:00:27.051044Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494358988913981921:2371], row count: 1, finished: 1 2025-04-17T19:00:27.051076Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494358988913981921:2371], owner: [1:7494358988913981918:2369], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:27.053167Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916427045, txId: 281474976710664] shutting down 2025-04-17T19:00:28.132754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01js2hzd8y87m91ehczyacaj25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMzMDRiYTItM2I1MTQzZmQtZTNjZTdhZTEtYzNiNGQ0OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:28.134585Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494358993208949270:2383], owner: [1:7494358993208949267:2381], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:28.135237Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494358993208949270:2383], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T19:00:28.135458Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494358993208949270:2383], row count: 1, finished: 1 2025-04-17T19:00:28.135489Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494358993208949270:2383], owner: [1:7494358993208949267:2381], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:28.139616Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916428131, txId: 281474976710666] shutting down 2025-04-17T19:00:29.195560Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01js2hzearajbsvjk5bx6cjq96, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY2NjYwMjQtNzc0NzFjZGMtNDg0NDM3ZDUtOTEyZWM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-17T19:00:29.196719Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494358997503916623:2400], owner: [1:7494358997503916618:2397], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:29.197298Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494358997503916623:2400], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T19:00:29.197500Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494358997503916623:2400], row count: 1, finished: 1 2025-04-17T19:00:29.197540Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494358997503916623:2400], owner: [1:7494358997503916618:2397], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:29.199384Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916429194, txId: 281474976710668] shutting down 2025-04-17T19:00:29.335340Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01js2hzecm0tfjv593fgfetq0a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVkNjFhYjMtYmNlZDNjNy1iYjQ1NDg5ZS00ZTliNjZj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:29.337270Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494358997503916656:2409], owner: [1:7494358997503916652:2407], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:29.337640Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494358997503916656:2409], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-17T19:00:29.337859Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494358997503916656:2409], row count: 1, finished: 1 2025-04-17T19:00:29.337899Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494358997503916656:2409], owner: [1:7494358997503916652:2407], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-17T19:00:29.340564Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916429334, txId: 281474976710670] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [FAIL] Test command err: 2025-04-17T19:00:20.650052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358958701993458:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:20.650099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00032f/r3tmp/tmpDHGyqe/pdisk_1.dat 2025-04-17T19:00:21.174495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.174580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.182649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:21.218737Z node 1 :IMPORT WARN: Table profiles were 
not loaded TServer::EnableGrpc on GrpcPort 27597, node 1 2025-04-17T19:00:21.286820Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:21.286840Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:00:21.426124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:21.426158Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:21.426215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:21.426350Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:21.736855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.027256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358975881863678:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.027747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358975881863670:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.027832Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958701993689:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.027840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.027853Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958701993689:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:00:24.027899Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958701993689:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494358975881863685:2630] 2025-04-17T19:00:24.090100Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.090413Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.090434Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.092213Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.092301Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.092487Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.092630Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.092676Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:00:24.092879Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:00:24.094566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.098498Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 
281474976710658} 2025-04-17T19:00:24.098566Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863685:2630] txid# 281474976710658 SEND to# [1:7494358975881863684:2344] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:00:24.119852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358975881863684:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:24.201249Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958701993689:2141] Handle TEvProposeTransaction 2025-04-17T19:00:24.201278Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958701993689:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:00:24.201323Z node 1 :TX_PROXY DEBUG: actor# [1:7494358958701993689:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494358975881863755:2680] 2025-04-17T19:00:24.204326Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:00:24.204403Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:00:24.204422Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:00:24.205087Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:00:24.205155Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:00:24.205358Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:00:24.205474Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:00:24.205526Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:00:24.206063Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:00:24.209075Z node 1 :TX_PROXY DEBUG: Actor# [1:7494358975881863755:2680] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:00:24.209258Z node 1 :TX_PROXY ERROR: Actor# [1:7494358975881863755:2680] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025- ... tems: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 0, txId# 281474976715759 2025-04-17T19:00:25.153842Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete REQUEST: PUT /test_bucket/FloatTable/metadata.json HTTP/1.1 HEADERS: Host: localhost:12037 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 04787B74-6CA3-4B88-B151-6FFCE1F7E045 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=44ad98e24cd8a5798704020f18c3e72d37a767c0e45bd4001ba8e5e5eef063f4 content-length: 79 content-md5: A8HdxtvyKI/3kVROMSi50A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 4450d1217b06d24233b61c9ef57798e194c015abc6f1731da92db5a8511af18c x-amz-date: 20250417T190025Z S3_MOCK::HttpServeWrite: /test_bucket/FloatTable/metadata.json / / 79 REQUEST: PUT /test_bucket/FloatTable/scheme.pb HTTP/1.1 HEADERS: Host: localhost:12037 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41F4B2FC-9792-4B55-98DC-663BE301A341 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=dbc073a110ffbdb313bac1f9ce91214d58905e0887b28b1426e31726fed63e6e content-length: 357 content-md5: /xgP18BuChzdQ/zFyFuBxw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 69ee94afe0ad1ab092c7ad9d111dc301fca1a9d18efd6bd05e3cd4a89694e803 x-amz-date: 20250417T190025Z S3_MOCK::HttpServeWrite: /test_bucket/FloatTable/scheme.pb / / 357 REQUEST: PUT /test_bucket/FloatTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:12037 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E72E1D71-4D9B-481E-8E0C-0D7B71322531 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=258ce2b6bcd1ac4b66113924b17cf06df0b32ffd0848dbefd5f3946588c1b44b content-length: 4 content-md5: gYC7eAu3k2KXv6GejXIjVw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: e34c25fc2aecfd275f72c70684c4b4e61346cd238beb01b2446abf1149b95957 x-amz-date: 20250417T190025Z S3_MOCK::HttpServeWrite: /test_bucket/FloatTable/data_00.csv / / 4 2025-04-17T19:00:25.227354Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.227388Z node 1 :EXPORT DEBUG: 
TExport::TTxProgress: OnNotifyResult: txId# 281474976715759 2025-04-17T19:00:25.227401Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715759, id# 281474976710665, itemIdx# 0 2025-04-17T19:00:25.227659Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:60430' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 1 } 2025-04-17T19:00:25.230923Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.231010Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.231023Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715760, id# 281474976710665 2025-04-17T19:00:25.231131Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:60430' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/FloatTable' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeTable State: Dropping SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715760 2025-04-17T19:00:25.231224Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.234285Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.234311Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715760, status# StatusAccepted 2025-04-17T19:00:25.234393Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:60430' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 0, txId# 281474976715760 2025-04-17T19:00:25.236602Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.259131Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980176831641:2388] [0] Resolve database: name# /Root 2025-04-17T19:00:25.259372Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980176831641:2388] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:00:25.259399Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980176831641:2388] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:00:25.263167Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.263191Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715760 
2025-04-17T19:00:25.263201Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715760, id# 281474976710665, itemIdx# 0 2025-04-17T19:00:25.263283Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:60430' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T19:00:25.263296Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494358980176831641:2388] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710665 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:12037" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/FloatTable" destination_prefix: "/FloatTable" } } StartTime { seconds: 1744916425 } } 2025-04-17T19:00:25.264648Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.264717Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.264727Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715761, id# 281474976710665 2025-04-17T19:00:25.264801Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:60430' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976715761 2025-04-17T19:00:25.264875Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.265152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715761:0, at schemeshard: 72057594046644480 2025-04-17T19:00:25.266720Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.266738Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715761, status# StatusAccepted 2025-04-17T19:00:25.266800Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710665 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:60430' State: AutoDropping WaitTxId: 281474976715761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715761 2025-04-17T19:00:25.268910Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:00:25.274093Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-17T19:00:25.275577Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:00:25.275595Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761 2025-04-17T19:00:25.275606Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761, id# 281474976710665, itemIdx# 4294967295 2025-04-17T19:00:25.277198Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: 
(result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18BA7B44) NTestSuiteBackupRestoreS3::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+1753 (0x18BCB849) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7FE6D028DD90) __libc_start_main+128 (0x7FE6D028DE40) _start+41 (0x164AC029) |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.0%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL] >> 
test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::ReadHuge >> TKeyValueTracingTest::ReadSmall |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::WriteSmall >> ShowCreateView::FromTable [GOOD] >> ShowCreateView::WithPairedTablePathPrefix [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> ShowCreateView::FromTable [GOOD] Test command err: 2025-04-17T19:00:21.196292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358963422278424:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.197060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00019b/r3tmp/tmp2pgHDY/pdisk_1.dat 2025-04-17T19:00:21.870506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.871309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.922288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:21.925342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11509, node 1 2025-04-17T19:00:22.325785Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:22.325810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:22.325816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:22.325930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14652 TClient is connected to server localhost:14652 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:23.177302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.890729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976307181324:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.890842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976307181316:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.890986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.895365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.925520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358976307181330:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:25.007446Z node 1 :TX_PROXY ERROR: Actor# [1:7494358980602148701:2755] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:25.507763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-17T19:00:26.195510Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358963422278424:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:26.195560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:26.489668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T19:00:26.901514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T19:00:27.383495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-17T19:00:27.823462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T19:00:28.255331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T19:00:28.747797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T19:00:28.841658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T19:00:31.934750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-17T19:00:34.540688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710748:0, at schemeshard: 72057594046644480 2025-04-17T19:00:34.605005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710749:0, at schemeshard: 72057594046644480 2025-04-17T19:00:34.849681Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494359019256858104:3116], owner: [1:7494359019256858101:3114], scan id: 0, table id: [1:0:0:show_create] 
2025-04-17T19:00:34.849781Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494359019256858104:3116] 2025-04-17T19:00:34.868114Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494359019256858104:3116], row count: 1, finished: 1 2025-04-17T19:00:34.868155Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494359019256858104:3116], owner: [1:7494359019256858101:3114], scan id: 0, table id: [1:0:0:show_create] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> ShowCreateView::WithPairedTablePathPrefix [GOOD] Test command err: 2025-04-17T19:00:21.934704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358966277022643:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.934895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00018b/r3tmp/tmph9fLqC/pdisk_1.dat 2025-04-17T19:00:22.421353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:22.438868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:22.438996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:22.447250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30352, node 1 2025-04-17T19:00:22.585322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:22.585344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:22.585354Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:22.585485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23080 TClient is connected to server localhost:23080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:23.198915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T19:00:25.452146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358983456892802:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:25.452165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358983456892814:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:25.452289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:25.457157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:25.485283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358983456892816:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:25.600870Z node 1 :TX_PROXY ERROR: Actor# [1:7494358983456892887:2750] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:25.937094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-17T19:00:26.685161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:26.924895Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494358966277022643:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:26.924951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:27.113322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-17T19:00:27.621819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T19:00:28.121849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T19:00:28.594469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T19:00:29.102521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-17T19:00:29.167875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-17T19:00:32.180671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-17T19:00:34.673360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710748:0, at schemeshard: 72057594046644480 2025-04-17T19:00:34.724269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710751:3, at schemeshard: 72057594046644480 2025-04-17T19:00:34.942795Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7494359022111602378:3136], owner: [1:7494359022111602375:3134], scan id: 0, table id: [1:0:0:show_create] 
2025-04-17T19:00:34.944084Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7494359022111602378:3136] 2025-04-17T19:00:34.959456Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7494359022111602378:3136], row count: 1, finished: 1 2025-04-17T19:00:34.959516Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7494359022111602378:3136], owner: [1:7494359022111602375:3134], scan id: 0, table id: [1:0:0:show_create] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs+Stats |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::ReadSmall [FAIL] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> TKeyValueTracingTest::WriteSmall [FAIL] >> KqpLimits::ReadsetCountLimit |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> 
KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] Test command err: Trying to start YDB, gRPC: 26926, MsgBus: 4417 2025-04-17T19:00:00.634320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358874768567200:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:00.634380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0003ae/r3tmp/tmpK18Pna/pdisk_1.dat 2025-04-17T19:00:00.907152Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26926, node 1 2025-04-17T19:00:00.992158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:00.992292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:00.994022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:01.006658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:01.006692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:01.006712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:01.006798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4417 TClient is connected to server localhost:4417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:01.468283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:03.333288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358887653469692:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:03.333298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358887653469684:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:03.333415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:03.337273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:03.346900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358887653469698:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:03.440235Z node 1 :TX_PROXY ERROR: Actor# [1:7494358887653469749:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:03.732253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:00:03.843030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-17T19:00:03.872710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T19:00:03.897162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:03.923884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.018668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.055464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.081308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.106671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.132406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.157976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.179436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.202948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.740383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-17T19:00:04.773533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.803849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.836940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.866881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.893839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.924069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.958980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-17T19:00:04.994137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.031365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.095468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.131595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.170217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.209664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.238501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.268776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.299021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.335019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.366846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-17T19:00:05.397113Z node 1 :FLAT_TX_S ... tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.786936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.792699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.792714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.798423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.798423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.804062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.804165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.810145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.810145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.815766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.815766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.822443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.835710Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.842046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.847893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.853414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.859378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.864687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.869451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.870458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.875437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.875830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.881655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.881662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.887730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.887730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.893790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.893793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.900464Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.900464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.906944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.906944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.913250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.913251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.919496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.919496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.925966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.925966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.932471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.932471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.938735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.938737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.944722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.944723Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.950523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:30.950524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:31.068847Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2hyr2cff7tghws30td4qwa", SessionId: ydb://session/3?node_id=1&id=NDBiZWNkYWItOWViODIxYTMtNjY0MWJmNjEtMjljMTYxOGM=, Slow query, duration: 24.719364s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T19:00:31.281043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T19:00:31.281164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T19:00:31.281644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.6%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> QuoterWithKesusTest::KesusRecreation >> KqpExplain::CreateTableAs+Stats [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs+Stats [GOOD] Test command err: Trying to start YDB, gRPC: 7273, MsgBus: 3227 2025-04-17T19:00:38.036337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494359038980935382:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:38.036651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0001ad/r3tmp/tmpaB33a3/pdisk_1.dat 2025-04-17T19:00:38.449198Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:38.483889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:38.484083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:38.487387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7273, node 1 2025-04-17T19:00:38.686952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:38.686987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:38.687006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:38.687148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3227 TClient is connected to server localhost:3227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:39.493058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T19:00:39.531267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T19:00:41.152126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359051865837940:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:41.152145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359051865837950:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:41.152272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:41.159665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:41.168689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494359051865837954:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:41.275166Z node 1 :TX_PROXY ERROR: Actor# [1:7494359051865838005:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:41.760668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 PLAN::{"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"Tables":["Destination"],"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Col1 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Source","ReadColumns":["Col1","Col2"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Sink"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Col1 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Source","ReadColumns":["Col1","Col2"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Sink"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
PLAN::{"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Col1 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Source","ReadColumns":["Col1","Col2"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Sink"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> SystemView::ShowCreateTableKeyBloomFilter [GOOD] >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> KqpLimits::ReadsetCountLimit [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> test_tpch.py::TestTpchS1::test_tpch[4] >> test_drain.py::TestHive::test_drain_on_stop |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableKeyBloomFilter [GOOD] Test command err: 2025-04-17T19:00:21.438371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358963687341036:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:21.441207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000197/r3tmp/tmpUYLVCx/pdisk_1.dat 2025-04-17T19:00:21.972939Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:21.984753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:21.984851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:21.993662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3374, node 1 2025-04-17T19:00:22.265333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:22.265366Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:22.265372Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:22.265514Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:9742 TClient is connected to server localhost:9742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:22.966320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:24.812247Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-17T19:00:24.812305Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-17T19:00:24.846406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976572243915:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.846570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.847292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358976572243927:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:24.855913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:24.876202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358976572243929:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:24.975070Z node 1 :TX_PROXY ERROR: Actor# [1:7494358976572243998:2753] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:24.976989Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-17T19:00:24.981145Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0001D0FA8 2025-04-17T19:00:24.981220Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7494358976572243896:2339], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01js2hza4b2mje35hh1hw0p75s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ5ZTljZmUtMWY2NmY4MDYtZDEyZDY5ODItNTU2ZDM1NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-17T19:00:24.981383Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-17T19:00:24.981462Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7494358976572243896:2339], queueSize: 1 2025-04-17T19:00:24.982068Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7494358976572243896:2339], compileActor: [1:7494358976572244017:2350] 
2025-04-17T19:00:25.378648Z node 1 :KQP_YQL INFO: TraceId: 01js2hza4b2mje35hh1hw0p75s, SessionId: CompileActor 2025-04-17 19:00:25.375 INFO ydb-core-sys_view-ut(pid=982414, tid=0x00007FAABE7F7640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-17T19:00:25.383678Z node 1 :KQP_YQL TRACE: TraceId: 01js2hza4b2mje35hh1hw0p75s, SessionId: CompileActor 2025-04-17 19:00:25.382 TRACE ydb-core-sys_view-ut(pid=982414, tid=0x00007FAABE7F7640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-17T19:00:25.384259Z node 1 :KQP_YQL DEBUG: TraceId: 01js2hza4b2mje35hh1hw0p75s, SessionId: CompileActor 2025-04-17 19:00:25.384 DEBUG ydb-core-sys_view-ut(pid=982414, tid=0x00007FAABE7F7640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 473us 2025-04-17T19:00:25.384876Z node 1 :KQP_YQL DEBUG: TraceId: 01js2hza4b2mje35hh1hw0p75s, SessionId: CompileActor 2025-04-17 19:00:25.384 DEBUG ydb-core-sys_view-ut(pid=982414, tid=0x00007FAABE7F7640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-17T19:00:25.387084Z node 1 :KQP_YQL DEBUG: TraceId: 01js2hza4b2mje35hh1hw0p75s, SessionId: CompileActor 2025-04-17 19:00:25.387 DEBUG ydb-core-sys_view-ut(pid=982414, tid=0x00007FAABE7F7640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-17T19:00:25.388185Z node 1 :KQP_YQL TRACE: TraceId: 01js2hza4b2mje35hh1hw0p75s, SessionId: CompileActor 2025-04-17 19:00:25.387 TRACE ydb-core-sys_view-ut(pid=982414, tid=0x00007FAABE7F7640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String ... essionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-17T19:00:42.381829Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 2 at actor [1:7494359053881660617:3278] 2025-04-17T19:00:42.381854Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Task: 2, input channelId: 1, src task: 1, at actor [1:7494359053881660616:3277] 2025-04-17T19:00:42.381891Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:7494359053881660616:3277], channels: 1 2025-04-17T19:00:42.381923Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:7494359053881660617:3278], channels: 2 2025-04-17T19:00:42.381974Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7494359053881660616:3277], CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.382014Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7494359053881660616:3277], CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.382038Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-17T19:00:42.382091Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [1:7494359053881660616:3277], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T19:00:42.382120Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7494359053881660616:3277], CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.382203Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7494359053881660616:3277], CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.382555Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7494359053881660617:3278], task: 2, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-17T19:00:42.382592Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7494359053881660616:3277], CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.382629Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7494359053881660616:3277], CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.385322Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7494359053881660616:3277], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 844 Tasks { TaskId: 1 CpuTimeUs: 238 FinishTimeMs: 1744916442384 OutputRows: 7 OutputBytes: 178 Tables { TablePath: "//Root/.metadata/initialization/migrations" ReadRows: 7 ReadBytes: 151 AffectedPartitions: 1 } IngressRows: 7 ComputeCpuTimeUs: 156 BuildCpuTimeUs: 82 HostName: "ghrun-gkdwnlcfnq" NodeId: 1 StartTimeMs: 1744916442384 CreateTimeMs: 1744916442382 UpdateTimeMs: 1744916442384 } MaxMemoryUsage: 1048576 } 2025-04-17T19:00:42.385429Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710773. 
Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494359053881660616:3277] 2025-04-17T19:00:42.385489Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.385512Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7494359053881660617:3278], 2025-04-17T19:00:42.386165Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7494359053881660617:3278], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1368 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 967 FinishTimeMs: 1744916442385 InputRows: 7 InputBytes: 178 OutputRows: 7 OutputBytes: 178 ResultRows: 7 ResultBytes: 178 ComputeCpuTimeUs: 890 BuildCpuTimeUs: 77 HostName: "ghrun-gkdwnlcfnq" NodeId: 1 StartTimeMs: 1744916442384 CreateTimeMs: 1744916442382 UpdateTimeMs: 1744916442385 } MaxMemoryUsage: 1048576 } 2025-04-17T19:00:42.386266Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7494359053881660617:3278] 2025-04-17T19:00:42.386410Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-17T19:00:42.386454Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660612:3265] TxId: 281474976710773. Ctx: { TraceId: 01js2hzv15bg409qh8ec6ssypw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002212s ReadRows: 7 ReadBytes: 151 ru: 7 rate limiter was not found force flag: 1 2025-04-17T19:00:42.387641Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710774. Resolved key sets: 0 2025-04-17T19:00:42.387733Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710774. 
Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0}
2025-04-17T19:00:42.387779Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7494359053881660624:3265] TxId: 281474976710774. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0
2025-04-17T19:00:42.387864Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660624:3265] TxId: 281474976710774. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution.
2025-04-17T19:00:42.387908Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7494359053881660624:3265] TxId: 281474976710774. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MzRlOGFlZGYtYmFmN2VlZGMtOTUzMTVjNjktMzg3ZWM4YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1
|95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
|95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1026092C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFEA342D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFEAF268)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074)
NUnitTest::TTestFactory::Execute()+2438 (0x10724C16)
NUnitTest::RunMain(int, char**)+5213 (0x10744E6D)
??+0 (0x7F1F71E73D90)
__libc_start_main+128 (0x7F1F71E73E40)
_start+41 (0xD80D029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1026092C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFEA342D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFEAEF58)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074)
NUnitTest::TTestFactory::Execute()+2438 (0x10724C16)
NUnitTest::RunMain(int, char**)+5213 (0x10744E6D)
??+0 (0x7FDE0B945D90)
__libc_start_main+128 (0x7FDE0B945E40)
_start+41 (0xD80D029)
|95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1026092C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFEA8DFC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFEAF9CE)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074)
NUnitTest::TTestFactory::Execute()+2438 (0x10724C16)
NUnitTest::RunMain(int, char**)+5213 (0x10744E6D)
??+0 (0x7F9830A0CD90)
__libc_start_main+128 (0x7F9830A0CE40)
_start+41 (0xD80D029)
|95.3%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1026092C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFEA8DFC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFEAF5DE)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074)
NUnitTest::TTestFactory::Execute()+2438 (0x10724C16)
NUnitTest::RunMain(int, char**)+5213 (0x10744E6D)
??+0 (0x7F6AFC108D90)
__libc_start_main+128 (0x7F6AFC108E40)
_start+41 (0xD80D029)
|95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_simple.py::TestSimple::test[alter_table]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReadsetCountLimit [GOOD]
Test command err:
Trying to start YDB, gRPC: 10002, MsgBus: 24290
2025-04-17T19:00:39.023202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494359043334385024:2190];send_to=[0:7307199536658146131:7762515];
2025-04-17T19:00:39.023327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0001aa/r3tmp/tmpSbYUvN/pdisk_1.dat
2025-04-17T19:00:39.498621Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T19:00:39.499517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T19:00:39.499572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T19:00:39.501412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10002, node 1
2025-04-17T19:00:39.595109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T19:00:39.595136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T19:00:39.595143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T19:00:39.595257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24290
TClient is connected to server localhost:24290
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T19:00:40.140006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:40.162011Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T19:00:40.187592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:40.431162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:40.604459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:40.685351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:42.098117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359056219288576:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:42.098257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:42.422887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-17T19:00:42.448059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-17T19:00:42.474262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-17T19:00:42.501019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-17T19:00:42.529240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-17T19:00:42.558378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-17T19:00:42.589411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-17T19:00:42.658097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359056219289232:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:42.658192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:42.658203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359056219289237:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:42.660755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-04-17T19:00:42.667857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494359056219289239:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-04-17T19:00:42.741178Z node 1 :TX_PROXY ERROR: Actor# [1:7494359056219289294:3548] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T19:00:43.707193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:44.022981Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494359043334385024:2190];send_to=[0:7307199536658146131:7762515];
2025-04-17T19:00:44.023045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
|95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|95.6%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|95.6%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
|95.7%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
|95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt]
|95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false]
|95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt]
|95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt]
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
|96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt]
|96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
|96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD]
|96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD]
|96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
|96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD]
Test command err:
2025-04-17T19:00:12.597931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-17T19:00:12.598466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-17T19:00:12.598532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/00049d/r3tmp/tmpoSR1sG/pdisk_1.dat
2025-04-17T19:00:13.026158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-17T19:00:13.074518Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T19:00:13.116035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T19:00:13.116190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T19:00:13.127852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T19:00:13.221351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-17T19:00:13.622879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:13.623000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:13.623074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-17T19:00:13.630980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-17T19:00:13.796420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-17T19:00:13.870656Z node 1 :TX_PROXY ERROR: Actor# [1:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-17T19:00:17.842513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2hyz5g66z8mbjgynqdm1xr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzUzYjI4MDQtNGE5ZmVkOGYtZjJlNDdlMDMtMjIyMjZkM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:21.143310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2hz3cm91bwh27bn9655dqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q1NDY3MWEtZjIzYWQ4ZDAtNjA5NTUxMjgtODgwYWQ4NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:24.600425Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2hz6jxa0rpna5hznnrs32y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3N2Q0YTktMTFjZTc0NDAtZDI0OWM3MzUtODRkYjMyZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:28.258776Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2hz9z28cjax0q4kz4gwrx0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTMyNDcxNGEtZDdiM2QxYzMtNjU5OWZiYjUtYjM1MjQ2YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:31.492568Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2hzdh5ajc0r4grz1vt22y8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDUwMGNmNjUtNDA2NTlkOC1iNzViNGViLWE5ZWUwYmYw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:34.728183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2hzgnc9xqn9s359er2zk4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM5ZjgxN2QtZDIzYzlmMi1iNmE5OWJiNi0zNmY2ZWJjYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:38.104780Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2hzkth3t59rcgcenhq1ytk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiYmFiNDctNGIzNjUzZWUtYmE0NWQyMjItZmUxNTljODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:41.495454Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2hzq4jbxeqcv79hkc0m0s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmZiMGQxNTAtMmVkOGExZTktNzU4MGE0YmUtYzdkMjFiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:44.759389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2hztey10c39533hqntdvtq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM1NTA4ZDUtNTE5YjAwMGQtZmEyNWVlNDctOGUwODg3MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-17T19:00:48.576556Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669.
Ctx: { TraceId: 01js2hzxmcdf6qwt0nj44t6tkm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk5ZjJkOTEtYWM5OTAwODAtYzZhNzYxNTctYWY0YjlkZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-04-17T19:00:50.908771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:00:50.908860Z node 1 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1507 LastUpdateTime: 1507 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 6235 Memory: 17425464 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1507 LastUpdateTime: 1507 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3781 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1507 LastUpdateTime: 1507 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3781 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T19:00:55.601245Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2j07zj3mkx6dhtmxe3k8za, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYxZTc3YjgtOTBlMzRhNzktZjc0ZTA3MzEtZWM2YTViMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-04-17T19:00:27.056178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:27.056799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:27.056856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0003bb/r3tmp/tmp78zTgD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14271, node 1 TClient is connected to server localhost:20857 2025-04-17T19:00:28.089872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:28.154536Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:28.170506Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:28.170573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:28.170620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:28.172018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T19:00:28.210359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:28.210838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:28.226981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:28.364056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-17T19:00:28.479088Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvBoot 2025-04-17T19:00:28.480492Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvRestored 2025-04-17T19:00:28.480799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:745:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T19:00:28.507734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:745:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T19:00:28.508322Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-17T19:00:28.517547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:28.517833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:00:28.518130Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:00:28.518286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:00:28.518400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:00:28.518509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:00:28.518617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:00:28.518758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:00:28.518880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:00:28.518999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:00:28.519113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:00:28.519249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:00:28.542818Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T19:00:28.544372Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-17T19:00:28.544800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T19:00:28.544861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T19:00:28.545070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:00:28.545227Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:00:28.545297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T19:00:28.545358Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T19:00:28.545457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T19:00:28.545520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T19:00:28.545559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T19:00:28.545592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T19:00:28.545775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:00:28.545846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T19:00:28.545888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T19:00:28.545919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T19:00:28.546010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T19:00:28.546057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T19:00:28.546093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T19:00:28.546118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T19:00:28.546224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T19:00:28.546266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T19:00:28.546294Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T19:00:28.546343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T19:00:28.546388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T19:00:28.546427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T19:00:28.546901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-04-17T19:00:28.546981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-17T19:00:28.547609Z node 1 :TX_COLUMNSHARD INFO: tablet_id ... size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-17T19:00:54.603446Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1275:3077], Recipient [1:745:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-04-17T19:00:54.603865Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1276:3078], Recipient [1:745:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-17T19:00:54.603904Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-17T19:00:54.604124Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[26] (CS::GENERAL) apply at tablet 72075186224037888 2025-04-17T19:00:54.607552Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:16 Blob count: 1 2025-04-17T19:00:54.607703Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2945656;raw_bytes=96859974;count=2;records=82561} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=10985168;raw_bytes=371060503;count=6;records=311449} inactive {blob_bytes=21748920;raw_bytes=731151681;count=14;records=617870} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:72/0:size=2030;count=15;;1:size=53667;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445376;count=1;;7:size=1445528;count=1;;8:size=808584;count=1;;9:size=2093168;count=3;;10:size=1247888;count=1;;11:size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:72/0:size=2099;count=16;;1:size=53667;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445376;count=1;;7:size=1445528;count=1;;8:size=808584;count=1;;9:size=2093168;count=3;;10:size=1247888;count=1;;11:size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-17T19:00:54.619922Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-17T19:00:54.619990Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;fline=with_appended.cpp:65;portions=23,;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29; 2025-04-17T19:00:54.620281Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:23;path_id:3;records_count:70098;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:2469232;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3;
2025-04-17T19:00:54.620478Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/429610.000000s;;
2025-04-17T19:00:54.620583Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::4963268e-1bbe11f0-a18fe64c-1b4c9e29;
2025-04-17T19:00:54.620653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:13930824;portions_count:23;);
2025-04-17T19:00:54.620701Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-04-17T19:00:54.620759Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0;
2025-04-17T19:00:54.620820Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1;
2025-04-17T19:00:54.620887Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000;
2025-04-17T19:00:54.620934Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-17T19:00:54.620984Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes;
2025-04-17T19:00:54.621026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes;
2025-04-17T19:00:54.621100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.797000s;
2025-04-17T19:00:54.621153Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes;
2025-04-17T19:00:54.621338Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:16 Blob count: 1
VERIFY failed (2025-04-17T19:00:54.621563Z): tablet_id=72075186224037888;task_id=4963268e-1bbe11f0-a18fe64c-1b4c9e29;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39;
ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed
NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18E6FBB9)
NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18E5DE4B)
NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x1A175E96)
NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48C9C201)
NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x308D0F6D)
NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1EBA5183)
NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1EA88B60)
NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E8CEF54)
NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E86BCD5)
NActors::IActor::Receive(TAutoPtr&)+237 (0x1A0A766D)
NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x35C14E45)
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x35C0D6BA)
NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x35C0A461)
NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+7904 (0x36592F30)
NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4568 (0x18A4D5B8)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18A5F8A8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1931CAA6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x192EC419)
NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x18A5E854)
NUnitTest::TTestFactory::Execute()+2438 (0x192EDCE6)
NUnitTest::RunMain(int, char**)+5213 (0x1931701D)
??+0 (0x7FE94BEC2D90)
__libc_start_main+128 (0x7FE94BEC2E40)
_start+41 (0x163A0029)
|96.7%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|96.8%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD]
>> test_workload.py::TestYdbWorkload::test
|96.9%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test
>> test_simple.py::TestSimple::test[alter_table] [GOOD]
>> test_simple.py::TestSimple::test[alter_tablestore]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD]
>> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt]
>> test_simple.py::TestSimple::test[alter_tablestore] [GOOD]
>> test_simple.py::TestSimple::test[table]
>> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD]
>> test_simple.py::TestSimple::test[table] [GOOD]
>> test_simple.py::TestSimple::test[tablestores]
>> KqpMultishardIndex::WriteIntoRenamingSyncIndex [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [GOOD]
Test command err:
Trying to start YDB, gRPC: 20452, MsgBus: 15368
2025-04-17T19:00:31.903735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494359007788910264:2068];send_to=[0:7307199536658146131:7762515];
2025-04-17T19:00:31.903842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000252/r3tmp/tmpMQDFx6/pdisk_1.dat
2025-04-17T19:00:32.289518Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-17T19:00:32.303697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T19:00:32.304223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T19:00:32.315652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20452, node 1
2025-04-17T19:00:32.508089Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T19:00:32.508136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T19:00:32.508148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T19:00:32.508277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15368
TClient is connected to server localhost:15368
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T19:00:33.229035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:33.269241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-17T19:00:33.299545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:33.493736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:33.671570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:33.734119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:00:34.869691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359020673813937:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:34.869824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:35.507328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T19:00:35.539547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:35.562516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T19:00:35.584923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T19:00:35.611267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T19:00:35.677473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T19:00:35.706974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T19:00:35.766035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359024968781899:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:35.766094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:35.766248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359024968781904:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:35.772169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T19:00:35.781023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494359024968781906:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T19:00:35.846930Z node 1 :TX_PROXY ERROR: Actor# [1:7494359024968781960:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:36.906559Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494359007788910264:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:36.915322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:37.085554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:38.480375Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_SCHEME_CHANGED;details=Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940;tx_id=281474976710697; 2025-04-17T19:00:38.480813Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. at tablet# 72075186224037922;tx_id=281474976710697; 2025-04-17T19:00:38.491350Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. 2025-04-17T19:00:38.491912Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494359037853685878:2574], Table: `/Root/MultiShardIndexed` ([72057594046644480:17:1]), SessionActorId: [1:7494359033558717684:2574]Got SCHEME CHANGED for table `/Root/MultiShardIndexed`. ShardID=72075186224037940, Sink=[1:7494359037853685878:2574].{
: Error: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940, code: 2034 } 2025-04-17T19:00:38.492510Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494359037853685776:2574], SessionActorId: [1:7494359033558717684:2574], statusCode=SCHEME_ERROR. Issue=
: Error: Scheme changed. Table: `/Root/MultiShardIndexed`., code: 2028
: Error: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940, code: 2034 . sessionActorId=[1:7494359033558717684:2574]. isRollback=0 2025-04-17T19:00:38.492846Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTllZTRkOGYtNDk4MWRiZi0xYjdjM2JlMy03NjE1YTRhYg==, ActorId: [1:7494359033558717684:2574], ActorState: ExecuteState, TraceId: 01js2hzqd7bwqtemht7j6a4d03, got TEvKqpBuffer::TEvError in ExecuteState, status: SCHEME_ERROR send to: [1:7494359037853685829:2574] from: [1:7494359037853685776:2574] 2025-04-17T19:00:38.492924Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494359037853685829:2574] TxId: 281474976710697. Ctx: { TraceId: 01js2hzqd7bwqtemht7j6a4d03, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTllZTRkOGYtNDk4MWRiZi0xYjdjM2JlMy03NjE1YTRhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. SCHEME_ERROR: {
: Error: Scheme changed. Table: `/Root/MultiShardIndexed`., code: 2028 subissue: {
: Error: Cannot parse tx 281474976710697. SCHEME_CHANGED: Table '/Root/MultiShardIndexed' scheme changed. at tablet# 72075186224037940, code: 2034 } } 2025-04-17T19:00:38.493119Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTllZTRkOGYtNDk4MWRiZi0xYjdjM2JlMy03NjE1YTRhYg==, ActorId: [1:7494359033558717684:2574], ActorState: ExecuteState, TraceId: 01js2hzqd7bwqtemht7j6a4d03, Create QueryResponse for error on request, msg: 2025-04-17T19:00:38.494147Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. 2025-04-17T19:00:38.494434Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. at tablet# 72075186224037930;tx_id=281474976710697; 2025-04-17T19:00:38.494451Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 281474976710697. SCHEME_ERROR: Table '19' doesn't exist. 2025-04-17T19:00:47.281353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:00:47.281383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:47.499697Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-17T19:00:47.503735Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-04-17T19:00:47.514566Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-17T19:00:47.514601Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-17T19:00:47.516745Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-17T19:00:47.520249Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-17T19:00:47.520468Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-17T19:00:47.528349Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-17T19:00:47.528384Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-04-17T19:00:47.530982Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2025-04-17T19:00:47.534347Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-17T19:00:47.534378Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-17T19:00:47.534393Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2025-04-17T19:00:47.534410Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-04-17T19:00:57.577555Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 281474976711923 because datashard 72075186224037930: is in a pre/offline state assuming this is due to a finished split (wrong 
shard state);tx_id=281474976711923; 2025-04-17T19:00:57.577752Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494359119458092811:2574], Table: `/Root/MultiShardIndexed/index_new/indexImplTable` ([72057594046644480:21:2]), SessionActorId: [1:7494359033558717684:2574]Got WRONG SHARD STATE for table `/Root/MultiShardIndexed/index_new/indexImplTable`. ShardID=72075186224037930, Sink=[1:7494359119458092811:2574].{
: Error: Rejecting data TxId 281474976711923 because datashard 72075186224037930: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2025-04-17T19:00:57.577852Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7494359119458092775:2574], SessionActorId: [1:7494359033558717684:2574], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/MultiShardIndexed/index_new/indexImplTable`., code: 2005
: Error: Rejecting data TxId 281474976711923 because datashard 72075186224037930: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[1:7494359033558717684:2574]. isRollback=0 2025-04-17T19:00:57.578118Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTllZTRkOGYtNDk4MWRiZi0xYjdjM2JlMy03NjE1YTRhYg==, ActorId: [1:7494359033558717684:2574], ActorState: ExecuteState, TraceId: 01js2j0a23c6dmav08j4bmr5vj, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [1:7494359119458092795:2574] from: [1:7494359119458092775:2574] 2025-04-17T19:00:57.578198Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7494359119458092795:2574] TxId: 281474976711923. Ctx: { TraceId: 01js2j0a23c6dmav08j4bmr5vj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTllZTRkOGYtNDk4MWRiZi0xYjdjM2JlMy03NjE1YTRhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/MultiShardIndexed/index_new/indexImplTable`., code: 2005 subissue: {
: Error: Rejecting data TxId 281474976711923 because datashard 72075186224037930: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-04-17T19:00:57.578369Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTllZTRkOGYtNDk4MWRiZi0xYjdjM2JlMy03NjE1YTRhYg==, ActorId: [1:7494359033558717684:2574], ActorState: ExecuteState, TraceId: 01js2j0a23c6dmav08j4bmr5vj, Create QueryResponse for error on request, msg: 2025-04-17T19:00:57.582226Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-04-17T19:01:02.641396Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-17T19:01:02.677070Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2025-04-17T19:01:02.677107Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2025-04-17T19:01:02.689599Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2025-04-17T19:01:02.698394Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-04-17T19:01:07.644869Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found >> S3PathStyleBackup::DisableVirtualAddressing >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] |96.9%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12435, MsgBus: 18401 2025-04-17T19:00:00.716474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358873841319801:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:00.716541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0003a5/r3tmp/tmpwheZiI/pdisk_1.dat 2025-04-17T19:00:01.060627Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12435, node 1 2025-04-17T19:00:01.114947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:01.115079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:01.116906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:01.125703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:01.125728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:01.125735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:01.125884Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18401 TClient is connected to server localhost:18401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:01.625441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:03.299139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358886726222362:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:03.299140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358886726222354:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:03.299235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:03.302666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:03.312446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358886726222368:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:03.373915Z node 1 :TX_PROXY ERROR: Actor# [1:7494358886726222419:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:03.661763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:00:03.898547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:03.898549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:03.898782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:00:03.899085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:00:03.899217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:00:03.899353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:00:03.899483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:00:03.899593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:00:03.899710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:00:03.899871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:00:03.899996Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:00:03.900000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:00:03.900178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:00:03.900230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:00:03.900310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494358886726222660:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:00:03.900335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:00:03.900466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:00:03.900569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:00:03.900673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:00:03.900779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:00:03.900989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:00:03.901117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:00:03.901230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:00:03.901351Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7494358886726222653:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:00:03.933960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7494358886726222689:2354];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:03.933959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7494358886726222858:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:03.934011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:74943588 ... oller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.439901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.441020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.448465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.448964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.453854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.458062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.462532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.465707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.468082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.471630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.474030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.479857Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.480130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.486302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.489223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.492874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.495358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.500128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.500930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.506714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.506714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.512962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.512961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039210;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.520247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.521002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.526776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.529091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.532972Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.536202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.542805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.547608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.551915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.553673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.559541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.571537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.576910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.582774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.589151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.594896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.600763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.606108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.612211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.618512Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.618512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.699287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:01:02.724646Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2hzm71a5vyjet8rvgj2bv2", SessionId: ydb://session/3?node_id=1&id=MmQ3OTEwOWYtOTQ4Y2Y0ZDYtNDE2MzFiZDgtOWE4M2Y1MDI=, Slow query, duration: 27.554811s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T19:01:03.043733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T19:01:03.043777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494359028460180740:7805];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T19:01:03.044584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T19:01:03.044860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] Test command err: Trying to start YDB, gRPC: 21028, MsgBus: 7795 2025-04-17T18:59:59.085500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494358869013289720:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T18:59:59.085643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0003d2/r3tmp/tmpdgENEu/pdisk_1.dat 2025-04-17T18:59:59.365946Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21028, node 1 2025-04-17T18:59:59.371623Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T18:59:59.405244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-17T18:59:59.405278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:59.405286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:59.405408Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:59:59.443191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:59.443302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:59.444496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7795 TClient is connected to server localhost:7795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T18:59:59.873543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T18:59:59.893959Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-17T19:00:01.639755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358877603224987:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:01.639758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494358877603224979:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:01.639839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:01.643178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:01.652446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494358877603224993:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:00:01.725696Z node 1 :TX_PROXY ERROR: Actor# [1:7494358877603225044:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:01.971638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:00:02.143628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:02.143628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:02.143866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:00:02.144170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:00:02.144301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:00:02.144317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:00:02.144470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:00:02.144518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:00:02.144618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:00:02.144647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:00:02.144755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-17T19:00:02.144827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:00:02.144866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:00:02.145014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:00:02.145130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:00:02.145167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:00:02.145255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:00:02.145288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:00:02.145399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:00:02.145455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:00:02.145505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:00:02.145562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494358881898192603:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:00:02.145600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:00:02.145711Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7494358881898192591:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:00:02.186994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494358881898192621:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:02.187061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7494358881 ... ontroller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.718332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.724355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.724355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.730004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.730040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.736234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.736252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.741172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.742626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.746010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.748842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.750503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.754651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 
2025-04-17T19:00:58.755005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.758922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.761004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.763613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.767198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.767706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.772260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.773215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.776606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.779015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.781911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.785233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.786919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.791279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.791666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.796086Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.797344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.800358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.804209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.805462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.810298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.810463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.816007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.816513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.820974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.829139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.833669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.838123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.843124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.858097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.875673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.879827Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710714; 2025-04-17T19:00:58.973671Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01js2hzjdeejk1adfb2b5dmmcs", SessionId: ydb://session/3?node_id=1&id=OTlkMmNmNS03ZWVhN2EwZS0xYzM1NGNhZC1mNjE2NTA4, Slow query, duration: 25.646211s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-17T19:00:59.592379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T19:00:59.592573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; 2025-04-17T19:00:59.593773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7494359023632150519:7775];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-17T19:00:59.594160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710716; >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test.py::test_topic_data >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] |97.1%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] |97.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> test.py::test_topic_data [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::test_topic_data [GOOD] |97.5%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |97.6%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |97.7%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpStats::SysViewClientLost [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL] Test command err: Trying to start YDB, gRPC: 14406, MsgBus: 20214 2025-04-17T19:00:40.987408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494359044999913617:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:40.988057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000185/r3tmp/tmpEm5sqw/pdisk_1.dat 2025-04-17T19:00:41.326914Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14406, node 1 2025-04-17T19:00:41.380384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:41.380472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:41.382192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:41.390807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:41.390836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:41.390845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:41.390969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20214 TClient is connected to server localhost:20214 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:00:41.802090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:41.831904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:41.961562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:42.107681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:42.169722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:44.020624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359062179784600:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:44.020702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:44.304958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T19:00:44.331762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:00:44.358843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T19:00:44.385654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T19:00:44.415346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T19:00:44.446134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T19:00:44.478201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T19:00:44.548425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359062179785256:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:44.548518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:44.548702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494359062179785262:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:44.552132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T19:00:44.561336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494359062179785264:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T19:00:44.656290Z node 1 :TX_PROXY ERROR: Actor# [1:7494359062179785320:3545] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } waiting... 2025-04-17T19:00:45.555251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T19:00:45.990441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494359044999913617:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:45.990995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:00:56.326770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:00:56.326803Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:01:18.276923Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916478246, txId: 281474976710673] shutting down 2025-04-17T19:01:18.332747Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-17T19:01:19.538882Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916479527, txId: 281474976710675] shutting down 2025-04-17T19:01:20.735898Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916480729, txId: 281474976710677] shutting down 2025-04-17T19:01:21.931817Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916481925, txId: 281474976710679] shutting down 2025-04-17T19:01:23.112177Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916483106, txId: 281474976710681] shutting down 2025-04-17T19:01:24.275664Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916484270, txId: 281474976710683] shutting down 2025-04-17T19:01:25.438210Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916485432, txId: 281474976710685] shutting down 2025-04-17T19:01:26.650035Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916486642, txId: 281474976710687] shutting down 2025-04-17T19:01:27.849930Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916487842, txId: 281474976710689] shutting down 2025-04-17T19:01:29.102136Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916489086, txId: 281474976710691] shutting down 2025-04-17T19:01:30.306487Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744916490298, txId: 281474976710693] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198368DB 1. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x19CFB43F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x193D3478 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x193E6487 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x193E6487 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x193E6487 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19D32635 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19D32635 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19D32635 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x19D01FC8 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x193E560B 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19D03895 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19D2CBAC 15. ??:0: ?? @ 0x7FBA88B34D8F 16. ??:0: ?? @ 0x7FBA88B34E3F 17. ??:0: ?? @ 0x166DE028 |97.9%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [FAIL] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |98.0%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |98.1%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] Test command err: 2025-04-17T18:59:59.408160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:59:59.408630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:59:59.408673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0004a1/r3tmp/tmpvWHyoj/pdisk_1.dat 2025-04-17T18:59:59.873335Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T18:59:59.873436Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T18:59:59.873475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T18:59:59.873575Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-17T18:59:59.873606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T18:59:59.995398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T18:59:59.999725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:00.003022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T19:00:00.005421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T19:00:00.005537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:00.005662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:00:00.012816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T19:00:00.013934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T19:00:00.014004Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:00:00.014044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T19:00:00.014284Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T19:00:00.014328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T19:00:00.014437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:00.014502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T19:00:00.014593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T19:00:00.014625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T19:00:00.016454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:00:00.016886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:00:00.016915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T19:00:00.017037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T19:00:00.017079Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T19:00:00.017135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:00.017195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T19:00:00.017232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T19:00:00.017293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:00:00.017564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:00:00.017589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T19:00:00.017664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T19:00:00.017689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T19:00:00.017725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:00.017748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:00.017778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T19:00:00.017800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:00:00.017831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T19:00:00.021400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T19:00:00.021849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:00:00.021886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-17T19:00:00.022840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T19:00:00.023894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T19:00:00.023936Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T19:00:00.023970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-17T19:00:00.024077Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-17T19:00:00.024423Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T19:00:00.024475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T19:00:00.024510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T19:00:00.024643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-17T19:00:00.024666Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T19:00:00.024723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T19:00:00.024753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T19:00:00.024781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T19:00:00.071636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-17T19:00:00.071769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T19:00:00.071805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:00.072590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T19:00:00.072668Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-17T19:00:00.117666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:00.118227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:00.131056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:00.210350Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-17T19:00:00.211129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T19:00:00.211182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T19:00:00.211216Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T19:00:00.211366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-17T19:00:00.211401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T19:00:00.211486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T19:00:00.211613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.022428Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.022459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T19:02:15.022533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T19:02:15.022567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T19:02:15.022661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-04-17T19:02:15.022729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T19:02:15.022761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-04-17T19:02:15.022825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-17T19:02:15.022880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037894 2025-04-17T19:02:15.022956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:02:15.033465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.033567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.033598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T19:02:15.107833Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-17T19:02:15.108400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1583:3193], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 23 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T19:02:15.108441Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T19:02:15.108489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0023 2025-04-17T19:02:15.108590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T19:02:15.108634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T19:02:15.162823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.162888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.162908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T19:02:15.162965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T19:02:15.162988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T19:02:15.163060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-04-17T19:02:15.163115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0, RowCount 2, DataSize 54 2025-04-17T19:02:15.163136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037895, followerId 0 2025-04-17T19:02:15.163187Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-17T19:02:15.163244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037895 2025-04-17T19:02:15.163301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:02:15.173749Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.173829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:02:15.173858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T19:02:15.235706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:02:15.235792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:02:15.235891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:02:15.235921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:02:15.257185Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T19:02:15.341646Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T19:02:15.415672Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:02:15.415743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:02:15.415814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:02:15.415834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:02:15.437182Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T19:02:15.521629Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T19:02:15.607338Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjY5Zjc4ZmMtNDFhY2MzN2YtMjEwNzY1NDYtOGU3M2I0MQ==, ActorId: [1:2033:3520], ActorState: ExecuteState, TraceId: 01js2hyq4x9v8fne3qcdcrx0h1, Create QueryResponse for error on request, msg: 2025-04-17T19:02:15.607596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:02:15.607641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 
2025-04-17T19:02:15.607790Z node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=YjY5Zjc4ZmMtNDFhY2MzN2YtMjEwNzY1NDYtOGU3M2I0MQ==, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b 2025-04-17T19:02:15.609225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:02:15.609272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture()+28 (0x19266BDC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x19723D20) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x492CAD97) NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26179 (0x18EB72E3) std::__y1::__function::__func, void ()>::operator()()+280 (0x18E72428) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1975AF16) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1972A8A9) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18E712D4) NUnitTest::TTestFactory::Execute()+2438 (0x1972C176) NUnitTest::RunMain(int, char**)+5213 (0x1975548D) ??+0 (0x7F389055ED90) __libc_start_main+128 (0x7F389055EE40) _start+41 (0x165C5029) |98.1%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] Test command err: 2025-04-17T19:00:51.076876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3156:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.077977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.078734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.081314Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3162:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.082771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3152:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.082972Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3159:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.083093Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.083623Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.084676Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.084755Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.085111Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1519:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.085592Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.086495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.086865Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3114:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.087022Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1516:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.087109Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.087873Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.088566Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3108:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.088721Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.088785Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.089495Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3111:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:51.089581Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.089649Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.090493Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.090928Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:00:51.090989Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:51.091353Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:00:51.605232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:51.866938Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T19:00:51.888386Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T19:00:52.350927Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 11885, node 1 TClient is connected to server localhost:19103 2025-04-17T19:00:52.636115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:00:52.636183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:00:52.636224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:00:52.636822Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T19:02:18.861167Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:2882:2430], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.863391Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.864063Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.868134Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:2885:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.868819Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3143:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.870050Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.870318Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:2888:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.871000Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.871110Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.871506Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:2903:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.871889Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.871978Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.872290Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:2897:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.872478Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:2900:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.873158Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.873650Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.873983Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.874056Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.874117Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.874640Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:2894:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.874739Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.875553Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.876276Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.876820Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-17T19:02:18.880688Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:2891:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:02:18.881684Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:02:18.882288Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-17T19:02:19.343007Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:02:19.690322Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-17T19:02:19.738402Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-17T19:02:20.504107Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 28414, node 10 TClient is connected to server localhost:30332 2025-04-17T19:02:20.925877Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:02:20.925964Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:02:20.926019Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:02:20.926566Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration |98.3%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.3%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> QuoterWithKesusTest::KesusRecreation [FAIL] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |98.5%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::KesusRecreation [FAIL] Test command err: 2025-04-17T19:00:42.529475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494359054659669461:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:42.530539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T19:00:42.557273Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494359056709080708:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:00:42.557591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000166/r3tmp/tmp8wco1B/pdisk_1.dat 2025-04-17T19:00:42.919488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:42.919614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:42.920920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:42.920986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:42.922033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:42.927311Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T19:00:42.930792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:42.931026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22840 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-17T19:00:43.222131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
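The repeated "Scheduled retry for error: { : Error: Retry LookupError for table .metadata/workload_manager/... }" entries above come from service actors that treat a missing .metadata table as transient during cluster bootstrap: rather than failing, they schedule another lookup attempt and try again. Below is a minimal sketch of that schedule-and-retry pattern; the names (RetryWithBackoff, lookupTable, EStatus) are hypothetical illustrations, not the actual YDB TCleanupTablesActor code, and the backoff policy is an assumption.

// Sketch of schedule-and-retry on a transient LookupError.
// All names here are illustrative, not taken from the YDB sources.
#include <chrono>
#include <functional>
#include <iostream>
#include <optional>
#include <string>
#include <thread>

enum class EStatus { Ok, LookupError };

std::optional<std::string> RetryWithBackoff(
        const std::function<EStatus()>& lookupTable,
        int maxAttempts,
        std::chrono::milliseconds initialDelay) {
    auto delay = initialDelay;
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        if (lookupTable() == EStatus::Ok) {
            return "resolved";
        }
        // Transient LookupError: the table may simply not exist yet
        // during bootstrap, so schedule another attempt instead of failing.
        std::cerr << "Scheduled retry " << attempt << " after "
                  << delay.count() << "ms\n";
        std::this_thread::sleep_for(delay);
        delay *= 2; // exponential backoff is an assumption; other policies fit too
    }
    return std::nullopt; // give up after maxAttempts
}

int main() {
    int calls = 0;
    auto flaky = [&] { return ++calls < 3 ? EStatus::LookupError : EStatus::Ok; };
    auto result = RetryWithBackoff(flaky, 5, std::chrono::milliseconds(100));
    std::cout << (result ? "table found\n" : "gave up\n");
}

This is consistent with the log: the warnings appear in a burst while the initialization migrations are still creating the .metadata tables, then stop once setup completes and the test proceeds.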
2025-04-17T19:00:43.249692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:00:43.275296Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72075186224037888 2025-04-17T19:00:43.275422Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Execute 2025-04-17T19:00:43.278724Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Complete 2025-04-17T19:00:43.278805Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Execute 2025-04-17T19:00:43.282925Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Complete 2025-04-17T19:00:43.313338Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Execute (sender=[1:7494359054659669916:2250], cookie=1, path="/dc-1/KesusQuoter") 2025-04-17T19:00:43.317213Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Complete (sender=[1:7494359054659669916:2250], cookie=1, status=SUCCESS) AddQuoterResource: { Resource { ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 } } } TClient::Ls request: /dc-1/KesusQuoter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744916443368 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... (TRUNCATED) 2025-04-17T19:00:43.339737Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7494359058954637638:2578], cookie=0, path="Resource", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T19:00:43.340604Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 1 "Resource" 2025-04-17T19:00:43.343026Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7494359058954637638:2578], cookie=0) 2025-04-17T19:00:43.343295Z node 1 :QUOTER_SERVICE TRACE: Request({ Operator: And Deadline: no Cookie: 0 [ { 1, "/dc-1/KesusQuoter":"Resource" } ] }) 2025-04-17T19:00:43.343435Z node 1 :QUOTER_SERVICE INFO: resolve new quoter /dc-1/KesusQuoter 2025-04-17T19:00:43.343562Z node 1 :QUOTER_SERVICE INFO: path resolved as Kesus /dc-1/KesusQuoter 2025-04-17T19:00:43.344558Z node 1 :QUOTER_SERVICE INFO: resolve resource Resource on quoter /dc-1/KesusQuoter 2025-04-17T19:00:43.344634Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Created kesus quoter proxy. 
Tablet id: 72075186224037888 2025-04-17T19:00:43.344834Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Connecting to kesus 2025-04-17T19:00:43.345878Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: ProxyRequest "Resource" 2025-04-17T19:00:43.346282Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Successfully connected to tablet 2025-04-17T19:00:43.348829Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvSubscribeOnResourcesResult to [1:7494359058954637652:2304]. Cookie: 1. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T19:00:43.348876Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Subscribe on quoter resources (sender=[1:7494359058954637652:2304], cookie=1) 2025-04-17T19:00:43.349169Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: SubscribeOnResourceResult({ Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }) 2025-04-17T19:00:43.349210Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Initialized new session with resource "Resource" 2025-04-17T19:00:43.349246Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxySession("Resource", 1) 2025-04-17T19:00:43.349307Z node 1 :QUOTER_SERVICE INFO: resource session established: /dc-1/KesusQuoter:Resource as 1 2025-04-17T19:00:43.349320Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(2, 2)} }]) 2025-04-17T19:00:43.349401Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T19:00:43.349429Z node 1 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 2. FreeBalance: 2 2025-04-17T19:00:43.349451Z node 1 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-17T19:00:43.350000Z 2025-04-17T19:00:43.349462Z node 1 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-17T19:00:43.349515Z node 1 :QUOTER_SERVICE TRACE: Charge "Resource" for 1. Balance: 2. FreeBalance: 2. TicksToFullfill: 0.5. DurationToFullfillInUs: 50000. TimeToFullfill: 1970-01-01T00:00:00.050000Z. Now: 2025-04-17T19:00:43.349019Z. LastAllocated: 1970-01-01T00:00:00.000000Z 2025-04-17T19:00:43.349627Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 1, Queue: 0}]) 2025-04-17T19:00:43.349658Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: 1, QueueWeight: 0 } 2025-04-17T19:00:43.349669Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Activate session to "Resource". Connected: 1 2025-04-17T19:00:43.350633Z node 1 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 2. 
FreeBalance: 2 2025-04-17T19:00:43.350818Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 1 ConsumeResource: true Amount: inf } ActorID { RawX1: 7494359058954637652 RawX2: 4503603922340096 } }) 2025-04-17T19:00:43.350884Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(1, 2)} }]) 2025-04-17T19:00:43.350927Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T19:00:43.350948Z node 1 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 1. FreeBalance: 1 2025-04-17T19:00:43.351009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-17T19:00:43.351687Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvUpdateConsumptionStateAck to [1:7494359058954637652:2304]. Cookie: 0. Data: { } 2025-04-17T19:00:43.351732Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Update quoter resources consumption state (sender=[1:7494359058954637652:2304], cookie=0) waiting... 2025-04-17T19:00:43.359018Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Disconnected from tablet 2025-04-17T19:00:43.359038Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:00:43.359074Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Mark "Resource" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 1 2025-04-17T19:00:43.359100Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Schedule offline allocation in 0.000000s: [{ "Resource", 1 }] 2025-04-17T19:00:43.359200Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T19:00:43.359346Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: OfflineResourceAllocation([{ "Resource", 1 ... c-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:46.704138Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:46.784928Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:46.784955Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:46.868981Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:46.869011Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:46.951448Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:46.951473Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.024545Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.024564Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.102518Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.102546Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.183786Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.183814Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.258062Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. 
Status: ERROR 2025-04-17T19:02:47.258097Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.329711Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.329748Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.402815Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.402838Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.479201Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.479221Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.563654Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.563680Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.642010Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.642029Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.719403Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.719428Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.801774Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.801798Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.882137Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.882156Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:47.968168Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:47.968194Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.051069Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.051088Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.132003Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.132025Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.207204Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.207224Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.287071Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.287091Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.361378Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.361404Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.439244Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.439269Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.519162Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. 
Status: ERROR 2025-04-17T19:02:48.519187Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.603046Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.603070Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.680878Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.680903Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.757171Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.757191Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.832455Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.832478Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.904638Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.904664Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:48.986460Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:48.986481Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.071426Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.071445Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.152958Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.152986Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.229724Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.229742Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.313663Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.313683Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.395647Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.395686Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.474520Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.474544Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.550777Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.550799Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.629556Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.629576Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.711063Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.711090Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.787828Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. 
Status: ERROR 2025-04-17T19:02:49.787850Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.873827Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.873848Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:49.953692Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:49.953715Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:02:50.031044Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:02:50.031069Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus (TWithBackTrace) Event queue is still empty.ydb/library/actors/testlib/test_runtime.cpp:1375: TBackTrace::Capture()+28 (0x18F230CC) TWithBackTrace::TWithBackTrace<>()+65 (0x35D91D01) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+29118 (0x35D8DCFE) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x35D93FF4) NKikimr::TEvQuota::TEvClearance* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration)+643 (0x18B1B3A3) THolder NActors::TTestActorRuntimeBase::GrabEdgeEvent(TDuration)+332 (0x18B0178C) NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+304 (0x18B25C30) NKikimr::TKesusQuoterTestSetup::GetQuota(TBasicString> const&, TBasicString> const&, unsigned long, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+575 (0x18B27D3F) NKikimr::NTestSuiteQuoterWithKesusTest::TTestCaseKesusRecreation::Execute_(NUnitTest::TTestContext&)+926 (0x18A7006E) std::__y1::__function::__func, void ()>::operator()()+280 (0x18AAC828) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19417416) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x193E6D89) NKikimr::NTestSuiteQuoterWithKesusTest::TCurrentTest::Execute()+1204 (0x18AAB7D4) NUnitTest::TTestFactory::Execute()+2438 (0x193E8656) NUnitTest::RunMain(int, char**)+5213 (0x1941198D) ??+0 (0x7F0C72807D90) __libc_start_main+128 (0x7F0C72807E40) _start+41 (0x163D4029) |98.6%| [TM] {RESULT} ydb/core/quoter/ut/unittest |98.6%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T19:00:13.753540Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T19:00:13.973359Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T19:00:13.997620Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T19:00:14.005512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T19:00:14.030373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T19:00:14.032773Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T19:00:14.044823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:14.045069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:00:14.045306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:00:14.045432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:00:14.045545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:00:14.045654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:00:14.045764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:00:14.045872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:00:14.045985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:00:14.046089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:00:14.046202Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:00:14.046304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:00:14.072371Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T19:00:14.076116Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T19:00:14.076272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T19:00:14.076325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T19:00:14.076649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:00:14.078895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:00:14.079000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T19:00:14.079062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T19:00:14.079259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T19:00:14.079355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T19:00:14.079424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T19:00:14.079472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T19:00:14.079642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:00:14.079703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T19:00:14.079738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T19:00:14.079782Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T19:00:14.079906Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T19:00:14.079975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T19:00:14.080026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T19:00:14.080053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T19:00:14.080123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T19:00:14.080162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T19:00:14.080211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T19:00:14.080266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T19:00:14.080302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T19:00:14.080329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T19:00:14.080758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-17T19:00:14.080846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T19:00:14.080962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2025-04-17T19:00:14.081089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=77; 2025-04-17T19:00:14.081236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T19:00:14.081294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T19:00:14.081324Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T19:00:14.081553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T19:00:14.081610Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T19:00:14.081640Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T19:00:14.081800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T19:00:14.081847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T19:00:14.081882Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T19:00:14.082066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T19:00:14.082120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T19:00:14.082151Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... 32:20 #26 0x315586e7 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:97:5 #27 0x315709e7 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:502:9 #28 0xfff8fba in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:218:21 #29 0xfff5efb in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #30 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #31 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> 
const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2a288576 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x2a288576 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x2a288576 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x2a288576 in __allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x2a288576 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:358:25 #6 0x2a288576 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2144:51 #7 0x2a27a8b4 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1528:5 #8 0x2a27a8b4 in NKikimr::NOlap::TBlobManager::StartBlobBatch() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:380:23 #9 0x2a25febd in TWriteAction /-S/ydb/core/tx/columnshard/blobs_action/bs/write.h:41:30 #10 0x2a25febd in void std::__y1::allocator::construct[abi:fe190000]> const&, std::__y1::shared_ptr&>(NKikimr::NOlap::NBlobOperations::NBlobStorage::TWriteAction*, TBasicString> const&, std::__y1::shared_ptr&) /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #11 0x2a25562a in construct > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #12 0x2a25562a in __shared_ptr_emplace > &, std::__y1::shared_ptr &, std::__y1::allocator, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #13 0x2a25562a in allocate_shared, const TBasicString > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #14 0x2a25562a in make_shared > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #15 0x2a25562a in NKikimr::NOlap::NBlobOperations::NBlobStorage::TOperator::DoStartWritingAction() /-S/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp:16:12 #16 0x25c9b4f5 in NKikimr::NOlap::IBlobsStorageOperator::StartWritingAction(NKikimr::NOlap::NBlobOperations::EConsumer) /-S/ydb/core/tx/columnshard/blobs_action/abstract/storage.h:106:23 #17 0x25cf39a0 in NKikimr::NColumnShard::TWriteOperation::Start(NKikimr::NColumnShard::TColumnShard&, std::__y1::shared_ptr const&, NActors::TActorId const&, NKikimr::NOlap::TWritingContext const&) /-S/ydb/core/tx/columnshard/operations/write.cpp:39:53 #18 0x25eae844 in NKikimr::NColumnShard::TWriteTask::Execute(NKikimr::NColumnShard::TColumnShard*, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:26:21 #19 0x25eafdd8 in NKikimr::NColumnShard::TWriteTasksQueue::Drain(bool, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:45:15 #20 0x25e5fa6e in NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__write.cpp:616:22 #21 0x25de1ff6 in NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/columnshard/columnshard_impl.h:449:13 #22 0x116159cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #23 0x2cd33274 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #24 0x2cd2bae9 in 
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #25 0x2cd35e63 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #26 0x3157cfe2 in NKikimr::NEvents::TDataEvents::TEvWriteResult* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:446:13 #27 0x3155beb4 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:510:20 #28 0x3155beb4 in NKikimr::NTxUT::WaitWriteResult(NActors::TTestBasicRuntime&, unsigned long, std::__y1::vector>*) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:120:26 #29 0x3155d631 in NKikimr::NTxUT::WriteDataImpl(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, unsigned long, TBasicString> const&, std::__y1::shared_ptr const&, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:146:16 #30 0x3155e660 in NKikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:161:16 #31 0xfff977f in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:230:9 #32 0xfff5ecf in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #33 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #34 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #35 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #36 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #37 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #38 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #39 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #40 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #41 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #42 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #43 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #44 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #45 0x7fe8feb5bd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 3071440 
byte(s) leaked in 54874 allocation(s). ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T19:00:14.773781Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T19:00:14.886831Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T19:00:14.891388Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T19:00:14.891887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T19:00:14.917179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T19:00:14.917523Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T19:00:14.926064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:00:14.926333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:00:14.926618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:00:14.926795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:00:14.926927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:00:14.927052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:00:14.927175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:00:14.927298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:00:14.927459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:00:14.927569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:00:14.927675Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:00:14.927813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:00:14.952633Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T19:00:14.955797Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T19:00:14.955980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T19:00:14.956039Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T19:00:14.956237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:00:14.956412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:00:14.956583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T19:00:14.956677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T19:00:14.956805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T19:00:14.956885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T19:00:14.956938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T19:00:14.956979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T19:00:14.957183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:00:14.957268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T19:00:14.957316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T19:00:14.957348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T19:00:14.957435Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T19:00:14.957511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T19:00:14.957576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T19:00:14.957632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T19:00:14.957733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T19:00:14.957784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T19:00:14.957820Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T19:00:14.957876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T19:00:14.957936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T19:00:14.957976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T19:00:14.958435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-17T19:00:14.958527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-17T19:00:14.958621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-17T19:00:14.958714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-17T19:00:14.958889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T19:00:14.958946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T19:00:14.958979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T19:00:14.959165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T19:00:14.959213Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T19:00:14.959258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T19:00:14.959480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T19:00:14.959531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T19:00:14.959566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T19:00:14.959785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T19:00:14.959835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T19:00:14.959882Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... std::__y1::allocator>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:146:16 #30 0x3155e660 in NKikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:161:16 #31 0xfff977f in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:230:9 #32 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #33 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #34 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #35 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #36 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #37 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #38 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #39 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #40 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #41 0xffef703 in 
NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #42 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #43 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #44 0x7f5f9f4abd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0xf53bcdd in NObjectFactory::TFactoryObjectCreator::Create() const /-S/library/cpp/object_factory/object_factory.h:38:20 #2 0x1ca4f2d9 in MakeHolder /-S/library/cpp/object_factory/object_factory.h:137:38 #3 0x1ca4f2d9 in NKikimr::NOlap::NStorageOptimizer::IOptimizerPlannerConstructor::BuildDefault() /-S/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.h:204:23 #4 0x1ca4d66f in NKikimr::NOlap::TIndexInfo::DeserializeOptionsFromProto(NKikimrSchemeOp::TColumnTableSchemeOptions const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:209:40 #5 0x1ca53d3e in NKikimr::NOlap::TIndexInfo::DeserializeFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:250:5 #6 0x1ca6084b in NKikimr::NOlap::TIndexInfo::BuildFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:333:17 #7 0x25bfef6b in NKikimr::NOlap::TColumnEngineForLogs::RegisterSchemaVersion(NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:103:29 #8 0x25bfaa2e in NKikimr::NOlap::TColumnEngineForLogs::TColumnEngineForLogs(unsigned long, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:42:5 #9 0x2a2e4fea in make_unique &, std::__y1::shared_ptr &, std::__y1::shared_ptr &, const NKikimr::NOlap::TSnapshot &, const unsigned int &, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData, std::__y1::shared_ptr &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30 #10 0x2a2e4fea in NKikimr::NColumnShard::TTablesManager::AddSchemaVersion(unsigned int, NKikimr::NOlap::TSnapshot const&, NKikimrSchemeOp::TColumnTableSchema const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:282:24 #11 0x2a2e6877 in NKikimr::NColumnShard::TTablesManager::AddTableVersion(NKikimr::NColumnShard::TInternalPathId, NKikimr::NOlap::TSnapshot const&, NKikimrTxColumnShard::TTableVersionInfo const&, std::__y1::optional const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:320:9 #12 0x2a1b7c79 in NKikimr::NColumnShard::TColumnShard::RunEnsureTable(NKikimrTxColumnShard::TCreateTable const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:431:19 #13 0x2a1b65e0 in NKikimr::NColumnShard::TColumnShard::RunInit(NKikimrTxColumnShard::TInitShard const&, NKikimr::NOlap::TSnapshot const&, 
NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:373:9 #14 0x2a1b5d51 in NKikimr::NColumnShard::TColumnShard::RunSchemaTx(NKikimrTxColumnShard::TSchemaTxBody const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:328:13 #15 0xff6795e in NKikimr::NColumnShard::TSchemaTransactionOperator::ProgressOnExecute(NKikimr::NColumnShard::TColumnShard&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/transactions/operators/schema.h:94:19 #16 0x25d9236d in NKikimr::NColumnShard::TColumnShard::TTxProgressTx::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__progress_tx.cpp:80:13 #17 0x1852b274 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #18 0x184f09f6 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4143:9 #19 0x116159cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #20 0x2cd33274 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #21 0x2cd2bae9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #22 0x2cd35e63 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #23 0x3157b473 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #24 0x315597d2 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #25 0x315597d2 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #26 0x315586e7 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:97:5 #27 0x315709e7 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:502:9 #28 0xfff8fba in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:218:21 #29 0xfff5dfc in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #30 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #31 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xfff0857 in operator() 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 SUMMARY: AddressSanitizer: 2623722 byte(s) leaked in 61944 allocation(s). |98.8%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbWorkload::test [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |98.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] Test command err: 2025-04-17T19:00:12.615187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:00:12.615728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:00:12.615817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/000494/r3tmp/tmpfRRSav/pdisk_1.dat 2025-04-17T19:00:13.025412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.074452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:00:13.116242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:00:13.116370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:00:13.129246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:00:13.221472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T19:00:13.622918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.623035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.623116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:00:13.630846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T19:00:13.796928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T19:00:13.881768Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:00:18.200984Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2hyz5g9gbc1qjbzx1bdbsj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgyOTU2NmEtNWNlZGNjLTYxYTc5NjUzLWM2MGYxNzNl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:21.784777Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2hz3pmf6ps8cqedxdg5cc6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiYjE3NGMtNjEyODBjNTEtMjZiMzJjZTgtYzg5NDIzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:25.488766Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2hz76jbdj1b8kryct1y9pg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYzYmUyYjQtNDM1YThkZS02MDAyYjExNC1iOGIxNDcyYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:29.170438Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2hzasbbn7m890mbavedkkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY2MGU0NjgtZTI0ZWMxMTEtNDVlZTk3NDYtZDY5ZGQyOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:32.799022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2hzecwf6fysq5eht4t5tjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWYyNzJjMWItZTg5ZmFhYzAtNmEyY2I3MS1iNzc4OWQ5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:35.915593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2hzhygdwf7bpe8tjpnkwrz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkyOWM4NjgtMTY3YzcyMjgtYjk0NDY3NmYtYzliYzE3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:39.420497Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2hzmzr334e9n2fq1asd3ab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM3NmY1NDctZDllZWNhODgtMmFmZDFhM2YtZTE2ZTM2ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:43.136129Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2hzrcpajrn7s3jdd8v5b7w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI4OWIyY2MtMTgzMGUxZmItNjBhOTgwMGQtMjRkOTA0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:46.360838Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2hzw1rcjczwrx4gerec3sw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1NzJiZmMtZmYwN2IxZWYtN2NkOWQ0ZjAtYTE3MTU0YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:49.956752Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2hzz670p7p3c0bhmew9hxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIxMjBjYTItZmY1NTMzZWItM2E4M2M3MmQtN2QxMzEwZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:53.649384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2j02nv6d5mejhwm9twpn3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVmODZkYzUtYWIyNGI3M2QtZDlmYzFlZDEtMTJjYzU2MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:00:57.214506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01js2j06a71b85mfjdqac1z785, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI1OWYzN2EtYWJiOGYxOGYtNzY3MTRhOGYtYTMxYWY5M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:01:00.694621Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2j09sf7ssbavfws5e4zqfq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA1NDYyODUtZGZlOWRjNTItM2JmYjdlYjctMzBkMjhjYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:01:04.852576Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2j0d69664egecrkjgrk892, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTMzY2NjZDYtNTAwNDBmZDYtMmE5NzY2MGItODMxYWQ5NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:01:09.157803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01js2j0h886x2dh1vhvgxdmt52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc4NDdkMzYtNmMzY2IwMTMtNWJlYjRiODktODYzOTdkMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:01:12.845749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2j0ndxez4x7x2cdcze90sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q5OTA2MGMtNzFmNWU0NGYtM2FjMTAzMGEtMzc4NmE0NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:01:16.290522Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2j0s343883da5jd1vkmn1w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQxYzU2ZTctZGI5OGExM2EtNjY4ZTFmN2ItYWMxMjZhNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:01:19.731543Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2j0wdk2jb833tvn1785bkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWY5Njg4OTAtZDAzZTc0YzAtY2Y3ZDI0Y2QtMWQ3OTgyYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
waiting for stats after upsert 2025-04-17T19:01:21.835474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:01:21.835541Z node 1 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 18875828 RowCount: 18 IndexSize: 0 InMemSize: 2097888 LastAccessTime: 1507 LastUpdateTime: 1507 ImmediateTxCompleted: 18 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 18 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 596 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4696 Memory: 2223139 Storage: 16778930 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 18875828 RowCount: 18 IndexSize: 0 InMemSize: 2097888 LastAccessTime: 1507 LastUpdateTime: 1507 ImmediateTxCompleted: 18 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 18 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 596 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1307 Memory: 126163 Storage: 18876266 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T19:01:27.575025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2j147s0nq1a7h546x7w6n1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2M0N2U1ODUtNzU5OWJiZjItM2U0ZDdlN2MtYjIxNDQ4NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:01:31.065847Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01js2j17ed5m53190xca2rc1gh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY4M ... .295401Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:05:00.295503Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:05:00.306689Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:05:00.386030Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T19:05:00.628171Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:05:00.628266Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:05:00.628321Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:05:00.631924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-17T19:05:00.783227Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-17T19:05:00.816529Z node 5 :TX_PROXY ERROR: Actor# [5:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:05:03.312292Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01js2j7qekca13d6cp18cjcnyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=N2JhYWE2NTAtZDE0MTEyMWQtZTE0YTA0NDItNjFmMTIzM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:05.591456Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01js2j7t3139p9n8qe2p6e7jqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NGYwMTIyMWQtYTkzZGI5Zi1kNGMzODQ5Yi1jMzk2MDk5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:07.913882Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01js2j7wb1723g26kzpbxrr2k6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmM3NmNkNzItM2FmYzM0ZTgtMzdkMzI1NWYtNjQzODEwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:10.231994Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01js2j7yjt8kent6cckgznej15, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OWUwYjJhYzMtN2UyODI0YWUtNjIwZmZmYzktYjNhZDA3ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:12.643173Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01js2j80w16pb357hbb85jdr0v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjBiODE4ZDEtNmNjZWE0ODItODc3MGI5NjUtYjkwNTUzOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:14.931459Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01js2j837pdgkqbh1151q0fx1e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTYyNWFlMzUtZGIzODg0ZDMtZGM3MzUzYS1kZWYxY2E3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:17.173142Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01js2j85e31esq0f6bk0dr0ng7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Nzc0N2IzN2EtNzlkOWY0NjYtZWYzMDJjMmEtNzcyYWEzZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:19.392562Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01js2j87mwakwsfqyhqnj5yh95, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjM5ZTU2MWItYzczNDFjNjItZTdkMzQ3M2YtODJlZTI4OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:21.826024Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01js2j89snbazzzkvqkyyfp25s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjcyMThhNmItYTc0ZjYxODYtNWNhZDY0M2ItMjY4NjNlYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:24.088488Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01js2j8c6tctjgmnbyd1nzhpwf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDEyMjJjMjQtOGZjNmU2NTMtMTNhMWNlNTItZjIxODIzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-04-17T19:05:26.108598Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:05:26.108642Z node 5 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487392 RowCount: 10 IndexSize: 0 InMemSize: 10487392 LastAccessTime: 1519 LastUpdateTime: 1519 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1915 Memory: 10609800 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487392 RowCount: 10 IndexSize: 0 InMemSize: 10487392 LastAccessTime: 1519 LastUpdateTime: 1519 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 519 Memory: 125395 Storage: 10486922 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1519 LastUpdateTime: 1519 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 10485840 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 519 Memory: 125395 Storage: 10486922 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T19:05:32.212178Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01js2j8m2ya7vy4g7zewfwhcps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Mzk4MzNmN2UtZTFlYjA1Y2MtMTVjNzg2ZDQtNWNmYzIyM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:34.533989Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715671. 
Ctx: { TraceId: 01js2j8paw7a8f9xyh8y32ycdj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTQ4YzgwZTQtODY0OWViMzgtZWEyMjY1YzQtZGI4ZjBiMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:36.796115Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01js2j8rkj8jftgeshhskg3nm3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ODY1M2JjNzctYmFhYmMwOTQtM2QyZjFjZi1iMDkyY2IwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:39.027277Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01js2j8tsc9c520dvjnmc33m5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YmE4MGRlZGYtYzdiMmU3NDctOGZkYmRhMWEtMzNmZDdiNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:41.327716Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01js2j8wzx4s0hn1rxgj5cgtdf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MWJmNWRjM2YtMjI1ODBlYzEtYTlmMDIxODYtYzA2YTNkYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:43.570730Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01js2j8z6zewnnwhag1wadhk5c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTNjNGI3YTYtYTA4ZTZlNGYtNjk2OTIyMzUtODE5MDU0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:45.740969Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01js2j91ds7nj0n927n7xedgp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YWRhN2NlOGItZDMzN2Q5ODctMmQ2MjU5OS1mYTNjNDk5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:47.982207Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01js2j93gw6gzb8f7a0634xd16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjgyNGRlNzQtOGJjNWE1My01MzAzOTIxZC01YTk5NzIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:50.204797Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01js2j95qs1s8b8ck9mqxtvnc6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZjNiM2Y0M2MtN2E0YTIwMjMtYmUyYWM5NmEtODk0YWNiY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:52.427585Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01js2j97wc0cgp7f6m8qqghh7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZDYxMjY3YWYtNzMwZjA2MWQtMmJhMTQ3OWItN2E2MjE4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-17T19:05:52.568690Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01js2j9a2k03nyt4c0m0k36sgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=N2FmYTEwMzUtYTM4MWM4MTUtZDAwMTRjN2QtNmFlNWI1ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |99.0%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-04-17T18:59:53.124067Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:59:53.211648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:59:53.236516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:59:53.236800Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:59:53.244345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:53.244584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:53.244839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:53.245013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:53.245139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:53.245265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:53.245391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:53.245527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:53.245713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:53.245845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:53.245971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:53.246079Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:53.271352Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:59:53.271504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:59:53.271559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:59:53.271724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:59:53.271904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:59:53.271980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:59:53.272019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:59:53.272109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:59:53.272170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:59:53.272208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:59:53.272235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:59:53.272412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:59:53.272483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:59:53.272523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:59:53.272557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:59:53.272649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:59:53.272698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:59:53.272739Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:59:53.272791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:59:53.272889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:59:53.272929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:59:53.272963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:59:53.273010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:59:53.273052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:59:53.273080Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:59:53.273472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-04-17T18:59:53.273568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-17T18:59:53.273667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-17T18:59:53.273757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-17T18:59:53.273939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:59:53.273994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:59:53.274038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:59:53.274247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:59:53.274298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:59:53.274330Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:59:53.274497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:59:53.274543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:59:53.274573Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:59:53.274743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:59:53.274798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:59:53.274839Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:59:53.274969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:59:53.275012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:59:53.275058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
tablet_id=9437184;self_id=[1:15675:17633];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T19:06:31.672220Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:06:31.672296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-04-17T19:06:31.672564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=217; 2025-04-17T19:06:31.672590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=260; 2025-04-17T19:06:31.676423Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:06:31.676485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=8; 2025-04-17T19:06:31.682206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=5653; 2025-04-17T19:06:31.688466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5329; 2025-04-17T19:06:31.688545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6283; 2025-04-17T19:06:31.688662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=76; 2025-04-17T19:06:31.688744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=52; 2025-04-17T19:06:31.688877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=95; 2025-04-17T19:06:31.688986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=75; 2025-04-17T19:06:31.689150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=128; 2025-04-17T19:06:31.689179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12662; 2025-04-17T19:06:31.693069Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:06:31.693130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=8; 2025-04-17T19:06:31.698757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5564; 2025-04-17T19:06:31.721093Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=22254; 2025-04-17T19:06:31.721199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=33; 2025-04-17T19:06:31.721256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-17T19:06:31.721293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-17T19:06:31.721331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-17T19:06:31.721367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-17T19:06:31.721447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2025-04-17T19:06:31.721489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-17T19:06:31.721565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=49; 2025-04-17T19:06:31.721599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-17T19:06:31.721653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=27; 2025-04-17T19:06:31.721740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2025-04-17T19:06:31.721811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=42; 2025-04-17T19:06:31.721841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=28673; 2025-04-17T19:06:31.721987Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111592960;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990344;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T19:06:31.722561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T19:06:31.722620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T19:06:31.722685Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T19:06:31.722726Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T19:06:31.722865Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T19:06:31.722917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:06:31.723061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-17T19:06:31.723144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744916098772;tx_id=18446744073709551615;;current_snapshot_ts=1744916394276; 2025-04-17T19:06:31.723187Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T19:06:31.723241Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T19:06:31.723276Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T19:06:31.723361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T19:06:31.726671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:06:31.728487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T19:06:31.730070Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T19:06:31.730106Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T19:06:31.730132Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T19:06:31.730170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T19:06:31.730242Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:06:31.730299Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-17T19:06:31.730357Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744916098772;tx_id=18446744073709551615;;current_snapshot_ts=1744916394276; 2025-04-17T19:06:31.730404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T19:06:31.730461Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T19:06:31.730504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T19:06:31.730591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-17T19:06:31.730648Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15675:17633];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; >> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] >> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] >> alter_compression.py::TestAlterCompression::test_all_supported_compression [FAIL] >> alter_compression.py::TestAlterCompression::test_availability_data >> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] >> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[9] >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] >> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[10] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] Test command err: 2025-04-17T18:59:45.190949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T18:59:45.191065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T18:59:45.191188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/26k2/0004f4/r3tmp/tmpSKAyST/pdisk_1.dat 2025-04-17T18:59:45.594283Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28683, node 1 2025-04-17T18:59:46.066117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T18:59:46.066200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T18:59:46.066239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T18:59:46.066968Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T18:59:46.076501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T18:59:46.182054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:46.182657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:46.197360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64401 2025-04-17T18:59:46.725772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T18:59:49.106817Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T18:59:49.140677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:49.140761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:49.177608Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T18:59:49.179386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:49.387762Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.389859Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.390389Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.390701Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.390785Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.390858Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.390911Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.390957Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.391007Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T18:59:49.548600Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T18:59:49.548705Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T18:59:49.561868Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T18:59:49.694083Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T18:59:49.740575Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T18:59:49.740684Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T18:59:49.775331Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T18:59:49.780612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T18:59:49.780818Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T18:59:49.780882Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T18:59:49.780979Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T18:59:49.781026Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T18:59:49.781083Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T18:59:49.781142Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T18:59:49.782495Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T18:59:49.813793Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:59:49.813881Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T18:59:49.820243Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T18:59:49.826091Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T18:59:49.826221Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T18:59:49.835242Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T18:59:49.853686Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T18:59:49.853745Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T18:59:49.853810Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T18:59:49.867412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T18:59:49.873647Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T18:59:49.873771Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T18:59:50.047309Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T18:59:50.224177Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T18:59:50.282986Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T18:59:51.224995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:51.225172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T18:59:51.306624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T18:59:51.405387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:51.405631Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:51.405884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:51.405968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:51.406042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:51.406164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:51.406302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:51.406424Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:51.406577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:51.406664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:51.406747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:51.406861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:51.427623Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:59:51.427747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 17T19:08:58.857472Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:08:58.857519Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:08:58.857538Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:00.116871Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:00.116941Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:00.116971Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:01.288651Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:01.288727Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:01.288761Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:02.519725Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:09:02.530190Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:02.530235Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:02.530258Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:03.718049Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:03.718109Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:03.718133Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:04.895993Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:09:04.896133Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:09:04.906725Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:04.906780Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:04.906803Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:06.117117Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:06.117176Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:06.117199Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2025-04-17T19:09:07.322411Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:07.322478Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:07.322508Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:08.492318Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:09:08.502996Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:08.503058Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:08.503104Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:09.814718Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:09.814781Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:09.814808Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:11.031432Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:09:11.031596Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:09:11.042037Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:11.042078Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:11.042104Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:12.238105Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:12.238156Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:12.238193Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:13.414506Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:13.414556Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:13.414578Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:14.639334Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:09:14.649684Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:14.649716Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:14.649739Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:16.016279Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:16.016330Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:16.016351Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:17.281741Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:09:17.281874Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:09:17.292275Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:17.292316Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:17.292338Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:18.506884Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:18.506949Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:18.506979Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:19.713448Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:19.713499Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:19.713524Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:20.919354Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:09:20.929782Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:20.929824Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:20.929849Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:22.269323Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:22.269385Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:22.269407Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:09:23.475060Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:09:23.475217Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:09:23.485783Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:09:23.485830Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:09:23.485852Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:09:24.714986Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:09:24.715038Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-04-17T19:09:24.715059Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. (TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: TBackTrace::Capture()+28 (0x18EA949C) TWithBackTrace::TWithBackTrace<>()+80 (0x18AD13D0) NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x18AA5A55) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x18AC25F7) std::__y1::__function::__func, void ()>::operator()()+280 (0x18ACD488) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19394ED6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1936D929) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x18ACC654) NUnitTest::TTestFactory::Execute()+2438 (0x1936F1F6) NUnitTest::RunMain(int, char**)+5213 (0x1938F44D) ??+0 (0x7FA2DAEEAD90) __libc_start_main+128 (0x7FA2DAEEAE40) _start+41 (0x163FC029) |99.2%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2025-04-17T18:59:51.795322Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:59:51.884118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:59:51.910728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:59:51.911074Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:59:51.919877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T18:59:51.920116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:51.920374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:51.920482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:51.920604Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:51.920713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:51.920829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:51.921011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:51.921139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:51.921234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:51.921337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:51.921434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:51.951105Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:59:51.951282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:59:51.951345Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:59:51.951540Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:59:51.951697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:59:51.951797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:59:51.951845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:59:51.951965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:59:51.952027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:59:51.952066Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:59:51.952101Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:59:51.952290Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:59:51.952361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:59:51.952402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:59:51.952436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:59:51.952523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:59:51.952585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:59:51.952634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:59:51.952664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:59:51.952753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:59:51.952795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:59:51.952827Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:59:51.952874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T18:59:51.952923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:59:51.952955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:59:51.953398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-17T18:59:51.953492Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-17T18:59:51.953578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-17T18:59:51.953658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-17T18:59:51.953836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:59:51.953913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:59:51.953952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:59:51.954137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:59:51.954200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:59:51.954230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:59:51.954412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:59:51.954475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:59:51.954513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:59:51.954706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:59:51.954747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:59:51.954782Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:59:51.954899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:59:51.954941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:59:51.954994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
LUMNSHARD INFO: tablet_id=9437184;self_id=[1:18362:20314];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T19:09:38.372052Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:09:38.372131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=9; 2025-04-17T19:09:38.372403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=224; 2025-04-17T19:09:38.372430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=267; 2025-04-17T19:09:38.375689Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:09:38.375749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=7; 2025-04-17T19:09:38.379107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3303; 2025-04-17T19:09:38.382846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=2827; 2025-04-17T19:09:38.382924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=3765; 2025-04-17T19:09:38.383025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=61; 2025-04-17T19:09:38.383104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=49; 2025-04-17T19:09:38.383221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=88; 2025-04-17T19:09:38.383312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=62; 2025-04-17T19:09:38.383468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=122; 2025-04-17T19:09:38.383506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=7715; 2025-04-17T19:09:38.389435Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:09:38.389493Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=7; 2025-04-17T19:09:38.415690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=26123; 2025-04-17T19:09:38.458236Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=42444; 2025-04-17T19:09:38.458340Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=31; 2025-04-17T19:09:38.458397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=20; 2025-04-17T19:09:38.458434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-17T19:09:38.458469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-04-17T19:09:38.458503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-04-17T19:09:38.458564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2025-04-17T19:09:38.458601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-17T19:09:38.458676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2025-04-17T19:09:38.458715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-17T19:09:38.458769Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=27; 2025-04-17T19:09:38.458846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=45; 2025-04-17T19:09:38.458935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=50; 2025-04-17T19:09:38.458969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=69436; 2025-04-17T19:09:38.459131Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=128666812;raw_bytes=191860710;count=63;records=1845000} inactive {blob_bytes=248617552;raw_bytes=364986346;count=132;records=3543273} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T19:09:38.459638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:18362:20314];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T19:09:38.459691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:18362:20314];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T19:09:38.459750Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:18362:20314];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T19:09:38.459791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T19:09:38.459971Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T19:09:38.460027Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:09:38.460209Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-17T19:09:38.460266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744916098986;tx_id=18446744073709551615;;current_snapshot_ts=1744916392960; 2025-04-17T19:09:38.460304Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T19:09:38.460343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:38.460374Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:38.460453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T19:09:38.464679Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:09:38.469575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T19:09:38.471135Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T19:09:38.471170Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T19:09:38.471188Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T19:09:38.471228Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T19:09:38.471285Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:09:38.471521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-17T19:09:38.471582Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744916098986;tx_id=18446744073709551615;;current_snapshot_ts=1744916392960; 2025-04-17T19:09:38.471623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T19:09:38.471683Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:38.471718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:38.471786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s; 2025-04-17T19:09:38.471830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18362:20314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2025-04-17T18:59:52.134444Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T18:59:52.236387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T18:59:52.262843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T18:59:52.263175Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T18:59:52.271756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2025-04-17T18:59:52.272008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T18:59:52.272248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T18:59:52.272381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T18:59:52.272524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T18:59:52.272644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T18:59:52.272778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T18:59:52.272982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T18:59:52.273114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T18:59:52.273222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T18:59:52.273326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T18:59:52.273447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T18:59:52.305867Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T18:59:52.306024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T18:59:52.306086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T18:59:52.306303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:59:52.306467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T18:59:52.306555Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T18:59:52.306610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T18:59:52.306710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T18:59:52.306799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T18:59:52.306851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T18:59:52.306883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T18:59:52.307087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T18:59:52.307166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T18:59:52.307212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T18:59:52.307249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T18:59:52.307343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T18:59:52.307417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T18:59:52.307492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T18:59:52.307527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T18:59:52.307629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T18:59:52.307674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T18:59:52.307707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T18:59:52.307773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-17T18:59:52.307841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T18:59:52.307894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T18:59:52.308299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-17T18:59:52.308385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-17T18:59:52.308461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-17T18:59:52.308530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-17T18:59:52.308730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T18:59:52.308804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T18:59:52.308847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T18:59:52.309072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T18:59:52.309132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T18:59:52.309164Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T18:59:52.309353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T18:59:52.309409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T18:59:52.309444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T18:59:52.309657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T18:59:52.309714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T18:59:52.309745Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T18:59:52.309885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T18:59:52.309933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T18:59:52.309983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LUMNSHARD INFO: tablet_id=9437184;self_id=[1:18368:20319];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1501;stage=finished; 2025-04-17T19:09:45.216603Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:09:45.216683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=9; 2025-04-17T19:09:45.216961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=222; 2025-04-17T19:09:45.216994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=275; 2025-04-17T19:09:45.221416Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:09:45.221474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=8; 2025-04-17T19:09:45.224969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3437; 2025-04-17T19:09:45.228881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=2904; 2025-04-17T19:09:45.228958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=3933; 2025-04-17T19:09:45.229070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=66; 2025-04-17T19:09:45.229157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=55; 2025-04-17T19:09:45.229285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=98; 2025-04-17T19:09:45.229385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=69; 2025-04-17T19:09:45.229553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=134; 2025-04-17T19:09:45.229585Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=8075; 2025-04-17T19:09:45.233871Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-17T19:09:45.233918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=6; 2025-04-17T19:09:45.260382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=26398; 2025-04-17T19:09:45.304205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=43729; 2025-04-17T19:09:45.304323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=36; 2025-04-17T19:09:45.304380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-17T19:09:45.304417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-17T19:09:45.304454Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-17T19:09:45.304490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-17T19:09:45.304553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2025-04-17T19:09:45.304590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-17T19:09:45.304664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-04-17T19:09:45.304698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-17T19:09:45.304752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=27; 2025-04-17T19:09:45.304826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=43; 2025-04-17T19:09:45.304892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=40; 2025-04-17T19:09:45.304922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=70966; 2025-04-17T19:09:45.305089Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=128667156;raw_bytes=191860710;count=63;records=1845000} inactive {blob_bytes=248617344;raw_bytes=364986346;count=132;records=3543273} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-17T19:09:45.305602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:18368:20319];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-17T19:09:45.305654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:18368:20319];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-17T19:09:45.305723Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-17T19:09:45.305770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-17T19:09:45.306009Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T19:09:45.306068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:09:45.306281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-17T19:09:45.306355Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744916099333;tx_id=18446744073709551615;;current_snapshot_ts=1744916393307; 2025-04-17T19:09:45.306408Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T19:09:45.306463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:45.306501Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:45.306591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T19:09:45.311062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:09:45.315869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-17T19:09:45.318119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-17T19:09:45.318168Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-17T19:09:45.318211Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-17T19:09:45.318254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T19:09:45.318322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:09:45.318546Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-17T19:09:45.318615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=1744916099333;tx_id=18446744073709551615;;current_snapshot_ts=1744916393307; 2025-04-17T19:09:45.318662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T19:09:45.318707Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:45.318748Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T19:09:45.318837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-17T19:09:45.318891Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:18368:20319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; |99.4%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[11] |99.4%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-04-17 19:09:52,312 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 19:09:52,485 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 972878 736M 741M 653M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/26k2/00052d/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 973665 2.7G 2.7G 2.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/26k2/00052d/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_m 975827 460M 460M 426M └─ moto_server s3 --port 13103 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ..._migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/26k2/00052d', '--source-root', '/home/runner/.ya/build/build_root/26k2/00052d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/00052d/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("..._migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/26k2/00052d', '--source-root', 
'/home/runner/.ya/build/build_root/26k2/00052d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/00052d/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete 2025-04-17 19:09:52,239 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 19:09:52,567 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 972873 643M 647M 559M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/26k2/000534/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 973670 7.2G 7.2G 6.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/26k2/000534/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_de 975891 395M 395M 361M └─ moto_server s3 --port 7651 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 265, in test_ttl_delete if not self.wait_for(lambda: self.portions_actualized_in_sys(self.table), 120): File "ydb/tests/olap/ttl_tiering/base.py", line 86, in wait_for if condition_func(): File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 265, in if not self.wait_for(lambda: self.portions_actualized_in_sys(self.table), 120): File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 23, in portions_actualized_in_sys logger.info(f"portions: {portions}, blobs: {table.get_blob_stat_by_tier()}") File "ydb/tests/olap/common/column_table_helper.py", line 41, in get_blob_stat_by_tier results = self.ydb_client.query(stmt) File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in 
retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ring/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/26k2/000534', '--source-root', '/home/runner/.ya/build/build_root/26k2/000534/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/000534/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ring/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', 
'/home/runner/.ya/build/build_root/26k2/000534', '--source-root', '/home/runner/.ya/build/build_root/26k2/000534/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/000534/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) |99.5%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[13] >> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[14] >> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] >> test_tpch.py::TestTpchS1::test_tpch[15] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[15] [FAIL] 2025-04-17 19:10:41,189 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 19:10:41,741 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 987088 631M 634M 550M ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/26k2/00051a/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 987469 10.1G 10.1G 9.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/26k2/00051a/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/ 990573 4.6G 4.6G 4.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/26k2/00051a/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/ 1010466 128M 131M 64.6M └─ ydb -e grpc://localhost:10095 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/26k2/00051a/ydb/tests/functional/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File 
"contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/load/lib/tpch.py", line 49, in test_tpch self.run_workload_test(self._get_path(), query_num) File "ydb/tests/olap/load/lib/conftest.py", line 266, in run_workload_test result = YdbCliHelper.workload_run( File "ydb/tests/olap/lib/ydb_cli.py", line 320, in workload_run ).process() File "ydb/tests/olap/lib/ydb_cli.py", line 293, in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False) File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute res.wait(check_exit_code, timeout, on_timeout) File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait _wait() File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...tTpchS1::test_tpch[8]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[9]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[10]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[11]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[12]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[13]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[14]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[15]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[16]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[17]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[18]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[19]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[20]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[21]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[22]', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 
'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...tTpchS1::test_tpch[8]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[9]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[10]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[11]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[12]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[13]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[14]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[15]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[16]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[17]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[18]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[19]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[20]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[21]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[22]', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {}) |99.7%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-04-17 19:10:40,964 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 19:10:41,341 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
pid rss ref pdirt
987038 597M 600M 515M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/26k2/00043b/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module
987492 11.3G 11.3G 10.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/26k2/00043b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpc
990587 488M 489M 454M └─ moto_server s3 --port 1692
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/s3_import/test_tpch_import.py", line 97, in test_import_and_export
    self.ydb_client.query("INSERT INTO s3_table SELECT * FROM lineitem")
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...import', '--basetemp', '/home/runner/.ya/build/build_root/26k2/00043b/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/26k2/00043b/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/26k2/00043b', '--source-root', '/home/runner/.ya/build/build_root/26k2/00043b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/00043b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...import', '--basetemp', '/home/runner/.ya/build/build_root/26k2/00043b/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/26k2/00043b/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/26k2/00043b', '--source-root', '/home/runner/.ya/build/build_root/26k2/00043b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/00043b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.8%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data
2025-04-17 19:10:41,089 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-17 19:10:41,318 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
987068 738M 745M 659M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/26k2/000169/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor
987473 6.0G 6.0G 5.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/26k2/000169/ydb/tests/olap/column_family/compression/test-results/py3test/testing_ou
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/column_family/compression/alter_compression.py", line 169, in test_availability_data
    self.upsert_and_wait_portions(test_table, single_upsert_rows_count, upsert_rows_count)
  File "ydb/tests/olap/column_family/compression/alter_compression.py", line 47, in upsert_and_wait_portions
    if not self.wait_for(
  File "ydb/tests/olap/column_family/compression/base.py", line 48, in wait_for
    if condition_func():
  File "ydb/tests/olap/column_family/compression/alter_compression.py", line 48, in
    lambda: table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == number_rows_for_insert * count_upsert, plain_or_under_sanitizer(70, 140)
  File "ydb/tests/olap/common/column_table_helper.py", line 26, in get_portion_stat_by_tier
    results = self.ydb_client.query(
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/26k2/000169/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/26k2/000169', '--source-root', '/home/runner/.ya/build/build_root/26k2/000169/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/000169/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/26k2/000169/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/26k2/000169', '--source-root', '/home/runner/.ya/build/build_root/26k2/000169/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/26k2/000169/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
2025-04-17 19:10:40,840 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-17 19:10:40,967 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
987014 1.0G 1.0G 958M ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/26k2/00016a/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module
1003532 2.0G 2.0G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/26k2/00016a/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "/home/runner/.ya/build/build_root/26k2/00016a/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test
    ctx.executable(self, ctx)
  File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert
    thread2.join_all()
  File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all
    thread.join(timeout=timeout)
  File "ydb/tests/olap/common/thread_helper.py", line 16, in join
    super().join(timeout)
  File "contrib/tools/python3/Lib/threading.py", line 1149, in join
    self._wait_for_tstate_lock()
  File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock
    if lock.acquire(block, timeout):
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...b/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter', 'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...b/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter', 'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test
|99.9%| CLEANING BUILD ROOT
ydb/tests/fq/streaming_optimize [size:medium] nchunks:4
------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:102.49s - recipes:0.55s test:101.31s recipes:0.51s)
[fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (11.67s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12knvgkn/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa8f2f0f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa8f2f09500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa8f2792d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa8f2890464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa8f2890464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa8f2890464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa8f2791b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa8f283e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa8f2736e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa8f2736e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa8f2736c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa8f288de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa8f288de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (13.42s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmri0hak/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7feb37a7f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7feb37a79500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7feb37302d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7feb37400464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7feb37400464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7feb37400464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7feb37301b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7feb373ae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7feb372a6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7feb372a6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7feb372a6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7feb373fde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7feb373fde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (13.47s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gvxl8ush/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0a04c5f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0a04c59500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0a044e2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0a045e0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0a045e0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0a045e0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0a044e1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0a0458e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0a04486e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0a04486e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0a04486c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0a045dde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0a045dde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (13.92s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8093loe7/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f136494f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f1364949500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f13641d2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f13642d0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f13642d0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f13642d0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f13641d1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f136427e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f1364176e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f1364176e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f1364176c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f13642cde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f13642cde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (14.06s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z_l0ged_/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2320dff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2320df9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2320682d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2320780464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2320780464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2320780464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2320681b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f232072e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2320626e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2320626e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2320626c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f232077de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f232077de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (16.15s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mo82wkvr/topic_3.txt' has failed with code 100.
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9be776f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9be7769500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9be6ff2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9be70f0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9be70f0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9be70f0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9be6ff1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9be709e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9be6f96e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9be6f96e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9be6f96c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9be70ede0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9be70ede0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (15.25s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1f_vdaw/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7fc9baf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7fc9ba9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7fc9432d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7fc9530464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7fc9530464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7fc9530464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7fc9431b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7fc94de90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7fc93d6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7fc93d6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7fc93d6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7fc952de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7fc952de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:101.09s - recipes:0.58s test:99.88s recipes:0.53s) [fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (11.48s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = 
[fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (11.48s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_blrgcwp7/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2bf959f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2bf9599500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2bf8e22d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2bf8f20464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2bf8f20464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2bf8f20464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2bf8e21b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2bf8ece90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2bf8dc6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2bf8dc6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2bf8dc6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2bf8f1de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2bf8f1de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (12.85s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_irowwbl9/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb05393f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb053939500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb0531c2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb0532c0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb0532c0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb0532c0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb0531c1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb05326e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb053166e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb053166e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb053166c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb0532bde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb0532bde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (12.88s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9tm7yo9w/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f69baebf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f69baeb9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f69ba742d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f69ba840464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f69ba840464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f69ba840464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f69ba741b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f69ba7ee90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f69ba6e6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f69ba6e6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f69ba6e6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f69ba83de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f69ba83de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (13.88s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5lc_72x7/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fbf72bef9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fbf72be9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fbf72472d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fbf72570464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fbf72570464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fbf72570464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fbf72471b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fbf7251e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fbf72416e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fbf72416e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fbf72416c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fbf7256de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fbf7256de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (14.11s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzxe8pfk/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f07134cf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f07134c9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0712d52d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0712e50464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0712e50464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0712e50464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0712d51b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0712dfe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0712cf6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0712cf6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0712cf6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0712e4de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0712e4de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (16.22s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_obje1rfh/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f3e8649f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f3e86499500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f3e85d22d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f3e85e20464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f3e85e20464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f3e85e20464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f3e85d21b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f3e85dce90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f3e85cc6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f3e85cc6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f3e85cc6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f3e85e1de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f3e85e1de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (15.00s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_afyazefa/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f3d157bf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f3d157b9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f3d15042d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f3d15140464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f3d15140464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f3d15140464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f3d15041b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f3d150ee90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f3d14fe6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f3d14fe6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f3d14fe6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f3d1513de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f3d1513de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:99.76s - recipes:0.61s test:98.37s recipes:0.61s)
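Note: the leaked frames are identical in every report above (Python's own import machinery loading sitecustomize via __Pyx_Import at interpreter startup) and the totals never vary (423744 bytes in 8243 allocations), so the leak comes from runtime initialization rather than from the individual queries. LeakSanitizer supports suppression files with standard "leak:<pattern>" entries; a sketch of rerunning one failing fqrun command under such a suppression to separate the known startup leak from anything test-specific. The suppression patterns are an assumption inferred from the leaked frames, and whether suppressing is acceptable here is a policy question:

    # Sketch: rerun a single failing fqrun invocation with an LSan suppressions
    # file. LSAN_OPTIONS and "leak:<pattern>" syntax are standard sanitizer
    # behavior; the chosen patterns are an assumption.
    import os, subprocess, tempfile

    SUPPRESSIONS = """\
    leak:PyImport_ImportModuleLevelObject
    leak:__pyx_pymod_exec_sitecustomize
    """

    def rerun_with_lsan_suppressions(fqrun_cmd: list[str]) -> int:
        with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
            f.write(SUPPRESSIONS)
            supp_path = f.name
        env = dict(os.environ)
        env["LSAN_OPTIONS"] = f"suppressions={supp_path}:print_suppressions=1"
        return subprocess.run(fqrun_cmd, env=env).returncode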
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (11.40s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xqwn2j1c/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd09df5f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd09df59500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd09d7e2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd09d8e0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd09d8e0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd09d8e0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd09d7e1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd09d88e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd09d786e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd09d786e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd09d786c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd09d8dde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd09d8dde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (13.39s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7w9fsnil/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f99f7bbf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f99f7bb9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f99f7442d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f99f7540464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f99f7540464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f99f7540464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f99f7441b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f99f74ee90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f99f73e6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f99f73e6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f99f73e6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f99f753de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f99f753de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (13.08s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s7nzbq26/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fba7eb0f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fba7eb09500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fba7e392d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fba7e490464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fba7e490464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fba7e490464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fba7e391b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fba7e43e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fba7e336e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fba7e336e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fba7e336c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fba7e48de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fba7e48de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (13.66s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bl9_c6lu/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0cd825f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0cd8259500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0cd7ae2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0cd7be0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0cd7be0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0cd7be0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0cd7ae1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0cd7b8e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0cd7a86e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0cd7a86e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0cd7a86c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0cd7bdde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0cd7bdde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
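Note on the invocation pattern: every failing test runs the same fqrun argv, differing only in the per-test work directory and the four --emulate-pq=<name>@<file> topic fixtures (two inputs, two outputs). A hypothetical helper that rebuilds that argv; the flags are copied verbatim from the logged commands, while the function and its parameters are invented for illustration and are not part of the ydb repo:

    # Hypothetical tooling: reconstructs the fqrun argv seen in the failures
    # above from a per-test work dir.
    from pathlib import Path

    def build_fqrun_argv(fqrun: str, work: Path, udfs_dir: Path) -> list[str]:
        topics = ["test_topic_input", "test_topic_input2",
                  "test_topic_output", "test_topic_output2"]
        argv = [
            fqrun, "--exclude-linked-udfs", "--action=explain",
            f"--cfg={work / 'fq_config.conf'}",
            f"--result-file={work / 'results.txt'}",
            f"--ast-file={work / 'ast.txt'}",
            f"--plan-file={work / 'plan.json'}",
            f"--log-file={work / 'log.txt'}",
            f"--udfs-dir={udfs_dir}",
            "--result-format=full-proto", "--canonical-output",
            f"--query={work / 'query_0.sql'}",
        ]
        # Each topic name is bound to a fixture file: --emulate-pq=<name>@<path>
        argv += [f"--emulate-pq={t}@{work / f'topic_{i}.txt'}"
                 for i, t in enumerate(topics)]
        return argv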
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j5z6g0l0/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j5z6g0l0/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j5z6g0l0/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j5z6g0l0/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j5z6g0l0/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j5z6g0l0/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f15a28ff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f15a28f9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f15a2182d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f15a2280464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f15a2280464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f15a2280464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f15a2181b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f15a222e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f15a2126e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f15a2126e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f15a2126c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f15a227de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f15a227de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (14.73s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dsh5do_5/topic_3.txt' has failed with code 100. 
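Every failure in this suite has the same shape: fqrun completes its explain action but exits with code 100 because LeakSanitizer reports leaked allocations at interpreter shutdown, and the yatest wrapper then converts any sanitizer report into an ExecutionError even though the test passed check_exit_code=False (the traceback shows _finalise() calling verify_sanitize_errors() regardless of the exit-code policy). A minimal sketch of that detection pattern, with hypothetical names; the real logic lives in library/python/testing/yatest_common/yatest/common/process.py:

    import re
    import subprocess

    # Matches the final line of an ASan/LSan report, e.g.
    # "SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)."
    SANITIZER_SUMMARY = re.compile(r"SUMMARY: \w+Sanitizer:")

    class ExecutionError(RuntimeError):
        """Raised when a child process emits a sanitizer report."""

    def execute_checked(cmd: list[str]) -> subprocess.CompletedProcess:
        # check_exit_code=False semantics: a non-zero exit alone is tolerated...
        res = subprocess.run(cmd, capture_output=True, text=True)
        # ...but a sanitizer summary anywhere in the output still fails the run.
        if SANITIZER_SUMMARY.search(res.stderr) or SANITIZER_SUMMARY.search(res.stdout):
            raise ExecutionError(
                f"Command {cmd[0]} produced sanitizer errors (exit code {res.returncode})"
            )
        return res

The split policy makes sense for a test harness: a deliberate non-zero exit can be a legitimate test outcome, while a sanitizer report never is.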
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f93aba6f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f93aba69500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f93ab2f2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f93ab3f0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f93ab3f0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f93ab3f0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f93ab2f1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f93ab39e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f93ab296e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f93ab296e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f93ab296c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f93ab3ede0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f93ab3ede0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (15.23s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ix2mowjn/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f51eddbf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f51eddb9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f51ed642d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f51ed740464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f51ed740464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f51ed740464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f51ed641b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f51ed6ee90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f51ed5e6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f51ed5e6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f51ed5e6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f51ed73de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f51ed73de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:97.68s - recipes:0.54s test:96.45s recipes:0.57s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (10.82s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inhtrk3t/topic_3.txt' has failed with code 100. 
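The command under test is assembled by ydb/tests/fq/tools/fqrun.py and, per the traceback, executed via yatest.common.process.execute(cmd.strip().split(), ...), so every artifact of a run (config, results, AST, plan, log, query, UDF directory, and each emulated PQ topic) is addressed by an absolute path under testing_out_stuff. A hedged sketch of how such an invocation could be composed; the flags are the ones visible in the log, while the builder function itself is hypothetical:

    from pathlib import Path

    def build_fqrun_cmd(fqrun: str, work_dir: Path, udfs_dir: Path,
                        topics: dict[str, Path]) -> list[str]:
        """Compose an fqrun 'explain' invocation shaped like the ones in this log."""
        cmd = [
            fqrun,
            "--exclude-linked-udfs",
            "--action=explain",
            f"--cfg={work_dir / 'fq_config.conf'}",
            f"--result-file={work_dir / 'results.txt'}",
            f"--ast-file={work_dir / 'ast.txt'}",
            f"--plan-file={work_dir / 'plan.json'}",
            f"--log-file={work_dir / 'log.txt'}",
            f"--udfs-dir={udfs_dir}",
            "--result-format=full-proto",
            "--canonical-output",
            f"--query={work_dir / 'query_0.sql'}",
        ]
        # Each emulated PQ topic is passed as --emulate-pq=<alias>@<backing file>.
        cmd += [f"--emulate-pq={alias}@{path}" for alias, path in topics.items()]
        return cmd

Building the argument list directly also sidesteps the main fragility of cmd.strip().split(): any path containing a space would silently be split into two arguments.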
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fce1217f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fce12179500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fce11a02d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fce11b00464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fce11b00464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fce11b00464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fce11a01b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fce11aae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fce119a6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fce119a6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fce119a6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fce11afde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fce11afde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (12.06s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ier6vbpp/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f196de0f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f196de09500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f196d692d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f196d790464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f196d790464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f196d790464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f196d691b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f196d73e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f196d636e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f196d636e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f196d636c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f196d78de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f196d78de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (13.02s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pmyxnxre/topic_3.txt' has failed with code 100. 
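Note that all of these failures carry a byte-identical summary, 423744 byte(s) leaked in 8243 allocation(s), with identical frame names; only the ASLR-randomized addresses differ from process to process. That strongly suggests one shared leak in Python runtime startup rather than dozens of distinct bugs. A small, hypothetical triage helper that makes this explicit by grouping reports on their leak signature:

    import re
    from collections import Counter

    # Final line of every report in this section, e.g.
    # "SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)."
    LEAK_SUMMARY = re.compile(
        r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
    )

    def leak_signature(report: str):
        """Collapse a leak report to its (bytes, allocations) totals.

        ASLR randomizes the hex addresses between processes, so the totals
        and frame names identify a leak, not the addresses.
        """
        m = LEAK_SUMMARY.search(report)
        return m.groups() if m else None

    def group_failures(reports):
        """Count how many failed tests share each leak signature."""
        return Counter(sig for r in reports if (sig := leak_signature(r)) is not None)

Applied to the reports in this section, this would yield a single bucket, ('423744', '8243'), mapped to the number of affected tests, confirming one root cause.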
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2a03c3f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2a03c39500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2a034c2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2a035c0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2a035c0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2a035c0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2a034c1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2a0356e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2a03466e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2a03466e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2a03466c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2a035bde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2a035bde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (13.67s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_agqkw2w3/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6a7412f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6a74129500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6a739b2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6a73ab0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6a73ab0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6a73ab0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6a739b1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6a73a5e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6a73956e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6a73956e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6a73956c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6a73aade0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6a73aade0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (12.10s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_js8klvv5/topic_3.txt' has failed with code 100. 
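The leaking frames are all inside the bundled Python's import machinery (__Pyx_Import in sitecustomize and PyImport_ImportModuleLevelObject), not in the code each test exercises. While the root cause is being chased, LeakSanitizer's standard suppression mechanism can keep a known leak from failing unrelated runs. The sketch below is a hypothetical local workaround, not something this pipeline is known to do, and the patterns are deliberately broad, so they would also hide any other import-time leak:

    import os
    import subprocess
    import tempfile

    # Suppress leaks whose stacks contain these frames (names taken from the
    # report above). "leak:<pattern>" is standard LSan suppression syntax.
    SUPPRESSIONS = "leak:__Pyx_Import\nleak:PyImport_ImportModuleLevelObject\n"

    def run_with_lsan_suppressions(cmd: list[str]) -> subprocess.CompletedProcess:
        with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
            f.write(SUPPRESSIONS)
        env = dict(os.environ)
        env["LSAN_OPTIONS"] = f"suppressions={f.name}:print_suppressions=0"
        return subprocess.run(cmd, env=env, capture_output=True, text=True)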
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa0b43ff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa0b43f9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa0b3c82d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa0b3d80464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa0b3d80464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa0b3d80464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa0b3c81b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa0b3d2e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa0b3c26e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa0b3c26e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa0b3c26c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa0b3d7de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa0b3d7de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (16.07s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4efpb2_v/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f774a6cf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f774a6c9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7749f52d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f774a050464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f774a050464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f774a050464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7749f51b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7749ffe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7749ef6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7749ef6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7749ef6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f774a04de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f774a04de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (15.35s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_thv9p7xg/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f919147f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9191479500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9190d02d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9190e00464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9190e00464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9190e00464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9190d01b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9190dae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9190ca6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9190ca6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9190ca6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9190dfde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9190dfde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize ydb/tests/functional/serverless [size:medium] nchunks:10 ------ [test_serverless.py 0/10] chunk ran 1 test (total:123.18s - test:122.74s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (115.98s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 1/10] chunk ran 1 test (total:120.49s - setup:0.03s test:119.99s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (113.42s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ FAIL: 2 - FAIL ydb/tests/functional/serverless
ydb/tests/functional/tpc/medium [size:medium]
------ [test_tpch.py] chunk ran 19 tests (total:611.54s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch.py::TestTpchS1::test_tpch[4] (good) duration: 321.15s
test_tpch.py::TestTpchS1::test_tpch[7] (good) duration: 52.86s
test_tpch.py::TestTpchS1::test_tpch[8] (good) duration: 51.60s
test_tpch.py::TestTpchS1::test_tpch[9] (good) duration: 45.36s
test_tpch.py::TestTpchS1::test_tpch[5] (good) duration: 35.15s
test_tpch.py::TestTpchS1::test_tpch[10] (good) duration: 24.33s
test_tpch.py::TestTpchS1::test_tpch[12] (good) duration: 22.90s
test_tpch.py::TestTpchS1::test_tpch[6] (good) duration: 15.53s
test_tpch.py::TestTpchS1::test_tpch[15] (fail) duration: 14.07s
test_tpch.py::TestTpchS1::test_tpch[14] (good) duration: 8.38s
2 more tests with 14.30s total duration are not listed.
7 tests were not launched inside chunk.
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr
[fail] test_tpch.py::TestTpchS1::test_tpch[15] [default-linux-x86_64-release-asan] (14.07s)
ydb/tests/olap/load/lib/tpch.py:49: in test_tpch self.run_workload_test(self._get_path(), query_num)
ydb/tests/olap/load/lib/conftest.py:278: in run_workload_test self.process_query_result(result, query_num, qparams.iterations, True)
ydb/tests/olap/load/lib/conftest.py:233: in process_query_result raise exc
ydb/tests/olap/lib/ydb_cli.py:293: in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:400: in wait _wait()
library/python/testing/yatest_common/yatest/common/process.py:335: in _wait pid, sts, rusage = os.wait4(self._process.pid, 0)
library/python/pytest/plugins/ya.py:347: in _graceful_shutdown _graceful_shutdown_on_log(not capman.is_globally_capturing())
library/python/pytest/plugins/ya.py:321: in _graceful_shutdown_on_log pytest.exit("Graceful shutdown requested")
E Failed: Graceful shutdown requested
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.15.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
------ TIMEOUT: 11 - GOOD, 1 - FAIL, 7 - NOT_LAUNCHED ydb/tests/functional/tpc/medium
ydb/tests/olap/column_family/compression [size:medium]
------ sole chunk ran 2 tests (total:619.62s - test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
alter_compression.py::TestAlterCompression::test_all_supported_compression (fail) duration: 405.21s
alter_compression.py::TestAlterCompression::test_availability_data (timeout) duration: 210.17s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr
[fail] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (405.21s)
ydb/tests/olap/column_family/compression/alter_compression.py:106: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == single_upsert_rows_count * upsert_count
E assert 1004794 == (100000 * 10)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
[timeout] alter_compression.py::TestAlterCompression::test_availability_data [default-linux-x86_64-release-asan] (210.17s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_availability_data.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - FAIL, 1 - TIMEOUT ydb/tests/olap/column_family/compression
ydb/tests/olap/s3_import [size:medium]
------ sole chunk ran 1 test (total:617.87s - test:600.04s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 613.54s
Info: Test run has exceeded 8.0G (8388608K) memory limit with 12.6G (13200212K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss    ref    pdirt
986975  44.9M  44.9M  6.2M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
987035  38.3M  26.6M  14.0M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
987038  597M   600M   515M      └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
987492  11.4G  11.4G  10.9G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/te
990587  494M   494M   459M         └─ moto_server s3 --port 1692
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (613.54s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 8 tests (total:620.32s - test:600.10s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 486.41s
test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 41.59s
test_alter_tiering.py::TestAlterTiering::test[many_tables] (fail) duration: 31.27s
test_simple.py::TestSimple::test[tablestores] (good) duration: 26.81s
test_simple.py::TestSimple::test[alter_table] (good) duration: 15.67s
test_simple.py::TestSimple::test[alter_tablestore] (good) duration: 7.36s
test_simple.py::TestSimple::test[table] (good) duration: 4.79s
test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] test was not launched inside chunk.
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 8 tests (total:620.32s - test:600.10s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 486.41s
test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 41.59s
test_alter_tiering.py::TestAlterTiering::test[many_tables] (fail) duration: 31.27s
test_simple.py::TestSimple::test[tablestores] (good) duration: 26.81s
test_simple.py::TestSimple::test[alter_table] (good) duration: 15.67s
test_simple.py::TestSimple::test[alter_tablestore] (good) duration: 7.36s
test_simple.py::TestSimple::test[table] (good) duration: 4.79s
test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
[fail] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (31.27s)
ydb/tests/olap/scenario/conftest.py:88: in test
    ctx.executable(self, ctx)
ydb/tests/olap/scenario/test_alter_tiering.py:297: in scenario_many_tables
    self._setup_tiering_test(ctx)
ydb/tests/olap/scenario/test_alter_tiering.py:161: in _setup_tiering_test
    self._override_external_data_source(sth, self.sources[-1], s3_config)
ydb/tests/olap/scenario/test_alter_tiering.py:177: in _override_external_data_source
    sth.execute_scheme_query(CreateExternalDataSource(path, config, True))
ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:430: in execute_scheme_query
    self._run_with_expected_status(
ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:357: in _run_with_expected_status
    pytest.fail(f'Unexpected status: must be in {repr(expected_status)}, but get {repr(error or status)}')
E   Failed: Unexpected status: must be in {}, but get SchemeError('message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 362 } message: "Executing CREATE OBJECT EXTERNAL_DATA_SOURCE" end_position { row: 1 column: 362 } severity: 1 issues { message: "
: Error: (NKikimr::NExternalSource::TExternalSourceException) External source with type ObjectStorage is disabled. Please contact your system administrator to enable it, code: 2003\\n" issue_code: 2003 severity: 1 } } (server_code: 400070)')
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
[timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (486.41s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 5 - GOOD, 1 - FAIL, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario
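The test_alter_tiering failure above is environmental rather than a test bug: the server rejects CREATE OBJECT EXTERNAL_DATA_SOURCE because the ObjectStorage source type is disabled on the cluster (code 2003, server_code 400070). A plausible cluster-config fragment that enables it is sketched below; the key names follow the shape of YDB's external-data-source settings, but treat them as an assumption to verify against the docs for the YDB version under test:

    # Assumed YDB cluster config fragment; verify key names for your version.
    feature_flags:
      enable_external_data_sources: true
    query_service_config:
      available_external_data_sources:
        - "ObjectStorage"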
ydb/tests/olap/ttl_tiering [size:medium] nchunks:2
------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:610.33s - test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 606.47s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (606.47s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ [ttl_delete_s3.py] chunk ran 2 tests (total:615.43s - test:600.03s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 611.51s
ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk.
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.3G (8656740K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
972709 44.9M 44.9M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
972865 36.5M 24.8M 12.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
972873 641M 645M 557M    └─ ydb-tests-olap-ttl_tiering --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
973670 7.2G 7.2G 6.6G       ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/
975891 395M 395M 361M       └─ moto_server s3 --port 7651
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (611.51s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering
------ sole chunk ran 1 test (total:262.76s - setup:0.04s test:262.56s)
Info: Test run has exceeded 32.0G (33554432K) memory limit with 35.6G (37324600K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
991936 44.9M 44.9M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
992011 33.3M 21.4M 8.7M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
992046 738M 743M 663M    └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:f
993042 3.2G 3.2G 2.7G       ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
993048 2.7G 2.7G 2.2G       ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
1005021 2.7G 0b 0b          │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test
993051 2.4G 0b 0b           ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
1005033 2.4G 0b 0b          │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test
993100 3.3G 3.4G 2.8G       ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
993165 2.7G 2.7G 2.3G       ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
1005071 2.7G 0b 0b          │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test
993185 2.4G 0b 0b           ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
1005024 2.4G 0b 0b          │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test
993265 3.2G 3.2G 2.7G       ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
993370 2.7G 0b 0b           ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
1005023 2.7G 0b 0b          │  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test
993403 2.5G 0b 0b           └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
1005070 2.5G 0b 0b             └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr
ydb/core/keyvalue/ut_trace [size:medium] nchunks:5
------ [0/5] chunk ran 1 test (total:12.43s - test:12.40s)
[fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.41s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out
------ [1/5] chunk ran 1 test (total:12.63s - test:12.60s)
[fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.38s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out
------ [2/5] chunk ran 1 test (total:12.53s - test:12.50s)
[fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.36s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out
------ [3/5] chunk ran 1 test (total:12.53s - test:12.50s)
[fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.35s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out
------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace
ydb/core/kqp/ut/cost [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:21.68s - test:21.65s)
[crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: 100)
==972531==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018e0e20d bp 0x7ffe5bf7e220 sp 0x7ffe5bf7e080 T0)
==972531==The signal is caused by a READ memory access.
==972531==Hint: address points to the zero page.
2025-04-17T19:00:07.311738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-17T19:00:07.311785Z node 1 :IMPORT WARN: Table profiles were not loaded
#0 0x18e0e20d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x18e0e20d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x18e0e20d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x18e0e20d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x18e0e20d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x18e33327 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x18e33327 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x18e33327 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#8 0x18e33327 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#9 0x18e33327 in std::__y1::__function::__func< ..[snippet truncated]..
0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#11 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#12 0x197855d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x19754f68 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#14 0x18e321d3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x19756835 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#16 0x1977fb4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7f11a26d2d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7f11a26d2e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x16571028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16571028) (BuildId: 1bdc4290ca22bc21b0df7e2b25fc7bf1adbeab0b)
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==972531==ABORTING
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out
------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost
ydb/core/kqp/ut/query [size:medium] nchunks:50
------ [2/50] chunk ran 1 test (total:60.16s - test:60.12s)
[fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (55.60s)
assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198368DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x19CFB43F
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x193D3478
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x193E6487
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x193E6487
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x193E6487
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19D32635
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19D32635
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19D32635
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x19D01FC8
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x193E560B
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19D03895
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19D2CBAC
15. ??:0: ?? @ 0x7FBA88B34D8F
16. ??:0: ?? @ 0x7FBA88B34E3F
17. ??:0: ?? @ 0x166DE028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out
------ FAIL: 2 - GOOD, 1 - FAIL ydb/core/kqp/ut/query
ydb/core/kqp/ut/tx [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:29.37s - test:29.30s)
[fail] KqpSinkMvcc::OlapMultiSinks [default-linux-x86_64-release-asan] (21.97s)
assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:588, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]"
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F
2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:588: CompareYson @ 0x4900DB47
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x1900EF4A
4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18FDD558
6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18FE36B7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FE36B7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FE36B7
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955
12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955
13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8
15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18FE2883
16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5
17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC
18. ??:0: ?? @ 0x7F446FB99D8F
19. ??:0: ?? @ 0x7F446FB99E3F
20. ??:0: ?? @ 0x16619028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.out
------ [1/50] chunk ran 1 test (total:28.14s - test:28.08s)
[fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (21.82s)
assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x1903269E
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x190112E8
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x19018767
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x19018767
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x19018767
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x19017933
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC
17. ??:0: ?? @ 0x7F0BBBD7AD8F
18. ??:0: ?? @ 0x7F0BBBD7AE3F
19. ??:0: ?? @ 0x16619028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out
------ FAIL: 2 - FAIL ydb/core/kqp/ut/tx
ydb/core/quoter/ut [size:medium]
------ sole chunk ran 1 test (total:134.31s - test:134.26s)
[fail] QuoterWithKesusTest::KesusRecreation [default-linux-x86_64-release-asan] (128.62s)
(TWithBackTrace) Event queue is still empty.ydb/library/actors/testlib/test_runtime.cpp:1375:
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:24
NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:0
__root at /-S/contrib/libs/cxxsupp/libcxx/include/__tree:966:54
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
TIntrusivePtr at /-S/util/generic/ptr.h:510:11
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.KesusRecreation.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.KesusRecreation.out
------ FAIL: 1 - FAIL ydb/core/quoter/ut
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:595.34s - test:595.31s)
[fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (583.53s)
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24
DoDestroy at /-S/util/generic/ptr.h:237:13
operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out
------ FAIL: 1 - FAIL ydb/core/statistics/aggregator/ut
ydb/core/tx/columnshard/ut_schema [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:184.83s - test:184.13s)
[crashed] TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [default-linux-x86_64-release-asan] (154.43s)
Test crashed (return code: 100)
==978999==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 56224 byte(s) in 2 object(s) allocated from:
#0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0x1cb33f22 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10
#2 0x1cb33f22 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
#3 0x1cb33f22 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
#4 0x1cb33f22 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19
#5 0x1cb33f22 in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:789:25
#6 0x1cb33f22 in void std::__y1::vector>::__assign_with_size[abi:fe190000](NKikimr::NOlap::TUnifiedBlobId*, NKikimr::NOlap::TUnifiedBlobId*, long) /-S/contrib/libs/cxxsupp/libcxx/include/vector:1378:5
#7 0x1cb2c3b5 in assign /-S/contrib/libs/cxxsupp/libcxx/include/vector:1359:3
#8 0x1cb2c3b5 in operator= /-S/contrib/libs/cxxsupp/libcxx/include/vector:1330:5
#9 0x1cb2c3b5 in NKikimr::NOlap::TPortionMetaConstructor::Build() /-S/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp:53:20
#10 0x1caedcc1 in ..[snippet truncated]..
() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#33 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#34 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#36 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#37 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#38 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#39 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#40 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#41 0x7fe8feb5bd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 3071440 byte(s) leaked in 54874 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.out
------ [1/60] chunk ran 1 test (total:185.01s - test:184.35s)
[crashed] TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [default-linux-x86_64-release-asan] (154.61s)
Test crashed (return code: 100)
==980030==ERROR: LeakSanitizer: detected memory leaks
Too many leaks! Only the first 5000 leaks encountered will be reported.
Indirect leak of 31192 byte(s) in 557 object(s) allocated from:
#0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0x2a2853a4 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10
#2 0x2a2853a4 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
#3 0x2a2853a4 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
#4 0x2a2853a4 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:281:16
#5 0x2a2853a4 in __construct_node /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1812:21
#6 0x2a2853a4 in std::__y1::pair*, long>, bool> std::__y1::__tree>::__emplace_unique_key_args(NKikimr::TLogoBlobID const&, NKikimr::TLogoBlobID const&) /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1779:25
#7 0x2a27c8b9 in __emplace_unique_extract_key /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1054:12
#8 0x2a27c8b9 in __emplace_unique /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1032:12
#9 0x2a27c8b9 in emplace, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#37 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#38 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#39 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#40 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#41 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#42 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#43 0x7f5f9f4abd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 2623722 byte(s) leaked in 61944 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.out
------ FAIL: 2 - CRASHED ydb/core/tx/columnshard/ut_schema
ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 1 test (total:156.82s - test:156.74s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (139.89s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out
------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:36.31s - test:36.25s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut
ydb/services/ydb/backup_ut [size:medium] nchunks:10
------ [2/10] chunk ran 1 test (total:12.26s - test:12.23s)
[fail] BackupRestoreS3::RestoreViewDependentOnAnotherView [default-linux-x86_64-release-asan] (6.08s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestViewDependentOnAnotherViewIsRestored(char const*, char const*, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewDependentOnAnotherView.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewDependentOnAnotherView.out
------ [3/10] chunk ran 1 test (total:12.40s - test:12.36s)
[fail] BackupRestoreS3::RestoreViewQueryText [default-linux-x86_64-release-asan] (6.23s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestViewQueryTextIsPreserved(char const*, NYdb::Dev::NView::TViewClient&, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewQueryText.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewQueryText.out
------ [4/10] chunk ran 1 test (total:13.05s - test:13.01s)
[fail] BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [default-linux-x86_64-release-asan] (5.05s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestRestoreTableWithIndex(char const*, char const*, NKikimrSchemeOp::EIndexType, bool, NYdb::Dev::NTable::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
NTestSuiteBackupRestoreS3::TTestCaseTestAllIndexTypes::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2659:5
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree.out
------ [5/10] chunk ran 1 test (total:13.00s - test:12.96s)
[fail] BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [default-linux-x86_64-release-asan] (6.24s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestPrimitiveType(Ydb::Type_PrimitiveTypeId, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:1392:27
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-FLOAT.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-FLOAT.out
------ [6/10] chunk ran 1 test (total:12.61s - setup:0.01s test:12.57s)
[fail] BackupRestoreS3::TestAllPrimitiveTypes-JSON [default-linux-x86_64-release-asan] (6.29s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestPrimitiveType(Ydb::Type_PrimitiveTypeId, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:1392:27
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-JSON.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllPrimitiveTypes-JSON.out
------ [7/10] chunk ran 1 test (total:12.35s - test:12.32s)
[fail] BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [default-linux-x86_64-release-asan] (6.07s)
assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17
NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9
~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9
(anonymous namespace)::TestViewOutputIsPreserved(char const*, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
NTestSuiteBackupRestoreS3::TTestCaseTestAllSchemeObjectTypes::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:17
operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllSchemeObjectTypes-EPathTypeView.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllSchemeObjectTypes-EPathTypeView.out
------ FAIL: 2 - GOOD, 6 - FAIL ydb/services/ydb/backup_ut
ydb/services/ydb/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:19.70s - test:19.67s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (4.57s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
Total 35 suites:
17 - GOOD
13 - FAIL
5 - TIMEOUT
Total 129 tests:
61 - GOOD
50 - FAIL
9 - NOT_LAUNCHED
5 - TIMEOUT
4 - CRASHED
Cache efficiency ratio is 97.83% (42852 of 43803). Local: 392 (0.89%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 42460 (96.93%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
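A note on reading the cache figures above: the efficiency ratio appears to be the share of build nodes whose results did not have to be produced in this run, i.e. (392 local cache hits + 42460 avoided) / 43803 total = 42852 / 43803 ≈ 97.83%, which matches the reported percentage. This decomposition is inferred from the numbers themselves, not taken from ya documentation.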
The longest 10 tasks:
[620722 ms] [TM] [rnd-m05dlx0daqime5dm asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1744916440364, finished: 1744917061086]
[620057 ms] [TM] [rnd-kmrn8xfen34qmugx asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1744916440603, finished: 1744917060660]
[618286 ms] [TM] [rnd-wd0feu0crdhhaap5 asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1744916440545, finished: 1744917058831]
[615874 ms] [TM] [rnd-12215387966356991298 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744916391796, finished: 1744917007670]
[611959 ms] [TM] [rnd-13901606543261824492 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1744916440744, finished: 1744917052703]
[610799 ms] [TM] [rnd-12190598770146504229 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744916391805, finished: 1744917002604]
[598511 ms] [TM] [rnd-2210384486149132711 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744916390272, finished: 1744916988783]
[595647 ms] [TM] [rnd-8165736765714388740 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1744916379003, finished: 1744916974650]
[591863 ms] [TM] [rnd-8424538017984403903 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744916390102, finished: 1744916981965]
[402692 ms] [TM] [rnd-17557341258771556584 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744916391500, finished: 1744916794192]
Total time by type:
[11219428 ms] [TM] [count: 914, ave time 12275.09 msec]
[   88018 ms] [prepare:get from local cache] [count: 392, ave time 224.54 msec]
[   79652 ms] [TS] [count: 10, ave time 7965.20 msec]
[   29675 ms] [prepare:AC] [count: 2, ave time 14837.50 msec]
[   25565 ms] [prepare:put to dist cache] [count: 382, ave time 66.92 msec]
[   21114 ms] [TA] [count: 27, ave time 782.00 msec]
[    7082 ms] [prepare:tools] [count: 16, ave time 442.62 msec]
[    6262 ms] [prepare:bazel-store] [count: 1, ave time 6262.00 msec]
[     733 ms] [prepare:clean] [count: 3, ave time 244.33 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 11320194 ms (100.00%)
Total run tasks time - 11320194 ms
Configure time - 25.0 s
Statistics overhead 1382 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json
Ok
+ echo 0
+ ./ya make .
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.91yrPoDpBb --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-release-asan Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution | 1.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering | 1.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit | 1.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat | 1.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut | 1.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut | 2.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs | 2.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk | 2.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut | 2.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain | 2.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark | 3.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant | 5.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test | 6.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects | 7.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator | 7.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut | 8.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a | 8.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut | 9.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations | 9.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap | 9.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut | 9.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |10.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |10.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |10.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |10.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |10.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |10.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |10.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |11.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |11.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |11.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |11.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |12.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |12.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |13.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |13.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |13.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |14.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |14.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |14.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/example/ydb-tests-example |14.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |14.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |14.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |14.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |15.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |15.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |15.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |15.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |15.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |15.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |15.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |15.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |15.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |16.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |15.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |15.5%| PREPARE $(FLAKE8_PY3-715603131) |15.6%| PREPARE $(TEST_TOOL_HOST-sbr:8444524403) |15.8%| PREPARE $(LLD_ROOT-3808007503) |15.9%| PREPARE $(JDK17-472926544) |16.2%| PREPARE $(WITH_JDK17-sbr:7832760150) |16.4%| PREPARE $(WITH_JDK-sbr:7832760150) |16.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |16.6%| PREPARE $(JDK_DEFAULT-472926544) |16.7%| PREPARE $(CLANG-1922233694) |16.9%| PREPARE $(CLANG-2518231432) |17.0%| PREPARE $(GDB) |17.1%| PREPARE $(YMAKE_PYTHON3-4256832079) |17.3%| PREPARE $(CLANG_FORMAT-1286082657) |17.4%| PREPARE $(CLANG16-1380963495) |17.6%| PREPARE $(CLANG18-3363451693) |17.8%| PREPARE $(PYTHON) |18.5%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |20.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |21.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |21.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |21.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |22.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |22.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |22.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |23.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |23.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |24.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |24.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |24.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |24.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |24.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |24.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |25.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |25.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |25.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |25.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |25.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |25.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |25.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |26.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |26.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |26.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |26.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |26.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |26.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |26.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |26.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |27.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |28.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |28.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |28.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |28.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |28.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |28.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |28.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |29.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |28.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |31.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |31.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |31.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |31.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |31.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |32.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |32.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |32.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |32.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |33.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |33.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |33.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |36.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |36.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |37.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |37.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |37.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |38.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |38.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |38.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |38.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |39.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |39.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |39.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |39.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |39.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |39.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |40.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |40.6%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |40.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |41.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |41.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |42.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |42.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |42.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |43.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |43.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |43.6%| CLEANING SYMRES |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |43.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |44.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |44.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |44.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |44.9%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |45.2%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |45.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |45.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |45.9%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |46.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |46.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |46.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |46.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |46.8%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |46.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |47.1%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |47.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |47.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |47.3%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |46.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |46.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |47.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |47.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |47.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |47.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |47.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |47.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |47.8%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |47.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |48.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |48.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |48.4%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |48.5%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |48.7%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |48.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |49.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |49.1%| [LD] 
{RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |49.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |49.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |48.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |48.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |49.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |49.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |49.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |49.3%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |49.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |49.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |49.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |49.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |50.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |50.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |50.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |50.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |50.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |50.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |50.8%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |50.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |51.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |51.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |51.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |51.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |51.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |51.9%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |52.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |52.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |52.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |52.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |52.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |52.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |51.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |52.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |52.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |52.6%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |52.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |52.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |53.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |53.1%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |53.3%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |53.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |53.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |53.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |53.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |53.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |53.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |54.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |54.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |54.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |54.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |54.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |54.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |54.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |54.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |54.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |54.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |54.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |55.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |55.6%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |55.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |56.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |56.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |57.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |57.1%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas >> TKeyValueTracingTest::WriteSmall >> TKeyValueTracingTest::ReadSmall |56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |56.5%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |56.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |57.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |57.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |57.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |58.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |58.1%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |58.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |58.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |58.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |58.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TKeyValueTracingTest::ReadHuge >> TKeyValueTracingTest::WriteHuge |58.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |58.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |58.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |58.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |58.9%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |59.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |59.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |59.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |60.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |60.4%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |60.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |60.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |60.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |60.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |60.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |61.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |61.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |60.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |60.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |60.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |60.8%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |61.1%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |61.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |61.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |61.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |61.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |61.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |61.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |62.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |62.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |62.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |62.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |62.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |63.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |63.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |63.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |63.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |63.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |63.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |64.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |64.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |64.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |65.1%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |65.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> AnalyzeColumnshard::AnalyzeRebootColumnShard |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |65.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |65.7%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |65.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |66.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |67.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |66.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |67.1%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |67.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |67.8%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |67.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |68.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |64.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |64.9%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |64.9%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |65.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |65.1%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |65.1%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |65.2%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |55.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |56.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |56.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |56.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |56.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |56.2%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |56.3%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |56.4%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |56.4%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |56.5%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |56.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |56.6%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |56.7%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |56.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |56.9%| COMPACTING CACHE 905.5GiB |56.9%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |57.0%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |57.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |57.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |57.2%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |57.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |57.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |57.3%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |57.4%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |57.5%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |57.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |57.6%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |57.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |57.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |57.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |57.8%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |57.9%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |58.0%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |58.0%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |58.1%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |58.1%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |58.2%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |58.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |58.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |58.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |58.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |58.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |58.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |58.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |58.7%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |58.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |58.8%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |58.9%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |58.9%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |59.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |59.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |59.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |59.2%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |59.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |59.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |59.4%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |59.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |59.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |59.5%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |59.6%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |59.7%| [LD] {RESULT} 
$(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |59.7%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |59.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |59.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |59.9%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |60.0%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |60.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |60.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |60.2%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |60.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |60.3%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |60.3%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |60.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |60.5%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |60.5%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |60.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |60.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |60.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |60.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |60.8%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |60.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |61.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |61.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |61.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |61.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |61.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |61.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |61.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |61.4%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |61.4%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |61.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |61.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |61.6%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |61.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |61.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |61.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |61.9%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |61.9%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |62.0%| [LD] {RESULT} 
$(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |62.1%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |62.1%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |62.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |62.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |62.3%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |62.4%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |62.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |62.5%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |62.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |62.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |62.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |62.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |62.9%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |63.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |63.0%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |63.1%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |63.2%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |63.2%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |63.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |63.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |63.4%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |63.5%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |63.5%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |63.6%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |63.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |63.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |63.8%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |63.9%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |64.0%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |64.0%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |64.1%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |64.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |64.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |64.3%| [LD] {RESULT} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |64.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |64.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |64.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |64.6%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |64.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |64.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |64.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |64.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |64.9%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |65.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |65.1%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |65.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |65.2%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |65.2%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |65.3%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |65.4%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |65.4%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |65.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |65.5%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |65.6%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |65.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |65.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |65.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |65.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |66.0%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |66.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |66.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |66.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |66.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |66.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |66.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |66.5%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |66.5%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |66.7%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |66.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |66.8%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |66.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |67.0%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |67.0%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |67.1%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |67.3%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |67.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |67.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |67.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |67.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |67.8%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |67.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |67.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |68.0%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |68.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |68.1%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |68.2%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |68.2%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |68.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |68.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |68.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |68.5%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |68.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |68.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |68.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |68.7%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |68.8%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |68.8%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |68.9%| [LD] 
{RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |69.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |69.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |69.1%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |69.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |69.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |69.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |69.5%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.6%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |69.7%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |69.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |69.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |69.9%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |70.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |70.0%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |70.1%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |70.1%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |70.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |70.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |70.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |70.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |70.4%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |70.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |70.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |70.6%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |70.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |70.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |70.8%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |70.9%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |70.9%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |71.0%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |71.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |71.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |71.2%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |71.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |71.4%| [LD] 
{RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |71.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |71.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |71.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |71.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |71.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |71.7%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |71.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |71.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |72.2%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |72.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |72.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |72.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |72.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.6%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.8%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |73.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |73.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |73.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |73.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.6%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |73.6%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |73.7%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |73.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |73.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |73.9%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |74.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |74.2%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |74.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |74.4%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |74.5%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |74.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |74.6%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |74.7%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |74.8%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |74.8%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |74.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.2%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |75.3%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.4%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |75.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> ColumnShardTiers::TTLUsage |75.7%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |75.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |76.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |76.2%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |76.3%| [LD] 
{RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |76.3%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::ReadSmall [FAIL] >> KqpSinkTx::OlapInvalidateOnError >> TKeyValueTracingTest::WriteHuge [FAIL] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.6%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1026092C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFEA342D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFEAEF58) std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074) NUnitTest::TTestFactory::Execute()+2438 (0x10724C16) NUnitTest::RunMain(int, char**)+5213 (0x10744E6D) ??+0 (0x7F888B431D90) __libc_start_main+128 (0x7F888B431E40) _start+41 (0xD80D029) |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1026092C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFEA8DFC) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFEAF9CE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, 
bool)+534 (0x1074A8F6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074) NUnitTest::TTestFactory::Execute()+2438 (0x10724C16) NUnitTest::RunMain(int, char**)+5213 (0x10744E6D) ??+0 (0x7FF41D537D90) __libc_start_main+128 (0x7FF41D537E40) _start+41 (0xD80D029) |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1026092C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFEA8DFC) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFEAF5DE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074) NUnitTest::TTestFactory::Execute()+2438 (0x10724C16) NUnitTest::RunMain(int, char**)+5213 (0x10744E6D) ??+0 (0x7F2EA4909D90) __libc_start_main+128 (0x7F2EA4909E40) _start+41 (0xD80D029) |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1026092C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1071C7C0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFEA342D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFEAF268) std::__y1::__function::__func, void ()>::operator()()+280 (0xFEC31C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1074A8F6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x10723349) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFEC2074) NUnitTest::TTestFactory::Execute()+2438 (0x10724C16) NUnitTest::RunMain(int, char**)+5213 (0x10744E6D) ??+0 (0x7F3EF54EED90) __libc_start_main+128 (0x7F3EF54EEE40) _start+41 (0xD80D029) >> YdbLogStore::AlterLogTable |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/tx/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.6%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |81.7%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpSinkMvcc::OlapMultiSinks |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/query/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> YdbLogStore::AlterLogTable [FAIL] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest 
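
Note on the four TKeyValueTracingTest failures above: they all trip the same check in ydb/core/keyvalue/keyvalue_ut_trace.cpp — the test environment expects exactly one Wilson trace to be uploaded per KeyValue request, and the log shows two were captured ("(2 != 1)"). Below is a minimal self-contained sketch of that assertion pattern, using the same library/cpp/testing/unittest macros that produced the backtraces above; TFakeWilsonUploader and its Traces member are illustrative stand-ins, not the real test fixture.

#include <library/cpp/testing/unittest/registar.h>
#include <util/generic/string.h>
#include <util/generic/vector.h>

namespace {
    // Illustrative stand-in for the uploader the real test inspects
    // (the actual fixture lives in keyvalue_ut_trace.cpp).
    struct TFakeWilsonUploader {
        TVector<TString> Traces; // one entry per uploaded trace
    };
}

Y_UNIT_TEST_SUITE(TraceCountSketch) {
    Y_UNIT_TEST(OneTracePerWrite) {
        TFakeWilsonUploader uploader;
        uploader.Traces.push_back("write-span");
        // The failing runs above hit this kind of check with 2 != 1,
        // i.e. an extra trace was uploaded for a single write/read request.
        UNIT_ASSERT_VALUES_EQUAL(uploader.Traces.size(), 1);
    }
}
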
|88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT >> BackupRestoreS3::RestoreViewQueryText |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-JSON |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> BackupRestoreS3::RestoreViewDependentOnAnotherView >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.7%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpCost::OlapWriteRow |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-04-17T19:15:59.757822Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494362994773443774:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:15:59.757923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/000191/r3tmp/tmpB1F6Uo/pdisk_1.dat 2025-04-17T19:16:00.120397Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:00.152954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:00.153041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 2282, node 1 2025-04-17T19:16:00.161452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:00.245882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:00.245921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:00.245929Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:00.246040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:16:00.514498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-17T19:16:00.660311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:34800" , at schemeshard: 72057594046644480 2025-04-17T19:16:00.660664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T19:16:00.660696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-04-17T19:16:00.662925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-17T19:16:00.663162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-17T19:16:00.665325Z node 1 :TX_PROXY ERROR: Actor# [1:7494362999068411982:2601] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C9935BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CE50670) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8673 (0x1C4C5C71) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C4EEAF8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CE87866) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CE571F9) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C4EDCC4) NUnitTest::TTestFactory::Execute()+2438 (0x1CE58AC6) NUnitTest::RunMain(int, char**)+5213 (0x1CE81DDD) ??+0 (0x7F473BFDBD90) __libc_start_main+128 (0x7F473BFDBE40) _start+41 (0x19302029) |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.6%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn >> BackupRestoreS3::RestoreViewQueryText [FAIL] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> QuoterWithKesusTest::KesusRecreation >> BackupRestoreS3::RestoreViewDependentOnAnotherView [FAIL] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [GOOD] >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> BackupRestoreS3::TestAllPrimitiveTypes-JSON [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [GOOD] Test command err: 2025-04-17T19:16:04.981723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363013503114638:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:04.983247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/00026c/r3tmp/tmp2OhTEt/pdisk_1.dat 2025-04-17T19:16:05.318734Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10998, node 1 2025-04-17T19:16:05.378817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:05.378926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:05.399739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:05.410983Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:05.411003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:05.411020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:05.411137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T19:16:05.692783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:07.585723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363026388017558:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.585868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363026388017550:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.585964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.586012Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] Handle TEvProposeTransaction 2025-04-17T19:16:07.586032Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:16:07.586082Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494363026388017565:2630] 2025-04-17T19:16:07.665608Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:07.665687Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:07.665705Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:07.667268Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:07.667353Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:07.667557Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:07.667708Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:07.667754Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:16:07.667900Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:16:07.669223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:16:07.672678Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:16:07.672746Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017565:2630] txid# 281474976710658 SEND to# [1:7494363026388017564:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:16:07.692933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363026388017564:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:16:07.780200Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] Handle TEvProposeTransaction 2025-04-17T19:16:07.780237Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:16:07.780297Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494363026388017644:2689] 2025-04-17T19:16:07.782967Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:07.783024Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:07.783039Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:07.783546Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:07.783625Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:07.783801Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:07.783925Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:07.784003Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:16:07.784191Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:16:07.791089Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:16:07.791248Z node 1 :TX_PROXY ERROR: Actor# [1:7494363026388017644:2689] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:16:07.791279Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026388017644:2689] txid# 281474976710659 SEND to# [1:7494363026388017564:2343] Source {TEvProposeTransactionSt ... x-amz-content-sha256;x-amz-date, Signature=d27734011ecbac540244b62b36e29d9db7e589de3c4c8d5e3a4b31ec41e3227f content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z 2025-04-17T19:16:09.009051Z node 1 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [1:7494363030682985721:2279], result# No response body. REQUEST: GET /test_bucket?prefix=FloatTable HTTP/1.1 HEADERS: Host: localhost:61757 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F92A9F4C-6026-43C1-A1A8-A981978B6292 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=d60dc076d9cc456a0061319c01f9696ca796ef43068718fbaa0042abffc066b4 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z S3_MOCK::HttpServeList: FloatTable 2025-04-17T19:16:09.047206Z node 1 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [1:7494363030682985721:2279], result# ListObjectsResult { } 2025-04-17T19:16:09.047267Z node 1 :IMPORT INFO: Reply: self# [1:7494363030682985721:2279], success# 1, error# 2025-04-17T19:16:09.047388Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.047414Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976710667, itemIdx# 0, success# 1 2025-04-17T19:16:09.047796Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/FloatTable' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T19:16:09.055009Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.055130Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.055143Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715762, id# 281474976710667 2025-04-17T19:16:09.055211Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateTable propose: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/FloatTable' DstPathId: State: 
CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715762 2025-04-17T19:16:09.055375Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.056628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715762:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.059164Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.059185Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715762, status# StatusAccepted 2025-04-17T19:16:09.059298Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/FloatTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976715762 Issue: '' } 2025-04-17T19:16:09.061726Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.114606Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363034977953104:2403] [0] Resolve database: name# /Root 2025-04-17T19:16:09.115060Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363034977953104:2403] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.115085Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363034977953104:2403] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.116334Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363034977953104:2403] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710667 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:61757" scheme: HTTP bucket: "test_bucket" items { source_prefix: "FloatTable" destination_path: "/Root/FloatTable" } } StartTime { seconds: 1744917368 } } 2025-04-17T19:16:09.128398Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.128422Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715762 2025-04-17T19:16:09.128531Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/FloatTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T19:16:09.129525Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.129602Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.129612Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715763, id# 281474976710667 2025-04-17T19:16:09.129651Z node 1 :IMPORT INFO: 
TImport::TTxProgress: Restore propose: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/FloatTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715763 2025-04-17T19:16:09.130193Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.130597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRestore, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.131683Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.131701Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715763, status# StatusAccepted 2025-04-17T19:16:09.131782Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/FloatTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Subscribed WaitTxId: 281474976715763 Issue: '' } 2025-04-17T19:16:09.133014Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete REQUEST: HEAD /test_bucket/FloatTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:61757 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4050D4B1-4AD6-4CBA-B27F-93B56E6B7095 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=d5c8aaddff638751519350914cbbb4b2e43c8a287b5a1575a8c85a0b8342cd41 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z S3_MOCK::HttpServeRead: /test_bucket/FloatTable/data_00.csv / 4 REQUEST: GET /test_bucket/FloatTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:61757 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B7AD386E-41B0-4F9C-A6F7-958CCCA6A306 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=01df9c3fccf38a0f77b82a9ce24b922cb19a904c9cadb7d3acb69cde0247ab69 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z S3_MOCK::HttpServeRead: /test_bucket/FloatTable/data_00.csv / 4 2025-04-17T19:16:09.188910Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.188934Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715763 2025-04-17T19:16:09.194907Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.522066Z node 1 :TX_PROXY DEBUG: 
[GetImport] [1:7494363034977953229:2406] [0] Resolve database: name# /Root 2025-04-17T19:16:09.522391Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363034977953229:2406] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.522412Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363034977953229:2406] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.522843Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363034977953229:2406] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710667 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:61757" scheme: HTTP bucket: "test_bucket" items { source_prefix: "FloatTable" destination_path: "/Root/FloatTable" } } StartTime { seconds: 1744917368 } EndTime { seconds: 1744917369 } } 2025-04-17T19:16:09.618683Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] Handle TEvExecuteKqpTransaction 2025-04-17T19:16:09.618741Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017798082162:2140] TxId# 281474976710668 ProcessProposeKqpTransaction 2025-04-17T19:16:09.619477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01js2jw4np5ev50dqamtjnb7x9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQzM2M5MmUtMjg2OTU3MzctMWMwYTA2Ni1iNDc2MDk5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-JSON [GOOD] Test command err: 2025-04-17T19:16:05.270016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363017905678067:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:05.270071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/000263/r3tmp/tmp9ikfiN/pdisk_1.dat 2025-04-17T19:16:05.610630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:05.661575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:05.661681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15420, node 1 2025-04-17T19:16:05.701776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:05.756216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:05.756241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:05.756251Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:05.756386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:16:05.990593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:07.955908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363026495613688:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.955908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363026495613680:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.956096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.956320Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] Handle TEvProposeTransaction 2025-04-17T19:16:07.956354Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:16:07.956416Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494363026495613695:2631] 2025-04-17T19:16:08.042460Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:08.042533Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:08.042552Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:08.043715Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:08.043811Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:08.043997Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.044140Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:08.044199Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:16:08.044341Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:16:08.045701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:16:08.051267Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:16:08.051325Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363026495613695:2631] txid# 281474976710658 SEND to# [1:7494363026495613694:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:16:08.067174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363026495613694:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:16:08.132377Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] Handle TEvProposeTransaction 2025-04-17T19:16:08.132406Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:16:08.132446Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494363030790581065:2685] 2025-04-17T19:16:08.134267Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:08.134302Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:08.134334Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:08.134795Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:08.134879Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:08.135022Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.135114Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:08.135145Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:16:08.135237Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:16:08.137151Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:16:08.137290Z node 1 :TX_PROXY ERROR: Actor# [1:7494363030790581065:2685] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:16:08.137337Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363030790581065:2685] txid# 281474976710659 SEND to# [1:7494363026495613694:2343] Source {TEvProposeTransactionSta ... -version;x-amz-content-sha256;x-amz-date, Signature=fd00ad55fd00a10db04cac062bb7f1a2940581a07ad1fc2918b07704dac59f0e content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z 2025-04-17T19:16:09.254250Z node 1 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [1:7494363035085549121:2281], result# No response body. REQUEST: GET /test_bucket?prefix=JsonTable HTTP/1.1 HEADERS: Host: localhost:19721 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7EE7CC60-CB15-4B72-9F42-871A0C3EA4A8 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=9c8e33cb5b69a64ac3c36a0b701199c46c037522c955e8ace528e32bdb6277fc content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z S3_MOCK::HttpServeList: JsonTable 2025-04-17T19:16:09.288570Z node 1 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [1:7494363035085549121:2281], result# ListObjectsResult { } 2025-04-17T19:16:09.288629Z node 1 :IMPORT INFO: Reply: self# [1:7494363035085549121:2281], success# 1, error# 2025-04-17T19:16:09.288760Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.288781Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976710667, itemIdx# 0, success# 1 2025-04-17T19:16:09.289157Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonTable' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T19:16:09.296788Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.296922Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.296948Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715762, id# 281474976710667 2025-04-17T19:16:09.297016Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateTable propose: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonTable' DstPathId: State: 
CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715762 2025-04-17T19:16:09.297178Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.298383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715762:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.300750Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.300772Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715762, status# StatusAccepted 2025-04-17T19:16:09.300911Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976715762 Issue: '' } 2025-04-17T19:16:09.303757Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.341313Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.341343Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715762 2025-04-17T19:16:09.341437Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-17T19:16:09.342395Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.342457Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.342477Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715763, id# 281474976710667 2025-04-17T19:16:09.342536Z node 1 :IMPORT INFO: TImport::TTxProgress: Restore propose: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715763 2025-04-17T19:16:09.343172Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.343547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRestore, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.344752Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.344772Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715763, status# StatusAccepted 2025-04-17T19:16:09.344846Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710667 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/JsonTable' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: Transferring SubState: Subscribed WaitTxId: 281474976715763 Issue: '' } 2025-04-17T19:16:09.345753Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 
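Every S3 request logged in these import runs carries the same x-amz-content-sha256 value, e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855. That is simply the SHA-256 digest of an empty body, which is expected for the HEAD and bodyless GET requests the import issues. A self-contained check (plain OpenSSL, compiled with -lcrypto; unrelated to YDB code):

    // Confirms the recurring x-amz-content-sha256 header value is the
    // SHA-256 of an empty request body.
    #include <openssl/sha.h>
    #include <cstdio>

    int main() {
        unsigned char digest[SHA256_DIGEST_LENGTH];
        SHA256(reinterpret_cast<const unsigned char*>(""), 0, digest); // empty body
        for (unsigned char b : digest) std::printf("%02x", b);
        std::printf("\n"); // e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
        return 0;
    }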
2025-04-17T19:16:09.352955Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363035085549279:2404] [0] Resolve database: name# /Root 2025-04-17T19:16:09.353206Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363035085549279:2404] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.353236Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363035085549279:2404] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.353641Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363035085549279:2404] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710667 Status: SUCCESS Progress: PROGRESS_TRANSFER_DATA ImportFromS3Settings { endpoint: "localhost:19721" scheme: HTTP bucket: "test_bucket" items { source_prefix: "JsonTable" destination_path: "/Root/JsonTable" } } StartTime { seconds: 1744917369 } } REQUEST: HEAD /test_bucket/JsonTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:19721 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DEBD3D68-343D-4339-B498-299A7E63ECE0 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=b33c1b04bce35183804dafc64f5ea2db1fc8c19494daf325c68b236fe8ba4cee content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z S3_MOCK::HttpServeRead: /test_bucket/JsonTable/data_00.csv / 35 REQUEST: GET /test_bucket/JsonTable/data_00.csv HTTP/1.1 HEADERS: Host: localhost:19721 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A3CB6580-66AD-4891-9BAE-08F0025FAD3E amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=30b98dadd6aee8ac6a6e01c0d799ad8344c2706fd08952e2d417bcad15c37f60 content-type: application/xml range: bytes=0-34 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z S3_MOCK::HttpServeRead: /test_bucket/JsonTable/data_00.csv / 35 2025-04-17T19:16:09.395484Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-17T19:16:09.395514Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715763 2025-04-17T19:16:09.396869Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-17T19:16:09.758615Z node 1 :TX_PROXY DEBUG: 
[GetImport] [1:7494363035085549335:2406] [0] Resolve database: name# /Root 2025-04-17T19:16:09.758953Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363035085549335:2406] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.758976Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363035085549335:2406] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.759529Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7494363035085549335:2406] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710667 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:19721" scheme: HTTP bucket: "test_bucket" items { source_prefix: "JsonTable" destination_path: "/Root/JsonTable" } } StartTime { seconds: 1744917369 } EndTime { seconds: 1744917369 } } 2025-04-17T19:16:09.845878Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] Handle TEvExecuteKqpTransaction 2025-04-17T19:16:09.845908Z node 1 :TX_PROXY DEBUG: actor# [1:7494363017905678292:2141] TxId# 281474976710668 ProcessProposeKqpTransaction 2025-04-17T19:16:09.846592Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01js2jw4x36kp9xsbz43b5hqva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk3NWY0MGQtZDE2NTQwNTUtNDQ2MmJjMmItOTJjMDRiYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> test_tpch.py::TestTpchS1::test_tpch[15] >> alter_compression.py::TestAlterCompression::test_all_supported_compression |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreViewQueryText [FAIL] Test command err: 2025-04-17T19:16:04.973617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363015693643463:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:04.974967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/00026e/r3tmp/tmpkNfRoE/pdisk_1.dat 2025-04-17T19:16:05.323684Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:05.350840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:05.350961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:05.360165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64728, node 1 2025-04-17T19:16:05.502560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:05.508113Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:05.508143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:05.508292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:16:05.725126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
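The WARN records below (and the identical sequences earlier in this log) are the routine first-query bootstrap of /Root/.metadata/workload_manager/pools/default rather than part of the reported failure: the pool fetch returns NOT_FOUND, a creator actor proposes the pool, a concurrent proposal loses with StatusAlreadyExists ("path exist, request accepts it"), and the loser schedules a retry. A rough sketch of that create-if-absent flow, with hypothetical helper names rather than the actual TPoolCreatorActor interface:

    // Illustrative only: the idempotent-create pattern these records trace.
    NYdb::EStatus status = ProposeCreateResourcePool(path); // hypothetical helper
    if (status == NYdb::EStatus::ALREADY_EXISTS) {
        status = NYdb::EStatus::SUCCESS; // another session created it first; accept it
    } else if (status != NYdb::EStatus::SUCCESS) {
        ScheduleRetry(); // matches "Scheduled retry for error" in the log
    }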
2025-04-17T19:16:07.699687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363028578546366:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.699766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363028578546374:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.699821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:07.700197Z node 1 :TX_PROXY DEBUG: actor# [1:7494363015693643684:2141] Handle TEvProposeTransaction 2025-04-17T19:16:07.700225Z node 1 :TX_PROXY DEBUG: actor# [1:7494363015693643684:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:16:07.700270Z node 1 :TX_PROXY DEBUG: actor# [1:7494363015693643684:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494363028578546381:2630] 2025-04-17T19:16:07.768488Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:07.768589Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:07.768607Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:07.770119Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:07.770218Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:07.770439Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:07.770618Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:07.770724Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:16:07.770914Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:16:07.772205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:16:07.774732Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:16:07.774796Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546381:2630] txid# 281474976710658 SEND to# [1:7494363028578546380:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:16:07.788796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363028578546380:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:16:07.868175Z node 1 :TX_PROXY DEBUG: actor# [1:7494363015693643684:2141] Handle TEvProposeTransaction 2025-04-17T19:16:07.868221Z node 1 :TX_PROXY DEBUG: actor# [1:7494363015693643684:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:16:07.868270Z node 1 :TX_PROXY DEBUG: actor# [1:7494363015693643684:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494363028578546449:2678] 2025-04-17T19:16:07.870829Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:07.870887Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:07.870899Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:07.871490Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:07.871552Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:07.871697Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:07.871796Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:07.871837Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:16:07.872041Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:16:07.874969Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:16:07.875148Z node 1 :TX_PROXY ERROR: Actor# [1:7494363028578546449:2678] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:16:07.875184Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363028578546449:2678] txid# 281474976710659 SEND to# [1:7494363028578546380:2343] Source {TEvProposeTransactionSt ... 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:08.205150Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363032873513825:2354] [281474976710661] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:08.206569Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoExecute 2025-04-17T19:16:08.206889Z node 1 :EXPORT DEBUG: TExport::TTxCreate: Reply: status# SUCCESS, error# 2025-04-17T19:16:08.211157Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoComplete 2025-04-17T19:16:08.211302Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:08.211333Z node 1 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 281474976710661 2025-04-17T19:16:08.211396Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:57230' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 } 2025-04-17T19:16:08.211443Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:08.211513Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:08.211528Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715757, id# 281474976710661 2025-04-17T19:16:08.211566Z node 1 :EXPORT INFO: TExport::TTxProgress: MkDir propose: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:57230' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, txId# 281474976715757 2025-04-17T19:16:08.211675Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:08.213397Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363032873513825:2354] [281474976710661] Handle TEvExport::TEvCreateExportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:62808" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744917368 } } 2025-04-17T19:16:08.214307Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:08.214334Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715757, status# StatusAccepted 2025-04-17T19:16:08.214471Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty 
maybe)' PeerName: 'ipv6:[::1]:57230' State: CreateExportDir WaitTxId: 281474976715757 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715757 2025-04-17T19:16:08.218975Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:08.220217Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513851:2355] [0] Resolve database: name# /Root 2025-04-17T19:16:08.220472Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513851:2355] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:08.220494Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513851:2355] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:08.220957Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513851:2355] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:62808" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744917368 } } 2025-04-17T19:16:08.235006Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:08.235043Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757 2025-04-17T19:16:08.235059Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757, id# 281474976710661, itemIdx# 4294967295 2025-04-17T19:16:08.235171Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710661 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:57230' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/view' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:16:08.246885Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:08.246957Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494363032873513870:2772], status: StatusSuccess REQUEST: PUT /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:62808 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D34BA5D9-FE3D-436F-8AE3-A3B5A59FCB20 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=6e11ddce03325305fef9c48c4931ca797d2c843d6074e131c6eeadc8f1be8d20 content-length: 107 content-md5: s9kG75rwDu9EaSQBrBROTg== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: acc84b32739165de8cf1d5c0620212d5b7e1e1a551feadcb58d905c2d8db2e42 x-amz-date: 20250417T191608Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/create_view.sql / / 107 2025-04-17T19:16:08.286409Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494363032873513870:2772], result: PutObjectResult { ETag: b3d906ef9af00eef44692401ac144e4e } REQUEST: PUT /test_bucket/view/metadata.json HTTP/1.1 HEADERS: Host: localhost:62808 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FF8E1605-B63F-46A6-B704-0EFA029ACD9F amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=f366b3d09d8e533fe5745c8af4a0ef658ae47d08430dbf2ab01c4092722d7502 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T191608Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/metadata.json / / 31 2025-04-17T19:16:08.290395Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494363032873513870:2772], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:16:08.290428Z node 1 :EXPORT INFO: Finish, self: [1:7494363032873513870:2772], success: 1, error: 2025-04-17T19:16:08.290552Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:08.290595Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710661, itemIdx# 0, success# 1, error# 2025-04-17T19:16:08.292744Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:08.427301Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513878:2356] [0] Resolve database: name# /Root 2025-04-17T19:16:08.427640Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513878:2356] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:08.427664Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513878:2356] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:08.428109Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363032873513878:2356] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710661 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:62808" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744917368 } } assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool 
NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B62443) NTestSuiteBackupRestoreS3::TTestCaseRestoreViewQueryText::Execute_(NUnitTest::TTestContext&)+1610 (0x18BC345A) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7F29B21FCD90) __libc_start_main+128 (0x7F29B21FCE40) _start+41 (0x164AC029) >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 7288, MsgBus: 29017 2025-04-17T19:15:56.808476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494362981241700102:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:15:56.808540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/000208/r3tmp/tmpSKX78a/pdisk_1.dat 2025-04-17T19:15:57.109996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7288, node 1 2025-04-17T19:15:57.163395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:15:57.163525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:15:57.165278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:15:57.174678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:15:57.174705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-17T19:15:57.174716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:15:57.174804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29017 TClient is connected to server localhost:29017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:15:57.582490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:15:59.018951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494362994126602656:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:15:59.018957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494362994126602631:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:15:59.019081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:15:59.023241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:15:59.032237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494362994126602667:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:15:59.115018Z node 1 :TX_PROXY ERROR: Actor# [1:7494362994126602718:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:15:59.379542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:15:59.535892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:15:59.535892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:15:59.536097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:15:59.536131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:15:59.536420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:15:59.536433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:15:59.536555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:15:59.536558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:15:59.536655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:15:59.536677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:15:59.536912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
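For the OlapInvalidateOnError failure that closes this block, the decisive check sits at kqp_sink_tx_ut.cpp:182: a write conflicting with an existing key surfaces as BAD_REQUEST (code 2017, "Bad request. Table: `/Root/KV`.") where the test expects PRECONDITION_FAILED. The cryptic "(BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)" string further below is simply a character-level diff of the two status names. A hypothetical reconstruction of that kind of terminal-status check (EStatus and TResult are stand-ins, not the real SDK types):

    // Illustrative only: mimics the shape of the failing assertion, not the
    // actual test code.
    #include <iostream>

    enum class EStatus { SUCCESS, BAD_REQUEST, PRECONDITION_FAILED };

    struct TResult {
        EStatus Status;
        EStatus GetStatus() const { return Status; }
    };

    int main() {
        // The log shows the conflicting OLAP upsert returning BAD_REQUEST...
        TResult result{EStatus::BAD_REQUEST};

        // ...while the test demands PRECONDITION_FAILED, which is what turns
        // this run into the assertion failure printed below.
        if (result.GetStatus() != EStatus::PRECONDITION_FAILED) {
            std::cerr << "status mismatch: BAD_REQUEST != PRECONDITION_FAILED\n";
            return 1;
        }
        return 0;
    }

Whether BAD_REQUEST or PRECONDITION_FAILED is the right status for a key conflict is the substantive question the diff raises; the sketch only shows why the comparison trips.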
2025-04-17T19:15:59.536925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:15:59.537007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:15:59.537016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:15:59.537110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:15:59.537119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:15:59.537212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:15:59.537234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:15:59.537284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:15:59.537333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:15:59.537388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:15:59.537443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:15:59.537475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7494362994126602910:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:15:59.537541Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7494362994126602907:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:15:59.569741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494362994126602932:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:15:59.569803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7494362994126602932:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:15:59.570038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_ ... t_id=72075186224038034;self_id=[1:7494363011306478110:3302];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.328108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[1:7494363011306478110:3302];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.330037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7494363011306478168:3311];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.330260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7494363011306478168:3311];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.330540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7494363011306478164:3310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.330714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7494363011306478164:3310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.333082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7494363011306478292:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.333104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[1:7494363011306478007:3280];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.333250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7494363011306478292:3344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.333271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[1:7494363011306478007:3280];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.336343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038033;self_id=[1:7494363011306478086:3292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.336526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7494363011306478086:3292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.336658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7494363011306477819:3262];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.336811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7494363011306477819:3262];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.339241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7494363011306478551:3422];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.339436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7494363011306478551:3422];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.339720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7494363011306477940:3270];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.339852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7494363011306477940:3270];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.340783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[1:7494363011306477771:3232];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.340918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[1:7494363011306477771:3232];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.342793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7494363011306478040:3282];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.342937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7494363011306478040:3282];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.344319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7494363011306478170:3312];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.344483Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038010;self_id=[1:7494363011306478170:3312];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.344823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7494363011306478404:3407];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.344946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7494363011306478404:3407];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.346976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7494363011306478156:3308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.347201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7494363011306478156:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.347929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7494363011306478323:3353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.348107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7494363011306478323:3353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.349493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7494363011306478150:3307];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.349694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7494363011306478150:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.352039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7494363011306478284:3343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.352267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7494363011306478284:3343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.356600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7494363011306478162:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:09.356781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7494363011306478162:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x1903269E 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x190112E8 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x19018767 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x19018767 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x19018767 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x19017933 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC 17. ??:0: ?? @ 0x7F49BDAB8D8F 18. ??:0: ?? @ 0x7F49BDAB8E3F 19. ??:0: ?? @ 0x16619028 |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreViewDependentOnAnotherView [FAIL] Test command err: 2025-04-17T19:16:05.570733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363018726677441:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:05.570778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/0001a6/r3tmp/tmpkFWSlo/pdisk_1.dat 2025-04-17T19:16:05.927419Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5778, node 1 2025-04-17T19:16:05.970164Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:16:05.970439Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-17T19:16:05.971563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:05.971645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:06.031135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:06.050604Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:06.050624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:06.050631Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-17T19:16:06.050748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:16:06.287680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:08.276210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363031611580399:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.276210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363031611580391:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.276280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.276591Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018726677710:2141] Handle TEvProposeTransaction 2025-04-17T19:16:08.276626Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018726677710:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:16:08.276682Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018726677710:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494363031611580406:2631] 2025-04-17T19:16:08.349024Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:08.349319Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:08.349338Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:08.350787Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:08.350876Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:08.351071Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.351244Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:08.351300Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:16:08.351452Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:16:08.352750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:16:08.357743Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:16:08.357831Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580406:2631] txid# 281474976710658 SEND to# [1:7494363031611580405:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:16:08.372555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363031611580405:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:16:08.461839Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018726677710:2141] Handle TEvProposeTransaction 2025-04-17T19:16:08.461869Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018726677710:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:16:08.461916Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018726677710:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494363031611580482:2687] 2025-04-17T19:16:08.464452Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:08.464536Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:08.464553Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:08.465136Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:08.465213Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:08.465410Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.465526Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:08.465593Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:16:08.465746Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:16:08.468588Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031611580482:2687] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:16:08.468774Z node 1 :TX_PROXY ERROR: Actor# [1:7494363031611580482:2687] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-0 ... Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.048035Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035906547954:2364] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.048507Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035906547954:2364] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710663 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:20918" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/baseView" destination_prefix: "/baseView" } items { source_path: "/Root/dependentView" destination_prefix: "/dependentView" } } StartTime { seconds: 1744917369 } } 2025-04-17T19:16:09.059622Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.059654Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757 2025-04-17T19:16:09.059687Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757, id# 281474976710663, itemIdx# 4294967295 2025-04-17T19:16:09.059805Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710663 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 8] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36458' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 2 PendingItems: 2 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/baseView' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:16:09.071999Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710663 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 8] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:36458' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 2 PendingItems: 2 PendingDropItems: 0 }, item# { Idx: 1 SourcePathName: '/Root/dependentView' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 7] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:16:09.074099Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.074204Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494363035906547976:2818], status: StatusSuccess 2025-04-17T19:16:09.074344Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494363035906547977:2819], status: StatusSuccess REQUEST: PUT /test_bucket/dependentView/create_view.sql HTTP/1.1 HEADERS: Host: localhost:20918 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8726DFB3-586B-4B47-94D5-F65876B039B3 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=4a0cf98525c85630491727e851f9c3c174a19da3bf83552724673190e795602d content-length: 141 content-md5: l9g5vf/dWNoOWE5C9y3Unw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 6ad3a25be46dca1d1cdd95d4b66bd920e2b333607cd84bef890cf4363d1836bc x-amz-date: 20250417T191609Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/dependentView/create_view.sql / / 141 REQUEST: PUT /test_bucket/baseView/create_view.sql HTTP/1.1 HEADERS: Host: localhost:20918 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AD3DDB04-AAB3-4806-9177-3DE4033F5654 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=d3c3026fcf7fcf17f4b19f49cc101b560962f5421817f02187590bbe495cbc0a content-length: 117 content-md5: lBFaxqtmzEFiACQ23xQnRQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 4db1b6162f0221c05acd67f55eff5f059abb6c30338af5b3d0487274adf295e5 x-amz-date: 20250417T191609Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/baseView/create_view.sql / / 117 2025-04-17T19:16:09.124746Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494363035906547977:2819], result: PutObjectResult { ETag: 97d839bdffdd58da0e584e42f72dd49f } 2025-04-17T19:16:09.124748Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494363035906547976:2818], result: PutObjectResult { ETag: 94115ac6ab66cc4162002436df142745 } REQUEST: PUT /test_bucket/dependentView/metadata.json HTTP/1.1 HEADERS: Host: localhost:20918 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2748D6FE-4833-4386-AA7E-BD6A024817E4 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=8c20845a6e82699b4fcfe53da7dd1f868f74da07102dad8defec696724e93220 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T191609Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/dependentView/metadata.json / / 31 REQUEST: PUT /test_bucket/baseView/metadata.json HTTP/1.1 HEADERS: Host: localhost:20918 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EA728FE5-BB02-48D4-BC08-969587067B24 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, 
SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=7a65ea4ec9978c156836097ca0dcbcaab4f41a2a564a1554cd36d403a9834db9 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T191609Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/baseView/metadata.json / / 31 2025-04-17T19:16:09.128067Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494363035906547977:2819], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:16:09.128101Z node 1 :EXPORT INFO: Finish, self: [1:7494363035906547977:2819], success: 1, error: 2025-04-17T19:16:09.128883Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.128932Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710663, itemIdx# 1, success# 1, error# 2025-04-17T19:16:09.129097Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494363035906547976:2818], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:16:09.129114Z node 1 :EXPORT INFO: Finish, self: [1:7494363035906547976:2818], success: 1, error: 2025-04-17T19:16:09.135232Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.135261Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710663, itemIdx# 0, success# 1, error# 2025-04-17T19:16:09.139351Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.139418Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.256186Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035906547991:2366] [0] Resolve database: name# /Root 2025-04-17T19:16:09.256515Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035906547991:2366] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.256539Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035906547991:2366] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.256965Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035906547991:2366] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710663 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:20918" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/baseView" destination_prefix: "/baseView" } items { source_path: "/Root/dependentView" destination_prefix: "/dependentView" } } StartTime { seconds: 1744917369 } } assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = 
NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B6D782) NTestSuiteBackupRestoreS3::TTestCaseRestoreViewDependentOnAnotherView::Execute_(NUnitTest::TTestContext&)+1604 (0x18BC5264) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7F7510D57D90) __libc_start_main+128 (0x7F7510D57E40) _start+41 (0x164AC029) |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [FAIL] Test command err: 2025-04-17T19:16:05.569524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363018199287030:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:05.569698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/00025a/r3tmp/tmprHDwvt/pdisk_1.dat 2025-04-17T19:16:05.975614Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:06.009651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:06.009733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4879, node 1 2025-04-17T19:16:06.034818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:06.105321Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:06.105344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:06.105351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:06.105455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25590 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:16:06.324985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:08.302855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363031084189964:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.302938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.569477Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018199287273:2135] Handle TEvProposeTransaction 2025-04-17T19:16:08.569515Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018199287273:2135] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:16:08.569557Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018199287273:2135] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494363031084189986:2631] 2025-04-17T19:16:08.629360Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Group" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } IndexDescription { Name: "value_idx" KeyColumnNames: "Value" Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } } } } UserToken: "" DatabaseName: "" 2025-04-17T19:16:08.629399Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:08.629683Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:08.629747Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:08.629898Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.630008Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:08.630063Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:16:08.630163Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:16:08.632132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-17T19:16:08.637938Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:16:08.638020Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084189986:2631] txid# 281474976710658 SEND to# [1:7494363031084189985:2342] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:16:08.798362Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018199287273:2135] 
Handle TEvNavigate describe path /Root 2025-04-17T19:16:08.798419Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084190285:2871] HANDLE EvNavigateScheme /Root 2025-04-17T19:16:08.798710Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084190285:2871] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.798804Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084190285:2871] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root" Options { ShowPrivateTable: false } 2025-04-17T19:16:08.799465Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031084190285:2871] Handle TEvDescribeSchemeResult Forward to# [1:7494363031084190283:2358] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 65 Record# Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744917366388 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/Root:test" Kind: "test" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "ssd" Kind: "ssd" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-04-17T19:16:08.814633Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363031084190287:2359] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/table TableId: [72057594046644480:2:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:08.814665Z node 1 :TX_PROXY DEBUG: [CreateExport] 
[1:7494363031084190287:2359] [0] Allocate txId 2025-04-17T19:16:08.814806Z node 1 :TX_PROXY DEBUG: actor# [1:7494363018199287273:2135] Handle TEvAllocateTxId 2025-04-17T19:16:08.814861Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363031084190287:2359] [281474976710659] TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-17T19:16:08.814871Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363031084190287:2359] [281474976710659] Resolve database: name# /Root 2025-04-17T19:16:08.815080Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363031084190287:2359] [281474976710659] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:08.815107Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363031084190287:2359] [281474976710659] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:08.815571Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoExecute 2025-04-17T19:16:08.815837Z node 1 :EXPORT DEBUG: TExport::TTxCreate ... 281474976715759 2025-04-17T19:16:08.912230Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete REQUEST: PUT /test_bucket/table/metadata.json HTTP/1.1 HEADERS: Host: localhost:11626 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A4087985-1D9F-4083-BB6E-1CE33634DDA6 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=13e39dcc0839cd0fab5ffcf34c7f612ac704d5f5244c2725940ba06d332aefc1 content-length: 79 content-md5: 4Gkuy5NMsLe5V8K2rFkCGA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 2baf29622beac13691d7a0955c04b335d2f199aa186aea7bd819ffc46602285f x-amz-date: 20250417T191608Z S3_MOCK::HttpServeWrite: /test_bucket/table/metadata.json / / 79 REQUEST: PUT /test_bucket/table/scheme.pb HTTP/1.1 HEADERS: Host: localhost:11626 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 898B65D3-422A-4567-A8E5-409323C32CE4 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=306ec34586fbc8d0a3ebbd9b9b16fc2052c5b9842766d69da60a2a67e0f9c2a9 content-length: 1192 content-md5: zVIm2WDU6upM/0yq6GRCkw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: 3a1e3a967f9c3f3ab1544da8b73f32f1f2a4e401ccdca3c54786a72125d1f966 x-amz-date: 20250417T191608Z S3_MOCK::HttpServeWrite: /test_bucket/table/scheme.pb / / 1192 REQUEST: PUT /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:11626 
Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CE8C2AD1-4200-4B1F-9EE4-E3DDA337A315 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=30a4f8705905fe3e7954471bd0bb20ce849bd986cee653d4ecbc0dcc78e98aa8 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250417T191609Z S3_MOCK::HttpServeWrite: /test_bucket/table/data_00.csv / / 0 2025-04-17T19:16:09.013007Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.013032Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715759 2025-04-17T19:16:09.013042Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715759, id# 281474976710659, itemIdx# 0 2025-04-17T19:16:09.013218Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:33632' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 1 } 2025-04-17T19:16:09.014471Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.014554Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.014571Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715760, id# 281474976710659 2025-04-17T19:16:09.014686Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:33632' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/table' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 2] SourcePathType: EPathTypeTable State: Dropping SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715760 2025-04-17T19:16:09.014761Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.017209Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.017233Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715760, status# StatusAccepted 2025-04-17T19:16:09.017298Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:33632' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 0, txId# 281474976715760 2025-04-17T19:16:09.019275Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.038195Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.038222Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715760 2025-04-17T19:16:09.038246Z node 1 :EXPORT DEBUG: TExport::TTxProgress: 
OnNotifyResult: txId# 281474976715760, id# 281474976710659, itemIdx# 0 2025-04-17T19:16:09.038327Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:33632' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-04-17T19:16:09.039582Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.039641Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.039650Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715761, id# 281474976710659 2025-04-17T19:16:09.039695Z node 1 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:33632' State: AutoDropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976715761 2025-04-17T19:16:09.039765Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.039995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715761:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.041482Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035379157912:2367] [0] Resolve database: name# /Root 2025-04-17T19:16:09.041831Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035379157912:2367] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.041855Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035379157912:2367] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.042137Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.042160Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715761, status# StatusAccepted 2025-04-17T19:16:09.042247Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710659 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 6] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:33632' State: AutoDropping WaitTxId: 281474976715761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715761 2025-04-17T19:16:09.044483Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.044780Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363035379157912:2367] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710659 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:11626" scheme: HTTP bucket: 
"test_bucket" items { source_path: "/Root/table" destination_prefix: "/table" } } StartTime { seconds: 1744917368 } } 2025-04-17T19:16:09.047392Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-17T19:16:09.055080Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.055108Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761 2025-04-17T19:16:09.055121Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715761, id# 281474976710659, itemIdx# 4294967295 2025-04-17T19:16:09.056359Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B7703D) NTestSuiteBackupRestoreS3::TestTableWithIndexBackupRestore(NKikimrSchemeOp::EIndexType, bool)+1398 (0x18BC6D86) NTestSuiteBackupRestoreS3::TTestCaseTestAllIndexTypes::Execute_(NUnitTest::TTestContext&)+217 (0x18BCAD59) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7FC1300DAD90) __libc_start_main+128 (0x7FC1300DAE40) _start+41 (0x164AC029) ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [FAIL] Test command err: 2025-04-17T19:16:05.583461Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363019065628705:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:05.583574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/000234/r3tmp/tmpDWtTOx/pdisk_1.dat 2025-04-17T19:16:06.067180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:06.088034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:06.088115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:06.099315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 32662, node 1 2025-04-17T19:16:06.196744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:06.196771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:06.196778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:06.196923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:16:06.428420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:08.385634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363031950531543:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.385646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363031950531538:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.385733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:08.386001Z node 1 :TX_PROXY DEBUG: actor# [1:7494363019065628866:2141] Handle TEvProposeTransaction 2025-04-17T19:16:08.386035Z node 1 :TX_PROXY DEBUG: actor# [1:7494363019065628866:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-17T19:16:08.386069Z node 1 :TX_PROXY DEBUG: actor# [1:7494363019065628866:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7494363031950531553:2622] 2025-04-17T19:16:08.435909Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:08.435999Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:08.436019Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:08.438168Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:08.438309Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:08.438575Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.438745Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:08.438801Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-17T19:16:08.438946Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 HANDLE EvClientConnected 2025-04-17T19:16:08.440379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:16:08.442832Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-17T19:16:08.442888Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531553:2622] txid# 281474976710658 SEND to# [1:7494363031950531552:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-17T19:16:08.457862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363031950531552:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:16:08.546717Z node 1 :TX_PROXY DEBUG: actor# [1:7494363019065628866:2141] Handle TEvProposeTransaction 2025-04-17T19:16:08.546768Z node 1 :TX_PROXY DEBUG: actor# [1:7494363019065628866:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-17T19:16:08.546834Z node 1 :TX_PROXY DEBUG: actor# [1:7494363019065628866:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7494363031950531621:2670] 2025-04-17T19:16:08.549479Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-17T19:16:08.549532Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-17T19:16:08.549546Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-17T19:16:08.550138Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-17T19:16:08.550231Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-17T19:16:08.550498Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-17T19:16:08.550620Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-17T19:16:08.550707Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-17T19:16:08.550901Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 HANDLE EvClientConnected 2025-04-17T19:16:08.553582Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-17T19:16:08.553752Z node 1 :TX_PROXY ERROR: Actor# [1:7494363031950531621:2670] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:16:08.553783Z node 1 :TX_PROXY DEBUG: Actor# [1:7494363031950531621:2670] txid# 281474976710659 SEND to# [1:7494363031950531552:2343] Source {TEvProposeTransactionSt ... Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.097715Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363036245499009:2359] [281474976710662] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.098350Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoExecute 2025-04-17T19:16:09.098656Z node 1 :EXPORT DEBUG: TExport::TTxCreate: Reply: status# SUCCESS, error# 2025-04-17T19:16:09.103379Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoComplete 2025-04-17T19:16:09.103499Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.103527Z node 1 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 281474976710662 2025-04-17T19:16:09.103574Z node 1 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:38590' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 } 2025-04-17T19:16:09.103613Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.103702Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.103721Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976715757, id# 281474976710662 2025-04-17T19:16:09.103765Z node 1 :EXPORT INFO: TExport::TTxProgress: MkDir propose: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:38590' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, txId# 281474976715757 2025-04-17T19:16:09.103839Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.107819Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7494363036245499009:2359] [281474976710662] Handle TEvExport::TEvCreateExportResponse: record# Entry { Id: 281474976710662 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:3755" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744917369 } } 2025-04-17T19:16:09.115028Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499020:2360] [0] Resolve database: name# /Root 2025-04-17T19:16:09.115615Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.115635Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976715757, status# StatusAccepted 2025-04-17T19:16:09.115745Z node 1 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: 
[OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:38590' State: CreateExportDir WaitTxId: 281474976715757 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976715757 2025-04-17T19:16:09.116511Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499020:2360] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.116532Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499020:2360] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.119519Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.120397Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499020:2360] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710662 Status: SUCCESS Progress: PROGRESS_PREPARING ExportToS3Settings { endpoint: "localhost:3755" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744917369 } } 2025-04-17T19:16:09.127285Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.127316Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757 2025-04-17T19:16:09.127344Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976715757, id# 281474976710662, itemIdx# 4294967295 2025-04-17T19:16:09.127446Z node 1 :EXPORT INFO: TExport::TTxProgress: UploadScheme: info# { Id: 281474976710662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046644480, LocalPathId: 7] UserSID: '(empty maybe)' PeerName: 'ipv6:[::1]:38590' State: CreateExportDir WaitTxId: 0 Issue: '' Items: 1 PendingItems: 1 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/Root/view' SourcePathId: [OwnerId: 72057594046644480, LocalPathId: 6] SourcePathType: EPathTypeView State: Transferring SubState: Proposed WaitTxId: 0 Issue: '' } 2025-04-17T19:16:09.139711Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.139777Z node 1 :EXPORT DEBUG: HandleSchemeDescription, self: [1:7494363036245499052:2765], status: StatusSuccess REQUEST: PUT /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:3755 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 03259E3D-6642-45EF-B593-937E5C1D53E7 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=bbef647ec42c86a96dc74844a1d413b8aa23a85bdbfc0e5c3efb0cad10f9f5a2 content-length: 165 content-md5: VGI/U9aBQRGDg7M5DEll1Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
x-amz-content-sha256: 24d8c773c0010e0420ccc01cdf6767c51f2496ddc97edc8d89de1748e6cf32e9 x-amz-date: 20250417T191609Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/create_view.sql / / 165 2025-04-17T19:16:09.183266Z node 1 :EXPORT DEBUG: HandleSchemePutResponse, self: [1:7494363036245499052:2765], result: PutObjectResult { ETag: 54623f53d68141118383b3390c4965d5 } REQUEST: PUT /test_bucket/view/metadata.json HTTP/1.1 HEADERS: Host: localhost:3755 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 91364DC4-8AC3-4E10-A549-E443C2AA96C2 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250417/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=fb70b425320173f281d3235f89f1c3197624b391939a647fdf6dfcf27edf8ccd content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-content-sha256: eff56d34e31fad9096420efe8729173b9963ab7eb04c9c0470c5f06519bd8c7c x-amz-date: 20250417T191609Z x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /test_bucket/view/metadata.json / / 31 2025-04-17T19:16:09.186620Z node 1 :EXPORT DEBUG: HandleMetadataPutResponse, self: [1:7494363036245499052:2765], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-04-17T19:16:09.186661Z node 1 :EXPORT INFO: Finish, self: [1:7494363036245499052:2765], success: 1, error: 2025-04-17T19:16:09.186784Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-17T19:16:09.186813Z node 1 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 281474976710662, itemIdx# 0, success# 1, error# 2025-04-17T19:16:09.188670Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-17T19:16:09.326968Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499075:2361] [0] Resolve database: name# /Root 2025-04-17T19:16:09.327362Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499075:2361] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-17T19:16:09.327385Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499075:2361] [0] Send request: schemeShardId# 72057594046644480 2025-04-17T19:16:09.327889Z node 1 :TX_PROXY DEBUG: [GetExport] [1:7494363036245499075:2361] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976710662 Status: UNDETERMINED Progress: PROGRESS_UNSPECIFIED ExportToS3Settings { endpoint: "localhost:3755" scheme: HTTP bucket: "test_bucket" items { source_path: "/Root/view" destination_prefix: "/view" } } StartTime { seconds: 1744917369 } } assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, 
NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture()+28 (0x1900F1BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194CC790) bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration)+3969 (0x18BAE3A1) NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&)+4278 (0x18BAB846) ??+0 (0x18C693C5) ??+0 (0x18B85038) NTestSuiteBackupRestoreS3::TestViewBackupRestore()+1418 (0x18BC876A) NTestSuiteBackupRestoreS3::TTestCaseTestAllSchemeObjectTypes::Execute_(NUnitTest::TTestContext&)+393 (0x18BCA919) std::__y1::__function::__func, void ()>::operator()()+280 (0x18C483B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x195039A6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x194D3319) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+1204 (0x18C47584) NUnitTest::TTestFactory::Execute()+2438 (0x194D4BE6) NUnitTest::RunMain(int, char**)+5213 (0x194FDF1D) ??+0 (0x7F0AB7DAAD90) __libc_start_main+128 (0x7F0AB7DAAE40) _start+41 (0x164AC029) |97.0%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkMvcc::OlapMultiSinks [FAIL] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 30872, MsgBus: 18063 2025-04-17T19:16:07.143758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363025489987738:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:07.143826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/000294/r3tmp/tmp0fpxWR/pdisk_1.dat 2025-04-17T19:16:07.450763Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30872, node 1 2025-04-17T19:16:07.502865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:07.503038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:07.505194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:07.511574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:07.511594Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:07.511601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:07.511694Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18063 
TClient is connected to server localhost:18063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-17T19:16:07.964570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:07.982633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:08.114909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:08.247623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:08.322090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:09.669225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363034079924119:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:09.669326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:09.895695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.920027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.947667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-17T19:16:09.973754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-17T19:16:10.000915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-17T19:16:10.040034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-17T19:16:10.064398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-17T19:16:10.137297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363038374892077:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:10.137342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:10.137450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363038374892082:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:10.139700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-17T19:16:10.148814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363038374892085:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-17T19:16:10.250645Z node 1 :TX_PROXY ERROR: Actor# [1:7494363038374892143:3544] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:16:10.908100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-17T19:16:11.019749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:11.019886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:11.020092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494363038374892583:2520];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:11.020097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:16:11.020143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494363038374892583:2520];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:11.020163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:16:11.020237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:16:11.020305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:16:11.020305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494363038374892583:2520];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:16:11.020369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:16:11.020393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494363038374892583:2520];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
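An aside on the KQP_WORKLOAD_SERVICE records a few lines up (pool fetch NOT_FOUND, CreateResourcePool proposed, "Scheduled retry ... doublechecking", then the TX_PROXY "path exist, request accepts it" error): this is the usual benign race when several actors lazily create the default resource pool, and whoever loses the race accepts the path that already exists. A minimal sketch of that idempotent-create pattern in deliberately generic C++; every type and function name below is hypothetical, this is not the YDB workload-service code:

#include <string>

enum class EStatus { Success, NotFound, AlreadyExists, Error };

// Hypothetical scheme interface, for illustration only.
struct IScheme {
    virtual EStatus Create(const std::string& path) = 0;
    virtual bool Exists(const std::string& path) = 0;
    virtual ~IScheme() = default;
};

// Create-if-missing with a double check: losing the creation race is fine
// as long as the object exists afterwards (what "doublechecking" refers to).
inline EStatus EnsurePool(IScheme& scheme, const std::string& path) {
    const EStatus st = scheme.Create(path);
    if (st == EStatus::AlreadyExists) {
        return scheme.Exists(path) ? EStatus::Success : EStatus::Error;
    }
    return st;
}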
2025-04-17T19:16:11.020446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:16:11.020487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7494363038374892583:2520];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:16:11.020517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:16:11.020588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7494363038374892585:2521];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description ... pp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:16:11.137692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T19:16:11.137761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T19:16:11.137790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T19:16:11.137971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T19:16:11.138020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T19:16:11.138086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T19:16:11.138128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T19:16:11.138201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T19:16:11.138227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T19:16:11.138259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T19:16:11.138285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 
2025-04-17T19:16:11.139182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T19:16:11.139233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T19:16:11.139364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T19:16:11.139394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T19:16:11.139516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T19:16:11.139551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T19:16:11.139688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T19:16:11.139720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T19:16:11.139795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-17T19:16:11.139818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-17T19:16:11.175272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.175538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.179076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.180595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.182412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.185420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 
2025-04-17T19:16:11.185850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.188826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.190638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.191876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710672; 2025-04-17T19:16:11.278223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T19:16:11.278223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T19:16:11.278660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710674;tx_id=281474976710674;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710674; 2025-04-17T19:16:11.366222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710676;tx_id=281474976710676;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710676; query_phases { duration_us: 5140 cpu_time_us: 1417 affected_shards: 1 } query_phases { duration_us: 5163 cpu_time_us: 212 affected_shards: 1 } compilation { duration_us: 72093 cpu_time_us: 69996 } process_cpu_time_us: 470 total_duration_us: 83875 total_cpu_time_us: 72095 AddressSanitizer:DEADLYSIGNAL ================================================================= ==1018128==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018e0e20d bp 0x7fff2e9ea5e0 sp 0x7fff2e9ea440 T0) ==1018128==The signal is caused by a READ memory access. ==1018128==Hint: address points to the zero page. 
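The stack a few lines below attributes this zero-page READ to the generated protobuf accessor table_access(int) in ydb_query_stats.pb.h, i.e. to indexing a repeated field: protobuf's repeated-field Get() performs no bounds check in release builds, so asking for an element of an empty (or too short) table_access field dereferences near-null memory. A guarded sketch of the access pattern; the message and field names come from the trace, but the helper itself is hypothetical and is not the test's code:

#include <ydb/public/api/protos/ydb_query_stats.pb.h>

// Returns the first per-table stats entry of the first query phase, or null.
// The unchecked form, stats.query_phases(0).table_access(0), is exactly the
// kind of call the SEGV below points at when either repeated field is empty.
const Ydb::TableStats::TableAccessStats*
FirstTableAccess(const Ydb::TableStats::QueryStats& stats) {
    if (stats.query_phases_size() == 0) {
        return nullptr;                    // no phases reported at all
    }
    const auto& phase = stats.query_phases(0);
    if (phase.table_access_size() == 0) {
        return nullptr;                    // phase touched no tables
    }
    return &phase.table_access(0);         // safe: index checked above
}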
2025-04-17T19:16:12.144026Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494363025489987738:2063];send_to=[0:7307199536658146131:7762515];
2025-04-17T19:16:12.144107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
#0 0x18e0e20d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x18e0e20d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x18e0e20d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x18e0e20d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x18e0e20d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x18e33327 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x18e33327 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x18e33327 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#8 0x18e33327 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#9 0x18e33327 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#10 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#11 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#12 0x197855d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x19754f68 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#14 0x18e321d3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x19756835 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#16 0x1977fb4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7fd815223d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7fd815223e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x16571028 in _start (/home/runner/.ya/build/build_root/4cxu/000294/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16571028) (BuildId: 1bdc4290ca22bc21b0df7e2b25fc7bf1adbeab0b)
AddressSanitizer can not provide additional info.
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==1018128==ABORTING
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true]
|97.2%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log}
|97.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [FAIL]
Test command err:
Trying to start YDB, gRPC: 17178, MsgBus: 25405
2025-04-17T19:15:59.975658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494362992433507901:2060];send_to=[0:7307199536658146131:7762515];
2025-04-17T19:15:59.975790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/0001f3/r3tmp/tmpRQrTdN/pdisk_1.dat
2025-04-17T19:16:00.326172Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17178, node 1
2025-04-17T19:16:00.366824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-17T19:16:00.367120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-17T19:16:00.370130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-17T19:16:00.399149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-17T19:16:00.399167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-17T19:16:00.399175Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-17T19:16:00.399284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:25405
TClient is connected to server localhost:25405
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-17T19:16:00.857509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-17T19:16:02.761448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363005318410456:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:02.761535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363005318410437:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:02.761997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:02.768613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-17T19:16:02.779128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363005318410466:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-17T19:16:02.857530Z node 1 :TX_PROXY ERROR: Actor# [1:7494363005318410517:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:16:03.132967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-17T19:16:03.255164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:03.255164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:03.255343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:03.255628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:16:03.255782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:03.255956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:16:03.255965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:16:03.256062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:16:03.256070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:16:03.256185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:16:03.256200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-17T19:16:03.256306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:16:03.256321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:16:03.256403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:16:03.256446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:16:03.256510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:16:03.256555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:16:03.256594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:16:03.256671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:16:03.256698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:16:03.256862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:16:03.256870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:16:03.256999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7494363009613378018:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:16:03.257010Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7494363009613378016:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:16:03.293496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494363009613378009:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:03.293582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7494363009613378009:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:03.293800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;sel ... vNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.259350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7494363026793252857:3228];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.259390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7494363026793253188:3299];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.259497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7494363026793252857:3228];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.259539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7494363026793253188:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.259860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7494363026793253280:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.259878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7494363026793252731:3166];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7494363026793253280:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7494363026793252731:3166];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7494363026793253338:3321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260312Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038058;self_id=[1:7494363026793253338:3321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7494363026793252967:3245];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[1:7494363026793253102:3283];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7494363026793252967:3245];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[1:7494363026793253102:3283];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.260902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7494363026793252969:3246];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.261043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7494363026793252969:3246];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.261386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7494363026793253121:3295];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.261394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7494363026793252997:3247];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.261522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7494363026793253121:3295];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.261544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7494363026793252997:3247];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.261877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7494363026793253030:3271];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.262021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7494363026793253030:3271];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.262411Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038064;self_id=[1:7494363026793252706:3154];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.262418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7494363026793252801:3211];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.262545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7494363026793252801:3211];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.262567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[1:7494363026793252706:3154];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.262895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7494363026793253190:3300];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.262928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7494363026793253453:3327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.263020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7494363026793253190:3300];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.263110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7494363026793253453:3327];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.263446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7494363026793252742:3174];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.263584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7494363026793252742:3174];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.263935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7494363026793253583:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.264087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7494363026793253583:3346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.264443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7494363026793252999:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled; 2025-04-17T19:16:13.264585Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038004;self_id=[1:7494363026793252999:3248];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:763;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:588, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]"
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F
2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:588: CompareYson @ 0x4900DB47
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x1900EF4A
4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18FDD558
6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18FE36B7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FE36B7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FE36B7
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955
12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955
13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8
15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18FE2883
16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5
17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC
18. ??:0: ?? @ 0x7F7EB220ED8F
19. ??:0: ?? @ 0x7F7EB220EE3F
20. ??:0: ?? @ 0x16619028
|97.4%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log}
|97.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopic-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage
Test command err:
2025-04-17T19:15:58.780727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:15:58.781214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:15:58.781261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/00027f/r3tmp/tmpKxXREx/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3339, node 1 TClient is connected to server localhost:16801 2025-04-17T19:15:59.336150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:15:59.378553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:15:59.386039Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:15:59.386090Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:15:59.386115Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:15:59.386512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T19:15:59.422662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:15:59.422826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:15:59.435369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:15:59.565378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-17T19:15:59.651237Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvBoot 2025-04-17T19:15:59.652449Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvRestored 2025-04-17T19:15:59.652742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:745:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T19:15:59.671071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:745:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T19:15:59.671346Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-17T19:15:59.677874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:15:59.678085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:15:59.678383Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:15:59.678513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:15:59.678611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:15:59.678724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:15:59.678823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:15:59.678963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:15:59.679079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:15:59.679173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:15:59.679274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:15:59.679395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:745:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:15:59.693495Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:745:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T19:15:59.694678Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-17T19:15:59.694938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T19:15:59.694983Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T19:15:59.695140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:15:59.695296Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:15:59.695370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T19:15:59.695428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T19:15:59.695497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T19:15:59.695541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T19:15:59.695567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T19:15:59.695589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T19:15:59.695725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:15:59.695788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T19:15:59.695822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T19:15:59.695845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T19:15:59.695901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T19:15:59.695936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T19:15:59.695990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T19:15:59.696009Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T19:15:59.696061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T19:15:59.696091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T19:15:59.696107Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T19:15:59.696132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T19:15:59.696160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T19:15:59.696185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T19:15:59.696503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-17T19:15:59.696560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-04-17T19:15:59.696628Z node 1 :TX_COLUMNSHARD INFO: tablet_id= ... 47888;count=1;;11:size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-17T19:16:21.374199Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1275:3077], Recipient [1:745:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-04-17T19:16:21.374520Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1276:3078], Recipient [1:745:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-17T19:16:21.374550Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-17T19:16:21.374719Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[26] (CS::GENERAL) apply at tablet 72075186224037888 2025-04-17T19:16:21.377012Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:16 Blob count: 1 2025-04-17T19:16:21.377122Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2945656;raw_bytes=96859974;count=2;records=82561} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=10985168;raw_bytes=371060503;count=6;records=311449} inactive {blob_bytes=21748920;raw_bytes=731151681;count=14;records=617870} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 
TEvBlobStorage::TEvPut tId=72075186224037888;c=1;:72/0:size=2030;count=15;;1:size=53667;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445376;count=1;;7:size=1445528;count=1;;8:size=808584;count=1;;9:size=2093168;count=3;;10:size=1247888;count=1;;11:size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:72/0:size=2099;count=16;;1:size=53667;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445376;count=1;;7:size=1445528;count=1;;8:size=808584;count=1;;9:size=2093168;count=3;;10:size=1247888;count=1;;11:size=1445744;count=1;;12:size=1445408;count=1;;13:size=1445360;count=1;;14:size=1445920;count=1;;15:size=2598872;count=4;;16:size=1832712;count=3;;17:size=1018248;count=1;;18:size=994312;count=1;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-17T19:16:21.388639Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-17T19:16:21.388711Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;fline=with_appended.cpp:65;portions=23,;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5; 2025-04-17T19:16:21.388940Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:23;path_id:3;records_count:70098;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:2469232;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-17T19:16:21.389113Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/429610.000000s;; 2025-04-17T19:16:21.389190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::71e6fd0e-1bc011f0-8119f114-9daecb5; 2025-04-17T19:16:21.389244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:13930824;portions_count:23;); 2025-04-17T19:16:21.389279Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-17T19:16:21.389326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-04-17T19:16:21.389373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-17T19:16:21.389422Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-04-17T19:16:21.389455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-17T19:16:21.389494Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-04-17T19:16:21.389525Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-04-17T19:16:21.389577Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.797000s; 2025-04-17T19:16:21.389612Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-04-17T19:16:21.389750Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:16 Blob count: 1 VERIFY failed (2025-04-17T19:16:21.389882Z): 
tablet_id=72075186224037888;task_id=71e6fd0e-1bc011f0-8119f114-9daecb5;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18E6FBB9) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18E5DE4B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x1A175E96) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48C9C201) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x308D0F6D) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1EBA5183) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1EA88B60) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E8CEF54) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E86BCD5) NActors::IActor::Receive(TAutoPtr&)+237 (0x1A0A766D) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x35C14E45) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x35C0D6BA) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x35C0A461) NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+7904 (0x36592F30) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4568 (0x18A4D5B8) std::__y1::__function::__func, void ()>::operator()()+280 (0x18A5F8A8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1931CAA6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x192EC419) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x18A5E854) NUnitTest::TTestFactory::Execute()+2438 (0x192EDCE6) NUnitTest::RunMain(int, char**)+5213 (0x1931701D) ??+0 (0x7FC4883B3D90) __libc_start_main+128 (0x7FC4883B3E40) _start+41 (0x163A0029) >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] |97.6%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> KqpStats::SysViewClientLost [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL] Test command err: Trying to start YDB, gRPC: 15997, MsgBus: 9813 2025-04-17T19:16:02.841220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363005139655228:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:02.842866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/0001c1/r3tmp/tmp0LJlpm/pdisk_1.dat 2025-04-17T19:16:03.120173Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15997, node 1 2025-04-17T19:16:03.180778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:16:03.180801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:16:03.180807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:16:03.180913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T19:16:03.228575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:03.228674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:03.230511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9813 TClient is connected to server localhost:9813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-17T19:16:03.599835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:03.628795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:03.748946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-17T19:16:03.908783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-17T19:16:03.971341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:05.377574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363018024558826:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:05.377686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:05.736339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.776335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.806796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.873895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.899892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.974670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-17T19:16:06.018478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-17T19:16:06.099564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363022319526791:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:06.099623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:06.099750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7494363022319526796:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:06.103294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-04-17T19:16:06.113148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7494363022319526798:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-04-17T19:16:06.173631Z node 1 :TX_PROXY ERROR: Actor# [1:7494363022319526853:3547] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-17T19:16:06.906107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:07.840712Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494363005139655228:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:07.840839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-17T19:16:18.112954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-17T19:16:18.113005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:26.751961Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917386745, txId: 281474976715673] shutting down 2025-04-17T19:16:26.806569Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-17T19:16:27.965979Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917387959, txId: 281474976715675] shutting down 2025-04-17T19:16:29.114347Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917389110, txId: 281474976715677] shutting down 2025-04-17T19:16:30.269847Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917390265, txId: 281474976715679] shutting down 2025-04-17T19:16:31.443797Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917391438, txId: 281474976715681] shutting down 2025-04-17T19:16:32.612632Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917392607, txId: 281474976715683] shutting down 2025-04-17T19:16:33.794124Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917393787, txId: 281474976715685] shutting down 2025-04-17T19:16:34.933118Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917394928, txId: 281474976715687] shutting down 2025-04-17T19:16:36.091311Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917396085, txId: 281474976715689] shutting down 2025-04-17T19:16:37.309969Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917397303, txId: 281474976715691] shutting down 2025-04-17T19:16:38.481829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744917398477, txId: 281474976715693] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198368DB 1.
/tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x19CFB43F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x193D3478 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x193E6487 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x193E6487 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x193E6487 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19D32635 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19D32635 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19D32635 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x19D01FC8 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x193E560B 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19D03895 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19D2CBAC 15. ??:0: ?? @ 0x7FB90A101D8F 16. ??:0: ?? @ 0x7FB90A101E3F 17. ??:0: ?? @ 0x166DE028 |97.8%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL] >> 
test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |98.2%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |98.4%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |98.5%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> QuoterWithKesusTest::KesusRecreation [FAIL] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::KesusRecreation [FAIL] Test command err: 2025-04-17T19:16:10.423035Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7494363038997309236:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:10.423166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-17T19:16:10.443658Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7494363039824927711:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:10.443819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/000176/r3tmp/tmpEPVj6n/pdisk_1.dat 2025-04-17T19:16:10.705835Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:10.732851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:10.732922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:10.737302Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T19:16:10.738508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:10.785005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:10.785071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:10.787895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10435 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-17T19:16:10.946709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:10.966231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:10.987597Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72075186224037888 2025-04-17T19:16:10.987767Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Execute 2025-04-17T19:16:10.990685Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Complete 2025-04-17T19:16:10.990742Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Execute 2025-04-17T19:16:10.993047Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Complete 2025-04-17T19:16:11.027846Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Execute (sender=[1:7494363038997309707:2267], cookie=1, path="/dc-1/KesusQuoter") 2025-04-17T19:16:11.031074Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Complete (sender=[1:7494363038997309707:2267], cookie=1, status=SUCCESS) AddQuoterResource: { Resource { ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 } } } TClient::Ls request: /dc-1/KesusQuoter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744917371078 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... (TRUNCATED) 2025-04-17T19:16:11.065183Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7494363043292277405:2580], cookie=0, path="Resource", config={ MaxUnitsPerSecond: 10 }) 2025-04-17T19:16:11.065498Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 1 "Resource" 2025-04-17T19:16:11.067773Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7494363043292277405:2580], cookie=0) 2025-04-17T19:16:11.068042Z node 1 :QUOTER_SERVICE TRACE: Request({ Operator: And Deadline: no Cookie: 0 [ { 1, "/dc-1/KesusQuoter":"Resource" } ] }) 2025-04-17T19:16:11.068165Z node 1 :QUOTER_SERVICE INFO: resolve new quoter /dc-1/KesusQuoter 2025-04-17T19:16:11.068229Z node 1 :QUOTER_SERVICE INFO: path resolved as Kesus /dc-1/KesusQuoter 2025-04-17T19:16:11.068291Z node 1 :QUOTER_SERVICE INFO: resolve resource Resource on quoter /dc-1/KesusQuoter 2025-04-17T19:16:11.068316Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Created kesus quoter proxy. 
Tablet id: 72075186224037888 2025-04-17T19:16:11.068471Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Connecting to kesus 2025-04-17T19:16:11.068548Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: ProxyRequest "Resource" 2025-04-17T19:16:11.068814Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Successfully connected to tablet 2025-04-17T19:16:11.069266Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvSubscribeOnResourcesResult to [1:7494363043292277419:2304]. Cookie: 1. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-17T19:16:11.069293Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Subscribe on quoter resources (sender=[1:7494363043292277419:2304], cookie=1) 2025-04-17T19:16:11.069491Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: SubscribeOnResourceResult({ Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }) 2025-04-17T19:16:11.069510Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Initialized new session with resource "Resource" 2025-04-17T19:16:11.069552Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxySession("Resource", 1) 2025-04-17T19:16:11.069599Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(2, 2)} }]) 2025-04-17T19:16:11.069606Z node 1 :QUOTER_SERVICE INFO: resource session established: /dc-1/KesusQuoter:Resource as 1 2025-04-17T19:16:11.069743Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T19:16:11.069768Z node 1 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 2. FreeBalance: 2 2025-04-17T19:16:11.069796Z node 1 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-17T19:16:11.070000Z 2025-04-17T19:16:11.069808Z node 1 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-17T19:16:11.069880Z node 1 :QUOTER_SERVICE TRACE: Charge "Resource" for 1. Balance: 2. FreeBalance: 2. TicksToFullfill: 0.5. DurationToFullfillInUs: 50000. TimeToFullfill: 1970-01-01T00:00:00.050000Z. Now: 2025-04-17T19:16:11.069751Z. LastAllocated: 1970-01-01T00:00:00.000000Z 2025-04-17T19:16:11.070003Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 1, Queue: 0}]) 2025-04-17T19:16:11.070030Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: 1, QueueWeight: 0 } 2025-04-17T19:16:11.070051Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Activate session to "Resource". 
Connected: 1 2025-04-17T19:16:11.071152Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 1 ConsumeResource: true Amount: inf } ActorID { RawX1: 7494363043292277419 RawX2: 4503603922340096 } }) 2025-04-17T19:16:11.071202Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(1, 2)} }]) 2025-04-17T19:16:11.071223Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-17T19:16:11.071309Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvUpdateConsumptionStateAck to [1:7494363043292277419:2304]. Cookie: 0. Data: { } 2025-04-17T19:16:11.071332Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Update quoter resources consumption state (sender=[1:7494363043292277419:2304], cookie=0) 2025-04-17T19:16:11.071346Z node 1 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 1. FreeBalance: 1 2025-04-17T19:16:11.071663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-17T19:16:11.080386Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-17T19:16:11.080523Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Disconnected from tablet 2025-04-17T19:16:11.080541Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:16:11.080612Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Mark "Resource" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 1 2025-04-17T19:16:11.080655Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Schedule offline allocation in 0.000000s: [{ "Resource", 1 }] 2025-04-17T19:16:11.081044Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: OfflineResourceAllocation([{ "Resource", 1 }]) 2025-04-17T19:16:11.081071Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Allocated {"Resource", 1} offl ... c-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:09.569948Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:09.648810Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:09.648830Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:09.721480Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:09.721501Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:09.801855Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:09.801881Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:09.877070Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:09.877096Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:09.948811Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:09.948836Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.026074Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. 
Status: ERROR 2025-04-17T19:18:10.026098Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.101846Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.101871Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.181719Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.181739Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.255968Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.255995Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.330186Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.330206Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.409555Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.409580Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.491009Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.491037Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.567751Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.567772Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.642522Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.642548Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.716772Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.716792Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.801212Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.801239Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.877056Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.877083Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:10.954826Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:10.954852Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.030674Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.030701Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.111063Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.111085Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.189399Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.189431Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.269812Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. 
Status: ERROR 2025-04-17T19:18:11.269839Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.347143Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.347170Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.431583Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.431609Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.510467Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.510489Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.585124Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.585145Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.665009Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.665033Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.748943Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.748963Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.827334Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.827359Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.907132Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.907155Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:11.986572Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:11.986597Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.067935Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.067958Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.146316Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.146341Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.223083Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.223107Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.306026Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.306047Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.380766Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.380790Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.455056Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.455076Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.531788Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. 
Status: ERROR 2025-04-17T19:18:12.531812Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.612667Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.612688Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.688442Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.688466Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.764713Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.764734Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus 2025-04-17T19:18:12.849793Z node 1 :QUOTER_PROXY WARN: [/dc-1/KesusQuoter]: Failed to connect to tablet. Status: ERROR 2025-04-17T19:18:12.849816Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Reconnecting to kesus (TWithBackTrace) Event queue is still empty.ydb/library/actors/testlib/test_runtime.cpp:1375: TBackTrace::Capture()+28 (0x18F230CC) TWithBackTrace::TWithBackTrace<>()+65 (0x35D91D01) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+29118 (0x35D8DCFE) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x35D93FF4) NKikimr::TEvQuota::TEvClearance* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration)+643 (0x18B1B3A3) THolder NActors::TTestActorRuntimeBase::GrabEdgeEvent(TDuration)+332 (0x18B0178C) NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+304 (0x18B25C30) NKikimr::TKesusQuoterTestSetup::GetQuota(TBasicString> const&, TBasicString> const&, unsigned long, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+575 (0x18B27D3F) NKikimr::NTestSuiteQuoterWithKesusTest::TTestCaseKesusRecreation::Execute_(NUnitTest::TTestContext&)+926 (0x18A7006E) std::__y1::__function::__func, void ()>::operator()()+280 (0x18AAC828) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19417416) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x193E6D89) NKikimr::NTestSuiteQuoterWithKesusTest::TCurrentTest::Execute()+1204 (0x18AAB7D4) NUnitTest::TTestFactory::Execute()+2438 (0x193E8656) NUnitTest::RunMain(int, char**)+5213 (0x1941198D) ??+0 (0x7F76DDAD4D90) __libc_start_main+128 (0x7F76DDAD4E40) _start+41 (0x163D4029) |98.6%| [TM] {RESULT} ydb/core/quoter/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] Test command err: 2025-04-17T19:16:05.198900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:302:2346], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:16:05.199422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:16:05.199477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/0001a0/r3tmp/tmpVJXbgX/pdisk_1.dat 2025-04-17T19:16:05.536183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T19:16:05.536270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T19:16:05.536306Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T19:16:05.536391Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-17T19:16:05.536419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-17T19:16:05.639364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-17T19:16:05.639623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.639820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-17T19:16:05.640077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-17T19:16:05.640168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.640258Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:16:05.640945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-17T19:16:05.641102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-17T19:16:05.641143Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:16:05.641181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T19:16:05.641357Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T19:16:05.641394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T19:16:05.641493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.641564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-17T19:16:05.641597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-17T19:16:05.641625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-17T19:16:05.641716Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:16:05.642114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:16:05.642152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T19:16:05.642291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T19:16:05.642332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T19:16:05.642388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.642432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-17T19:16:05.642474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-17T19:16:05.642543Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:16:05.642910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:16:05.642938Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-17T19:16:05.643034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-17T19:16:05.643070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-17T19:16:05.643115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.643146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:16:05.643185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-17T19:16:05.643214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-17T19:16:05.643255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-17T19:16:05.653421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-17T19:16:05.654053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:16:05.654112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
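The trace above walks operation 1:0 through the schemeshard sub-operation states: TCreateParts (state 2 -> 3, "no shards to create"), NSubDomainState::TConfigureParts (3 -> 128), and NSubDomainState::TPropose, which sends the plan request to coordinator tablet 72057594046316545. A minimal illustrative sketch of that progression is below; the enum values and names mirror the trace but are assumptions, not YDB's actual TTxState implementation.

#include <cstdio>

// Illustrative only: state codes mirror the trace above (2 -> 3 -> 128);
// this enum and driver are hypothetical stand-ins, not YDB's real code.
enum ETxState : int {
    CreateParts    = 2,   // create tablets/shards needed by the operation
    ConfigureParts = 3,   // push configuration to the created parts
    Propose        = 128, // propose the transaction to the coordinator
};

int main() {
    int state = CreateParts;
    // "TCreateParts ... no shards to create, do next state" => 2 -> 3
    std::printf("Change state for txid 1:0 %d -> %d\n", state, ConfigureParts);
    state = ConfigureParts;
    // "NSubDomainState::TConfigureParts ProgressState" => 3 -> 128
    std::printf("Change state for txid 1:0 %d -> %d\n", state, Propose);
    state = Propose;
    // TPropose then hands the tx to the coordinator tablet for planning.
    std::printf("DoPropose send propose to coordinator: 72057594046316545\n");
    return 0;
}

The coordinator's TEvProposeTransactionStatus and TEvPlanStep replies, visible in the following records, drive the operation to completion.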
2025-04-17T19:16:05.654302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-17T19:16:05.655605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-17T19:16:05.655651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-17T19:16:05.655695Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-17T19:16:05.655840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-17T19:16:05.656242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T19:16:05.656288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T19:16:05.656320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T19:16:05.656469Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-17T19:16:05.656499Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-17T19:16:05.656573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-17T19:16:05.656613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-17T19:16:05.656649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-17T19:16:05.702014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-17T19:16:05.702129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-17T19:16:05.702706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:05.703400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-17T19:16:05.703480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-17T19:16:05.743204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:05.743317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:05.754913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:05.829567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-17T19:16:05.830302Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-17T19:16:05.830345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-17T19:16:05.830374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-17T19:16:05.830509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-17T19:16:05.830543Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-17T19:16:05.830617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-17T19:16:05.830754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.034089Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.034113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T19:18:13.034167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T19:18:13.034210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T19:18:13.034274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-04-17T19:18:13.034326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-17T19:18:13.034348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-04-17T19:18:13.034413Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-17T19:18:13.034458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037894 2025-04-17T19:18:13.034509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:18:13.044806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.044853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.044873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T19:18:13.117603Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 
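The periodic-stats records above show the pattern behind "Will delay TTxStoreTableStats on# 0.100000s, queue# 1": incoming TEvPeriodicTableStats events are queued, and the persisting transaction (TTxStoreStats) is scheduled with a short delay so stats from several shards can land in one write. A hedged sketch of that batching idea follows; the struct, constant, and drain loop are assumptions chosen to match the trace, not the real schemeshard logic.

#include <cstdio>
#include <queue>

// Hypothetical sketch of the stats-persist batching visible in the trace.
struct TShardStats { unsigned long long TabletId; unsigned long Rows; unsigned long Bytes; };

int main() {
    std::queue<TShardStats> pending;
    pending.push({72075186224037895ull, 2, 54}); // from the periodic stats event above

    // Mirrors "Will delay TTxStoreTableStats on# 0.100000s"; the real
    // scheduling policy is richer than a fixed constant.
    const double delaySeconds = 0.1;
    std::printf("Will delay TTxStoreTableStats on# %.6fs, queue# %zu\n",
                delaySeconds, pending.size());

    // TTxStoreStats then drains the whole queue in a single transaction.
    while (!pending.empty()) {
        const TShardStats& s = pending.front();
        std::printf("PersistSingleStats: tablet %llu rows %lu bytes %lu\n",
                    s.TabletId, s.Rows, s.Bytes);
        pending.pop();
    }
    return 0;
}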
2025-04-17T19:18:13.118048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1583:3193], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 16 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-17T19:18:13.118085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-17T19:18:13.118124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0016 2025-04-17T19:18:13.118240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-17T19:18:13.118282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-17T19:18:13.171279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.171334Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.171357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-17T19:18:13.171433Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-17T19:18:13.171459Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-17T19:18:13.171527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-04-17T19:18:13.171585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0, RowCount 2, DataSize 54 2025-04-17T19:18:13.171608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037895, followerId 0 2025-04-17T19:18:13.171669Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-17T19:18:13.171725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037895 2025-04-17T19:18:13.171778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-17T19:18:13.182120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.182190Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-17T19:18:13.182215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-17T19:18:13.243630Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:18:13.243691Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:18:13.243778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:18:13.243816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:18:13.264803Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T19:18:13.347712Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T19:18:13.420351Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:18:13.420433Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:18:13.420495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:18:13.420518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:18:13.441619Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-17T19:18:13.524409Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-17T19:18:13.607374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:18:13.607437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-17T19:18:13.607625Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzhlOTIzMGMtOGNlZTQzOWItYTA5ZTFhODQtYTVjMjRiNjc=, ActorId: [1:2033:3520], ActorState: ExecuteState, TraceId: 01js2jw5g57besqazvne5kxj8x, Create QueryResponse for error on request, msg: 
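The failure that follows is the test helper ExecSQL (signature shown in the backtrace: it takes a server, an actor id, the query text, and an expected Ydb::StatusIds::StatusCode) asserting that the response status equals the expected code; here the RESTORE query hit the 600000ms request deadline and returned TIMEOUT instead of SUCCESS. A self-contained sketch of that kind of check is below; RunQuery and ExpectStatus are hypothetical stand-ins, not YDB's helpers, and the statuses are reduced to a small enum.

#include <cstdio>
#include <cstdlib>
#include <string>

// Hedged sketch of the status assertion that fails in the log above.
enum class EStatus { SUCCESS, TIMEOUT, GENERIC_ERROR };

static const char* ToString(EStatus s) {
    switch (s) {
        case EStatus::SUCCESS: return "SUCCESS";
        case EStatus::TIMEOUT: return "TIMEOUT";
        default:               return "GENERIC_ERROR";
    }
}

static EStatus RunQuery(const std::string& sql) {
    // Stand-in for the real call; "RESTORE `MyCollection`;" above exceeded
    // the request deadline and came back as TIMEOUT.
    (void)sql;
    return EStatus::TIMEOUT;
}

static void ExpectStatus(EStatus got, EStatus expected) {
    // The unittest framework raises a test error with a diff like
    // "(TIM|SUCC)E(OUT|SS)"; abort() here is a simplification.
    if (got != expected) {
        std::fprintf(stderr, "assertion failed: (%s != %s)\n",
                     ToString(got), ToString(expected));
        std::abort();
    }
}

int main() {
    ExpectStatus(RunQuery("RESTORE `MyCollection`;"), EStatus::SUCCESS);
    return 0;
}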
2025-04-17T19:18:13.607821Z node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=YzhlOTIzMGMtOGNlZTQzOWItYTA5ZTFhODQtYTVjMjRiNjc=, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b 2025-04-17T19:18:13.608869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-17T19:18:13.608903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture()+28 (0x19266BDC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x19723D20) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x492CAD97) NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26179 (0x18EB72E3) std::__y1::__function::__func, void ()>::operator()()+280 (0x18E72428) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1975AF16) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1972A8A9) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18E712D4) NUnitTest::TTestFactory::Execute()+2438 (0x1972C176) NUnitTest::RunMain(int, char**)+5213 (0x1975548D) ??+0 (0x7F073C1C6D90) __libc_start_main+128 (0x7F073C1C6E40) _start+41 (0x165C5029) |98.7%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T19:16:07.845151Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T19:16:07.949120Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T19:16:07.953792Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T19:16:07.954292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T19:16:07.979928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T19:16:07.980276Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T19:16:07.989051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:07.989271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:07.989519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:16:07.989657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:16:07.989777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:16:07.989906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:16:07.990032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:16:07.990148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:16:07.990293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:16:07.990395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:16:07.990464Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:16:07.990525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:16:08.013759Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T19:16:08.023461Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T19:16:08.023628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T19:16:08.023682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T19:16:08.023857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:16:08.024031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:16:08.024114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T19:16:08.024155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T19:16:08.024267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T19:16:08.024336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T19:16:08.024384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T19:16:08.024416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T19:16:08.024607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:16:08.024668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T19:16:08.024708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T19:16:08.024734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T19:16:08.024834Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T19:16:08.024917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T19:16:08.024961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T19:16:08.024992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T19:16:08.025060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T19:16:08.025103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T19:16:08.025129Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T19:16:08.025175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T19:16:08.025224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T19:16:08.025258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T19:16:08.025727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=103; 2025-04-17T19:16:08.025810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-17T19:16:08.025908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-04-17T19:16:08.025995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-17T19:16:08.026161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T19:16:08.026246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T19:16:08.026283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T19:16:08.026484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T19:16:08.026530Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T19:16:08.026566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T19:16:08.027517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T19:16:08.027616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T19:16:08.027668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T19:16:08.027897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T19:16:08.027932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T19:16:08.027960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T ... :function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #24 0x315597d2 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #25 0x315597d2 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #26 0x315586e7 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:97:5 #27 0x315709e7 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:502:9 #28 0xfff8fba in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:218:21 #29 0xfff5e0f in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #30 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #31 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x108ce325 in operator() 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0xf53bcdd in NObjectFactory::TFactoryObjectCreator::Create() const /-S/library/cpp/object_factory/object_factory.h:38:20 #2 0x1ca4f2d9 in MakeHolder /-S/library/cpp/object_factory/object_factory.h:137:38 #3 0x1ca4f2d9 in NKikimr::NOlap::NStorageOptimizer::IOptimizerPlannerConstructor::BuildDefault() /-S/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.h:204:23 #4 0x1ca4d66f in NKikimr::NOlap::TIndexInfo::DeserializeOptionsFromProto(NKikimrSchemeOp::TColumnTableSchemeOptions const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:209:40 #5 0x1ca53d3e in NKikimr::NOlap::TIndexInfo::DeserializeFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:250:5 #6 0x1ca6084b in NKikimr::NOlap::TIndexInfo::BuildFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:333:17 #7 0x25bfef6b in NKikimr::NOlap::TColumnEngineForLogs::RegisterSchemaVersion(NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:103:29 #8 0x25bfaa2e in NKikimr::NOlap::TColumnEngineForLogs::TColumnEngineForLogs(unsigned long, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:42:5 #9 0x2a2e4fea in make_unique &, std::__y1::shared_ptr &, std::__y1::shared_ptr &, const NKikimr::NOlap::TSnapshot &, const unsigned int &, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData, std::__y1::shared_ptr &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30 #10 0x2a2e4fea in NKikimr::NColumnShard::TTablesManager::AddSchemaVersion(unsigned int, NKikimr::NOlap::TSnapshot const&, NKikimrSchemeOp::TColumnTableSchema const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:282:24 #11 0x2a2e6877 in NKikimr::NColumnShard::TTablesManager::AddTableVersion(NKikimr::NColumnShard::TInternalPathId, NKikimr::NOlap::TSnapshot const&, NKikimrTxColumnShard::TTableVersionInfo const&, std::__y1::optional const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:320:9 #12 0x2a1b7c79 in NKikimr::NColumnShard::TColumnShard::RunEnsureTable(NKikimrTxColumnShard::TCreateTable const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:431:19 #13 0x2a1b65e0 in NKikimr::NColumnShard::TColumnShard::RunInit(NKikimrTxColumnShard::TInitShard const&, 
NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:373:9 #14 0x2a1b5d51 in NKikimr::NColumnShard::TColumnShard::RunSchemaTx(NKikimrTxColumnShard::TSchemaTxBody const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:328:13 #15 0xff6795e in NKikimr::NColumnShard::TSchemaTransactionOperator::ProgressOnExecute(NKikimr::NColumnShard::TColumnShard&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/transactions/operators/schema.h:94:19 #16 0x25d9236d in NKikimr::NColumnShard::TColumnShard::TTxProgressTx::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__progress_tx.cpp:80:13 #17 0x1852b274 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #18 0x184f09f6 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4143:9 #19 0x116159cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #20 0x2cd33274 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #21 0x2cd2bae9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #22 0x2cd35e63 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #23 0x3157b473 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #24 0x315597d2 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #25 0x315597d2 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #26 0x315586e7 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:97:5 #27 0x315709e7 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:502:9 #28 0xfff8fba in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:218:21 #29 0xfff5de9 in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13 #30 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #31 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xfff0857 in 
operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 SUMMARY: AddressSanitizer: 2628850 byte(s) leaked in 62008 allocation(s). ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: 2025-04-17T19:16:09.786344Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-17T19:16:09.855406Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-17T19:16:09.858567Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-17T19:16:09.858859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-17T19:16:09.873491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-17T19:16:09.873715Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-17T19:16:09.879909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:09.880116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:09.880344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:16:09.880474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:16:09.880589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:16:09.880712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:16:09.880815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:16:09.880881Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:16:09.880952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:16:09.881015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:16:09.881068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:16:09.881121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:16:09.894559Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-17T19:16:09.896487Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-17T19:16:09.896594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-17T19:16:09.896642Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-17T19:16:09.896764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:16:09.896885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:16:09.896951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-17T19:16:09.896979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-17T19:16:09.897047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-17T19:16:09.897090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-17T19:16:09.897135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-17T19:16:09.897161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-17T19:16:09.897262Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-17T19:16:09.897299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-17T19:16:09.897322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-17T19:16:09.897340Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-17T19:16:09.897399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-17T19:16:09.897444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-17T19:16:09.897488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-17T19:16:09.897507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-17T19:16:09.897550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-17T19:16:09.897575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-17T19:16:09.897591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-17T19:16:09.897634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-17T19:16:09.897660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-17T19:16:09.897675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-17T19:16:09.897950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-04-17T19:16:09.898006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=22; 2025-04-17T19:16:09.898071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-17T19:16:09.898135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-04-17T19:16:09.898258Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-17T19:16:09.898292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-17T19:16:09.898313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-17T19:16:09.898465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-17T19:16:09.898496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-17T19:16:09.898532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-17T19:16:09.898648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-17T19:16:09.898694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-17T19:16:09.898719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-17T19:16:09.898832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-17T19:16:09.898865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-17T19:16:09.898883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-17T1 ... 
anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:230:9
#32 0xfff5eb9 in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13
#33 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#34 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#35 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#36 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#37 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#38 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#39 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#40 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#41 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#42 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#43 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#44 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#45 0x7f423a6b4d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)

Indirect leak of 8 byte(s) in 1 object(s) allocated from:
#0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0x2a288576 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10
#2 0x2a288576 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
#3 0x2a288576 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
#4 0x2a288576 in __allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19
#5 0x2a288576 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:358:25
#6 0x2a288576 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2144:51
#7 0x2a27a8b4 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1528:5
#8 0x2a27a8b4 in NKikimr::NOlap::TBlobManager::StartBlobBatch() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:380:23
#9 0x2a25febd in TWriteAction /-S/ydb/core/tx/columnshard/blobs_action/bs/write.h:41:30
#10 0x2a25febd in void std::__y1::allocator::construct[abi:fe190000]> const&, std::__y1::shared_ptr&>(NKikimr::NOlap::NBlobOperations::NBlobStorage::TWriteAction*, TBasicString> const&, std::__y1::shared_ptr&) /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24
#11 0x2a25562a in construct > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9
#12 0x2a25562a in __shared_ptr_emplace > &, std::__y1::shared_ptr &, std::__y1::allocator, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5
#13 0x2a25562a in allocate_shared, const TBasicString > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51
#14 0x2a25562a in make_shared > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10
#15 0x2a25562a in NKikimr::NOlap::NBlobOperations::NBlobStorage::TOperator::DoStartWritingAction() /-S/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp:16:12
#16 0x25c9b4f5 in NKikimr::NOlap::IBlobsStorageOperator::StartWritingAction(NKikimr::NOlap::NBlobOperations::EConsumer) /-S/ydb/core/tx/columnshard/blobs_action/abstract/storage.h:106:23
#17 0x25cf39a0 in NKikimr::NColumnShard::TWriteOperation::Start(NKikimr::NColumnShard::TColumnShard&, std::__y1::shared_ptr const&, NActors::TActorId const&, NKikimr::NOlap::TWritingContext const&) /-S/ydb/core/tx/columnshard/operations/write.cpp:39:53
#18 0x25eae844 in NKikimr::NColumnShard::TWriteTask::Execute(NKikimr::NColumnShard::TColumnShard*, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:26:21
#19 0x25eafdd8 in NKikimr::NColumnShard::TWriteTasksQueue::Drain(bool, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:45:15
#20 0x25e5fa6e in NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__write.cpp:616:22
#21 0x25de1ff6 in NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/columnshard/columnshard_impl.h:449:13
#22 0x116159cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13
#23 0x2cd33274 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33
#24 0x2cd2bae9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45
#25 0x2cd35e63 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22
#26 0x3157cfe2 in NKikimr::NEvents::TDataEvents::TEvWriteResult* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:446:13
#27 0x3155beb4 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:510:20
#28 0x3155beb4 in NKikimr::NTxUT::WaitWriteResult(NActors::TTestBasicRuntime&, unsigned long, std::__y1::vector>*) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:120:26
#29 0x3155d631 in NKikimr::NTxUT::WriteDataImpl(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, unsigned long, TBasicString> const&, std::__y1::shared_ptr const&, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:146:16
#30 0x3155e660 in NKikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:161:16
#31 0xfff977f in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:230:9
#32 0xfff5ee5 in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1166:13
#33 0xfff0857 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#34 0xfff0857 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#35 0xfff0857 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#36 0xfff0857 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#37 0xfff0857 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#38 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#39 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#40 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#41 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#42 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1
#43 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#44 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#45 0x7f423a6b4d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)

SUMMARY: AddressSanitizer: 2628610 byte(s) leaked in 62005 allocation(s).
|99.0%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log}
|99.0%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[16]
>> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[17]
>> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[18]
>> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[19]
>> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[20]
>> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[21]
>> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[22]
>> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD]
2025-04-17 19:21:56,807 WARNING libarchive: File (test_tpch.py.TestTpchS1.test_tpch.15/cluster/slot_1/logfile_7147t9vh.log) size has changed. Can't write more data than was declared in the tar header (211424408). (probably file was changed during archiving)
|99.1%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.2%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
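The SUMMARY line above follows LeakSanitizer's fixed report format, so aggregate leak totals can be pulled out of a run log mechanically. A minimal sketch in Python, assuming only the "SUMMARY: AddressSanitizer: ... byte(s) leaked in ... allocation(s)." wording seen in this log (the log path is a placeholder, not part of this run's tooling):

    import re

    # LSAN prints one summary line per leaking process; collect them all so a
    # run with several leaking binaries is visible at a glance.
    SUMMARY_RE = re.compile(
        r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)\."
    )

    def collect_leak_summaries(log_path):
        """Return a list of (leaked_bytes, allocations) tuples found in the log."""
        results = []
        with open(log_path, encoding="utf-8", errors="replace") as log:
            for line in log:
                match = SUMMARY_RE.search(line)
                if match:
                    results.append((int(match.group(1)), int(match.group(2))))
        return results

    if __name__ == "__main__":
        # "ya_log.txt" is a placeholder; point it at any log containing ASAN output.
        for leaked, allocs in collect_leak_summaries("ya_log.txt"):
            print(f"{leaked} bytes leaked across {allocs} allocations")

For the run above this would report the single pair (2628610, 62005).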
>> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
Test command err:
2025-04-17T19:15:57.155417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-17T19:15:57.155521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-17T19:15:57.155605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/4cxu/000220/r3tmp/tmp6rjqIH/pdisk_1.dat 2025-04-17T19:15:57.487078Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17728, node 1 2025-04-17T19:15:57.723829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-17T19:15:57.723905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-17T19:15:57.723942Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-17T19:15:57.724697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-17T19:15:57.727079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-17T19:15:57.813231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:15:57.813445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:15:57.827964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11567 2025-04-17T19:15:58.371088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-17T19:16:01.420837Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-17T19:16:01.454610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:01.454734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:01.498917Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-17T19:16:01.500955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:01.760741Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.761260Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.761800Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.762139Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.762270Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.762355Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.762430Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.762502Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.762586Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-17T19:16:01.932083Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-17T19:16:01.932182Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-17T19:16:01.945475Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-17T19:16:02.083236Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-17T19:16:02.126132Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-17T19:16:02.126218Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-17T19:16:02.149304Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-17T19:16:02.152650Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-17T19:16:02.152883Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-17T19:16:02.152970Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-17T19:16:02.153021Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-17T19:16:02.153070Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-17T19:16:02.153120Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-17T19:16:02.153168Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-17T19:16:02.154501Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-17T19:16:02.188082Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-17T19:16:02.188188Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-17T19:16:02.196480Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2609] 2025-04-17T19:16:02.200866Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2618] 2025-04-17T19:16:02.201008Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1911:2618], schemeshard id = 72075186224037897 2025-04-17T19:16:02.213745Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-17T19:16:02.233025Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-17T19:16:02.233088Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-17T19:16:02.233171Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-17T19:16:02.244685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-17T19:16:02.252595Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-17T19:16:02.252757Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-17T19:16:02.454242Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-17T19:16:02.649705Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-17T19:16:02.711475Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-17T19:16:03.491679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:03.491856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-17T19:16:03.510998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-17T19:16:03.629945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-17T19:16:03.630192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-17T19:16:03.630483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-17T19:16:03.630606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-17T19:16:03.630734Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-17T19:16:03.630881Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-17T19:16:03.631001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-17T19:16:03.631161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-17T19:16:03.631310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-17T19:16:03.631427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-17T19:16:03.631533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-17T19:16:03.631664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-17T19:16:03.661274Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-17T19:16:03.661386Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 17T19:24:47.709069Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:24:47.709121Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:47.709147Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:24:48.966783Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:24:48.966848Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:48.966877Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:24:50.151367Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:24:50.151432Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:50.151460Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:24:51.369251Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:24:51.379781Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:24:51.379838Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:51.379866Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:24:52.578556Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:24:52.578620Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:52.578646Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:24:53.782695Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:24:53.782835Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:24:53.793282Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:24:53.793330Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:53.793355Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:24:55.021788Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:24:55.021860Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:55.021887Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2025-04-17T19:24:56.210913Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:24:56.210980Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:56.211009Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:24:57.388591Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:24:57.399017Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:24:57.399066Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:57.399095Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:24:58.705326Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:24:58.705387Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:58.705414Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:24:59.917472Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:24:59.917628Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:24:59.928124Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:24:59.928170Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:24:59.928195Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:25:01.128938Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:25:01.129007Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:01.129034Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:25:02.309505Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:25:02.309569Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:02.309594Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:25:03.529965Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:25:03.540350Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:25:03.540389Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:03.540426Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:25:04.890687Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:25:04.890756Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:04.890783Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:25:06.174296Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:25:06.174435Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:25:06.184989Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:25:06.185038Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:06.185064Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:25:07.402848Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:25:07.402912Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:07.402938Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:25:08.585025Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:25:08.585091Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:08.585119Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:25:09.782257Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-17T19:25:09.792659Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:25:09.792705Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:09.792731Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:25:11.099890Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:25:11.099958Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:11.099984Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-17T19:25:12.305857Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-17T19:25:12.306005Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-17T19:25:12.316537Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-17T19:25:12.316590Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-17T19:25:12.316615Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-17T19:25:13.561536Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-17T19:25:13.561597Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId
2025-04-17T19:25:13.561625Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.
ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture()+28 (0x18EA949C)
TWithBackTrace::TWithBackTrace<>()+80 (0x18AD13D0)
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x18AA5A55)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x18AC25F7)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18ACD488)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19394ED6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1936D929)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x18ACC654)
NUnitTest::TTestFactory::Execute()+2438 (0x1936F1F6)
NUnitTest::RunMain(int, char**)+5213 (0x1938F44D)
??+0 (0x7F3ABB2A9D90)
__libc_start_main+128 (0x7F3ABB2A9E40)
_start+41 (0x163FC029)
|99.3%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|99.4%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
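The failure above is the test actor runtime's event budget firing: GrabEdgeEventRethrow kept dispatching events while waiting for TEvAnalyzeResponse until the runtime had processed over 100000 of them, then threw TSchedulingLimitReachedException instead of hanging forever. The same guard pattern, sketched in Python (the names pump_until, SchedulingLimitReached and the cap value are illustrative, not the actual test_runtime API):

    import itertools

    class SchedulingLimitReached(Exception):
        """Raised when the event budget runs out before the wanted event shows up."""

    def pump_until(event_source, is_wanted, max_events=100_000):
        """Dispatch events one by one; fail loudly instead of spinning forever.

        event_source is any iterator of events and is_wanted is the predicate
        the caller waits on (both are assumptions of this sketch).
        """
        for processed, event in enumerate(event_source, start=1):
            if is_wanted(event):
                return event
            if processed >= max_events:
                # Mirrors the log above: give up with a diagnostic once the
                # budget is spent, since the wanted event is clearly not coming.
                raise SchedulingLimitReached(f"Processed over {max_events} events")
        raise SchedulingLimitReached("event source exhausted")

    if __name__ == "__main__":
        # The producer never emits the wanted event, so the cap fires.
        try:
            pump_until(itertools.repeat({"type": "progress"}),
                       lambda e: e["type"] == "analyze_response",
                       max_events=1000)
        except SchedulingLimitReached as err:
            print("gave up:", err)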
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-04-17 19:25:53,454 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-17 19:25:53,644 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
1011636 738M 743M 655M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/4cxu/00019a/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod
1013153 2.7G 2.7G 2.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/4cxu/00019a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_
1016034 483M 483M 449M └─ moto_server s3 --port 3146
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test
    if not self.wait_for(
  File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for
    time.sleep(1)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ..._migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/00019a', '--source-root', '/home/runner/.ya/build/build_root/4cxu/00019a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/00019a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("..._migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/00019a', '--source-root', '/home/runner/.ya/build/build_root/4cxu/00019a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/00019a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {})
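Both ttl_tiering timeouts (this block and the next) are stuck inside the suite's wait_for helper from ydb/tests/olap/ttl_tiering/base.py, which the frames show polling condition_func() with time.sleep(1). A minimal reconstruction of that polling contract, inferred from the traceback rather than copied from the actual source:

    import time

    def wait_for(condition_func, timeout_seconds):
        """Poll condition_func about once per second until it returns truthy.

        Returns True on success and False on timeout, matching how the tests
        above check `if not self.wait_for(...)`.
        """
        deadline = time.monotonic() + timeout_seconds
        while time.monotonic() < deadline:
            if condition_func():
                return True
            time.sleep(1)
        return False

Note the failure mode visible in the next block: the deadline is only checked between polls, so when condition_func itself blocks (there, inside a gRPC read), wait_for never regains control and it is the outer 600-second test wrapper that finally fires.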
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 265, in test_ttl_delete if not self.wait_for(lambda: self.portions_actualized_in_sys(self.table), 120): File "ydb/tests/olap/ttl_tiering/base.py", line 86, in wait_for if condition_func(): File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 265, in if not self.wait_for(lambda: self.portions_actualized_in_sys(self.table), 120): File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 22, in portions_actualized_in_sys portions = table.get_portion_stat_by_tier() File "ydb/tests/olap/common/column_table_helper.py", line 26, in get_portion_stat_by_tier results = self.ydb_client.query( File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in 
opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ring/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/0001a1', '--source-root', '/home/runner/.ya/build/build_root/4cxu/0001a1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/0001a1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ring/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/0001a1', '--source-root', 
'/home/runner/.ya/build/build_root/4cxu/0001a1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/0001a1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) |99.6%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-04-17 19:25:53,469 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 19:25:53,973 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
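The polling above bottoms out in the YDB Python SDK's QuerySessionPool.execute_with_retries (contrib/python/ydb/py3/ydb/query/pool.py in the frames), which retries transient YDB errors but can sit in a gRPC read indefinitely; that is exactly where this hang, and the s3_import one that follows, stopped. A minimal usage sketch; the endpoint and database values are placeholders:

    import ydb

    driver = ydb.Driver(
        endpoint="grpc://localhost:2136",  # placeholder endpoint
        database="/Root",                  # placeholder database
    )
    driver.wait(timeout=5)

    pool = ydb.QuerySessionPool(driver)

    # execute_with_retries retries transient failures, but it does not bound the
    # time spent reading a single result stream, so slow queries still need an
    # external deadline (here that was the test wrapper's 600 s limit).
    result_sets = pool.execute_with_retries("SELECT 1 AS one;")
    print(result_sets[0].rows[0]["one"])

    pool.stop()
    driver.stop()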
Process tree before termination: pid rss ref pdirt 1011649 583M 587M 501M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/4cxu/00018c/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modul 1013122 10.8G 10.8G 10.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/4cxu/00018c/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tp 1014900 507M 507M 472M └─ moto_server s3 --port 10607 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, 
in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 99, in test_import_and_export self.validate_table("s3_table") File "ydb/tests/olap/s3_import/test_tpch_import.py", line 12, in validate_table result_sets = self.ydb_client.query(f""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...import', '--basetemp', '/home/runner/.ya/build/build_root/4cxu/00018c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/4cxu/00018c/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/00018c', '--source-root', '/home/runner/.ya/build/build_root/4cxu/00018c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/00018c/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', 
'/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...import', '--basetemp', '/home/runner/.ya/build/build_root/4cxu/00018c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/4cxu/00018c/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/00018c', '--source-root', '/home/runner/.ya/build/build_root/4cxu/00018c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/00018c/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.8%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression 2025-04-17 19:26:09,089 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 19:26:09,354 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
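
The timeout chain in these reports is the same each time: yatest.common.process.wait calls wait_for, which raises TimeoutError carrying the truncated command line, and run_test then re-raises it as ExecutionTimeoutError. A minimal sketch of that mechanism, assuming a simple polling loop; the names wait_for, truncate and MAX_MESSAGE_LEN come from the traceback frames in library/python/testing/yatest_common/yatest/common/process.py, but the body and the MAX_MESSAGE_LEN value are illustrative, not yatest's actual code:

import time

MAX_MESSAGE_LEN = 1500  # assumed cap; the real value is not shown in the log

def truncate(message: str, limit: int) -> str:
    # The truncated messages in the log begin with "...", so it is the tail
    # of the command line that is kept.
    return message if len(message) <= limit else "..." + message[-limit:]

def wait_for(check, timeout: float, message: str, sleep_time: float = 0.1) -> None:
    # Poll `check` until it returns True or `timeout` seconds elapse.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if check():
            return
        time.sleep(sleep_time)
    # This is the raise visible in every timed-out report: the wrapper has
    # overrun its 600 s budget, so the truncated command line is attached to
    # a TimeoutError (the builtin stands in for yatest's own class here),
    # which the caller re-raises as ExecutionTimeoutError.
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
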
Process tree before termination: pid rss ref pdirt 1020182 792M 799M 713M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/4cxu/000175/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 1021187 5.9G 5.9G 5.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/4cxu/000175/ydb/tests/olap/column_family/compression/test-results/py3test/testing_o Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 110, in test_all_supported_compression volumes: tuple[int, int] = tables[i].get_volumes_column("value") File "ydb/tests/olap/common/column_table_helper.py", line 73, in get_volumes_column time.sleep(10) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/4cxu/000175/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/000175', '--source-root', '/home/runner/.ya/build/build_root/4cxu/000175/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/000175/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...', '--doctest-modules', '--ya-trace', 
'/home/runner/.ya/build/build_root/4cxu/000175/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/000175', '--source-root', '/home/runner/.ya/build/build_root/4cxu/000175/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/000175/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] 2025-04-17 19:26:09,189 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-17 19:26:09,349 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1020251 999M 1005M 905M ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/4cxu/000177/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 1030682 2.0G 2.0G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/4cxu/000177/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. 
only write Write: 10% 236 30% 236 50% 236 90% 236 99% 236 ms Write: 10% 1086 30% 1086 50% 1086 90% 1086 99% 1086 ms Write: 10% 1440 30% 1440 50% 1440 90% 1440 99% 1440 ms Write: 10% 9128 30% 9128 50% 9128 90% 9128 99% 9128 ms Write: 10% 9175 30% 9175 50% 9175 90% 9175 99% 9175 ms Write: 10% 8491 30% 8491 50% 8491 90% 8491 99% 8491 ms Write: 10% 9108 30% 9108 50% 9108 90% 9108 99% 9108 ms Write: 10% 8585 30% 8585 50% 8585 90% 8585 99% 8585 ms Write: 10% 9170 30% 9170 50% 9170 90% 9170 99% 9170 ms Write: 10% 8941 30% 8941 50% 8941 90% 8941 99% 8941 ms Write: 10% 8484 30% 8484 50% 8484 90% 8484 99% 8484 ms Write: 10% 7939 30% 7939 50% 7939 90% 7939 99% 7939 ms Write: 10% 8024 30% 8024 50% 8024 90% 8024 99% 8024 ms Write: 10% 8276 30% 8276 50% 8276 90% 8276 99% 8276 ms Write: 10% 8172 30% 8172 50% 8172 90% 8172 99% 8172 ms Write: 10% 7909 30% 7909 50% 7909 90% 7909 99% 7909 ms Write: 10% 7821 30% 7821 50% 7821 90% 7821 99% 7821 ms Write: 10% 8291 30% 8291 50% 8291 90% 8291 99% 8291 ms Write: 10% 7471 30% 7471 50% 7471 90% 7471 99% 7471 ms Write: 10% 7394 30% 7394 50% 7394 90% 7394 99% 7394 ms Write: 10% 7132 30% 7132 50% 7132 90% 7132 99% 7132 ms Write: 10% 7792 30% 7792 50% 7792 90% 7792 99% 7792 ms Write: 10% 7062 30% 7062 50% 7062 90% 7062 99% 7062 ms Write: 10% 6341 30% 6341 50% 6341 90% 6341 99% 6341 ms Write: 10% 6192 30% 6192 50% 6192 90% 6192 99% 6192 ms Write: 10% 6981 30% 6981 50% 6981 90% 6981 99% 6981 ms Write: 10% 5723 30% 5723 50% 5723 90% 5723 99% 5723 ms Write: 10% 5708 30% 5708 50% 5708 90% 5708 99% 5708 ms Write: 10% 6968 30% 6968 50% 6968 90% 6968 99% 6968 ms Write: 10% 4642 30% 4642 50% 4642 90% 4642 99% 4642 ms Write: 10% 4680 30% 4680 50% 4680 90% 4680 99% 4680 ms Write: 10% 5247 30% 5247 50% 5247 90% 5247 99% 5247 ms Write: 10% 3794 30% 3794 50% 3794 90% 3794 99% 3794 ms Write: 10% 5805 30% 5805 50% 5805 90% 5805 99% 5805 ms Write: 10% 3193 30% 3193 50% 3193 90% 3193 99% 3193 ms Write: 10% 3088 30% 3088 50% 3088 90% 3088 99% 3088 ms Write: 10% 3098 30% 3098 50% 3098 90% 3098 99% 3098 ms Write: 10% 3007 30% 3007 50% 3007 90% 3007 99% 3007 ms Write: 10% 3030 30% 3030 50% 3030 90% 3030 99% 3030 ms Write: 10% 2850 30% 2850 50% 2850 90% 2850 99% 2850 ms Write: 10% 3002 30% 3002 50% 3002 90% 3002 99% 3002 ms Write: 10% 3105 30% 3105 50% 3105 90% 3105 99% 3105 ms Write: 10% 3259 30% 3259 50% 3259 90% 3259 99% 3259 ms Write: 10% 2752 30% 2752 50% 2752 90% 2752 99% 2752 ms Write: 10% 2479 30% 2479 50% 2479 90% 2479 99% 2479 ms Write: 10% 3230 30% 3230 50% 3230 90% 3230 99% 3230 ms Write: 10% 2395 30% 2395 50% 2395 90% 2395 99% 2395 ms Write: 10% 3117 30% 3117 50% 3117 90% 3117 99% 3117 ms Write: 10% 2458 30% 2458 50% 2458 90% 2458 99% 2458 ms Write: 10% 2262 30% 2262 50% 2262 90% 2262 99% 2262 ms Write: 10% 2114 30% 2114 50% 2114 90% 2114 99% 2114 ms Write: 10% 2210 30% 2210 50% 2210 90% 2210 99% 2210 ms Write: 10% 2166 30% 2166 50% 2166 90% 2166 99% 2166 ms Write: 10% 2900 30% 2900 50% 2900 90% 2900 99% 2900 ms Write: 10% 1695 30% 1695 50% 1695 90% 1695 99% 1695 ms Write: 10% 1735 30% 1735 50% 1735 90% 1735 99% 1735 ms Write: 10% 1709 30% 1709 50% 1709 90% 1709 99% 1709 ms Write: 10% 1893 30% 1893 50% 1893 90% 1893 99% 1893 ms Write: 10% 2007 30% 2007 50% 2007 90% 2007 99% 2007 ms Write: 10% 1693 30% 1693 50% 1693 90% 1693 99% 1693 ms Write: 10% 1630 30% 1630 50% 1630 90% 1630 99% 1630 ms Write: 10% 1611 30% 1611 50% 1611 90% 1611 99% 1611 ms Write: 10% 2446 30% 2446 50% 2446 90% 2446 99% 2446 ms Write: 10% 1635 30% 1635 50% 1635 90% 1635 99% 1635 ms Step 2. 
read write Write: 10% 3005 30% 3005 50% 3005 90% 3005 99% 3005 ms Write: 10% 3836 30% 3836 50% 3836 90% 3836 99% 3836 ms Write: 10% 6300 30% 6300 50% 6300 90% 6300 99% 6300 ms Write: 10% 9322 30% 9322 50% 9322 90% 9322 99% 9322 ms Write: 10% 10427 30% 10427 50% 10427 90% 10427 99% 10427 ms Write: 10% 10718 30% 10718 50% 10718 90% 10718 99% 10718 ms Write: 10% 10564 30% 10564 50% 10564 90% 10564 99% 10564 ms Write: 10% 10522 30% 10522 50% 10522 90% 10522 99% 10522 ms Write: 10% 10208 30% 10208 50% 10208 90% 10208 99% 10208 ms Write: 10% 10278 30% 10278 50% 10278 90% 10278 99% 10278 ms Write: 10% 10502 30% 10502 50% 10502 90% 10502 99% 10502 ms Write: 10% 10726 30% 10726 50% 10726 90% 10726 99% 10726 ms Write: 10% 10327 30% 10327 50% 10327 90% 10327 99% 10327 ms Write: 10% 10416 30% 10416 50% 10416 90% 10416 99% 10416 ms Write: 10% 10514 30% 10514 50% 10514 90% 10514 99% 10514 ms Write: 10% 10503 30% 10503 50% 10503 90% 10503 99% 10503 ms Write: 10% 9769 30% 9769 50% 9769 90% 9769 99% 9769 ms Write: 10% 9610 30% 9610 50% 9610 90% 9610 99% 9610 ms Write: 10% 9579 30% 9579 50% 9579 90% 9579 99% 9579 ms Write: 10% 8782 30% 8782 50% 8782 90% 8782 99% 8782 ms Write: 10% 8951 30% 8951 50% 8951 90% 8951 99% 8951 ms Write: 10% 8542 30% 8542 50% 8542 90% 8542 99% 8542 ms Write: 10% 8497 30% 8497 50% 8497 90% 8497 99% 8497 ms Write: 10% 8499 30% 8499 50% 8499 90% 8499 99% 8499 ms Write: 10% 8568 30% 8568 50% 8568 90% 8568 99% 8568 ms Write: 10% 7848 30% 7848 50% 7848 90% 7848 99% 7848 ms Write: 10% 6558 30% 6558 50% 6558 90% 6558 99% 6558 ms Write: 10% 6719 30% 6719 50% 6719 90% 6719 99% 6719 ms Write: 10% 7761 30% 7761 50% 7761 90% 7761 99% 7761 ms Write: 10% 7309 30% 7309 50% 7309 90% 7309 99% 7309 ms Write: 10% 7592 30% 7592 50% 7592 90% 7592 99% 7592 ms Write: 10% 5234 30% 5234 50% 5234 90% 5234 99% 5234 ms Write: 10% 5949 30% 5949 50% 5949 90% 5949 99% 5949 ms Write: 10% 6112 30% 6112 50% 6112 90% 6112 99% 6112 ms Write: 10% 5524 30% 5524 50% 5524 90% 5524 99% 5524 ms Write: 10% 4892 30% 4892 50% 4892 90% 4892 99% 4892 ms Write: 10% 6227 30% 6227 50% 6227 90% 6227 99% 6227 ms Write: 10% 5042 30% 5042 50% 5042 90% 5042 99% 5042 ms Write: 10% 5144 30% 5144 50% 5144 90% 5144 99% 5144 ms Write: 10% 4537 30% 4537 50% 4537 90% 4537 99% 4537 ms Write: 10% 3739 30% 3739 50% 3739 90% 3739 99% 3739 ms Write: 10% 4028 30% 4028 50% 4028 90% 4028 99% 4028 ms Write: 10% 3358 30% 3358 50% 3358 90% 3358 99% 3358 ms Write: 10% 3534 30% 3534 50% 3534 90% 3534 99% 3534 ms Write: 10% 2929 30% 2929 50% 2929 90% 2929 99% 2929 ms Write: 10% 3599 30% 3599 50% 3599 90% 3599 99% 3599 ms Write: 10% 3066 30% 3066 50% 3066 90% 3066 99% 3066 ms Write: 10% 2780 30% 2780 50% 2780 90% 2780 99% 2780 ms Write: 10% 2960 30% 2960 50% 2960 90% 2960 99% 2960 ms Write: 10% 2656 30% 2656 50% 2656 90% 2656 99% 2656 ms Write: 10% 2620 30% 2620 50% 2620 90% 2620 99% 2620 ms Write: 10% 2323 30% 2323 50% 2323 90% 2323 99% 2323 ms Write: 10% 2714 30% 2714 50% 2714 90% 2714 99% 2714 ms Write: 10% 1990 30% 1990 50% 1990 90% 1990 99% 1990 ms Write: 10% 1821 30% 1821 50% 1821 90% 1821 99% 1821 ms Write: 10% 1947 30% 1947 50% 1947 90% 1947 99% 1947 ms Write: 10% 1778 30% 1778 50% 1778 90% 1778 99% 1778 ms Write: 10% 1859 30% 1859 50% 1859 90% 1859 99% 1859 ms Write: 10% 2049 30% 2049 50% 2049 90% 2049 99% 2049 ms Write: 10% 1852 30% 1852 50% 1852 90% 1852 99% 1852 ms Write: 10% 1725 30% 1725 50% 1725 90% 1725 99% 1725 ms Write: 10% 1765 30% 1765 50% 1765 90% 1765 99% 1765 ms Write: 10% 1906 30% 1906 50% 1906 90% 1906 99% 1906 ms Write: 10% 1637 
30% 1637 50% 1637 90% 1637 99% 1637 ms Read: 10% 4324 30% 6698 50% 9071 90% 13818 99% 14886 ms Step 3. write modify Write: 10% 1715 30% 1715 50% 1715 90% 1715 99% 1715 ms Write: 10% 4578 30% 4578 50% 4578 90% 4578 99% 4578 ms Write: 10% 8643 30% 8643 50% 8643 90% 8643 99% 8643 ms Write: 10% 8164 30% 8164 50% 8164 90% 8164 99% 8164 ms Write: 10% 8826 30% 8826 50% 8826 90% 8826 99% 8826 ms Write: 10% 9504 30% 9504 50% 9504 90% 9504 99% 9504 ms Write: 10% 9664 30% 9664 50% 9664 90% 9664 99% 9664 ms Write: 10% 9481 30% 9481 50% 9481 90% 9481 99% 9481 ms Write: 10% 9552 30% 9552 50% 9552 90% 9552 99% 9552 ms Write: 10% 9206 30% 9206 50% 9206 90% 9206 99% 9206 ms Write: 10% 8762 30% 8762 50% 8762 90% 8762 99% 8762 ms Write: 10% 8822 30% 8822 50% 8822 90% 8822 99% 8822 ms Write: 10% 8966 30% 8966 50% 8966 90% 8966 99% 8966 ms Write: 10% 8645 30% 8645 50% 8645 90% 8645 99% 8645 ms Write: 10% 8389 30% 8389 50% 8389 90% 8389 99% 8389 ms Write: 10% 8812 30% 8812 50% 8812 90% 8812 99% 8812 ms Write: 10% 8423 30% 8423 50% 8423 90% 8423 99% 8423 ms Write: 10% 8499 30% 8499 50% 8499 90% 8499 99% 8499 ms Write: 10% 8289 30% 8289 50% 8289 90% 8289 99% 8289 ms Write: 10% 8308 30% 8308 50% 8308 90% 8308 99% 8308 ms Write: 10% 8605 30% 8605 50% 8605 90% 8605 99% 8605 ms Write: 10% 8209 30% 8209 50% 8209 90% 8209 99% 8209 ms Write: 10% 7976 30% 7976 50% 7976 90% 7976 99% 7976 ms Write: 10% 8031 30% 8031 50% 8031 90% 8031 99% 8031 ms Write: 10% 7925 30% 7925 50% 7925 90% 7925 99% 7925 ms Write: 10% 7820 30% 7820 50% 7820 90% 7820 99% 7820 ms Write: 10% 7768 30% 7768 50% 7768 90% 7768 99% 7768 ms Write: 10% 7229 30% 7229 50% 7229 90% 7229 99% 7229 ms Write: 10% 7489 30% 7489 50% 7489 90% 7489 99% 7489 ms Write: 10% 6817 30% 6817 50% 6817 90% 6817 99% 6817 ms Write: 10% 7177 30% 7177 50% 7177 90% 7177 99% 7177 ms Write: 10% 7044 30% 7044 50% 7044 90% 7044 99% 7044 ms Write: 10% 6365 30% 6365 50% 6365 90% 6365 99% 6365 ms Write: 10% 6803 30% 6803 50% 6803 90% 6803 99% 6803 ms Write: 10% 7026 30% 7026 50% 7026 90% 7026 99% 7026 ms Write: 10% 6588 30% 6588 50% 6588 90% 6588 99% 6588 ms Write: 10% 6128 30% 6128 50% 6128 90% 6128 99% 6128 ms Write: 10% 6095 30% 6095 50% 6095 90% 6095 99% 6095 ms Write: 10% 5601 30% 5601 50% 5601 90% 5601 99% 5601 ms Write: 10% 5090 30% 5090 50% 5090 90% 5090 99% 5090 ms Write: 10% 5494 30% 5494 50% 5494 90% 5494 99% 5494 ms Write: 10% 4792 30% 4792 50% 4792 90% 4792 99% 4792 ms Write: 10% 4658 30% 4658 50% 4658 90% 4658 99% 4658 ms Write: 10% 4499 30% 4499 50% 4499 90% 4499 99% 4499 ms Write: 10% 6526 30% 6526 50% 6526 90% 6526 99% 6526 ms Write: 10% 4319 30% 4319 50% 4319 90% 4319 99% 4319 ms Write: 10% 4595 30% 4595 50% 4595 90% 4595 99% 4595 ms Write: 10% 4929 30% 4929 50% 4929 90% 4929 99% 4929 ms Write: 10% 4169 30% 4169 50% 4169 90% 4169 99% 4169 ms Write: 10% 4280 30% 4280 50% 4280 90% 4280 99% 4280 ms Write: 10% 4027 30% 4027 50% 4027 90% 4027 99% 4027 ms Write: 10% 3657 30% 3657 50% 3657 90% 3657 99% 3657 ms Write: 10% 3245 30% 3245 50% 3245 90% 3245 99% 3245 ms Write: 10% 3982 30% 3982 50% 3982 90% 3982 99% 3982 ms Write: 10% 3194 30% 3194 50% 3194 90% 3194 99% 3194 ms Write: 10% 3101 30% 3101 50% 3101 90% 3101 99% 3101 ms Write: 10% 4203 30% 4203 50% 4203 90% 4203 99% 4203 ms Write: 10% 3106 30% 3106 50% 3106 90% 3106 99% 3106 ms Write: 10% 3671 30% 3671 50% 3671 90% 3671 99% 3671 ms Write: 10% 3102 30% 3102 50% 3102 90% 3102 99% 3102 ms Write: 10% 3108 30% 3108 50% 3108 90% 3108 99% 3108 ms Write: 10% 3254 30% 3254 50% 3254 90% 3254 99% 3254 ms Write: 10% 3048 30% 3048 50% 
3048 90% 3048 99% 3048 ms Write: 10% 3486 30% 3486 50% 3486 90% 3486 99% 3486 ms Update: 10% 831 30% 831 50% 831 90% 831 99% 831 ms Step 4. read modify write Write: 10% 3351 30% 3351 50% 3351 90% 3351 99% 3351 ms Write: 10% 3482 30% 3482 50% 3482 90% 3482 99% 3482 ms Write: 10% 4320 30% 4320 50% 4320 90% 4320 99% 4320 ms Write: 10% 5334 30% 5334 50% 5334 90% 5334 99% 5334 ms Write: 10% 8047 30% 8047 50% 8047 90% 8047 99% 8047 ms Write: 10% 10655 30% 10655 50% 10655 90% 10655 99% 10655 ms Write: 10% 10672 30% 10672 50% 10672 90% 10672 99% 10672 ms Write: 10% 10459 30% 10459 50% 10459 90% 10459 99% 10459 ms Write: 10% 10940 30% 10940 50% 10940 90% 10940 99% 10940 ms Write: 10% 10767 30% 10767 50% 10767 90% 10767 99% 10767 ms Was written: 25.0 MiB, Speed: 0.4166666666666667 MiB/s Write: 10% 10744 30% 10744 50% 10744 90% 10744 99% 10744 ms Write: 10% 10702 30% 10702 50% 10702 90% 10702 99% 10702 ms Write: 10% 10435 30% 10435 50% 10435 90% 10435 99% 10435 ms Write: 10% 10235 30% 10235 50% 10235 90% 10235 99% 10235 ms Write: 10% 10127 30% 10127 50% 10127 90% 10127 99% 10127 ms Write: 10% 10016 30% 10016 50% 10016 90% 10016 99% 10016 ms Write: 10% 10084 30% 10084 50% 10084 90% 10084 99% 10084 ms Write: 10% 9975 30% 9975 50% 9975 90% 9975 99% 9975 ms Write: 10% 9909 30% 9909 50% 9909 90% 9909 99% 9909 ms Write: 10% 9943 30% 9943 50% 9943 90% 9943 99% 9943 ms Write: 10% 9792 30% 9792 50% 9792 90% 9792 99% 9792 ms Write: 10% 9295 30% 9295 50% 9295 90% 9295 99% 9295 ms Write: 10% 9159 30% 9159 50% 9159 90% 9159 99% 9159 ms Write: 10% 9079 30% 9079 50% 9079 90% 9079 99% 9079 ms Write: 10% 9042 30% 9042 50% 9042 90% 9042 99% 9042 ms Write: 10% 8681 30% 8681 50% 8681 90% 8681 99% 8681 ms Write: 10% 8440 30% 8440 50% 8440 90% 8440 99% 8440 ms Write: 10% 8516 30% 8516 50% 8516 90% 8516 99% 8516 ms Write: 10% 7759 30% 7759 50% 7759 90% 7759 99% 7759 ms Write: 10% 8206 30% 8206 50% 8206 90% 8206 99% 8206 ms Write: 10% 7164 30% 7164 50% 7164 90% 7164 99% 7164 ms Write: 10% 7837 30% 7837 50% 7837 90% 7837 99% 7837 ms Write: 10% 6288 30% 6288 50% 6288 90% 6288 99% 6288 ms Write: 10% 6889 30% 6889 50% 6889 90% 6889 99% 6889 ms Write: 10% 6571 30% 6571 50% 6571 90% 6571 99% 6571 ms Write: 10% 7638 30% 7638 50% 7638 90% 7638 99% 7638 ms Write: 10% 6118 30% 6118 50% 6118 90% 6118 99% 6118 ms Write: 10% 6214 30% 6214 50% 6214 90% 6214 99% 6214 ms Write: 10% 4703 30% 4703 50% 4703 90% 4703 99% 4703 ms Write: 10% 4684 30% 4684 50% 4684 90% 4684 99% 4684 ms Write: 10% 5660 30% 5660 50% 5660 90% 5660 99% 5660 ms Write: 10% 5623 30% 5623 50% 5623 90% 5623 99% 5623 ms Write: 10% 5644 30% 5644 50% 5644 90% 5644 99% 5644 ms Write: 10% 4576 30% 4576 50% 4576 90% 4576 99% 4576 ms Write: 10% 5003 30% 5003 50% 5003 90% 5003 99% 5003 ms Write: 10% 7257 30% 7257 50% 7257 90% 7257 99% 7257 ms Write: 10% 5586 30% 5586 50% 5586 90% 5586 99% 5586 ms Write: 10% 4323 30% 4323 50% 4323 90% 4323 99% 4323 ms Write: 10% 4817 30% 4817 50% 4817 90% 4817 99% 4817 ms Write: 10% 4522 30% 4522 50% 4522 90% 4522 99% 4522 ms Write: 10% 4678 30% 4678 50% 4678 90% 4678 99% 4678 ms Write: 10% 4489 30% 4489 50% 4489 90% 4489 99% 4489 ms Write: 10% 4231 30% 4231 50% 4231 90% 4231 99% 4231 ms Write: 10% 3598 30% 3598 50% 3598 90% 3598 99% 3598 ms Write: 10% 3667 30% 3667 50% 3667 90% 3667 99% 3667 ms Write: 10% 3471 30% 3471 50% 3471 90% 3471 99% 3471 ms Write: 10% 3555 30% 3555 50% 3555 90% 3555 99% 3555 ms Write: 10% 3992 30% 3992 50% 3992 90% 3992 99% 3992 ms Write: 10% 3849 30% 3849 50% 3849 90% 3849 99% 3849 ms Write: 10% 3672 30% 3672 50% 3672 
90% 3672 99% 3672 ms Write: 10% 3972 30% 3972 50% 3972 90% 3972 99% 3972 ms Write: 10% 3667 30% 3667 50% 3667 90% 3667 99% 3667 ms Write: 10% 3645 30% 3645 50% 3645 90% 3645 99% 3645 ms Write: 10% 3926 30% 3926 50% 3926 90% 3926 99% 3926 ms Update: 10% 886 30% 886 50% 886 90% 886 99% 886 ms Read: 10% 4731 30% 7197 50% 9663 90% 14595 99% 15705 ms File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File 
"contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/4cxu/000177/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...les', '--ya-trace', '/home/runner/.ya/build/build_root/4cxu/000177/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/000177', '--source-root', '/home/runner/.ya/build/build_root/4cxu/000177/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/000177/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File 
"library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...les', '--ya-trace', '/home/runner/.ya/build/build_root/4cxu/000177/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4cxu/000177', '--source-root', '/home/runner/.ya/build/build_root/4cxu/000177/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4cxu/000177/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT ydb/tests/fq/streaming_optimize [size:medium] nchunks:4 ------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:81.70s - recipes:0.53s test:80.69s recipes:0.40s) [fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (12.29s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8og496a0/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2b97f2f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2b97f29500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2b977b2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2b978b0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2b978b0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2b978b0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2b977b1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2b9785e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2b97756e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2b97756e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2b97756c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2b978ade0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2b978ade0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (9.96s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec 
proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2zciclw4/topic_3.txt' has failed with code 100. 
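
Note the two-stage failure shape here: execute() is called with check_exit_code=False, so exit code 100 alone is tolerated, but wait()/_finalise() still runs verify_sanitize_errors over the captured stderr and turns the AddressSanitizer summary that follows below into an ExecutionError. A minimal sketch of that check, under assumptions; only the frame names come from the traceback, and the regex and function shape are illustrative, not yatest's real code:

import re

class ExecutionError(Exception):
    pass

def verify_sanitize_errors(stderr: bytes) -> None:
    # Matches summaries such as the one below:
    # "SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)."
    if re.search(rb"SUMMARY: \w*Sanitizer:", stderr):
        raise ExecutionError("sanitizer errors found in process stderr")
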
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc6714ff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc6714f9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc670d82d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc670e80464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc670e80464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc670e80464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc670d81b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc670e2e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc670d26e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc670d26e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc670d26c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc670e7de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc670e7de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (10.93s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pjytvu03/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f575f4cf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f575f4c9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f575ed52d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f575ee50464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f575ee50464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f575ee50464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f575ed51b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f575edfe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f575ecf6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f575ecf6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f575ecf6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f575ee4de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f575ee4de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423694 byte(s) leaked in 8242 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (12.12s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnk6s_06/topic_3.txt' has failed with code 100. 
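
Each of these failures reaches fqrun through the same call site, ydb/tests/fq/tools/fqrun.py line 80 (yql_exec), which assembles the long command line echoed above and hands it to yatest. A sketch of that invocation pattern, assuming names beyond those visible in the traceback; only cmd.strip().split(), check_exit_code=False, cwd=self.res_dir and the flags echoed in the error messages are taken from the log:

import yatest.common  # available inside the ya test environment

def yql_exec(fqrun_binary: str, action: str, query_file: str, res_dir: str):
    # These flags appear in the failing commands above; how the real FqRun
    # helper assembles them is an assumption for illustration.
    cmd = (
        f"{fqrun_binary} --exclude-linked-udfs --action={action} "
        f"--query={query_file} --result-format=full-proto --canonical-output"
    )
    # check_exit_code=False: exit code 100 is not fatal by itself; the
    # sanitizer check sketched earlier is what fails these runs.
    return yatest.common.process.execute(
        cmd.strip().split(), check_exit_code=False, cwd=res_dir
    )
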
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fca9989f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fca99899500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fca99122d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fca99220464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fca99220464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fca99220464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fca99121b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fca991ce90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fca990c6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fca990c6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fca990c6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fca9921de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fca9921de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (11.68s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cuegr277/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa1b6d2f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa1b6d29500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa1b65b2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa1b66b0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa1b66b0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa1b66b0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa1b65b1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa1b665e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa1b6556e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa1b6556e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa1b6556c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa1b66ade0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa1b66ade0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (10.10s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d8d4m_3d/topic_3.txt' has failed with code 100. 
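Every failure in this chunk follows the control flow visible in the traceback above: the test calls yatest.common.process.execute() with check_exit_code=False, yet still gets an ExecutionError, because _finalise() calls verify_sanitize_errors() unconditionally and fqrun's output carries an AddressSanitizer leak report. A minimal mirror of that behavior, assuming a simplified model of yatest.common.process rather than its actual source:

# Simplified model (an assumption, not the real yatest source): shows why
# check_exit_code=False does not stop the ExecutionError seen above, since
# sanitizer output is verified regardless of the exit-code check.
class ExecutionError(Exception):
    pass

def finalise(stderr: bytes, exit_code: int, check_exit_code: bool) -> None:
    if b"SUMMARY: AddressSanitizer" in stderr:
        # fqrun exited with code 100 and its stderr holds a leak summary,
        # so this branch fires for every test in the suite.
        raise ExecutionError("sanitizer errors in command output")
    if check_exit_code and exit_code != 0:
        raise ExecutionError(f"command failed with code {exit_code}")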
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc567caf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc567ca9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc567532d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc567630464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc567630464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc567630464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc567531b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc5675de90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc5674d6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc5674d6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc5674d6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc56762de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc56762de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (10.36s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_77cyj8sd/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5b70daf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5b70da9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f5b70632d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5b70730464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5b70730464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5b70730464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f5b70631b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f5b706de90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f5b705d6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f5b705d6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f5b705d6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f5b7072de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f5b7072de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:81.17s - recipes:0.61s test:80.00s recipes:0.47s) [fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (12.39s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o9qvu7cl/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7faa39c0f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7faa39c09500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7faa39492d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7faa39590464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7faa39590464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7faa39590464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7faa39491b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7faa3953e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7faa39436e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7faa39436e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7faa39436c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7faa3958de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7faa3958de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (10.72s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_biqzls4n/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc65946f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc659469500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc658cf2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc658df0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc658df0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc658df0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc658cf1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc658d9e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc658c96e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc658c96e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc658c96c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc658dede0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc658dede0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (10.53s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pfcyb20u/topic_3.txt' has failed with code 100. 
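The [fail] records in this suite differ only in the test name, the per-test scratch directory, and ASLR-shifted frame addresses; the leak summary is nearly constant (423744 bytes in 8243 allocations, with a couple of slightly smaller variants). For triage, a log shaped like this collapses well by leak signature; a hypothetical stdlib-only helper (the script itself is illustrative, while the patterns it matches are taken from the records above):

# Hypothetical triage script: collapses the repeated [fail] records in a
# log like this one into one line per AddressSanitizer leak signature.
import re
import sys
from collections import Counter

FAIL_RE = re.compile(r"\[fail\] (\S+::test\[[^\]]+\])")
LEAK_RE = re.compile(
    r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
)

def summarize(log_text: str) -> None:
    tests = FAIL_RE.findall(log_text)
    leaks = Counter(LEAK_RE.findall(log_text))
    print(f"{len(tests)} failing tests, {len(leaks)} leak signatures")
    for (leaked, allocs), n in leaks.most_common():
        print(f"  {n:3d} x {leaked} byte(s) leaked in {allocs} allocation(s)")

if __name__ == "__main__":
    with open(sys.argv[1], encoding="utf-8", errors="replace") as f:
        summarize(f.read())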
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fde491ff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fde491f9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fde48a82d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fde48b80464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fde48b80464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fde48b80464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fde48a81b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fde48b2e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fde48a26e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fde48a26e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fde48a26c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fde48b7de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fde48b7de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423457 byte(s) leaked in 8237 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (12.14s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_90sehe63/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f338a11f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f338a119500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f33899a2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f3389aa0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f3389aa0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f3389aa0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f33899a1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f3389a4e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f3389946e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f3389946e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f3389946c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f3389a9de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f3389a9de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (11.19s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_knaitv20/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0d0ebdf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0d0ebd9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0d0e462d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0d0e560464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0d0e560464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0d0e560464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0d0e461b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0d0e50e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0d0e406e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0d0e406e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0d0e406c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0d0e55de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0d0e55de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (9.69s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5zsur613/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fdab526f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fdab5269500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fdab4af2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fdab4bf0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fdab4bf0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fdab4bf0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fdab4af1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fdab4b9e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fdab4a96e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fdab4a96e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fdab4a96c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fdab4bede0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fdab4bede0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423552 byte(s) leaked in 8239 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (10.02s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ws9p_li9/topic_3.txt' has failed with code 100. 
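Each failing command is the same fqrun invocation modulo its scratch directory: an explain-only run over query_0.sql with four emulated PQ topics (two inputs, two outputs) mapped to local files via --emulate-pq=name@file. An illustrative builder for reproducing such a command locally; every flag below appears verbatim in the log, while the helper itself and its parameters are hypothetical:

# Illustrative command builder (hypothetical helper; flags copied from the
# failing invocations above). Reproducing one test outside yatest mostly
# means pointing these paths at a scratch directory of your own.
from pathlib import Path

def build_fqrun_cmd(fqrun: str, workdir: Path, query: Path,
                    topics: dict[str, Path]) -> list[str]:
    cmd = [
        fqrun,
        "--exclude-linked-udfs",
        "--action=explain",
        f"--cfg={workdir / 'fq_config.conf'}",
        f"--result-file={workdir / 'results.txt'}",
        f"--ast-file={workdir / 'ast.txt'}",
        f"--plan-file={workdir / 'plan.json'}",
        f"--log-file={workdir / 'log.txt'}",
        f"--udfs-dir={workdir / 'yql_udfs'}",
        "--result-format=full-proto",
        "--canonical-output",
        f"--query={query}",
    ]
    # One --emulate-pq=<topic>@<file> per emulated topic, as in the log.
    cmd += [f"--emulate-pq={name}@{path}" for name, path in sorted(topics.items())]
    return cmd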
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd3add4f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd3add49500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd3ad5d2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd3ad6d0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd3ad6d0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd3ad6d0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd3ad5d1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd3ad67e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd3ad576e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd3ad576e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd3ad576c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd3ad6cde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd3ad6cde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:80.50s - recipes:0.55s test:79.38s recipes:0.48s) [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (12.53s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1x9j5mu7/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f8b425af9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f8b425a9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f8b41e32d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f8b41f30464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f8b41f30464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f8b41f30464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f8b41e31b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f8b41ede90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f8b41dd6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f8b41dd6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f8b41dd6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f8b41f2de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f8b41f2de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (10.64s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") 
ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c4vy4xnr/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0fe8f7f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0fe8f79500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0fe8802d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0fe8900464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0fe8900464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0fe8900464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0fe8801b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0fe88ae90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0fe87a6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0fe87a6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0fe87a6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0fe88fde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0fe88fde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (9.93s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_b8sg72/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f25c01ef9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f25c01e9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f25bfa72d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f25bfb70464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f25bfb70464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f25bfb70464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f25bfa71b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f25bfb1e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f25bfa16e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f25bfa16e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f25bfa16c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f25bfb6de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f25bfb6de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (12.35s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i8x5nhnx/topic_3.txt' has failed with code 100. 
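Note the shape every failure in this suite shares: yql_exec() calls yatest.common.process.execute() with check_exit_code=False, yet the test still fails, because wait() finalises the process and verify_sanitize_errors() raises ExecutionError whenever an AddressSanitizer report is present, independently of the exit-code check. A minimal sketch of that control flow (run_checked and SanitizerError are hypothetical stand-ins, not the yatest implementation):

    # Sketch of the failure mechanics seen in the tracebacks above;
    # run_checked/SanitizerError are our own names, not yatest's.
    import subprocess

    class SanitizerError(RuntimeError):
        """Child process printed an AddressSanitizer report."""

    def run_checked(cmd, check_exit_code=False):
        # Capture stderr so it can be scanned after the process exits.
        proc = subprocess.run(cmd, capture_output=True, text=True)
        if check_exit_code and proc.returncode != 0:
            raise RuntimeError("exit code %d" % proc.returncode)
        # Mirrors verify_sanitize_errors(): this check runs even when the
        # exit code is deliberately ignored, which is why these tests fail
        # with ExecutionError despite check_exit_code=False.
        if "SUMMARY: AddressSanitizer" in (proc.stderr or ""):
            raise SanitizerError(proc.stderr)
        return proc
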
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fbd53dff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fbd53df9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fbd53682d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fbd53780464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fbd53780464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fbd53780464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fbd53681b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fbd5372e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fbd53626e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fbd53626e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fbd53626c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fbd5377de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fbd5377de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (11.57s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gl2_ptab/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd8d151f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd8d1519500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd8d0da2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd8d0ea0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd8d0ea0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd8d0ea0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd8d0da1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd8d0e4e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd8d0d46e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd8d0d46e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd8d0d46c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd8d0e9de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd8d0e9de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423005 byte(s) leaked in 8227 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (9.67s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u4le8e4l/topic_3.txt' has failed with code 100. 
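Every failure in this suite ends with the same leak stack rooted in Python's import machinery under sitecustomize. If that leak were triaged as a known runtime leak rather than a product bug, LeakSanitizer can be told to ignore it through a suppressions file matched against function names in the leak stack; a sketch follows (assumption: suppressing rather than fixing is acceptable here; the wrapper and file handling are illustrative, the frame names are taken from the report itself):

    # Sketch, assuming the leak is accepted as a known Python-runtime leak.
    # LSAN matches each `leak:<pattern>` line against function names in the
    # reported stack.
    import os, subprocess, tempfile

    SUPPRESSIONS = "leak:PyImport_ImportModuleLevelObject\nleak:__Pyx_Import\n"

    def run_with_lsan_suppressions(cmd):
        with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
            f.write(SUPPRESSIONS)
            supp = f.name
        env = dict(os.environ)
        env["LSAN_OPTIONS"] = "suppressions=%s:print_suppressions=0" % supp
        return subprocess.run(cmd, env=env)
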
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7faafed3f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7faafed39500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7faafe5c2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7faafe6c0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7faafe6c0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7faafe6c0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7faafe5c1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7faafe66e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7faafe566e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7faafe566e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7faafe566c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7faafe6bde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7faafe6bde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (9.69s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1e4z_v80/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7257a6f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7257a69500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f72572f2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f72573f0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f72573f0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f72573f0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f72572f1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f725739e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7257296e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7257296e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7257296c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f72573ede0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f72573ede0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:79.24s - recipes:0.48s test:78.21s recipes:0.46s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (12.68s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jlf7x6wq/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7c2714f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7c27149500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7c269d2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7c26ad0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7c26ad0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7c26ad0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7c269d1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7c26a7e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7c26976e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7c26976e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7c26976c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7c26acde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7c26acde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (9.98s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__yz8tecq/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa66a64f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa66a649500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa669ed2d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa669fd0464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa669fd0464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa669fd0464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa669ed1b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa669f7e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa669e76e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa669e76e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa669e76c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa669fcde0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa669fcde0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (10.30s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8vgjacfc/topic_3.txt' has failed with code 100. 
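The fqrun command line above is long but regular: one scratch directory per test holds the config, query, and output files, plus one --emulate-pq=<topic>@<dump-file> argument per emulated topic. A sketch of how such an argv can be assembled (build_fqrun_cmd is a hypothetical helper; only the flag names are taken from this log):

    # Hypothetical helper mirroring the invocations in this log.
    from pathlib import Path

    def build_fqrun_cmd(fqrun, work, query, topics):
        cmd = [
            str(fqrun),
            "--exclude-linked-udfs",
            "--action=explain",
            "--cfg=%s" % (work / "fq_config.conf"),
            "--result-file=%s" % (work / "results.txt"),
            "--ast-file=%s" % (work / "ast.txt"),
            "--plan-file=%s" % (work / "plan.json"),
            "--log-file=%s" % (work / "log.txt"),
            "--result-format=full-proto",
            "--canonical-output",
            "--query=%s" % query,
        ]
        # One flag per emulated PQ topic: --emulate-pq=<topic-name>@<dump-file>.
        cmd += ["--emulate-pq=%s@%s" % (n, p) for n, p in topics.items()]
        return cmd
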
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7efcf74ff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7efcf74f9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7efcf6d82d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7efcf6e80464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7efcf6e80464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7efcf6e80464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7efcf6d81b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7efcf6e2e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7efcf6d26e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7efcf6d26e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7efcf6d26c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7efcf6e7de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7efcf6e7de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (12.07s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_e3x7vm_6/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f22ed5ff9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f22ed5f9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f22ece82d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f22ecf80464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f22ecf80464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f22ecf80464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f22ece81b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f22ecf2e90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f22ece26e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f22ece26e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f22ece26c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f22ecf7de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f22ecf7de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423601 byte(s) leaked in 8240 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (10.82s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g72s93q6/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f01af38f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f01af389500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f01aec12d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f01aed10464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f01aed10464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f01aed10464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f01aec11b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f01aecbe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f01aebb6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f01aebb6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f01aebb6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f01aed0de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f01aed0de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (9.47s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nhydrko8/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f52419cf9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f52419c9500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f5241252d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5241350464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5241350464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5241350464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f5241251b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f52412fe90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f52411f6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f52411f6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f52411f6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f524134de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f524134de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423005 byte(s) leaked in 8227 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (9.69s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kuzd3pa6/topic_3.txt' has failed with code 100. 
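The suite closes below at 28 failures. Before filing, it is worth confirming that they share one signature; the leak sizes vary only slightly across tests (423744 bytes/8243 allocations vs 423005/8227), which is consistent with a single root cause. A sketch that aggregates the sanitizer summaries from the per-test logs in the Logsdir shown above:

    # Sketch: collect AddressSanitizer summaries from the per-test logs to
    # confirm a single shared leak signature.
    import collections, pathlib

    def leak_summaries(logsdir):
        counts = collections.Counter()
        for log in pathlib.Path(logsdir).glob("*.log"):
            for line in log.read_text(errors="replace").splitlines():
                if line.startswith("SUMMARY: AddressSanitizer"):
                    counts[line.strip()] += 1
        return counts
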
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f8380d8f9e4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f8380d89500 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f8380612d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f8380710464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f8380710464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f8380710464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f8380611b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f83806be90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f83805b6e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f83805b6e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f83805b6c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f838070de0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f838070de0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize
ydb/tests/functional/serverless [size:medium] nchunks:10
------ [test_serverless.py 0/10] chunk ran 1 test (total:104.69s - test:104.29s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (99.42s)
ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)
During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'):
E AssertionError: Regex pattern did not match.
E Regex: '.*DISK_SPACE_EXHAUSTED.*'
E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ [test_serverless.py 1/10] chunk ran 1 test (total:104.76s - test:104.39s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (99.42s)
ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)
During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'):
E AssertionError: Regex pattern did not match.
E Regex: '.*DISK_SPACE_EXHAUSTED.*'
E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ FAIL: 2 - FAIL ydb/tests/functional/serverless
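Note: both disk-quota failures above are assertion drift rather than a server-side regression: the write is rejected as expected (issue_code: 2033), but the error text no longer contains the literal status name DISK_SPACE_EXHAUSTED that the test's regex greps for. A minimal sketch of a more tolerant assertion, reusing the names visible in the traceback (IOLoop, async_write_key, and path come from test_serverless.py; the widened pattern is an illustration, not the committed fix):

    import pytest
    import ydb
    from tornado.ioloop import IOLoop

    # async_write_key and path are defined in test_serverless.py.
    # pytest.raises(match=...) uses re.search, so no leading/trailing .* is
    # needed; match the stable human-readable message as well as the old
    # status-name literal.
    with pytest.raises(ydb.Unavailable,
                       match=r'(?i)disk space exhausted|DISK_SPACE_EXHAUSTED'):
        IOLoop.current().run_sync(
            lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))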
ydb/tests/olap/column_family/compression [size:medium]
------ sole chunk ran 2 tests (total:619.60s - test:600.04s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 615.85s
alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr
[timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (615.85s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression
ydb/tests/olap/s3_import [size:medium]
------ sole chunk ran 1 test (total:621.46s - test:600.02s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 617.74s
Info: Test run has exceeded 8.0G (8388608K) memory limit with 12.1G (12724212K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss   ref   pdirt
1011516 44.9M 44.9M 6.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1011634 36.5M 24.6M 12.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1011649 583M  586M  501M  └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
1013122 11.0G 11.0G 10.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/t
1014900 507M  507M  472M  └─ moto_server s3 --port 10607
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (617.74s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
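Note: the runner's hint above names the knobs for the chunks that died here; both the RAM budget and the chunk timeout are declared in the suite's ya.make. A sketch of what raising them could look like (REQUIREMENTS(ram:X) is quoted from the hint; the SIZE/TIMEOUT macros and the concrete values are illustrative assumptions, not the repository's actual settings):

    # ydb/tests/olap/s3_import/ya.make -- illustrative fragment only
    SIZE(MEDIUM)
    TIMEOUT(1200)        # assumed value: lift the 600s chunk budget
    REQUIREMENTS(ram:16) # from the hint: REQUIREMENTS(ram:X); the run peaked at 12.1G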
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 3 tests (total:622.01s - test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 523.17s
test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 94.28s
test_alter_tiering.py::TestAlterTiering::test[many_tables] test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
[timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (523.17s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario
ydb/tests/olap/ttl_tiering [size:medium] nchunks:2
------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:610.78s - test:600.02s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 607.20s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (607.20s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ [ttl_delete_s3.py] chunk ran 2 tests (total:616.01s - test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 612.37s
ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk.
Info: Test run has exceeded 8.0G (8388608K) memory limit with 10.6G (11103876K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss   ref   pdirt
1011510 44.9M 44.9M 6.5M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1011647 39.7M 28.1M 15.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1011672 830M  834M  746M  └─ ydb-tests-olap-ttl_tiering --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do
1013154 7.2G  7.2G  6.6G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
1016025 396M  0b    0b    └─ moto_server s3 --port 2546
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (612.37s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering
ydb/core/keyvalue/ut_trace [size:medium] nchunks:5
------ [0/5] chunk ran 1 test (total:5.57s - test:5.54s)
[fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.05s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ??
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:5.67s - test:5.64s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.05s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:5.86s - test:5.84s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.07s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:5.47s - test:5.44s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.03s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:9.99s - test:9.96s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==1018128==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018e0e20d bp 0x7fff2e9ea5e0 sp 0x7fff2e9ea440 T0) ==1018128==The signal is caused by a READ memory access. ==1018128==Hint: address points to the zero page. 
2025-04-17T19:16:12.144026Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7494363025489987738:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-17T19:16:12.144107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; #0 0x18e0e20d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18e0e20d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18e0e20d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18e0e20d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18e0e20d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18e33327 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18e33327 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18e33327 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18e33327 in ..[snippet truncated].. 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x197855d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x197855d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19754f68 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #14 0x18e321d3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19756835 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #16 0x1977fb4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fd815223d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fd815223e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x16571028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16571028) (BuildId: 1bdc4290ca22bc21b0df7e2b25fc7bf1adbeab0b) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==1018128==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:44.11s - test:44.09s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (40.13s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void 
NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198368DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x19CFB43F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x193D3478 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x193E6487 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x193E6487 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x193E6487 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x193E6487 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19D32635 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19D32635 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19D32635 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x19D01FC8 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x193E560B 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19D03895 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19D2CBAC 15. ??:0: ?? @ 0x7FB90A101D8F 16. ??:0: ?? @ 0x7FB90A101E3F 17. ??:0: ?? @ 0x166DE028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 1 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:18.29s - test:18.24s) [fail] KqpSinkMvcc::OlapMultiSinks [default-linux-x86_64-release-asan] (14.61s) assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:588, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]" 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F 2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:588: CompareYson @ 0x4900DB47 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x1900EF4A 4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18FDD558 6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18FE36B7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18FE36B7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FE36B7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FE36B7 11. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955 12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955 13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8 15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18FE2883 16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5 17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC 18. ??:0: ?? @ 0x7F7EB220ED8F 19. ??:0: ?? @ 0x7F7EB220EE3F 20. ??:0: ?? @ 0x16619028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.out ------ [1/50] chunk ran 1 test (total:17.90s - test:17.86s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (13.90s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1947664B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1993E75F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x1903269E 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18FA28BA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x190112E8 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x19018767 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x19018767 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x19018767 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x19018767 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19975955 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19975955 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19975955 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x199452E8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x19017933 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x19946BB5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1996FECC 17. ??:0: ?? @ 0x7F49BDAB8D8F 18. ??:0: ?? @ 0x7F49BDAB8E3F 19. ??:0: ?? @ 0x16619028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ FAIL: 2 - FAIL ydb/core/kqp/ut/tx ydb/core/quoter/ut [size:medium] ------ sole chunk ran 1 test (total:127.59s - test:127.54s) [fail] QuoterWithKesusTest::KesusRecreation [default-linux-x86_64-release-asan] (123.10s) (TWithBackTrace) Event queue is still empty.ydb/library/actors/testlib/test_runtime.cpp:1375: TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5 NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:24 NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:0 __root at /-S/contrib/libs/cxxsupp/libcxx/include/__tree:966:54 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 TIntrusivePtr at /-S/util/generic/ptr.h:510:11 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at 
/-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.KesusRecreation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.KesusRecreation.out ------ FAIL: 1 - FAIL ydb/core/quoter/ut ydb/core/statistics/aggregator/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:564.32s - test:564.29s) [fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (560.00s) (TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5 NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24 DoDestroy at /-S/util/generic/ptr.h:237:13 operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out ------ FAIL: 1 - FAIL ydb/core/statistics/aggregator/ut ydb/core/tx/columnshard/ut_schema [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:135.32s - test:134.68s) [crashed] TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [default-linux-x86_64-release-asan] (114.00s) Test crashed (return code: 100) ==1020337==ERROR: LeakSanitizer: detected memory leaks Too many leaks! Only the first 5000 leaks encountered will be reported. 
Indirect leak of 31192 byte(s) in 557 object(s) allocated from: #0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2a2853a4 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x2a2853a4 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x2a2853a4 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x2a2853a4 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:281:16 #5 0x2a2853a4 in __construct_node /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1812:21 #6 0x2a2853a4 in std::__y1::pair*, long>, bool> std::__y1::__tree>::__emplace_unique_key_args(NKikimr::TLogoBlobID const&, NKikimr::TLogoBlobID const&) /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1779:25 #7 0x2a27c8b9 in __emplace_unique_extract_key /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1054:12 #8 0x2a27c8b9 in __emplace_unique /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1032:12 #9 0x2a27c8b9 in emplace, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #37 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #38 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #39 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #40 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #41 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #42 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #43 0x7f423a6b4d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 2628610 byte(s) leaked in 62005 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.out ------ [1/60] chunk ran 1 test (total:137.17s - test:136.50s) [crashed] TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [default-linux-x86_64-release-asan] (115.64s) Test crashed (return code: 100) ==1018520==ERROR: LeakSanitizer: detected memory leaks Too many leaks! Only the first 5000 leaks encountered will be reported. 
Indirect leak of 31192 byte(s) in 557 object(s) allocated from: #0 0x101124fd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2a2853a4 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x2a2853a4 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x2a2853a4 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x2a2853a4 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:281:16 #5 0x2a2853a4 in __construct_node /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1812:21 #6 0x2a2853a4 in std::__y1::pair*, long>, bool> std::__y1::__tree>::__emplace_unique_key_args(NKikimr::TLogoBlobID const&, NKikimr::TLogoBlobID const&) /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1779:25 #7 0x2a27c8b9 in __emplace_unique_extract_key /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1054:12 #8 0x2a27c8b9 in __emplace_unique /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1032:12 #9 0x2a27c8b9 in emplace, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #36 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #37 0x108ce325 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #38 0x108ce325 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #39 0x108a6d78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #40 0xffef703 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1075:1 #41 0x108a8645 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #42 0x108c889c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #43 0x7f6a5eaced8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 2628850 byte(s) leaked in 62008 allocation(s). 
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.out ------ FAIL: 2 - CRASHED ydb/core/tx/columnshard/ut_schema ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4 ------ [0/4] chunk ran 1 test (total:136.25s - test:136.17s) [fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (131.48s) assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out ------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup ydb/core/tx/tiering/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:28.49s - test:28.46s) [crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out ------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut ydb/services/ydb/backup_ut [size:medium] nchunks:10 ------ [0/10] chunk ran 1 test (total:10.00s - test:9.97s) [fail] BackupRestoreS3::RestoreViewDependentOnAnotherView [default-linux-x86_64-release-asan] (5.16s) assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17 NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9 ~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9 (anonymous namespace)::TestViewDependentOnAnotherViewIsRestored(char const*, char const*, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewDependentOnAnotherView.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewDependentOnAnotherView.out ------ [1/10] chunk ran 1 test (total:10.01s - test:9.98s) [fail] BackupRestoreS3::RestoreViewQueryText [default-linux-x86_64-release-asan] (4.96s) assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17 NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9 ~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9 (anonymous namespace)::TestViewQueryTextIsPreserved(char const*, NYdb::Dev::NView::TViewClient&, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewQueryText.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.RestoreViewQueryText.out ------ [2/10] chunk ran 1 test (total:10.19s - test:10.16s) [fail] BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [default-linux-x86_64-release-asan] (5.18s) assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17 NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9 ~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9 (anonymous namespace)::TestRestoreTableWithIndex(char const*, char const*, NKikimrSchemeOp::EIndexType, bool, NYdb::Dev::NTable::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 NTestSuiteBackupRestoreS3::TTestCaseTestAllIndexTypes::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2659:5 operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree.out ------ [5/10] chunk ran 1 test (total:10.39s - test:10.36s) [fail] BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [default-linux-x86_64-release-asan] (5.41s) assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2227, bool NTestSuiteBackupRestoreS3::WaitForOperation(NOperation::TOperationClient &, NOperationId::TOperationId, int, TDuration) [TOperation = NYdb::NExport::TExportToS3Response]: (result.Status().GetStatus() == EStatus::SUCCESS) failed: (UNDETERMINED != SUCCESS) , with diff: (|S)U(ND|CC)E(TERMINED|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 bool NTestSuiteBackupRestoreS3::WaitForOperation(NYdb::Dev::NOperation::TOperationClient&, NKikimr::Dev::NOperationId::TOperationId, int, TDuration) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2224:17 NTestSuiteBackupRestoreS3::ExportToS3(NYdb::Dev::NScheme::TSchemeClient&, NYdb::Dev::NExport::TExportClient&, unsigned short, NYdb::Dev::NOperation::TOperationClient&, TBasicString> const&, TBasicString> const&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:2278:9 ~shared_ptr at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:699:9 (anonymous namespace)::TestViewOutputIsPreserved(char const*, NYdb::Dev::NQuery::TSession&, std::__y1::function&&, std::__y1::function&&) at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 NTestSuiteBackupRestoreS3::TTestCaseTestAllSchemeObjectTypes::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:17 operator() at /-S/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllSchemeObjectTypes-EPathTypeView.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/BackupRestoreS3.TestAllSchemeObjectTypes-EPathTypeView.out ------ FAIL: 2 - GOOD, 4 - FAIL ydb/services/ydb/backup_ut ydb/services/ydb/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:9.71s - test:9.69s) [fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (3.99s) assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13 operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
Total 18 suites:
1 - GOOD
13 - FAIL
4 - TIMEOUT
Total 68 tests:
11 - GOOD
45 - FAIL
3 - NOT_LAUNCHED
5 - TIMEOUT
4 - CRASHED
Cache efficiency ratio is 98.97% (42842 of 43286). Local: 389 (0.90%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 42453 (98.08%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[622406 ms] [TM] [rnd-oez2x2fg4edzhrzg asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 0 (1744917368725), finished: 622406 (1744917991131)]
Time from start: 650785.2429199219 ms, time elapsed by graph 622406 ms, time diff 28379.242919921875 ms.
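Note: a quick consistency check of the cache summary above; the three percentages are all fractions of the 43286 total results (plain arithmetic, no ya internals assumed):

    # Reproduce the "Cache efficiency ratio" line from its own counters.
    total = 43286
    local, avoided = 389, 42453
    hits = local + avoided                                   # 42842, as reported
    print(f"{hits} of {total} = {100 * hits / total:.2f}%")  # 98.97%
    print(f"local   {100 * local / total:.2f}%")             # 0.90%
    print(f"avoided {100 * avoided / total:.2f}%")           # 98.08%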
The longest 10 tasks:
[622406 ms] [TM] [rnd-oez2x2fg4edzhrzg asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1744917368725, finished: 1744917991131]
[621794 ms] [TM] [rnd-evt9uuor1q5l4569 asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1744917353151, finished: 1744917974945]
[619970 ms] [TM] [rnd-ntettmv58c4l1w6n asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1744917368704, finished: 1744917988674]
[616386 ms] [TM] [rnd-3148327406789775913 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744917353134, finished: 1744917969520]
[611127 ms] [TM] [rnd-4635475225955403552 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744917353134, finished: 1744917964261]
[564735 ms] [TM] [rnd-1230803618020042803 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1744917353319, finished: 1744917918054]
[347918 ms] [TM] [rnd-36568712788173369 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1744917368972, finished: 1744917716890]
[137590 ms] [TM] [rnd-13651287166747748910 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_schema/unittest [started: 1744917365852, finished: 1744917503442]
[136629 ms] [TM] [rnd-4204934372024882643 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_incremental_backup/unittest [started: 1744917360862, finished: 1744917497491]
[135729 ms] [TM] [rnd-4383007389729190024 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_schema/unittest [started: 1744917367738, finished: 1744917503467]
Total time by type:
[5900512 ms] [TM] [count: 430, ave time 13722.12 msec]
[  66054 ms] [prepare:get from local cache] [count: 389, ave time 169.80 msec]
[  23385 ms] [prepare:AC] [count: 2, ave time 11692.50 msec]
[  18248 ms] [prepare:put to dist cache] [count: 382, ave time 47.77 msec]
[  10806 ms] [TA] [count: 14, ave time 771.86 msec]
[   6101 ms] [prepare:bazel-store] [count: 1, ave time 6101.00 msec]
[   3974 ms] [prepare:tools] [count: 16, ave time 248.38 msec]
[   1481 ms] [prepare:clean] [count: 3, ave time 493.67 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 5911318 ms (100.00%)
Total run tasks time - 5911318 ms
Configure time - 21.0 s
Statistics overhead 1407 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json
Ok
+ echo 0
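Note: with 68 tests spread over 18 suites, the junit.xml dumped above is the easiest artifact to slice when triaging reruns. A small sketch, assuming the file follows the standard JUnit schema that the --junit flag produces (the path is taken from the "Info: Dump junit report" line; the grouping is illustrative):

    import xml.etree.ElementTree as ET
    from collections import Counter

    # Path quoted from the log's "Info: Dump junit report to ..." line.
    JUNIT = "/home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml"

    outcomes = Counter()
    for case in ET.parse(JUNIT).getroot().iter("testcase"):
        if case.find("failure") is not None:
            outcomes["fail"] += 1
        elif case.find("error") is not None:
            outcomes["error"] += 1
        elif case.find("skipped") is not None:
            outcomes["skipped"] += 1
        else:
            outcomes["pass"] += 1
    print(dict(outcomes))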